From c02578b689987713a9f261291359339fbb87d703 Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Thu, 8 Jan 2026 20:59:45 +0000 Subject: [PATCH] Regenerate client from commit 0a38908 of spec repo --- .generator/schemas/v2/openapi.yaml | 32 +- examples/v2/reference-tables/UpsertRows.java | 6 +- .../BatchUpsertRowsRequestDataAttributes.java | 19 +- ...hUpsertRowsRequestDataAttributesValue.java | 284 ++++++++++++++++++ .../client/v2/api/reference_tables.feature | 6 +- 5 files changed, 326 insertions(+), 21 deletions(-) create mode 100644 src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributesValue.java diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index a4628b43a71..e14e4a498bb 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -7075,19 +7075,27 @@ components: BatchUpsertRowsRequestDataAttributes: description: Attributes containing row data values for row creation or update operations. + example: + values: {} properties: values: additionalProperties: - x-required-field: true - description: Key-value pairs representing row data, where keys are field - names from the schema. - example: - example_key_value: primary_key_value - name: row_name + $ref: '#/components/schemas/BatchUpsertRowsRequestDataAttributesValue' + description: Key-value pairs representing row data, where keys are schema + field names and values match the corresponding column types. type: object required: - values type: object + BatchUpsertRowsRequestDataAttributesValue: + description: Types allowed for Reference Table row values. + oneOf: + - example: row_name + type: string + - example: 25 + format: int32 + maximum: 2147483647 + type: integer BillConfig: description: Bill config. 
properties: @@ -79331,6 +79339,18 @@ paths: requestBody: content: application/json: + examples: + happy_path: + summary: Upsert a row with mixed string and int values + value: + data: + - attributes: + values: + age: 25 + example_key_value: primary_key_value + name: row_name + id: primary_key_value + type: row schema: $ref: '#/components/schemas/BatchUpsertRowsRequestArray' required: true diff --git a/examples/v2/reference-tables/UpsertRows.java b/examples/v2/reference-tables/UpsertRows.java index c336192695f..6b075311da2 100644 --- a/examples/v2/reference-tables/UpsertRows.java +++ b/examples/v2/reference-tables/UpsertRows.java @@ -21,11 +21,7 @@ public static void main(String[] args) { Collections.singletonList( new BatchUpsertRowsRequestData() .attributes( - new BatchUpsertRowsRequestDataAttributes() - .values( - Map.ofEntries( - Map.entry("example_key_value", "primary_key_value"), - Map.entry("name", "row_name")))) + new BatchUpsertRowsRequestDataAttributes().values(Map.ofEntries())) .id("primary_key_value") .type(TableRowResourceDataType.ROW))); diff --git a/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributes.java b/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributes.java index 80d0068fd1f..795508825ae 100644 --- a/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributes.java +++ b/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributes.java @@ -24,38 +24,43 @@ public class BatchUpsertRowsRequestDataAttributes { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_VALUES = "values"; - private Map values = new HashMap(); + private Map values = + new HashMap(); public BatchUpsertRowsRequestDataAttributes() {} @JsonCreator public BatchUpsertRowsRequestDataAttributes( - @JsonProperty(required = true, value = JSON_PROPERTY_VALUES) Map values) { + @JsonProperty(required = true, value = JSON_PROPERTY_VALUES) + Map values) { this.values = values; } - public BatchUpsertRowsRequestDataAttributes values(Map values) { + public BatchUpsertRowsRequestDataAttributes values( + Map values) { this.values = values; return this; } - public BatchUpsertRowsRequestDataAttributes putValuesItem(String key, Object valuesItem) { + public BatchUpsertRowsRequestDataAttributes putValuesItem( + String key, BatchUpsertRowsRequestDataAttributesValue valuesItem) { this.values.put(key, valuesItem); return this; } /** - * Key-value pairs representing row data, where keys are field names from the schema. + * Key-value pairs representing row data, where keys are schema field names and values match the + * corresponding column types. * * @return values */ @JsonProperty(JSON_PROPERTY_VALUES) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public Map getValues() { + public Map getValues() { return values; } - public void setValues(Map values) { + public void setValues(Map values) { this.values = values; } diff --git a/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributesValue.java b/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributesValue.java new file mode 100644 index 00000000000..5094cf96dae --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/BatchUpsertRowsRequestDataAttributesValue.java @@ -0,0 +1,284 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + BatchUpsertRowsRequestDataAttributesValue + .BatchUpsertRowsRequestDataAttributesValueDeserializer.class) +@JsonSerialize( + using = + BatchUpsertRowsRequestDataAttributesValue + .BatchUpsertRowsRequestDataAttributesValueSerializer.class) +public class BatchUpsertRowsRequestDataAttributesValue extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(BatchUpsertRowsRequestDataAttributesValue.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class BatchUpsertRowsRequestDataAttributesValueSerializer + extends StdSerializer { + public BatchUpsertRowsRequestDataAttributesValueSerializer( + Class t) { + super(t); + } + + public BatchUpsertRowsRequestDataAttributesValueSerializer() { + this(null); + } + + @Override + public void serialize( + BatchUpsertRowsRequestDataAttributesValue value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class BatchUpsertRowsRequestDataAttributesValueDeserializer + extends StdDeserializer { + public BatchUpsertRowsRequestDataAttributesValueDeserializer() { + this(BatchUpsertRowsRequestDataAttributesValue.class); + } + + public BatchUpsertRowsRequestDataAttributesValueDeserializer(Class vc) { + super(vc); + } + + @Override + public BatchUpsertRowsRequestDataAttributesValue deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize String + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if 
(String.class.equals(Integer.class) + || String.class.equals(Long.class) + || String.class.equals(Float.class) + || String.class.equals(Double.class) + || String.class.equals(Boolean.class) + || String.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((String.class.equals(Integer.class) || String.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((String.class.equals(Float.class) || String.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (String.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (String.class.equals(String.class) && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(String.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + deserialized = tmp; + match++; + + log.log(Level.FINER, "Input data matches schema 'String'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log(Level.FINER, "Input data does not match schema 'String'", e); + } + + // deserialize Integer + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (Integer.class.equals(Integer.class) + || Integer.class.equals(Long.class) + || Integer.class.equals(Float.class) + || Integer.class.equals(Double.class) + || Integer.class.equals(Boolean.class) + || Integer.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((Integer.class.equals(Integer.class) || Integer.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((Integer.class.equals(Float.class) || Integer.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (Integer.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (Integer.class.equals(String.class) && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(Integer.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + deserialized = tmp; + match++; + + log.log(Level.FINER, "Input data matches schema 'Integer'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log(Level.FINER, "Input data does not match schema 'Integer'", e); + } + + BatchUpsertRowsRequestDataAttributesValue ret = + new BatchUpsertRowsRequestDataAttributesValue(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public BatchUpsertRowsRequestDataAttributesValue getNullValue(DeserializationContext ctxt) + throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), "BatchUpsertRowsRequestDataAttributesValue cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public BatchUpsertRowsRequestDataAttributesValue() { + super("oneOf", Boolean.FALSE); + } + + public BatchUpsertRowsRequestDataAttributesValue(String o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public BatchUpsertRowsRequestDataAttributesValue(Integer o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put("String", new GenericType() {}); + schemas.put("Integer", new GenericType() {}); + JSON.registerDescendants( + BatchUpsertRowsRequestDataAttributesValue.class, Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return BatchUpsertRowsRequestDataAttributesValue.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: String, Integer + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf(String.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf(Integer.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException("Invalid instance type. Must be String, Integer"); + } + + /** + * Get the actual instance, which can be the following: String, Integer + * + * @return The actual instance (String, Integer) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `String`. If the actual instance is not `String`, the + * ClassCastException will be thrown. + * + * @return The actual instance of `String` + * @throws ClassCastException if the instance is not `String` + */ + public String getString() throws ClassCastException { + return (String) super.getActualInstance(); + } + + /** + * Get the actual instance of `Integer`. If the actual instance is not `Integer`, the + * ClassCastException will be thrown. + * + * @return The actual instance of `Integer` + * @throws ClassCastException if the instance is not `Integer` + */ + public Integer getInteger() throws ClassCastException { + return (Integer) super.getActualInstance(); + } +} diff --git a/src/test/resources/com/datadog/api/client/v2/api/reference_tables.feature b/src/test/resources/com/datadog/api/client/v2/api/reference_tables.feature index 45a2861c15b..230be9e8f4d 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/reference_tables.feature +++ b/src/test/resources/com/datadog/api/client/v2/api/reference_tables.feature @@ -148,7 +148,7 @@ Feature: Reference Tables Scenario: Upsert rows returns "Bad Request" response Given new "UpsertRows" request And request contains "id" parameter from "REPLACE.ME" - And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + And body with value {"data": [{"attributes": {"values": {}}, "id": "primary_key_value", "type": "row"}]} When the request is sent Then the response status is 400 Bad Request @@ -156,7 +156,7 @@ Feature: Reference Tables Scenario: Upsert rows returns "Not Found" response Given new "UpsertRows" request And request contains "id" parameter from "REPLACE.ME" - And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + And body with value {"data": [{"attributes": {"values": {}}, "id": "primary_key_value", "type": "row"}]} When the request is sent Then the response status is 404 Not Found @@ -164,6 +164,6 @@ Feature: Reference Tables Scenario: Upsert rows returns "Rows created or updated successfully" response Given new "UpsertRows" request And request contains "id" parameter from "REPLACE.ME" - And body with value {"data": [{"attributes": {"values": {"example_key_value": "primary_key_value", "name": "row_name"}}, "id": "primary_key_value", "type": "row"}]} + And body with value {"data": [{"attributes": {"values": {}}, "id": "primary_key_value", "type": "row"}]} When the request is sent Then the response status is 200 Rows 
created or updated successfully
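
Usage sketch (not part of the generated diff): how the new oneOf value wrapper can carry the mixed string/int row from the spec's "happy_path" example. The BatchUpsertRowsRequestDataAttributesValue constructors, putValuesItem(...), and the typed getInteger() accessor come from this patch; the ReferenceTablesApi setup, the BatchUpsertRowsRequestArray.data(...) builder, and the upsertRows(...) call are assumed to match the existing examples/v2/reference-tables/UpsertRows.java rather than being shown in this diff.

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v2.api.ReferenceTablesApi;
import com.datadog.api.client.v2.model.BatchUpsertRowsRequestArray;
import com.datadog.api.client.v2.model.BatchUpsertRowsRequestData;
import com.datadog.api.client.v2.model.BatchUpsertRowsRequestDataAttributes;
import com.datadog.api.client.v2.model.BatchUpsertRowsRequestDataAttributesValue;
import com.datadog.api.client.v2.model.TableRowResourceDataType;
import java.util.Collections;

public class UpsertRowsWithTypedValues {
  public static void main(String[] args) throws ApiException {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    // ReferenceTablesApi and upsertRows(...) are assumed from the existing example.
    ReferenceTablesApi apiInstance = new ReferenceTablesApi(defaultClient);

    // Each row value is wrapped in the oneOf container added by this patch;
    // the String and Integer constructors select the matching child schema.
    BatchUpsertRowsRequestDataAttributes attributes =
        new BatchUpsertRowsRequestDataAttributes()
            .putValuesItem(
                "example_key_value",
                new BatchUpsertRowsRequestDataAttributesValue("primary_key_value"))
            .putValuesItem("name", new BatchUpsertRowsRequestDataAttributesValue("row_name"))
            .putValuesItem("age", new BatchUpsertRowsRequestDataAttributesValue(25));

    // Reading a value back goes through the typed accessors on the wrapper.
    Integer age = attributes.getValues().get("age").getInteger();

    BatchUpsertRowsRequestArray body =
        new BatchUpsertRowsRequestArray()
            .data(
                Collections.singletonList(
                    new BatchUpsertRowsRequestData()
                        .attributes(attributes)
                        .id("primary_key_value")
                        .type(TableRowResourceDataType.ROW)));

    // "<table-id>" is a placeholder for the Reference Table id path parameter.
    apiInstance.upsertRows("<table-id>", body);
  }
}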