diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 3a97eab54..37eb184ef 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -8e2bc4c67d9f4c3aed496549fed9f3e29e4874b1 \ No newline at end of file +ed8b5e7ec1f143395503eebae9fbc74cf8c309bc \ No newline at end of file diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml index 16fe6ed5a..b039887b8 100644 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -2,9 +2,12 @@ name: tagging on: + # Manual dispatch. workflow_dispatch: - # Runs at 8:00 UTC on Tuesday, Wednesday, and Thursday. - # To disable this flow, simply comment the schedule section. + # No inputs are required for the manual dispatch. + + # Runs at 8:00 UTC on Tuesday, Wednesday, and Thursday. To enable automated + # tagging for a repository, simply add it to the if block of the tag job. schedule: - cron: '0 8 * * TUE,WED,THU' @@ -15,6 +18,14 @@ concurrency: jobs: tag: + # Only run the tag job if the trigger is manual (workflow_dispatch) or + # the repository has been approved for automated releases. + # + # To disable release for a repository, simply exclude it from the if + # condition. + if: >- + github.event_name == 'workflow_dispatch' || + github.repository == 'databricks/databricks-sdk-go' environment: "release-is" runs-on: group: databricks-deco-testing-runner-group diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index f755b7109..fa52856dc 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,5 @@ ### Internal Changes ### API Changes +* Add `outputs` field for `com.databricks.sdk.service.serving.QueryEndpointResponse`. +* Add `sessionId` field for `com.databricks.sdk.service.sql.QueryInfo`. 
\ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index 9c8d46606..cdd501ecb 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 54 */ +/** Next Id: 72 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java index e8ada5a05..4df4b51e1 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java @@ -35,9 +35,8 @@ public class EndpointSpec { private EndpointSettings settings; /** - * Duration of inactivity after which the compute endpoint is automatically suspended. Supported - * values: -1s (never suspend), 0s (use default), or value should be between 60s and 604800s (1 - * minute to 1 week). + * Duration of inactivity after which the compute endpoint is automatically suspended. If + * specified, it should be between 60s and 604800s (1 minute to 1 week). 
*/ @JsonProperty("suspend_timeout_duration") private Duration suspendTimeoutDuration; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java index d347aa6b5..ef4e29b4b 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java @@ -25,9 +25,8 @@ public class ProjectDefaultEndpointSettings { private Map pgSettings; /** - * Duration of inactivity after which the compute endpoint is automatically suspended. Supported - * values: -1s (never suspend), 0s (use default), or value should be between 60s and 604800s (1 - * minute to 1 week). + * Duration of inactivity after which the compute endpoint is automatically suspended. If + * specified, it should be between 60s and 604800s (1 minute to 1 week). 
*/ @JsonProperty("suspend_timeout_duration") private Duration suspendTimeoutDuration; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java old mode 100755 new mode 100644 index cc5be3bc5..574c2a00d --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointResponse.java @@ -7,6 +7,7 @@ import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.JsonNode; import java.util.Collection; import java.util.Objects; @@ -54,6 +55,10 @@ public class QueryEndpointResponse { @JsonProperty("object") private QueryEndpointResponseObject object; + /** The outputs of the feature serving endpoint. */ + @JsonProperty("outputs") + private Collection outputs; + /** The predictions returned by the serving endpoint. 
*/ @JsonProperty("predictions") private Collection predictions; @@ -127,6 +132,15 @@ public QueryEndpointResponseObject getObject() { return object; } + public QueryEndpointResponse setOutputs(Collection outputs) { + this.outputs = outputs; + return this; + } + + public Collection getOutputs() { + return outputs; + } + public QueryEndpointResponse setPredictions(Collection predictions) { this.predictions = predictions; return this; @@ -165,6 +179,7 @@ public boolean equals(Object o) { && Objects.equals(id, that.id) && Objects.equals(model, that.model) && Objects.equals(object, that.object) + && Objects.equals(outputs, that.outputs) && Objects.equals(predictions, that.predictions) && Objects.equals(servedModelName, that.servedModelName) && Objects.equals(usage, that.usage); @@ -173,7 +188,7 @@ @Override public int hashCode() { return Objects.hash( - choices, created, data, id, model, object, predictions, servedModelName, usage); + choices, created, data, id, model, object, outputs, predictions, servedModelName, usage); } @Override @@ -185,6 +200,7 @@ public String toString() { .add("id", id) .add("model", model) .add("object", object) + .add("outputs", outputs) .add("predictions", predictions) .add("servedModelName", servedModelName) .add("usage", usage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java old mode 100755 new mode 100644 index b9af6b0f9..dc38ff0b5 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java @@ -97,6 +97,13 @@ public class QueryInfo { @JsonProperty("rows_produced") private Long rowsProduced; + /** + * The Spark session UUID that the query ran on. This is either the Spark Connect, DBSQL, or SDP + * session ID. 
+ */ + @JsonProperty("session_id") + private String sessionId; + /** URL to the Spark UI query plan. */ @JsonProperty("spark_ui_url") private String sparkUiUrl; @@ -298,6 +305,15 @@ public Long getRowsProduced() { return rowsProduced; } + public QueryInfo setSessionId(String sessionId) { + this.sessionId = sessionId; + return this; + } + + public String getSessionId() { + return sessionId; + } + public QueryInfo setSparkUiUrl(String sparkUiUrl) { this.sparkUiUrl = sparkUiUrl; return this; @@ -376,6 +392,7 @@ public boolean equals(Object o) { && Objects.equals(queryStartTimeMs, that.queryStartTimeMs) && Objects.equals(queryText, that.queryText) && Objects.equals(rowsProduced, that.rowsProduced) + && Objects.equals(sessionId, that.sessionId) && Objects.equals(sparkUiUrl, that.sparkUiUrl) && Objects.equals(statementType, that.statementType) && Objects.equals(status, that.status) @@ -406,6 +423,7 @@ public int hashCode() { queryStartTimeMs, queryText, rowsProduced, + sessionId, sparkUiUrl, statementType, status, @@ -436,6 +454,7 @@ public String toString() { .add("queryStartTimeMs", queryStartTimeMs) .add("queryText", queryText) .add("rowsProduced", rowsProduced) + .add("sessionId", sessionId) .add("sparkUiUrl", sparkUiUrl) .add("statementType", statementType) .add("status", status)