Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
8e2bc4c67d9f4c3aed496549fed9f3e29e4874b1
ed8b5e7ec1f143395503eebae9fbc74cf8c309bc
15 changes: 13 additions & 2 deletions .github/workflows/tagging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@
name: tagging

on:
# Manual dispatch.
workflow_dispatch:
# Runs at 8:00 UTC on Tuesday, Wednesday, and Thursday.
# To disable this flow, simply comment the schedule section.
# No inputs are required for the manual dispatch.

# Runs at 8:00 UTC on Tuesday, Wednesday, and Thursday. To enable automated
# tagging for a repository, simply add it to the if block of the tag job.
schedule:
- cron: '0 8 * * TUE,WED,THU'

Expand All @@ -15,6 +18,14 @@ concurrency:

jobs:
tag:
# Only run the tag job if the trigger is manual (workflow_dispatch) or
# the repository has been approved for automated releases.
#
# To disable release for a repository, simply exclude it from the if
# condition.
if: >-
github.event_name == 'workflow_dispatch' ||
github.repository == 'databricks/databricks-sdk-go'
environment: "release-is"
runs-on:
group: databricks-deco-testing-runner-group
Expand Down
2 changes: 2 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,5 @@
### Internal Changes

### API Changes
* Add `outputs` field for `com.databricks.sdk.service.serving.QueryEndpointResponse`.
* Add `sessionId` field for `com.databricks.sdk.service.sql.QueryInfo`.
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import com.databricks.sdk.support.Generated;

/** Next Id: 54 */
/** Next Id: 72 */
@Generated
public enum ConnectionType {
BIGQUERY,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,8 @@ public class EndpointSpec {
private EndpointSettings settings;

/**
* Duration of inactivity after which the compute endpoint is automatically suspended. Supported
* values: -1s (never suspend), 0s (use default), or value should be between 60s and 604800s (1
* minute to 1 week).
* Duration of inactivity after which the compute endpoint is automatically suspended. If
* specified, it should be between 60s and 604800s (1 minute to 1 week).
*/
@JsonProperty("suspend_timeout_duration")
private Duration suspendTimeoutDuration;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,8 @@ public class ProjectDefaultEndpointSettings {
private Map<String, String> pgSettings;

/**
* Duration of inactivity after which the compute endpoint is automatically suspended. Supported
* values: -1s (never suspend), 0s (use default), or value should be between 60s and 604800s (1
* minute to 1 week).
* Duration of inactivity after which the compute endpoint is automatically suspended. If
* specified, it should be between 60s and 604800s (1 minute to 1 week).
*/
@JsonProperty("suspend_timeout_duration")
private Duration suspendTimeoutDuration;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import java.util.Collection;
import java.util.Objects;

Expand Down Expand Up @@ -54,6 +55,10 @@ public class QueryEndpointResponse {
@JsonProperty("object")
private QueryEndpointResponseObject object;

/** The outputs of the feature serving endpoint. */
@JsonProperty("outputs")
private Collection<JsonNode> outputs;

/** The predictions returned by the serving endpoint. */
@JsonProperty("predictions")
private Collection<Object> predictions;
Expand Down Expand Up @@ -127,6 +132,15 @@ public QueryEndpointResponseObject getObject() {
return object;
}

/**
 * Sets the outputs of the feature serving endpoint (raw JSON nodes as returned by the service).
 *
 * @param outputs the outputs of the feature serving endpoint
 * @return this {@code QueryEndpointResponse}, for method chaining
 */
public QueryEndpointResponse setOutputs(Collection<JsonNode> outputs) {
this.outputs = outputs;
return this;
}

/**
 * Returns the outputs of the feature serving endpoint.
 *
 * @return the outputs of the feature serving endpoint, as set via {@code setOutputs}
 */
public Collection<JsonNode> getOutputs() {
return outputs;
}

public QueryEndpointResponse setPredictions(Collection<Object> predictions) {
this.predictions = predictions;
return this;
Expand Down Expand Up @@ -165,6 +179,7 @@ public boolean equals(Object o) {
&& Objects.equals(id, that.id)
&& Objects.equals(model, that.model)
&& Objects.equals(object, that.object)
&& Objects.equals(outputs, that.outputs)
&& Objects.equals(predictions, that.predictions)
&& Objects.equals(servedModelName, that.servedModelName)
&& Objects.equals(usage, that.usage);
Expand All @@ -173,7 +188,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
choices, created, data, id, model, object, predictions, servedModelName, usage);
choices, created, data, id, model, object, outputs, predictions, servedModelName, usage);
}

@Override
Expand All @@ -185,6 +200,7 @@ public String toString() {
.add("id", id)
.add("model", model)
.add("object", object)
.add("outputs", outputs)
.add("predictions", predictions)
.add("servedModelName", servedModelName)
.add("usage", usage)
Expand Down
19 changes: 19 additions & 0 deletions databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,13 @@ public class QueryInfo {
@JsonProperty("rows_produced")
private Long rowsProduced;

/**
* The Spark session UUID that the query ran on. This is either the Spark Connect, DBSQL, or SDP
* session ID.
*/
@JsonProperty("session_id")
private String sessionId;

/** URL to the Spark UI query plan. */
@JsonProperty("spark_ui_url")
private String sparkUiUrl;
Expand Down Expand Up @@ -298,6 +305,15 @@ public Long getRowsProduced() {
return rowsProduced;
}

/**
 * Sets the Spark session UUID that the query ran on (a Spark Connect, DBSQL, or SDP session ID).
 *
 * @param sessionId the Spark session UUID
 * @return this {@code QueryInfo}, for method chaining
 */
public QueryInfo setSessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}

/**
 * Returns the Spark session UUID that the query ran on.
 *
 * @return the Spark session UUID, as set via {@code setSessionId}
 */
public String getSessionId() {
return sessionId;
}

public QueryInfo setSparkUiUrl(String sparkUiUrl) {
this.sparkUiUrl = sparkUiUrl;
return this;
Expand Down Expand Up @@ -376,6 +392,7 @@ public boolean equals(Object o) {
&& Objects.equals(queryStartTimeMs, that.queryStartTimeMs)
&& Objects.equals(queryText, that.queryText)
&& Objects.equals(rowsProduced, that.rowsProduced)
&& Objects.equals(sessionId, that.sessionId)
&& Objects.equals(sparkUiUrl, that.sparkUiUrl)
&& Objects.equals(statementType, that.statementType)
&& Objects.equals(status, that.status)
Expand Down Expand Up @@ -406,6 +423,7 @@ public int hashCode() {
queryStartTimeMs,
queryText,
rowsProduced,
sessionId,
sparkUiUrl,
statementType,
status,
Expand Down Expand Up @@ -436,6 +454,7 @@ public String toString() {
.add("queryStartTimeMs", queryStartTimeMs)
.add("queryText", queryText)
.add("rowsProduced", rowsProduced)
.add("sessionId", sessionId)
.add("sparkUiUrl", sparkUiUrl)
.add("statementType", statementType)
.add("status", status)
Expand Down
Loading