ConfigAttributeAccessor.java
@@ -2,9 +2,13 @@

import com.databricks.sdk.support.InternalApi;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

@InternalApi
class ConfigAttributeAccessor {
@@ -63,6 +67,22 @@ public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalAccessException {
field.set(cfg, seconds > 0 ? Duration.ofSeconds(seconds) : null);
} else if (field.getType() == ProxyConfig.ProxyAuthType.class) {
field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value));
} else if (List.class.isAssignableFrom(field.getType())) {
// Handle List<String> fields (e.g., scopes)
Comment on lines 63 to +71

Contributor:
I have a question about this big if-else block. It doesn't have a default else that throws an error when an unexpected type appears in the config. So is the current code simply ignoring those types?

Contributor Author:
Yes, it silently ignores those types. I can't think of a good reason for it to be this way.

Contributor:
We considered adding a default block that would fail for unexpected types, but rejected it for this PR, as it could lead to breaking changes.
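For reference, the rejected default branch might have looked roughly like this (a hypothetical sketch, not part of the PR; it assumes the SDK's DatabricksException as the failure type):

} else {
  // Hypothetical: fail loudly on attribute types the accessor cannot parse,
  // instead of silently skipping them.
  throw new DatabricksException(
      "Unsupported config attribute type "
          + field.getType().getName()
          + " for field "
          + field.getName());
}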

// Parse comma-separated values from environment variable or config file.
if (field.getGenericType() instanceof ParameterizedType) {
ParameterizedType paramType = (ParameterizedType) field.getGenericType();
if (paramType.getActualTypeArguments().length > 0
&& paramType.getActualTypeArguments()[0] == String.class) {
// Split by comma, trim each value, and filter out empty strings
List<String> list =
Arrays.stream(value.trim().split(","))
.map(String::trim)
.filter(s -> !s.isEmpty())
.collect(Collectors.toList());
field.set(cfg, list);
}
}
}
field.setAccessible(false);
}
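As a standalone sketch of the splitting behavior above (not part of the diff), the stream pipeline trims whitespace around each entry and drops empty segments produced by stray commas:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class ScopeSplitDemo {
  public static void main(String[] args) {
    String value = " clusters:read , jobs,, pipelines ";
    // Same split-trim-filter pipeline as setValueOnConfig above.
    List<String> scopes =
        Arrays.stream(value.trim().split(","))
            .map(String::trim)
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    System.out.println(scopes); // prints [clusters:read, jobs, pipelines]
  }
}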
@@ -91,6 +111,10 @@ public String toString() {
}

public String getAsString(Object value) {
if (value instanceof List) {
List<?> list = (List<?>) value;
return list.stream().map(Object::toString).collect(Collectors.joining(", ", "[", "]"));
}
return value.toString();
}

DatabricksConfig.java
@@ -36,7 +36,7 @@ public class DatabricksConfig {
@ConfigAttribute(env = "DATABRICKS_CLIENT_SECRET", auth = "oauth", sensitive = true)
private String clientSecret;

@ConfigAttribute(env = "DATABRICKS_SCOPES", auth = "oauth")
Contributor Author:
It actually wasn't possible to pass scopes through an environment variable, because Lists weren't parsed correctly, so removing this is not a breaking change. The env attribute is also removed for consistency with the other SDKs, which do not support setting scopes via environment variables.
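With the env attribute gone, scopes come from a config file profile or from code. A minimal sketch of the programmatic path (assuming the usual fluent setters on DatabricksConfig; the host and scope values are placeholders):

DatabricksConfig config =
    new DatabricksConfig()
        .setHost("https://example.cloud.databricks.com")
        .setScopes(Arrays.asList("clusters:read", "jobs"));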

@ConfigAttribute(auth = "oauth")
private List<String> scopes;

@ConfigAttribute(env = "DATABRICKS_REDIRECT_URL", auth = "oauth")
@@ -204,6 +204,7 @@ private synchronized DatabricksConfig innerResolve() {
try {
ConfigLoader.resolve(this);
ConfigLoader.validate(this);
sortScopes();
ConfigLoader.fixHostIfNeeded(this);
initHttp();
return this;
@@ -212,6 +213,13 @@
}
}

// Sort scopes in-place for better de-duplication in the refresh token cache.
private void sortScopes() {
if (scopes != null && !scopes.isEmpty()) {
java.util.Collections.sort(scopes);
}
}
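To see why sorting helps the refresh token cache de-duplicate (a standalone sketch, not part of the diff): two configs that request the same scopes in different orders normalize to equal lists, so a cache keyed on the scope list stores one entry instead of two.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

class SortScopesDemo {
  public static void main(String[] args) {
    List<String> a = new ArrayList<>(Arrays.asList("jobs", "clusters", "mlflow"));
    List<String> b = new ArrayList<>(Arrays.asList("mlflow", "jobs", "clusters"));
    Collections.sort(a);
    Collections.sort(b);
    System.out.println(a.equals(b)); // true: both are [clusters, jobs, mlflow]
  }
}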

private void initHttp() {
if (httpClient != null) {
return;
DatabricksConfigTest.java
@@ -14,10 +14,15 @@
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

public class DatabricksConfigTest {
@Test
@@ -322,4 +327,35 @@ public void testDisableOauthRefreshTokenEnvironmentVariable() {

assertEquals(true, config.getDisableOauthRefreshToken());
}

// Config File Scope Parsing Tests
private static Stream<Arguments> provideConfigFileScopesTestCases() {
return Stream.of(
Arguments.of("Empty scopes defaults to all-apis", "scope-empty", Arrays.asList("all-apis")),
Arguments.of("Single scope", "scope-single", Arrays.asList("clusters:read")),
Arguments.of(
"Multiple scopes sorted alphabetically",
"scope-multiple",
Arrays.asList(
"clusters",
"files:read",
"iam:read",
"jobs",
"mlflow",
"model-serving:read",
"pipelines")));
}

@ParameterizedTest(name = "{0}")
@MethodSource("provideConfigFileScopesTestCases")
public void testConfigFileScopes(String testName, String profile, List<String> expectedScopes) {
Map<String, String> env = new HashMap<>();
env.put("HOME", "src/test/resources/testdata");

DatabricksConfig config = new DatabricksConfig().setProfile(profile);
config.resolve(new Environment(env, new ArrayList<>(), System.getProperty("os.name")));

List<String> scopes = config.getScopes();
assertIterableEquals(expectedScopes, scopes);
}
}
databricks-sdk-java/src/test/resources/testdata/.databrickscfg
@@ -38,4 +38,15 @@ google_credentials = paw48590aw8e09t8apu

[pat.with.dot]
host = https://dbc-XXXXXXXX-YYYY.cloud.databricks.com/
token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ

[scope-empty]
host = https://example.cloud.databricks.com

[scope-single]
host = https://example.cloud.databricks.com
scopes = clusters:read

[scope-multiple]
host = https://example.cloud.databricks.com
scopes = clusters, jobs, pipelines, iam:read, files:read, mlflow, model-serving:read