From dd9d90ce7cbdbfeed0dd5e3c99c387dea504d1e6 Mon Sep 17 00:00:00 2001
From: Karen Cheng
Date: Tue, 13 Feb 2024 13:27:39 +1000
Subject: [PATCH] Currently the sparkVersion clause in the selector does not
 work because it requires an exact match instead of a substring match; the API
 does not return version names in a format that satisfies the current clause.

---
 .../src/main/java/com/databricks/sdk/mixin/ClustersExt.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/ClustersExt.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/ClustersExt.java
index 79be6c740..dc86da730 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/ClustersExt.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/ClustersExt.java
@@ -50,7 +50,7 @@ public String selectSparkVersion(SparkVersionSelector selector) throws IllegalAr
       matches = version.getName().contains("LTS") || version.getKey().contains("-esr-");
     }
     if (matches && selector.sparkVersion != null) {
-      matches = ("Apache Spark " + selector.sparkVersion).equals(version.getName());
+      matches = version.getName().contains("Apache Spark " + selector.sparkVersion);
     }
     if (matches) {
       versions.add(version.getKey());
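
Note (not part of the patch): version names returned by the clusters spark-versions
API typically embed the Spark version inside a longer display string, so an exact
equals against "Apache Spark <version>" never matches. The following minimal sketch
illustrates the difference between the old and new clauses; the example name is an
assumed sample of the API's format, not taken from the patch.

// SparkVersionMatchDemo.java -- standalone illustration, assumed example data.
public class SparkVersionMatchDemo {
  public static void main(String[] args) {
    // Assumed shape of a version name returned by the API.
    String name = "13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12)";
    String selectorSparkVersion = "3.4.1";

    // Old clause: exact equality never matches the decorated display name.
    boolean exact = ("Apache Spark " + selectorSparkVersion).equals(name); // false

    // Patched clause: substring match finds the version inside the name.
    boolean substring = name.contains("Apache Spark " + selectorSparkVersion); // true

    System.out.println("equals: " + exact + ", contains: " + substring);
  }
}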