diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
index 98ebe54b96..453a50ad11 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
@@ -37,7 +37,12 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.File;
+import java.util.Collections;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * Spark resource for etl or query.
@@ -159,16 +164,20 @@ public class SparkResource extends Resource {
     }
 
     public Map<String, String> getEnvConfigsWithoutPrefix() {
-        Map<String, String> envConfig = Maps.newHashMap();
-        if (envConfigs != null) {
-            for (Map.Entry<String, String> entry : envConfigs.entrySet()) {
-                if (entry.getKey().startsWith(ENV_PREFIX)) {
-                    String key = entry.getKey().substring(ENV_PREFIX.length());
-                    envConfig.put(key, entry.getValue());
-                }
-            }
-        }
-        return envConfig;
+        return Stream.concat(
+                        getSystemEnvConfigs().entrySet().stream(),
+                        Optional.ofNullable(envConfigs).orElse(Collections.emptyMap()).entrySet().stream()
+                )
+                .filter(entry -> entry.getKey().startsWith(ENV_PREFIX))
+                .collect(Collectors.toMap(
+                        entry -> entry.getKey().substring(ENV_PREFIX.length()),
+                        Entry::getValue,
+                        (oldValue, newValue) -> newValue
+                ));
+    }
+
+    public Map<String, String> getSystemEnvConfigs() {
+        return System.getenv();
     }
 
     public Pair<String, String> getYarnResourcemanagerAddressPair() {
diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
index 0184acc5c2..ac047ed17f 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
@@ -36,6 +36,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 
 public class SparkResourceTest {
@@ -195,4 +197,39 @@ public class SparkResourceTest {
         stmt.analyze(analyzer);
         Resource.fromStmt(stmt);
     }
+
+    @Test
+    public void testGetEnvConfigsWithoutPrefix() {
+        Map<String, String> envConfigs = new HashMap<>();
+        envConfigs.put("env.testKey2", "testValue2");
+        SparkResource resource = new SparkResource("test", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected1 = new HashMap<>();
+        expected1.put("testKey1", "testValue1");
+        expected1.put("testKey2", "testValue2");
+        Map<String, String> actual1 = resource.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected1, actual1);
+
+        Map<String, String> envConfigs2 = new HashMap<>();
+        envConfigs2.put("env.testKey1", "testValue3");
+        envConfigs2.put("env.testKey2", "testValue2");
+        SparkResource resource2 = new SparkResource("test2", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs2) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected2 = new HashMap<>();
+        expected2.put("testKey1", "testValue3");
+        expected2.put("testKey2", "testValue2");
+        Map<String, String> actual2 = resource2.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected2, actual2);
+    }
+
 }