[Improvement][SparkLoad] Use system env configs when users don't set env configs. (#21837)
SparkResource.java

@@ -37,7 +37,12 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.File;
+import java.util.Collections;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * Spark resource for etl or query.
@@ -159,16 +164,20 @@ public class SparkResource extends Resource {
     }
 
     public Map<String, String> getEnvConfigsWithoutPrefix() {
-        Map<String, String> envConfig = Maps.newHashMap();
-        if (envConfigs != null) {
-            for (Map.Entry<String, String> entry : envConfigs.entrySet()) {
-                if (entry.getKey().startsWith(ENV_PREFIX)) {
-                    String key = entry.getKey().substring(ENV_PREFIX.length());
-                    envConfig.put(key, entry.getValue());
-                }
-            }
-        }
-        return envConfig;
+        return Stream.concat(
+                getSystemEnvConfigs().entrySet().stream(),
+                Optional.ofNullable(envConfigs).orElse(Collections.emptyMap()).entrySet().stream()
+        )
+                .filter(entry -> entry.getKey().startsWith(ENV_PREFIX))
+                .collect(Collectors.toMap(
+                        entry -> entry.getKey().substring(ENV_PREFIX.length()),
+                        Entry::getValue,
+                        (oldValue, newValue) -> newValue
+                ));
     }
 
+    public Map<String, String> getSystemEnvConfigs() {
+        return System.getenv();
+    }
+
     public Pair<String, String> getYarnResourcemanagerAddressPair() {
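For context, here is a minimal, self-contained sketch of the merge precedence that the new getEnvConfigsWithoutPrefix establishes: system environment variables serve as defaults, and user-set "env."-prefixed configs override them because the toMap merge function keeps the later (user) value. The class, method, and the SPARK_HOME key/values below are illustrative only and not part of the patch.

// Illustrative sketch (not part of the patch): user-set "env." configs win over system env.
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class EnvMergeSketch {
    private static final String ENV_PREFIX = "env.";

    // systemEnv stands in for System.getenv(); userEnv stands in for the resource's envConfigs.
    static Map<String, String> merge(Map<String, String> systemEnv, Map<String, String> userEnv) {
        return Stream.concat(
                systemEnv.entrySet().stream(),
                Optional.ofNullable(userEnv).orElse(Collections.emptyMap()).entrySet().stream()
        )
                .filter(entry -> entry.getKey().startsWith(ENV_PREFIX))
                .collect(Collectors.toMap(
                        entry -> entry.getKey().substring(ENV_PREFIX.length()),
                        Entry::getValue,
                        // duplicate keys: the later (user) entry replaces the earlier (system) one
                        (oldValue, newValue) -> newValue));
    }

    public static void main(String[] args) {
        Map<String, String> system = Collections.singletonMap("env.SPARK_HOME", "/opt/spark-system");
        Map<String, String> user = Collections.singletonMap("env.SPARK_HOME", "/opt/spark-user");
        // Prints {SPARK_HOME=/opt/spark-user}: the user-set value overrides the system one.
        System.out.println(merge(system, user));
    }
}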
SparkResourceTest.java

@@ -36,6 +36,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 
 public class SparkResourceTest {
@@ -195,4 +197,39 @@ public class SparkResourceTest {
         stmt.analyze(analyzer);
         Resource.fromStmt(stmt);
     }
+
+    @Test
+    public void testGetEnvConfigsWithoutPrefix() {
+        Map<String, String> envConfigs = new HashMap<>();
+        envConfigs.put("env.testKey2", "testValue2");
+        SparkResource resource = new SparkResource("test", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected1 = new HashMap<>();
+        expected1.put("testKey1", "testValue1");
+        expected1.put("testKey2", "testValue2");
+        Map<String, String> actual1 = resource.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected1, actual1);
+
+        Map<String, String> envConfigs2 = new HashMap<>();
+        envConfigs2.put("env.testKey1", "testValue3");
+        envConfigs2.put("env.testKey2", "testValue2");
+        SparkResource resource2 = new SparkResource("test2", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs2) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected2 = new HashMap<>();
+        expected2.put("testKey1", "testValue3");
+        expected2.put("testKey2", "testValue2");
+        Map<String, String> actual2 = resource2.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected2, actual2);
+    }
+
 }