[fix](backup) fix backup fail on s3 (#25496)

The S3 client properties were not passed to the BE correctly.
Test cases will be added later.
Author: Mingyu Chen
Committed by: GitHub
Date: 2023-10-18 13:52:12 +08:00
Parent: d2400d1d7b
Commit: db16a14970
5 changed files with 25 additions and 14 deletions
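The fix routes the repository's remote file system properties through S3ClientBEProperties.getBeFSProperties(...) before they are handed to the BE upload/download tasks. As a rough illustration of what such a translation step does, here is a minimal, hypothetical sketch: the concrete key names (s3.endpoint, AWS_ENDPOINT, and so on) and the mapping itself are assumptions for illustration only, not the actual S3ClientBEProperties implementation, which is not shown in this diff.

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch only: key names and mapping are illustrative assumptions.
public class S3ClientBePropertiesSketch {

    // Translate FE-style S3 properties into the client-style keys a BE task expects,
    // keeping the original entries so nothing the BE already understands is lost.
    public static Map<String, String> getBeFSProperties(Map<String, String> feProps) {
        Map<String, String> beProps = new HashMap<>(feProps);
        copyIfPresent(feProps, beProps, "s3.endpoint", "AWS_ENDPOINT");
        copyIfPresent(feProps, beProps, "s3.region", "AWS_REGION");
        copyIfPresent(feProps, beProps, "s3.access_key", "AWS_ACCESS_KEY");
        copyIfPresent(feProps, beProps, "s3.secret_key", "AWS_SECRET_KEY");
        return beProps;
    }

    private static void copyIfPresent(Map<String, String> src, Map<String, String> dst,
            String from, String to) {
        String value = src.get(from);
        if (value != null) {
            dst.putIfAbsent(to, value);
        }
    }
}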

BackupJob.java

@@ -36,6 +36,7 @@ import org.apache.doris.catalog.Tablet;
import org.apache.doris.catalog.View;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.property.S3ClientBEProperties;
import org.apache.doris.persist.BarrierLog;
import org.apache.doris.task.AgentBatchTask;
import org.apache.doris.task.AgentTask;
@@ -627,9 +628,9 @@ public class BackupJob extends AbstractJob {
}
long signature = env.getNextId();
UploadTask task = new UploadTask(null, beId, signature, jobId, dbId, srcToDest,
brokers.get(0), repo.getRemoteFileSystem().getProperties(),
brokers.get(0),
S3ClientBEProperties.getBeFSProperties(repo.getRemoteFileSystem().getProperties()),
repo.getRemoteFileSystem().getStorageType(), repo.getLocation());
LOG.info("yy debug upload location: " + repo.getLocation());
batchTask.addTask(task);
unfinishedTaskIds.put(signature, beId);
}
@@ -1013,3 +1014,4 @@ public class BackupJob extends AbstractJob {
return sb.toString();
}
}

RestoreJob.java

@@ -62,6 +62,7 @@ import org.apache.doris.common.util.DbUtil;
import org.apache.doris.common.util.DynamicPartitionUtil;
import org.apache.doris.common.util.PropertyAnalyzer;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.property.S3ClientBEProperties;
import org.apache.doris.resource.Tag;
import org.apache.doris.task.AgentBatchTask;
import org.apache.doris.task.AgentTask;
@@ -97,7 +98,6 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -1434,7 +1434,8 @@ public class RestoreJob extends AbstractJob {
}
long signature = env.getNextId();
DownloadTask task = new DownloadTask(null, beId, signature, jobId, dbId, srcToDest,
brokerAddrs.get(0), repo.getRemoteFileSystem().getProperties(),
brokerAddrs.get(0),
S3ClientBEProperties.getBeFSProperties(repo.getRemoteFileSystem().getProperties()),
repo.getRemoteFileSystem().getStorageType(), repo.getLocation());
batchTask.addTask(task);
unfinishedSignatureToId.put(signature, beId);
@@ -2160,3 +2161,4 @@ public class RestoreJob extends AbstractJob {
return sb.toString();
}
}

PropertyConverter.java

@@ -143,16 +143,17 @@ public class PropertyConverter {
CloudCredential credential,
Map<String, String> s3Properties) {
Map<String, String> heteroProps = new HashMap<>(s3Properties);
Map<String, String> copiedProps = new HashMap<>(props);
if (s3CliEndpoint.contains(CosProperties.COS_PREFIX)) {
props.putIfAbsent(CosProperties.ENDPOINT, s3CliEndpoint);
copiedProps.putIfAbsent(CosProperties.ENDPOINT, s3CliEndpoint);
// CosN is not compatible with S3; when S3 properties are used, convert them to CosN properties.
heteroProps.putAll(convertToCOSProperties(props, credential));
heteroProps.putAll(convertToCOSProperties(copiedProps, credential));
} else if (s3CliEndpoint.contains(ObsProperties.OBS_PREFIX)) {
props.putIfAbsent(ObsProperties.ENDPOINT, s3CliEndpoint);
heteroProps.putAll(convertToOBSProperties(props, credential));
copiedProps.putIfAbsent(ObsProperties.ENDPOINT, s3CliEndpoint);
heteroProps.putAll(convertToOBSProperties(copiedProps, credential));
} else if (s3CliEndpoint.contains(OssProperties.OSS_REGION_PREFIX)) {
props.putIfAbsent(OssProperties.ENDPOINT, s3CliEndpoint);
heteroProps.putAll(convertToOSSProperties(props, credential));
copiedProps.putIfAbsent(OssProperties.ENDPOINT, s3CliEndpoint);
heteroProps.putAll(convertToOSSProperties(copiedProps, credential));
}
return heteroProps;
}
@@ -328,7 +329,7 @@ public class PropertyConverter {
if (endpointSplit.length > 0) {
String region = endpointSplit[0].replace("oss-", "").replace("-internal", "");
ossProperties.put(org.apache.hadoop.fs.aliyun.oss.Constants.ENDPOINT_KEY,
region + ".oss-dls.aliyuncs.com");
region + ".oss-dls.aliyuncs.com");
}
}
ossProperties.put("fs.oss.impl", "com.aliyun.emr.fs.oss.JindoOssFileSystem");
@@ -564,3 +565,4 @@ public class PropertyConverter {
return props;
}
}
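The first PropertyConverter hunk is a defensive-copy fix: previously the method mutated the caller-supplied props map via putIfAbsent and the convertTo* helpers, so cloud-specific endpoint keys leaked back into the repository's own property map. A standalone sketch of the idiom, using simplified names rather than the actual Doris code:

import java.util.HashMap;
import java.util.Map;

public class DefensiveCopySketch {

    // Build the converted property map from a copy so the caller's map is never mutated.
    static Map<String, String> buildHeteroProps(Map<String, String> props, String endpoint) {
        Map<String, String> heteroProps = new HashMap<>(props);
        Map<String, String> copiedProps = new HashMap<>(props); // defensive copy
        copiedProps.putIfAbsent("fs.endpoint", endpoint);       // illustrative key name
        heteroProps.putAll(copiedProps);
        return heteroProps;
    }

    public static void main(String[] args) {
        Map<String, String> callerProps = new HashMap<>();
        Map<String, String> hetero = buildHeteroProps(callerProps, "cos.ap-beijing.myqcloud.com");
        System.out.println(callerProps.containsKey("fs.endpoint")); // false: caller map untouched
        System.out.println(hetero.containsKey("fs.endpoint"));      // true
    }
}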

S3FileSystem.java

@@ -42,13 +42,17 @@ public class S3FileSystem extends ObjFileSystem {
public S3FileSystem(Map<String, String> properties) {
super(StorageBackend.StorageType.S3.name(), StorageBackend.StorageType.S3, new S3ObjStorage(properties));
this.properties.putAll(properties);
initFsProperties();
}

@VisibleForTesting
public S3FileSystem(S3ObjStorage storage) {
super(StorageBackend.StorageType.S3.name(), StorageBackend.StorageType.S3, storage);
this.properties.putAll(storage.getProperties());
initFsProperties();
}

private void initFsProperties() {
this.properties.putAll(((S3ObjStorage) objStorage).getProperties());
}

@Override
@@ -104,3 +108,4 @@ public class S3FileSystem extends ObjFileSystem {
return Status.OK;
}
}
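The S3FileSystem change makes both constructors initialize the file system's property map from the underlying S3ObjStorage via a shared initFsProperties() helper, rather than copying the raw constructor argument, so any keys the storage layer normalizes or adds are reflected consistently. A simplified sketch of that pattern; the types and key names below are assumptions, not the actual Doris classes:

import java.util.HashMap;
import java.util.Map;

// Simplified stand-ins for S3ObjStorage / S3FileSystem; illustrative only.
class ObjStorageSketch {
    private final Map<String, String> properties = new HashMap<>();

    ObjStorageSketch(Map<String, String> props) {
        properties.putAll(props);
        // The storage layer may normalize or add client-side defaults here.
        properties.putIfAbsent("use_path_style", "false"); // illustrative key
    }

    Map<String, String> getProperties() {
        return properties;
    }
}

class FileSystemSketch {
    private final Map<String, String> properties = new HashMap<>();
    private final ObjStorageSketch storage;

    FileSystemSketch(ObjStorageSketch storage) {
        this.storage = storage;
        initFsProperties();
    }

    // Single source of truth: copy whatever property map the storage layer ended up with.
    private void initFsProperties() {
        properties.putAll(storage.getProperties());
    }

    Map<String, String> getProperties() {
        return properties;
    }

    public static void main(String[] args) {
        Map<String, String> userProps = new HashMap<>();
        userProps.put("AWS_ENDPOINT", "s3.example.com"); // hypothetical endpoint
        FileSystemSketch fs = new FileSystemSketch(new ObjStorageSketch(userProps));
        System.out.println(fs.getProperties()); // includes the storage layer's defaults
    }
}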

PropertyConverterTest.java

@@ -170,7 +170,7 @@ public class PropertyConverterTest extends TestWithFeService {
CreateRepositoryStmt analyzedStmt = createStmt(s3Repo);
Assertions.assertEquals(analyzedStmt.getProperties().size(), 4);
Repository repository = getRepository(analyzedStmt, "s3_repo");
Assertions.assertEquals(repository.getRemoteFileSystem().getProperties().size(), 5);
Assertions.assertEquals(9, repository.getRemoteFileSystem().getProperties().size());
String s3RepoNew = "CREATE REPOSITORY `s3_repo_new`\n"
+ "WITH S3\n"