[improve](routineload) improve show routine load output (#24264)

This commit is contained in:
camby
2023-09-15 10:22:47 +08:00
committed by GitHub
parent e0834b2f46
commit 320f1e9bbf
4 changed files with 20 additions and 14 deletions

View File

@@ -92,6 +92,7 @@ public class CreateRoutineLoadStmt extends DdlStmt {
// routine load properties
public static final String DESIRED_CONCURRENT_NUMBER_PROPERTY = "desired_concurrent_number";
public static final String CURRENT_CONCURRENT_NUMBER_PROPERTY = "current_concurrent_number";
// max error number in ten thousand records
public static final String MAX_ERROR_NUMBER_PROPERTY = "max_error_number";
public static final String MAX_FILTER_RATIO_PROPERTY = "max_filter_ratio";

View File

@@ -1584,21 +1584,26 @@ public abstract class RoutineLoadJob extends AbstractTxnStateChangeCallback impl
jobProperties.put("precedingFilter", precedingFilter == null ? STAR_STRING : precedingFilter.toSql());
jobProperties.put("whereExpr", whereExpr == null ? STAR_STRING : whereExpr.toSql());
if (getFormat().equalsIgnoreCase("json")) {
jobProperties.put("dataFormat", "json");
jobProperties.put(PROPS_FORMAT, "json");
} else {
jobProperties.put("columnSeparator", columnSeparator == null ? "\t" : columnSeparator.toString());
jobProperties.put("lineDelimiter", lineDelimiter == null ? "\n" : lineDelimiter.toString());
jobProperties.put(LoadStmt.KEY_IN_PARAM_COLUMN_SEPARATOR,
columnSeparator == null ? "\t" : columnSeparator.toString());
jobProperties.put(LoadStmt.KEY_IN_PARAM_LINE_DELIMITER,
lineDelimiter == null ? "\n" : lineDelimiter.toString());
}
jobProperties.put(CreateRoutineLoadStmt.PARTIAL_COLUMNS, String.valueOf(isPartialUpdate));
jobProperties.put("maxErrorNum", String.valueOf(maxErrorNum));
jobProperties.put("maxBatchIntervalS", String.valueOf(maxBatchIntervalS));
jobProperties.put("maxBatchRows", String.valueOf(maxBatchRows));
jobProperties.put("maxBatchSizeBytes", String.valueOf(maxBatchSizeBytes));
jobProperties.put("currentTaskConcurrentNum", String.valueOf(currentTaskConcurrentNum));
jobProperties.put("desireTaskConcurrentNum", String.valueOf(desireTaskConcurrentNum));
jobProperties.put("execMemLimit", String.valueOf(execMemLimit));
jobProperties.put("mergeType", mergeType.toString());
jobProperties.put("deleteCondition", deleteCondition == null ? STAR_STRING : deleteCondition.toSql());
jobProperties.put(CreateRoutineLoadStmt.MAX_ERROR_NUMBER_PROPERTY, String.valueOf(maxErrorNum));
jobProperties.put(CreateRoutineLoadStmt.MAX_BATCH_INTERVAL_SEC_PROPERTY, String.valueOf(maxBatchIntervalS));
jobProperties.put(CreateRoutineLoadStmt.MAX_BATCH_ROWS_PROPERTY, String.valueOf(maxBatchRows));
jobProperties.put(CreateRoutineLoadStmt.MAX_BATCH_SIZE_PROPERTY, String.valueOf(maxBatchSizeBytes));
jobProperties.put(CreateRoutineLoadStmt.CURRENT_CONCURRENT_NUMBER_PROPERTY,
String.valueOf(currentTaskConcurrentNum));
jobProperties.put(CreateRoutineLoadStmt.DESIRED_CONCURRENT_NUMBER_PROPERTY,
String.valueOf(desireTaskConcurrentNum));
jobProperties.put(LoadStmt.EXEC_MEM_LIMIT, String.valueOf(execMemLimit));
jobProperties.put(LoadStmt.KEY_IN_PARAM_MERGE_TYPE, mergeType.toString());
jobProperties.put(LoadStmt.KEY_IN_PARAM_DELETE_CONDITION,
deleteCondition == null ? STAR_STRING : deleteCondition.toSql());
jobProperties.putAll(this.jobProperties);
Gson gson = new GsonBuilder().disableHtmlEscaping().create();
return gson.toJson(jobProperties);