[Enhancement] move common codes from fe-core to fe-common and remove log4j1 (#5317) (#5318)

The IO-related code may be used by new modules, so it's better to move it to fe-common.

fe-core is modified frequently, but the many Java files generated by thrift
slow down its compilation, so it's better to move the thrift generation process to fe-common.

Currently both log4j1 and log4j2 are used, which leads to logs being written to the wrong files.
This change removes log4j1 from the dependencies and uses slf4j with the slf4j-to-log4j2 binding instead.
This commit is contained in:
copperybean
2021-02-04 13:41:03 +08:00
committed by GitHub
parent b6abcbdd35
commit d8202ca9cc
27 changed files with 199 additions and 98 deletions

View File

@ -30,16 +30,136 @@ under the License.
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>fe-common</artifactId>
<artifactId>doris-fe-common</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>
<doris.home>${basedir}/../../</doris.home>
<doris.thirdparty>${basedir}/../../thirdparty</doris.thirdparty>
</properties>
<profiles>
<profile>
<id>thirdparty</id>
<activation>
<property>
<name>env.DORIS_THIRDPARTY</name>
</property>
</activation>
<properties>
<doris.thirdparty>${env.DORIS_THIRDPARTY}</doris.thirdparty>
</properties>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.thrift/libfb303 -->
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libfb303</artifactId>
<type>pom</type>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.thrift/libthrift -->
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jmockit/jmockit -->
<dependency>
<groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<finalName>doris-fe-common</finalName>
<plugins>
<!--thrift-->
<plugin>
<groupId>org.apache.thrift.tools</groupId>
<artifactId>maven-thrift-plugin</artifactId>
<version>0.1.11</version>
<configuration>
<thriftExecutable>${doris.thirdparty}/installed/bin/thrift</thriftExecutable>
<thriftSourceRoot>${doris.home}/gensrc/thrift</thriftSourceRoot>
<generator>java:fullcamel</generator>
</configuration>
<executions>
<execution>
<id>thrift-sources</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<attach>true</attach>
</configuration>
<executions>
<execution>
<id>create-source-jar</id>
<goals>
<goal>jar-no-fork</goal>
<goal>test-jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Build test-jar's for all projects, since some projects depend on tests from others -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.2</version>
<executions>
<execution>
<id>prepare-test-jar</id>
<phase>test-compile</phase>
<goals>
<goal>test-jar</goal>
</goals>
<configuration>
<excludes>
<exclude>log4j.properties</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
<!-- for FE java code style checking -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>

View File

@ -18,9 +18,8 @@
package org.apache.doris.common.io;
import org.apache.doris.meta.MetaContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInput;
import java.io.DataInputStream;
@ -31,7 +30,7 @@ import java.lang.reflect.Method;
* This class is for deep copying a writable instance.
*/
public class DeepCopy {
private static final Logger LOG = LogManager.getLogger(DeepCopy.class);
private static final Logger LOG = LoggerFactory.getLogger(DeepCopy.class);
public static final String READ_FIELDS_METHOD_NAME = "readFields";
public static final String READ_METHOD_NAME = "read";

View File

@ -18,7 +18,7 @@
package org.apache.doris.common.io;
import com.google.common.base.Strings;
import org.apache.logging.log4j.Logger;
import org.slf4j.Logger;
import java.io.DataInput;
import java.io.DataOutput;

View File

@ -17,8 +17,8 @@
package org.apache.doris.common.io;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
@ -27,7 +27,7 @@ import java.io.InputStream;
* A LimitInputStream that the InputStream is limited .
*/
public class LimitInputStream extends InputStream {
private static final Logger LOG = LogManager.getLogger(LimitInputStream.class);
private static final Logger LOG = LoggerFactory.getLogger(LimitInputStream.class);
/**
* The input stream to be limited.

View File

@ -17,8 +17,8 @@
package org.apache.doris.common.io;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
@ -27,7 +27,7 @@ import java.io.OutputStream;
* A LimitOutputStream that the OutputStream is limited .
*/
public class LimitOutputStream extends OutputStream {
private static final Logger LOG = LogManager.getLogger(LimitOutputStream.class.getName());
private static final Logger LOG = LoggerFactory.getLogger(LimitOutputStream.class.getName());
/**
* The output stream to be limited.

View File

@ -17,8 +17,8 @@
package org.apache.doris.common.io;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.DataInput;
@ -46,7 +46,7 @@ import java.text.StringCharacterIterator;
* length of an encoded string.
*/
public class Text implements Writable {
private static final Logger LOG = LogManager.getLogger(Text.class);
private static final Logger LOG = LoggerFactory.getLogger(Text.class);
private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY = new ThreadLocal<CharsetEncoder>() {
protected CharsetEncoder initialValue() {

View File

@ -37,7 +37,7 @@ under the License.
<properties>
<doris.home>${basedir}/../../</doris.home>
<fe_ut_parallel>1</fe_ut_parallel>
<doris.thridparty>${basedir}/../../thirdparty</doris.thridparty>
<doris.thirdparty>${basedir}/../../thirdparty</doris.thirdparty>
</properties>
<profiles>
@ -49,7 +49,7 @@ under the License.
</property>
</activation>
<properties>
<doris.thridparty>${env.DORIS_THIRDPARTY}</doris.thridparty>
<doris.thirdparty>${env.DORIS_THIRDPARTY}</doris.thirdparty>
</properties>
</profile>
<profile>
@ -66,6 +66,10 @@ under the License.
</profiles>
<dependencies>
<dependency>
<groupId>org.apache</groupId>
<artifactId>doris-fe-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
@ -257,19 +261,6 @@ under the License.
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.thrift/libfb303 -->
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libfb303</artifactId>
<type>pom</type>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.thrift/libthrift -->
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
@ -421,12 +412,6 @@ under the License.
<artifactId>zjsonpatch</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
@ -581,26 +566,6 @@ under the License.
<finalName>palo-fe</finalName>
<plugins>
<!--thrift-->
<plugin>
<groupId>org.apache.thrift.tools</groupId>
<artifactId>maven-thrift-plugin</artifactId>
<version>0.1.11</version>
<configuration>
<thriftExecutable>${doris.thridparty}/installed/bin/thrift</thriftExecutable>
<thriftSourceRoot>${doris.home}/gensrc/thrift</thriftSourceRoot>
<generator>java:fullcamel</generator>
</configuration>
<executions>
<execution>
<id>thrift-sources</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
<!--jcup-->
<plugin>
<groupId>net.sourceforge.czt.dev</groupId>

View File

@ -28,11 +28,11 @@ import org.apache.doris.load.routineload.LoadDataSourceType;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@ -46,7 +46,7 @@ import mockit.Mocked;
public class CreateRoutineLoadStmtTest {
private static final Logger LOG = LogManager.getLogger(CreateRoutineLoadStmtTest.class);
private static final Logger LOG = LoggerFactory.getLogger(CreateRoutineLoadStmtTest.class);
@Mocked
Database database;

View File

@ -39,6 +39,7 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.doris.thrift.TStorageType;
import org.junit.Assert;
import org.junit.Test;
@ -355,24 +356,27 @@ public class MaterializedViewSelectorTest {
@Test
public void testCompensateIndex(@Injectable SelectStmt selectStmt, @Injectable Analyzer analyzer,
@Injectable OlapTable table) {
Map<Long, List<Column>> candidateIndexIdToSchema = Maps.newHashMap();
Map<Long, List<Column>> allVisibleIndexes = Maps.newHashMap();
Map<Long, MaterializedIndexMeta> candidateIndexIdToSchema = Maps.newHashMap();
Map<Long, MaterializedIndexMeta> allVisibleIndexes = Maps.newHashMap();
List<Column> index1Columns = Lists.newArrayList();
Column index1Column1 = new Column("c2", Type.INT, true, AggregateType.SUM, true, "", "");
index1Columns.add(index1Column1);
allVisibleIndexes.put(new Long(1), index1Columns);
allVisibleIndexes.put(new Long(1), new MaterializedIndexMeta(
0, index1Columns, 0, 0, (short) 0, TStorageType.COLUMN, KeysType.AGG_KEYS, null));
List<Column> index2Columns = Lists.newArrayList();
Column index2Column1 = new Column("c1", Type.INT, true, null, true, "", "");
index2Columns.add(index2Column1);
Column index2Column2 = new Column("c2", Type.INT, false, AggregateType.SUM, true, "", "");
index2Columns.add(index2Column2);
allVisibleIndexes.put(new Long(2), index2Columns);
allVisibleIndexes.put(new Long(2), new MaterializedIndexMeta(
0, index2Columns, 0, 0, (short) 0, TStorageType.COLUMN, KeysType.AGG_KEYS, null));
List<Column> index3Columns = Lists.newArrayList();
Column index3Column1 = new Column("c1", Type.INT, true, null, true, "", "");
index3Columns.add(index3Column1);
Column index3Column2 = new Column("c3", Type.INT, false, AggregateType.SUM, true, "", "");
index3Columns.add(index3Column2);
allVisibleIndexes.put(new Long(3), index3Columns);
allVisibleIndexes.put(new Long(3), new MaterializedIndexMeta(
0, index3Columns, 0, 0, (short) 0, TStorageType.COLUMN, KeysType.AGG_KEYS, null));
List<Column> keyColumns = Lists.newArrayList();
keyColumns.add(index2Column1);
new Expectations() {

View File

@ -122,7 +122,7 @@ under the License.
<dependencies>
<dependency>
<groupId>org.apache</groupId>
<artifactId>fe-common</artifactId>
<artifactId>doris-fe-common</artifactId>
<version>1.0.0</version>
</dependency>
@ -513,13 +513,6 @@ under the License.
<version>0.2.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
@ -581,6 +574,16 @@ under the License.
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.12</artifactId>
<version>2.4.5</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-launcher_2.12 -->
@ -604,49 +607,49 @@ under the License.
<version>2.6.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
<version>1.10.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>1.10.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-common</artifactId>
<version>1.10.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.12.10</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
<version>4.0.2</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_2.12</artifactId>

View File

@ -42,7 +42,7 @@ under the License.
<dependencies>
<dependency>
<groupId>org.apache</groupId>
<artifactId>fe-common</artifactId>
<artifactId>doris-fe-common</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
@ -86,11 +86,10 @@ under the License.
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>provided</scope>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -19,10 +19,10 @@ package org.apache.doris.load.loadv2.dpp;
import org.apache.doris.common.SparkDppException;
import org.apache.doris.load.loadv2.etl.EtlJobConfig;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.math.BigDecimal;
@ -31,7 +31,7 @@ import java.math.BigInteger;
// Parser to validate value for different type
public abstract class ColumnParser implements Serializable {
protected static final Logger LOG = LogManager.getLogger(ColumnParser.class);
protected static final Logger LOG = LoggerFactory.getLogger(ColumnParser.class);
// thread safe formatter
public static final DateTimeFormatter DATE_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd");

View File

@ -27,8 +27,8 @@ import org.apache.spark.sql.catalog.Column;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
@ -63,7 +63,7 @@ import java.util.stream.Collectors;
public class GlobalDictBuilder {
protected static final Logger LOG = LogManager.getLogger(GlobalDictBuilder.class);
protected static final Logger LOG = LoggerFactory.getLogger(GlobalDictBuilder.class);
// name of the column in doris table which need to build global dict
// for example: some dict columns a,b,c

View File

@ -17,8 +17,6 @@
package org.apache.doris.load.loadv2.dpp;
import scala.Tuple2;
import org.apache.doris.common.SparkDppException;
import org.apache.doris.load.loadv2.etl.EtlJobConfig;
import com.google.common.base.Strings;
@ -31,8 +29,6 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
@ -58,6 +54,8 @@ import org.apache.spark.sql.types.StructType;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.util.LongAccumulator;
import org.apache.spark.util.SerializableConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
@ -77,6 +75,8 @@ import java.util.Map;
import java.util.Queue;
import java.util.Set;
import scala.Tuple2;
// This class is a Spark-based data preprocessing program,
// which will make use of the distributed compute framework of spark to
// do ETL job/sort/preaggregate jobs in spark job
@ -89,7 +89,7 @@ import java.util.Set;
// 3. process aggregation if needed
// 4. write data to parquet file
public final class SparkDpp implements java.io.Serializable {
private static final Logger LOG = LogManager.getLogger(SparkDpp.class);
private static final Logger LOG = LoggerFactory.getLogger(SparkDpp.class);
private static final String NULL_FLAG = "\\N";
private static final String DPP_RESULT_FILE = "dpp_result.json";

View File

@ -27,12 +27,12 @@ import org.apache.doris.load.loadv2.etl.EtlJobConfig.EtlIndex;
import org.apache.doris.load.loadv2.etl.EtlJobConfig.EtlTable;
import org.apache.commons.collections.map.MultiValueMap;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@ -50,7 +50,7 @@ import java.util.Set;
* 4. dpp (data partition, data sort and data aggregation)
*/
public class SparkEtlJob {
private static final Logger LOG = LogManager.getLogger(SparkEtlJob.class);
private static final Logger LOG = LoggerFactory.getLogger(SparkEtlJob.class);
private static final String BITMAP_DICT_FUNC = "bitmap_dict";
private static final String TO_BITMAP_FUNC = "to_bitmap";

View File

@ -17,6 +17,10 @@
<groupId>org.apache</groupId>
<artifactId>doris-fe</artifactId>
</dependency>
<dependency>
<groupId>org.apache</groupId>
<artifactId>doris-fe-common</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
<dependency>

View File

@ -50,6 +50,13 @@
<scope>system</scope>
<systemPath>${doris.home}/fe/fe-core/target/palo-fe.jar</systemPath>
</dependency>
<dependency>
<groupId>org.apache</groupId>
<artifactId>doris-fe-common</artifactId>
<version>1.0.0</version>
<scope>system</scope>
<systemPath>${doris.home}/fe/fe-common/target/doris-fe-common.jar</systemPath>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
<dependency>