4 changes: 4 additions & 0 deletions .gitignore
@@ -27,6 +27,8 @@ log/
fe_plugins/*/target
fe_plugins/output
fe/mocked
+ fe/*/target
+ dependency-reduced-pom.xml


#ignore eclipse project file & idea project file
@@ -43,6 +45,8 @@ be/src/gen_cpp/*.cc
be/src/gen_cpp/*.cpp
be/src/gen_cpp/*.h
be/src/gen_cpp/opcode
+ be/ut_build_ASAN/
+ be/tags

#ignore vscode project file
.vscode
85 changes: 57 additions & 28 deletions build.sh
@@ -49,20 +49,22 @@ usage() {
Usage: $0 <options>
Optional options:
--be build Backend
- --fe build Frontend
+ --fe build Frontend and Spark DPP application
+ --spark-dpp build Spark DPP application
--clean clean and build target
--with-mysql enable MySQL support(default)
--without-mysql disable MySQL support
--with-lzo enable LZO compress support(default)
--without-lzo disable LZO compress support

Eg.
- $0 build Backend and Frontend without clean
+ $0 build all
$0 --be build Backend without clean
$0 --be --without-mysql build Backend with MySQL disable
$0 --be --without-mysql --without-lzo build Backend with both MySQL and LZO disable
- $0 --fe --clean clean and build Frontend
- $0 --fe --be --clean clean and build both Frontend and Backend
+ $0 --fe --clean clean and build Frontend and Spark DPP application
+ $0 --fe --be --clean clean and build Frontend, Spark DPP application and Backend
+ $0 --spark-dpp build Spark DPP application alone
"
exit 1
}
@@ -73,6 +75,7 @@ OPTS=$(getopt \
-o 'h' \
-l 'be' \
-l 'fe' \
+ -l 'spark-dpp' \
-l 'clean' \
-l 'with-mysql' \
-l 'without-mysql' \
@@ -89,26 +92,30 @@ eval set -- "$OPTS"

BUILD_BE=
BUILD_FE=
+ BUILD_SPARK_DPP=
CLEAN=
RUN_UT=
WITH_MYSQL=ON
WITH_LZO=ON
HELP=0
if [ $# == 1 ] ; then
- # defuat
+ # default
BUILD_BE=1
BUILD_FE=1
+ BUILD_SPARK_DPP=1
CLEAN=0
RUN_UT=0
else
BUILD_BE=0
BUILD_FE=0
+ BUILD_SPARK_DPP=0
CLEAN=0
RUN_UT=0
while true; do
case "$1" in
--be) BUILD_BE=1 ; shift ;;
--fe) BUILD_FE=1 ; shift ;;
+ --spark-dpp) BUILD_SPARK_DPP=1 ; shift ;;
--clean) CLEAN=1 ; shift ;;
--ut) RUN_UT=1 ; shift ;;
--with-mysql) WITH_MYSQL=ON; shift ;;
@@ -128,18 +135,19 @@ if [[ ${HELP} -eq 1 ]]; then
exit
fi

- if [ ${CLEAN} -eq 1 -a ${BUILD_BE} -eq 0 -a ${BUILD_FE} -eq 0 ]; then
- echo "--clean can not be specified without --fe or --be"
+ if [ ${CLEAN} -eq 1 -a ${BUILD_BE} -eq 0 -a ${BUILD_FE} -eq 0 -a ${BUILD_SPARK_DPP} -eq 0 ]; then
+ echo "--clean can not be specified without --fe or --be or --spark-dpp"
exit 1
fi

echo "Get params:
- BUILD_BE -- $BUILD_BE
- BUILD_FE -- $BUILD_FE
- CLEAN -- $CLEAN
- RUN_UT -- $RUN_UT
- WITH_MYSQL -- $WITH_MYSQL
- WITH_LZO -- $WITH_LZO
+ BUILD_BE -- $BUILD_BE
+ BUILD_FE -- $BUILD_FE
+ BUILD_SPARK_DPP -- $BUILD_SPARK_DPP
+ CLEAN -- $CLEAN
+ RUN_UT -- $RUN_UT
+ WITH_MYSQL -- $WITH_MYSQL
+ WITH_LZO -- $WITH_LZO
"

# Clean and build generated code
@@ -175,34 +183,55 @@ cd ${DORIS_HOME}/docs
./build_help_zip.sh
cd ${DORIS_HOME}

+ # Assemble FE modules
+ FE_MODULES=
+ if [ ${BUILD_FE} -eq 1 -o ${BUILD_SPARK_DPP} -eq 1 ]; then
+ if [ ${BUILD_SPARK_DPP} -eq 1 ]; then
+ FE_MODULES="fe-common,spark-dpp"
+ fi
+ if [ ${BUILD_FE} -eq 1 ]; then
+ FE_MODULES="fe-common,spark-dpp,fe-core"
+ fi
+ fi

# Clean and build Frontend
- if [ ${BUILD_FE} -eq 1 ] ; then
- echo "Build Frontend"
+ if [ ${FE_MODULES}x != ""x ]; then
+ echo "Build Frontend Modules: $FE_MODULES"
cd ${DORIS_HOME}/fe
if [ ${CLEAN} -eq 1 ]; then
${MVN_CMD} clean
fi
- ${MVN_CMD} package -DskipTests
+ ${MVN_CMD} package -pl ${FE_MODULES} -DskipTests
cd ${DORIS_HOME}
fi

# Clean and prepare output dir
DORIS_OUTPUT=${DORIS_HOME}/output/
mkdir -p ${DORIS_OUTPUT}

- #Copy Frontend and Backend
- if [ ${BUILD_FE} -eq 1 ]; then
- install -d ${DORIS_OUTPUT}/fe/bin ${DORIS_OUTPUT}/fe/conf \
- ${DORIS_OUTPUT}/fe/webroot/ ${DORIS_OUTPUT}/fe/lib/
-
- cp -r -p ${DORIS_HOME}/bin/*_fe.sh ${DORIS_OUTPUT}/fe/bin/
- cp -r -p ${DORIS_HOME}/conf/fe.conf ${DORIS_OUTPUT}/fe/conf/
- rm -rf ${DORIS_OUTPUT}/fe/lib/*
- cp -r -p ${DORIS_HOME}/fe/fe-core/target/lib/* ${DORIS_OUTPUT}/fe/lib/
- cp -r -p ${DORIS_HOME}/fe/fe-core/target/palo-fe.jar ${DORIS_OUTPUT}/fe/lib/
- cp -r -p ${DORIS_HOME}/docs/build/help-resource.zip ${DORIS_OUTPUT}/fe/lib/
- cp -r -p ${DORIS_HOME}/webroot/* ${DORIS_OUTPUT}/fe/webroot/
+ # Copy Frontend and Backend
+ if [ ${BUILD_FE} -eq 1 -o ${BUILD_SPARK_DPP} -eq 1 ]; then
+ if [ ${BUILD_FE} -eq 1 ]; then
+ install -d ${DORIS_OUTPUT}/fe/bin ${DORIS_OUTPUT}/fe/conf \
+ ${DORIS_OUTPUT}/fe/webroot/ ${DORIS_OUTPUT}/fe/lib/ \
+ ${DORIS_OUTPUT}/fe/spark-dpp/
+
+ cp -r -p ${DORIS_HOME}/bin/*_fe.sh ${DORIS_OUTPUT}/fe/bin/
+ cp -r -p ${DORIS_HOME}/conf/fe.conf ${DORIS_OUTPUT}/fe/conf/
+ rm -rf ${DORIS_OUTPUT}/fe/lib/*
+ cp -r -p ${DORIS_HOME}/fe/fe-core/target/lib/* ${DORIS_OUTPUT}/fe/lib/
+ cp -r -p ${DORIS_HOME}/fe/fe-core/target/palo-fe.jar ${DORIS_OUTPUT}/fe/lib/
+ cp -r -p ${DORIS_HOME}/docs/build/help-resource.zip ${DORIS_OUTPUT}/fe/lib/
+ cp -r -p ${DORIS_HOME}/webroot/* ${DORIS_OUTPUT}/fe/webroot/
+ cp -r -p ${DORIS_HOME}/fe/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${DORIS_OUTPUT}/fe/spark-dpp/
+
+ elif [ ${BUILD_SPARK_DPP} -eq 1 ]; then
+ install -d ${DORIS_OUTPUT}/fe/spark-dpp/
+ rm -rf ${DORIS_OUTPUT}/fe/spark-dpp/*
+ cp -r -p ${DORIS_HOME}/fe/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${DORIS_OUTPUT}/fe/spark-dpp/
+ fi
+ fi

if [ ${BUILD_BE} -eq 1 ]; then
install -d ${DORIS_OUTPUT}/be/bin \
${DORIS_OUTPUT}/be/conf \
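Taken together, the new flags reduce to Maven's -pl (project list) option. A minimal sketch of what build.sh effectively invokes, assuming the fe/ multi-module layout introduced by this PR:

    cd fe
    # --spark-dpp alone: build the DPP jar plus the fe-common module it depends on
    mvn package -pl fe-common,spark-dpp -DskipTests
    # --fe: fe-core needs both other modules, so all three are listed
    mvn package -pl fe-common,spark-dpp,fe-core -DskipTests

Note that -pl builds only the modules listed, which is why the script spells out fe-common and spark-dpp instead of relying on Maven to pull them in.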
14 changes: 14 additions & 0 deletions fe/README
@@ -0,0 +1,14 @@
# fe-common

This module holds common classes shared by the other modules.

# spark-dpp
Contributor comment: Would be better to add an explanation for dpp.


This module is the Spark DPP (data preprocessing) program, used by the Spark Load feature.
Depends: fe-common

# fe-core

This module is the main process module of the FE.
Depends: fe-common, spark-dpp

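Given that dependency graph, a single module can also be built straight from Maven with -pl plus --also-make; a hedged example, not part of this PR:

    cd fe
    # -am (--also-make) builds fe-common first, since spark-dpp depends on it
    mvn package -pl spark-dpp -am -DskipTests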
68 changes: 68 additions & 0 deletions fe/fe-common/pom.xml
@@ -0,0 +1,68 @@
<?xml version="1.0" encoding="UTF-8"?>

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.apache</groupId>
<artifactId>doris-fe</artifactId>
<version>3.4.0</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>fe-common</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>

<properties>
<doris.home>${basedir}/../../</doris.home>
</properties>

<build>
<plugins>
<!-- for FE java code style checking -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<configLocation>checkstyle.xml</configLocation>
<encoding>UTF-8</encoding>
<consoleOutput>true</consoleOutput>
<failsOnError>true</failsOnError>
<linkXRef>false</linkXRef>
<excludes>**/jmockit/**/*</excludes>
</configuration>
<executions>
<execution>
<id>validate</id>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
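Because the checkstyle check goal is bound to the validate phase here, style violations fail the build before any code is compiled. A sketch of the effect:

    cd fe/fe-common
    mvn validate   # runs only up to the validate phase, i.e. just the style check
    mvn package    # validate runs first, so packaging enforces the style rules too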
7 changes: 6 additions & 1 deletion fe/fe-core/pom.xml
@@ -30,7 +30,7 @@ under the License.
<relativePath>../pom.xml</relativePath>
</parent>

- <artifactId>doris-fe-core</artifactId>
+ <artifactId>fe-core</artifactId>
<version>3.4.0</version>
<packaging>jar</packaging>

@@ -40,6 +40,11 @@ under the License.
</properties>

<dependencies>
+ <dependency>
+ <groupId>org.apache</groupId>
+ <artifactId>spark-dpp</artifactId>
+ </dependency>

<!-- https://mvnrepository.com/artifact/cglib/cglib -->
<dependency>
<groupId>cglib</groupId>
fe/fe-core/src/main/java/org/apache/doris/common/Config.java
@@ -514,7 +514,7 @@ public class Config extends ConfigBase {
* Default spark dpp version
*/
@ConfField
- public static String spark_dpp_version = "1_0_0";
+ public static String spark_dpp_version = "1.0.0";
/**
* Default spark load timeout
*/
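The dotted form presumably matches the Maven version of the spark-dpp artifact, letting the FE locate the DPP jar by file name. A hypothetical illustration, with the output path taken from the build.sh changes above:

    # "1.0.0" lines up with the version embedded in the jar name; "1_0_0" would not
    ls output/fe/spark-dpp/spark-dpp-1.0.0-jar-with-dependencies.jar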
fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkEtlJobHandler.java
@@ -53,6 +53,7 @@
import org.apache.spark.launcher.SparkAppHandle.State;
import org.apache.spark.launcher.SparkLauncher;

+ import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.List;
@@ -78,10 +79,14 @@ public class SparkEtlJobHandler {

class SparkAppListener implements Listener {
@Override
- public void stateChanged(SparkAppHandle sparkAppHandle) {}
+ public void stateChanged(SparkAppHandle sparkAppHandle) {
+ LOG.info("get spark state changed: {}, app id: {}", sparkAppHandle.getState(), sparkAppHandle.getAppId());
+ }

@Override
- public void infoChanged(SparkAppHandle sparkAppHandle) {}
+ public void infoChanged(SparkAppHandle sparkAppHandle) {
+ LOG.info("get spark info changed: {}, app id: {}", sparkAppHandle.getState(), sparkAppHandle.getAppId());
+ }
}

public void submitEtlJob(long loadJobId, String loadLabel, EtlJobConfig etlJobConfig, SparkResource resource,
@@ -134,7 +139,9 @@ public void submitEtlJob(long loadJobId, String loadLabel, EtlJobConfig etlJobCo
.setMainClass(SparkEtlJob.class.getCanonicalName())
.setAppName(String.format(ETL_JOB_NAME, loadLabel))
.setSparkHome(sparkHome)
- .addAppArgs(jobConfigHdfsPath);
+ .addAppArgs(jobConfigHdfsPath)
+ .redirectError()
+ .redirectOutput(new File(Config.sys_log_dir + "/spark-submitter.log"));

// spark configs
for (Map.Entry<String, String> entry : resource.getSparkConfigs().entrySet()) {
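With stderr merged into stdout and both redirected to a file, the spark-submit output of an ETL job can be inspected after the fact, alongside the state transitions now logged by SparkAppListener. For example, with sys_log_dir standing in for the FE log directory configured above:

    tail -f ${sys_log_dir}/spark-submitter.log   # log path as set in submitEtlJob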