Skip to content

Commit

Permalink
Merge branch 'apache:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
FreeOnePlus authored Jan 24, 2024
2 parents 027a73d + b304d26 commit f575143
Show file tree
Hide file tree
Showing 1,137 changed files with 83,353 additions and 12,517 deletions.
2 changes: 0 additions & 2 deletions .asf.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@ github:
- Build Broker
- Build Documents
- ShellCheck
- clickbench-new (clickbench)
- Build Third Party Libraries (Linux)
- Build Third Party Libraries (macOS)
- COMPILE (DORIS_COMPILE)
Expand Down Expand Up @@ -96,7 +95,6 @@ github:
- Build Broker
- ShellCheck
- Build Documents
- clickbench-new (clickbench)
- Build Third Party Libraries (Linux)
- Build Third Party Libraries (macOS)
- COMPILE (DORIS_COMPILE)
Expand Down
11 changes: 6 additions & 5 deletions .github/workflows/code-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -79,15 +79,16 @@ jobs:
id: filter
with:
filters: |
be_changes:
cpp_changes:
- 'be/**'
- 'cloud/**'
- 'gensrc/proto/**'
- 'gensrc/thrift/**'
- name: Generate compile_commands.json
id: generate
run: |
if [[ "${{ steps.filter.outputs.be_changes }}" == 'true' ]]; then
if [[ "${{ steps.filter.outputs.cpp_changes }}" == 'true' ]]; then
export DEFAULT_DIR='/opt/doris'
mkdir "${DEFAULT_DIR}"
Expand All @@ -104,14 +105,14 @@ jobs:
popd
export PATH="${DEFAULT_DIR}/ldb-toolchain/bin/:$(pwd)/thirdparty/installed/bin/:${PATH}"
DISABLE_JAVA_UDF=ON DORIS_TOOLCHAIN=clang ENABLE_PCH=OFF OUTPUT_BE_BINARY=0 ./build.sh --be
DISABLE_JAVA_UDF=ON DORIS_TOOLCHAIN=clang ENABLE_PCH=OFF OUTPUT_BE_BINARY=0 ./build.sh --be --cloud
fi
echo "should_check=${{ steps.filter.outputs.be_changes }}" >>${GITHUB_OUTPUT}
echo "should_check=${{ steps.filter.outputs.cpp_changes }}" >>${GITHUB_OUTPUT}
- name: Upload
uses: actions/upload-artifact@v3
if: ${{ steps.filter.outputs.be_changes == 'true' }}
if: ${{ steps.filter.outputs.cpp_changes == 'true' }}
with:
name: compile_commands
path: ./be/build_Release/compile_commands.json
Expand Down
36 changes: 18 additions & 18 deletions .github/workflows/comment-to-trigger-teamcity.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,11 +41,11 @@ jobs:
"${COMMENT_BODY}" == *'run compile'* ||
"${COMMENT_BODY}" == *'run beut'* ||
"${COMMENT_BODY}" == *'run feut'* ||
"${COMMENT_BODY}" == *'run cloudut'* ||
"${COMMENT_BODY}" == *'run p0'* ||
"${COMMENT_BODY}" == *'run p1'* ||
"${COMMENT_BODY}" == *'run external'* ||
"${COMMENT_BODY}" == *'run pipelinex_p0'* ||
"${COMMENT_BODY}" == *'run clickbench'* ||
"${COMMENT_BODY}" == *'run arm'* ||
"${COMMENT_BODY}" == *'run performance'* ]]; then
echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
Expand All @@ -63,7 +63,7 @@ jobs:
echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
reg="run (buildall|compile|p0|p1|feut|beut|external|clickbench|pipelinex_p0|arm|performance)( [1-9]*[0-9]+)*"
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|arm|performance)( [1-9]*[0-9]+)*"
COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a "$GITHUB_OUTPUT"
Expand All @@ -90,10 +90,10 @@ jobs:
else
echo "changed_be_ut=false" | tee -a "$GITHUB_OUTPUT"
fi
if file_changed_ckb; then
echo "changed_ckb=true" | tee -a "$GITHUB_OUTPUT"
if file_changed_cloud_ut; then
echo "changed_cloud_ut=true" | tee -a "$GITHUB_OUTPUT"
else
echo "changed_ckb=false" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_ut=false" | tee -a "$GITHUB_OUTPUT"
fi
if file_changed_regression_p0; then
echo "changed_p0=true" | tee -a "$GITHUB_OUTPUT"
Expand All @@ -120,7 +120,7 @@ jobs:
echo "INFO: failed to _get_pr_changed_files, default trigger all"
echo "changed_fe_ut=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_be_ut=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_ckb=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_ut=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_external=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_pipelinex_p0=true" | tee -a "$GITHUB_OUTPUT"
Expand Down Expand Up @@ -158,6 +158,18 @@ jobs:
"beut" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
# Trigger the cloud unit-test pipeline when the comment is "run cloudut"
# (or "run buildall") and cloud-related files changed.
- name: "Trigger or Skip cloudut"
  if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["cloudut", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
  run: |
    source ./regression-test/pipeline/common/teamcity-utils.sh
    set -x
    # args: <files_changed> <pr_num> <commit_id> <pipeline_name> <repeat_times>
    # NOTE: pipeline name must be "cloudut" to match this step's trigger type;
    # the previous "beut" was a copy-paste slip from the beut step above.
    trigger_or_skip_build \
      "${{ steps.changes.outputs.changed_cloud_ut }}" \
      "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
      "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
      "cloudut" \
      "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
- name: "Trigger or Skip compile"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["compile", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
Expand Down Expand Up @@ -243,18 +255,6 @@ jobs:
"arm" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
- name: "Trigger or Skip clickbench"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["clickbench", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
source ./regression-test/pipeline/common/teamcity-utils.sh
set -x
trigger_or_skip_build \
"${{ steps.changes.outputs.changed_ckb }}" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"clickbench" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
- name: "Trigger or Skip performance"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["performance", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
Expand Down
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,11 @@ be/.devcontainer/
be/src/apache-orc/
zoneinfo/

# Cloud
cloud/build*/
cloud/cmake-build*/
cloud/ut_build*/

## tools
tools/ssb-tools/ssb-data/
tools/ssb-tools/ssb-dbgen/
Expand Down
10 changes: 10 additions & 0 deletions be/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,10 @@ execute_process(COMMAND bash "-c" "uname -m"
OUTPUT_STRIP_TRAILING_WHITESPACE)
message(STATUS "Build target arch is ${CMAKE_BUILD_TARGET_ARCH}")

if ("${CMAKE_BUILD_TARGET_ARCH}" STREQUAL "aarch64" OR "${CMAKE_BUILD_TARGET_ARCH}" STREQUAL "arm64")
set (ARCH_ARM 1)
endif()

# Set dirs
set(BASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
set(ENV{DORIS_HOME} "${BASE_DIR}/..")
Expand Down Expand Up @@ -302,13 +306,19 @@ if (RECORD_COMPILER_SWITCHES)
add_compile_options(-frecord-gcc-switches)
endif()

# simd for architectures
if ("${CMAKE_BUILD_TARGET_ARCH}" STREQUAL "x86" OR "${CMAKE_BUILD_TARGET_ARCH}" STREQUAL "x86_64")
add_compile_options(-msse4.2)
if (USE_AVX2)
add_compile_options(-mavx2)
endif()
endif()

if (ARCH_ARM)
add_compile_options(-march=armv8-a+crc)
endif()
#

if (WITH_MYSQL)
add_compile_options(-DDORIS_WITH_MYSQL)
endif()
Expand Down
17 changes: 9 additions & 8 deletions be/src/agent/agent_server.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ AgentServer::~AgentServer() = default;

void AgentServer::start_workers(ExecEnv* exec_env) {
// TODO(plat1ko): CloudStorageEngine
auto& engine = *StorageEngine::instance();
auto& engine = ExecEnv::GetInstance()->storage_engine().to_local();
// clang-format off
_alter_inverted_index_workers = std::make_unique<TaskWorkerPool>(
"ALTER_INVERTED_INDEX", config::alter_index_worker_count, [&engine](auto&& task) { return alter_inverted_index_callback(engine, task); });
Expand Down Expand Up @@ -131,11 +131,11 @@ void AgentServer::start_workers(ExecEnv* exec_env) {
"CLEAR_TRANSACTION_TASK", config::clear_transaction_task_worker_count, [&engine](auto&& task) { return clear_transaction_task_callback(engine, task); });

_push_delete_workers = std::make_unique<TaskWorkerPool>(
"DELETE", config::delete_worker_count, push_callback);
"DELETE", config::delete_worker_count, [&engine](auto&& task) { push_callback(engine, task); });

// Both PUSH and REALTIME_PUSH type use push_callback
_push_load_workers = std::make_unique<PriorTaskWorkerPool>(
"PUSH", config::push_worker_count_normal_priority, config::push_worker_count_high_priority, push_callback);
"PUSH", config::push_worker_count_normal_priority, config::push_worker_count_high_priority, [&engine](auto&& task) { push_callback(engine, task); });

_update_tablet_meta_info_workers = std::make_unique<TaskWorkerPool>(
"UPDATE_TABLET_META_INFO", 1, [&engine](auto&& task) { return update_tablet_meta_callback(engine, task); });
Expand Down Expand Up @@ -292,12 +292,12 @@ void AgentServer::submit_tasks(TAgentResult& agent_result,
ret_st.to_thrift(&agent_result.status);
}

void AgentServer::make_snapshot(TAgentResult& t_agent_result,
void AgentServer::make_snapshot(StorageEngine& engine, TAgentResult& t_agent_result,
const TSnapshotRequest& snapshot_request) {
string snapshot_path;
bool allow_incremental_clone = false;
Status status = SnapshotManager::instance()->make_snapshot(snapshot_request, &snapshot_path,
&allow_incremental_clone);
Status status = engine.snapshot_mgr()->make_snapshot(snapshot_request, &snapshot_path,
&allow_incremental_clone);
if (!status) {
LOG_WARNING("failed to make snapshot")
.tag("tablet_id", snapshot_request.tablet_id)
Expand All @@ -316,8 +316,9 @@ void AgentServer::make_snapshot(TAgentResult& t_agent_result,
t_agent_result.__set_snapshot_version(snapshot_request.preferred_snapshot_version);
}

void AgentServer::release_snapshot(TAgentResult& t_agent_result, const std::string& snapshot_path) {
Status status = SnapshotManager::instance()->release_snapshot(snapshot_path);
void AgentServer::release_snapshot(StorageEngine& engine, TAgentResult& t_agent_result,
const std::string& snapshot_path) {
Status status = engine.snapshot_mgr()->release_snapshot(snapshot_path);
if (!status) {
LOG_WARNING("failed to release snapshot").tag("snapshot_path", snapshot_path).error(status);
} else {
Expand Down
9 changes: 5 additions & 4 deletions be/src/agent/agent_server.h
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ class TAgentResult;
class TAgentTaskRequest;
class TMasterInfo;
class TSnapshotRequest;
class StorageEngine;

// Each method corresponds to one RPC from FE Master, see BackendService.
class AgentServer {
Expand All @@ -46,8 +47,10 @@ class AgentServer {
void submit_tasks(TAgentResult& agent_result, const std::vector<TAgentTaskRequest>& tasks);

// TODO(lingbin): make the agent_result to be a pointer, because it will be modified.
void make_snapshot(TAgentResult& agent_result, const TSnapshotRequest& snapshot_request);
void release_snapshot(TAgentResult& agent_result, const std::string& snapshot_path);
static void make_snapshot(StorageEngine& engine, TAgentResult& agent_result,
const TSnapshotRequest& snapshot_request);
static void release_snapshot(StorageEngine& engine, TAgentResult& agent_result,
const std::string& snapshot_path);

// Deprecated
// TODO(lingbin): This method is deprecated, should be removed later.
Expand All @@ -59,8 +62,6 @@ class AgentServer {
private:
void start_workers(ExecEnv* exec_env);

DISALLOW_COPY_AND_ASSIGN(AgentServer);

// Reference to the ExecEnv::_master_info
const TMasterInfo& _master_info;

Expand Down
3 changes: 2 additions & 1 deletion be/src/agent/be_exec_version_manager.h
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,9 @@ class BeExecVersionManager {
* b. array contains/position/countequal function return nullable in less situations.
* c. cleared old version of Version 2.
* d. unix_timestamp function support timestamp with float for datetimev2, and change nullable mode.
* e. the right function outputs NULL when the function contains NULL, substr function returns empty if start > str.length.
* 4: start from doris 2.1.x
* a. change shuffle serialize/deserialize way
* a. change shuffle serialize/deserialize way
*/
inline const int BeExecVersionManager::max_be_exec_version = 4;
inline const int BeExecVersionManager::min_be_exec_version = 0;
Expand Down
14 changes: 7 additions & 7 deletions be/src/agent/heartbeat_server.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,14 @@ class TProcessor;
namespace doris {

HeartbeatServer::HeartbeatServer(TMasterInfo* master_info)
: _master_info(master_info), _fe_epoch(0) {
_olap_engine = StorageEngine::instance();
: _engine(ExecEnv::GetInstance()->storage_engine()),
_master_info(master_info),
_fe_epoch(0) {
_be_epoch = GetCurrentTimeMicros() / 1000;
}

void HeartbeatServer::init_cluster_id() {
_master_info->cluster_id = _olap_engine->effective_cluster_id();
_master_info->cluster_id = _engine.effective_cluster_id();
}

void HeartbeatServer::heartbeat(THeartbeatResult& heartbeat_result,
Expand Down Expand Up @@ -96,7 +97,7 @@ Status HeartbeatServer::_heartbeat(const TMasterInfo& master_info) {
if (_master_info->cluster_id == -1) {
LOG(INFO) << "get first heartbeat. update cluster id";
// write and update cluster id
RETURN_IF_ERROR(_olap_engine->set_cluster_id(master_info.cluster_id));
RETURN_IF_ERROR(_engine.set_cluster_id(master_info.cluster_id));

_master_info->cluster_id = master_info.cluster_id;
LOG(INFO) << "record cluster id. host: " << master_info.network_address.hostname
Expand Down Expand Up @@ -200,8 +201,7 @@ Status HeartbeatServer::_heartbeat(const TMasterInfo& master_info) {
_master_info->__set_token(master_info.token);
LOG(INFO) << "get token. token: " << _master_info->token;
} else if (_master_info->token != master_info.token) {
return Status::InternalError("invalid token. local_token: {}, token: {}",
_master_info->token, master_info.token);
return Status::InternalError("invalid token");
}
}

Expand Down Expand Up @@ -229,7 +229,7 @@ Status HeartbeatServer::_heartbeat(const TMasterInfo& master_info) {

if (need_report) {
LOG(INFO) << "Master FE is changed or restarted. report tablet and disk info immediately";
_olap_engine->notify_listeners();
_engine.notify_listeners();
}

return Status::OK();
Expand Down
4 changes: 2 additions & 2 deletions be/src/agent/heartbeat_server.h
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class THeartbeatResult;
class TMasterInfo;

const uint32_t HEARTBEAT_INTERVAL = 10;
class StorageEngine;
class BaseStorageEngine;
class ThriftServer;

class HeartbeatServer : public HeartbeatServiceIf {
Expand All @@ -53,7 +53,7 @@ class HeartbeatServer : public HeartbeatServiceIf {
private:
Status _heartbeat(const TMasterInfo& master_info);

StorageEngine* _olap_engine = nullptr;
BaseStorageEngine& _engine;
int64_t _be_epoch;

// mutex to protect master_info and _epoch
Expand Down
Loading

0 comments on commit f575143

Please sign in to comment.