[Docs] Refine the API reference generation process for libraries #453

Merged: 10 commits, Apr 17, 2024
2 changes: 1 addition & 1 deletion .github/workflows/ci-nightly.yml
@@ -28,7 +28,7 @@ jobs:
if: ${{ github.ref == 'refs/heads/main' && github.repository == 'apache/incubator-graphar' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: true

38 changes: 19 additions & 19 deletions .github/workflows/ci.yml
@@ -40,6 +40,8 @@ concurrency:
jobs:
GraphAr-ubuntu-arrow-installed:
runs-on: ubuntu-latest
env:
GAR_TEST_DATA: ${{ github.workspace }}/testing/
steps:
- uses: actions/checkout@v3
with:
@@ -57,7 +59,7 @@ jobs:
libarrow-dataset-dev=14.0.1-1 \
libarrow-acero-dev=14.0.1-1 \
libparquet-dev=14.0.1-1
sudo apt-get install -y libboost-graph-dev ccache libcurl4-openssl-dev
sudo apt-get install -y libboost-graph-dev ccache libcurl4-openssl-dev doxygen

# install benchmark
git clone --branch v1.8.3 https://github.com/google/benchmark.git --depth 1
@@ -67,20 +69,19 @@
popd

- name: CMake
working-directory: "cpp"
run: |
mkdir build
pushd build
cmake ../cpp -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_BENCHMARKS=ON
popd
cd build
cmake .. -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_BENCHMARKS=ON

- name: Cpp Format and lint
working-directory: "cpp/build"
run: |
# install clang-format
sudo curl -L https://github.com/muttleyxd/clang-tools-static-binaries/releases/download/master-22538c65/clang-format-8_linux-amd64 --output /usr/bin/clang-format
sudo chmod +x /usr/bin/clang-format

pushd build

# validate format
function prepend() { while read line; do echo "${1}${line}"; done; }

@@ -120,29 +121,27 @@
exit -1
fi

popd
- name: Build Docs
working-directory: "cpp/build"
run: |
cmake -DENABLE_DOCS=ON ..
make docs

- name: Build GraphAr
run: |
pushd build
make -j$(nproc)
popd
working-directory: "cpp/build"
run: make -j$(nproc)

- name: Test
working-directory: "cpp/build"
run: |
pushd build
export ASAN_OPTIONS=detect_leaks=0
export GAR_TEST_DATA=$PWD/../testing/
make test
popd
ctest --output-on-failure

- name: Benchmark
working-directory: "cpp/build"
run: |
pushd build
export GAR_TEST_DATA=$PWD/../testing/
./graph_info_benchmark
./arrow_chunk_reader_benchmark
popd

GraphAr-centos-arrow-installed:
runs-on: ubuntu-latest
@@ -178,9 +177,10 @@

- name: Build GraphAr
shell: scl enable devtoolset-8 -- bash --noprofile --norc -eo pipefail {0}
working-directory: "cpp"
run: |
mkdir build
pushd build
cmake ../cpp
cmake ..
make -j$(nproc)
popd
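
A note on the recurring edit in ci.yml above, repeated in the workflow files below: the ad-hoc pushd/popd sequences inside run blocks are replaced by a step-level working-directory, and the test-data path moves into a job-level env entry so it no longer has to be exported in every step. A minimal sketch of the resulting pattern, with a job name and step layout that are illustrative only:

jobs:
  example-job:
    runs-on: ubuntu-latest
    env:
      # set once for the whole job instead of exporting it in each run block
      GAR_TEST_DATA: ${{ github.workspace }}/testing/
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
      - name: Test
        working-directory: "cpp/build"
        run: ctest --output-on-failure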
7 changes: 3 additions & 4 deletions .github/workflows/docs.yml
@@ -21,15 +21,14 @@ on:
push:
branches:
- main
paths:
- 'docs/**'
- '.github/workflows/docs.yml'
pull_request:
branches:
- main
paths:
- 'docs/**'
- 'cpp/**'
- 'java/**'
- 'spark/**'
- 'pyspark/**'
- '.github/workflows/docs.yml'

concurrency:
20 changes: 12 additions & 8 deletions .github/workflows/java.yml
@@ -38,10 +38,10 @@ concurrency:
cancel-in-progress: true

jobs:
GraphAr-java:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: true

@@ -61,18 +61,22 @@ jobs:
sudo apt-get install llvm-11 clang-11 lld-11 libclang-11-dev libz-dev -y

- name: Code Format Check
working-directory: java
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd java
mvn spotless:check
popd
mvn --no-transfer-progress spotless:check

- name: Build Java Docs
working-directory: java
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
mvn --no-transfer-progress javadoc:javadoc

- name: Run test
working-directory: java
run: |
# Temporarily using Java 8, related issue: https://github.com/apache/incubator-graphar/issues/277
export JAVA_HOME=${JAVA_HOME_8_X64}
export LLVM11_HOME=/usr/lib/llvm-11
pushd java
export GAR_TEST_DATA=$PWD/../testing/
mvn clean test -Dspotless.check.skip=true
popd
mvn --no-transfer-progress clean test -Dspotless.check.skip=true
31 changes: 17 additions & 14 deletions .github/workflows/pyspark.yml
@@ -40,10 +40,10 @@ concurrency:
cancel-in-progress: true

jobs:
GraphAr-spark:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: true

@@ -53,24 +53,27 @@
python-version: 3.9

- name: Install Poetry
working-directory: pyspark
run: |
yes | sudo python3 -m pip install poetry --quiet
cd pyspark
poetry env use python3

- name: Install Spark Scala && PySpark
run: |
cd pyspark
make install_test

- name: Run PyTest
run: |
cd pyspark
make test

- name: Lint
working-directory: pyspark
run: |
cd pyspark
make install_lint
make lint

- name: Build Docs
working-directory: pyspark
run: |
make install_docs
make docs

- name: Install Spark Scala && PySpark
working-directory: pyspark
run: make install_test

- name: Run PyTest
working-directory: pyspark
run: make test
39 changes: 18 additions & 21 deletions .github/workflows/spark.yaml
@@ -38,7 +38,7 @@ concurrency:
cancel-in-progress: true

jobs:
GraphAr-spark:
test:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
@@ -52,38 +52,44 @@
spark-hadoop: "spark-3.3.4-bin-hadoop3"

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: true

- name: Code Format Check
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd spark
mvn --no-transfer-progress spotless:check
popd

- name: Build GraphAr Spark
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd spark
echo "Build ${{ matrix.mvn-profile }}"
mvn --no-transfer-progress clean package -DskipTests -Dspotless.check.skip=true -P ${{ matrix.mvn-profile }}
popd

- name: Build Spark Docs
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
echo "Build ${{ matrix.mvn-profile }}"
# FIXME: the install is not necessary, but it is a workaround for the issue
mvn install --no-transfer-progress -DskipTests -Dspotless.check.skip=true
mvn --no-transfer-progress scala:doc

- name: Run test
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
export SPARK_TESTING=1
pushd spark
echo "Test ${{ matrix.mvn-profile }}"
mvn --no-transfer-progress test -Dspotless.check.skip=true -P ${{ matrix.mvn-profile }}
popd
mvn test --no-transfer-progress -Dspotless.check.skip=true -P ${{ matrix.mvn-profile }}

- name: Run Neo4j2GraphAr example
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd spark
scripts/get-spark-to-home.sh ${{ matrix.spark }} ${{ matrix.spark-hadoop }}
export SPARK_HOME="${HOME}/${{ matrix.spark-hadoop }}"
export PATH="${SPARK_HOME}/bin":"${PATH}"
@@ -105,13 +111,10 @@
echo "match (a) -[r] -> () delete a, r;match (a) delete a;" | cypher-shell -u ${NEO4J_USR} -p ${NEO4J_PWD} -d neo4j --format plain
scripts/run-graphar2neo4j.sh

# stop and clean
popd

- name: Run Nebula2GraphAr example
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd spark
scripts/get-nebula-to-home.sh
export SPARK_HOME="${HOME}/${{ matrix.spark-hadoop }}"
export PATH="${SPARK_HOME}/bin":"${PATH}"
@@ -134,13 +137,10 @@
# import from GraphAr
scripts/run-graphar2nebula.sh

# stop and clean
popd

- name: Run Neo4j importer
working-directory: spark
run: |
export JAVA_HOME=${JAVA_HOME_11_X64}
pushd spark
export SPARK_HOME="${HOME}/${{ matrix.spark-hadoop }}"
export PATH="${SPARK_HOME}/bin":"${PATH}"

@@ -149,6 +149,3 @@
# run the importer
cd import
./neo4j.sh neo4j.json

# stop and clean
popd
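
For reference, the documentation-build steps this PR adds across the workflows above, gathered into one condensed sketch (job context and unrelated options omitted; the individual files remain the authoritative versions):

# ci.yml: C++ API reference via Doxygen
- name: Build Docs
  working-directory: "cpp/build"
  run: |
    cmake -DENABLE_DOCS=ON ..
    make docs

# java.yml: Javadoc
- name: Build Java Docs
  working-directory: java
  run: |
    export JAVA_HOME=${JAVA_HOME_11_X64}
    mvn --no-transfer-progress javadoc:javadoc

# pyspark.yml: Python docs via the project Makefile
- name: Build Docs
  working-directory: pyspark
  run: |
    make install_docs
    make docs

# spark.yaml: Scaladoc
- name: Build Spark Docs
  working-directory: spark
  run: |
    export JAVA_HOME=${JAVA_HOME_11_X64}
    mvn --no-transfer-progress scala:doc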
37 changes: 21 additions & 16 deletions cpp/CMakeLists.txt
Expand Up @@ -43,6 +43,27 @@ project(graph-archive LANGUAGES C CXX VERSION ${GAR_VERSION})
option(BUILD_TESTS "Build unit tests" OFF)
option(BUILD_EXAMPLES "Build examples" OFF)
option(BUILD_BENCHMARKS "Build benchmarks" OFF)
option(ENABLE_DOCS "Enable documentation" OFF)
option(BUILD_DOCS_ONLY "Build docs only" OFF)

if (ENABLE_DOCS OR BUILD_DOCS_ONLY)
set(PROJECT_DOCUMENT_SOURCE ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/README.md)
string(REPLACE ";" " " PROJECT_DOCUMENT_SOURCE "${PROJECT_DOCUMENT_SOURCE}")
file(DOWNLOAD https://cdn.jsdelivr.net/gh/jothepro/doxygen-awesome-css@2.2.1/doxygen-awesome.min.css ${CMAKE_BINARY_DIR}/doxygen-awesome.css)
find_package(Doxygen REQUIRED)
set(DOXYGEN_IN ${PROJECT_SOURCE_DIR}/Doxyfile)
set(DOXYGEN_OUT ${CMAKE_BINARY_DIR}/Doxyfile.out)
configure_file(${DOXYGEN_IN} ${DOXYGEN_OUT} @ONLY)
add_custom_target(docs
COMMAND ${DOXYGEN_EXECUTABLE} ${DOXYGEN_OUT}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen"
VERBATIM)

if (BUILD_DOCS_ONLY)
return()
endif()
endif()

# ------------------------------------------------------------------------------
# setting default cmake type to Release
@@ -451,19 +472,3 @@ add_custom_target(gar-cpplint
COMMAND ${PROJECT_SOURCE_DIR}/misc/cpplint.py --root=${PROJECT_SOURCE_DIR}/include ${FILES_NEED_LINT}
COMMENT "Running cpplint check."
VERBATIM)

# ------------------------------------------------------------------------------
# build cpp api doc
# ------------------------------------------------------------------------------
find_program(doxygen_EXECUTABLE doxygen NO_CMAKE_SYSTEM_PATH)
if(doxygen_EXECUTABLE)
add_custom_target(gar-cpp-doc
COMMAND ${doxygen_EXECUTABLE}
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/apidoc
VERBATIM
)
else()
if(NOT doxygen_EXECUTABLE)
message(STATUS "Cannot find the doxygen executable.")
endif()
endif()
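
With the ENABLE_DOCS and BUILD_DOCS_ONLY options introduced above, the C++ API reference can now be generated without building the library targets. A minimal sketch of a CI-style step driving the docs-only mode; the step name and the mkdir/cd layout are illustrative, not part of this change:

- name: Build C++ API docs only
  working-directory: "cpp"
  run: |
    mkdir -p build
    cd build
    # BUILD_DOCS_ONLY registers only the Doxygen "docs" target and returns before the library targets
    cmake .. -DBUILD_DOCS_ONLY=ON
    make docs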