diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..4d074ee63 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,24 @@ +# Set the default behavior, in case people don't have core.autocrlf set. +* text=auto + +# Explicitly declare text files you want to always be normalized and converted +# to native line endings on checkout. +*.c text +*.h text +*.java text +*.scala text +*.sbt text +*.py text + +# Declare files that will always have LF line endings on checkout +*.sh text eol=lf +*.template text eol=lf +*.yml text eol=lf +*.sql text eol=lf + +# Declare files that will always have CRLF line endings on checkout. +*.sln text eol=crlf + +# Denote all files that are truly binary and should not be modified. +*.png binary +*.jpg binary diff --git a/.github/workflows/actions/build-check/action.yml b/.github/workflows/actions/build-check/action.yml new file mode 100644 index 000000000..093df8d1b --- /dev/null +++ b/.github/workflows/actions/build-check/action.yml @@ -0,0 +1,27 @@ +name: Action > Build + +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + - name: Build javascript app (ui) + run: | + source "$HOME/.nvm/nvm.sh" + cd $JEMPI_APP_PATH/JeMPI_UI + yarn install --frozen-lockfile + yarn build + shell: bash + - name: Build Scala Apps + run: | + set -eo pipefail + source "$HOME/.sdkman/bin/sdkman-init.sh" + cd $JEMPI_APP_PATH/JeMPI_EM_Scala + sbt clean assembly + shell: bash + - name: Build Java App + run: | + set -eo pipefail + source "$HOME/.sdkman/bin/sdkman-init.sh" + cd $JEMPI_APP_PATH + mvn clean package + shell: bash \ No newline at end of file diff --git a/.github/workflows/actions/build-deploy-images/action.yml b/.github/workflows/actions/build-deploy-images/action.yml new file mode 100644 index 000000000..1620c1765 --- /dev/null +++ b/.github/workflows/actions/build-deploy-images/action.yml @@ -0,0 +1,25 @@ +name: Build and Deploy Images +inputs: + docker-push-tag: + required: false + 
image-build-tag: + required: true + docker-host: + required: true + docker-username: + required: true + docker-password: + required: true +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/docker-images-build + with: + image-build-tag: ${{ inputs.image-build-tag }} + - uses: ./.github/workflows/actions/docker-images-deploy + with: + image-build-tag: ${{ inputs.image-build-tag }} + docker-push-tag: ${{ inputs.docker-push-tag }} + docker-username: ${{ inputs.docker-username }} + docker-password: ${{ inputs.docker-password }} + docker-host: ${{ inputs.docker-host }} \ No newline at end of file diff --git a/.github/workflows/actions/cached-dependencies/action.yml b/.github/workflows/actions/cached-dependencies/action.yml new file mode 100644 index 000000000..729162fe6 --- /dev/null +++ b/.github/workflows/actions/cached-dependencies/action.yml @@ -0,0 +1,15 @@ +name: Action > CacheDependencies +runs: + using: 'composite' + steps: + - name: Cache SDKMan Install + uses: actions/cache@v4 + with: + path: | + ~/.sdkman + ~/.nvm + ~/.npm + ~/.cache/yarn + $GITHUB_WORKSPACE/JeMPI_Apps/JeMPI_UI/node_modules + # Using the prepare file as it contains all the version of the dependencies + key: build-dependencies-${{ hashFiles('**/.github/workflows/actions/prepare/action.yml', '**/yarn.lock') }} \ No newline at end of file diff --git a/.github/workflows/actions/docker-images-build/action.yml b/.github/workflows/actions/docker-images-build/action.yml new file mode 100644 index 000000000..80d4417e2 --- /dev/null +++ b/.github/workflows/actions/docker-images-build/action.yml @@ -0,0 +1,22 @@ +name: Action > Docker Images Build +inputs: + image-build-tag: + required: true +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + - uses: ./.github/workflows/actions/load-conf-env + - name: Build Docker Images + run: | + set -eo pipefail + source "$HOME/.nvm/nvm.sh" + source "$HOME/.sdkman/bin/sdkman-init.sh" + source 
$GITHUB_WORKSPACE/devops/linux/docker/conf.env + source $GITHUB_WORKSPACE/devops/linux/docker/conf/images/conf-app-images.sh + pushd $GITHUB_WORKSPACE/JeMPI_Apps + source ./build-all-ci.sh "${{ inputs.image-build-tag }}" + popd + docker image ls + shell: bash + \ No newline at end of file diff --git a/.github/workflows/actions/docker-images-deploy/action.yml b/.github/workflows/actions/docker-images-deploy/action.yml new file mode 100644 index 000000000..d09bbd33e --- /dev/null +++ b/.github/workflows/actions/docker-images-deploy/action.yml @@ -0,0 +1,24 @@ +name: Deploy Docker Images +inputs: + image-build-tag: + required: true + docker-push-tag: + required: false + docker-host: + required: true + docker-username: + required: true + docker-password: + required: true +runs: + using: 'composite' + steps: + - run: | + set -eo pipefail + source $GITHUB_WORKFLOW_FOLDER/actions/docker-images-deploy/deployDockerImages.sh \ + "${{ inputs.image-build-tag }}" \ + "${{ inputs.docker-push-tag }}" \ + "${{ inputs.docker-host }}" \ + "${{ inputs.docker-username }}" \ + "${{ inputs.docker-password }}" + shell: bash \ No newline at end of file diff --git a/.github/workflows/actions/docker-images-deploy/deployDockerImages.sh b/.github/workflows/actions/docker-images-deploy/deployDockerImages.sh new file mode 100644 index 000000000..6614a4d9e --- /dev/null +++ b/.github/workflows/actions/docker-images-deploy/deployDockerImages.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +original_tag=$1 +push_tag=$2 +registry_url=$3 +username=$4 +password=$5 + +if [ -z "$registry_url" ] || [ -z "$username" ] || [ -z "$password" ]; then + echo "Docker host details not set. Skipping deploying" + exit 0 +fi + + +if [ -z "$push_tag" ]; then + push_tag=$original_tag +fi + +if ! docker login "$registry_url" -u "$username" -p "$password"; then + echo "Failed to authenticate with Docker registry. Cannot push." 
+ exit 1 +fi + + +IMAGE_LIST=$(docker image ls --filter "reference=*:$original_tag" --format "{{.Repository}}:{{.Tag}}") + +for IMAGE in $IMAGE_LIST; do + IFS=':' read -a image_details <<< "$IMAGE" + push_tag_url="$registry_url/$username/${image_details[0]}:$push_tag" + + echo "Pushing image: $IMAGE to '$push_tag_url'" + + docker tag "$IMAGE" $push_tag_url + docker push $push_tag_url +done \ No newline at end of file diff --git a/.github/workflows/actions/docker-images-save/action.yml b/.github/workflows/actions/docker-images-save/action.yml new file mode 100644 index 000000000..67eeaa403 --- /dev/null +++ b/.github/workflows/actions/docker-images-save/action.yml @@ -0,0 +1,21 @@ +name: Action > Docker Images Build +inputs: + image-build-tag: + required: true +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + - uses: ./.github/workflows/actions/load-conf-env + - name: Build Docker Save + run: | + set -eo pipefail + source $GITHUB_WORKFLOW_FOLDER/actions/docker-images-save/saveImages.sh "${{ inputs.image-build-tag }}" "./.github/workflows/actions/docker-images-save/docker-images" + shell: bash + - uses: actions/upload-artifact@v4 + with: + name: docker-images-${{ inputs.image-build-tag }} + path: | + ./.github/workflows/actions/docker-images-save/docker-images/ + retention-days: 2 + \ No newline at end of file diff --git a/.github/workflows/actions/docker-images-save/saveImages.sh b/.github/workflows/actions/docker-images-save/saveImages.sh new file mode 100644 index 000000000..8ea7f8934 --- /dev/null +++ b/.github/workflows/actions/docker-images-save/saveImages.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +images_path="$2" + +if [ ! 
-d "$images_path" ]; then + mkdir -p "$images_path" +fi + +IMAGE_LIST=$(docker image ls --filter "reference=*:$1" --format "{{.Repository}}:{{.Tag}}") + +for IMAGE in $IMAGE_LIST; do + IFS=':' read -a image_details <<< "$IMAGE" + echo "Saving image: $IMAGE to '$images_path/${image_details[0]}.${image_details[1]}.tar'" + docker save -o "$images_path/${image_details[0]}.${image_details[1]}.tar" "$IMAGE" +done \ No newline at end of file diff --git a/.github/workflows/actions/install-node/action.yml b/.github/workflows/actions/install-node/action.yml new file mode 100644 index 000000000..478096231 --- /dev/null +++ b/.github/workflows/actions/install-node/action.yml @@ -0,0 +1,24 @@ +name: Install Node +inputs: + node-version: + required: true +runs: + using: 'composite' + steps: + - name: Install Nvm + shell: bash + run: | + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash + source "$HOME/.nvm/nvm.sh" + nvm --version + - name: Install node ${{ inputs.node-version }} + run: | + source "$HOME/.nvm/nvm.sh" + nvm install ${{ inputs.node-version }} + shell: bash + - name: Install UI packages + run: | + source "$HOME/.nvm/nvm.sh" + cd $JEMPI_APP_PATH/JeMPI_UI + yarn install --frozen-lockfile + shell: bash \ No newline at end of file diff --git a/.github/workflows/actions/install-sdkman/action.yml b/.github/workflows/actions/install-sdkman/action.yml new file mode 100644 index 000000000..6d9f5c9e4 --- /dev/null +++ b/.github/workflows/actions/install-sdkman/action.yml @@ -0,0 +1,10 @@ +name: Install SDKMan +runs: + using: 'composite' + steps: + - name: Install SDKMan + shell: bash + run: | + curl -s "https://get.sdkman.io" | bash + source "$HOME/.sdkman/bin/sdkman-init.sh" + sdk version \ No newline at end of file diff --git a/.github/workflows/actions/lint/action.yml b/.github/workflows/actions/lint/action.yml new file mode 100644 index 000000000..998169353 --- /dev/null +++ b/.github/workflows/actions/lint/action.yml @@ -0,0 +1,20 @@ +name: 
Action > Lint + +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + - name: Running javascript linter + run: | + source "$HOME/.nvm/nvm.sh" + cd $JEMPI_APP_PATH/JeMPI_UI + yarn install --frozen-lockfile + yarn lint && yarn format + shell: bash + - name: Running java linter + run: | + set -eo pipefail + source "$HOME/.sdkman/bin/sdkman-init.sh" + source $GITHUB_WORKFLOW_FOLDER/actions/lint/mvn_linter.sh $JEMPI_APP_PATH + shell: bash + \ No newline at end of file diff --git a/.github/workflows/actions/lint/mvn_linter.sh b/.github/workflows/actions/lint/mvn_linter.sh new file mode 100644 index 000000000..8db1e2392 --- /dev/null +++ b/.github/workflows/actions/lint/mvn_linter.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +cd "$1" + +for dir in */; do + dir="${dir%/}" + if [ -f "$dir/pom.xml" ]; then + echo "Running Checkstyle for $dir ..." + mvn -f "$dir/pom.xml" checkstyle:check -Dcheckstyle.suppressions.location="$dir/checkstyle/suppression.xml" + fi +done diff --git a/.github/workflows/actions/load-conf-env/action.yml b/.github/workflows/actions/load-conf-env/action.yml new file mode 100644 index 000000000..2df5fca08 --- /dev/null +++ b/.github/workflows/actions/load-conf-env/action.yml @@ -0,0 +1,12 @@ +name: Action > Load Conf Env + +runs: + using: 'composite' + steps: + - name: Load Conf Env + run: | + pushd $GITHUB_WORKSPACE/devops/linux/docker/conf/env + ./create-env-linux-high-1.sh + popd + source $GITHUB_WORKSPACE/devops/linux/docker/conf.env + shell: bash diff --git a/.github/workflows/actions/prepare/action.yml b/.github/workflows/actions/prepare/action.yml new file mode 100644 index 000000000..b3f986242 --- /dev/null +++ b/.github/workflows/actions/prepare/action.yml @@ -0,0 +1,33 @@ +name: Prepare + +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + id: cache-dependencies + - if: ${{ steps.cache-dependencies.outputs.cache-hit != 'true' }} + name: Set up Node + uses: 
./.github/workflows/actions/install-node + with: + node-version: 20 + - if: ${{ steps.cache-dependencies.outputs.cache-hit != 'true' }} + name: Set up SDKMan + uses: ./.github/workflows/actions/install-sdkman + - if: ${{ steps.cache-dependencies.outputs.cache-hit != 'true' }} + name: Set up Java + uses: ./.github/workflows/actions/sdkman-installer + with: + candidate: java + version: '21.0.1-tem' + - if: ${{ steps.cache-dependencies.outputs.cache-hit != 'true' }} + name: Set up Maven + uses: ./.github/workflows/actions/sdkman-installer + with: + candidate: maven + version: '3.9.5' + - if: ${{ steps.cache-dependencies.outputs.cache-hit != 'true' }} + name: Set Scala Build Tools + uses: ./.github/workflows/actions/sdkman-installer + with: + candidate: sbt + version: '1.9.8' \ No newline at end of file diff --git a/.github/workflows/actions/sdkman-installer/action.yml b/.github/workflows/actions/sdkman-installer/action.yml new file mode 100644 index 000000000..53d01cff4 --- /dev/null +++ b/.github/workflows/actions/sdkman-installer/action.yml @@ -0,0 +1,15 @@ +name: SDKMan Installer +inputs: + candidate: + required: true + version: + required: true +runs: + using: 'composite' + steps: + - name: Installing ${{ inputs.candidate }} (version ${{ inputs.version }}) + shell: bash + run: | + echo "$HOME/.sdkman/bin/sdkman-init.sh" + source "$HOME/.sdkman/bin/sdkman-init.sh" + sdk install ${{ inputs.candidate }} ${{ inputs.version }} diff --git a/.github/workflows/actions/smoke-test/action.yml b/.github/workflows/actions/smoke-test/action.yml new file mode 100644 index 000000000..69699655b --- /dev/null +++ b/.github/workflows/actions/smoke-test/action.yml @@ -0,0 +1,7 @@ +name: Action > Smoke Test + +runs: + using: 'composite' + steps: + - name: Checkout Code + uses: actions/checkout@v4 \ No newline at end of file diff --git a/.github/workflows/actions/test/action.yml b/.github/workflows/actions/test/action.yml new file mode 100644 index 000000000..d5ec01713 --- /dev/null 
+++ b/.github/workflows/actions/test/action.yml @@ -0,0 +1,20 @@ +name: Action > Test + +runs: + using: 'composite' + steps: + - uses: ./.github/workflows/actions/cached-dependencies + - name: Testing Java Apps + run: | + set -eo pipefail + source "$HOME/.sdkman/bin/sdkman-init.sh" + cd $JEMPI_APP_PATH + mvn clean test + shell: bash + - name: Testing javascript app (ui) + run: | + source "$HOME/.nvm/nvm.sh" + cd $JEMPI_APP_PATH/JeMPI_UI + yarn install --frozen-lockfile + yarn run test -- --watchAll=false + shell: bash diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index e03bd26f3..000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: JeMPI Maven Build - -on: - pull_request: - branches: [ "main" ] - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up JDK 17 - uses: actions/setup-java@v3 - with: - java-version: '17' - distribution: 'temurin' - cache: maven - - name: Build with Maven - run: mvn -B package --file ./JeMPI_Apps/pom.xml diff --git a/.github/workflows/deploy-images-dockerhub.yml b/.github/workflows/deploy-images-dockerhub.yml new file mode 100644 index 000000000..92b0c7293 --- /dev/null +++ b/.github/workflows/deploy-images-dockerhub.yml @@ -0,0 +1,54 @@ +name: Deploy Images to DockerHub + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to use' + required: true + type: string + +env: + GITHUB_WORKFLOW_FOLDER: ./.github/workflows + JEMPI_APP_PATH: ./JeMPI_Apps + +defaults: + run: + shell: bash + +jobs: + prepare: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/prepare + build-deploy-images: + runs-on: ubuntu-22.04 + needs: [prepare] + steps: + - uses: actions/checkout@v4 + - id: validate-tag + run: | + + CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) + if [ "$CURRENT_BRANCH" != "main" ] && [ "$CURRENT_BRANCH" != "master" ]; then + echo "Can only do a manual 
deployment on main / master. Exiting." + exit 1 + fi + + git fetch --tags + if git rev-parse -q --verify "refs/tags/${{ inputs.tag }}" > /dev/null; then + echo "image-build-tag=$(git rev-parse --abbrev-ref HEAD)-$(git log -1 --pretty=format:%h)" >> $GITHUB_OUTPUT + echo "docker-push-tag=${{ inputs.tag }}" >> $GITHUB_OUTPUT + else + echo "The tag '${{ inputs.tag }}' does not exist on the branch '$GITHUB_REF_NAME'" + exit 1 + fi + + - uses: ./.github/workflows/actions/build-deploy-images + with: + image-build-tag: ${{ steps.validate-tag.outputs.image-build-tag }} + docker-push-tag: ${{ steps.validate-tag.outputs.docker-push-tag }} + docker-host: "docker.io" + docker-username: ${{ secrets.DOCKER_HUB_USER_NAME }} + docker-password: ${{ secrets.DOCKER_HUB_PASSWORD }} \ No newline at end of file diff --git a/.github/workflows/entry-on-merge.yml b/.github/workflows/entry-on-merge.yml new file mode 100644 index 000000000..22dbfae9c --- /dev/null +++ b/.github/workflows/entry-on-merge.yml @@ -0,0 +1,39 @@ +name: OnMerge + +on: + pull_request: + branches: + - 'dev' + - 'main' + - 'master' + types: + - closed + +env: + GITHUB_WORKFLOW_FOLDER: ./.github/workflows + JEMPI_APP_PATH: ./JeMPI_Apps + +defaults: + run: + shell: bash + +jobs: + prepare: + if: github.event.pull_request.merged == true + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/prepare + build-deploy-images: + runs-on: ubuntu-22.04 + needs: [prepare] + steps: + - uses: actions/checkout@v4 + - id: get-image-build-tag + run: echo "image-build-tag=$(git rev-parse --abbrev-ref HEAD)-$(git log -1 --pretty=format:%h)" >> $GITHUB_OUTPUT + - uses: ./.github/workflows/actions/build-deploy-images + with: + image-build-tag: ${{ steps.get-image-build-tag.outputs.image-build-tag }} + docker-host: ${{ vars.DOCKER_LOCAL_HOST_NAME }} + docker-username: ${{ secrets.DOCKER_LOCAL_USER_NAME }} + docker-password: ${{ secrets.DOCKER_LOCAL_PASSWORD }} \ No newline at end of file diff 
--git a/.github/workflows/entry-on-pull-request.yml b/.github/workflows/entry-on-pull-request.yml new file mode 100644 index 000000000..9729c6ab7 --- /dev/null +++ b/.github/workflows/entry-on-pull-request.yml @@ -0,0 +1,49 @@ +name: OnPullRequest + +on: + pull_request: + branches: + - 'dev' + - 'main' + - 'master' + +defaults: + run: + shell: bash + +env: + GITHUB_WORKFLOW_FOLDER: ./.github/workflows + JEMPI_APP_PATH: ./JeMPI_Apps + +jobs: + prepare: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/prepare + lint-check: + needs: [prepare] + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/lint + build-check: + needs: [lint-check] + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/build-check + test: + needs: [build-check] + runs-on: ubuntu-22.04 + continue-on-error: true # TODO: Uncomment this out once tests are in a better state - ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/master' }} + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/test + smoke-test: + needs: [test] + runs-on: ubuntu-22.04 + continue-on-error: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/master' }} + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/smoke-test \ No newline at end of file diff --git a/.github/workflows/entry-on-release.yml b/.github/workflows/entry-on-release.yml new file mode 100644 index 000000000..2a8f298bd --- /dev/null +++ b/.github/workflows/entry-on-release.yml @@ -0,0 +1,39 @@ +name: OnRelease + +on: + release: + branches: + - 'main' + - 'master' + types: [published] + +env: + GITHUB_WORKFLOW_FOLDER: ./.github/workflows + JEMPI_APP_PATH: ./JeMPI_Apps + +defaults: + run: + shell: bash + +jobs: + prepare: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/prepare + 
build-deploy-images: + runs-on: ubuntu-22.04 + needs: [prepare] + steps: + - uses: actions/checkout@v4 + - id: validate-tag + run: | + echo "image-build-tag=$(git rev-parse --abbrev-ref HEAD)-$(git log -1 --pretty=format:%h)" >> $GITHUB_OUTPUT + echo "docker-push-tag=$GITHUB_REF_NAME" >> $GITHUB_OUTPUT + - uses: ./.github/workflows/actions/build-deploy-images + with: + image-build-tag: ${{ steps.validate-tag.outputs.image-build-tag }} + docker-push-tag: ${{ steps.validate-tag.outputs.docker-push-tag }} + docker-host: "docker.io" + docker-username: ${{ secrets.DOCKER_HUB_USER_NAME }} + docker-password: ${{ secrets.DOCKER_HUB_PASSWORD }} \ No newline at end of file diff --git a/.github/workflows/jempiUI.yml b/.github/workflows/jempiUI.yml deleted file mode 100644 index e91bdac9d..000000000 --- a/.github/workflows/jempiUI.yml +++ /dev/null @@ -1,47 +0,0 @@ -on: - pull_request: - branches: - - dev - paths: - - "JeMPI_Apps/JeMPI_UI/**" - push: - branches: - - dev - paths: - - 'JeMPI_Apps/JeMPI_UI/**' -jobs: - common-setup: - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v2 - - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: "18" - - - name: Install Yarn Package Manager - run: npm install -g yarn - - - name: Change Directory - run: cd JeMPI_Apps/JeMPI_UI - - - name: Install Dependencies - run: yarn install --frozen-lockfile - - lint-and-format: - name: Lint and Format - needs: common-setup - runs-on: ubuntu-latest - steps: - - name: Run Formatter/Linter - run: yarn lint && yarn format - - build: - name: Build - needs: common-setup - runs-on: ubuntu-latest - steps: - - name: Build - run: yarn build diff --git a/.github/workflows/save-docker-images.yml b/.github/workflows/save-docker-images.yml new file mode 100644 index 000000000..dfafe0b88 --- /dev/null +++ b/.github/workflows/save-docker-images.yml @@ -0,0 +1,43 @@ +name: Save Docker Images + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to 
use (defaults to [branch]-[commit])' + required: false + type: string + +env: + GITHUB_WORKFLOW_FOLDER: ./.github/workflows + JEMPI_APP_PATH: ./JeMPI_Apps + +defaults: + run: + shell: bash + +jobs: + prepare: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - uses: ./.github/workflows/actions/prepare + save-docker-images: + runs-on: ubuntu-22.04 + needs: [prepare] + steps: + - uses: actions/checkout@v4 + - id: get-image-build-tag + run: | + user_tag=${{ inputs.tag }} + if [ ! -z "$user_tag" ]; then + echo "image-build-tag=$user_tag" >> $GITHUB_OUTPUT + else + echo "image-build-tag=$(git rev-parse --abbrev-ref HEAD)-$(git log -1 --pretty=format:%h)" >> $GITHUB_OUTPUT + fi + - uses: ./.github/workflows/actions/docker-images-build + with: + image-build-tag: ${{ steps.get-image-build-tag.outputs.image-build-tag }} + - uses: ./.github/workflows/actions/docker-images-save + with: + image-build-tag: ${{ steps.get-image-build-tag.outputs.image-build-tag }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index ec7af3b03..4ab1743ec 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,7 @@ sandbox my-notes .Rproj.user +.settings +.classpath +.project +.metals \ No newline at end of file diff --git a/JeMPI_Apps/.scalafmt.conf b/JeMPI_Apps/.scalafmt.conf new file mode 100644 index 000000000..259f078cf --- /dev/null +++ b/JeMPI_Apps/.scalafmt.conf @@ -0,0 +1,2 @@ +version = 3.7.17 +runner.dialect = scala213 \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_API/docker/Dockerfile b/JeMPI_Apps/JeMPI_API/docker/Dockerfile index 7aff0c8bd..02219df10 100644 --- a/JeMPI_Apps/JeMPI_API/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_API/docker/Dockerfile @@ -6,7 +6,7 @@ ADD API-1.0-SNAPSHOT-spring-boot.jar /app/app.jar RUN printf "#!/bin/bash\n\ cd /app\n\ -java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/app.jar\n" > /entrypoint.sh +java -server -XX:MaxRAMPercentage=80 -jar /app/app.jar\n" > /entrypoint.sh RUN chmod +x 
/entrypoint.sh diff --git a/JeMPI_Apps/JeMPI_API/pom.xml b/JeMPI_Apps/JeMPI_API/pom.xml index c2a4b1b54..d34add8d2 100644 --- a/JeMPI_Apps/JeMPI_API/pom.xml +++ b/JeMPI_Apps/JeMPI_API/pom.xml @@ -216,15 +216,15 @@ - - org.apache.maven.plugins - maven-compiler-plugin - - 17 - 17 - --enable-preview - - + + + + + + + + + diff --git a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/AppConfig.java index 350388b82..859286aea 100644 --- a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/AppConfig.java +++ b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/AppConfig.java @@ -14,21 +14,21 @@ public final class AppConfig { private static final Logger LOGGER = LogManager.getLogger(AppConfig.class); private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - public static final Config CONFIG = new Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); + public static final Config CONFIG = new Builder().withSystemEnvironment() + .withSystemProperties() + .withOptionalRelativeFile("/conf/server.production.conf") + .withOptionalRelativeFile("/conf/server.staging.conf") + .withOptionalRelativeFile("/conf/server.test.conf") + .withResource("application.local.conf") + .withResource("application.conf") + .build(); public static final String POSTGRESQL_IP = CONFIG.getString("POSTGRESQL_IP"); public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); public static final String POSTGRESQL_USER = CONFIG.getString("POSTGRESQL_USER"); public static final String POSTGRESQL_PASSWORD = CONFIG.getString("POSTGRESQL_PASSWORD"); - 
public static final String POSTGRESQL_DATABASE = CONFIG.getString("POSTGRESQL_DATABASE"); + public static final String POSTGRESQL_NOTIFICATIONS_DB = CONFIG.getString("POSTGRESQL_NOTIFICATIONS_DB"); + public static final String POSTGRESQL_AUDIT_DB = CONFIG.getString("POSTGRESQL_AUDIT_DB"); public static final String KAFKA_BOOTSTRAP_SERVERS = CONFIG.getString("KAFKA_BOOTSTRAP_SERVERS"); public static final String KAFKA_APPLICATION_ID = CONFIG.getString("KAFKA_APPLICATION_ID"); private static final String[] DGRAPH_ALPHA_HOSTS = CONFIG.getString("DGRAPH_HOSTS").split(","); @@ -42,18 +42,23 @@ public final class AppConfig { public static final String LINKER_IP = CONFIG.getString("LINKER_IP"); public static final Integer LINKER_HTTP_PORT = CONFIG.getInt("LINKER_HTTP_PORT"); + + public static final String CONTROLLER_IP = CONFIG.getString("CONTROLLER_IP"); + public static final Integer CONTROLLER_HTTP_PORT = CONFIG.getInt("CONTROLLER_HTTP_PORT"); public static final Integer API_HTTP_PORT = CONFIG.getInt("API_HTTP_PORT"); public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); + + private AppConfig() { + } + public static String[] getDGraphHosts() { return DGRAPH_ALPHA_HOSTS; } + public static int[] getDGraphPorts() { return DGRAPH_ALPHA_PORTS; } - private AppConfig() { - } - private static class Builder { private Config conf = ConfigFactory.empty(); @@ -80,7 +85,9 @@ Builder withSystemEnvironment() { Builder withResource(final String resource) { Config resourceConfig = ConfigFactory.parseResources(resource); - String empty = resourceConfig.entrySet().isEmpty() ? " contains no values" : ""; + String empty = resourceConfig.entrySet().isEmpty() + ? 
" contains no values" + : ""; conf = conf.withFallback(resourceConfig); LOGGER.info("Loaded config file from resource ({}){}", resource, empty); return this; diff --git a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/API.java b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/API.java index a0ac242df..9dac6fab1 100644 --- a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/API.java +++ b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/API.java @@ -16,7 +16,7 @@ public final class API { private static final Logger LOGGER = LogManager.getLogger(API.class); - private static final String CONFIG_RESOURCE_FILE_NAME = "/config-api.json"; + private static final String CONFIG_RESOURCE_FILE_NAME = "config-api.json"; private final JsonFieldsConfig jsonFieldsConfig = new JsonFieldsConfig(CONFIG_RESOURCE_FILE_NAME); private HttpServer httpServer; @@ -35,25 +35,20 @@ public static void main(final String[] args) { public Behavior create() { return Behaviors.setup(context -> { - ActorRef backEnd = - context.spawn(BackEnd.create(AppConfig.GET_LOG_LEVEL, - AppConfig.getDGraphHosts(), - AppConfig.getDGraphPorts(), - AppConfig.POSTGRESQL_IP, - AppConfig.POSTGRESQL_PORT, - AppConfig.POSTGRESQL_USER, - AppConfig.POSTGRESQL_PASSWORD, - AppConfig.POSTGRESQL_DATABASE, - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - "CLIENT_ID_API-" + UUID.randomUUID()), - "BackEnd"); + ActorRef backEnd = context.spawn(BackEnd.create(AppConfig.GET_LOG_LEVEL, + AppConfig.getDGraphHosts(), + AppConfig.getDGraphPorts(), + AppConfig.POSTGRESQL_IP, + AppConfig.POSTGRESQL_PORT, + AppConfig.POSTGRESQL_USER, + AppConfig.POSTGRESQL_PASSWORD, + AppConfig.POSTGRESQL_NOTIFICATIONS_DB, + AppConfig.POSTGRESQL_AUDIT_DB, + AppConfig.KAFKA_BOOTSTRAP_SERVERS, + "CLIENT_ID_API-" + UUID.randomUUID()), "BackEnd"); context.watch(backEnd); httpServer = HttpServer.create(); - httpServer.open("0.0.0.0", - AppConfig.API_HTTP_PORT, - context.getSystem(), - backEnd, - jsonFieldsConfig.jsonFields); + 
httpServer.open("0.0.0.0", AppConfig.API_HTTP_PORT, context.getSystem(), backEnd, jsonFieldsConfig.jsonFields); return Behaviors.receive(Void.class).onSignal(Terminated.class, sig -> { httpServer.close(context.getSystem()); return Behaviors.stopped(); diff --git a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/HttpServer.java b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/HttpServer.java index 6d42e48b8..bb1b55a2c 100644 --- a/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/HttpServer.java +++ b/JeMPI_Apps/JeMPI_API/src/main/java/org/jembi/jempi/api/HttpServer.java @@ -4,8 +4,11 @@ import akka.actor.typed.ActorSystem; import akka.http.javadsl.Http; import akka.http.javadsl.ServerBinding; +import akka.http.javadsl.model.HttpEntity; import akka.http.javadsl.model.StatusCodes; import akka.http.javadsl.server.AllDirectives; +import akka.http.javadsl.server.ExceptionHandler; +import akka.http.javadsl.server.RejectionHandler; import akka.http.javadsl.server.Route; import ch.megard.akka.http.cors.javadsl.settings.CorsSettings; import org.apache.logging.log4j.LogManager; @@ -15,12 +18,9 @@ import org.jembi.jempi.libapi.BackEnd; import org.jembi.jempi.libapi.Routes; import org.jembi.jempi.shared.models.GlobalConstants; -import org.jembi.jempi.shared.models.RecordType; import java.util.concurrent.CompletionStage; -import java.util.regex.Pattern; -import static akka.http.javadsl.server.PathMatchers.segment; import static ch.megard.akka.http.cors.javadsl.CorsDirectives.cors; public final class HttpServer extends AllDirectives { @@ -55,94 +55,39 @@ public void close(final ActorSystem actorSystem) { .thenAccept(unbound -> actorSystem.terminate()); // and shutdown when done } - - private Route createJeMPIRoutes( - final ActorSystem actorSystem, - final ActorRef backEnd, - final String jsonFields) { - return concat(post(() -> concat(path(GlobalConstants.SEGMENT_POST_UPDATE_NOTIFICATION, - () -> Routes.postUpdateNotification(actorSystem, backEnd)), - 
path(segment(GlobalConstants.SEGMENT_POST_SIMPLE_SEARCH).slash(segment(Pattern.compile( - "^(golden|patient)$"))), - type -> Routes.postSimpleSearch(actorSystem, - backEnd, - type.equals("golden") - ? RecordType.GoldenRecord - : RecordType.Interaction)), - path(segment(GlobalConstants.SEGMENT_POST_CUSTOM_SEARCH).slash(segment(Pattern.compile( - "^(golden|patient)$"))), - type -> Routes.postCustomSearch(actorSystem, - backEnd, - type.equals("golden") - ? RecordType.GoldenRecord - : RecordType.Interaction)), - path(GlobalConstants.SEGMENT_POST_UPLOAD_CSV_FILE, - () -> Routes.postUploadCsvFile(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_POST_CALCULATE_SCORES, - () -> Routes.proxyPostCalculateScores(AppConfig.LINKER_IP, - AppConfig.LINKER_HTTP_PORT, - http)), - path(GlobalConstants.SEGMENT_POST_FILTER_GIDS, - () -> Routes.postFilterGids(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_CR_REGISTER, - () -> Routes.postCrRegister(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)), - path(GlobalConstants.SEGMENT_PROXY_CR_FIND, - () -> Routes.postCrFind(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)), - path(GlobalConstants.SEGMENT_PROXY_CR_CANDIDATES, - () -> Routes.postCrCandidates(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)), - path(GlobalConstants.SEGMENT_POST_FILTER_GIDS_WITH_INTERACTION_COUNT, - () -> Routes.postFilterGidsWithInteractionCount(actorSystem, backEnd)))), - patch(() -> concat(path(segment(GlobalConstants.SEGMENT_PATCH_GOLDEN_RECORD).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), gid -> Routes.patchGoldenRecord(actorSystem, backEnd, gid)), - path(GlobalConstants.SEGMENT_PATCH_IID_NEW_GID_LINK, - () -> Routes.patchIidNewGidLink(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PATCH_IID_GID_LINK, - () -> Routes.patchIidGidLink(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_CR_UPDATE_FIELDS, - () -> Routes.patchCrUpdateFields(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)))), 
- get(() -> concat(path(GlobalConstants.SEGMENT_COUNT_GOLDEN_RECORDS, - () -> Routes.countGoldenRecords(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_COUNT_INTERACTIONS, - () -> Routes.countInteractions(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_COUNT_RECORDS, - () -> Routes.countRecords(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_GIDS_ALL, - () -> Routes.getGidsAll(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_GIDS_PAGED, - () -> Routes.getGidsPaged(actorSystem, backEnd)), - path(segment(GlobalConstants.SEGMENT_GET_INTERACTION).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), iid -> Routes.getInteraction(actorSystem, backEnd, iid)), - path(segment(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORD).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), gid -> Routes.getExpandedGoldenRecord(actorSystem, backEnd, gid)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_PARAMETER_LIST, - () -> Routes.getExpandedGoldenRecordsUsingParameterList(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_CSV, - () -> Routes.getExpandedGoldenRecordsFromUsingCSV(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_INTERACTIONS_USING_CSV, - () -> Routes.getExpandedInteractionsUsingCSV(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_GOLDEN_RECORD_AUDIT_TRAIL, - () -> Routes.getGoldenRecordAuditTrail(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_INTERACTION_AUDIT_TRAIL, - () -> Routes.getInteractionAuditTrail(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_NOTIFICATIONS, - () -> Routes.getNotifications(actorSystem, backEnd)), - path(segment(GlobalConstants.SEGMENT_GET_INTERACTION).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), iid -> Routes.getInteraction(actorSystem, backEnd, iid)), - path(segment(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORD).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), gid -> 
Routes.getExpandedGoldenRecord(actorSystem, backEnd, gid)), - path(GlobalConstants.SEGMENT_GET_FIELDS_CONFIG, () -> complete(StatusCodes.OK, jsonFields)), - path(GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES, - () -> Routes.proxyGetCandidatesWithScore(AppConfig.LINKER_IP, - AppConfig.LINKER_HTTP_PORT, - http))))); - } - - Route createCorsRoutes( + public Route createCorsRoutes( final ActorSystem actorSystem, final ActorRef backEnd, final String jsonFields) { final var settings = CorsSettings.create(AppConfig.CONFIG); - return cors(settings, () -> pathPrefix("JeMPI", () -> createJeMPIRoutes(actorSystem, backEnd, jsonFields))); + + final RejectionHandler rejectionHandler = RejectionHandler.defaultHandler().mapRejectionResponse(response -> { + if (response.entity() instanceof HttpEntity.Strict) { + String message = ((HttpEntity.Strict) response.entity()).getData().utf8String(); + LOGGER.warn(String.format("Request was rejected. Reason: %s", message)); + } + + return response; + }); + + final ExceptionHandler exceptionHandler = ExceptionHandler.newBuilder().match(Exception.class, x -> { + LOGGER.error("An exception occurred while executing the Route", x); + return complete(StatusCodes.INTERNAL_SERVER_ERROR, "An exception occurred, see server logs for details"); + }).build(); + + return cors(settings, + () -> pathPrefix("JeMPI", + () -> concat(Routes.createCoreAPIRoutes(actorSystem, + backEnd, + jsonFields, + AppConfig.LINKER_IP, + AppConfig.LINKER_HTTP_PORT, + AppConfig.CONTROLLER_IP, + AppConfig.CONTROLLER_HTTP_PORT, + http), + path(GlobalConstants.SEGMENT_GET_FIELDS_CONFIG, + () -> complete(StatusCodes.OK, jsonFields))))).seal(rejectionHandler, + exceptionHandler); } } diff --git a/JeMPI_Apps/JeMPI_API_KC/docker/Dockerfile b/JeMPI_Apps/JeMPI_API_KC/docker/Dockerfile index 938e16308..20a470f3c 100644 --- a/JeMPI_Apps/JeMPI_API_KC/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_API_KC/docker/Dockerfile @@ -6,7 +6,7 @@ ADD API_KC-1.0-SNAPSHOT-spring-boot.jar 
/app/app.jar RUN printf "#!/bin/bash\n\ cd /app\n\ -java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/app.jar\n" > /entrypoint.sh +java -server -XX:MaxRAMPercentage=80 -jar /app/app.jar\n" > /entrypoint.sh RUN chmod +x /entrypoint.sh diff --git a/JeMPI_Apps/JeMPI_API_KC/pom.xml b/JeMPI_Apps/JeMPI_API_KC/pom.xml index 253194802..7eee2e845 100644 --- a/JeMPI_Apps/JeMPI_API_KC/pom.xml +++ b/JeMPI_Apps/JeMPI_API_KC/pom.xml @@ -230,9 +230,9 @@ org.apache.maven.plugins maven-compiler-plugin - 17 - 17 - --enable-preview + ${java.version} + ${java.version} + diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/AppConfig.java index 3f62c4e67..79d621c49 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/AppConfig.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/AppConfig.java @@ -14,20 +14,21 @@ public final class AppConfig { private static final Logger LOGGER = LogManager.getLogger(AppConfig.class); private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - public static final Config CONFIG = new Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); + public static final Config CONFIG = new Builder().withSystemEnvironment() + .withSystemProperties() + .withOptionalRelativeFile("/conf/server.production.conf") + .withOptionalRelativeFile("/conf/server.staging.conf") + .withOptionalRelativeFile("/conf/server.test.conf") + .withResource("application.local.conf") + .withResource("application.conf") + .build(); public static final String POSTGRESQL_IP = 
CONFIG.getString("POSTGRESQL_IP"); public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); public static final String POSTGRESQL_USER = CONFIG.getString("POSTGRESQL_USER"); public static final String POSTGRESQL_PASSWORD = CONFIG.getString("POSTGRESQL_PASSWORD"); - public static final String POSTGRESQL_DATABASE = CONFIG.getString("POSTGRESQL_DATABASE"); + public static final String POSTGRESQL_USERS_DB = CONFIG.getString("POSTGRESQL_USERS_DB"); + public static final String POSTGRESQL_NOTIFICATIONS_DB = CONFIG.getString("POSTGRESQL_NOTIFICATIONS_DB"); + public static final String POSTGRESQL_AUDIT_DB = CONFIG.getString("POSTGRESQL_AUDIT_DB"); public static final String KAFKA_BOOTSTRAP_SERVERS = CONFIG.getString("KAFKA_BOOTSTRAP_SERVERS"); public static final String KAFKA_APPLICATION_ID = CONFIG.getString("KAFKA_APPLICATION_ID"); private static final String[] DGRAPH_ALPHA_HOSTS = CONFIG.getString("DGRAPH_HOSTS").split(","); @@ -41,6 +42,9 @@ public final class AppConfig { public static final Integer API_KC_HTTP_PORT = CONFIG.getInt("API_KC_HTTP_PORT"); public static final String LINKER_IP = CONFIG.getString("LINKER_IP"); public static final Integer LINKER_HTTP_PORT = CONFIG.getInt("LINKER_HTTP_PORT"); + + public static final String CONTROLLER_IP = CONFIG.getString("CONTROLLER_IP"); + public static final Integer CONTROLLER_HTTP_PORT = CONFIG.getInt("CONTROLLER_HTTP_PORT"); public static final String SESSION_SECRET = CONFIG.getString("JEMPI_SESSION_SECRET"); public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/APIKC.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/APIKC.java index 8704c9204..78e4fd6ba 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/APIKC.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/APIKC.java @@ -6,6 +6,7 @@ import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.jembi.jempi.AppConfig; +import org.jembi.jempi.api.httpServer.HttpServer; import org.jembi.jempi.libapi.BackEnd; import org.jembi.jempi.libapi.JsonFieldsConfig; @@ -14,7 +15,7 @@ public final class APIKC { private static final Logger LOGGER = LogManager.getLogger(APIKC.class); - private static final String CONFIG_RESOURCE_FILE_NAME = "/config-api.json"; + private static final String CONFIG_RESOURCE_FILE_NAME = "config-api.json"; private final JsonFieldsConfig jsonFieldsConfig = new JsonFieldsConfig(CONFIG_RESOURCE_FILE_NAME); private HttpServer httpServer; @@ -41,7 +42,8 @@ public Behavior create() { AppConfig.POSTGRESQL_PORT, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD, - AppConfig.POSTGRESQL_DATABASE, + AppConfig.POSTGRESQL_NOTIFICATIONS_DB, + AppConfig.POSTGRESQL_AUDIT_DB, AppConfig.KAFKA_BOOTSTRAP_SERVERS, "CLIENT_ID_API_KC-" + UUID.randomUUID()), "BackEnd"); @@ -54,12 +56,9 @@ public Behavior create() { final DispatcherSelector selector = DispatcherSelector.fromConfig("akka.actor.default-dispatcher"); final MessageDispatcher dispatcher = (MessageDispatcher) system.dispatchers().lookup(selector); httpServer = new HttpServer(dispatcher); - httpServer.open("0.0.0.0", - AppConfig.API_KC_HTTP_PORT, - context.getSystem(), - backEnd, - jsonFieldsConfig.jsonFields); + httpServer.open("0.0.0.0", AppConfig.API_KC_HTTP_PORT, context.getSystem(), backEnd, jsonFieldsConfig.jsonFields); return Behaviors.receive(Void.class).onSignal(Terminated.class, sig -> { + LOGGER.info("API Server Terminated. 
Reason {}", sig); httpServer.close(context.getSystem()); return Behaviors.stopped(); }).build(); diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/HttpServer.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/HttpServer.java deleted file mode 100644 index dd12708b3..000000000 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/HttpServer.java +++ /dev/null @@ -1,356 +0,0 @@ -package org.jembi.jempi.api; - -import akka.actor.typed.ActorRef; -import akka.actor.typed.ActorSystem; -import akka.dispatch.MessageDispatcher; -import akka.http.javadsl.Http; -import akka.http.javadsl.ServerBinding; -import akka.http.javadsl.marshallers.jackson.Jackson; -import akka.http.javadsl.model.HttpResponse; -import akka.http.javadsl.model.StatusCodes; -import akka.http.javadsl.server.Route; -import ch.megard.akka.http.cors.javadsl.settings.CorsSettings; -import com.softwaremill.session.*; -import com.softwaremill.session.javadsl.HttpSessionAwareDirectives; -import com.softwaremill.session.javadsl.InMemoryRefreshTokenStorage; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.config.Configurator; -import org.jembi.jempi.AppConfig; -import org.jembi.jempi.libapi.Ask; -import org.jembi.jempi.libapi.BackEnd; -import org.jembi.jempi.libapi.Routes; -import org.jembi.jempi.shared.models.GlobalConstants; -import org.jembi.jempi.shared.models.RecordType; -import org.keycloak.adapters.KeycloakDeployment; -import org.keycloak.adapters.ServerRequest; -import org.keycloak.adapters.rotation.AdapterTokenVerifier; -import org.keycloak.common.VerificationException; -import org.keycloak.representations.AccessToken; -import org.keycloak.representations.AccessTokenResponse; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.regex.Pattern; - 
-import static akka.http.javadsl.server.PathMatchers.segment; -import static ch.megard.akka.http.cors.javadsl.CorsDirectives.cors; -import static com.softwaremill.session.javadsl.SessionTransports.CookieST; - -final class HttpServer extends HttpSessionAwareDirectives { - - private static final Logger LOGGER = LogManager.getLogger(HttpServer.class); - - private static final SessionEncoder BASIC_ENCODER = new BasicSessionEncoder<>(UserSession.getSerializer()); - // in-memory refresh token storage - private static final RefreshTokenStorage REFRESH_TOKEN_STORAGE = new InMemoryRefreshTokenStorage<>() { - @Override - public void log(final String msg) { - LOGGER.info(msg); - } - }; - private final Refreshable refreshable; - private final SetSessionTransport sessionTransport; - private CompletionStage binding = null; - private AkkaAdapterConfig keycloakConfig = null; - private KeycloakDeployment keycloak = null; - - private Http http = null; - - HttpServer(final MessageDispatcher dispatcher) { - super(new SessionManager<>(SessionConfig.defaultConfig(AppConfig.SESSION_SECRET), BASIC_ENCODER)); - - // use Refreshable for sessions, which needs to be refreshed or OneOff otherwise - // using Refreshable, a refresh token is set in form of a cookie or a custom header - refreshable = new Refreshable<>(getSessionManager(), REFRESH_TOKEN_STORAGE, dispatcher); - - // set the session transport - based on Cookies (or Headers) - sessionTransport = CookieST; - - ClassLoader classLoader = getClass().getClassLoader(); - InputStream keycloakConfigStream = classLoader.getResourceAsStream("/keycloak.json"); - keycloakConfig = AkkaKeycloakDeploymentBuilder.loadAdapterConfig(keycloakConfigStream); - keycloak = AkkaKeycloakDeploymentBuilder.build(keycloakConfig); - } - - public void close(final ActorSystem actorSystem) { - binding.thenCompose(ServerBinding::unbind) // trigger unbinding from the port - .thenAccept(unbound -> actorSystem.terminate()); // and shutdown when done - } - - public void 
open( - final String httpServerHost, - final int httpPort, - final ActorSystem actorSystem, - final ActorRef backEnd, - final String jsonFields) { - Configurator.setLevel(this.getClass(), AppConfig.GET_LOG_LEVEL); - http = Http.get(actorSystem); - binding = http.newServerAt(httpServerHost, httpPort) - .bind(this.createCorsRoutes(actorSystem, backEnd, jsonFields)); - LOGGER.info("Server online at http://{}:{}", httpServerHost, httpPort); - } - - private Route patchGoldenRecord( - final ActorSystem actorSystem, - final ActorRef backEnd, - final String gid) { - return requiredSession(refreshable, sessionTransport, session -> { - if (session != null) { - LOGGER.info("Current session: {}", session.getEmail()); - return Routes.patchGoldenRecord(actorSystem, backEnd, gid); - } - LOGGER.info("No active session"); - return complete(StatusCodes.FORBIDDEN); - }); - } - - private Route getExpandedGoldenRecord( - final ActorSystem actorSystem, - final ActorRef backEnd, - final String gid) { - return requiredSession(refreshable, - sessionTransport, - session -> Routes.getExpandedGoldenRecord(actorSystem, backEnd, gid)); - } - - private Route getInteraction( - final ActorSystem actorSystem, - final ActorRef backEnd, - final String iid) { - return requiredSession(refreshable, - sessionTransport, - session -> Routes.getInteraction(actorSystem, backEnd, iid)); - } - -// private Route routeGetPatientResource( -// final ActorSystem actorSystem, -// final ActorRef backEnd, -// final String patientResourceId) { -// return onComplete(askFindPatientResource(actorSystem, backEnd, patientResourceId), -// result -> result.isSuccess() -// ? 
result.get() -// .patientResource() -// .mapLeft(this::mapError) -// .fold(error -> error, -// patientResource -> complete(StatusCodes.OK, -// patientResource -// )) -// : complete(StatusCodes.IM_A_TEAPOT)); -// } - -// private Route routeSessionGetPatientResource( -// final ActorSystem actorSystem, -// final ActorRef backEnd, -// final String patientResourceId) { -// return requiredSession(refreshable, sessionTransport, session -> Routes.routeGetPatientResource(actorSystem, backEnd, -// patientResourceId)); -// } - - private User loginWithKeycloakHandler(final OAuthCodeRequestPayload payload) { - LOGGER.debug("loginWithKeycloak"); - LOGGER.debug("Logging in {}", payload); - try { - // Exchange code for a token from Keycloak - AccessTokenResponse tokenResponse = ServerRequest.invokeAccessCodeToToken(keycloak, payload.code(), - keycloakConfig.getRedirectUri(), - payload.sessionId()); - LOGGER.debug("Token Exchange succeeded!"); - - String tokenString = tokenResponse.getToken(); - String idTokenString = tokenResponse.getIdToken(); - - AdapterTokenVerifier.VerifiedTokens tokens = AdapterTokenVerifier.verifyTokens(tokenString, idTokenString, - keycloak); - LOGGER.debug("Token Verification succeeded!"); - AccessToken token = tokens.getAccessToken(); - LOGGER.debug("Is user already registered?"); - String email = token.getEmail(); - User user = PsqlQueries.getUserByEmail(email); - if (user == null) { - // Register new user - LOGGER.debug("User registration ... 
{}", email); - User newUser = User.buildUserFromToken(token); - user = PsqlQueries.registerUser(newUser); - } - LOGGER.debug("User has signed in : {}", user.getEmail()); - return user; - } catch (VerificationException e) { - LOGGER.error("failed verification of token: {}", e.getMessage()); - } catch (ServerRequest.HttpFailure failure) { - LOGGER.error("failed to turn code into token"); - LOGGER.error("status from server: {}", failure.getStatus()); - if (failure.getError() != null && !failure.getError().trim().isEmpty()) { - LOGGER.error(failure.getLocalizedMessage(), failure); - } - } catch (IOException e) { - LOGGER.error("failed to turn code into token", e); - } - return null; - } - - private CompletionStage askLoginWithKeycloak( - final OAuthCodeRequestPayload body) { - CompletionStage stage = CompletableFuture.completedFuture(loginWithKeycloakHandler(body)); - return stage.thenApply(response -> response); - } - - private Route routeLoginWithKeycloakRequest(final CheckHeader checkHeader) { - return entity( - Jackson.unmarshaller(OAuthCodeRequestPayload.class), - obj -> onComplete(askLoginWithKeycloak(obj), response -> { - if (response.isSuccess()) { - final var user = response.get(); - if (user != null) { - return setSession(refreshable, - sessionTransport, - new UserSession(user), - () -> setNewCsrfToken(checkHeader, - () -> complete(StatusCodes.OK, user, Jackson.marshaller()))); - } else { - return complete(StatusCodes.FORBIDDEN); - } - } else { - return complete(StatusCodes.IM_A_TEAPOT); - } - })); - } - - private Route routeCurrentUser() { - return requiredSession(refreshable, sessionTransport, session -> { - if (session != null) { - LOGGER.info("Current session: {}", session.getEmail()); - return complete(StatusCodes.OK, session, Jackson.marshaller()); - } - LOGGER.info("No active session"); - return complete(StatusCodes.FORBIDDEN); - }); - } - - private Route routeLogout() { - return requiredSession(refreshable, - sessionTransport, - session -> 
invalidateSession(refreshable, sessionTransport, () -> extractRequestContext(ctx -> { - LOGGER.info("Logging out {}", session.getUsername()); - return onSuccess(() -> ctx.completeWith(HttpResponse.create()), - routeResult -> complete("success")); - }))); - } - - private Route postUploadCsvFile( - final ActorSystem actorSystem, - final ActorRef backEnd) { - return withSizeLimit(1024L * 1024L * 10L, - () -> requiredSession(refreshable, sessionTransport, session -> { - if (session != null) { - LOGGER.info("Current session: {}", session.getEmail()); - return storeUploadedFile("csv", - info -> { - try { - return File.createTempFile("import-", ".csv"); - } catch (Exception e) { - LOGGER.error("error", e); - return null; - } - }, - (info, file) -> onComplete(Ask.postUploadCsvFile(actorSystem, backEnd, - info, file), - response -> response.isSuccess() - ? complete(StatusCodes.OK) - : complete(StatusCodes.IM_A_TEAPOT))); - } - LOGGER.info("No active session"); - return complete(StatusCodes.FORBIDDEN); - })); - } - - private Route routeCustomSearch( - final ActorSystem actorSystem, - final ActorRef backEnd, - final RecordType recordType) { - return requiredSession(refreshable, sessionTransport, session -> { - LOGGER.info("Custom search on {}", recordType); - // Simple search for golden records - return Routes.postCustomSearch(actorSystem, backEnd, recordType); - }); - } - - private Route createJeMPIRoutes( - final ActorSystem actorSystem, - final ActorRef backEnd) { - return concat(post(() -> concat(path(GlobalConstants.SEGMENT_POST_UPDATE_NOTIFICATION, - () -> Routes.postUpdateNotification(actorSystem, backEnd)), - path(segment(GlobalConstants.SEGMENT_POST_SIMPLE_SEARCH).slash(segment(Pattern.compile( - "^(golden|patient)$"))), type -> { - final var t = type.equals("golden") - ? 
RecordType.GoldenRecord - : RecordType.Interaction; - return Routes.postSimpleSearch(actorSystem, backEnd, t); - }), - path(segment(GlobalConstants.SEGMENT_POST_CUSTOM_SEARCH).slash(segment(Pattern.compile( - "^(golden|patient)$"))), type -> { - final var t = type.equals("golden") - ? RecordType.GoldenRecord - : RecordType.Interaction; - return this.routeCustomSearch(actorSystem, backEnd, t); - }), - path(GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES, () -> Routes.proxyGetCandidatesWithScore(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)), - path(GlobalConstants.SEGMENT_POST_UPLOAD_CSV_FILE, - () -> this.postUploadCsvFile(actorSystem, backEnd)))), - patch(() -> concat(path(segment(GlobalConstants.SEGMENT_PATCH_GOLDEN_RECORD).slash(segment(Pattern.compile( - "^[A-z0-9]+$"))), gid -> this.patchGoldenRecord(actorSystem, backEnd, gid)), - path(GlobalConstants.SEGMENT_PATCH_IID_NEW_GID_LINK, - () -> Routes.patchIidNewGidLink(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PATCH_IID_GID_LINK, - () -> Routes.patchIidGidLink(actorSystem, backEnd)))), - get(() -> concat( - path(GlobalConstants.SEGMENT_CURRENT_USER, this::routeCurrentUser), - path(GlobalConstants.SEGMENT_LOGOUT, this::routeLogout), - path(GlobalConstants.SEGMENT_COUNT_GOLDEN_RECORDS, - () -> Routes.countGoldenRecords(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_COUNT_INTERACTIONS, - () -> Routes.countInteractions(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_COUNT_RECORDS, () -> Routes.countRecords(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_GIDS_ALL, () -> Routes.getGidsAll(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_GIDS_PAGED, () -> Routes.getGidsPaged(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_PARAMETER_LIST, - () -> Routes.getExpandedGoldenRecordsUsingParameterList(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_CSV, - () -> 
Routes.getExpandedGoldenRecordsFromUsingCSV(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_EXPANDED_INTERACTIONS_USING_CSV, - () -> Routes.getExpandedInteractionsUsingCSV(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_GET_NOTIFICATIONS, - () -> Routes.getNotifications(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES, - () -> Routes.proxyGetCandidatesWithScore(AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT, http)), - path(segment(GlobalConstants.SEGMENT_GET_INTERACTION).slash(segment(Pattern.compile("^[A-z0-9]+$"))), - iid -> this.getInteraction(actorSystem, backEnd, iid)), - path(segment(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORD).slash( - segment(Pattern.compile("^[A-z0-9]+$"))), - gid -> this.getExpandedGoldenRecord(actorSystem, backEnd, gid))))); - } - - Route createCorsRoutes( - final ActorSystem actorSystem, - final ActorRef backEnd, - final String jsonFields) { - final var settings = CorsSettings.create(AppConfig.CONFIG); - final CheckHeader checkHeader = new CheckHeader<>(getSessionManager()); - return cors( - settings, - () -> randomTokenCsrfProtection( - checkHeader, - () -> pathPrefix("JeMPI", - () -> concat( - createJeMPIRoutes(actorSystem, backEnd), - post(() -> path(GlobalConstants.SEGMENT_VALIDATE_OAUTH, - () -> routeLoginWithKeycloakRequest(checkHeader))), - get(() -> path(GlobalConstants.SEGMENT_GET_FIELDS_CONFIG, - () -> setNewCsrfToken(checkHeader, - () -> complete(StatusCodes.OK, jsonFields)))))))); - } - -} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/PsqlQueries.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/PsqlQueries.java deleted file mode 100644 index b99bc1200..000000000 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/PsqlQueries.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.jembi.jempi.api; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import 
org.jembi.jempi.AppConfig; - -import java.sql.*; - -final class PsqlQueries { - private static final Logger LOGGER = LogManager.getLogger(PsqlQueries.class); - private static final String URL = String.format("jdbc:postgresql://%s:%d/%s", - AppConfig.POSTGRESQL_IP, - AppConfig.POSTGRESQL_PORT, - AppConfig.POSTGRESQL_DATABASE); - - private PsqlQueries() { - } - - static User getUserByEmail(final String email) { - try (Connection conn = DriverManager.getConnection(URL, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD); - Statement stmt = conn.createStatement()) { - ResultSet rs = stmt.executeQuery("select * from users where email = '" + email + "'"); - if (rs.next()) { - return new User( - rs.getString("id"), - rs.getString("username"), - rs.getString("email"), - rs.getString("family_name"), - rs.getString("given_name") - ); - } - } catch (SQLException e) { - LOGGER.error(e.getLocalizedMessage(), e); - } - return null; - } - - static User registerUser(final User user) { - String sql = "INSERT INTO users (given_name, family_name, email, username) VALUES" - + "('" + user.getGivenName() + "', '" + user.getFamilyName() + "', '" + user.getEmail() + "', '" + user.getUsername() + "')"; - try (Connection conn = DriverManager.getConnection(URL, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD); - Statement statement = conn.createStatement()) { - statement.executeUpdate(sql); - LOGGER.info("Registered a new user"); - } catch (SQLException e) { - LOGGER.error(e.getLocalizedMessage(), e); - } - return getUserByEmail(user.getEmail()); - } - -} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServer.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServer.java new file mode 100644 index 000000000..82a842a4a --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServer.java @@ -0,0 +1,98 @@ +package org.jembi.jempi.api.httpServer; + +import 
akka.actor.typed.ActorRef; +import akka.actor.typed.ActorSystem; +import akka.dispatch.MessageDispatcher; +import akka.http.javadsl.Http; +import akka.http.javadsl.ServerBinding; +import akka.http.javadsl.model.HttpEntity; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.ExceptionHandler; +import akka.http.javadsl.server.RejectionHandler; +import akka.http.javadsl.server.Route; +import ch.megard.akka.http.cors.javadsl.settings.CorsSettings; +import com.softwaremill.session.javadsl.HttpSessionAwareDirectives; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.config.Configurator; +import org.jembi.jempi.AppConfig; +import org.jembi.jempi.api.httpServer.httpServerRoutes.RoutesEntries; +import org.jembi.jempi.api.user.UserSession; +import org.jembi.jempi.libapi.BackEnd; + +import java.util.concurrent.CompletionStage; + +import static ch.megard.akka.http.cors.javadsl.CorsDirectives.cors; + +public final class HttpServer extends HttpSessionAwareDirectives { + private static final Logger LOGGER = LogManager.getLogger(HttpServer.class); + private CompletionStage binding = null; + + private ActorSystem actorSystem; + private ActorRef backEnd; + private String jsonFields; + private Http akkaHttpServer = null; + + public HttpServer(final MessageDispatcher dispatcher) { + super(new HttpServerSessionManager(dispatcher)); + } + + public void close(final ActorSystem actorSystem) { + binding.thenCompose(ServerBinding::unbind) // trigger unbinding from the port + .thenAccept(unbound -> actorSystem.terminate()); // and shutdown when done + } + + public void open( + final String httpServerHost, + final int httpPort, + final ActorSystem actorSystem, + final ActorRef backEnd, + final String jsonFields) { + + this.actorSystem = actorSystem; + this.backEnd = backEnd; + this.jsonFields = jsonFields; + Configurator.setLevel(this.getClass(), AppConfig.GET_LOG_LEVEL); + + akkaHttpServer = 
Http.get(actorSystem); + binding = akkaHttpServer.newServerAt(httpServerHost, httpPort).bind(this.createCorsRoutes()); + LOGGER.info("Server online at http://{}:{}", httpServerHost, httpPort); + } + + public ActorSystem getActorSystem() { + return actorSystem; + } + + public Http getAkkaHttpServer() { + return akkaHttpServer; + } + + public String getJsonFields() { + return jsonFields; + } + + public ActorRef getBackEnd() { + return backEnd; + } + + Route createCorsRoutes() { + final RejectionHandler rejectionHandler = RejectionHandler.defaultHandler().mapRejectionResponse(response -> { + if (response.entity() instanceof HttpEntity.Strict) { + String message = ((HttpEntity.Strict) response.entity()).getData().utf8String(); + LOGGER.warn(String.format("Request was rejected. Reason: %s", message)); + } + + return response; + }); + final ExceptionHandler exceptionHandler = ExceptionHandler.newBuilder().match(Exception.class, x -> { + LOGGER.error("An exception occurred while executing the Route", x); + return complete(StatusCodes.INTERNAL_SERVER_ERROR, "An exception occurred. 
Please see server logs for details"); + }).build(); + + + return cors(CorsSettings.create(AppConfig.CONFIG), + () -> pathPrefix("JeMPI", () -> new RoutesEntries(this).getRouteEntries())).seal(rejectionHandler, + exceptionHandler); + } + +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServerSessionManager.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServerSessionManager.java new file mode 100644 index 000000000..fa2963db0 --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/HttpServerSessionManager.java @@ -0,0 +1,43 @@ +package org.jembi.jempi.api.httpServer; + +import akka.dispatch.MessageDispatcher; +import com.softwaremill.session.*; +import com.softwaremill.session.javadsl.InMemoryRefreshTokenStorage; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.AppConfig; +import org.jembi.jempi.api.user.UserSession; + +import static com.softwaremill.session.javadsl.SessionTransports.HeaderST; + +public final class HttpServerSessionManager extends SessionManager { + private static final Logger LOGGER = LogManager.getLogger(HttpServerSessionManager.class); + private static final SessionEncoder BASIC_ENCODER = new BasicSessionEncoder<>(UserSession.getSerializer()); + // in-memory refresh token storage + private static final RefreshTokenStorage REFRESH_TOKEN_STORAGE = new InMemoryRefreshTokenStorage<>() { + @Override + public void log(final String msg) { + LOGGER.info(msg); + } + }; + private final Refreshable refreshable; + private final SetSessionTransport sessionTransport; + + public HttpServerSessionManager(final MessageDispatcher dispatcher) { + super(SessionConfig.defaultConfig(AppConfig.SESSION_SECRET), BASIC_ENCODER); + // use Refreshable for sessions, which needs to be refreshed or OneOff otherwise + // using Refreshable, a refresh token is set in form of a cookie or a custom header + refreshable 
= new Refreshable<>(this, REFRESH_TOKEN_STORAGE, dispatcher); + + // set the session transport - based on Cookies (or Headers) + sessionTransport = HeaderST; + } + + public Refreshable getRefreshable() { + return refreshable; + } + + public SetSessionTransport getSessionTransport() { + return sessionTransport; + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/ApiHttpServerRouteEntries.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/ApiHttpServerRouteEntries.java new file mode 100644 index 000000000..d15c83408 --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/ApiHttpServerRouteEntries.java @@ -0,0 +1,38 @@ +package org.jembi.jempi.api.httpServer.httpServerRoutes; + +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.Route; +import com.softwaremill.session.CheckHeader; +import org.jembi.jempi.api.httpServer.HttpServer; +import org.jembi.jempi.api.httpServer.HttpServerSessionManager; +import org.jembi.jempi.api.user.UserSession; +import org.jembi.jempi.libapi.httpServer.HttpServerRouteEntries; + +import static akka.http.javadsl.server.Directives.complete; + +public abstract class ApiHttpServerRouteEntries extends HttpServerRouteEntries { + protected HttpServerSessionManager sessionManager; + protected CheckHeader checkHeader; + + public ApiHttpServerRouteEntries(final HttpServer ihttpServer) { + super(ihttpServer); + sessionManager = (HttpServerSessionManager) this.httpServer.getSessionManager(); + checkHeader = new CheckHeader<>(sessionManager); + } + + /** + * @param routes + * @return + */ + protected Route requireSession(final Route routes) { + return this.httpServer.requiredSession(sessionManager.getRefreshable(), sessionManager.getSessionTransport(), session -> { + if (session != null) { + return routes; + } + return complete(StatusCodes.FORBIDDEN); + }); + } + + @Override + public 
abstract Route getRouteEntries(); +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/RoutesEntries.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/RoutesEntries.java new file mode 100644 index 000000000..929259c27 --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/RoutesEntries.java @@ -0,0 +1,30 @@ +package org.jembi.jempi.api.httpServer.httpServerRoutes; + +import akka.http.javadsl.server.Route; +import org.jembi.jempi.AppConfig; +import org.jembi.jempi.api.httpServer.HttpServer; +import org.jembi.jempi.api.httpServer.httpServerRoutes.routes.UserRoutes; +import org.jembi.jempi.libapi.Routes; + +import static akka.http.javadsl.server.Directives.concat; + +public final class RoutesEntries extends ApiHttpServerRouteEntries { + public RoutesEntries(final HttpServer ihttpServer) { + super(ihttpServer); + } + + @Override + public Route getRouteEntries() { + + return concat(new UserRoutes(this.httpServer).getRouteEntries(), + requireSession(Routes.createCoreAPIRoutes(this.httpServer.getActorSystem(), + this.httpServer.getBackEnd(), + this.httpServer.getJsonFields(), + AppConfig.LINKER_IP, + AppConfig.LINKER_HTTP_PORT, + AppConfig.CONTROLLER_IP, + AppConfig.CONTROLLER_HTTP_PORT, + this.httpServer.getAkkaHttpServer()))); + + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/routes/UserRoutes.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/routes/UserRoutes.java new file mode 100644 index 000000000..0f1490250 --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/httpServer/httpServerRoutes/routes/UserRoutes.java @@ -0,0 +1,89 @@ +package org.jembi.jempi.api.httpServer.httpServerRoutes.routes; + +import akka.http.javadsl.marshallers.jackson.Jackson; +import akka.http.javadsl.model.HttpResponse; +import 
akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.Route; +import com.softwaremill.session.CheckHeader; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.api.httpServer.HttpServer; +import org.jembi.jempi.api.httpServer.httpServerRoutes.ApiHttpServerRouteEntries; +import org.jembi.jempi.api.keyCloak.KeyCloakAuthenticator; +import org.jembi.jempi.api.keyCloak.OAuthCodeRequestPayload; +import org.jembi.jempi.api.user.UserSession; +import org.jembi.jempi.shared.models.GlobalConstants; + +import static akka.http.javadsl.server.Directives.*; + +public final class UserRoutes extends ApiHttpServerRouteEntries { + private static final Logger LOGGER = LogManager.getLogger(UserRoutes.class); + private final KeyCloakAuthenticator keyCloakAuthenticator; + + public UserRoutes(final HttpServer ihttpServer) { + super(ihttpServer); + keyCloakAuthenticator = new KeyCloakAuthenticator(); + } + + private Route routeLoginWithKeycloakRequest(final CheckHeader checkHeader) { + + return entity(Jackson.unmarshaller(OAuthCodeRequestPayload.class), + obj -> onComplete(keyCloakAuthenticator.askLoginWithKeycloak(obj), response -> { + if (response.isSuccess()) { + final var user = response.get(); + if (user != null) { + return this.httpServer.setSession(sessionManager.getRefreshable(), + sessionManager.getSessionTransport(), + new UserSession(user), + () -> this.httpServer.setNewCsrfToken(checkHeader, + () -> complete(StatusCodes.OK, + user, + Jackson.marshaller()))); + } else { + return complete(StatusCodes.FORBIDDEN); + } + } else { + return complete(StatusCodes.IM_A_TEAPOT); + } + })); + } + + private Route routeCurrentUser() { + return this.httpServer.optionalSession(sessionManager.getRefreshable(), sessionManager.getSessionTransport(), session -> { + if (session.isPresent()) { + LOGGER.info("Current session: {}", session.get().getUsername()); + return complete(StatusCodes.OK, session, Jackson.marshaller()); + } 
+ LOGGER.info("No active session"); + return complete(StatusCodes.OK, ""); + }); + } + + private Route routeLogout() { + return this.httpServer.requiredSession(sessionManager.getRefreshable(), + sessionManager.getSessionTransport(), + session -> this.httpServer.invalidateSession(sessionManager.getRefreshable(), + sessionManager.getSessionTransport(), + () -> extractRequestContext(ctx -> { + LOGGER.info("Logging out {}", + session.getUsername()); + return onSuccess(() -> ctx.completeWith( + HttpResponse.create()), + routeResult -> complete( + "success")); + }))); + } + + @Override + public Route getRouteEntries() { + return concat(post(() -> path(GlobalConstants.SEGMENT_VALIDATE_OAUTH, () -> routeLoginWithKeycloakRequest(checkHeader))), + get(() -> concat(path(GlobalConstants.SEGMENT_GET_FIELDS_CONFIG, + () -> httpServer.setNewCsrfToken(checkHeader, + () -> complete(StatusCodes.OK, + httpServer.getJsonFields()))), + path(GlobalConstants.SEGMENT_CURRENT_USER, this::routeCurrentUser), + path(GlobalConstants.SEGMENT_LOGOUT, this::routeLogout))) + + ); + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaAdapterConfig.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaAdapterConfig.java similarity index 51% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaAdapterConfig.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaAdapterConfig.java index 16e2fe5dc..e93d94d57 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaAdapterConfig.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaAdapterConfig.java @@ -1,4 +1,4 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.keyCloak; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; @@ -9,16 +9,20 @@ import java.util.TreeMap; -@JsonPropertyOrder({"realm", "realm-public-key", "auth-server-url", 
"redirect-uri", "ssl-required", "resource", - "public-client", "credentials", "use-resource-role-mappings", "enable-cors", "cors-max-age", - "cors-allowed-methods", "cors-exposed-headers", "expose-token", "bearer-only", "autodetect-bearer-only", - "connection-pool-size", "socket-timeout-millis", "connection-ttl-millis", "connection-timeout-millis", - "allow-any-hostname", "disable-trust-manager", "truststore", "truststore-password", "client-keystore", - "client-keystore-password", "client-key-password", "always-refresh-token", "register-node-at-startup", - "register-node-period", "token-store", "adapter-state-cookie-path", "principal-attribute", "proxy-url", - "turn-off-change-session-id-on-login", "token-minimum-time-to-live", "min-time-between-jwks-requests", - "public-key-cache-ttl", "policy-enforcer", "ignore-oauth-query-parameter", "verify-token-audience"}) -final class AkkaAdapterConfig extends AdapterConfig { +@JsonPropertyOrder( + {"realm", "realm-public-key", "auth-server-url", "redirect-uri", "ssl-required", "resource", "public-client", + "credentials", "use-resource-role-mappings", "enable-cors", "cors-max-age", "cors-allowed-methods", + "cors-exposed-headers", "expose-token", "bearer-only", "autodetect-bearer-only", "connection-pool-size", + "socket-timeout-millis", "connection-ttl-millis", "connection-timeout-millis", "allow-any-hostname", + "disable-trust-manager", "truststore", "truststore" + "-password", "client-keystore", "client-keystore-password", + "client-key-password", "always-refresh-token", "register-node-at-startup", "register-node-period", "token-store", + "adapter-state-cookie-path", "principal-attribute", "proxy-url", "turn-off-change-session-id-on-login", + "token-minimum-time-to-live", "min-time-between-jwks-requests", "public-key-cache-ttl", "policy-enforcer", + "ignore-oauth-query-parameter", "verify-token-audience"}) +public final class AkkaAdapterConfig extends AdapterConfig { + @JsonProperty("frontend-kc-url") + private String 
frontendKcUri; + @JsonProperty("redirect-uri") private String redirectUri; @@ -50,6 +54,10 @@ public Map getCredentials() { return credentials; } + String getFrontendKcUri() { + return EnvUtil.replace(this.frontendKcUri); + } + String getRedirectUri() { return EnvUtil.replace(this.redirectUri); } diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaKeycloakDeploymentBuilder.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaKeycloakDeploymentBuilder.java similarity index 92% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaKeycloakDeploymentBuilder.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaKeycloakDeploymentBuilder.java index 3b9acfc0a..f5dbc22b4 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/AkkaKeycloakDeploymentBuilder.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/AkkaKeycloakDeploymentBuilder.java @@ -1,4 +1,4 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.keyCloak; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; @@ -10,7 +10,7 @@ import java.io.IOException; import java.io.InputStream; -class AkkaKeycloakDeploymentBuilder extends KeycloakDeploymentBuilder { +public class AkkaKeycloakDeploymentBuilder extends KeycloakDeploymentBuilder { protected AkkaKeycloakDeploymentBuilder() { } diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAdapterTokenVerifier.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAdapterTokenVerifier.java new file mode 100644 index 000000000..e23b2345b --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAdapterTokenVerifier.java @@ -0,0 +1,77 @@ +package org.jembi.jempi.api.keyCloak; + +import org.keycloak.TokenVerifier; +import org.keycloak.adapters.KeycloakDeployment; +import 
org.keycloak.adapters.rotation.AdapterTokenVerifier; +import org.keycloak.adapters.rotation.PublicKeyLocator; +import org.keycloak.common.VerificationException; +import org.keycloak.representations.AccessToken; +import org.keycloak.representations.IDToken; +import org.keycloak.representations.JsonWebToken; + +import java.security.PublicKey; + + +// Code taken from the super class org.keycloak.adapters.rotation.AdapterTokenVerifier +// Since they are static methods we need to redeclare them here +// The reason for the override is that within JeMPI keycloak has 2 addresses which it is accessed from: +// 1) The frontend url (KC_FRONTEND_URL) which the ui uses, and 2) the backend url (KC_API_URL), which the api uses +// of which the default verification assumes the addresses are the same. +// This change also allows us to use the appropriate url when verifying the tokens + +public final class KeyCloakAdapterTokenVerifier extends AdapterTokenVerifier { + + public static VerifiedTokens verifyTokens( + final String accessTokenString, + final String idTokenString, + final KeycloakDeployment deployment, + final AkkaAdapterConfig keycloakConfig) throws VerificationException { + TokenVerifier tokenVerifier = + createVerifier(accessTokenString, deployment, true, AccessToken.class, keycloakConfig); + AccessToken accessToken = tokenVerifier.verify().getToken(); + + if (idTokenString != null) { + IDToken idToken = TokenVerifier.create(idTokenString, IDToken.class).getToken(); + TokenVerifier idTokenVerifier = TokenVerifier.createWithoutSignature(idToken); + + idTokenVerifier.audience(deployment.getResourceName()); + idTokenVerifier.issuedFor(deployment.getResourceName()); + + idTokenVerifier.verify(); + return new VerifiedTokens(accessToken, idToken); + } else { + return new VerifiedTokens(accessToken, null); + } + } + + private static PublicKey getPublicKey( + final String kid, + final KeycloakDeployment deployment) throws VerificationException { + PublicKeyLocator pkLocator =
deployment.getPublicKeyLocator(); + + PublicKey publicKey = pkLocator.getPublicKey(kid, deployment); + if (publicKey == null) { + throw new VerificationException("Didn't find publicKey for specified kid"); + } + + return publicKey; + } + + public static TokenVerifier createVerifier( + final String tokenString, + final KeycloakDeployment deployment, + final boolean withDefaultChecks, + final Class tokenClass, + final AkkaAdapterConfig keycloakConfig) throws VerificationException { + TokenVerifier tokenVerifier = TokenVerifier.create(tokenString, tokenClass); + + tokenVerifier.withDefaultChecks() + .realmUrl(String.format("%s/realms/%s", keycloakConfig.getFrontendKcUri(), deployment.getRealm())); + + String kid = tokenVerifier.getHeader().getKeyId(); + PublicKey publicKey = getPublicKey(kid, deployment); + tokenVerifier.publicKey(publicKey); + + return tokenVerifier; + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAuthenticator.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAuthenticator.java new file mode 100644 index 000000000..1ce60680b --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/KeyCloakAuthenticator.java @@ -0,0 +1,80 @@ +package org.jembi.jempi.api.keyCloak; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.api.persistance.postgres.queries.UserQueries; +import org.jembi.jempi.api.user.User; +import org.keycloak.adapters.KeycloakDeployment; +import org.keycloak.adapters.ServerRequest; +import org.keycloak.common.VerificationException; +import org.keycloak.representations.AccessToken; +import org.keycloak.representations.AccessTokenResponse; + +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +public final class KeyCloakAuthenticator { + + private static final Logger LOGGER = 
LogManager.getLogger(KeyCloakAuthenticator.class); + private final KeycloakDeployment keycloak; + private final AkkaAdapterConfig keycloakConfig; + private final UserQueries userQueries; + + public KeyCloakAuthenticator() { + ClassLoader classLoader = getClass().getClassLoader(); + InputStream keycloakConfigStream = classLoader.getResourceAsStream("keycloak.json"); + keycloakConfig = AkkaKeycloakDeploymentBuilder.loadAdapterConfig(keycloakConfigStream); + keycloak = AkkaKeycloakDeploymentBuilder.build(keycloakConfig); + userQueries = new UserQueries(); + } + + private User loginWithKeycloakHandler(final OAuthCodeRequestPayload payload) { + LOGGER.debug("loginWithKeycloak"); + LOGGER.debug("Logging in {}", payload); + try { + // Exchange code for a token from Keycloak + AccessTokenResponse tokenResponse = ServerRequest.invokeAccessCodeToToken(keycloak, + payload.code(), + keycloakConfig.getRedirectUri(), + payload.sessionId()); + LOGGER.debug("Token Exchange succeeded!"); + + String tokenString = tokenResponse.getToken(); + String idTokenString = tokenResponse.getIdToken(); + + KeyCloakAdapterTokenVerifier.VerifiedTokens tokens = + KeyCloakAdapterTokenVerifier.verifyTokens(tokenString, idTokenString, keycloak, keycloakConfig); + LOGGER.debug("Token Verification succeeded!"); + AccessToken token = tokens.getAccessToken(); + LOGGER.debug("Is user already registered?"); + String username = token.getPreferredUsername(); + User user = userQueries.getUser(username); + if (user == null) { + // Register new user + LOGGER.debug("User registration ... 
{}", username); + User newUser = User.buildUserFromToken(token); + user = userQueries.registerUser(newUser); + } + LOGGER.debug("User has signed in : {}", username); + return user; + } catch (VerificationException e) { + LOGGER.error("failed verification of token: {}", e.getMessage()); + } catch (ServerRequest.HttpFailure failure) { + LOGGER.error("failed to turn code into token"); + LOGGER.error("status from server: {}", failure.getStatus()); + if (failure.getError() != null && !failure.getError().trim().isEmpty()) { + LOGGER.error(failure.getLocalizedMessage(), failure); + } + } catch (IOException e) { + LOGGER.error("failed to turn code into token", e); + } + return null; + } + + public CompletionStage askLoginWithKeycloak(final OAuthCodeRequestPayload body) { + CompletionStage stage = CompletableFuture.completedFuture(loginWithKeycloakHandler(body)); + return stage.thenApply(response -> response); + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/OAuthCodeRequestPayload.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/OAuthCodeRequestPayload.java similarity index 79% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/OAuthCodeRequestPayload.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/OAuthCodeRequestPayload.java index c909e9bec..218390638 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/OAuthCodeRequestPayload.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/keyCloak/OAuthCodeRequestPayload.java @@ -1,10 +1,10 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.keyCloak; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; @JsonInclude(JsonInclude.Include.NON_NULL) -record OAuthCodeRequestPayload( +public record OAuthCodeRequestPayload( @JsonProperty("code") String code, @JsonProperty("state") String state, @JsonProperty("session_state") String 
sessionId) { diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/QueryRunner.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/QueryRunner.java new file mode 100644 index 000000000..bebe1658b --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/QueryRunner.java @@ -0,0 +1,71 @@ +package org.jembi.jempi.api.persistance.postgres; + +import org.jembi.jempi.AppConfig; + +import java.sql.*; + +public class QueryRunner { + + private static final String URL = String.format("jdbc:postgresql://%s:%d/%s", + AppConfig.POSTGRESQL_IP, + AppConfig.POSTGRESQL_PORT, + AppConfig.POSTGRESQL_USERS_DB); + + protected final Connection establishConnection() throws SQLException { + return DriverManager.getConnection(URL, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD); + } + + /** + * @param sqlQuery + * @param statementUpdater + * @param runner + * @param + * @return + * @throws SQLException + */ + public T executor( + final String sqlQuery, + final ExceptionalConsumer statementUpdater, + final ExceptionalFunction runner) throws SQLException { + try (Connection connection = establishConnection()) { + PreparedStatement preparedStatement = connection.prepareStatement(sqlQuery); + statementUpdater.accept(preparedStatement); + return runner.apply(preparedStatement); + + } + } + + /** + * @param sqlQuery + * @param statementUpdater + * @return + * @throws SQLException + */ + public ResultSet executeQuery( + final String sqlQuery, + final ExceptionalConsumer statementUpdater) throws SQLException { + return executor(sqlQuery, statementUpdater, PreparedStatement::executeQuery); + } + + /** + * @param sqlQuery + * @param statementUpdater + * @return + * @throws SQLException + */ + public int executeUpdate( + final String sqlQuery, + final ExceptionalConsumer statementUpdater) throws SQLException { + return executor(sqlQuery, statementUpdater, 
PreparedStatement::executeUpdate); + } + + @FunctionalInterface + public interface ExceptionalConsumer { + void accept(T t) throws E; + } + + @FunctionalInterface + public interface ExceptionalFunction { + R apply(T t) throws E; + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/queries/UserQueries.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/queries/UserQueries.java new file mode 100644 index 000000000..53178a339 --- /dev/null +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/persistance/postgres/queries/UserQueries.java @@ -0,0 +1,79 @@ +package org.jembi.jempi.api.persistance.postgres.queries; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.api.persistance.postgres.QueryRunner; +import org.jembi.jempi.api.user.User; + +import java.sql.ResultSet; +import java.sql.SQLException; + + +public final class UserQueries extends QueryRunner { + + private static final Logger LOGGER = LogManager.getLogger(UserQueries.class); + + public User getUser(final String username) { + return this.getUser("username", username); + } + + public User getUser( + final String field, + final String value) { + try { + + ResultSet rs = executeQuery(String.format("SELECT * FROM users where %s = ?", field), preparedStatement -> { + preparedStatement.setString(1, value); + }); + + if (rs.next()) { + return new User(rs.getString("id"), + rs.getString("username"), + rs.getString("email"), + rs.getString("family_name"), + rs.getString("given_name")); + } + } catch (SQLException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + + return null; + + } + + public User registerUser(final User user) { + + try { + executeUpdate("INSERT INTO users (given_name, family_name, email, username) VALUES (?, ?, ?, ?)", preparedStatement -> { + String givenName = user.getGivenName(); + String familyName = user.getFamilyName(); + String email = 
user.getEmail(); + String username = user.getUsername(); + + preparedStatement.setString(1, + givenName == null + ? "" + : givenName); + preparedStatement.setString(2, + familyName == null + ? "" + : familyName); + preparedStatement.setString(3, + email == null + ? "" + : email); + preparedStatement.setString(4, + username == null + ? "" + : username); + }); + + LOGGER.info("Registered a new user"); + return getUser(user.getUsername()); + } catch (SQLException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + + return null; + } +} diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/User.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/User.java similarity index 65% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/User.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/User.java index f0caac314..0c46e7ba3 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/User.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/User.java @@ -1,15 +1,15 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.user; import org.keycloak.representations.AccessToken; -class User { +public class User { private String id; private String username; private String email; private String familyName; private String givenName; - User( + public User( final String id, final String username, final String email, @@ -22,23 +22,21 @@ class User { this.setGivenName(givenName); } - static User buildUserFromToken(final AccessToken token) { + public static User buildUserFromToken(final AccessToken token) { String familyName = token.getFamilyName(); String givenName = token.getGivenName(); - return new User( - null, - token.getPreferredUsername(), - token.getEmail(), - familyName != null - ? familyName - : "", - givenName != null - ? givenName - : "" - ); + return new User(null, + token.getPreferredUsername(), + token.getEmail(), + familyName != null + ? 
familyName + : "", + givenName != null + ? givenName + : ""); } - String getUsername() { + public String getUsername() { return username; } @@ -54,7 +52,7 @@ void setId(final String id) { this.id = id; } - String getEmail() { + public String getEmail() { return email; } @@ -62,7 +60,7 @@ void setEmail(final String email) { this.email = email; } - String getFamilyName() { + public String getFamilyName() { return familyName; } @@ -70,7 +68,7 @@ void setFamilyName(final String familyName) { this.familyName = familyName; } - String getGivenName() { + public String getGivenName() { return givenName; } diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSession.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSession.java similarity index 58% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSession.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSession.java index 9957c02f6..044758bea 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSession.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSession.java @@ -1,4 +1,4 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.user; import com.softwaremill.session.converters.MapConverters; import scala.collection.immutable.Map; @@ -8,7 +8,7 @@ import java.util.HashMap; -class UserSession extends User { +public class UserSession extends User { /** * This session serializer converts a session type into a value (always a String type). The first two arguments are just @@ -18,8 +18,8 @@ class UserSession extends User { * in the com.softwaremill.session.SessionSerializer companion object, like stringToString and mapToString, just to name a * few. 
*/ - private static final UserSessionSerializer SERIALIZER = new UserSessionSerializer( - (JFunction1>) user -> { + private static final UserSessionSerializer SERIALIZER = + new UserSessionSerializer((JFunction1>) user -> { final java.util.Map m = new HashMap<>(); m.put("id", user.getId()); m.put("email", user.getEmail()); @@ -28,21 +28,18 @@ class UserSession extends User { m.put("familyName", user.getFamilyName()); return MapConverters.toImmutableMap(m); }, - (JFunction1, Try>) value -> - Try.apply((JFunction0) () -> new UserSession(new User( - value.get("id").get(), - value.get("username").get(), - value.get("email").get(), - value.get("familyName").get(), - value.get("givenName").get() - ))) - ); + (JFunction1, Try>) value -> Try.apply((JFunction0) () -> new UserSession( + new User(value.get("id").get(), + value.get("username").get(), + value.get("email").get(), + value.get("familyName").get(), + value.get("givenName").get())))); - UserSession(final User user) { + public UserSession(final User user) { super(user.getId(), user.getUsername(), user.getEmail(), user.getFamilyName(), user.getGivenName()); } - static UserSessionSerializer getSerializer() { + public static UserSessionSerializer getSerializer() { return SERIALIZER; } diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSessionSerializer.java b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSessionSerializer.java similarity index 92% rename from JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSessionSerializer.java rename to JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSessionSerializer.java index 5e167f464..790efde6b 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/UserSessionSerializer.java +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/java/org/jembi/jempi/api/user/UserSessionSerializer.java @@ -1,4 +1,4 @@ -package org.jembi.jempi.api; +package org.jembi.jempi.api.user; import 
com.softwaremill.session.MultiValueSessionSerializer; diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/resources/application.conf b/JeMPI_Apps/JeMPI_API_KC/src/main/resources/application.conf index f1ac311ec..415e1d456 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/resources/application.conf +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/resources/application.conf @@ -51,7 +51,7 @@ akka-http-cors { # List of headers (other than simple response headers) that browsers are allowed to access. # If not empty, this list is returned as part of the `Access-Control-Expose-Headers` # header in the actual response. - exposed-headers = [] + exposed-headers = ["Set-Authorization"] # When set, the amount of seconds the browser is allowed to cache the results of a preflight request. # This value is returned as part of the `Access-Control-Max-Age` preflight response header. @@ -59,16 +59,28 @@ max-age = 1800 seconds } +# Note: For authentication we use the header session transport (com.softwaremill.session.javadsl.SessionTransports.HeaderST) +# The cookie settings here are mainly for csrf, which we are not yet fully utilizing because of limitations of cross-domain cookie setting (which requires https) +# Might need to consider an alternative approach (outside akka.http.session) if we still want this feature, or consider removing it altogether +# Keeping it here for posterity for now.
akka.http.session { server-secret = ${JEMPI_SESSION_SECRET} + encrypt-data = true cookie { - secure = ${JEMPI_SESSION_SECURE} + secure = true + # This has to be None, as we access the api from another domain + same-site = "None" + } + header { + send-to-client-name = "Set-Authorization" + get-from-client-name = "Authorization" } csrf { cookie { name = "XSRF-TOKEN" - secure = ${JEMPI_SESSION_SECURE} - domain = ${JEMPI_SESSION_DOMAIN_NAME} + secure = true + # This has to be None, as we access the api from another domain + same-site = "None" } submitted-name = "X-XSRF-TOKEN" } diff --git a/JeMPI_Apps/JeMPI_API_KC/src/main/resources/keycloak.json b/JeMPI_Apps/JeMPI_API_KC/src/main/resources/keycloak.json index 4360edf3d..98b7d7484 100644 --- a/JeMPI_Apps/JeMPI_API_KC/src/main/resources/keycloak.json +++ b/JeMPI_Apps/JeMPI_API_KC/src/main/resources/keycloak.json @@ -2,6 +2,7 @@ "realm" : "${env.KC_REALM_NAME}", "resource" : "${env.KC_JEMPI_CLIENT_ID}", "auth-server-url" : "${env.KC_API_URL}", + "frontend-kc-url": "${env.KC_FRONTEND_URL}", "redirect-uri": "${env.KC_JEMPI_ROOT_URL}/login", "ssl-required" : "none", "use-resource-role-mappings" : false, diff --git a/JeMPI_Apps/JeMPI_AsyncReceiver/docker/Dockerfile b/JeMPI_Apps/JeMPI_AsyncReceiver/docker/Dockerfile index 92894bd47..e973893d2 100644 --- a/JeMPI_Apps/JeMPI_AsyncReceiver/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_AsyncReceiver/docker/Dockerfile @@ -6,7 +6,7 @@ ADD AsyncReceiver-1.0-SNAPSHOT-spring-boot.jar /app/AsyncReceiver-1.0-SNAPSHOT-s RUN printf "#!/bin/bash\n\ cd /app\n\ - java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/AsyncReceiver-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh + java -server -XX:MaxRAMPercentage=80 -jar /app/AsyncReceiver-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh RUN chmod +x /entrypoint.sh RUN mkdir /app/csv diff --git a/JeMPI_Apps/JeMPI_AsyncReceiver/pom.xml b/JeMPI_Apps/JeMPI_AsyncReceiver/pom.xml index 41a39a35c..e8c5d8312 100644 --- 
a/JeMPI_Apps/JeMPI_AsyncReceiver/pom.xml +++ b/JeMPI_Apps/JeMPI_AsyncReceiver/pom.xml @@ -44,6 +44,12 @@ commons-csv + + commons-io + commons-io + + + org.apache.kafka kafka-clients @@ -158,4 +164,4 @@ - \ No newline at end of file + diff --git a/JeMPI_Apps/JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver/Main.java b/JeMPI_Apps/JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver/Main.java index 613c0c797..1d0eec10c 100644 --- a/JeMPI_Apps/JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver/Main.java +++ b/JeMPI_Apps/JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver/Main.java @@ -2,6 +2,7 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVRecord; +import org.apache.commons.io.FilenameUtils; import org.apache.kafka.common.serialization.Serializer; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.logging.log4j.LogManager; @@ -36,8 +37,7 @@ public Main() { Configurator.setLevel(this.getClass(), AppConfig.GET_LOG_LEVEL); } - public static void main(final String[] args) - throws InterruptedException, ExecutionException, IOException { + public static void main(final String[] args) throws InterruptedException, ExecutionException, IOException { new Main().run(); } @@ -67,8 +67,7 @@ static String parseRecordNumber(final String in) { private void sendToKafka( final String key, - final InteractionEnvelop interactionEnvelop) - throws InterruptedException, ExecutionException { + final InteractionEnvelop interactionEnvelop) throws InterruptedException, ExecutionException { try { interactionEnvelopProducer.produceSync(key, interactionEnvelop); } catch (NullPointerException ex) { @@ -76,46 +75,66 @@ private void sendToKafka( } } + private long getRowSize(final String[] values) { + long size = 0; + + for (String str : values) { + if (str != null) { + size += 24 + (str.length() * 2L); + } + } + return size; + } + private void apacheReadCSV(final String fileName) throws 
InterruptedException, ExecutionException { try { - final var reader = Files.newBufferedReader(Paths.get(fileName)); + final var filePathUri = Paths.get(fileName); + final var reader = Files.newBufferedReader(filePathUri); final var dtf = DateTimeFormatter.ofPattern("uuuu/MM/dd HH:mm:ss"); final var now = LocalDateTime.now(); final var stanDate = dtf.format(now); final var uuid = UUID.randomUUID().toString(); + final var tag = FilenameUtils.getBaseName(FilenameUtils.removeExtension(fileName)); - final var csvParser = CSVFormat - .DEFAULT - .builder() - .setHeader() - .setSkipHeaderRecord(true) - .setIgnoreEmptyLines(true) - .setNullString(null) - .build() - .parse(reader); + final var csvParser = CSVFormat.DEFAULT.builder() + .setHeader() + .setSkipHeaderRecord(true) + .setIgnoreEmptyLines(true) + .setNullString(null) + .build() + .parse(reader); int index = 0; - sendToKafka(uuid, new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_START_SENTINEL, fileName, - String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), null)); + sendToKafka(uuid, + new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_START_SENTINEL, + tag, + String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), + null)); for (CSVRecord csvRecord : csvParser) { - sendToKafka(UUID.randomUUID().toString(), - new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_INTERACTION, fileName, - String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), - new Interaction(null, - CustomAsyncHelper.customSourceId(csvRecord), - CustomAsyncHelper.customUniqueInteractionData(csvRecord), - CustomAsyncHelper.customDemographicData(csvRecord)))); + final var interactionEnvelop = new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_INTERACTION, + tag, + String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), + new Interaction(null, + CustomAsyncHelper.customSourceId(csvRecord), + CustomAsyncHelper.customUniqueInteractionData( + csvRecord), + CustomAsyncHelper.customDemographicData( + 
csvRecord))); + + sendToKafka(UUID.randomUUID().toString(), interactionEnvelop); } - sendToKafka(uuid, new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_END_SENTINEL, fileName, - String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), null)); + sendToKafka(uuid, + new InteractionEnvelop(InteractionEnvelop.ContentType.BATCH_END_SENTINEL, + tag, + String.format(Locale.ROOT, "%s:%07d", stanDate, ++index), + null)); } catch (IOException ex) { LOGGER.error(ex.getLocalizedMessage(), ex); } } - private void handleEvent(final WatchEvent event) - throws InterruptedException, ExecutionException { + private void handleEvent(final WatchEvent event) throws InterruptedException, ExecutionException { WatchEvent.Kind kind = event.kind(); LOGGER.info("EVENT: {}", kind); if (ENTRY_CREATE.equals(kind)) { @@ -143,12 +162,11 @@ private Serializer valueSerializer() { } private void run() throws InterruptedException, ExecutionException, IOException { - LOGGER.info("KAFKA: {} {}", - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - AppConfig.KAFKA_CLIENT_ID); + LOGGER.info("KAFKA: {} {}", AppConfig.KAFKA_BOOTSTRAP_SERVERS, AppConfig.KAFKA_CLIENT_ID); interactionEnvelopProducer = new MyKafkaProducer<>(AppConfig.KAFKA_BOOTSTRAP_SERVERS, - GlobalConstants.TOPIC_INTERACTION_ASYNC_ETL, - keySerializer(), valueSerializer(), + GlobalConstants.TOPIC_INTERACTION_ETL, + keySerializer(), + valueSerializer(), AppConfig.KAFKA_CLIENT_ID); try (WatchService watchService = FileSystems.getDefault().newWatchService()) { final var csvDir = Path.of("./csv"); diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/README.md b/JeMPI_Apps/JeMPI_Bootstrapper/README.md new file mode 100644 index 000000000..985d6fefa --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/README.md @@ -0,0 +1,75 @@ +The app fall under the devops part of JeMPI, and is currently used to manage data used by JeMPI. More specifically, it is used to manage JeMPI postgres, draph, and kafka data. 
It manages this data through a CLI interface of which the options are list below: + +./bootstrapper.sh data -h +``` +Usage:
data [-hV] [-c=] [COMMAND] + -c, --config= Config file + -h, --help Show this help message and exit. + -V, --version Print version information and exit. +Commands: + kafka + dgraph + postgres + resetAll Deletes all data and schemas associated with JeMPI, then + recreates schemas, and add initial data. + deleteAllSchemaData Delete all the data and schema used by JeMPI. + createAllSchemaData Create all the required schema's and data for JeMPI. +``` + +./bootstrapper.sh data kafka -h +``` +Usage:
data kafka [-hV] [COMMAND] + -h, --help Show this help message and exit. + -V, --version Print version information and exit. +Commands: + resetAll Deletes all data and schemas associated with JeMPI kafka + instance, then recreates schemas, and add initial data. + deleteAll Delete all the data and schema used by JeMPI kafka + instance. + createAllSchemaData Create all the required schema's and data for JeMPI + Kafka instance. + listTopics List all the topics associated with the JeMPI instance. + describeTopic Describe a topic associated with the JeMPI instance. +``` + +./bootstrapper.sh data postgres -h +``` +Usage:
data postgres [-hV] [COMMAND] + -h, --help Show this help message and exit. + -V, --version Print version information and exit. +Commands: + resetAll Deletes all data and schemas associated with JeMPI + Postgres instance, then recreates schemas, and add + initial data. + deleteDataOnly Delete all the data (only) used by JeMPI Postgres + instance. + deleteAll Delete all the data and schema used by JeMPI Postgres + instance. + createAllSchemaData Create all the required schema's and data for JeMPI + Postgres instance. +``` + +./bootstrapper.sh data dgraph -h +``` +Usage:
data dgraph [-hV] [COMMAND] + -h, --help Show this help message and exit. + -V, --version Print version information and exit. +Commands: + resetAll Deletes all data and schemas associated with JeMPI + Dgraph instance, then recreates schemas, and add + initial data. + deleteAll Delete all the data and schema used by JeMPI Dgraph + instance. + createAllSchemaData Create all the required schema's and data for JeMPI + Dgraph instance. +``` + + +**Other notes** + +- This application can be run directly (as a java app), or via the script found at devops/linux/bootstrapper.sh (i.e `devops/linux/bootstrapper.sh -h`) + +- The app uses the JeMPI environment variable to know what to connect to for the various instances. You can however pass in a config file that contains the variables you want to use instead. These variables in the config file will then be merged with the available environment variables. + - A sample to the config format can be found here (JeMPI_Apps/JeMPI_Bootstrapper/boostrap.conf.sample) + - To use this config file you need to specify the config option (i.e `./bootstrapper data resetAll config=""`) + diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/boostrap.conf.sample b/JeMPI_Apps/JeMPI_Bootstrapper/boostrap.conf.sample new file mode 100644 index 000000000..1efeeef99 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/boostrap.conf.sample @@ -0,0 +1,12 @@ +POSTGRESQL_IP=127.0.0.1 +POSTGRESQL_PORT=5432 +POSTGRESQL_USER=postgres +POSTGRESQL_PASSWORD= +POSTGRESQL_USERS_DB= +POSTGRESQL_NOTIFICATIONS_DB= +POSTGRESQL_AUDIT_DB= +POSTGRESQL_KC_TEST_DB= +KAFKA_BOOTSTRAP_SERVERS=127.0.0.1 +KAFKA_APPLICATION_ID=aId +DGRAPH_HOSTS=127.0.0.1 +DGRAPH_PORTS=5080 \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_EM/build.sh b/JeMPI_Apps/JeMPI_Bootstrapper/build.sh similarity index 65% rename from JeMPI_Apps/JeMPI_EM/build.sh rename to JeMPI_Apps/JeMPI_Bootstrapper/build.sh index 4f888224f..c8b32e1a8 100755 --- a/JeMPI_Apps/JeMPI_EM/build.sh +++ 
b/JeMPI_Apps/JeMPI_Bootstrapper/build.sh @@ -6,8 +6,8 @@ set -u source $PROJECT_DEVOPS_DIR/conf/images/conf-app-images.sh source ../build-check-jdk.sh -JAR_FILE=${EM_JAR} -APP_IMAGE=${EM_IMAGE} -APP=em +JAR_FILE=${BOOTSTRAPPER_JAR} +APP_IMAGE=${BOOTSTRAPPER_IMAGE} +APP=bootstrapper source ../build-app-image.sh diff --git a/JeMPI_Apps/JeMPI_EM/checkstyle/suppression.xml b/JeMPI_Apps/JeMPI_Bootstrapper/checkstyle/suppression.xml similarity index 67% rename from JeMPI_Apps/JeMPI_EM/checkstyle/suppression.xml rename to JeMPI_Apps/JeMPI_Bootstrapper/checkstyle/suppression.xml index c6d6bec56..062da494b 100644 --- a/JeMPI_Apps/JeMPI_EM/checkstyle/suppression.xml +++ b/JeMPI_Apps/JeMPI_Bootstrapper/checkstyle/suppression.xml @@ -17,8 +17,18 @@ /> + + + + diff --git a/JeMPI_Apps/JeMPI_EM/docker/.gitignore b/JeMPI_Apps/JeMPI_Bootstrapper/docker/.gitignore similarity index 100% rename from JeMPI_Apps/JeMPI_EM/docker/.gitignore rename to JeMPI_Apps/JeMPI_Bootstrapper/docker/.gitignore diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/docker/Dockerfile b/JeMPI_Apps/JeMPI_Bootstrapper/docker/Dockerfile new file mode 100644 index 000000000..02a528f6c --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/docker/Dockerfile @@ -0,0 +1,14 @@ +ARG JAVA_VERSION + +FROM eclipse-temurin:${JAVA_VERSION}-jre + +ADD Bootstrapper-1.0-SNAPSHOT-spring-boot.jar /app/Bootstrapper-1.0-SNAPSHOT-spring-boot.jar + +RUN printf "#!/bin/bash\n\ +cd /app\n\ +java_args=\"\${@:1}\" \n\ +java --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/Bootstrapper-1.0-SNAPSHOT-spring-boot.jar \$java_args \n" > /bootstrapper.sh + +RUN chmod +x /bootstrapper.sh + +ENTRYPOINT tail -f /dev/null diff --git a/JeMPI_Apps/JeMPI_EM/pom.xml b/JeMPI_Apps/JeMPI_Bootstrapper/pom.xml similarity index 82% rename from JeMPI_Apps/JeMPI_EM/pom.xml rename to JeMPI_Apps/JeMPI_Bootstrapper/pom.xml index 3c1c87af4..b64181841 100644 --- a/JeMPI_Apps/JeMPI_EM/pom.xml +++ b/JeMPI_Apps/JeMPI_Bootstrapper/pom.xml @@ -3,59 +3,58 @@ 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 + org.jembi.jempi JeMPI 1.0-SNAPSHOT - EM + Bootstrapper jar + - ${project.groupId}.em.EM + ${project.groupId}.bootstrapper.BootstrapperCLI - - org.jembi.jempi - JeMPI_LibShared - 1.0-SNAPSHOT - - com.typesafe.akka - akka-actor-typed_${scala.tools.version} + org.postgresql + postgresql - com.typesafe.akka - akka-stream_${scala.tools.version} + info.picocli + picocli + 4.6.1 - com.typesafe.akka - akka-http_${scala.tools.version} + org.jembi.jempi + JeMPI_LibShared + 1.0-SNAPSHOT - org.apache.commons - commons-lang3 + com.typesafe + config - org.apache.commons - commons-text + io.vavr + vavr - org.apache.kafka - kafka-clients + io.dgraph + dgraph4j - org.apache.kafka - kafka-streams + com.fasterxml.jackson.core + jackson-databind @@ -64,8 +63,8 @@ - com.fasterxml.jackson.core - jackson-databind + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 @@ -73,6 +72,11 @@ commons-codec + + org.apache.commons + commons-lang3 + + org.apache.logging.log4j log4j-api @@ -93,6 +97,23 @@ org.apache.logging.log4j log4j-jcl + + + org.apache.commons + commons-text + + + org.testng + testng + 7.8.0 + test + + + org.junit.jupiter + junit-jupiter-api + 5.10.0 + test + org.jembi.jempi JeMPI_LibMPI @@ -104,7 +125,6 @@ - org.apache.maven.plugins maven-checkstyle-plugin @@ -139,9 +159,7 @@ - - @@ -173,5 +191,4 @@ - \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/push.sh b/JeMPI_Apps/JeMPI_Bootstrapper/push.sh new file mode 100755 index 000000000..b757c151c --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/push.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e +set -u + +source $PROJECT_DEVOPS_DIR/conf.env +source $PROJECT_DEVOPS_DIR/conf/images/conf-app-images.sh + +APP_IMAGE=$BOOTSTRAPPER_IMAGE + +docker tag ${APP_IMAGE} ${REGISTRY_NODE_IP}/${APP_IMAGE} +docker push ${REGISTRY_NODE_IP}/${APP_IMAGE} +docker rmi 
${REGISTRY_NODE_IP}/${APP_IMAGE} + \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/Bootstrapper.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/Bootstrapper.java new file mode 100644 index 000000000..64a61898d --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/Bootstrapper.java @@ -0,0 +1,14 @@ +package org.jembi.jempi.bootstrapper; + +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.bootstrapper.utils.BootstrapperLogger; + +public class Bootstrapper { + protected static final Logger LOGGER = BootstrapperLogger.getLogger("Jempi Bootstrapper"); + protected BootstrapperConfig loadedConfig; + + public Bootstrapper(final String configFilePath) { + this.loadedConfig = BootstrapperConfig.create(configFilePath, LOGGER); + } +} + diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperCLI.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperCLI.java new file mode 100644 index 000000000..cbafd68af --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperCLI.java @@ -0,0 +1,22 @@ +package org.jembi.jempi.bootstrapper; + +import org.jembi.jempi.bootstrapper.data.cli.CLI; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +@Command(mixinStandardHelpOptions = true, subcommands = {CLI.class}) + +public class BootstrapperCLI implements Runnable { + + @CommandLine.Option(names = {"-c", "--config"}, description = "Config file") + private String config; + + public static void main(final String... 
args) { + int exitCode = new CommandLine(new BootstrapperCLI()).execute(args); + System.exit(exitCode); + } + + @Override + public void run() { + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperConfig.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperConfig.java new file mode 100644 index 000000000..421c45897 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/BootstrapperConfig.java @@ -0,0 +1,112 @@ +package org.jembi.jempi.bootstrapper; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import org.apache.logging.log4j.Logger; + +import java.io.File; +import java.util.Arrays; + +public class BootstrapperConfig { + + public final String POSTGRESQL_IP; + public final Integer POSTGRESQL_PORT; + public final String POSTGRESQL_USER; + public final String POSTGRESQL_PASSWORD; + public final String POSTGRESQL_DATABASE; + public final String POSTGRESQL_USERS_DB; + public final String POSTGRESQL_NOTIFICATIONS_DB; + public final String POSTGRESQL_AUDIT_DB; + public final String POSTGRESQL_KC_TEST_DB; + public final String KAFKA_BOOTSTRAP_SERVERS; + public final String KAFKA_APPLICATION_ID; + public final String[] DGRAPH_ALPHA_HOSTS; + public final int[] DGRAPH_ALPHA_PORTS; + + public BootstrapperConfig(final Config parsedConfig) { + POSTGRESQL_IP = parsedConfig.getString("POSTGRESQL_IP"); + POSTGRESQL_PORT = parsedConfig.getInt("POSTGRESQL_PORT"); + POSTGRESQL_USER = parsedConfig.getString("POSTGRESQL_USER"); + POSTGRESQL_PASSWORD = parsedConfig.getString("POSTGRESQL_PASSWORD"); + + POSTGRESQL_DATABASE = parsedConfig.getString("POSTGRESQL_DATABASE"); + POSTGRESQL_USERS_DB = parsedConfig.getString("POSTGRESQL_USERS_DB"); + POSTGRESQL_NOTIFICATIONS_DB = parsedConfig.getString("POSTGRESQL_NOTIFICATIONS_DB"); + POSTGRESQL_AUDIT_DB = parsedConfig.getString("POSTGRESQL_AUDIT_DB"); + POSTGRESQL_KC_TEST_DB = 
parsedConfig.getString("POSTGRESQL_KC_TEST_DB"); + + KAFKA_BOOTSTRAP_SERVERS = parsedConfig.getString("KAFKA_BOOTSTRAP_SERVERS"); + KAFKA_APPLICATION_ID = parsedConfig.getString("KAFKA_APPLICATION_ID"); + DGRAPH_ALPHA_HOSTS = parsedConfig.getString("DGRAPH_HOSTS").split(","); + DGRAPH_ALPHA_PORTS = Arrays.stream(parsedConfig.getString("DGRAPH_PORTS").split(",")).mapToInt(s -> { + try { + return Integer.parseInt(s); + } catch (NumberFormatException ex) { + return Integer.MIN_VALUE; + } + }).toArray(); + } + + public static BootstrapperConfig create( + final String filepath, + final Logger logger) { + return new BootstrapperConfig(new Builder(logger).withOptionalFile(filepath) + .withSystemEnvironment() + .withSystemProperties() + .build()); + } + + private static class Builder { + + private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); + private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); + private final Logger logger; + private Config conf = ConfigFactory.empty(); + + Builder(final Logger logger) { + this.logger = logger; + } + + // This should return the current executing user path + private static String getExecutionDirectory() { + return SYSTEM_PROPERTIES.getString("user.dir"); + } + + Builder withSystemProperties() { + conf = conf.withFallback(SYSTEM_PROPERTIES); + return this; + } + + Builder withSystemEnvironment() { + conf = conf.withFallback(SYSTEM_ENVIRONMENT); + return this; + } + + Builder withOptionalFile(final String path) { + if (path == null) { + return this; + } + File secureConfFile = new File(path); + + if (!secureConfFile.isAbsolute()) { + secureConfFile = new File(getExecutionDirectory() + path); + } + if (secureConfFile.exists()) { + this.logger.info("Loaded config file from path ({})", path); + conf = conf.withFallback(ConfigFactory.parseFile(secureConfFile)); + } else { + this.logger.info("Attempted to load file from path ({}) but it was not found", path); + } + return this; + } + + 
Config build() { + conf = conf.resolve(); + return conf; + } + + } + +} + + diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/BaseDataBootstrapperCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/BaseDataBootstrapperCommand.java new file mode 100644 index 000000000..ff515da6c --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/BaseDataBootstrapperCommand.java @@ -0,0 +1,42 @@ +package org.jembi.jempi.bootstrapper.data; + +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.bootstrapper.utils.BootstrapperLogger; +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +public abstract class BaseDataBootstrapperCommand implements Callable { + protected static final Logger LOGGER = BootstrapperLogger.getLogger("DataBootstrapperCLI"); + + @CommandLine.Option(names = "config", scope = CommandLine.ScopeType.INHERIT) + protected String config; + + protected T bootstrapper; + + public BaseDataBootstrapperCommand init() throws Exception { + bootstrapper = getBootstrapper(config); + return this; + } + + protected Integer execute(final Callable bootstrapperFunc) { + try { + Integer bootstrapperResult = bootstrapperFunc.call(); + if (bootstrapperResult != 0) { + LOGGER.warn("Command completed successfully with some errors"); + return CommandLine.ExitCode.SOFTWARE; + } + return CommandLine.ExitCode.OK; + } catch (Exception e) { + LOGGER.error("An error occurred whilst executing the command.", e); + return CommandLine.ExitCode.SOFTWARE; + } + } + + protected abstract T getBootstrapper(String configPath) throws Exception; + + public BaseDataBootstrapperCommand setConfigPath(final String config) { + this.config = config; + return this; + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/DataBootstrapper.java 
b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/DataBootstrapper.java new file mode 100644 index 000000000..31e3c881b --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/DataBootstrapper.java @@ -0,0 +1,13 @@ +package org.jembi.jempi.bootstrapper.data; + +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.bootstrapper.Bootstrapper; +import org.jembi.jempi.bootstrapper.utils.BootstrapperLogger; + +public abstract class DataBootstrapper extends Bootstrapper implements IDataBootstrapper { + protected static final Logger LOGGER = BootstrapperLogger.getChildLogger(Bootstrapper.LOGGER, "Data"); + + public DataBootstrapper(final String configFilePath) { + super(configFilePath); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/IDataBootstrapper.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/IDataBootstrapper.java new file mode 100644 index 000000000..4d6964f6a --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/IDataBootstrapper.java @@ -0,0 +1,9 @@ +package org.jembi.jempi.bootstrapper.data; + +public interface IDataBootstrapper { + Boolean createSchema() throws Exception; + + Boolean deleteData() throws Exception; + + Boolean resetAll() throws Exception; +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/BaseCLICommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/BaseCLICommand.java new file mode 100644 index 000000000..b31a30440 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/BaseCLICommand.java @@ -0,0 +1,28 @@ +package org.jembi.jempi.bootstrapper.data.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.DataBootstrapper; + +import 
java.util.concurrent.Callable; + + +public abstract class BaseCLICommand extends BaseDataBootstrapperCommand implements Callable { + @Override + public BaseCLICommand init() throws Exception { + super.init(); + return this; + } + + @Override + protected DataBootstrapper getBootstrapper(final String configPath) { + return null; + } + + protected Integer callMultiple(final BaseDataBootstrapperCommand[] bootstrapperCommands) throws Exception { + Integer execResult = 0; + for (BaseDataBootstrapperCommand b : bootstrapperCommands) { + execResult += b.setConfigPath(this.config).init().call(); + } + return execResult; + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CLI.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CLI.java new file mode 100644 index 000000000..cff5d187b --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CLI.java @@ -0,0 +1,14 @@ +package org.jembi.jempi.bootstrapper.data.cli; + +import org.jembi.jempi.bootstrapper.data.graph.dgraph.cli.DgraphCLI; +import org.jembi.jempi.bootstrapper.data.sql.postgres.cli.PostgresCLI; +import org.jembi.jempi.bootstrapper.data.stream.kafka.cli.KafkaCLI; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +@Command(name = "data", mixinStandardHelpOptions = true, subcommands = {KafkaCLI.class, DgraphCLI.class, PostgresCLI.class, + ResetAllCommand.class, DeleteAllSchemaDataCommand.class, CreateAllSchemaDataCommand.class}) +public class CLI { + @CommandLine.Option(names = {"-c", "--config"}, description = "Config file") + private String config; +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CreateAllSchemaDataCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CreateAllSchemaDataCommand.java new file mode 100644 index 000000000..7545d39aa --- /dev/null +++ 
b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/CreateAllSchemaDataCommand.java @@ -0,0 +1,21 @@ +package org.jembi.jempi.bootstrapper.data.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.graph.dgraph.cli.DgraphCreateAllSchemaDataCommand; +import org.jembi.jempi.bootstrapper.data.sql.postgres.cli.PostgresCreateAllSchemaDataCommand; +import org.jembi.jempi.bootstrapper.data.stream.kafka.cli.KafkaCreateAllSchemaDataCommand; +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "createAllSchemaData", mixinStandardHelpOptions = true, description = "Create all the required " + + "schema's and data for JeMPI.") +public class CreateAllSchemaDataCommand extends BaseCLICommand implements Callable { + @Override + public Integer call() throws Exception { + + return this.execute(() -> this.callMultiple(new BaseDataBootstrapperCommand[]{new PostgresCreateAllSchemaDataCommand(), + new DgraphCreateAllSchemaDataCommand(), + new KafkaCreateAllSchemaDataCommand()})); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/DeleteAllSchemaDataCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/DeleteAllSchemaDataCommand.java new file mode 100644 index 000000000..d5048730c --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/DeleteAllSchemaDataCommand.java @@ -0,0 +1,21 @@ +package org.jembi.jempi.bootstrapper.data.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.graph.dgraph.cli.DgraphDeleteAllCommand; +import org.jembi.jempi.bootstrapper.data.sql.postgres.cli.PostgresDeleteAllCommand; +import org.jembi.jempi.bootstrapper.data.stream.kafka.cli.KafkaDeleteAllCommand; +import picocli.CommandLine; + +import 
java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteAllSchemaData", mixinStandardHelpOptions = true, description = "Delete all the data and " + + "schema used by JeMPI.") +public class DeleteAllSchemaDataCommand extends BaseCLICommand implements Callable { + @Override + public Integer call() throws Exception { + + return this.execute(() -> this.callMultiple(new BaseDataBootstrapperCommand[]{new PostgresDeleteAllCommand(), + new DgraphDeleteAllCommand(), + new KafkaDeleteAllCommand()})); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/ResetAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/ResetAllCommand.java new file mode 100644 index 000000000..a4a2005c3 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/cli/ResetAllCommand.java @@ -0,0 +1,21 @@ +package org.jembi.jempi.bootstrapper.data.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.graph.dgraph.cli.DgraphResetAllCommand; +import org.jembi.jempi.bootstrapper.data.sql.postgres.cli.PostgresResetAllCommand; +import org.jembi.jempi.bootstrapper.data.stream.kafka.cli.KafkaResetAllCommand; +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "resetAll", mixinStandardHelpOptions = true, description = "Deletes all data and schemas associated" + + " with JeMPI, then recreates schemas, and add initial data.") +public class ResetAllCommand extends BaseCLICommand implements Callable { + @Override + public Integer call() throws Exception { + + return this.execute(() -> this.callMultiple(new BaseDataBootstrapperCommand[]{new PostgresResetAllCommand(), + new DgraphResetAllCommand(), + new KafkaResetAllCommand()})); + } +} diff --git 
a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/DgraphDataBootstrapper.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/DgraphDataBootstrapper.java new file mode 100644 index 000000000..020c2100e --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/DgraphDataBootstrapper.java @@ -0,0 +1,43 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.bootstrapper.data.DataBootstrapper; +import org.jembi.jempi.bootstrapper.utils.BootstrapperLogger; +import org.jembi.jempi.libmpi.dgraph.LibDgraph; + +public class DgraphDataBootstrapper extends DataBootstrapper { + protected static final Logger LOGGER = BootstrapperLogger.getChildLogger(DataBootstrapper.LOGGER, "DGraph"); + private LibDgraph libDgraph; + + public DgraphDataBootstrapper(final String configFilePath) { + super(configFilePath); + this.loadDgraphLib(); + } + + public void loadDgraphLib() { + libDgraph = new LibDgraph(Level.INFO, this.loadedConfig.DGRAPH_ALPHA_HOSTS, this.loadedConfig.DGRAPH_ALPHA_PORTS); + } + + @Override + public Boolean createSchema() { + LOGGER.info("Loading DGraph schema data."); + libDgraph.startTransaction(); + libDgraph.createSchema(); + return true; + } + + @Override + public Boolean deleteData() { + LOGGER.info("Deleting DGraph data and schemas."); + libDgraph.startTransaction(); + libDgraph.dropAll(); + return true; + } + + @Override + public Boolean resetAll() { + LOGGER.info("Resetting DGraph data and schemas."); + return this.deleteData() && this.createSchema(); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/BaseDgraphCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/BaseDgraphCommand.java new file 
mode 100644 index 000000000..b6fd634ca --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/BaseDgraphCommand.java @@ -0,0 +1,13 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.graph.dgraph.DgraphDataBootstrapper; + +import java.util.concurrent.Callable; + +public abstract class BaseDgraphCommand extends BaseDataBootstrapperCommand implements Callable { + @Override + protected DgraphDataBootstrapper getBootstrapper(final String configPath) { + return new DgraphDataBootstrapper(configPath); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCLI.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCLI.java new file mode 100644 index 000000000..c14aa7bb3 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCLI.java @@ -0,0 +1,9 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph.cli; + +import picocli.CommandLine.Command; + +@Command(name = "dgraph", mixinStandardHelpOptions = true, subcommands = {DgraphResetAllCommand.class, + DgraphDeleteAllCommand.class, + DgraphCreateAllSchemaDataCommand.class}) +public class DgraphCLI { +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCreateAllSchemaDataCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCreateAllSchemaDataCommand.java new file mode 100644 index 000000000..f423fcf84 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphCreateAllSchemaDataCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph.cli; + 
+import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "createAllSchemaData", mixinStandardHelpOptions = true, description = "Create all the required " + + "schema's and data for JeMPI Dgraph instance.") +public class DgraphCreateAllSchemaDataCommand extends BaseDgraphCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.createSchema() + ? 0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphDeleteAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphDeleteAllCommand.java new file mode 100644 index 000000000..02c7540ed --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphDeleteAllCommand.java @@ -0,0 +1,16 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteAll", mixinStandardHelpOptions = true, description = "Delete all the data and schema used by JeMPI Dgraph instance.") +public class DgraphDeleteAllCommand extends BaseDgraphCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.deleteData() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphResetAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphResetAllCommand.java new file mode 100644 index 000000000..08ac5c4f4 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/graph/dgraph/cli/DgraphResetAllCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.graph.dgraph.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "resetAll", mixinStandardHelpOptions = true, description = "Deletes all data and schemas associated" + + " with JeMPI Dgraph instance, then recreates schemas, and add initial data.") +public class DgraphResetAllCommand extends BaseDgraphCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.resetAll() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDALLib.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDALLib.java new file mode 100644 index 000000000..12f61421f --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDALLib.java @@ -0,0 +1,91 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres; + +import java.sql.*; +import java.util.Locale; + + +public class PostgresDALLib { + + private final String usr; + private final String psw; + private final String ip; + private final String defaultDb; + private final int port; + public PostgresDALLib( + final String ip, + final int port, + final String usr, + final String db, + final String psw) { + this.ip = ip; + this.port = port; + this.defaultDb = db; + this.usr = usr; + this.psw = psw; + } + + private String getDbUrl(final String db) { + return String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", ip, port, db); + } + private Connection getConnection(final String dbName) throws SQLException { + return DriverManager.getConnection(getDbUrl(dbName != null ? 
dbName : defaultDb), this.usr, this.psw); + } + + public Boolean createDb(final String dbName) throws SQLException { + if (!databaseExists(dbName)) { + return runQuery(connection -> { + return connection.prepareStatement(getCreateDbSchema(dbName)); + }, true, null); + } + return true; + } + + protected boolean databaseExists(final String databaseName) throws SQLException { + String query = "SELECT 1 FROM pg_database WHERE datname = ?"; + try (PreparedStatement preparedStatement = getConnection(null).prepareStatement(query)) { + preparedStatement.setString(1, databaseName); + try (ResultSet resultSet = preparedStatement.executeQuery()) { + return resultSet.next(); + } + } + } + public String getCreateDbSchema(final String dbName) { + return String.format(""" + CREATE DATABASE %s + """, dbName); + } + + public Boolean runQuery(final ThrowingFunction getStatement, final Boolean autoCommit, final String dbName) throws SQLException { + try (Connection connection = this.getConnection(dbName)) { + connection.setAutoCommit(autoCommit); + + try { + T statement = getStatement.apply(connection); + if (statement != null) { + statement.executeUpdate(); + if (!autoCommit) { + connection.commit(); + } + + } + + return true; + } catch (SQLException e) { + if (!autoCommit) { + connection.rollback(); + } + + throw e; + } + } catch (SQLException e) { + throw e; + } + } + + @FunctionalInterface + interface ThrowingFunction { + R apply(T t) throws E; + } +} + + diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDataBootstrapper.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDataBootstrapper.java new file mode 100644 index 000000000..a0ed29ab6 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/PostgresDataBootstrapper.java @@ -0,0 +1,119 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres; + +import 
org.jembi.jempi.bootstrapper.data.DataBootstrapper; +import org.jembi.jempi.bootstrapper.data.utils.DataBootstraperConsts; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; + +public class PostgresDataBootstrapper extends DataBootstrapper { + + private record DBSchemaDetails(String dbName, String schemaFilePath) { } + private final PostgresDALLib postgresDALLib; + + public PostgresDataBootstrapper(final String configFilePath) { + super(configFilePath); + postgresDALLib = new PostgresDALLib(this.loadedConfig.POSTGRESQL_IP, + this.loadedConfig.POSTGRESQL_PORT, + this.loadedConfig.POSTGRESQL_USER, + this.loadedConfig.POSTGRESQL_USER, + this.loadedConfig.POSTGRESQL_PASSWORD); + } + + protected String getCreateSchemaScript(final String fileName) { + InputStream postgresSchemaScript = this.getClass().getResourceAsStream(fileName); + return new BufferedReader(new InputStreamReader(postgresSchemaScript, StandardCharsets.UTF_8)).lines() + .collect(Collectors.joining( + "\n")); + } + + protected List getAllDbSchemas() { + return List.of( + new DBSchemaDetails(this.loadedConfig.POSTGRESQL_USERS_DB, DataBootstraperConsts.POSTGRES_INIT_SCHEMA_USERS_DB), + new DBSchemaDetails(this.loadedConfig.POSTGRESQL_NOTIFICATIONS_DB, DataBootstraperConsts.POSTGRES_INIT_SCHEMA_NOTIFICATION_DB), + new DBSchemaDetails(this.loadedConfig.POSTGRESQL_AUDIT_DB, DataBootstraperConsts.POSTGRES_INIT_SCHEMA_AUDIT_DB), + new DBSchemaDetails(this.loadedConfig.POSTGRESQL_KC_TEST_DB, null) + ); + } + @Override + public Boolean createSchema() throws SQLException { + LOGGER.info("Loading Postgres schema data."); + + for (DBSchemaDetails schemaDetails: getAllDbSchemas()) { + String dbName = schemaDetails.dbName(); + String dbSchemaFilePath = schemaDetails.schemaFilePath(); + + LOGGER.info(String.format("---> Create schema for 
database %s", dbName)); + + postgresDALLib.createDb(dbName); + + if (dbSchemaFilePath != null) { + postgresDALLib.runQuery(connection -> connection.prepareStatement(getCreateSchemaScript(dbSchemaFilePath)), + true, + dbName); + } + + } + return true; + } + + protected String getAllTablesWrapper(final String innerQuery) { + return String.format(""" + SET session_replication_role = replica; + DO $$ + DECLARE + table_name text; + BEGIN + FOR table_name IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') + LOOP + EXECUTE %s + END LOOP; + END $$; + SET session_replication_role = DEFAULT; + """, innerQuery); + } + + public Boolean deleteTables() throws SQLException { + LOGGER.info("Deleting Postgres tables"); + for (DBSchemaDetails schemaDetails: getAllDbSchemas()) { + String dbName = schemaDetails.dbName(); + if (postgresDALLib.databaseExists(dbName)) { + LOGGER.info(String.format("---> Deleting tables for database %s", dbName)); + postgresDALLib.runQuery(connection -> { + return connection.prepareStatement(this.getAllTablesWrapper( + "'DROP TABLE ' || table_name || ' CASCADE ;';")); + + }, true, dbName); + } + } + return true; + } + + @Override + public Boolean deleteData() throws SQLException { + LOGGER.info("Deleting Postgres data"); + for (DBSchemaDetails schemaDetails: getAllDbSchemas()) { + String dbName = schemaDetails.dbName(); + if (postgresDALLib.databaseExists(dbName)) { + LOGGER.info(String.format("---> Deleting data for database %s", dbName)); + postgresDALLib.runQuery(connection -> { + return connection.prepareStatement(this.getAllTablesWrapper( + "'DELETE FROM ' || table_name || ';';")); + + }, true, dbName); + } + } + return true; + } + + @Override + public Boolean resetAll() throws SQLException { + LOGGER.info("Resetting Postgres data and schemas."); + return this.deleteData() && this.deleteTables() && this.createSchema(); + } +} diff --git 
a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/BasePostgresCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/BasePostgresCommand.java new file mode 100644 index 000000000..2b6b36f69 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/BasePostgresCommand.java @@ -0,0 +1,14 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.sql.postgres.PostgresDataBootstrapper; + +import java.util.concurrent.Callable; + +public abstract class BasePostgresCommand extends BaseDataBootstrapperCommand implements Callable { + @Override + protected PostgresDataBootstrapper getBootstrapper(final String configPath) { + return new PostgresDataBootstrapper(configPath); + } +} + diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCLI.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCLI.java new file mode 100644 index 000000000..61b5c955e --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCLI.java @@ -0,0 +1,11 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import picocli.CommandLine.Command; + +@Command(name = "postgres", mixinStandardHelpOptions = true, subcommands = {PostgresResetAllCommand.class, + PostgresDeleteDataOnlyCommand.class, + PostgresDeleteAllCommand.class, + PostgresCreateAllSchemaDataCommand.class}) + +public class PostgresCLI { +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCreateAllSchemaDataCommand.java 
b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCreateAllSchemaDataCommand.java new file mode 100644 index 000000000..e268c5334 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresCreateAllSchemaDataCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "createAllSchemaData", mixinStandardHelpOptions = true, description = "Create all the required " + + "schema's and data for JeMPI Postgres instance.") +public class PostgresCreateAllSchemaDataCommand extends BasePostgresCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.createSchema() + ? 0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteAllCommand.java new file mode 100644 index 000000000..9e9596e43 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteAllCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteAll", mixinStandardHelpOptions = true, description = "Delete all the data and schema used by" + + " JeMPI Postgres instance.") +public class PostgresDeleteAllCommand extends BasePostgresCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.deleteData() && this.bootstrapper.deleteTables() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteDataOnlyCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteDataOnlyCommand.java new file mode 100644 index 000000000..d3bb7fac8 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresDeleteDataOnlyCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteDataOnly", mixinStandardHelpOptions = true, description = "Delete all the data (only) used " + + "by JeMPI Postgres instance.") +public class PostgresDeleteDataOnlyCommand extends BasePostgresCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.deleteData() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresResetAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresResetAllCommand.java new file mode 100644 index 000000000..b2d82e481 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/sql/postgres/cli/PostgresResetAllCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.sql.postgres.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "resetAll", mixinStandardHelpOptions = true, description = "Deletes all data and schemas associated" + + " with JeMPI Postgres instance, then recreates schemas, and add initial data.") +public class PostgresResetAllCommand extends BasePostgresCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.resetAll() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaBootstrapConfig.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaBootstrapConfig.java new file mode 100644 index 000000000..0f6b7d6e2 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaBootstrapConfig.java @@ -0,0 +1,45 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.HashMap; + +public class KafkaBootstrapConfig { + + public HashMap topics; + + public HashMap getTopics() { + return topics; + } + + public static class BootstrapperTopicConfig { + private String topicName; + private Integer partition; + private short replications; + @JsonProperty("retention_ms") + private Integer retentionMs; + + @JsonProperty("segments_bytes") + private Integer segmentsBytes; + + public Integer getPartition() { + return partition; + } + + public short getReplications() { + return replications; + } + + public Integer getRetentionMs() { + return retentionMs; + } + + public Integer getSegmentsBytes() { + return segmentsBytes; + } + + public String getTopicName() { + return topicName; + } + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapper.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapper.java new file mode 100644 index 000000000..426fcc96b --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapper.java @@ -0,0 +1,137 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.kafka.clients.admin.TopicDescription; +import org.apache.kafka.clients.admin.TopicListing; 
+import org.apache.logging.log4j.Logger; +import org.jembi.jempi.bootstrapper.data.DataBootstrapper; +import org.jembi.jempi.bootstrapper.data.utils.DataBootstraperConsts; +import org.jembi.jempi.bootstrapper.utils.BootstrapperLogger; +import org.jembi.jempi.shared.kafka.KafkaTopicManager; +import org.jembi.jempi.shared.kafka.global_context.store_processor.Utilities; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class KafkaDataBootstrapper extends DataBootstrapper { + protected static final Logger LOGGER = BootstrapperLogger.getChildLogger(DataBootstrapper.LOGGER, "Kafka"); + protected KafkaBootstrapConfig kafkaBootstrapConfig; + protected KafkaTopicManager kafkaTopicManager; + + public KafkaDataBootstrapper(final String configFilePath) throws IOException { + super(configFilePath); + this.loadKafkaConfig(); + this.loadKafkaTopicManager(); + } + + protected void loadKafkaTopicManager() { + LOGGER.info(String.format("Connecting to the kafka bootstrap server '%s'", this.loadedConfig.KAFKA_BOOTSTRAP_SERVERS)); + kafkaTopicManager = new KafkaTopicManager(this.loadedConfig.KAFKA_BOOTSTRAP_SERVERS); + } + + protected void loadKafkaConfig() throws IOException { + InputStream keycloakConfigStream = this.getClass().getResourceAsStream(DataBootstraperConsts.KAFKA_BOOT_STRAP_CONFIG_JSON); + ObjectMapper objectMapper = new ObjectMapper(); + + this.kafkaBootstrapConfig = objectMapper.readValue(keycloakConfigStream, KafkaBootstrapConfig.class); + } + + private void awaitOperationComplete(final Function, Boolean> checkFunc) { + boolean isComplete = false; + int count = 0; + while (!isComplete) { + try { + Thread.sleep(1000); + isComplete = checkFunc.apply(kafkaTopicManager.getAllTopics()) || count > 5000; + count += 1000; + } 
catch (ExecutionException | InterruptedException e) { + isComplete = true; + } + } + } + + @Override + public Boolean createSchema() throws InterruptedException { + LOGGER.info("Loading Kafka schema data."); + for (HashMap.Entry topicDetails + : this.kafkaBootstrapConfig.topics.entrySet()) { + KafkaBootstrapConfig.BootstrapperTopicConfig topic = topicDetails.getValue(); + + LOGGER.info(String.format("--> Creating topic '%s'", topic.getTopicName())); + try { + kafkaTopicManager.createTopic(topic.getTopicName(), + topic.getPartition(), + topic.getReplications(), + topic.getRetentionMs(), + topic.getSegmentsBytes()); + } catch (ExecutionException e) { + LOGGER.warn(e.getMessage()); + } + } + awaitOperationComplete(topics -> topics.size() >= this.kafkaBootstrapConfig.topics.size()); + return true; + } + + public Boolean listTopics() throws ExecutionException, InterruptedException { + for (TopicListing t : kafkaTopicManager.getAllTopics()) { + System.out.println(t.toString()); + } + return true; + } + + public Boolean describeTopic(final String topicName) throws ExecutionException, InterruptedException { + for (Map.Entry t : kafkaTopicManager.describeTopic(topicName).entrySet()) { + System.out.println(t.getValue().toString()); + } + return true; + } + + private void doTopicDelete(final String topicName) { + LOGGER.info(String.format("--> Deleting topic '%s'", topicName)); + try { + kafkaTopicManager.deleteTopic(topicName); + } catch (ExecutionException | InterruptedException e) { + LOGGER.warn(e.getMessage()); + } + } + + public Boolean deleteGlobalStoreTopicsData() throws ExecutionException, InterruptedException { + LOGGER.info("Deleting global store kafka topics."); + Collection collection = kafkaTopicManager.getAllTopics().stream() + .map(TopicListing::name) + .filter(name -> name.startsWith(Utilities.JEMPI_GLOBAL_STORE_PREFIX)) + .collect(Collectors.toCollection(ArrayList::new)); + + for (String topic: collection) { + doTopicDelete(topic); + } + + 
kafkaTopicManager.checkTopicsWithWait(topics -> topics.stream().filter(t -> t.name().startsWith(Utilities.JEMPI_GLOBAL_STORE_PREFIX)).count() == 0, 5000); + return true; + } + + @Override + public Boolean deleteData() throws InterruptedException, ExecutionException { + LOGGER.info("Deleting kafka topics."); + for (HashMap.Entry topicDetails : this.kafkaBootstrapConfig.topics.entrySet()) { + KafkaBootstrapConfig.BootstrapperTopicConfig topic = topicDetails.getValue(); + doTopicDelete(topic.getTopicName()); + } + deleteGlobalStoreTopicsData(); + kafkaTopicManager.checkTopicsWithWait(topics -> topics.size() == 0, 5000); + return true; + } + + @Override + public Boolean resetAll() throws ExecutionException, InterruptedException { + LOGGER.info("Resetting kafka data and schemas."); + return this.deleteData() && this.createSchema(); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/BaseKafkaCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/BaseKafkaCommand.java new file mode 100644 index 000000000..73d0ed7c7 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/BaseKafkaCommand.java @@ -0,0 +1,14 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import org.jembi.jempi.bootstrapper.data.BaseDataBootstrapperCommand; +import org.jembi.jempi.bootstrapper.data.stream.kafka.KafkaDataBootstrapper; + +import java.io.IOException; +import java.util.concurrent.Callable; + +public abstract class BaseKafkaCommand extends BaseDataBootstrapperCommand implements Callable { + @Override + protected KafkaDataBootstrapper getBootstrapper(final String configPath) throws IOException { + return new KafkaDataBootstrapper(configPath); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCLI.java 
b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCLI.java new file mode 100644 index 000000000..e151b4fb8 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCLI.java @@ -0,0 +1,11 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine.Command; + +@Command(name = "kafka", mixinStandardHelpOptions = true, subcommands = {KafkaResetAllCommand.class, + KafkaDeleteAllCommand.class, + KafkaCreateAllSchemaDataCommand.class, + KafkaListTopicsCommand.class, + KafkaDescribeTopicCommand.class}) +public class KafkaCLI { +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCreateAllSchemaDataCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCreateAllSchemaDataCommand.java new file mode 100644 index 000000000..0dfde4c9a --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaCreateAllSchemaDataCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "createAllSchemaData", mixinStandardHelpOptions = true, description = "Create all the required " + + "schema's and data for JeMPI Kafka instance.") +public class KafkaCreateAllSchemaDataCommand extends BaseKafkaCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.createSchema() + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteAllCommand.java new file mode 100644 index 000000000..7ce3d434e --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteAllCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteAll", mixinStandardHelpOptions = true, description = "Delete all the data and schema used by" + + " JeMPI kafka instance.") +public class KafkaDeleteAllCommand extends BaseKafkaCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.deleteData() + ? 0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteGlobalStoreDataCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteGlobalStoreDataCommand.java new file mode 100644 index 000000000..d9dab26be --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDeleteGlobalStoreDataCommand.java @@ -0,0 +1,14 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "deleteGlobalStoreData", mixinStandardHelpOptions = true, description = "Delete all global store topics used by JeMPI.") +public class KafkaDeleteGlobalStoreDataCommand extends BaseKafkaCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> 
this.bootstrapper.deleteGlobalStoreTopicsData() ? 0 : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDescribeTopicCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDescribeTopicCommand.java new file mode 100644 index 000000000..f5e831035 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaDescribeTopicCommand.java @@ -0,0 +1,21 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "describeTopic", mixinStandardHelpOptions = true, description = "Describe a topic associated with " + + "the JeMPI instance.") +public class KafkaDescribeTopicCommand extends BaseKafkaCommand implements Callable { + + @CommandLine.Option(names = {"-t", "--topicName"}, description = "Topic Name", required = true) + private String topicName; + + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.describeTopic(topicName) + ? 
0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaListTopicsCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaListTopicsCommand.java new file mode 100644 index 000000000..b6de2bd68 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaListTopicsCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "listTopics", mixinStandardHelpOptions = true, description = "List all the topics associated with " + + "the JeMPI instance.") +public class KafkaListTopicsCommand extends BaseKafkaCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> this.bootstrapper.listTopics() + ? 0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaResetAllCommand.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaResetAllCommand.java new file mode 100644 index 000000000..80d6f058e --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/stream/kafka/cli/KafkaResetAllCommand.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka.cli; + +import picocli.CommandLine; + +import java.util.concurrent.Callable; + +@CommandLine.Command(name = "resetAll", mixinStandardHelpOptions = true, description = "Deletes all data and schemas associated" + + " with JeMPI kafka instance, then recreates schemas, and add initial data.") +public class KafkaResetAllCommand extends BaseKafkaCommand implements Callable { + @Override + public Integer call() throws Exception { + this.init(); + return this.execute(() -> 
this.bootstrapper.resetAll() + ? 0 + : 1); + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/utils/DataBootstraperConsts.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/utils/DataBootstraperConsts.java new file mode 100644 index 000000000..9df49d282 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/data/utils/DataBootstraperConsts.java @@ -0,0 +1,9 @@ +package org.jembi.jempi.bootstrapper.data.utils; + +public class DataBootstraperConsts { + protected DataBootstraperConsts() { } + public static final String KAFKA_BOOT_STRAP_CONFIG_JSON = "/data/kafka/kafkaBootStrapConfig.json"; + public static final String POSTGRES_INIT_SCHEMA_AUDIT_DB = "/data/postgres/audit-schema.sql"; + public static final String POSTGRES_INIT_SCHEMA_NOTIFICATION_DB = "/data/postgres/notifications-schema.sql"; + public static final String POSTGRES_INIT_SCHEMA_USERS_DB = "/data/postgres/users-schema.sql"; +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/utils/BootstrapperLogger.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/utils/BootstrapperLogger.java new file mode 100644 index 000000000..54dc8d8ab --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/java/org/jembi/jempi/bootstrapper/utils/BootstrapperLogger.java @@ -0,0 +1,17 @@ +package org.jembi.jempi.bootstrapper.utils; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class BootstrapperLogger { + protected BootstrapperLogger() { } + public static Logger getChildLogger( + final Logger parentLogger, + final String childLoggerName) { + return LogManager.getLogger(String.format("%s > %s", parentLogger.getName(), childLoggerName)); + } + + public static Logger getLogger(final String loggerName) { + return LogManager.getLogger(loggerName); + } +} diff --git 
a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/kafka/kafkaBootStrapConfig.json b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/kafka/kafkaBootStrapConfig.json new file mode 100644 index 000000000..b89f997b8 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/kafka/kafkaBootStrapConfig.json @@ -0,0 +1,67 @@ +{ + "topics": { + "JeMPI-interaction-etl": { + "topicName": "JeMPI-interaction-etl", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-interaction-controller": { + "topicName": "JeMPI-interaction-controller", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-interaction-processor-controller": { + "topicName": "JeMPI-interaction-processor-controller", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-interaction-em": { + "topicName": "JeMPI-interaction-em", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-interaction-linker": { + "topicName": "JeMPI-interaction-linker", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-mu-controller": { + "topicName": "JeMPI-mu-controller", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-mu-linker": { + "topicName": "JeMPI-mu-linker", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-notifications": { + "topicName": "JeMPI-notifications", + "partition": 1, + "replications": 1, + "retention_ms": 86400000, + "segments_bytes": 4194304 + }, + "JeMPI-audit-trail": { + "topicName": "JeMPI-audit-trail", + "partition": 1, + "replications": 1, + "retention_ms": 600000, + "segments_bytes": 1048576 + } + } +} \ No newline at end of file diff --git 
a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/audit-schema.sql b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/audit-schema.sql new file mode 100644 index 000000000..aa3b4b7c1 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/audit-schema.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS audit_trail ( + id UUID NOT NULL DEFAULT gen_random_uuid(), + insertedAt TIMESTAMP NOT NULL DEFAULT now(), + createdAt TIMESTAMP NOT NULL, + interactionID VARCHAR(64), + goldenID VARCHAR(64), + event VARCHAR(256), + CONSTRAINT PKEY_AUDIT_TRAIL PRIMARY KEY (id) +); +CREATE INDEX IF NOT EXISTS idx_gid ON audit_trail(goldenID); +CREATE INDEX IF NOT EXISTS idx_iid ON audit_trail(interactionID); diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/notifications-schema.sql b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/notifications-schema.sql new file mode 100644 index 000000000..ad411a51a --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/notifications-schema.sql @@ -0,0 +1,80 @@ +CREATE TABLE IF NOT EXISTS Notification_Type +( + Id uuid DEFAULT gen_random_uuid() PRIMARY KEY, + Type VARCHAR(50) +); + +CREATE TABLE IF NOT EXISTS Action_Type +( + Id UUID DEFAULT gen_random_uuid() PRIMARY KEY UNIQUE, + Type VARCHAR(50) +); + +CREATE TABLE IF NOT EXISTS Notification_State +( + Id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + State VARCHAR(50) +); + +CREATE TABLE IF NOT EXISTS Notification +( + Id uuid DEFAULT gen_random_uuid() PRIMARY KEY, + Type VARCHAR(50), + Created timestamp without time zone, + Reviewd_By uuid, + Reviewed_At timestamp without time zone, + State VARCHAR(50), + Patient_Id VARCHAR(50), + Names VARCHAR(100), + Golden_Id VARCHAR(50), + Score Numeric +); + +CREATE TABLE IF NOT EXISTS Action +( + Id UUID DEFAULT gen_random_uuid() PRIMARY KEY, + Notification_Id UUID, + Action_Type_Id UUID, + Date date, + CONSTRAINT FK_Notification + 
FOREIGN KEY(Notification_Id) + REFERENCES Notification(Id), + CONSTRAINT FK_Action_Type + FOREIGN KEY(Action_Type_Id) + REFERENCES Action_Type(Id) +); + +CREATE TABLE IF NOT EXISTS Match +( + Notification_Id UUID, + Score Numeric, + Golden_Id VARCHAR(50), + CONSTRAINT FK_Notification + FOREIGN KEY(Notification_Id) + REFERENCES Notification(Id) +); + +CREATE TABLE IF NOT EXISTS candidates +( + Notification_Id UUID, + Score Numeric, + Golden_Id VARCHAR(50), + CONSTRAINT FK_Notification + FOREIGN KEY(Notification_Id) + REFERENCES Notification(Id) +); + +CREATE TABLE IF NOT EXISTS users +( + id UUID DEFAULT gen_random_uuid() PRIMARY KEY UNIQUE, + given_name VARCHAR(255), + family_name VARCHAR(255), + email VARCHAR(255) UNIQUE, + username VARCHAR(255) UNIQUE +); + +INSERT INTO Notification_State(State) +VALUES ('OPEN'), ('CLOSED'); + +INSERT INTO Notification_Type(Type) +VALUES ('ABOVE_THRESHOLD'), ('BELOW_THRESHOLD'), ('MARGIN'), ('UPDATE'); \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/users-schema.sql b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/users-schema.sql new file mode 100644 index 000000000..84ce1ed2a --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/data/postgres/users-schema.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS users +( + id UUID DEFAULT gen_random_uuid() PRIMARY KEY UNIQUE, + given_name VARCHAR(255), + family_name VARCHAR(255), + email VARCHAR(255) UNIQUE, + username VARCHAR(255) UNIQUE +); diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/log4j2.xml b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/log4j2.xml new file mode 100644 index 000000000..27f2461ea --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/main/resources/log4j2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/BootstrapperConfigTest.java 
b/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/BootstrapperConfigTest.java new file mode 100644 index 000000000..48bca4012 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/BootstrapperConfigTest.java @@ -0,0 +1,83 @@ +package org.jembi.jempi.bootstrapper; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigFactory; +import org.apache.logging.log4j.LogManager; +import org.junit.jupiter.api.*; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; + +import static java.lang.Thread.sleep; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; + +@TestMethodOrder(MethodOrderer.OrderAnnotation.class) +public class BootstrapperConfigTest { + @Test + @Order(1) + public void testItErrorWhenConfigNotAvailable(){ + Assertions.assertThrows(ConfigException.class, () -> { + BootstrapperConfig.create(null, LogManager.getLogger()); + }); + } + + public void assertPropsCorrect(BootstrapperConfig config){ + assertEquals("127.0.0.1", config.POSTGRESQL_IP); + assertEquals(5432, config.POSTGRESQL_PORT); + assertEquals("postgres", config.POSTGRESQL_USER); + assertEquals("", config.POSTGRESQL_PASSWORD); + assertEquals("jempi", config.POSTGRESQL_NOTIFICATIONS_DB); + + assertEquals("127.0.0.1", config.KAFKA_BOOTSTRAP_SERVERS); + assertEquals("aId", config.KAFKA_APPLICATION_ID); + + assertArrayEquals(new String[]{"127.0.0.1"}, config.DGRAPH_ALPHA_HOSTS); + assertArrayEquals(new int[]{5080}, config.DGRAPH_ALPHA_PORTS); + } + @Test + @Order(3) + public void testItCanLoadConfigFromEnvironment(){ + System.setProperty("POSTGRESQL_IP", "127.0.0.1"); + System.setProperty("POSTGRESQL_PORT", "5432"); + System.setProperty("POSTGRESQL_USER", "postgres"); + System.setProperty("POSTGRESQL_PASSWORD", ""); + 
System.setProperty("POSTGRESQL_DATABASE", "jempi"); + + System.setProperty("KAFKA_BOOTSTRAP_SERVERS", "127.0.0.1"); + System.setProperty("KAFKA_APPLICATION_ID", "aId"); + + System.setProperty("DGRAPH_HOSTS", "127.0.0.1"); + System.setProperty("DGRAPH_PORTS", "5080"); + ConfigFactory.systemProperties(); + + assertPropsCorrect(BootstrapperConfig.create(null, LogManager.getLogger())); + } + @Test + @Order(2) + public void testCanLoadConfigFromPath() throws IOException, InterruptedException { + File file = File.createTempFile( "config", "conf"); + file.deleteOnExit(); + + FileWriter fileWriter = new FileWriter(file); + BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); + bufferedWriter.write(""" + POSTGRESQL_IP=127.0.0.1 + POSTGRESQL_PORT=5432 + POSTGRESQL_USER=postgres + POSTGRESQL_PASSWORD="" + POSTGRESQL_DATABASE=jempi + KAFKA_BOOTSTRAP_SERVERS=127.0.0.1 + KAFKA_APPLICATION_ID=aId + DGRAPH_HOSTS=127.0.0.1 + DGRAPH_PORTS=5080 + """); + bufferedWriter.close(); + + assertPropsCorrect(BootstrapperConfig.create(file.getPath(), LogManager.getLogger())); + + } +} diff --git a/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapperTest.java b/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapperTest.java new file mode 100644 index 000000000..e822fba86 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Bootstrapper/src/test/java/org/jembi/jempi/bootstrapper/data/stream/kafka/KafkaDataBootstrapperTest.java @@ -0,0 +1,52 @@ +package org.jembi.jempi.bootstrapper.data.stream.kafka; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.TopicListing; +import org.apache.kafka.streams.StreamsConfig; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; + + +import java.util.Collection; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + 
+import static org.junit.jupiter.api.Assertions.assertEquals; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public class KafkaDataBootstrapperTest { + + private AdminClient kafkaAdminClient; + private KafkaDataBootstrapper kafkaDataBootstrapper; + @BeforeAll + public void createMockObjects() throws Exception{ + Properties properties = new Properties(); + properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + kafkaAdminClient = AdminClient.create(properties); + kafkaDataBootstrapper = new KafkaDataBootstrapper(null); + + } + @Test + public void testCanCreateSchemaTopics() throws InterruptedException, ExecutionException { + + kafkaDataBootstrapper.deleteData(); + kafkaDataBootstrapper.createSchema(); + + Collection topicsAfterUpdateFuture = kafkaAdminClient.listTopics().listings().get(); + assertEquals(7, topicsAfterUpdateFuture.size()); + + } + @Test + public void testCanDeleteTopics() throws InterruptedException, ExecutionException { + kafkaDataBootstrapper.createSchema(); + kafkaDataBootstrapper.deleteData(); + Collection topicsAfterUpdateFuture = kafkaAdminClient.listTopics().listings().get(); + assertEquals(0, topicsAfterUpdateFuture.size()); + } + @Test + public void testCanResetAll() throws ExecutionException, InterruptedException { + kafkaDataBootstrapper.resetAll(); + Collection topicsAfterUpdateFuture = kafkaAdminClient.listTopics().listings().get(); + assertEquals(7, topicsAfterUpdateFuture.size()); + } +} diff --git a/JeMPI_Apps/JeMPI_Configuration/.scalafmt.conf b/JeMPI_Apps/JeMPI_Configuration/.scalafmt.conf new file mode 100644 index 000000000..9e6b67ed5 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/.scalafmt.conf @@ -0,0 +1,2 @@ +version = 3.7.17 +runner.dialect = scala3 diff --git a/JeMPI_Apps/JeMPI_Configuration/build.sbt b/JeMPI_Apps/JeMPI_Configuration/build.sbt index 7b65d05b0..61b515e35 100644 --- a/JeMPI_Apps/JeMPI_Configuration/build.sbt +++ b/JeMPI_Apps/JeMPI_Configuration/build.sbt @@ -6,13 +6,13 @@ ThisBuild 
/ scalaVersion := "3.3.1" ThisBuild / libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "2.3.0" // https://mvnrepository.com/artifact/org.apache.commons/commons-text -ThisBuild / libraryDependencies += "org.apache.commons" % "commons-text" % "1.10.0" +ThisBuild / libraryDependencies += "org.apache.commons" % "commons-text" % "1.11.0" // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -ThisBuild / libraryDependencies += "com.fasterxml.jackson.core" % "jackson-databind" % "2.15.2" +ThisBuild / libraryDependencies += "com.fasterxml.jackson.core" % "jackson-databind" % "2.16.1" // https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-scala -ThisBuild / libraryDependencies += "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.15.2" +ThisBuild / libraryDependencies += "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.16.1" lazy val root = (project in file(".")) .settings( diff --git a/JeMPI_Apps/JeMPI_Configuration/config-api.json b/JeMPI_Apps/JeMPI_Configuration/config-api.json deleted file mode 120000 index f58db882f..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/config-api.json +++ /dev/null @@ -1 +0,0 @@ -reference/config-reference-api.json \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Configuration/create.ps1 b/JeMPI_Apps/JeMPI_Configuration/create.ps1 new file mode 100644 index 000000000..25e33e31d --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/create.ps1 @@ -0,0 +1,8 @@ +$script_path = $MyInvocation.MyCommand.Path +$script_dir = Split-Path $script_path +Set-Location $script_dir + +$config = 'reference\config-reference' + +Invoke-Expression -Command "sbt 'run ${config}.json'" +Copy-Item ${config}-api.json .\config-api.json diff --git a/JeMPI_Apps/JeMPI_Configuration/create.sh b/JeMPI_Apps/JeMPI_Configuration/create.sh index 128cc2855..9eaa89d20 100755 --- a/JeMPI_Apps/JeMPI_Configuration/create.sh +++ 
b/JeMPI_Apps/JeMPI_Configuration/create.sh @@ -8,4 +8,4 @@ API_FILENAME="${CONFIG%.*}"-api.json COMMAND="run $CONFIG" sbt "$COMMAND" rm -f config-api.json -ln -s $API_FILENAME config-api.json +cp $API_FILENAME config-api.json diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-backend.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-backend.json deleted file mode 100644 index 8fdd0f3ef..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-backend.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": ["given_name", "family_name", "phone_number", "national_id"], - "text": "eq(national_id) or (eq(given_name) and eq(family_name) and eq(phone_number))" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_DISTANCE": { - "vars": ["given_name", "family_name", "city"], - "text": "match(given_name,3) and match(family_name,3) or match(given_name,3) and match(city,3) or match(family_name,3) and match(city,3)" - }, - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_PHONE_NUMBER": { - "vars": ["phone_number"], - "text": "match(phone_number,3)" - }, - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_NATIONAL_ID": { - "vars": ["national_id"], - "text": "match(national_id,3)" - } - } - } -} diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia-new.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia-new.json deleted file mode 100644 index 9f37cd0d9..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia-new.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "name_given", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.8, - "u": 0.1 - }, - { - "fieldName": "name_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.7, - "u": 0.2 - }, - 
{ - "fieldName": "name_fathers_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.7, - "u": 0.2 - }, - { - "fieldName": "name_mother", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.7, - "u": 0.2 - }, - { - "fieldName": "name_mothers_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.7, - "u": 0.2 - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "m": 0.8, - "u": 0.5 - }, - { - "fieldName": "dob", - "fieldType": "String", - "m": 0.8, - "u": 0.2 - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.8, - "u": 0.2 - }, - { - "fieldName": "phone_number", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.8, - "u": 0.2 - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_CANDIDATES": { - "vars": [ - "name_given", - "name_father", - "phone_number" - ], - "text": "eq(name_given) and eq(name_father) and eq(phone_number)" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORDS_BY_DISTANCE": { - "vars": [ - "name_given", - "name_father", - "city" - ], - "text": "match(name_given,3) and match(name_father,3) or match(name_given,3) and match(city,3) or match(name_father,3) and match(city,3)" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORDS_BY_PHONE_NUMBER": { - "vars": [ - "phone_number" - ], - "text": "match(phone_number,3)" - } - } - } -} \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia.json deleted file mode 100644 index ed87d8015..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-ethiopia.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": 
"name_given", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.8, - "u": 0.002 - }, - { - "fieldName": "name_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.8, - "u": 0.002 - }, - { - "fieldName": "name_fathers_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.84, - "u": 0.002 - }, - { - "fieldName": "name_mother", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.835, - "u": 0.005 - }, - { - "fieldName": "name_mothers_father", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.838, - "u": 0.002 - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "m": 0.81, - "u": 0.386 - }, - { - "fieldName": "dob", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "m": 0.999, - "u": 0.012 - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.843, - "u": 0.059 - }, - { - "fieldName": "phone_number", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.967, - "u": 0.003 - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": [ - "name_given", - "name_father", - "name_fathers_father", - "name_mother", - "name_mothers_father", - "gender", - "dob", - "phone_number" - ], - "text": "eq(name_given) and eq(name_father) and eq(name_fathers_father) or eq(name_given) and eq(name_mother) and eq(name_mothers_father) or eq(name_given) and eq(gender) and eq(phone_number) or eq(name_given) and eq(gender) and eq(dob)" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORDS_BY_PHONE_NUMBER": { - "vars": [ - "name_given", - "name_father", - "name_fathers_father", - "name_mother", - "name_mothers_father", - "phone_number" - ], - "text": "match(phone_number,3) or match(name_given,3) and match(name_father,3) and 
match(name_fathers_father,3) or match(name_given,3) and match(name_mother,3) and match(name_mothers_father,3)" - } - } - } -} \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-frontend.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-frontend.json deleted file mode 100644 index 8e99463a1..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-frontend.json +++ /dev/null @@ -1,209 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "fieldLabel": "AUX ID", - "groups": ["identifiers"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "extension.where(url = 'http://example.com/fhir/extensions#aux_id').value" - }, - { - "fieldName": "given_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.782501, - "u": 0.02372, - "fieldLabel": "First Name", - "groups": ["name", "demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/golden-record/:uid/audit-trail", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "name.given" - }, - { - "fieldName": "family_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.850909, - "u": 0.02975, - "fieldLabel": "Last Name", - "groups": ["name", "demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/golden-record/:uid/audit-trail", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": 
"name.family" - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.786614, - "u": 0.443018, - "fieldLabel": "Gender", - "groups": ["demographics", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "gender" - }, - { - "fieldName": "dob", - "fieldType": "String", - "m": 0.894637, - "u": 0.012448, - "fieldLabel": "Date of Birth", - "groups": ["demographics", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "birthDate" - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(trigram)", - "m": 0.872691, - "u": 0.132717, - "fieldLabel": "City", - "groups": ["demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "address.city" - - }, - { - "fieldName": "phone_number", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.920281, - "u": 0.322629, - "fieldLabel": "Phone No", - "groups": ["demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "telecom.where(system = 'phone').value" - - }, - { - "fieldName": "national_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.832336, - "u": 0.000133, - 
"fieldLabel": "National ID", - "groups": ["identifiers", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "identifier.where(system='http://hl7.org/fhir/sid/us-ssn').value" - } - ], - "systemFields": [ - { - "fieldName": "recordType", - "fieldType": "String", - "fieldLabel": "Record Type", - "groups": ["none"], - "scope": [ - "/notifications/match-details" - ], - "accessLevel": [] - }, - { - "fieldName": "uid", - "fieldType": "String", - "fieldLabel": "UID", - "groups": ["identifiers", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [] - }, - { - "fieldName": "score", - "fieldType": "Number", - "fieldLabel": "Match", - "groups": ["none"], - "scope": ["/patient-record/:uid", "/golden-record/:uid"], - "accessLevel": [] - }, - { - "fieldName": "sourceId", - "fieldType": "SourceId", - "fieldLabel": "Site Code", - "groups": ["registering_facility", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/golden-record/:uid/linked-records" - ], - "accessLevel": [] - } - ] -} diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-example.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-example.json deleted file mode 100644 index 4378d2781..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-example.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "nat_fingerprint_code", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, 
- { - "fieldName": "emr_fingerprint_code", - "fieldType": "String", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "dob", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "indexEntity": "@index(exact)" - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": [ - "nat_fingerprint_code" - ], - "text": "eq(nat_fingerprint_code)" - } - } - } -} - - diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-gender-dob.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-gender-dob.json deleted file mode 100644 index f9a9e5861..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria-gender-dob.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "nat_fingerprint_code", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "emr_fingerprint_code", - "fieldType": "String", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "dob", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "indexEntity": "@index(exact)" - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": [ - "nat_fingerprint_code" - ], - "text": "eq(nat_fingerprint_code)" - } - } - } -} - - diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria.json deleted file mode 100644 index ed412f66d..000000000 --- 
a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-nigeria.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "nat_fingerprint_code", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)" - }, - { - "fieldName": "emr_fingerprint_code", - "fieldType": "String", - "indexEntity": "@index(exact)" - }, - { - "fieldName": "given_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.782501, - "u": 0.023720 - }, - { - "fieldName": "family_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.850909, - "u": 0.029750 - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "m": 0.786614, - "u": 0.443018 - }, - { - "fieldName": "dob", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "m": 0.894637, - "u": 0.012448 - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(trigram)", - "m": 0.872691, - "u": 0.132717 - }, - { - "fieldName": "phone_number", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.920281, - "u": 0.322629 - }, - { - "fieldName": "national_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.832336, - "u": 0.000133 - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": [ - "given_name", - "family_name", - "phone_number", - "national_id", - "nat_fingerprint_code" - ], - "text": "eq(nat_fingerprint_code) or (eq(national_id) and eq(given_name) and eq(family_name) and eq(phone_number))" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_DISTANCE": { - "vars": [ - "given_name", - "family_name", - "city" - ], - "text": "match(given_name,3) and 
match(family_name,3) or match(given_name,3) and match(city,3) or match(family_name,3) and match(city,3)" - }, - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_PHONE_NUMBER": { - "vars": [ - "phone_number" - ], - "text": "eq(phone_number)" - } - } - } -} - - diff --git a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-reference_backup.json b/JeMPI_Apps/JeMPI_Configuration/deprecated/config-reference_backup.json deleted file mode 100644 index f1abbc4e6..000000000 --- a/JeMPI_Apps/JeMPI_Configuration/deprecated/config-reference_backup.json +++ /dev/null @@ -1,231 +0,0 @@ -{ - "fields": [ - { - "fieldName": "aux_id", - "fieldType": "String", - "indexGoldenRecord": "@index(exact)", - "fieldLabel": "AUX ID", - "groups": ["identifiers"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "extension.where(url = 'http://example.com/fhir/extensions#aux_id').value" - }, - { - "fieldName": "given_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.782501, - "u": 0.02372, - "fieldLabel": "First Name", - "groups": ["name", "demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/golden-record/:uid/audit-trail", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "name.given" - }, - { - "fieldName": "family_name", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.850909, - "u": 0.02975, - "fieldLabel": "Last Name", - "groups": ["name", "demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/golden-record/:uid/audit-trail", - "/search/simple", - 
"/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "name.family" - }, - { - "fieldName": "gender", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.786614, - "u": 0.443018, - "fieldLabel": "Gender", - "groups": ["demographics", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "gender" - }, - { - "fieldName": "dob", - "fieldType": "String", - "m": 0.894637, - "u": 0.012448, - "fieldLabel": "Date of Birth", - "groups": ["demographics", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "birthDate" - }, - { - "fieldName": "city", - "fieldType": "String", - "indexGoldenRecord": "@index(trigram)", - "m": 0.872691, - "u": 0.132717, - "fieldLabel": "City", - "groups": ["demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "address.city" - - }, - { - "fieldName": "phone_number", - "fieldType": "String", - "indexGoldenRecord": "@index(exact,trigram)", - "m": 0.920281, - "u": 0.322629, - "fieldLabel": "Phone No", - "groups": ["demographics", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/custom" - ], - "accessLevel": [], - "fhirPath": "telecom.where(system = 'phone').value" - - }, - { - "fieldName": "national_id", - "fieldType": "String", - 
"indexGoldenRecord": "@index(exact,trigram)", - "indexEntity": "@index(exact,trigram)", - "m": 0.832336, - "u": 0.000133, - "fieldLabel": "National ID", - "groups": ["identifiers", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search/simple", - "/search/custom", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [], - "fhirPath": "identifier.where(system='http://hl7.org/fhir/sid/us-ssn').value" - } - ], - "systemFields": [ - { - "fieldName": "recordType", - "fieldType": "String", - "fieldLabel": "Record Type", - "groups": ["none"], - "scope": [ - "/notifications/match-details" - ], - "accessLevel": [] - }, - { - "fieldName": "uid", - "fieldType": "String", - "fieldLabel": "UID", - "groups": ["identifiers", "sub_heading", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/notifications/match-details", - "/golden-record/:uid/linked-records", - "/search-results/golden", - "/search-results/patient" - ], - "accessLevel": [] - }, - { - "fieldName": "score", - "fieldType": "Number", - "fieldLabel": "Match", - "groups": ["none"], - "scope": ["/patient-record/:uid", "/golden-record/:uid"], - "accessLevel": [] - }, - { - "fieldName": "sourceId", - "fieldType": "SourceId", - "fieldLabel": "Site Code", - "groups": ["registering_facility", "linked_records"], - "scope": [ - "/patient-record/:uid", - "/golden-record/:uid", - "/golden-record/:uid/linked-records" - ], - "accessLevel": [] - } - ], - "rules": { - "deterministic": { - "QUERY_DETERMINISTIC_GOLDEN_RECORD_CANDIDATES": { - "vars": ["given_name", "family_name", "phone_number", "national_id"], - "text": "eq(national_id) or (eq(given_name) and eq(family_name) and eq(phone_number))" - } - }, - "probabilistic": { - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_DISTANCE": { - "vars": ["given_name", "family_name", "city"], - "text": "match(given_name,3) and 
match(family_name,3) or match(given_name,3) and match(city,3) or match(family_name,3) and match(city,3)" - }, - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_PHONE_NUMBER": { - "vars": ["phone_number"], - "text": "match(phone_number,3)" - }, - "QUERY_MATCH_GOLDEN_RECORD_CANDIDATES_BY_NATIONAL_ID": { - "vars": ["national_id"], - "text": "match(national_id,3)" - } - } - } -} diff --git a/JeMPI_Apps/JeMPI_Configuration/project/build.properties b/JeMPI_Apps/JeMPI_Configuration/project/build.properties index 303541e50..63df6ac85 100644 --- a/JeMPI_Apps/JeMPI_Configuration/project/build.properties +++ b/JeMPI_Apps/JeMPI_Configuration/project/build.properties @@ -1 +1 @@ -sbt.version = 1.9.6 +sbt.version = 1.9.8 \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_Configuration/project/plugins.sbt b/JeMPI_Apps/JeMPI_Configuration/project/plugins.sbt index e9d3fd12d..4f3e1177c 100644 --- a/JeMPI_Apps/JeMPI_Configuration/project/plugins.sbt +++ b/JeMPI_Apps/JeMPI_Configuration/project/plugins.sbt @@ -1 +1,5 @@ addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.1") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.5") +addSbtPlugin("org.jmotor.sbt" % "sbt-dependency-updates" % "1.2.7") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") + diff --git a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles-api.json b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles-api.json index 47c7dde03..33c65f53c 100644 --- a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles-api.json +++ b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles-api.json @@ -11,7 +11,7 @@ "scope": [ "/patient-record/:uid", "/golden-record/:uid", - "/browse-records/record-details/:uid", + "/record-details/:uid", "/search/custom" ], "readOnly": true, @@ -30,12 +30,12 @@ ], "scope": [ "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/simple", "/search/custom", 
"/search-results/golden", "/search-results/patient", - "/browse-records/record-details/:uid", + "/record-details/:uid", "/browse-records" ], "validation": { @@ -56,9 +56,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/simple", "/search/custom", "/search-results/golden", @@ -83,9 +83,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/custom", "/browse-records" ], @@ -108,9 +108,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/simple", "/search/custom", "/search-results/golden", @@ -134,9 +134,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/custom", "/browse-records" ], @@ -157,9 +157,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/custom", "/browse-records" ], @@ -175,9 +175,9 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search/simple", "/search/custom", "/search-results/golden", @@ -202,7 +202,7 @@ ], "scope": [ "/notifications/match-details", - "/browse-records/record-details/:uid/relink" + "/record-details/:uid/relink" ], 
"accessLevel": [] }, @@ -219,10 +219,10 @@ ], "scope": [ "/notifications/match-details", - "/browse-records/record-details/:uid/relink", + "/record-details/:uid/relink", "/search-results/golden", "/search-results/patient", - "/browse-records/record-details/:uid", + "/record-details/:uid", "/browse-records" ], "accessLevel": [] @@ -237,24 +237,11 @@ "audit_trail" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/browse-records" ], "accessLevel": [] }, - { - "fieldName": "actions", - "fieldType": "", - "fieldLabel": "Actions", - "groups": [ - "linked_records" - ], - "scope": [ - "/notifications/match-details", - "/browse-records/record-details/:uid/relink" - ], - "accessLevel": [] - }, { "fieldName": "sourceId", "fieldType": "SourceId", @@ -265,7 +252,7 @@ "record_details" ], "scope": [ - "/browse-records/record-details/:uid", + "/record-details/:uid", "/browse-records" ], "accessLevel": [] @@ -281,9 +268,9 @@ "scope": [ "/patient-record/:uid", "/golden-record/:uid", - "/browse-records/record-details/:uid", + "/record-details/:uid", "/notifications/match-details", - "/browse-records/record-details/:uid/relink" + "/record-details/:uid/relink" ], "accessLevel": [] } diff --git a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles.json b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles.json index 031bd7942..ac35ab15c 100644 --- a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles.json +++ b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference-roles.json @@ -27,7 +27,8 @@ }, { "fieldName": "aux_id", - "fieldType": "String" + "fieldType": "String", + "source": "aux_id" } ], "additionalNodes": [ diff --git a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference.json b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference.json index 6b6c7ab29..55a730868 100644 --- a/JeMPI_Apps/JeMPI_Configuration/reference/config-reference.json +++ 
b/JeMPI_Apps/JeMPI_Configuration/reference/config-reference.json @@ -27,7 +27,8 @@ }, { "fieldName": "aux_id", - "fieldType": "String" + "fieldType": "String", + "source": "aux_id" } ], "additionalNodes": [ diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Config.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Config.scala index 572cd2818..217894d66 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Config.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Config.scala @@ -1,53 +1,65 @@ package configuration -case class UniqueField(fieldName: String, - fieldType: String, - csvCol: Option[Int], - index: Option[String], - isList: Option[Boolean], - default: Option[String]) - -case class AdditionalNode(nodeName: String, - fields: Array[AdditionalNodeField]) - -case class AdditionalNodeField(fieldName: String, - fieldType: String, - csvCol: Option[Int]) - -case class DemographicField(fieldName: String, - fieldType: String, - isList: Option[Boolean], - source: Option[Source], - indexGoldenRecord: Option[String], - indexInteraction: Option[String], - linkMetaData: Option[ProbabilisticMetaData], - validateMetaData: Option[ProbabilisticMetaData], - matchMetaData: Option[ProbabilisticMetaData]) - -case class Source(generate: Option[Generate], - csvCol: Option[Int]) +case class UniqueField( + fieldName: String, + fieldType: String, + csvCol: Option[Int], + index: Option[String], + isList: Option[Boolean], + source: Option[String], + default: Option[String] +) + +case class AdditionalNode(nodeName: String, fields: Array[AdditionalNodeField]) + +case class AdditionalNodeField( + fieldName: String, + fieldType: String, + csvCol: Option[Int] +) + +case class DemographicField( + fieldName: String, + fieldType: String, + isList: Option[Boolean], + source: Option[Source], + indexGoldenRecord: Option[String], + indexInteraction: Option[String], + linkMetaData: Option[ProbabilisticMetaData], + 
validateMetaData: Option[ProbabilisticMetaData], + matchMetaData: Option[ProbabilisticMetaData] +) + +case class Source(generate: Option[Generate], csvCol: Option[Int]) case class Generate(func: String) -case class ProbabilisticMetaData(comparison: String, - comparisonLevels: List[Double], - m: Double, - u: Double) +case class ProbabilisticMetaData( + comparison: String, + comparisonLevels: List[Double], + m: Double, + u: Double +) -case class Rule(vars: Array[String], - text: String) +case class Rule(vars: Array[String], text: String) -case class AllRules(deterministic: Option[Map[String, Rule]], - probabilistic: Option[Map[String, Rule]]) +case class AllRules( + deterministic: Option[Map[String, Rule]], + probabilistic: Option[Map[String, Rule]] +) case class ValidateRules(deterministic: Map[String, Rule]) -case class Rules(link: Option[AllRules], - validate: Option[ValidateRules], - matchNotification: Option[AllRules]) +case class Rules( + link: Option[AllRules], + validate: Option[ValidateRules], + matchNotification: Option[AllRules] +) -case class Config(uniqueInteractionFields: Option[Array[UniqueField]], - uniqueGoldenRecordFields: Option[Array[UniqueField]], - additionalNodes: Option[Array[AdditionalNode]], - demographicFields: Array[DemographicField], - rules: Rules) +case class Config( + uniqueInteractionFields: Option[Array[UniqueField]], + uniqueGoldenRecordFields: Option[Array[UniqueField]], + additionalNodes: Option[Array[AdditionalNode]], + demographicFields: Array[DemographicField], + rules: Rules +) diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomAsyncHelper.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomAsyncHelper.scala index f4da1a334..9712c0e63 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomAsyncHelper.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomAsyncHelper.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private 
object CustomAsyncHelper { - private val classLocation = "../JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver" + private val classLocation = + "../JeMPI_AsyncReceiver/src/main/java/org/jembi/jempi/async_receiver" private val customClassName = "CustomAsyncHelper" private val packageText = "org.jembi.jempi.async_receiver" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import org.apache.commons.csv.CSVRecord; |import org.jembi.jempi.shared.models.CustomDemographicData; @@ -37,7 +38,13 @@ private object CustomAsyncHelper { |${demographicFields()}); | } | - |${if (config.additionalNodes.isEmpty) "" else config.additionalNodes.get.map(x => customNodeConstructor(x)).mkString} + |${ + if (config.additionalNodes.isEmpty) "" + else + config.additionalNodes.get + .map(x => customNodeConstructor(x)) + .mkString + } |} |""".stripMargin) @@ -48,53 +55,69 @@ private object CustomAsyncHelper { def additionalNodeFields(additionalNode: AdditionalNode): String = additionalNode.fields - .map(f => s"""${" " * 3}private static final int ${additionalNode.nodeName.toUpperCase}_${f.fieldName.toUpperCase}_COL_NUM = ${f.csvCol.get}""") - .mkString( - s"""; - |""".stripMargin) + .map(f => + if (f.csvCol.isEmpty) "" + else + s""" private static final int ${additionalNode.nodeName.toUpperCase}_${f.fieldName.toUpperCase}_COL_NUM = ${f.csvCol.get}; + |""".stripMargin + ) + .mkString("") + .stripMargin .stripTrailing() end additionalNodeFields def uniqueInteractionFields(fields: Array[UniqueField]): String = fields - .map(f => if (f.csvCol.isEmpty) "" else - s""" private static final int 
${f.fieldName.toUpperCase()}_COL_NUM = ${f.csvCol.get}; - |""".stripMargin) + .map(f => + if (f.csvCol.isEmpty) "" + else + s""" private static final int ${f.fieldName + .toUpperCase()}_COL_NUM = ${f.csvCol.get}; + |""".stripMargin + ) .mkString("") .stripMargin end uniqueInteractionFields - (if (config.uniqueInteractionFields.isEmpty) "" else - uniqueInteractionFields(config.uniqueInteractionFields.get)) + (if (config.uniqueInteractionFields.isEmpty) "" + else + uniqueInteractionFields(config.uniqueInteractionFields.get)) + - (if (config.additionalNodes.isEmpty) "" else - config.additionalNodes.get.map(x => s"""${additionalNodeFields(x)};""").mkString("\n")) + "\n" - + - config - .demographicFields - .filter(f => f.source.isDefined && f.source.get.csvCol.isDefined) - .map(f => - s"""${" " * 3}private static final int ${f.fieldName.toUpperCase}_COL_NUM = ${f.source.get.csvCol.get};""") - .mkString("\n") + (if ( + config.additionalNodes.isEmpty || config.additionalNodes.get.isEmpty + ) "" + else + config.additionalNodes.get + .map(x => s"""${additionalNodeFields(x)}""") + .mkString(sys.props("line.separator"))) + sys.props( + "line.separator" + ) + + + config.demographicFields + .filter(f => f.source.isDefined && f.source.get.csvCol.isDefined) + .map(f => + s"""${" " * 3}private static final int ${f.fieldName.toUpperCase}_COL_NUM = ${f.source.get.csvCol.get};""" + ) + .mkString(sys.props("line.separator")) end columnIndices def demographicFields(): String = - config - .demographicFields - .map(f => if (f.source.isDefined && f.source.get.generate.isDefined) { - s"""${" " * 9}null,""" - } else { - s"""${" " * 9}csvRecord.get(${f.fieldName.toUpperCase}_COL_NUM),""" - }) - .mkString("\n") + config.demographicFields + .map(f => + if (f.source.isDefined && f.source.get.generate.isDefined) { + s"""${" " * 9}null,""" + } else { + s"""${" " * 9}csvRecord.get(${f.fieldName.toUpperCase}_COL_NUM),""" + } + ) + .mkString(sys.props("line.separator")) .dropRight(1) end 
demographicFields def customUniqueInteractionArguments(): String = - if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get .map(f => if (f.fieldName.toUpperCase.equals("AUX_ID")) { s"""${" " * 45}Main.parseRecordNumber(csvRecord.get(${f.fieldName.toUpperCase}_COL_NUM))""" @@ -102,9 +125,9 @@ private object CustomAsyncHelper { s"""${" " * 45}java.time.LocalDateTime.now()""" } else { s"""${" " * 45}csvRecord.get(${f.fieldName.toUpperCase}_COL_NUM)""" - }) - .mkString( - """, + } + ) + .mkString(""", |""".stripMargin) .trim end customUniqueInteractionArguments @@ -112,7 +135,15 @@ private object CustomAsyncHelper { def customNodeConstructor(additionalNode: AdditionalNode): String = def arguments(fields: Array[AdditionalNodeField]): String = - fields.map(f => s""" csvRecord.get(${additionalNode.nodeName.toUpperCase}_${f.fieldName.toUpperCase()}_COL_NUM)""").mkString(",\n") + fields + .map(f => + if (f.csvCol.isEmpty) + s"""${" " * 9}null""" + else + s"""${" " * 9}csvRecord.get(${additionalNode.nodeName.toUpperCase}_${f.fieldName + .toUpperCase()}_COL_NUM)""" + ) + .mkString(s",${sys.props("line.separator")}") end arguments s""" static Custom${additionalNode.nodeName} custom${additionalNode.nodeName}(final CSVRecord csvRecord) { diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomControllerDashboardMU.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomControllerDashboardMU.scala new file mode 100644 index 000000000..540d99582 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomControllerDashboardMU.scala @@ -0,0 +1,74 @@ +package configuration + +import java.io.{File, PrintWriter} + +private object CustomControllerDashboardMU { + + private val classLocation = + "../JeMPI_Controller/src/main/java/org/jembi/jempi/controller" + private val customClassName = 
"CustomControllerDashboardMU" + private val packageSharedModels = " org.jembi.jempi.controller" + + def generate(config: Config): Unit = + + def fieldParameters(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 6}MU ${fieldName},""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fieldParameters + + def fromFields(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 45}getMU(customFieldTallies.${fieldName}()),""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fromFields + + val classFile: String = + classLocation + File.separator + customClassName + ".java" + println("Creating " + classFile) + val file: File = new File(classFile) + val writer: PrintWriter = new PrintWriter(file) + writer.print( + s"""package $packageSharedModels; + | + |import org.jembi.jempi.shared.models.CustomFieldTallies; + |import org.jembi.jempi.shared.models.CustomFieldTallies.FieldTally; + | + |record $customClassName( + | ${fieldParameters()}) { + | + | static MU getMU(final FieldTally fieldTally) { + | if (fieldTally.a() + fieldTally.b() == 0 || fieldTally.c() + fieldTally.d() == 0) { + | return new MU(-1.0, -1.0); + | } + | return new MU(fieldTally.a().doubleValue() / (fieldTally.a().doubleValue() + fieldTally.b().doubleValue()), + | fieldTally.c().doubleValue() / (fieldTally.c().doubleValue() + fieldTally.d().doubleValue())); + | } + | + | record MU( + | Double m, + | Double u) { + | } + | + | static CustomControllerDashboardMU fromCustomFieldTallies(final CustomFieldTallies customFieldTallies) { + | return new CustomControllerDashboardMU(${fromFields()}); + | } + | + |} + |""".stripMargin + ) + writer.flush() + writer.close() + end generate + +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphConstants.scala 
b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphConstants.scala index 805131f5e..6c867acc3 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphConstants.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphConstants.scala @@ -4,19 +4,20 @@ import java.io.{File, PrintWriter} private object CustomDgraphConstants { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphConstants" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = { - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |public final class $customClassName { |""".stripMargin) @@ -39,8 +40,7 @@ private object CustomDgraphConstants { mutation_create_interaction_type() mutation_create_interaction_fields() - writer.println( - s""" private $customClassName() {} + writer.println(s""" private $customClassName() {} | |}""".stripMargin) writer.flush() @@ -48,33 +48,45 @@ private object CustomDgraphConstants { def golden_record_predicates(): Unit = if (config.uniqueGoldenRecordFields.isDefined) - config.uniqueGoldenRecordFields.get.zipWithIndex.foreach { case (field, _) => val name = Utils.camelCaseToSnakeCase(field.fieldName) - writer.println( - s""" public static final String PREDICATE_GOLDEN_RECORD_${name.toUpperCase} = "GoldenRecord.$name";""".stripMargin) + config.uniqueGoldenRecordFields.get.zipWithIndex.foreach { + case (field, _) => + val name = 
Utils.camelCaseToSnakeCase(field.fieldName) + writer.println( + s""" public static final String PREDICATE_GOLDEN_RECORD_${name.toUpperCase} = "GoldenRecord.$name";""".stripMargin + ) } end if - config.demographicFields.zipWithIndex.foreach { case (field, _) => val name = Utils.camelCaseToSnakeCase(field.fieldName) + config.demographicFields.zipWithIndex.foreach { case (field, _) => + val name = Utils.camelCaseToSnakeCase(field.fieldName) writer.println( - s""" public static final String PREDICATE_GOLDEN_RECORD_${name.toUpperCase} = "GoldenRecord.$name";""".stripMargin) + s""" public static final String PREDICATE_GOLDEN_RECORD_${name.toUpperCase} = "GoldenRecord.$name";""".stripMargin + ) } - writer.println(s""" public static final String PREDICATE_GOLDEN_RECORD_INTERACTIONS = "GoldenRecord.interactions";""".stripMargin) + writer.println( + s""" public static final String PREDICATE_GOLDEN_RECORD_INTERACTIONS = "GoldenRecord.interactions";""".stripMargin + ) end golden_record_predicates def interaction_predicates(): Unit = if (config.uniqueInteractionFields.isDefined) - config.uniqueInteractionFields.get.zipWithIndex.foreach { case (field, _) => val name = Utils.camelCaseToSnakeCase(field.fieldName) - writer.println( - s""" public static final String PREDICATE_INTERACTION_${name.toUpperCase} = "Interaction.$name";""".stripMargin) + config.uniqueInteractionFields.get.zipWithIndex.foreach { + case (field, _) => + val name = Utils.camelCaseToSnakeCase(field.fieldName) + writer.println( + s""" public static final String PREDICATE_INTERACTION_${name.toUpperCase} = "Interaction.$name";""".stripMargin + ) } end if - config.demographicFields.zipWithIndex.foreach { case (field, _) => val fieldName = Utils.camelCaseToSnakeCase(field.fieldName) - writer.println(s""" public static final String PREDICATE_INTERACTION_${fieldName.toUpperCase} = "Interaction.$fieldName";""".stripMargin) + config.demographicFields.zipWithIndex.foreach { case (field, _) => + val fieldName = 
Utils.camelCaseToSnakeCase(field.fieldName) + writer.println( + s""" public static final String PREDICATE_INTERACTION_${fieldName.toUpperCase} = "Interaction.$fieldName";""".stripMargin + ) } end interaction_predicates def golden_record_field_names(): Unit = - writer.println( - s""" + writer.println(s""" | static final String GOLDEN_RECORD_FIELD_NAMES = | \"\"\" | uid @@ -97,8 +109,7 @@ private object CustomDgraphConstants { end golden_record_field_names def expanded_golden_record_field_names(): Unit = { - writer.println( - s""" + writer.println(s""" | static final String EXPANDED_GOLDEN_RECORD_FIELD_NAMES = | \"\"\" | uid @@ -118,8 +129,7 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" GoldenRecord.$name") }) - writer.println( - s""" GoldenRecord.interactions @facets(score) { + writer.println(s""" GoldenRecord.interactions @facets(score) { | uid | Interaction.source_id { | uid @@ -136,14 +146,12 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" Interaction.$name") }) - writer.println( - s""" } + writer.println(s""" } | \"\"\";""".stripMargin) } def interaction_field_names(): Unit = { - writer.println( - s""" static final String INTERACTION_FIELD_NAMES = + writer.println(s""" static final String INTERACTION_FIELD_NAMES = | \"\"\" | uid | Interaction.source_id { @@ -163,8 +171,7 @@ private object CustomDgraphConstants { writer.println(s" Interaction.$name") }) - writer.println( - s""" \"\"\"; + writer.println(s""" \"\"\"; |""".stripMargin) } @@ -178,7 +185,8 @@ private object CustomDgraphConstants { | uid | SourceId.facility | SourceId.patient - | }""".stripMargin) + | }""".stripMargin + ) if (config.uniqueInteractionFields.isDefined) { config.uniqueInteractionFields.get.foreach(field => { @@ -191,8 +199,7 @@ private object CustomDgraphConstants { writer.println(s" Interaction.$name") }) - writer.println( - s""" ~GoldenRecord.interactions @facets(score) { + writer.println(s""" 
~GoldenRecord.interactions @facets(score) { | uid | GoldenRecord.source_id { | uid @@ -210,16 +217,14 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" GoldenRecord.$name") }) - writer.println( - s""" } + writer.println(s""" } | \"\"\"; |""".stripMargin) } def query_get_interaction_by_uid(): Unit = { - writer.println( - s""" static final String QUERY_GET_INTERACTION_BY_UID = + writer.println(s""" static final String QUERY_GET_INTERACTION_BY_UID = | \"\"\" | query interactionByUid($$uid: string) { | all(func: uid($$uid)) { @@ -239,16 +244,14 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" Interaction.$name") }) - writer.println( - s""" } + writer.println(s""" } | } | \"\"\"; |""".stripMargin) } def query_get_golden_record_by_uid(): Unit = { - writer.println( - s""" static final String QUERY_GET_GOLDEN_RECORD_BY_UID = + writer.println(s""" static final String QUERY_GET_GOLDEN_RECORD_BY_UID = | \"\"\" | query goldenRecordByUid($$uid: string) { | all(func: uid($$uid)) { @@ -268,8 +271,7 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" GoldenRecord.$name") }) - writer.println( - s""" } + writer.println(s""" } | } | \"\"\"; |""".stripMargin) @@ -286,7 +288,8 @@ private object CustomDgraphConstants { | uid | SourceId.facility | SourceId.patient - | }""".stripMargin) + | }""".stripMargin + ) if (config.uniqueInteractionFields.isDefined) { config.uniqueInteractionFields.get.foreach(field => { val name = field.fieldName @@ -304,7 +307,8 @@ private object CustomDgraphConstants { | uid | SourceId.facility | SourceId.patient - | }""".stripMargin) + | }""".stripMargin + ) if (config.uniqueGoldenRecordFields.isDefined) { config.uniqueGoldenRecordFields.get.foreach(field => { val name = field.fieldName @@ -315,8 +319,7 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" GoldenRecord.$name") }) - writer.println( - s""" } + 
writer.println(s""" } | } | } | \"\"\"; @@ -324,8 +327,7 @@ private object CustomDgraphConstants { } def query_get_golden_records(): Unit = { - writer.println( - s""" static final String QUERY_GET_GOLDEN_RECORDS = + writer.println(s""" static final String QUERY_GET_GOLDEN_RECORDS = | \"\"\" | query goldenRecord() { | all(func: uid(%s)) { @@ -345,16 +347,14 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" GoldenRecord.$name") }) - writer.println( - s""" } + writer.println(s""" } | } | \"\"\"; |""".stripMargin) } def query_get_expanded_golden_records(): Unit = { - writer.println( - s""" static final String QUERY_GET_EXPANDED_GOLDEN_RECORDS = + writer.println(s""" static final String QUERY_GET_EXPANDED_GOLDEN_RECORDS = | \"\"\" | query expandedGoldenRecord() { | all(func: uid(%s), orderdesc: GoldenRecord.aux_date_created) { @@ -381,7 +381,8 @@ private object CustomDgraphConstants { | uid | SourceId.facility | SourceId.patient - | }""".stripMargin) + | }""".stripMargin + ) if (config.uniqueInteractionFields.isDefined) { config.uniqueInteractionFields.get.foreach(field => { val name = field.fieldName @@ -392,8 +393,7 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" Interaction.$name") }) - writer.println( - s""" } + writer.println(s""" } | } | } | \"\"\"; @@ -401,8 +401,7 @@ private object CustomDgraphConstants { } def mutation_create_source_id_type(): Unit = { - writer.println( - s""" static final String MUTATION_CREATE_SOURCE_ID_TYPE = + writer.println(s""" static final String MUTATION_CREATE_SOURCE_ID_TYPE = | \"\"\" | type SourceId { | SourceId.facility @@ -413,8 +412,7 @@ private object CustomDgraphConstants { } def mutation_create_source_id_fields(): Unit = { - writer.println( - s""" static final String MUTATION_CREATE_SOURCE_ID_FIELDS = + writer.println(s""" static final String MUTATION_CREATE_SOURCE_ID_FIELDS = | \"\"\" | SourceId.facility: string @index(exact) . 
| SourceId.patient: string @index(exact) . @@ -428,7 +426,8 @@ private object CustomDgraphConstants { | \"\"\" | | type GoldenRecord { - | GoldenRecord.source_id: [SourceId]""".stripMargin) + | GoldenRecord.source_id: [SourceId]""".stripMargin + ) if (config.uniqueGoldenRecordFields.isDefined) { config.uniqueGoldenRecordFields.get.foreach(field => { val name = field.fieldName @@ -443,38 +442,48 @@ private object CustomDgraphConstants { s""" GoldenRecord.interactions: [Interaction] | } | \"\"\"; - """.stripMargin) + """.stripMargin + ) } def mutation_create_golden_record_fields(): Unit = { writer.println( s""" static final String MUTATION_CREATE_GOLDEN_RECORD_FIELDS = | \"\"\" - | GoldenRecord.source_id: [uid] .""".stripMargin) + | GoldenRecord.source_id: [uid] .""".stripMargin + ) if (config.uniqueGoldenRecordFields.isDefined) { config.uniqueGoldenRecordFields.get.foreach(field => { val name = field.fieldName val index = field.index.getOrElse("") - val fieldType = (if field.isList.isDefined && field.isList.get then "[" else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get then "]" else "") + val fieldType = + (if field.isList.isDefined && field.isList.get then "[" + else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get + then "]" + else "") writer.println( - s"""${" " * 9}GoldenRecord.$name:${" " * (25 - name.length)}$fieldType${ - " " * (10 - fieldType.length) - }$index${" " * (35 - index.length)}.""".stripMargin) + s"""${" " * 9}GoldenRecord.$name:${" " * (25 - name.length)}$fieldType${" " * (10 - fieldType.length)}$index${" " * (35 - index.length)}.""".stripMargin + ) }) } - config.demographicFields.foreach(field => { - val name = field.fieldName - val index = field.indexGoldenRecord.getOrElse("") - val fieldType = (if field.isList.isDefined && field.isList.get then "[" else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get then "]" else "") - writer.println( - 
s"""${" " * 9}GoldenRecord.$name:${" " * (25 - name.length)}$fieldType${ - " " * (10 - fieldType.length) - }$index${" " * (35 - index.length)}.""".stripMargin) - }) + config.demographicFields + .foreach(field => { + val name = field.fieldName + val index = field.indexGoldenRecord.getOrElse("") + val fieldType = + (if field.isList.isDefined && field.isList.get then "[" + else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get + then "]" + else "") + writer.println( + s"""${" " * 9}GoldenRecord.$name:${" " * (25 - name.length)}$fieldType${" " * (10 - fieldType.length)}$index${" " * (35 - index.length)}.""".stripMargin + ) + }) writer.println( s""" GoldenRecord.interactions: [uid] @reverse . | \"\"\"; - |""".stripMargin) + |""".stripMargin + ) } def mutation_create_interaction_type(): Unit = @@ -483,7 +492,8 @@ private object CustomDgraphConstants { | \"\"\" | | type Interaction { - | Interaction.source_id: SourceId""".stripMargin) + | Interaction.source_id: SourceId""".stripMargin + ) if (config.uniqueInteractionFields.isDefined) { config.uniqueInteractionFields.get.foreach(field => { val name = field.fieldName @@ -494,8 +504,7 @@ private object CustomDgraphConstants { val name = field.fieldName writer.println(s" Interaction.$name") }) - writer.println( - s""" } + writer.println(s""" } | \"\"\"; |""".stripMargin) end mutation_create_interaction_type @@ -504,16 +513,20 @@ private object CustomDgraphConstants { writer.println( s""" static final String MUTATION_CREATE_INTERACTION_FIELDS = | \"\"\" - | Interaction.source_id: uid .""".stripMargin) + | Interaction.source_id: uid .""".stripMargin + ) if (config.uniqueInteractionFields.isDefined) { config.uniqueInteractionFields.get.foreach(field => { val name = field.fieldName val index = field.index.getOrElse("") - val fieldType = (if field.isList.isDefined && field.isList.get then "[" else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get then "]" else "") + 
val fieldType = + (if field.isList.isDefined && field.isList.get then "[" + else "") + field.fieldType.toLowerCase + (if field.isList.isDefined && field.isList.get + then "]" + else "") writer.println( - s"""${" " * 9}Interaction.$name:${" " * (29 - name.length)}$fieldType${ - " " * (10 - fieldType.length) - }$index${" " * (35 - index.length)}.""".stripMargin) + s"""${" " * 9}Interaction.$name:${" " * (29 - name.length)}$fieldType${" " * (10 - fieldType.length)}$index${" " * (35 - index.length)}.""".stripMargin + ) }) } config.demographicFields.foreach(field => { @@ -521,16 +534,10 @@ private object CustomDgraphConstants { val index = field.indexInteraction.getOrElse("") val fieldType = field.fieldType.toLowerCase writer.println( - s"""${" " * 9}Interaction.$name:${ - " " * (29 - name.length) - }$fieldType${ - " " * (10 - fieldType.length) - }$index${ - " " * (35 - index.length) - }.""".stripMargin) + s"""${" " * 9}Interaction.$name:${" " * (29 - name.length)}$fieldType${" " * (10 - fieldType.length)}$index${" " * (35 - index.length)}.""".stripMargin + ) }) - writer.println( - s""" \"\"\"; + writer.println(s""" \"\"\"; |""".stripMargin) } diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedGoldenRecord.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedGoldenRecord.scala index 78ff32ff6..d6bd6d5e1 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedGoldenRecord.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedGoldenRecord.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private object CustomDgraphExpandedGoldenRecord { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphExpandedGoldenRecord" private val packageText = 
"org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; |import com.fasterxml.jackson.annotation.JsonProperty; @@ -53,20 +54,21 @@ private object CustomDgraphExpandedGoldenRecord { def goldenRecordFields(): String = - def mapField(fieldName: String, fieldType: String): String = s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils.javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" + def mapField(fieldName: String, fieldType: String): String = + s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils + .javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" - val f1 = if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => mapField(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") val f2 = - config - .demographicFields + config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 @@ -76,21 +78,24 @@ private object CustomDgraphExpandedGoldenRecord { if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get + config.uniqueGoldenRecordFields.get .map(f => - s"""${" " * 
63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end uniqueArguments def demographicArguments(): String = - config - .demographicFields + config.demographicFields .map(f => - s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end demographicArguments end generate diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedInteraction.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedInteraction.scala index 36dbf7b3b..1f3066cbe 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedInteraction.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphExpandedInteraction.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private object CustomDgraphExpandedInteraction { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphExpandedInteraction" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; |import 
com.fasterxml.jackson.annotation.JsonProperty; @@ -52,22 +53,22 @@ private object CustomDgraphExpandedInteraction { writer.flush() writer.close() - def interactionFields(): String = - def mapField(fieldName: String, fieldType: String): String = s""" @JsonProperty(CustomDgraphConstants.PREDICATE_INTERACTION_${fieldName.toUpperCase}) ${Utils.javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" + def mapField(fieldName: String, fieldType: String): String = + s""" @JsonProperty(CustomDgraphConstants.PREDICATE_INTERACTION_${fieldName.toUpperCase}) ${Utils + .javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" - val f1 = if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get + .map(f => mapField(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 end interactionFields @@ -76,20 +77,23 @@ private object CustomDgraphExpandedInteraction { if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get + config.uniqueInteractionFields.get .map(f => - s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end uniqueArguments def demographicArguments(): String = - config - .demographicFields + config.demographicFields .map(f => - s"""${" " * 55}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 
55}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end demographicArguments end generate diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphGoldenRecord.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphGoldenRecord.scala index 7bd310e46..c1ae81b5f 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphGoldenRecord.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphGoldenRecord.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private object CustomDgraphGoldenRecord { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphGoldenRecord" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; |import com.fasterxml.jackson.annotation.JsonProperty; @@ -45,43 +46,49 @@ private object CustomDgraphGoldenRecord { writer.flush() writer.close() - def goldenRecordFields(): String = - def field(fieldName: String, fieldType: String): String = s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils.javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" + def field(fieldName: String, fieldType: String): String = + s"""${" " * 
6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils + .javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" - val f1 = (if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get - .map(f => field(f.fieldName, f.fieldType)) - .mkString("\n") + "\n") + val f1 = + (if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => field(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props( + "line.separator" + )) val f2 = - config - .demographicFields + config.demographicFields .map(f => field(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) .dropRight(1) f1 + f2 end goldenRecordFields def uniqueArguments(): String = - if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get .map(f => - s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end if end uniqueArguments def demographicArguments(): String = - config - .demographicFields + config.demographicFields .map(f => - s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end demographicArguments end generate diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphInteraction.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphInteraction.scala index 7dfb5bb2e..e6b3e3a1e 100644 --- 
a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphInteraction.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphInteraction.scala @@ -4,18 +4,19 @@ import java.io.{File, PrintWriter} private object CustomDgraphInteraction { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphInteraction" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; |import com.fasterxml.jackson.annotation.JsonProperty; @@ -60,35 +61,41 @@ private object CustomDgraphInteraction { def interactionFields(): String = - def mapField(fieldName: String, fieldType: String): String = s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_INTERACTION_${fieldName.toUpperCase}) ${Utils.javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" + def mapField(fieldName: String, fieldType: String): String = + s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_INTERACTION_${fieldName.toUpperCase}) ${Utils + .javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" - val f1 = if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get + .map(f => mapField(f.fieldName, 
f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 end interactionFields def interactionConstructorArguments(): String = - val f1 = if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => s"""${" " * 11}interaction.uniqueInteractionData().${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n") + "\n" - - val f2 = config - .demographicFields - .map(f => s"""${" " * 11}interaction.demographicData().${Utils.snakeCaseToCamelCase(f.fieldName)},""") - .mkString("\n") + val f1 = + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get + .map(f => + s"""${" " * 11}interaction.uniqueInteractionData().${Utils + .snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + sys.props("line.separator") + + val f2 = config.demographicFields + .map(f => + s"""${" " * 11}interaction.demographicData().${Utils + .snakeCaseToCamelCase(f.fieldName)},""" + ) + .mkString(sys.props("line.separator")) f1 + f2 end interactionConstructorArguments @@ -97,25 +104,26 @@ private object CustomDgraphInteraction { if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)},""") - .mkString("\n") + config.uniqueInteractionFields.get + .map(f => + s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)},""" + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) end if end uniqueArguments def demographicArguments(): String = - config - .demographicFields - .map(f => s"""${" " * 55}this.${Utils.snakeCaseToCamelCase(f.fieldName)},""") - .mkString("\n") + config.demographicFields + .map(f => + s"""${" " * 
55}this.${Utils.snakeCaseToCamelCase(f.fieldName)},""" + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) end demographicArguments end generate -} \ No newline at end of file +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphMutations.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphMutations.scala index 68ef45623..f063a127f 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphMutations.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphMutations.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private object CustomDgraphMutations { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val custom_className = "CustomDgraphMutations" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + custom_className + ".java" + val classFile: String = + classLocation + File.separator + custom_className + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import org.jembi.jempi.shared.models.CustomUniqueInteractionData; |import org.jembi.jempi.shared.models.CustomUniqueGoldenRecordData; @@ -68,18 +69,17 @@ private object CustomDgraphMutations { writer.flush() writer.close() - def checkToString(v: String): String = v match - case "Bool" => ".toString()" + case "Bool" => ".toString()" case "DateTime" => ".toString()" - case _ => "" + case _ => "" end checkToString def castAs(t: String): String = t match - case "String" => "" - case "Bool" => "^^" + case "String" => "" + case "Bool" => "^^" case "DateTime" => "^^" end castAs @@ 
-90,17 +90,16 @@ private object CustomDgraphMutations { s"""${" " * 27}_:%s ${" " * (30 - fieldName.length)}%s$c${" " * (20 - c.length)}.""" end mapField - val f1 = if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get + .map(f => mapField(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 end interactionFields @@ -108,24 +107,28 @@ private object CustomDgraphMutations { def interactionArguments(): String = def mapUniqueField(f: UniqueField): String = - s"""AppUtils.quotedValue(uniqueInteractionData.${Utils.snakeCaseToCamelCase(f.fieldName)}()${checkToString(f.fieldType)})""" + s"""AppUtils.quotedValue(uniqueInteractionData.${Utils + .snakeCaseToCamelCase(f.fieldName)}()${checkToString( + f.fieldType + )})""" end mapUniqueField def mapCommonField(f: DemographicField): String = - s"""AppUtils.quotedValue(demographicData.${Utils.snakeCaseToCamelCase(f.fieldName)})${checkToString(f.fieldType)}""" + s"""AppUtils.quotedValue(demographicData.${Utils.snakeCaseToCamelCase( + f.fieldName + )})${checkToString(f.fieldType)}""" end mapCommonField - val f1 = if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get - .map(f => s"""${" " * 27}uuid, ${mapUniqueField(f)},""") - .mkString("\n") + "\n" + val f1 = + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get + .map(f => s"""${" " * 27}uuid, ${mapUniqueField(f)},""") + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = 
config.demographicFields .map(f => s"""${" " * 27}uuid, ${mapCommonField(f)},""") - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 @@ -138,17 +141,16 @@ private object CustomDgraphMutations { s"""${" " * 27}_:%s ${" " * (30 - fieldName.length)}%s$c${" " * (20 - c.length)}.""" end mapField - val f1 = if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => mapField(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 @@ -157,26 +159,28 @@ private object CustomDgraphMutations { def goldenRecordArguments(): String = def mapUniqueField(f: UniqueField): String = - s"""AppUtils.quotedValue(uniqueGoldenRecordData.${Utils.snakeCaseToCamelCase(f.fieldName)}()${checkToString(f.fieldType)})""" + s"""AppUtils.quotedValue(uniqueGoldenRecordData.${Utils + .snakeCaseToCamelCase(f.fieldName)}()${checkToString( + f.fieldType + )})""" end mapUniqueField def mapDemographicField(f: DemographicField): String = - s"""AppUtils.quotedValue(demographicData.${Utils.snakeCaseToCamelCase(f.fieldName)})${checkToString(f.fieldType)}""" + s"""AppUtils.quotedValue(demographicData.${Utils.snakeCaseToCamelCase( + f.fieldName + )})${checkToString(f.fieldType)}""" end mapDemographicField - val f1 = if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get - .map(f => - s"""${" " * 27}uuid, ${mapUniqueField(f)},""") - .mkString("\n") + "\n" - - val f2 = config - .demographicFields - .map(f => - s"""${" " * 27}uuid, ${mapDemographicField(f)},""") - .mkString("\n") + val f1 = + if 
(config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => s"""${" " * 27}uuid, ${mapUniqueField(f)},""") + .mkString(sys.props("line.separator")) + sys.props("line.separator") + + val f2 = config.demographicFields + .map(f => s"""${" " * 27}uuid, ${mapDemographicField(f)},""") + .mkString(sys.props("line.separator")) f1 + f2 diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphQueries.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphQueries.scala index 282138e4e..d2aafefc7 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphQueries.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphQueries.scala @@ -3,21 +3,21 @@ package configuration import java.io.{File, PrintWriter} import scala.language.{existentials, postfixOps} - object CustomDgraphQueries { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val custom_className = "CustomDgraphQueries" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Any = { - val classFile: String = classLocation + File.separator + custom_className + ".java" + val classFile: String = + classLocation + File.separator + custom_className + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import io.vavr.Function1; |import org.apache.commons.lang3.StringUtils; @@ -38,66 +38,114 @@ object CustomDgraphQueries { if (config.rules.matchNotification.isDefined) { writer.println( s""" static final List> DETERMINISTIC_MATCH_FUNCTIONS = - | List.of(${getDeterministicFunctions(config.rules.matchNotification.get)}); - |""".stripMargin) + | 
List.of(${getDeterministicFunctions( + config.rules.matchNotification.get + )}); + |""".stripMargin + ) } else { writer.println( s""" static final List> DETERMINISTIC_MATCH_FUNCTIONS = | List.of(); - |""".stripMargin) + |""".stripMargin + ) } if (config.rules.link.get.deterministic.isDefined) { - config.rules.link.get.deterministic.get.foreach((name, rule) => emitRuleTemplate(name, rule)) + config.rules.link.get.deterministic.get.foreach((name, rule) => + emitRuleTemplate(name, rule) + ) } - if (config.rules.link.isDefined && config.rules.link.get.probabilistic.isDefined) { - config.rules.link.get.probabilistic.get.foreach((name, rule) => emitRuleTemplate(name, rule)) + if ( + config.rules.link.isDefined && config.rules.link.get.probabilistic.isDefined + ) { + config.rules.link.get.probabilistic.get.foreach((name, rule) => + emitRuleTemplate(name, rule) + ) } - if (config.rules.matchNotification.isDefined && config.rules.matchNotification.get.deterministic.isDefined) { - config.rules.matchNotification.get.deterministic.get.foreach((name, rule) => emitRuleTemplate(name, rule)) + if ( + config.rules.matchNotification.isDefined && config.rules.matchNotification.get.deterministic.isDefined + ) { + config.rules.matchNotification.get.deterministic.get.foreach( + (name, rule) => emitRuleTemplate(name, rule) + ) } - if (config.rules.matchNotification.isDefined && config.rules.matchNotification.get.probabilistic.isDefined) { - config.rules.matchNotification.get.probabilistic.get.foreach((name, rule) => emitRuleTemplate(name, rule)) + if ( + config.rules.matchNotification.isDefined && config.rules.matchNotification.get.probabilistic.isDefined + ) { + config.rules.matchNotification.get.probabilistic.get.foreach( + (name, rule) => emitRuleTemplate(name, rule) + ) } - if (config.rules.link.isDefined && config.rules.link.get.deterministic.isDefined) { - config.rules.link.get.deterministic.get.foreach((name, rule) => emitRuleFunction(name, rule)) + if ( + 
config.rules.link.isDefined && config.rules.link.get.deterministic.isDefined + ) { + config.rules.link.get.deterministic.get.foreach((name, rule) => + emitRuleFunction(name, rule) + ) } - if (config.rules.link.isDefined && config.rules.link.get.probabilistic.isDefined) { - config.rules.link.get.probabilistic.get.foreach((name, rule) => emitRuleFunction(name, rule)) + if ( + config.rules.link.isDefined && config.rules.link.get.probabilistic.isDefined + ) { + config.rules.link.get.probabilistic.get.foreach((name, rule) => + emitRuleFunction(name, rule) + ) } emitMergeCandidates() - emitFindCandidates("Link", "DETERMINISTIC_LINK_FUNCTIONS", config.rules.link) - - if (config.rules.matchNotification.isDefined && config.rules.matchNotification.get.deterministic.isDefined) { - config.rules.matchNotification.get.deterministic.get.foreach((name, rule) => emitRuleFunction(name, rule)) + emitFindCandidates( + "Link", + "DETERMINISTIC_LINK_FUNCTIONS", + config.rules.link + ) + + if ( + config.rules.matchNotification.isDefined && config.rules.matchNotification.get.deterministic.isDefined + ) { + config.rules.matchNotification.get.deterministic.get.foreach( + (name, rule) => emitRuleFunction(name, rule) + ) } - if (config.rules.matchNotification.isDefined && config.rules.matchNotification.get.probabilistic.isDefined && config.rules.matchNotification.get.probabilistic.isDefined) { - config.rules.matchNotification.get.probabilistic.get.foreach((name, rule) => emitRuleFunction(name, rule)) + if ( + config.rules.matchNotification.isDefined && config.rules.matchNotification.get.probabilistic.isDefined && config.rules.matchNotification.get.probabilistic.isDefined + ) { + config.rules.matchNotification.get.probabilistic.get.foreach( + (name, rule) => emitRuleFunction(name, rule) + ) } - emitFindCandidates("Match", "DETERMINISTIC_MATCH_FUNCTIONS", config.rules.matchNotification) + emitFindCandidates( + "Match", + "DETERMINISTIC_MATCH_FUNCTIONS", + config.rules.matchNotification + ) - 
writer.println( - s""" private $custom_className() { + writer.println(s""" private $custom_className() { | } | |}""".stripMargin) writer.flush() writer.close() - def getDeterministicFunctions(allRules: AllRules): String = { allRules.deterministic.get - .map((name, _) => s"""${" " * 14}CustomDgraphQueries::${Utils.snakeCaseToCamelCase(name.toLowerCase)},""".stripMargin) - .mkString("\n") + .map((name, _) => + s"""${" " * 14}CustomDgraphQueries::${Utils.snakeCaseToCamelCase( + name.toLowerCase + )},""".stripMargin + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) } - def emitFindCandidates(funcQualifier: String, filterList: String, rules: Option[AllRules]): Unit = { + def emitFindCandidates( + funcQualifier: String, + filterList: String, + rules: Option[AllRules] + ): Unit = { writer.println( s""" static List find${funcQualifier}Candidates( | final CustomDemographicData interaction) { @@ -105,16 +153,18 @@ object CustomDgraphQueries { | if (!result.isEmpty()) { | return result; | } - | result = new LinkedList<>();""".stripMargin) + | result = new LinkedList<>();""".stripMargin + ) if (rules.isDefined && rules.get.probabilistic.isDefined) { rules.get.probabilistic.get.foreach((name, _) => { val filterName = Utils.snakeCaseToCamelCase(name.toLowerCase) val vars = "interaction" - writer.println(s"""${" " * 6}mergeCandidates(result, $filterName($vars));""".stripMargin) + writer.println( + s"""${" " * 6}mergeCandidates(result, $filterName($vars));""".stripMargin + ) }) } - writer.println( - """ return result; + writer.println(""" return result; | } |""".stripMargin) } @@ -125,11 +175,17 @@ object CustomDgraphQueries { expr match { case Ast.Or(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) expression(k._1) else " && " + expression(k._1)).mkString + + (for (k <- x.zipWithIndex) + yield + if (k._2 == 0) expression(k._1) + else " && " + expression(k._1)).mkString + ")" case Ast.And(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) 
expression(k._1) else " || " + expression(k._1)).mkString + + (for (k <- x.zipWithIndex) + yield + if (k._2 == 0) expression(k._1) + else " || " + expression(k._1)).mkString + ")" case Ast.Not(x) => "NOT (" + expression(x) + ")" @@ -150,41 +206,49 @@ object CustomDgraphQueries { val v = vars(0) writer.println( s""" private static DgraphGoldenRecords $functionName(final CustomDemographicData demographicData) { - | if (StringUtils.isBlank(demographicData.${Utils.snakeCaseToCamelCase(v)})) { + | if (StringUtils.isBlank(demographicData.${Utils + .snakeCaseToCamelCase(v)})) { | return new DgraphGoldenRecords(List.of()); | } - | final Map map = Map.of("$$$v", demographicData.${Utils.snakeCaseToCamelCase(v)}); + | final Map map = Map.of("$$$v", demographicData.${Utils + .snakeCaseToCamelCase(v)}); | return runGoldenRecordsQuery($name, map); | } - |""".stripMargin) + |""".stripMargin + ) else val expr = expression(ParseRule.parse(text)) - writer.println(s" private static DgraphGoldenRecords $functionName(final CustomDemographicData demographicData) {") + writer.println( + s" private static DgraphGoldenRecords $functionName(final CustomDemographicData demographicData) {" + ) vars.foreach(v => { val camelCaseVarName = Utils.snakeCaseToCamelCase(v) - writer.println(s" final var $camelCaseVarName = demographicData.$camelCaseVarName;") + writer.println( + s" final var $camelCaseVarName = demographicData.$camelCaseVarName;" + ) }) vars.foreach(v => { val camelCaseVarName = Utils.snakeCaseToCamelCase(v) val isNullVar = camelCaseVarName + s"IsBlank" - writer.println(s" final var $isNullVar = StringUtils.isBlank($camelCaseVarName);") + writer.println( + s" final var $isNullVar = StringUtils.isBlank($camelCaseVarName);" + ) }) - writer.print( - s""" if ($expr) { + writer.print(s""" if ($expr) { | return new DgraphGoldenRecords(List.of()); | } | final var map = Map.of(""".stripMargin) vars.zipWithIndex.foreach((v, idx) => { val camelCaseVarName = Utils.snakeCaseToCamelCase(v) - 
writer.println( - s"""${" " * (if (idx == 0) 0 else 29)}"$$$v", + writer.println(s"""${" " * (if (idx == 0) 0 else 29)}"$$$v", |${" " * 29}StringUtils.isNotBlank($camelCaseVarName) |${" " * 29} ? $camelCaseVarName - |${" " * 29} : DgraphQueries.EMPTY_FIELD_SENTINEL${if (idx + 1 < vars.length) "," else ");"}""".stripMargin) + |${" " * 29} : DgraphQueries.EMPTY_FIELD_SENTINEL${ + if (idx + 1 < vars.length) "," else ");" + }""".stripMargin) }) - writer.println( - s""" return runGoldenRecordsQuery($name, map); + writer.println(s""" return runGoldenRecordsQuery($name, map); | } |""".stripMargin) end if @@ -195,17 +259,27 @@ object CustomDgraphQueries { val vars = for (v <- rule.vars) yield v val text = rule.text val expression: Ast.Expression = ParseRule.parse(text) - val varsMap = vars.zipWithIndex.toMap[String, Int].map((k, i) => (k, ("A".head + i).toChar.toString)) + val varsMap = vars.zipWithIndex + .toMap[String, Int] + .map((k, i) => (k, ("A".head + i).toChar.toString)) var meta = Map[String, (String, Option[Integer])]() def main_func(expression: Ast.Expression): String = { expression match { - case Ast.Or(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) main_func(k._1) else " OR " + main_func(k._1)).mkString - + ")" - case Ast.And(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) main_func(k._1) else " AND " + main_func(k._1)).mkString - + ")" + case Ast.Or(x) => + "(" + + (for (k <- x.zipWithIndex) + yield + if (k._2 == 0) main_func(k._1) + else " OR " + main_func(k._1)).mkString + + ")" + case Ast.And(x) => + "(" + + (for (k <- x.zipWithIndex) + yield + if (k._2 == 0) main_func(k._1) + else " AND " + main_func(k._1)).mkString + + ")" case Ast.Not(x) => "NOT (" + main_func(x) + ")" case Ast.Match(variable, distance) => @@ -224,13 +298,15 @@ object CustomDgraphQueries { val fn = meta(v)._1 writer.println( s"""${" " * 12}all(func:type(GoldenRecord)) @filter($fn(GoldenRecord.$v, $$$v${ - if (meta(v)._2.isDefined) ", " + meta(v)._2.get else 
- "" - })) { + if (meta(v)._2.isDefined) ", " + meta(v)._2.get + else + "" + })) { |${" " * 15}uid |${" " * 15}GoldenRecord.source_id { |${" " * 18}uid - |${" " * 15}}""".stripMargin) + |${" " * 15}}""".stripMargin + ) if (config.uniqueGoldenRecordFields.isDefined) { config.uniqueGoldenRecordFields.get.foreach(field => { writer.println(s"${" " * 15}GoldenRecord.${field.fieldName}") @@ -248,15 +324,17 @@ object CustomDgraphQueries { val fn = meta(v)._1 writer.println( s"""${" " * 12}var(func:type(GoldenRecord)) @filter($fn(GoldenRecord.$v, $$$v${ - if (meta(v)._2.isDefined) ", " + meta(v)._2.get else "" - })) { + if (meta(v)._2.isDefined) ", " + meta(v)._2.get else "" + })) { |${" " * 15}${varsMap(v)} as uid - |${" " * 12}}""".stripMargin) + |${" " * 12}}""".stripMargin + ) }) - writer.println( - s"""${" " * 12}all(func:type(GoldenRecord)) @filter${ - if (all_func_str.startsWith("(")) "" else "(" - }$all_func_str${if (all_func_str.startsWith("(")) "" else "("} { + writer.println(s"""${" " * 12}all(func:type(GoldenRecord)) @filter${ + if (all_func_str.startsWith("(")) "" else "(" + }$all_func_str${ + if (all_func_str.startsWith("(")) "" else "(" + } { |${" " * 15}uid |${" " * 15}GoldenRecord.source_id { |${" " * 18}uid @@ -280,7 +358,8 @@ object CustomDgraphQueries { writer.print( s"""${" " * 3}private static final String ${name.toUpperCase} = |${" " * 9}\"\"\" - |${" " * 9}query ${name.toLowerCase}(""".stripMargin) + |${" " * 9}query ${name.toLowerCase}(""".stripMargin + ) vars.zipWithIndex.foreach((v, i) => { if (i > 0) @@ -295,15 +374,13 @@ object CustomDgraphQueries { else createFilterFunc(all_func_str) end if - writer.println( - s"""${" " * 9}} + writer.println(s"""${" " * 9}} |${" " * 9}\"\"\"; |""".stripMargin) } def emitMergeCandidates(): Unit = { - writer.println( - s""" private static void mergeCandidates( + writer.println(s""" private static void mergeCandidates( | final List goldenRecords, | final DgraphGoldenRecords block) { | final var candidates = 
block.all(); diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphReverseGoldenRecord.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphReverseGoldenRecord.scala index 1a743c9eb..be9f5b523 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphReverseGoldenRecord.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomDgraphReverseGoldenRecord.scala @@ -4,17 +4,18 @@ import java.io.{File, PrintWriter} private object CustomDgraphReverseGoldenRecord { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph" private val customClassName = "CustomDgraphReverseGoldenRecord" private val packageText = "org.jembi.jempi.libmpi.dgraph" def generate(config: Config): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s"""package $packageText; + writer.println(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; |import com.fasterxml.jackson.annotation.JsonProperty; @@ -50,21 +51,22 @@ private object CustomDgraphReverseGoldenRecord { writer.flush() writer.close() - def goldenRecordFields(): String = - def mapField(fieldName: String, fieldType: String): String = s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils.javaType(fieldType)} ${Utils.snakeCaseToCamelCase(fieldName)},""" + def mapField(fieldName: String, fieldType: String): String = + s"""${" " * 6}@JsonProperty(CustomDgraphConstants.PREDICATE_GOLDEN_RECORD_${fieldName.toUpperCase}) ${Utils + .javaType(fieldType)} 
${Utils.snakeCaseToCamelCase(fieldName)},""" - val f1 = if (config.uniqueGoldenRecordFields.isEmpty) "" else config - .uniqueGoldenRecordFields - .get - .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + "\n" + val f1 = + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => mapField(f.fieldName, f.fieldType)) + .mkString(sys.props("line.separator")) + sys.props("line.separator") - val f2 = config - .demographicFields + val f2 = config.demographicFields .map(f => mapField(f.fieldName, f.fieldType)) - .mkString("\n") + .mkString(sys.props("line.separator")) f1 + f2 @@ -72,33 +74,39 @@ private object CustomDgraphReverseGoldenRecord { def addFields(): Unit = { val margin = 6 - config.demographicFields.zipWithIndex.foreach { - case (field, idx) => - val propertyName = s"GoldenRecord.${field.fieldName}" - val parameterName = Utils.snakeCaseToCamelCase(field.fieldName) - val parameterType = field.fieldType - writer.println( - s"""${" " * margin}@JsonProperty("$propertyName") $parameterType $parameterName,""".stripMargin) + config.demographicFields.zipWithIndex.foreach { case (field, idx) => + val propertyName = s"GoldenRecord.${field.fieldName}" + val parameterName = Utils.snakeCaseToCamelCase(field.fieldName) + val parameterType = field.fieldType + writer.println( + s"""${" " * margin}@JsonProperty("$propertyName") $parameterType $parameterName,""".stripMargin + ) } - writer.println(s"""${" " * margin}@JsonProperty("~GoldenRecord.interactions|score") Float score) {""") + writer.println( + s"""${" " * margin}@JsonProperty("~GoldenRecord.interactions|score") Float score) {""" + ) } def uniqueArguments(): String = - if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get .map(f => - s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - 
.mkString("\n").trim.dropRight(1) + s"""${" " * 63}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end uniqueArguments def demographicArguments(): String = - config - .demographicFields + config.demographicFields .map(f => - s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""") - .mkString("\n").trim.dropRight(1) + s"""${" " * 56}this.${Utils.snakeCaseToCamelCase(f.fieldName)}(),""" + ) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) end demographicArguments end generate diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomFieldTallies.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomFieldTallies.scala new file mode 100644 index 000000000..88a75fc01 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomFieldTallies.scala @@ -0,0 +1,156 @@ +package configuration + +import java.io.{File, PrintWriter} + +private object CustomFieldTallies { + + private val classLocation = + "../JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models" + private val customClassName = "CustomFieldTallies" + private val packageSharedModels = "org.jembi.jempi.shared.models" + + def generate(config: Config): Unit = + + def fieldParameters(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 6}FieldTally ${fieldName},""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fieldParameters + + def sumFields(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 36}this.${fieldName}.sum(r.${fieldName}),""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end sumFields + + def logFields(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 
6}logMU("${fieldName}", ${fieldName});""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end logFields + + def getFieldTally(): String = + config.demographicFields + .map(f => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 36}getFieldTally(recordsMatch, left.${fieldName}, right.${fieldName}),""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end getFieldTally + + val classFile: String = + classLocation + File.separator + customClassName + ".java" + println("Creating " + classFile) + val file: File = new File(classFile) + val writer: PrintWriter = new PrintWriter(file) + writer.print( + s"""package $packageSharedModels; + | + |import org.apache.commons.lang3.StringUtils; + |import org.apache.commons.text.similarity.JaroWinklerSimilarity; + |import org.apache.logging.log4j.LogManager; + |import org.apache.logging.log4j.Logger; + | + |public record $customClassName( + | ${fieldParameters()}) { + | + | private static final Logger LOGGER = LogManager.getFormatterLogger(CustomFieldTallies.class); + | private static final JaroWinklerSimilarity JARO_WINKLER_SIMILARITY = new JaroWinklerSimilarity(); + | private static final FieldTally A = new FieldTally(1L, 0L, 0L, 0L); + | private static final FieldTally B = new FieldTally(0L, 1L, 0L, 0L); + | private static final FieldTally C = new FieldTally(0L, 0L, 1L, 0L); + | private static final FieldTally D = new FieldTally(0L, 0L, 0L, 1L); + | public static final CustomFieldTallies.FieldTally FIELD_TALLY_SUM_IDENTITY = new CustomFieldTallies.FieldTally(0L, 0L, 0L, 0L); + | public static final CustomFieldTallies CUSTOM_FIELD_TALLIES_SUM_IDENTITY = new CustomFieldTallies( + | ${("FIELD_TALLY_SUM_IDENTITY," * config.demographicFields.length) + .split(",") + .mkString("", "," + sys.props("line.separator") + " " * 6, "")}); + | + | private static FieldTally getFieldTally( + | final boolean recordsMatch, + | final String left, + | final String right) { + | if 
(StringUtils.isEmpty(left) || StringUtils.isEmpty(right)) { + | return FIELD_TALLY_SUM_IDENTITY; + | } + | final var fieldMatches = JARO_WINKLER_SIMILARITY.apply(left.toLowerCase(), right.toLowerCase()) >= 0.97; + | if (recordsMatch) { + | if (fieldMatches) { + | return A; + | } else { + | return B; + | } + | } else { + | if (fieldMatches) { + | return C; + | } else { + | return D; + | } + | } + | } + | + | private static void logMU( + | final String tag, + | final CustomFieldTallies.FieldTally fieldTally) { + | LOGGER.debug("%-15s %,.5f %,.5f", + | tag, + | fieldTally.a().doubleValue() / (fieldTally.a().doubleValue() + fieldTally.b().doubleValue()), + | fieldTally.c().doubleValue() / (fieldTally.c().doubleValue() + fieldTally.d().doubleValue())); + | } + | + | public static CustomFieldTallies map( + | final boolean recordsMatch, + | final CustomDemographicData left, + | final CustomDemographicData right) { + | return new CustomFieldTallies(${getFieldTally()}); + | } + | + | public void logFieldMU() { + | LOGGER.debug("Tally derived M&U's"); + | ${logFields()}; + | } + | + | public CustomFieldTallies sum(final CustomFieldTallies r) { + | return new CustomFieldTallies(${sumFields()}); + | } + | + | public record FieldTally( + | Long a, + | Long b, + | Long c, + | Long d) { + | + | FieldTally sum(final FieldTally r) { + | return new FieldTally(this.a + r.a, + | this.b + r.b, + | this.c + r.c, + | this.d + r.d); + | } + | + | } + | + |} + |""".stripMargin + ) + writer.flush() + writer.close() + end generate + +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerBackEnd.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerBackEnd.scala index aeb4bf3bf..f3660d3ec 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerBackEnd.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerBackEnd.scala @@ -3,16 +3,17 @@ package configuration import 
java.io.{File, PrintWriter} import scala.language.{existentials, postfixOps} - object CustomLinkerBackEnd { - private val classLocation = "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend" + private val classLocation = + "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend" private val custom_className = "CustomLinkerBackEnd" private val packageText = "org.jembi.jempi.linker.backend" def generate(config: Config): Any = { - val classFile: String = classLocation + File.separator + custom_className + ".java" + val classFile: String = + classLocation + File.separator + custom_className + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) @@ -20,26 +21,35 @@ object CustomLinkerBackEnd { def createGenerateFunctions(): String = { config.demographicFields .filter(f => f.source.isDefined && f.source.get.generate.isDefined) - .map(f => s""" public static final Supplier GENERATE_${Utils.camelCaseToSnakeCase(Utils.snakeCaseToCamelCase(f.fieldName)).toUpperCase} = ${f.source.get.generate.get.func};""".stripMargin) - .mkString("\n") + .map(f => + s""" public static final Supplier GENERATE_${Utils + .camelCaseToSnakeCase(Utils.snakeCaseToCamelCase(f.fieldName)) + .toUpperCase} = ${f.source.get.generate.get.func};""".stripMargin + ) + .mkString(sys.props("line.separator")) } - def createApplyFunctions() : String = { + def createApplyFunctions(): String = { - def applyFields() : String = { + def applyFields(): String = { - def applyFunction(f : DemographicField) : String = { + def applyFunction(f: DemographicField): String = { if (f.source.isDefined && f.source.get.generate.isDefined) { - s"""GENERATE_${Utils.camelCaseToSnakeCase(Utils.snakeCaseToCamelCase(f.fieldName)).toUpperCase()}.get()""".stripMargin - } else { - s"""interaction.demographicData().${Utils.snakeCaseToCamelCase(f.fieldName)}""".stripMargin + s"""GENERATE_${Utils + 
.camelCaseToSnakeCase(Utils.snakeCaseToCamelCase(f.fieldName)) + .toUpperCase()}.get()""".stripMargin + } else { + s"""interaction.demographicData().${Utils.snakeCaseToCamelCase( + f.fieldName + )}""".stripMargin } } - config.demographicFields.map(f => s"""${" " * 55}${applyFunction(f)},""".stripMargin) - .mkString("\n") - .drop(55) // drop 55 spaces - .dropRight(1); // drop the comma + config.demographicFields + .map(f => s"""${" " * 55}${applyFunction(f)},""".stripMargin) + .mkString(sys.props("line.separator")) + .drop(55) // drop 55 spaces + .dropRight(1); // drop the comma } @@ -56,14 +66,13 @@ object CustomLinkerBackEnd { .foreach(f => { var t = (f.fieldName, f.linkMetaData.get.m, f.linkMetaData.get.u) }) - val muList = for ( - t <- config.demographicFields.filter(f => f.linkMetaData.isDefined) - ) yield t + val muList = + for (t <- config.demographicFields.filter(f => f.linkMetaData.isDefined)) + yield t writer.println(s"package $packageText;") writer.println() - writer.println( - s"""import org.jembi.jempi.libmpi.LibMPI; + writer.println(s"""import org.jembi.jempi.libmpi.LibMPI; |import org.jembi.jempi.shared.models.CustomDemographicData; |import org.jembi.jempi.shared.models.Interaction; |import org.jembi.jempi.shared.utils.AppUtils; @@ -96,27 +105,31 @@ object CustomLinkerBackEnd { val fieldName = Utils.snakeCaseToCamelCase(field_name) writer.println( s"""${" " * 6}k += LinkerDWH.helperUpdateGoldenRecordField(libMPI, interactionId, expandedGoldenRecord, - |${" " * 6} "$fieldName", demographicData.$fieldName, CustomDemographicData::get${fieldName.charAt(0).toUpper}${fieldName.substring(1)}) + |${" " * 6} "$fieldName", demographicData.$fieldName, CustomDemographicData::get${fieldName + .charAt(0) + .toUpper}${fieldName.substring(1)}) |${" " * 12}? 
1 - |${" " * 12}: 0;""".stripMargin) + |${" " * 12}: 0;""".stripMargin + ) }) - writer.println( - s""" + writer.println(s""" |${" " * 6}if (k > 0) { |${" " * 6} LinkerDWH.helperUpdateInteractionsScore(libMPI, threshold, expandedGoldenRecord); |${" " * 6}}""".stripMargin) writer.println() - config.demographicFields.filter(field => field.isList.isDefined && field.isList.get).foreach(field => { - val field_name = field.fieldName - val fieldName = Utils.snakeCaseToCamelCase(field_name) - writer.println( - s"""${" " * 6}backEnd.updateGoldenRecordListField(expandedGoldenRecord, "GoldenRecord.$field_name", + config.demographicFields + .filter(field => field.isList.isDefined && field.isList.get) + .foreach(field => { + val field_name = field.fieldName + val fieldName = Utils.snakeCaseToCamelCase(field_name) + writer.println( + s"""${" " * 6}backEnd.updateGoldenRecordListField(expandedGoldenRecord, "GoldenRecord.$field_name", |${" " * 42}expandedGoldenRecord.entity().$fieldName(), - |${" " * 42}CustomDocEntity::$fieldName);""".stripMargin) - }) + |${" " * 42}CustomDocEntity::$fieldName);""".stripMargin + ) + }) - writer.println( - s""" } + writer.println(s""" } | |}""".stripMargin) writer.flush() diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerDeterministic.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerDeterministic.scala index 62e975fad..445fddb2d 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerDeterministic.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerDeterministic.scala @@ -3,49 +3,65 @@ package configuration import java.io.{File, PrintWriter} import scala.language.{existentials, postfixOps} - object CustomLinkerDeterministic { - private val classLocation = "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend" + private val classLocation = + "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend" private val 
custom_className = "CustomLinkerDeterministic" private val packageText = "org.jembi.jempi.linker.backend" def generate(config: Config): Any = { - val classFile: String = classLocation + File.separator + custom_className + ".java" + val classFile: String = + classLocation + File.separator + custom_className + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - def emitCanApplyLinking(rules: Map[String, Rule]): Unit = { writer.print( s""" static boolean canApplyLinking( | final CustomDemographicData interaction) { - | return CustomLinkerProbabilistic.PROBABILISTIC_DO_LINKING""".stripMargin) + | return CustomLinkerProbabilistic.PROBABILISTIC_DO_LINKING""".stripMargin + ) rules.zipWithIndex.foreach((rule, rule_idx) => { - writer.print( - s""" + writer.print(s""" | || """.stripMargin) rule._2.vars.zipWithIndex.foreach((field, var_idx) => - writer.print(s"StringUtils.isNotBlank(interaction.${Utils.snakeCaseToCamelCase(field)})${if (var_idx + 1 < rule._2.vars.length) s"\n${" " * 13}&& " else ""}") + writer.print( + s"StringUtils.isNotBlank(interaction.${Utils.snakeCaseToCamelCase(field)})${ + if (var_idx + 1 < rule._2.vars.length) + s"${sys.props("line.separator")}${" " * 13}&& " + else "" + }" + ) ) }) - writer.println( - s"""; + writer.println(s"""; | } |""".stripMargin) } - def emitDeterministicMatch(funcName: String, map: Map[String, Rule]): Unit = { + def emitDeterministicMatch( + funcName: String, + map: Map[String, Rule] + ): Unit = { def checkNullExpression(expr: Ast.Expression): String = { expr match { - case Ast.Or(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) checkNullExpression(k._1) else " || " + checkNullExpression(k._1)).mkString - + ")" - case Ast.And(x) => "(" - + (for (k <- x.zipWithIndex) yield if (k._2 == 0) checkNullExpression(k._1) else " && " + checkNullExpression(k._1)).mkString - + ")" + case Ast.Or(x) => + "(" + + (for (k <- x.zipWithIndex) + yield + if 
(k._2 == 0) checkNullExpression(k._1) + else " || " + checkNullExpression(k._1)).mkString + + ")" + case Ast.And(x) => + "(" + + (for (k <- x.zipWithIndex) + yield + if (k._2 == 0) checkNullExpression(k._1) + else " && " + checkNullExpression(k._1)).mkString + + ")" case Ast.Not(x) => "NOT (" + checkNullExpression(x) + ")" case Ast.Match(variable, _) => @@ -63,14 +79,12 @@ object CustomLinkerDeterministic { } } - writer.println( - s""" static boolean $funcName( + writer.println(s""" static boolean $funcName( | final CustomDemographicData goldenRecord, | final CustomDemographicData interaction) {""".stripMargin) if (map.isEmpty) { - writer.println( - s""" return false; + writer.println(s""" return false; | } |""".stripMargin) } else { @@ -86,17 +100,14 @@ object CustomLinkerDeterministic { writer.println(" " * 6 + s"final var $right = interaction.$field;") }) if (index < z.size - 1) { - writer.println( - s""" if ($expr_1) { + writer.println(s""" if ($expr_1) { | return true; | }""".stripMargin) } else { - writer.println( - s""" return $expr_1;""".stripMargin) + writer.println(s""" return $expr_1;""".stripMargin) } }) - writer.println( - """ } + writer.println(""" } |""".stripMargin) } @@ -111,9 +122,15 @@ object CustomLinkerDeterministic { | |final class $custom_className { | - | static final boolean DETERMINISTIC_DO_LINKING = ${if (config.rules.link.get.deterministic.nonEmpty) "true" else "false"}; - | static final boolean DETERMINISTIC_DO_VALIDATING = ${if (config.rules.validate.nonEmpty) "true" else "false"}; - | static final boolean DETERMINISTIC_DO_MATCHING = ${if (config.rules.matchNotification.nonEmpty) "true" else "false"}; + | static final boolean DETERMINISTIC_DO_LINKING = ${ + if (config.rules.link.get.deterministic.nonEmpty) "true" else "false" + }; + | static final boolean DETERMINISTIC_DO_VALIDATING = ${ + if (config.rules.validate.nonEmpty) "true" else "false" + }; + | static final boolean DETERMINISTIC_DO_MATCHING = ${ + if 
(config.rules.matchNotification.nonEmpty) "true" else "false" + }; | | private $custom_className() { | } @@ -123,11 +140,25 @@ object CustomLinkerDeterministic { | final String right) { | return StringUtils.isNotBlank(left) && StringUtils.equals(left, right); | } - |""".stripMargin) + |""".stripMargin + ) emitCanApplyLinking(config.rules.link.get.deterministic.get) - emitDeterministicMatch("linkDeterministicMatch", config.rules.link.get.deterministic.get) - emitDeterministicMatch("validateDeterministicMatch", if (config.rules.validate.isDefined) config.rules.validate.get.deterministic else Map.empty[String, Rule]) - emitDeterministicMatch("matchNotificationDeterministicMatch", if (config.rules.matchNotification.isDefined) config.rules.matchNotification.get.deterministic.get else Map.empty[String, Rule]) + emitDeterministicMatch( + "linkDeterministicMatch", + config.rules.link.get.deterministic.get + ) + emitDeterministicMatch( + "validateDeterministicMatch", + if (config.rules.validate.isDefined) + config.rules.validate.get.deterministic + else Map.empty[String, Rule] + ) + emitDeterministicMatch( + "matchNotificationDeterministicMatch", + if (config.rules.matchNotification.isDefined) + config.rules.matchNotification.get.deterministic.get + else Map.empty[String, Rule] + ) writer.println("}") writer.flush() writer.close() diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerMU.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerMU.scala index a4b0a82d5..faf5785cf 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerMU.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerMU.scala @@ -3,34 +3,33 @@ package configuration import java.io.{File, PrintWriter} import scala.language.{existentials, postfixOps} - object CustomLinkerMU { - private val classLocation = "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker" + private val classLocation = + 
"../JeMPI_Linker/src/main/java/org/jembi/jempi/linker" private val custom_className = "CustomLinkerMU" private val packageText = "org.jembi.jempi.linker" def generate(config: Config): Any = { - val classFile: String = classLocation + File.separator + custom_className + ".java" + val classFile: String = + classLocation + File.separator + custom_className + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - val muList = for ( - t <- config.demographicFields.filter(f => f.linkMetaData.isDefined) - ) yield t + val muList = + for (t <- config.demographicFields.filter(f => f.linkMetaData.isDefined)) + yield t writer.println(s"package $packageText;") writer.println() if (muList.length == 0) { - writer.println( - s"""public class $custom_className { + writer.println(s"""public class $custom_className { |} |""".stripMargin) } else { - writer.println( - s"""import org.apache.commons.lang3.StringUtils; + writer.println(s"""import org.apache.commons.lang3.StringUtils; |import org.apache.commons.text.similarity.SimilarityScore; |import org.apache.commons.text.similarity.JaroWinklerSimilarity; |import org.apache.logging.log4j.LogManager; @@ -80,8 +79,7 @@ object CustomLinkerMU { | } |""".stripMargin) - writer.println( - s""" void updateMatchSums( + writer.println(s""" void updateMatchSums( | final CustomDemographicData patient, | final CustomDemographicData goldenRecord) {""".stripMargin) if (muList.nonEmpty) { @@ -89,25 +87,24 @@ object CustomLinkerMU { val fieldName = Utils.snakeCaseToCamelCase(mu.fieldName) writer.println( s" updateMatchedPair(fields.$fieldName, patient.$fieldName, goldenRecord.$fieldName" + - s");") + s");" + ) }) - writer.println( - """ LOGGER.debug("{}", fields); + writer.println(""" LOGGER.debug("{}", fields); | } |""".stripMargin) } - writer.println( - s""" void updateMissmatchSums( + writer.println(s""" void updateMissmatchSums( | final CustomDemographicData patient, | final 
CustomDemographicData goldenRecord) {""".stripMargin) muList.foreach(mu => { val fieldName = Utils.snakeCaseToCamelCase(mu.fieldName) writer.println( - s" updateUnMatchedPair(fields.$fieldName, patient.$fieldName, goldenRecord.$fieldName);") + s" updateUnMatchedPair(fields.$fieldName, patient.$fieldName, goldenRecord.$fieldName);" + ) }) - writer.println( - """ LOGGER.debug("{}", fields); + writer.println(""" LOGGER.debug("{}", fields); | } | | static class Field { @@ -128,10 +125,11 @@ object CustomLinkerMU { | static class Fields {""".stripMargin) muList.foreach(mu => { val fieldName = Utils.snakeCaseToCamelCase(mu.fieldName) - writer.println(s" final Field $fieldName = new Field(JARO_WINKLER_SIMILARITY, 0.92);") + writer.println( + s" final Field $fieldName = new Field(JARO_WINKLER_SIMILARITY, 0.92);" + ) }) - writer.println( - """ + writer.println(""" | private float computeM(final Field field) { | return (float) (field.matchedPairFieldMatched) | / (float) (field.matchedPairFieldMatched + field.matchedPairFieldUnmatched); @@ -143,8 +141,7 @@ object CustomLinkerMU { | } |""".stripMargin) - writer.println( - """ @Override + writer.println(""" @Override | public String toString() {""".stripMargin) if (muList.nonEmpty) { @@ -154,16 +151,18 @@ object CustomLinkerMU { // println(fmt) writer.println( - s""" return String.format(Locale.ROOT, "$fmt",""".stripMargin) + s""" return String.format(Locale.ROOT, "$fmt",""".stripMargin + ) muList.zipWithIndex.foreach((mu, idx) => { val fieldName = Utils.snakeCaseToCamelCase(mu.fieldName) - writer.println(s" computeM($fieldName), computeU($fieldName)" - + (if ((idx + 1) != muList.length) "," else ");")) + writer.println( + s" computeM($fieldName), computeU($fieldName)" + + (if ((idx + 1) != muList.length) "," else ");") + ) }) } - writer.println( - s""" } + writer.println(s""" } | | } | diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerProbabilistic.scala 
b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerProbabilistic.scala index 7f8e74c59..56f391eb8 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerProbabilistic.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomLinkerProbabilistic.scala @@ -3,7 +3,6 @@ package configuration import java.io.{File, PrintWriter} import scala.language.{existentials, postfixOps} - object CustomLinkerProbabilistic { private val classLocation = "../JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend" @@ -32,6 +31,8 @@ object CustomLinkerProbabilistic { writer.println(s"""package $packageText;""") writer.println( s""" + |import org.apache.logging.log4j.LogManager; + |import org.apache.logging.log4j.Logger; |import org.jembi.jempi.shared.models.CustomDemographicData; |import org.jembi.jempi.shared.models.CustomMU; | @@ -45,11 +46,15 @@ object CustomLinkerProbabilistic { | |final class $custom_className { | + | private static final Logger LOGGER = LogManager.getLogger(${custom_className}.class); | static final int METRIC_MIN = 0; | static final int METRIC_MAX = 1; | static final int METRIC_SCORE = 2; | static final int METRIC_MISSING_PENALTY = 3; - | static final boolean PROBABILISTIC_DO_LINKING = ${if (config.demographicFields.exists(x => x.linkMetaData.isDefined)) "true" else "false"}; + | static final boolean PROBABILISTIC_DO_LINKING = ${ + if (config.demographicFields.exists(x => x.linkMetaData.isDefined)) "true" + else "false" + }; | static final boolean PROBABILISTIC_DO_VALIDATING = ${if (config.demographicFields.exists(x => x.validateMetaData.isDefined)) "true" else "false"}; | static final boolean PROBABILISTIC_DO_MATCHING = ${if (config.demographicFields.exists(x => x.matchMetaData.isDefined)) "true" else "false"}; | @@ -61,7 +66,7 @@ object CustomLinkerProbabilistic { | } |""".stripMargin) - generateGetMU() + // generateGetMU() if (!linkMuList.isEmpty) { generateFieldsRecord("LinkFields", 
linkMuList) } @@ -86,6 +91,7 @@ object CustomLinkerProbabilistic { validateProbabilisticScore() matchNotificationProbabilisticScore() updateMU() + checkUpdatedMU() writer.println("}") writer.flush() @@ -97,59 +103,70 @@ object CustomLinkerProbabilistic { writer.println(" static CustomMU getMU() {") writer.println(" return new CustomMU(") linkMuList.zipWithIndex.foreach((mu, idx) => - writer.print(" " * 9 + s"LinkerProbabilistic.getProbability(currentLinkFields.${Utils.snakeCaseToCamelCase(mu.fieldName)})") - if (idx + 1 < linkMuList.length) - writer.println(",") - else - writer.println( - """); - | } - |""".stripMargin) - end if - ) + writer.print(" " * 9 + s"LinkerProbabilistic.getProbability(currentLinkFields.${ + Utils.snakeCaseToCamelCase(mu.fieldName) + })") + if (idx + 1 < linkMuList.length) + writer.println(",") + else + writer.println( + """); + | } + |""".stripMargin) + end if + ) end if end generateGetMU def generateFieldsRecord(recordName: String, demographicFields: Array[DemographicField]): Unit = writer.println(s" private record $recordName(") demographicFields.zipWithIndex.foreach((mu, idx) => - writer.print(s"""${" " * 9}LinkerProbabilistic.Field ${Utils.snakeCaseToCamelCase(mu.fieldName)}""") - if (idx + 1 < demographicFields.length) - writer.println(",") - else - writer.println( - s""") { - | }""".stripMargin) - writer.println() - end if - ) + writer.print(s"""${" " * 9}LinkerProbabilistic.Field ${Utils.snakeCaseToCamelCase(mu.fieldName)}""") + if (idx + 1 < demographicFields.length) + writer.println(",") + else + writer.println( + s""") { + | }""".stripMargin) + writer.println() + end if + ) end generateFieldsRecord - def generateCurrentFields(recordName: String, varName: String, linking: Boolean, demographicFields: Array[DemographicField]): Unit = + def generateCurrentFields(recordName: String, + varName: String, + linking: Boolean, + demographicFields: Array[DemographicField]): Unit = writer.print( s""" static $recordName $varName = | new 
$recordName( | """.stripMargin) var margin = 0 demographicFields.zipWithIndex.foreach((field, idx) => - if ((linking && field.linkMetaData.isDefined) || (!linking && field.validateMetaData.isDefined)) - val comparison = if (linking) field.linkMetaData.get.comparison else field.validateMetaData.get.comparison - val comparisonLevels = if (linking) field.linkMetaData.get.comparisonLevels else field.validateMetaData.get.comparisonLevels - val m: Double = if (linking) field.linkMetaData.get.m else field.validateMetaData.get.m - val u: Double = if (linking) field.linkMetaData.get.u else field.validateMetaData.get.u - - def extractComparisonList(levels: List[Double]): String = levels.map(level => s""" ${level.toString}F""".stripMargin).mkString(",").trim - - writer.print(" " * margin + s"new LinkerProbabilistic.Field($comparison, ${if (comparisonLevels.length == 1) "List.of(" else "Arrays.asList("}${extractComparisonList(comparisonLevels)}), ${m}F, ${u}F)") - if (idx + 1 < demographicFields.length) - writer.println(",") - margin = 9 - else - writer.println(");") - writer.println() - end if - end if - ) + if ((linking && field.linkMetaData.isDefined) || (!linking && field.validateMetaData.isDefined)) + val comparison = if (linking) field.linkMetaData.get.comparison else field.validateMetaData.get.comparison + val comparisonLevels = if (linking) field.linkMetaData.get.comparisonLevels + else field.validateMetaData.get.comparisonLevels + val m: Double = if (linking) field.linkMetaData.get.m else field.validateMetaData.get.m + val u: Double = if (linking) field.linkMetaData.get.u else field.validateMetaData.get.u + + def extractComparisonList(levels: List[Double]): String = levels.map(level => + s""" ${ + level.toString + }F""".stripMargin).mkString(",").trim + + writer.print(" " * margin + s"new LinkerProbabilistic.Field($comparison, ${ + if (comparisonLevels.length == 1) "List.of(" else "Arrays.asList(" + }${extractComparisonList(comparisonLevels)}), ${m}F, ${u}F)") + if 
(idx + 1 < demographicFields.length) + writer.println(",") + margin = 9 + else + writer.println(");") + writer.println() + end if + end if + ) end generateCurrentFields def linkProbabilisticScore(): Unit = @@ -165,10 +182,10 @@ object CustomLinkerProbabilistic { | final float[] metrics = {0, 0, 0, 1.0F};""".stripMargin) end if linkMuList.zipWithIndex.foreach((field, _) => - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - writer.println(" " * 6 + "LinkerProbabilistic.updateMetricsForStringField(metrics,") - writer.println(" " * 54 + s"goldenRecord.$fieldName, interaction.$fieldName, currentLinkFields" + - s".$fieldName);")) + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + writer.println(" " * 6 + "LinkerProbabilistic.updateMetricsForStringField(metrics,") + writer.println(" " * 54 + s"goldenRecord.$fieldName, interaction.$fieldName, currentLinkFields" + + s".$fieldName);")) if (!linkMuList.isEmpty) writer.println( s"""${" " * 6}return ((metrics[METRIC_SCORE] - metrics[METRIC_MIN]) / (metrics[METRIC_MAX] - metrics[METRIC_MIN])) * metrics[METRIC_MISSING_PENALTY]; @@ -196,7 +213,7 @@ object CustomLinkerProbabilistic { val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) writer.println(" " * 6 + "LinkerProbabilistic.updateMetricsForStringField(metrics,") writer.println(" " * 54 + s"goldenRecord.$fieldName, interaction.$fieldName, currentValidateFields" + - s".$fieldName);") + s".$fieldName);") }) writer.print( s"""${" " * 6}return ((metrics[METRIC_SCORE] - metrics[METRIC_MIN]) / (metrics[METRIC_MAX] - metrics[METRIC_MIN])) * metrics[METRIC_MISSING_PENALTY]; @@ -219,11 +236,11 @@ object CustomLinkerProbabilistic { | // min, max, score, missingPenalty | final float[] metrics = {0, 0, 0, 1.0F};""".stripMargin) matchNotificationMuList.foreach(field => - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - writer.println(" " * 6 + "LinkerProbabilistic.updateMetricsForStringField(metrics,") - writer.println(" " * 54 + 
s"goldenRecord.$fieldName, interaction.$fieldName, currentMatchNotificationFields" + - s".$fieldName);") - ) + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + writer.println(" " * 6 + "LinkerProbabilistic.updateMetricsForStringField(metrics,") + writer.println(" " * 54 + s"goldenRecord.$fieldName, interaction.$fieldName, currentMatchNotificationFields" + + s".$fieldName);") + ) writer.print( s"""${" " * 6}return ((metrics[METRIC_SCORE] - metrics[METRIC_MIN]) / (metrics[METRIC_MAX] - metrics[METRIC_MIN])) * metrics[METRIC_MISSING_PENALTY]; |""".stripMargin) @@ -234,36 +251,38 @@ object CustomLinkerProbabilistic { def updateMU(): Unit = writer.println(" public static void updateMU(final CustomMU mu) {") linkMuList.zipWithIndex.foreach((field, idx) => - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - if (idx == 0) - writer.print(" " * 6 + s"if (mu.$fieldName().m() > mu.$fieldName().u()") - else - writer.print(" " * 10 + s"&& mu.$fieldName().m() > mu.$fieldName().u()") - end if - if (idx + 1 < linkMuList.length) - writer.println() - else - writer.println(") {") - end if - ) + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + if (idx == 0) + writer.print(" " * 6 + s"if (mu.$fieldName().m() > mu.$fieldName().u()") + else + writer.print(" " * 10 + s"&& mu.$fieldName().m() > mu.$fieldName().u()") + end if + if (idx + 1 < linkMuList.length) + writer.println() + else + writer.println(") {") + end if + ) if (!linkMuList.isEmpty) writer.println(" " * 9 + "updatedLinkFields = new LinkFields(") linkMuList.zipWithIndex.foreach((field, idx) => - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - val comparison = field.linkMetaData.get.comparison - val comparisonLevels = field.linkMetaData.get.comparisonLevels - - def extractComparisonList(levels: List[Double]): String = { - levels.map(level => s""" ${level.toString}F""".stripMargin).mkString(",").trim - } - - writer.print(" " * 12 + s"new LinkerProbabilistic.Field($comparison, 
${if (comparisonLevels.length == 1) "List.of(" else "Arrays.asList("}${extractComparisonList(comparisonLevels)}), mu.$fieldName().m(), mu.$fieldName().u())") - if (idx + 1 < linkMuList.length) - writer.println(",") - else - writer.println(");") - end if - ) + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + val comparison = field.linkMetaData.get.comparison + val comparisonLevels = field.linkMetaData.get.comparisonLevels + + def extractComparisonList(levels: List[Double]): String = { + levels.map(level => s""" ${level.toString}F""".stripMargin).mkString(",").trim + } + + writer.print(" " * 12 + s"new LinkerProbabilistic.Field($comparison, ${ + if (comparisonLevels.length == 1) "List.of(" else "Arrays.asList(" + }${extractComparisonList(comparisonLevels)}), mu.$fieldName().m(), mu.$fieldName().u())") + if (idx + 1 < linkMuList.length) + writer.println(",") + else + writer.println(");") + end if + ) writer.println(" " * 6 + "}") end if writer.println(" }") @@ -271,6 +290,24 @@ object CustomLinkerProbabilistic { end updateMU + + def checkUpdatedMU(): Unit = + + def generateCode(): String = + return s""" if (updatedLinkFields != null) { + | LOGGER.info("Using updated Link MU values: {}", updatedLinkFields); + | CustomLinkerProbabilistic.currentLinkFields = updatedLinkFields; + | updatedLinkFields = null; + | }""".stripMargin + end generateCode + + writer.println( + s""" public static void checkUpdatedLinkMU() { + |${if (linkMuList.length > 0) generateCode() else ""} + | } + |""".stripMargin) + end checkUpdatedMU + } } diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomMU.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomMU.scala index 0016bc35d..3b7368c26 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomMU.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomMU.scala @@ -4,60 +4,49 @@ import java.io.{File, PrintWriter} private object CustomMU { - 
private val classLocation = "../JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models" + private val classLocation = + "../JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models" private val customClassName = "CustomMU" private val packageSharedModels = "org.jembi.jempi.shared.models" def generate(fields: Array[DemographicField]): Unit = - val classFile: String = classLocation + File.separator + customClassName + ".java" + val classFile: String = + classLocation + File.separator + customClassName + ".java" println("Creating " + classFile) val file: File = new File(classFile) val writer: PrintWriter = new PrintWriter(file) - writer.print( - s"""package $packageSharedModels; + writer.print(s"""package $packageSharedModels; | |import com.fasterxml.jackson.annotation.JsonInclude; | |@JsonInclude(JsonInclude.Include.NON_NULL) - |public record $customClassName(""".stripMargin) + |public record $customClassName(String tag, + |""".stripMargin) val margin = 23 val filteredFields = fields.filter(f => f.linkMetaData.isDefined) if (filteredFields.length == 0) - writer.println("Probability dummy) {") + writer.println(s""" Probability dummy) { + | + | public static final Boolean SEND_INTERACTIONS_TO_EM = false; + |""".stripMargin) else - filteredFields.zipWithIndex.foreach { - case (f, i) => - val parameterName = Utils.snakeCaseToCamelCase(f.fieldName) - if (i > 0) - writer.print(" " * margin) - end if - writer.print(s"Probability $parameterName") - if (i + 1 < filteredFields.length) - writer.println(",") - else - writer.println(") {") - end if + filteredFields.zipWithIndex.foreach { case (f, i) => + val parameterName = Utils.snakeCaseToCamelCase(f.fieldName) + writer.print(" " * margin) + writer.print(s"Probability $parameterName") + if (i + 1 < filteredFields.length) writer.println(",") + else + writer.println(") {") + writer.print( + s""" + | public static final Boolean SEND_INTERACTIONS_TO_EM = true; + |""".stripMargin + ) + end if } end if writer.println() - 
writer.println(s" public $customClassName(final double[] mHat, final double[] uHat) {") - if (filteredFields.length == 0) - writer.println(s" this(new $customClassName.Probability(0.0F, 0.0F));") - else - var s = s"""${" " * 6}this(""".stripMargin - filteredFields.zipWithIndex.foreach((_, idx) => - s += - s"""${" " * (if (idx > 0) 11 else 0)}new $customClassName.Probability((float) mHat[$idx], (float) uHat[$idx])${ - if (idx < filteredFields.length - 1) "," else ");" - } - |""".stripMargin - ) - writer.print(s); - end if - writer.println( - s""" } - | - | public record Probability(float m, float u) { + writer.println(s""" public record Probability(float m, float u) { | } | |}""".stripMargin) diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPatient.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPatient.scala index 58f899466..afdb2f8b5 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPatient.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPatient.scala @@ -4,15 +4,21 @@ import java.io.{File, PrintWriter} private object CustomPatient { - private val classLocation = "../JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models" + private val classLocation = + "../JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models" private val packageText = "org.jembi.jempi.shared.models" private val customClassNameCustomDemographicData = "CustomDemographicData" - private val customClassNameCustomUniqueGoldenRecordData = "CustomUniqueGoldenRecordData" - private val customClassNameCustomUniqueInteractionData = "CustomUniqueInteractionData" - - private val classCustomDemographicDataFile: String = classLocation + File.separator + customClassNameCustomDemographicData + ".java" - private val classCustomUniqueGoldenRecordDataFile: String = classLocation + File.separator + customClassNameCustomUniqueGoldenRecordData + ".java" - private val 
classCustomUniqueInteractionDataFile: String = classLocation + File.separator + customClassNameCustomUniqueInteractionData + ".java" + private val customClassNameCustomUniqueGoldenRecordData = + "CustomUniqueGoldenRecordData" + private val customClassNameCustomUniqueInteractionData = + "CustomUniqueInteractionData" + + private val classCustomDemographicDataFile: String = + classLocation + File.separator + customClassNameCustomDemographicData + ".java" + private val classCustomUniqueGoldenRecordDataFile: String = + classLocation + File.separator + customClassNameCustomUniqueGoldenRecordData + ".java" + private val classCustomUniqueInteractionDataFile: String = + classLocation + File.separator + customClassNameCustomUniqueInteractionData + ".java" private val indent = 3 @@ -26,11 +32,13 @@ private object CustomPatient { private def generateDemographicData(config: Config): Unit = def cleanedFields(config: Config): String = - config - .demographicFields + config.demographicFields .map(f => - s"""${" " * 39}this.${Utils.snakeCaseToCamelCase(f.fieldName)}.toLowerCase().replaceAll("\\\\W", ""),""") - .mkString("\n") + s"""${" " * 39}this.${Utils.snakeCaseToCamelCase( + f.fieldName + )}.trim().toLowerCase().replaceAll("\\\\W", ""),""" + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) end cleanedFields @@ -38,58 +46,67 @@ private object CustomPatient { println("Creating " + classCustomDemographicDataFile) val file: File = new File(classCustomDemographicDataFile) val writer: PrintWriter = new PrintWriter(file) - writer.print( - s"""package $packageText; + writer.print(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; | |@JsonInclude(JsonInclude.Include.NON_NULL) |public class $customClassNameCustomDemographicData { |""".stripMargin) - config.demographicFields.zipWithIndex.foreach { - case (field, _) => - val typeString = field.fieldType - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - writer.println(s"""${" " * 
(indent * 1)}public final $typeString $fieldName;""") + config.demographicFields.zipWithIndex.foreach { case (field, _) => + val typeString = field.fieldType + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + writer.println( + s"""${" " * (indent * 1)}public final $typeString $fieldName;""" + ) } writer.println() for (field <- config.demographicFields) { val typeString = field.fieldType val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) writer.println( - s"""${" " * (indent * 1)}public final $typeString get${fieldName.charAt(0).toUpper}${fieldName.substring(1)}() { + s"""${" " * (indent * 1)}public final $typeString get${fieldName + .charAt(0) + .toUpper}${fieldName.substring(1)}() { |${" " * (indent * 2)}return $fieldName; |${" " * (indent * 1)}} - |""".stripMargin) + |""".stripMargin + ) } - writer.println(s"""${" " * indent * 1}public $customClassNameCustomDemographicData() {""".stripMargin) + writer.println( + s"""${" " * indent * 1}public $customClassNameCustomDemographicData() {""".stripMargin + ) writer.println( s"""${" " * indent * 2}this(${"null, " * (config.demographicFields.length - 1)}null); |${" " * indent * 1}} - |""".stripMargin) + |""".stripMargin + ) writer.println( - s"""${" " * indent * 1}public $customClassNameCustomDemographicData(""".stripMargin) - config.demographicFields.zipWithIndex.foreach { - case (field, idx) => - val typeString = field.fieldType - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - writer.println( - s"""${" " * indent * 2}final $typeString $fieldName${if (idx < config.demographicFields.length - 1) ',' else ") {"}""".stripMargin) + s"""${" " * indent * 1}public $customClassNameCustomDemographicData(""".stripMargin + ) + config.demographicFields.zipWithIndex.foreach { case (field, idx) => + val typeString = field.fieldType + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + writer.println(s"""${" " * indent * 2}final $typeString $fieldName${ + if (idx < 
config.demographicFields.length - 1) ',' + else ") {" + }""".stripMargin) } - config.demographicFields.zipWithIndex.foreach { - case (field, _) => - // val typeString = field.fieldType - val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) - writer.println( - s"""${" " * indent * 3}this.$fieldName = $fieldName;""".stripMargin) + config.demographicFields.zipWithIndex.foreach { case (field, _) => + // val typeString = field.fieldType + val fieldName = Utils.snakeCaseToCamelCase(field.fieldName) + writer.println( + s"""${" " * indent * 3}this.$fieldName = $fieldName;""".stripMargin + ) } - writer.println( - s"""${" " * indent * 1}} + writer.println(s"""${" " * indent * 1}} | | public $customClassNameCustomDemographicData clean() { - | return new $customClassNameCustomDemographicData(${cleanedFields(config)}); + | return new $customClassNameCustomDemographicData(${cleanedFields( + config + )}); | } | |}""".stripMargin) @@ -101,27 +118,54 @@ private object CustomPatient { private def generateUniqueGoldenRecordData(config: Config): Unit = def fields(config: Config): String = - if (config.uniqueGoldenRecordFields.isEmpty) "" else - config - .uniqueGoldenRecordFields - .get + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get .map(f => - s"""${" " * 43}${Utils.javaType(f.fieldType)} ${Utils.snakeCaseToCamelCase(f.fieldName)},""") - .mkString("\n") + s"""${" " * 43}${Utils.javaType(f.fieldType)} ${Utils + .snakeCaseToCamelCase(f.fieldName)},""" + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) end fields + def fromInteraction(): String = + if (config.uniqueGoldenRecordFields.isEmpty) "" + else + config.uniqueGoldenRecordFields.get + .map(f => + if (f.source.isEmpty) "" + else + s""", + |${" " * 9}uniqueInteractionData.${Utils.snakeCaseToCamelCase( + f.source.get + )}()""".stripMargin + ) + .mkString(sys.props("line.separator")) + .trim + end fromInteraction + println("Creating " + 
classCustomUniqueGoldenRecordDataFile) val file: File = new File(classCustomUniqueGoldenRecordDataFile) val writer: PrintWriter = new PrintWriter(file) - writer.print( - s"""package $packageText; + writer.print(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; | + |import java.time.LocalDateTime; + | |@JsonInclude(JsonInclude.Include.NON_NULL) - |public record $customClassNameCustomUniqueGoldenRecordData(${fields(config)}) { + |public record $customClassNameCustomUniqueGoldenRecordData(${fields( + config + )}) { + | + | public CustomUniqueGoldenRecordData(final CustomUniqueInteractionData uniqueInteractionData) { + | this(LocalDateTime.now(), + | true${fromInteraction()} + | ); + | } + | |} |""".stripMargin) writer.flush() @@ -131,13 +175,14 @@ private object CustomPatient { private def generateUniqueInteractionData(config: Config): Unit = def fields(config: Config): String = - if (config.uniqueInteractionFields.isEmpty) "" else - config - .uniqueInteractionFields - .get + if (config.uniqueInteractionFields.isEmpty) "" + else + config.uniqueInteractionFields.get .map(f => - s"""${" " * 42}${Utils.javaType(f.fieldType)} ${Utils.snakeCaseToCamelCase(f.fieldName)},""") - .mkString("\n") + s"""${" " * 42}${Utils.javaType(f.fieldType)} ${Utils + .snakeCaseToCamelCase(f.fieldName)},""" + ) + .mkString(sys.props("line.separator")) .trim .dropRight(1) end fields @@ -145,13 +190,14 @@ private object CustomPatient { println("Creating " + classCustomUniqueInteractionDataFile) val file: File = new File(classCustomUniqueInteractionDataFile) val writer: PrintWriter = new PrintWriter(file) - writer.print( - s"""package $packageText; + writer.print(s"""package $packageText; | |import com.fasterxml.jackson.annotation.JsonInclude; | |@JsonInclude(JsonInclude.Include.NON_NULL) - |public record $customClassNameCustomUniqueInteractionData(${fields(config)}) { + |public record $customClassNameCustomUniqueInteractionData(${fields( + config + )}) { |} 
|""".stripMargin) writer.flush() @@ -166,9 +212,10 @@ private object CustomPatient { def nodeFields(): String = { n.fields - .map(f => s"""${" " * 6}${f.fieldType} ${f.fieldName}""".stripMargin) - .mkString( - s""", + .map(f => + s"""${" " * 6}${f.fieldType} ${f.fieldName}""".stripMargin + ) + .mkString(s""", |""") } @@ -176,8 +223,7 @@ private object CustomPatient { val fileName = classLocation + File.separator + className + ".java" val file: File = new File(fileName) val writer: PrintWriter = new PrintWriter(file) - writer.println( - s""" + writer.println(s""" |package org.jembi.jempi.shared.models; | |import com.fasterxml.jackson.annotation.JsonInclude; diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlGoldenRecord.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlGoldenRecord.scala index 7b24db8cc..0a24eebdd 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlGoldenRecord.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlGoldenRecord.scala @@ -4,10 +4,12 @@ import java.io.{File, PrintWriter} private object CustomPostgresqlGoldenRecord { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql" private val packageText = "org.jembi.jempi.libmpi.postgresql" private val customClassName = "CustomGoldenRecordData" - private val classFile: String = classLocation + File.separator + customClassName + ".java" + private val classFile: String = + classLocation + File.separator + customClassName + ".java" def generate(fields: Array[DemographicField]): Unit = { println("Creating " + classFile) @@ -21,11 +23,18 @@ private object CustomPostgresqlGoldenRecord { |final class $customClassName extends CustomDemographicData implements NodeData { | | $customClassName(final CustomDemographicData 
customDemographicData) { - | super(${fields.map(field => s"""customDemographicData.${Utils.snakeCaseToCamelCase(field.fieldName)}""").mkString(",\n ")}); + | super(${fields + .map(field => + s"""customDemographicData.${Utils.snakeCaseToCamelCase( + field.fieldName + )}""" + ) + .mkString(s",${sys.props("line.separator")} ")}); | } | |} - |""".stripMargin) + |""".stripMargin + ) writer.close() } diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlInteraction.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlInteraction.scala index 8a0787272..3b0edefa4 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlInteraction.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/CustomPostgresqlInteraction.scala @@ -4,10 +4,12 @@ import java.io.{File, PrintWriter} private object CustomPostgresqlInteraction { - private val classLocation = "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql" + private val classLocation = + "../JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql" private val packageText = "org.jembi.jempi.libmpi.postgresql" private val customClassName = "CustomInteractionData" - private val classFile: String = classLocation + File.separator + customClassName + ".java" + private val classFile: String = + classLocation + File.separator + customClassName + ".java" def generate(fields: Array[DemographicField]): Unit = { println("Creating " + classFile) @@ -21,11 +23,18 @@ private object CustomPostgresqlInteraction { |final class $customClassName extends CustomDemographicData implements NodeData { | | $customClassName(final CustomDemographicData customDemographicData) { - | super(${fields.map(field => s"""customDemographicData.${Utils.snakeCaseToCamelCase(field.fieldName)}""").mkString(",\n ")}); + | super(${fields + .map(field => + s"""customDemographicData.${Utils.snakeCaseToCamelCase( + field.fieldName + )}""" + ) + 
.mkString(s",${sys.props("line.separator")} ")}); | } | |} - |""".stripMargin) + |""".stripMargin + ) writer.close() } diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Main.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Main.scala index 4729baf1d..c8b5f6ad8 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Main.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Main.scala @@ -2,13 +2,15 @@ package configuration import com.fasterxml.jackson.core.`type`.TypeReference import com.fasterxml.jackson.databind.json.JsonMapper -import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule} +import com.fasterxml.jackson.module.scala.{ + ClassTagExtensions, + DefaultScalaModule +} import java.nio.file.Paths object Main { - @main def configure(in_config_file_name: String): Any = val config_file_name = if (in_config_file_name.isBlank) { @@ -19,8 +21,14 @@ object Main { } println(s"name = $config_file_name") - val mapper = JsonMapper.builder().addModule(DefaultScalaModule).build() :: ClassTagExtensions - val config = mapper.readValue(Paths.get(config_file_name).toFile, new TypeReference[Config] {}) + val mapper = JsonMapper + .builder() + .addModule(DefaultScalaModule) + .build() :: ClassTagExtensions + val config = mapper.readValue( + Paths.get(config_file_name).toFile, + new TypeReference[Config] {} + ) CustomMU.generate(config.demographicFields) CustomDgraphConstants.generate(config) @@ -39,5 +47,10 @@ object Main { CustomPostgresqlGoldenRecord.generate(config.demographicFields) CustomAsyncHelper.generate(config) CustomPatient.generate(config) + CustomFieldTallies.generate(config) + CustomControllerDashboardMU.generate(config) + ScalaCustomFields.generate(config) + ScalaCustomInteractionEnvelop.generate(config) + ScalaCustomMU.generate(config) } diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ParseRule.scala 
b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ParseRule.scala index 672c33eaf..77175f065 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ParseRule.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ParseRule.scala @@ -4,12 +4,13 @@ import scala.util.parsing.combinator.JavaTokenParsers object ParseRule extends JavaTokenParsers { - import Ast._ + import Ast.* def parse(str: String): Ast.Expression = parseAll(expression, str) match { case Success(result, _) => result - case failedOrIncomplete => throw new RuntimeException(failedOrIncomplete.toString) + case failedOrIncomplete => + throw new RuntimeException(failedOrIncomplete.toString) } private def expression: Parser[Expression] = @@ -18,11 +19,10 @@ object ParseRule extends JavaTokenParsers { private def combinationExpression: Parser[Expression] = comment.? ~> or | and <~ comment.? - /** - * Expressions that can be used as left part of recursive expression - * - * @return - */ + /** Expressions that can be used as left part of recursive expression + * + * @return + */ private def leftExpression: Parser[Expression] = comment.? ~> not | brackets | matchField | eqField <~ comment.? @@ -33,7 +33,9 @@ object ParseRule extends JavaTokenParsers { "eq" ~ "(" ~>! variable <~! ")" ^^ (parameter => Eq.apply(parameter)) private def matchField: Parser[Match] = - "match" ~ "(" ~>! matchParameters <~! ")" ^^ (parameters => Match.apply(parameters._1, parameters._2)) + "match" ~ "(" ~>! matchParameters <~! ")" ^^ (parameters => + Match.apply(parameters._1, parameters._2) + ) private def matchParameters: Parser[(Variable, Int)] = variable ~! 
matchDistance ^^ (parameters => (parameters._1, parameters._2)) diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomFields.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomFields.scala new file mode 100644 index 000000000..36c6b8225 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomFields.scala @@ -0,0 +1,48 @@ +package configuration + +import java.io.{File, PrintWriter} +import scala.language.{existentials, postfixOps} + +object ScalaCustomFields { + + private val classLocation = + "../JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em" + private val custom_className = "CustomFields" + private val packageText = "org.jembi.jempi.em" + + def generate(config: Config): Any = { + + def fieldDefs(): String = + config.demographicFields.zipWithIndex + .map((f, i) => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 4}Field("${fieldName}", ${i}),""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fieldDefs + + val classFile: String = + classLocation + File.separator + custom_className + ".scala" + println("Creating " + classFile) + val file: File = new File(classFile) + val writer: PrintWriter = new PrintWriter(file) + + writer.println(s"""package $packageText + | + |import scala.collection.immutable.ArraySeq + | + |object CustomFields { + | + | val FIELDS: ArraySeq[Field] = ArraySeq( + | ${fieldDefs()} + | ) + | + |} + |""".stripMargin) + writer.flush() + writer.close() + } + +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomInteractionEnvelop.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomInteractionEnvelop.scala new file mode 100644 index 000000000..44ae9a64b --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomInteractionEnvelop.scala @@ -0,0 +1,119 @@ +package configuration + +import java.io.{File, PrintWriter} 
+import scala.language.{existentials, postfixOps} + +object ScalaCustomInteractionEnvelop { + + private val classLocation = + "../JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em" + private val custom_className = "CustomInteractionEnvelop" + private val packageText = "org.jembi.jempi.em" + + def generate(config: Config): Any = { + + val muList = + for (t <- config.demographicFields.filter(f => f.linkMetaData.isDefined)) + yield t + + def fieldDefs(): String = + muList.zipWithIndex + .map((f, i) => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 4}${fieldName}: String,""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fieldDefs + + def fieldList(): String = + muList.zipWithIndex + .map((f, i) => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${fieldName}""" + }) + .mkString("", "," + sys.props("line.separator") + (" " * 12), "") + .trim + end fieldList + + val classFile: String = + classLocation + File.separator + custom_className + ".scala" + println("Creating " + classFile) + val file: File = new File(classFile) + val writer: PrintWriter = new PrintWriter(file) + + writer.println(s"package $packageText") + writer.println() + + if (muList.length == 0) { + writer.println(s""" + |import com.fasterxml.jackson.annotation.JsonIgnoreProperties + | + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class ${custom_className}( + | contentType: String, + | tag: Option[String], + | stan: Option[String], + | interaction: Option[Interaction] + |) {} + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class Interaction( + | uniqueInteractionData: UniqueInteractionData, + | demographicData: DemographicData + |) + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class UniqueInteractionData(auxId: String) + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class DemographicData( + | ${fieldDefs()} + |) { + | + | def toArray: Array[String] = + | Array(${fieldList()}) + | + |} + 
|""".stripMargin) + } else { + writer.println(s""" + |import com.fasterxml.jackson.annotation.JsonIgnoreProperties + | + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class ${custom_className}( + | contentType: String, + | tag: Option[String], + | stan: Option[String], + | interaction: Option[Interaction] + |) {} + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class Interaction( + | uniqueInteractionData: UniqueInteractionData, + | demographicData: DemographicData + |) + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class UniqueInteractionData(auxId: String) + | + |@JsonIgnoreProperties(ignoreUnknown = true) + |case class DemographicData( + | ${fieldDefs()} + |) { + | + | def toArray: Array[String] = + | Array(${fieldList()}) + | + |} + |""".stripMargin) + + } + writer.flush() + writer.close() + } + +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomMU.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomMU.scala new file mode 100644 index 000000000..aa77df96d --- /dev/null +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/ScalaCustomMU.scala @@ -0,0 +1,73 @@ +package configuration + +import java.io.{File, PrintWriter} +import scala.language.{existentials, postfixOps} + +object ScalaCustomMU { + + private val classLocation = + "../JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka" + private val custom_className = "CustomMU" + private val packageText = "org.jembi.jempi.em.kafka" + + def generate(config: Config): Any = { + + def fieldDefs(): String = + config.demographicFields.zipWithIndex + .map((f, i) => { + val fieldName = Utils.snakeCaseToCamelCase(f.fieldName) + s"""${" " * 4}${fieldName}: Probability,""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end fieldDefs + + def probSeqDefs(): String = + config.demographicFields.zipWithIndex + .map((f, i) => { + s"""${" " * 12}Probability(muSeq.apply(${i}).m, 
muSeq.apply(${i}).u),""" + }) + .mkString(sys.props("line.separator")) + .trim + .dropRight(1) + end probSeqDefs + + val classFile: String = + classLocation + File.separator + custom_className + ".scala" + println("Creating " + classFile) + val file: File = new File(classFile) + val writer: PrintWriter = new PrintWriter(file) + + val muList = + for (t <- config.demographicFields.filter(f => f.linkMetaData.isDefined)) + yield t + + writer.println(s"""package $packageText + | + | + |import org.jembi.jempi.em.MU + | + |import scala.collection.immutable.ArraySeq + | + |case class ${custom_className}( + | tag: String, + | ${fieldDefs()} + |) + | + |object ${custom_className} { + | + | def fromArraySeq(tag: String, muSeq: ArraySeq[MU]): CustomMU = + | CustomMU( + | tag, + | ${probSeqDefs()} + | ) + | + |} + |""".stripMargin) + + writer.flush() + writer.close() + } + +} diff --git a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Utils.scala b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Utils.scala index f8b8bad7e..9247f6b9f 100644 --- a/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Utils.scala +++ b/JeMPI_Apps/JeMPI_Configuration/src/main/scala/configuration/Utils.scala @@ -2,14 +2,24 @@ package configuration object Utils { - val javaType: Map[String, String] = Map.apply("String" -> "String", "Bool" -> "Boolean", "DateTime" -> "java.time.LocalDateTime") + val javaType: Map[String, String] = Map.apply( + "String" -> "String", + "Bool" -> "Boolean", + "DateTime" -> "java.time.LocalDateTime" + ) - def camelCaseToSnakeCase(name: String): String = "[A-Z\\d]".r.replaceAllIn(name, { - m => "_" + m.group(0).toLowerCase() - }) + def camelCaseToSnakeCase(name: String): String = "[A-Z\\d]".r.replaceAllIn( + name, + { m => + "_" + m.group(0).toLowerCase() + } + ) - def snakeCaseToCamelCase(name: String): String = "_([a-z\\d])".r.replaceAllIn(name, { - m => m.group(1).toUpperCase() - }) + def snakeCaseToCamelCase(name: String): String = 
"_([a-z\\d])".r.replaceAllIn( + name, + { m => + m.group(1).toUpperCase() + } + ) } diff --git a/JeMPI_Apps/JeMPI_Controller/checkstyle/suppression.xml b/JeMPI_Apps/JeMPI_Controller/checkstyle/suppression.xml index 5966b62ca..cb69ad021 100644 --- a/JeMPI_Apps/JeMPI_Controller/checkstyle/suppression.xml +++ b/JeMPI_Apps/JeMPI_Controller/checkstyle/suppression.xml @@ -16,9 +16,19 @@ files="().java" /> - - - - + + + + + diff --git a/JeMPI_Apps/JeMPI_Controller/docker/Dockerfile b/JeMPI_Apps/JeMPI_Controller/docker/Dockerfile index 059c4d462..22fd130bb 100644 --- a/JeMPI_Apps/JeMPI_Controller/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_Controller/docker/Dockerfile @@ -6,7 +6,7 @@ ADD Controller-1.0-SNAPSHOT-spring-boot.jar /app/Controller-1.0-SNAPSHOT-spring- RUN printf "#!/bin/bash\n\ cd /app\n\ -java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/Controller-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh +java -server -XX:MaxRAMPercentage=80 -jar /app/Controller-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh RUN chmod +x /entrypoint.sh diff --git a/JeMPI_Apps/JeMPI_Controller/pom.xml b/JeMPI_Apps/JeMPI_Controller/pom.xml index 94b5c9102..80e385356 100644 --- a/JeMPI_Apps/JeMPI_Controller/pom.xml +++ b/JeMPI_Apps/JeMPI_Controller/pom.xml @@ -87,6 +87,11 @@ jackson-databind + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + org.postgresql postgresql @@ -112,6 +117,12 @@ org.apache.logging.log4j log4j-jcl + + org.jembi.jempi + JeMPI_LibMPI + 1.0-SNAPSHOT + compile + diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/AppConfig.java index 82d7f0782..b11c5d727 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/AppConfig.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/AppConfig.java @@ -7,21 +7,23 @@ import org.apache.logging.log4j.Logger; import java.io.File; +import java.util.Arrays; public final 
class AppConfig { private static final Logger LOGGER = LogManager.getLogger(org.jembi.jempi.AppConfig.class); private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - private static final Config CONFIG = new org.jembi.jempi.AppConfig.Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); + private static final Config CONFIG = new org.jembi.jempi.AppConfig.Builder().withSystemEnvironment() + .withSystemProperties() + .withOptionalRelativeFile( + "/conf/server.production.conf") + .withOptionalRelativeFile( + "/conf/server.staging.conf") + .withOptionalRelativeFile("/conf/server.test.conf") + .withResource("application.local.conf") + .withResource("application.conf") + .build(); public static final String KAFKA_BOOTSTRAP_SERVERS = CONFIG.getString("KAFKA_BOOTSTRAP_SERVERS"); public static final String KAFKA_APPLICATION_ID = CONFIG.getString("KAFKA_APPLICATION_ID"); public static final String KAFKA_CLIENT_ID = CONFIG.getString("KAFKA_CLIENT_ID"); @@ -29,12 +31,29 @@ public final class AppConfig { public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); public static final String POSTGRESQL_USER = CONFIG.getString("POSTGRESQL_USER"); public static final String POSTGRESQL_PASSWORD = CONFIG.getString("POSTGRESQL_PASSWORD"); - public static final String POSTGRESQL_DATABASE = CONFIG.getString("POSTGRESQL_DATABASE"); + public static final String POSTGRESQL_NOTIFICATIONS_DB = CONFIG.getString("POSTGRESQL_NOTIFICATIONS_DB"); + public static final String POSTGRESQL_AUDIT_DB = CONFIG.getString("POSTGRESQL_AUDIT_DB"); public static final Integer CONTROLLER_HTTP_PORT = 
CONFIG.getInt("CONTROLLER_HTTP_PORT"); public static final String LINKER_IP = CONFIG.getString("LINKER_IP"); public static final Integer LINKER_HTTP_PORT = CONFIG.getInt("LINKER_HTTP_PORT"); public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); + private static final String[] DGRAPH_ALPHA_HOSTS = CONFIG.getString("DGRAPH_HOSTS").split(","); + private static final int[] DGRAPH_ALPHA_PORTS = Arrays.stream(CONFIG.getString("DGRAPH_PORTS").split(",")).mapToInt(s -> { + try { + return Integer.parseInt(s); + } catch (NumberFormatException ex) { + return Integer.MIN_VALUE; + } + }).toArray(); + + public static String[] getDGraphHosts() { + return DGRAPH_ALPHA_HOSTS; + } + public static int[] getDGraphPorts() { + return DGRAPH_ALPHA_PORTS; + } + private AppConfig() { } diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/BackEnd.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/BackEnd.java index 9c0574919..1d2c0c990 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/BackEnd.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/BackEnd.java @@ -1,16 +1,18 @@ package org.jembi.jempi.controller; +import akka.actor.typed.ActorRef; +import akka.actor.typed.ActorSystem; import akka.actor.typed.Behavior; -import akka.actor.typed.javadsl.AbstractBehavior; -import akka.actor.typed.javadsl.ActorContext; -import akka.actor.typed.javadsl.Behaviors; -import akka.actor.typed.javadsl.Receive; +import akka.actor.typed.javadsl.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; import org.jembi.jempi.AppConfig; +import org.jembi.jempi.shared.models.NotificationResolutionProcessorData; import java.time.Duration; +import java.util.HashMap; +import java.util.concurrent.CompletionStage; public final class BackEnd extends AbstractBehavior { @@ -26,14 
+28,38 @@ public static Behavior create() { return Behaviors.setup(BackEnd::new); } + static CompletionStage askOnNotificationResolution( + final ActorSystem actorSystem, + final ActorRef backEnd, + final NotificationResolutionProcessorData notificationResolutionDetails) { + final CompletionStage stage = AskPattern + .ask(backEnd, + replyTo -> new OnNotificationResolutionRequest(replyTo, notificationResolutionDetails), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); + return stage.thenApply(response -> response); + } + + static CompletionStage askGetDashboardData( + final ActorSystem actorSystem, + final ActorRef backEnd) { + final CompletionStage stage = AskPattern + .ask(backEnd, + replyTo -> new DashboardDataRequest(replyTo), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); + return stage.thenApply(response -> response); + } + public void close() { } public Receive createReceive() { - return newReceiveBuilder() - .onMessage(EventTeaTime.class, this::eventTeaTimeHandler) - .onMessage(EventWorkTime.class, this::eventWorkTimeHandler) - .build(); + return newReceiveBuilder().onMessage(EventTeaTime.class, this::eventTeaTimeHandler) + .onMessage(EventWorkTime.class, this::eventWorkTimeHandler) + .onMessage(OnNotificationResolutionRequest.class, this::onNotificationResolutionHandler) + .onMessage(DashboardDataRequest.class, this::getDashboardDataHandler) + .build(); } private Behavior eventWorkTimeHandler(final EventWorkTime request) { @@ -49,6 +75,38 @@ private Behavior eventTeaTimeHandler(final EventTeaTime request) { }); } + private Behavior onNotificationResolutionHandler(final OnNotificationResolutionRequest request) { + request.replyTo.tell(new OnNotificationResolutionResponse(true)); + return Behaviors.same(); + } + + private Behavior getDashboardDataHandler(final DashboardDataRequest request) { + final var dashboardData = new HashMap(); + final var linkStatsMeta = LinkStatsMetaCache.get(); + if (linkStatsMeta != null) { + 
dashboardData.put("linker_stats", new LinkerStats(123L, 456L)); + dashboardData.put("m_and_u", CustomControllerDashboardMU.fromCustomFieldTallies(linkStatsMeta.customFieldTallies())); + final var tp = linkStatsMeta.confusionMatrix().TP(); + final var fp = linkStatsMeta.confusionMatrix().FP(); + final var tn = linkStatsMeta.confusionMatrix().TN(); + final var fn = linkStatsMeta.confusionMatrix().FN(); + final var b1 = 0.25; // beta = 0.5 + final var b2 = 1.0; // beta = 1.0 + final var b3 = 4.0; // beta = 2.0; + final var f1 = ((1.0 + b1) * tp) / ((1 + b1) * tp + b1 * fn + fp); + final var f2 = ((1.0 + b2) * tp) / ((1 + b2) * tp + b2 * fn + fp); + final var f3 = ((1.0 + b3) * tp) / ((1 + b3) * tp + b3 * fn + fp); + dashboardData.put("tptn", + new TPTN(new TPTN.TPTNMatrix(tp.longValue(), + fp.longValue(), + tn.longValue(), + fn.longValue()), + new TPTN.TPTNfScore(f1, f2, f3))); + } + request.replyTo.tell(new DashboardDataResponse(dashboardData)); + return Behaviors.same(); + } + private enum EventTeaTime implements Event { INSTANCE } @@ -60,4 +118,47 @@ private enum EventWorkTime implements Event { interface Event { } + private record LinkerStats( + Long goldenRecordCount, + Long interactionsCount) { + } + + private record TPTN( + TPTNMatrix tptnMatrix, + TPTNfScore tptnfScore) { + record TPTNMatrix( + Long truePositive, + Long falsePositive, + Long trueNegative, + Long falseNegative) { + } + + record TPTNfScore( + Double precision, + Double recall_precision, + Double recall + ) { + } + + } + + public record OnNotificationResolutionRequest( + ActorRef replyTo, + NotificationResolutionProcessorData notificationResolutionDetails + ) implements Event { + } + + public record OnNotificationResolutionResponse(Boolean updated) + implements Event { + } + + public record DashboardDataRequest( + ActorRef replyTo + ) implements Event { + } + + public record DashboardDataResponse(HashMap dashboardData) + implements Event { + } + } diff --git 
a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/CustomControllerDashboardMU.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/CustomControllerDashboardMU.java new file mode 100644 index 000000000..ca573c984 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/CustomControllerDashboardMU.java @@ -0,0 +1,38 @@ +package org.jembi.jempi.controller; + +import org.jembi.jempi.shared.models.CustomFieldTallies; +import org.jembi.jempi.shared.models.CustomFieldTallies.FieldTally; + +record CustomControllerDashboardMU( + MU givenName, + MU familyName, + MU gender, + MU dob, + MU city, + MU phoneNumber, + MU nationalId) { + + static MU getMU(final FieldTally fieldTally) { + if (fieldTally.a() + fieldTally.b() == 0 || fieldTally.c() + fieldTally.d() == 0) { + return new MU(-1.0, -1.0); + } + return new MU(fieldTally.a().doubleValue() / (fieldTally.a().doubleValue() + fieldTally.b().doubleValue()), + fieldTally.c().doubleValue() / (fieldTally.c().doubleValue() + fieldTally.d().doubleValue())); + } + + record MU( + Double m, + Double u) { + } + + static CustomControllerDashboardMU fromCustomFieldTallies(final CustomFieldTallies customFieldTallies) { + return new CustomControllerDashboardMU(getMU(customFieldTallies.givenName()), + getMU(customFieldTallies.familyName()), + getMU(customFieldTallies.gender()), + getMU(customFieldTallies.dob()), + getMU(customFieldTallies.city()), + getMU(customFieldTallies.phoneNumber()), + getMU(customFieldTallies.nationalId())); + } + +} diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/HttpServer.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/HttpServer.java index c50149eea..186e293a0 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/HttpServer.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/HttpServer.java @@ -12,14 +12,15 @@ import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jembi.jempi.AppConfig; +import org.jembi.jempi.shared.models.ApiModels; import org.jembi.jempi.shared.models.GlobalConstants; -import org.jembi.jempi.shared.models.LinkInteractionSyncBody; -import org.jembi.jempi.shared.models.LinkInteractionToGidSyncBody; -import org.jembi.jempi.shared.utils.AppUtils; +import org.jembi.jempi.shared.models.NotificationResolutionProcessorData; import java.util.Locale; import java.util.concurrent.CompletionStage; +import static org.jembi.jempi.shared.utils.AppUtils.OBJECT_MAPPER; + public final class HttpServer extends AllDirectives { private static final Logger LOGGER = LogManager.getLogger(HttpServer.class); @@ -36,76 +37,96 @@ void open( final ActorSystem system, final ActorRef backEnd) { http = Http.get(system); - binding = http.newServerAt("0.0.0.0", - AppConfig.CONTROLLER_HTTP_PORT) - .bind(this.createRoute(system, backEnd)); + binding = http.newServerAt("0.0.0.0", AppConfig.CONTROLLER_HTTP_PORT).bind(this.createRoute(system, backEnd)); LOGGER.info("Server online at http://{}:{}", "0.0.0.0", AppConfig.CONTROLLER_HTTP_PORT); } - private CompletionStage postLinkInteraction(final LinkInteractionSyncBody body) throws JsonProcessingException { + private CompletionStage postLinkInteraction(final ApiModels.LinkInteractionSyncBody body) throws JsonProcessingException { final HttpRequest request; - request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - AppConfig.LINKER_IP, - AppConfig.LINKER_HTTP_PORT, - GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, AppUtils.OBJECT_MAPPER.writeValueAsBytes(body)); + request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + AppConfig.LINKER_IP, + AppConfig.LINKER_HTTP_PORT, + GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION)) + .withMethod(HttpMethods.POST) + 
.withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } - private CompletionStage postLinkInteractionToGid(final LinkInteractionToGidSyncBody body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - AppConfig.LINKER_IP, - AppConfig.LINKER_HTTP_PORT, - GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, AppUtils.OBJECT_MAPPER.writeValueAsBytes(body)); + private CompletionStage postLinkInteractionToGid(final ApiModels.LinkInteractionToGidSyncBody body) throws JsonProcessingException { + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + AppConfig.LINKER_IP, + AppConfig.LINKER_HTTP_PORT, + GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } private CompletionStage getMU() { - final var request = HttpRequest - .create(String.format(Locale.ROOT, "http://%s:%d/JeMPI/mu", AppConfig.LINKER_IP, AppConfig.LINKER_HTTP_PORT)) - .withMethod(HttpMethods.GET); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/mu", + AppConfig.LINKER_IP, + AppConfig.LINKER_HTTP_PORT)).withMethod(HttpMethods.GET); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } + private Route onNotificationResolution( + final ActorSystem actorSystem, + final ActorRef backEnd) { + return entity(Jackson.unmarshaller(NotificationResolutionProcessorData.class), + obj -> onComplete(BackEnd.askOnNotificationResolution(actorSystem, backEnd, obj), response -> { + if (response.isSuccess() && 
Boolean.TRUE.equals(response.get().updated())) { + return complete(StatusCodes.OK); + } else { + return complete(StatusCodes.IM_A_TEAPOT); + } + })); + } + + private Route routeDashboardData( + final ActorSystem actorSystem, + final ActorRef backEnd) { + return onComplete(BackEnd.askGetDashboardData(actorSystem, backEnd), response -> { + if (response.isSuccess()) { + return complete(StatusCodes.OK, response.get(), Jackson.marshaller()); + } else { + return complete(StatusCodes.IM_A_TEAPOT); + } + }); + } + private Route routeLinkInteraction() { - return entity(Jackson.unmarshaller(LinkInteractionSyncBody.class), - obj -> { - try { - LOGGER.debug("{}", obj); - return onComplete(postLinkInteraction(obj), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(ApiModels.LinkInteractionSyncBody.class), obj -> { + try { + LOGGER.debug("{}", obj); + return onComplete(postLinkInteraction(obj), + response -> response.isSuccess() + ? complete(response.get()) + : complete(StatusCodes.IM_A_TEAPOT)); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(StatusCodes.IM_A_TEAPOT); + } + }); } private Route routeLinkInteractionToGid() { - return entity(Jackson.unmarshaller(LinkInteractionToGidSyncBody.class), - obj -> { - try { - return onComplete(postLinkInteractionToGid(obj), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(ApiModels.LinkInteractionToGidSyncBody.class), obj -> { + try { + return onComplete(postLinkInteractionToGid(obj), + response -> response.isSuccess() + ? 
complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + }); } private Route routeMU() { @@ -119,12 +140,16 @@ private Route createRoute( final ActorSystem actorSystem, final ActorRef backEnd) { return pathPrefix("JeMPI", - () -> concat( - post(() -> concat( - path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION, this::routeLinkInteraction), - path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID, - this::routeLinkInteractionToGid))), - get(() -> path("mu", this::routeMU)))); + () -> concat(post(() -> concat(path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION, + this::routeLinkInteraction), + path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID, + this::routeLinkInteractionToGid), + path(GlobalConstants.SEGMENT_PROXY_ON_NOTIFICATION_RESOLUTION, + () -> onNotificationResolution(actorSystem, backEnd)))), + get(() -> concat(path("mu", this::routeMU), + path(GlobalConstants.SEGMENT_PROXY_GET_DASHBOARD_DATA, + () -> routeDashboardData(actorSystem, backEnd)) + )))); } } diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/LinkStatsMetaCache.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/LinkStatsMetaCache.java new file mode 100644 index 000000000..f16eb0c27 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/LinkStatsMetaCache.java @@ -0,0 +1,27 @@ +package org.jembi.jempi.controller; + +import org.jembi.jempi.shared.models.LinkStatsMeta; + +public final class LinkStatsMetaCache { + + private static LinkStatsMeta linkStatsMeta = null; + + private LinkStatsMetaCache() { + } + + public static LinkStatsMeta get() { + final LinkStatsMeta rsp; + synchronized (LinkStatsMetaCache.class) { + rsp = new LinkStatsMeta(linkStatsMeta.confusionMatrix(), 
linkStatsMeta.customFieldTallies()); + } + return rsp; + } + + public static void set(final LinkStatsMeta meta) { + final var work = new LinkStatsMeta(meta.confusionMatrix(), meta.customFieldTallies()); + synchronized (LinkStatsMetaCache.class) { + linkStatsMeta = work; + } + } + +} diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/Main.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/Main.java index 681ca5c62..ce985ac78 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/Main.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/Main.java @@ -4,13 +4,35 @@ import akka.actor.typed.Behavior; import akka.actor.typed.Terminated; import akka.actor.typed.javadsl.Behaviors; +import org.apache.kafka.common.serialization.Deserializer; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; +import org.apache.kafka.streams.state.KeyValueStore; +import org.apache.kafka.streams.state.StoreBuilder; +import org.apache.kafka.streams.state.Stores; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jembi.jempi.AppConfig; +import org.jembi.jempi.shared.models.GlobalConstants; +import org.jembi.jempi.shared.models.LinkStatsMeta; +import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; +import org.jembi.jempi.shared.serdes.JsonPojoSerializer; + +import java.util.Properties; public final class Main { private static final Logger LOGGER = LogManager.getLogger(Main.class); + private static final Deserializer STRING_DESERIALIZER = new StringDeserializer(); + private static final Deserializer LINK_STATS_META_DESERIALIZER = + new JsonPojoDeserializer<>(LinkStatsMeta.class); + 
private static final Serde STRING_SERDE = Serdes.String(); + private static final Serde LINK_STATS_META_SERDE = + Serdes.serdeFrom(new JsonPojoSerializer<>(), LINK_STATS_META_DESERIALIZER); private Main() { } @@ -19,32 +41,58 @@ public static void main(final String[] args) { new Main().run(); } + public static Topology createTopology() { + StoreBuilder> stateStoreBuilder = + Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(SPLinkStatsMeta.STATE_STORE_NAME), + STRING_SERDE, + LINK_STATS_META_SERDE); + + return new Topology() + .addSource("Source", + STRING_DESERIALIZER, LINK_STATS_META_DESERIALIZER, + GlobalConstants.TOPIC_INTERACTION_PROCESSOR_CONTROLLER) + .addProcessor("Process", SPLinkStatsMeta::new, "Source") + .addStateStore(stateStoreBuilder, "Process"); + } + + private Properties getProps() { + final Properties props = new Properties(); + props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfig.KAFKA_BOOTSTRAP_SERVERS); + props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfig.KAFKA_APPLICATION_ID + "-SP_TALLIES_MU"); + props.put(StreamsConfig.POLL_MS_CONFIG, 50); + return props; + } + public Behavior create() { - return Behaviors.setup( - context -> { - final var backEndActor = context.spawn(BackEnd.create(), "BackEnd"); - context.watch(backEndActor); - final var spAuditTrail = new SPAuditTrail(); - spAuditTrail.open(); - final var spNotification = new SPNotification(); - spNotification.open(); - final var spInteractions = new SPInteractions(); - spInteractions.open(context.getSystem(), backEndActor); - final var httpServer = new HttpServer(); - httpServer.open(context.getSystem(), backEndActor); - return Behaviors.receive(Void.class) - .onSignal(Terminated.class, - sig -> { - httpServer.close(context.getSystem()); - return Behaviors.stopped(); - }) - .build(); - }); + return Behaviors.setup(context -> { + final var backEndActor = context.spawn(BackEnd.create(), "BackEnd"); + + context.watch(backEndActor); + final var spAuditTrail = new 
SPAuditTrail(); + spAuditTrail.open(); + final var spNotification = new SPNotification(); + spNotification.open(); + final var spInteractions = new SPInteractions(); + spInteractions.open(context.getSystem(), backEndActor); + final var spMU = new SPMU(); + spMU.open(context.getSystem(), backEndActor); +// new SPInteractionProcessor().open(); + final var streaming = new KafkaStreams(createTopology(), getProps()); + streaming.start(); + final var httpServer = new HttpServer(); + httpServer.open(context.getSystem(), backEndActor); + return Behaviors.receive(Void.class).onSignal(Terminated.class, sig -> { + httpServer.close(context.getSystem()); + streaming.close(); + return Behaviors.stopped(); + }).build(); + }); } private void run() { - LOGGER.info("CONFIG: {} {} {} {}", - AppConfig.POSTGRESQL_DATABASE, + LOGGER.info("CONFIG: {} {} {} {} {}", + AppConfig.POSTGRESQL_NOTIFICATIONS_DB, + AppConfig.POSTGRESQL_AUDIT_DB, AppConfig.KAFKA_BOOTSTRAP_SERVERS, AppConfig.KAFKA_APPLICATION_ID, AppConfig.KAFKA_CLIENT_ID); diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlAuditTrail.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlAuditTrail.java index 8edb5fc5f..556eb3eb5 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlAuditTrail.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlAuditTrail.java @@ -2,6 +2,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.jembi.jempi.AppConfig; import org.jembi.jempi.shared.models.AuditEvent; import java.sql.SQLException; @@ -18,8 +19,9 @@ final class PsqlAuditTrail { } void createSchemas() { +/* LOGGER.debug("Create Schemas"); - psqlClient.connect(); + psqlClient.connect(AppConfig.POSTGRESQL_AUDIT_DB); try (var stmt = psqlClient.createStatement()) { stmt.executeUpdate(String.format( Locale.ROOT, @@ -48,17 +50,16 @@ CONSTRAINT PKEY_AUDIT_TRAIL PRIMARY KEY (id) 
} catch (SQLException e) { LOGGER.error(e.getLocalizedMessage(), e); } +*/ } void addAuditEvent(final AuditEvent event) { - psqlClient.connect(); - try (var preparedStatement = psqlClient.prepareStatement( - String.format( - Locale.ROOT, - """ - INSERT INTO %s (createdAt, interactionID, goldenID, event) - VALUES (?, ?, ?, ?); - """, PSQL_TABLE_AUDIT_TRAIL).stripIndent())) { + psqlClient.connect(AppConfig.POSTGRESQL_AUDIT_DB); + try (var preparedStatement = psqlClient.prepareStatement(String.format(Locale.ROOT, """ + INSERT INTO %s (createdAt, interactionID, goldenID, event) + VALUES (?, ?, ?, ?); + """, PSQL_TABLE_AUDIT_TRAIL) + .stripIndent())) { preparedStatement.setTimestamp(1, event.createdAt()); preparedStatement.setString(2, event.interactionID()); preparedStatement.setString(3, event.goldenID()); diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlClient.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlClient.java index e4935b42d..f8163d926 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlClient.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlClient.java @@ -16,10 +16,14 @@ final class PsqlClient { connection = null; } - boolean connect() { + boolean connect(final String database) { if (connection == null) { try { - final var url = String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", AppConfig.POSTGRESQL_IP, AppConfig.POSTGRESQL_PORT, AppConfig.POSTGRESQL_DATABASE); + final var url = String.format(Locale.ROOT, + "jdbc:postgresql://%s:%d/%s", + AppConfig.POSTGRESQL_IP, + AppConfig.POSTGRESQL_PORT, + database); connection = DriverManager.getConnection(url, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD); return connection.isValid(5); } catch (SQLException e) { @@ -31,8 +35,11 @@ boolean connect() { try { if (!connection.isValid(5)) { connection.close(); - final var url = - String.format(Locale.ROOT, 
"jdbc:postgresql://%s:%d/%s", AppConfig.POSTGRESQL_IP, AppConfig.POSTGRESQL_PORT, AppConfig.POSTGRESQL_DATABASE); + final var url = String.format(Locale.ROOT, + "jdbc:postgresql://%s:%d/%s", + AppConfig.POSTGRESQL_IP, + AppConfig.POSTGRESQL_PORT, + database); connection = DriverManager.getConnection(url, AppConfig.POSTGRESQL_USER, AppConfig.POSTGRESQL_PASSWORD); } } catch (SQLException e) { diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlNotifications.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlNotifications.java index 7eb189c84..8e5cc0053 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlNotifications.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/PsqlNotifications.java @@ -2,10 +2,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.jembi.jempi.AppConfig; +import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Statement; -import java.util.Date; +import java.sql.Timestamp; import java.util.UUID; final class PsqlNotifications { @@ -21,24 +23,28 @@ void insert( final String type, final String patientNames, final Float score, - final Long created, + final Timestamp created, final String gID, final String dID) throws SQLException { - psqlClient.connect(); - try (Statement stmt = psqlClient.createStatement()) { - - // Set auto-commit to false + psqlClient.connect(AppConfig.POSTGRESQL_NOTIFICATIONS_DB); + String sql = "INSERT INTO notification (id, type, state, names, created, patient_id, golden_id, score) " + + "VALUES (?, ?, ?, ?, ?, ?, ?, ?)"; + try (PreparedStatement pstmt = psqlClient.prepareStatement(sql)) { psqlClient.setAutoCommit(false); - Date res = new Date(created); - String state = "New"; - - String sql = "INSERT INTO notification (id, type, state, names, created, patient_id, golden_id, score) " - + "VALUES ('" + id + "','" + type + "','" + 
state + "','" + patientNames + "', '" + res + "', '" + dID - + "', '" + gID + "', '" + score + "')"; - stmt.addBatch(sql); - stmt.executeBatch(); - psqlClient.commit(); + pstmt.setObject(1, id); + pstmt.setString(2, type); + pstmt.setString(3, "OPEN"); + pstmt.setString(4, patientNames); + pstmt.setTimestamp(5, created); + pstmt.setString(6, dID); + pstmt.setString(7, gID); + pstmt.setFloat(8, score); + pstmt.executeUpdate(); + } catch (SQLException e) { + LOGGER.error("Error executing INSERT statement: {}", e.getMessage(), e); + } finally { + psqlClient.setAutoCommit(true); } } @@ -46,12 +52,11 @@ void insertCandidates( final UUID id, final Float score, final String gID) throws SQLException { - psqlClient.connect(); + psqlClient.connect(AppConfig.POSTGRESQL_NOTIFICATIONS_DB); try (Statement stmt = psqlClient.createStatement()) { psqlClient.setAutoCommit(false); String sql = - "INSERT INTO candidates (notification_id, score, golden_id)" + " VALUES ('" + id + "','" + score + "', '" + gID - + "')"; + "INSERT INTO candidates (notification_id, score, golden_id)" + " VALUES ('" + id + "','" + score + "', '" + gID + "')"; stmt.addBatch(sql); stmt.executeBatch(); psqlClient.commit(); diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPAuditTrail.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPAuditTrail.java index 88a1e0269..2de35d371 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPAuditTrail.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPAuditTrail.java @@ -36,9 +36,8 @@ public void open() { final Deserializer auditEventDeserializer = new JsonPojoDeserializer<>(AuditEvent.class); final Serde auditEventSerde = Serdes.serdeFrom(auditEventSerializer, auditEventDeserializer); final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream auditEventKStream = streamsBuilder.stream( - GlobalConstants.TOPIC_AUDIT_TRAIL, - 
Consumed.with(stringSerde, auditEventSerde)); + final KStream auditEventKStream = + streamsBuilder.stream(GlobalConstants.TOPIC_AUDIT_TRAIL, Consumed.with(stringSerde, auditEventSerde)); auditEventKStream.foreach((key, value) -> psqlAuditTrail.addAuditEvent(value)); final var auditTrailKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); auditTrailKafkaStreams.cleanUp(); diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPInteractions.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPInteractions.java index 111d62c47..4401451f9 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPInteractions.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPInteractions.java @@ -2,6 +2,7 @@ import akka.actor.typed.ActorRef; import akka.actor.typed.ActorSystem; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.kafka.common.serialization.*; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; @@ -9,20 +10,30 @@ import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.Produced; +import org.apache.kafka.streams.processor.TopicNameExtractor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jembi.jempi.AppConfig; +import org.jembi.jempi.shared.kafka.KafkaTopicManager; import org.jembi.jempi.shared.kafka.MyKafkaProducer; +import org.jembi.jempi.shared.models.CustomMU; import org.jembi.jempi.shared.models.GlobalConstants; import org.jembi.jempi.shared.models.InteractionEnvelop; import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; import org.jembi.jempi.shared.serdes.JsonPojoSerializer; import java.util.Properties; +import java.util.concurrent.ExecutionException; + +import static org.jembi.jempi.shared.utils.AppUtils.OBJECT_MAPPER; public final class 
SPInteractions { private static final Logger LOGGER = LogManager.getLogger(SPInteractions.class); + private final TopicNameExtractor topicNameExtractor = + Boolean.TRUE.equals(CustomMU.SEND_INTERACTIONS_TO_EM) + ? (key, value, recordContext) -> value.tag() + : (key, value, recordContext) -> GlobalConstants.TOPIC_INTERACTION_LINKER; private MyKafkaProducer topicEM; private KafkaStreams interactionKafkaStreams = null; @@ -40,22 +51,57 @@ void open( final Serde batchPatientRecordSerde = Serdes.serdeFrom(batchPatientRecordSerializer, batchPatientRecordDeserializer); final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream batchPatientRecordKStream = streamsBuilder.stream( - GlobalConstants.TOPIC_INTERACTION_CONTROLLER, - Consumed.with(stringSerde, batchPatientRecordSerde)); + final KStream batchPatientRecordKStream = + streamsBuilder.stream(GlobalConstants.TOPIC_INTERACTION_CONTROLLER, + Consumed.with(stringSerde, batchPatientRecordSerde)); topicEM = new MyKafkaProducer<>(AppConfig.KAFKA_BOOTSTRAP_SERVERS, GlobalConstants.TOPIC_INTERACTION_EM, - new StringSerializer(), new JsonPojoSerializer<>(), + new StringSerializer(), + new JsonPojoSerializer<>(), AppConfig.KAFKA_CLIENT_ID); batchPatientRecordKStream .peek((key, batchPatient) -> { - topicEM.produceAsync(key, batchPatient, ((metadata, exception) -> { - if (exception != null) { - LOGGER.error(exception.toString()); + if (Boolean.TRUE.equals(CustomMU.SEND_INTERACTIONS_TO_EM)) { + switch (batchPatient.contentType()) { + case BATCH_START_SENTINEL: + try { + LOGGER.debug("START SENTINEL {}", OBJECT_MAPPER.writeValueAsString(batchPatient)); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + if (CustomMU.SEND_INTERACTIONS_TO_EM) { + var kafkaTopicManager = new KafkaTopicManager(AppConfig.KAFKA_BOOTSTRAP_SERVERS); + try { + kafkaTopicManager.createTopic(batchPatient.tag(), + 1, + (short) 1, + 7 * 24 * 60 * 60 * 1000, + 4 * 1024 * 1024); + } catch (ExecutionException 
| InterruptedException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } finally { + kafkaTopicManager.close(); + } + } + break; + case BATCH_END_SENTINEL: + try { + LOGGER.debug("END SENTINEL {}", OBJECT_MAPPER.writeValueAsString(batchPatient)); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + break; + default: + break; } - })); + topicEM.produceAsync(key, batchPatient, ((metadata, exception) -> { + if (exception != null) { + LOGGER.error(exception.toString()); + } + })); + } }) - .to(GlobalConstants.TOPIC_INTERACTION_LINKER, Produced.with(stringSerde, batchPatientRecordSerde)); + .to(topicNameExtractor, Produced.with(stringSerde, batchPatientRecordSerde)); interactionKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); interactionKafkaStreams.cleanUp(); interactionKafkaStreams.start(); diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPLinkStatsMeta.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPLinkStatsMeta.java new file mode 100644 index 000000000..3b3ad18ea --- /dev/null +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPLinkStatsMeta.java @@ -0,0 +1,33 @@ +package org.jembi.jempi.controller; + +import org.apache.kafka.streams.processor.api.ContextualProcessor; +import org.apache.kafka.streams.processor.api.Record; +import org.apache.kafka.streams.state.KeyValueStore; +import org.jembi.jempi.shared.models.LinkStatsMeta; + +public class SPLinkStatsMeta extends ContextualProcessor { + + public static final String STATE_STORE_NAME = "link-stats-meta"; + + @Override + public void process(final Record recordToProcess) { + + final KeyValueStore keyValueStore = super.context().getStateStore(STATE_STORE_NAME); + final var linkStatsMeta = keyValueStore.get("Totals"); + + if (linkStatsMeta == null) { + keyValueStore.put("Totals", recordToProcess.value()); + } else { + final var updatedConfusionMatrix = + 
linkStatsMeta.confusionMatrix().sum(recordToProcess.value().confusionMatrix()); + final var updatedCustomFieldTallies = + linkStatsMeta.customFieldTallies().sum(recordToProcess.value().customFieldTallies()); + final var updatedLinkStatsMeta = new LinkStatsMeta(updatedConfusionMatrix, updatedCustomFieldTallies); + LinkStatsMetaCache.set(updatedLinkStatsMeta); + keyValueStore.put("Totals", updatedLinkStatsMeta); + } + + } + +} + diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPMU.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPMU.java new file mode 100644 index 000000000..481961dd5 --- /dev/null +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPMU.java @@ -0,0 +1,72 @@ +package org.jembi.jempi.controller; + +import akka.actor.typed.ActorRef; +import akka.actor.typed.ActorSystem; +import com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.kafka.common.serialization.Deserializer; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.common.serialization.Serializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.kstream.KStream; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.AppConfig; +import org.jembi.jempi.shared.models.CustomMU; +import org.jembi.jempi.shared.models.GlobalConstants; +import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; +import org.jembi.jempi.shared.serdes.JsonPojoSerializer; +import org.jembi.jempi.shared.utils.AppUtils; + +import java.util.Properties; + +public final class SPMU { + + private static final Logger LOGGER = LogManager.getLogger(SPMU.class); + private KafkaStreams customMUKafkaStreams = null; + + void 
open( + final ActorSystem system, + final ActorRef backEnd) { + LOGGER.info("Stream Processor"); + final Properties props = loadConfig(); + final Serde stringSerde = Serdes.String(); + final Serializer customMUSerializer = new JsonPojoSerializer<>(); + final Deserializer customMUDeserializer = new JsonPojoDeserializer<>(CustomMU.class); + final Serde customMUSerde = Serdes.serdeFrom(customMUSerializer, customMUDeserializer); + final StreamsBuilder streamsBuilder = new StreamsBuilder(); + final KStream customMUKStream = streamsBuilder.stream(GlobalConstants.TOPIC_MU_CONTROLLER, + Consumed.with(stringSerde, customMUSerde)); + customMUKStream + .peek((key, customMU) -> { + try { + LOGGER.debug(AppUtils.OBJECT_MAPPER.writeValueAsString(customMU)); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + } + }) + .to(GlobalConstants.TOPIC_MU_LINKER); + customMUKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); + customMUKafkaStreams.cleanUp(); + customMUKafkaStreams.start(); + Runtime.getRuntime().addShutdownHook(new Thread(customMUKafkaStreams::close)); + LOGGER.info("KafkaStreams started"); + } + + public void close() { + customMUKafkaStreams.close(); + } + + private Properties loadConfig() { + final Properties props = new Properties(); + props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfig.KAFKA_BOOTSTRAP_SERVERS); + props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfig.KAFKA_APPLICATION_ID + "-MU"); + props.put(StreamsConfig.POLL_MS_CONFIG, 10); + return props; + } + + +} diff --git a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPNotification.java b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPNotification.java index 1c20b28c6..6de058e0b 100644 --- a/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPNotification.java +++ b/JeMPI_Apps/JeMPI_Controller/src/main/java/org/jembi/jempi/controller/SPNotification.java @@ -18,6 +18,7 @@ import 
org.jembi.jempi.shared.serdes.JsonPojoSerializer; import java.sql.SQLException; +import java.sql.Timestamp; import java.util.Properties; import java.util.UUID; @@ -46,20 +47,20 @@ void open() { try { UUID id = UUID.randomUUID(); psqlNotifications.insert(id, - value.notificationType().toString(), - value.patientNames(), - value.linkedTo().score(), - value.timeStamp(), - value.linkedTo().gID(), - value.dID()); + value.notificationType().toString(), + value.patientNames(), + value.linkedTo().score(), + new Timestamp(value.timeStamp()), + value.linkedTo().gID(), + value.dID()); for (int i = 0; i < value.candidates().size(); i++) { psqlNotifications.insertCandidates(id, - value.candidates().get(i).score(), - value.candidates().get(i).gID()); + value.candidates().get(i).score(), + value.candidates().get(i).gID()); } } catch (SQLException e) { - LOGGER.debug(e.toString()); + LOGGER.debug("Error inserting notification", e.toString()); } }); diff --git a/JeMPI_Apps/JeMPI_EM/.gitignore b/JeMPI_Apps/JeMPI_EM/.gitignore deleted file mode 100644 index b60e4267a..000000000 --- a/JeMPI_Apps/JeMPI_EM/.gitignore +++ /dev/null @@ -1,114 +0,0 @@ -### Java template -# Compiled class file -*.class - -# Log file -*.log - -# BlueJ files -*.ctxt - -# Mobile Tools for Java (J2ME) -.mtj.tmp/ - -# Package Files # -*.jar -*.war -*.nar -*.ear -*.zip -*.tar.gz -*.rar - -# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml -hs_err_pid* - -### Maven template -target/ -pom.xml.tag -pom.xml.releaseBackup -pom.xml.versionsBackup -pom.xml.next -release.properties -dependency-reduced-pom.xml -buildNumber.properties -.mvn/timing.properties -# https://github.com/takari/maven-wrapper#usage-without-binary-jar -.mvn/wrapper/maven-wrapper.jar - -### JetBrains template -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# 
User-specific stuff -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/**/usage.statistics.xml -.idea/**/dictionaries -.idea/**/shelf - -# Generated files -.idea/**/contentModel.xml - -# Sensitive or high-churn files -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml -.idea/**/dbnavigator.xml - -# Gradle -.idea/**/gradle.xml -.idea/**/libraries - -# SonarLint -.idea/sonarlint - -# Gradle and Maven with auto-import -# When using Gradle or Maven with auto-import, you should exclude module files, -# since they will be recreated, and may cause churn. Uncomment if using -# auto-import. -# .idea/artifacts -# .idea/compiler.xml -# .idea/jarRepositories.xml -# .idea/modules.xml -# .idea/*.iml -# .idea/modules -# *.iml -# *.ipr - -# CMake -cmake-build-*/ - -# Mongo Explorer plugin -.idea/**/mongoSettings.xml - -# File-based project format -*.iws - -# IntelliJ -out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -# Editor-based Rest Client -.idea/httpRequests - -# Android studio 3.1+ serialized cache file -.idea/caches/build_file_checksums.ser - diff --git a/JeMPI_Apps/JeMPI_EM/docker/Dockerfile b/JeMPI_Apps/JeMPI_EM/docker/Dockerfile deleted file mode 100644 index 95f60a448..000000000 --- a/JeMPI_Apps/JeMPI_EM/docker/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -ARG JAVA_VERSION - -FROM eclipse-temurin:${JAVA_VERSION}-jre - -ADD EM-1.0-SNAPSHOT-spring-boot.jar /app/EM-1.0-SNAPSHOT-spring-boot.jar - -RUN printf "#!/bin/bash\n\ -cd /app\n\ -java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/EM-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh - -RUN chmod +x /entrypoint.sh - -ENTRYPOINT 
["/entrypoint.sh"] diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/AppConfig.java deleted file mode 100644 index e7c8f7dba..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/AppConfig.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.jembi.jempi; - -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.io.File; -import java.util.Arrays; - -public final class AppConfig { - - private static final Logger LOGGER = LogManager.getLogger(AppConfig.class); - private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); - private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - private static final Config CONFIG = new Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); - public static final String KAFKA_BOOTSTRAP_SERVERS = CONFIG.getString("kafka.bootstrap.servers"); - public static final String KAFKA_APPLICATION_ID = CONFIG.getString("kafka.application-id"); - public static final String KAFKA_CLIENT_ID = CONFIG.getString("kafka.client-id"); - - public static final String KAFKA_GROUP_ID = CONFIG.getString("kafka.group-id"); - public static final Long BACKEND_N_OLD_VALUES = CONFIG.getLong("backend.n-old-values"); - public static final Long BACKEND_N_NEW_VALUES = CONFIG.getLong("backend.n-new-values"); - public static final String[] DGRAPH_ALPHA_HOSTS = CONFIG.getString("dgraph.hosts").split(","); - public static final int[] DGRAPH_ALPHA_PORTS = 
Arrays.stream(CONFIG.getString("dgraph.ports").split(",")).mapToInt(s -> { - try { - return Integer.parseInt(s); - } catch (NumberFormatException ex) { - return Integer.MIN_VALUE; - } - }).toArray(); - public static final String POSTGRESQL_IP = CONFIG.getString("POSTGRESQL_IP"); - public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); - public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); - - private AppConfig() { - } - - private static class Builder { - private Config conf = ConfigFactory.empty(); - - Builder() { - LOGGER.info("Loading configs first row is highest priority, second row is fallback and so on"); - } - - // This should return the current executing user path - private static String getExecutionDirectory() { - return SYSTEM_PROPERTIES.getString("user.dir"); - } - - Builder withSystemProperties() { - conf = conf.withFallback(SYSTEM_PROPERTIES); - LOGGER.info("Loaded system properties into config"); - return this; - } - - Builder withSystemEnvironment() { - conf = conf.withFallback(SYSTEM_ENVIRONMENT); - LOGGER.info("Loaded system environment into config"); - return this; - } - - Builder withResource(final String resource) { - Config resourceConfig = ConfigFactory.parseResources(resource); - String empty = resourceConfig.entrySet().isEmpty() - ? 
" contains no values" - : ""; - conf = conf.withFallback(resourceConfig); - LOGGER.info("Loaded config file from resource ({}){}", resource, empty); - return this; - } - - Builder withOptionalFile(final String path) { - File secureConfFile = new File(path); - if (secureConfFile.exists()) { - LOGGER.info("Loaded config file from path ({})", path); - conf = conf.withFallback(ConfigFactory.parseFile(secureConfFile)); - } else { - LOGGER.info("Attempted to load file from path ({}) but it was not found", path); - } - return this; - } - - Builder withOptionalRelativeFile(final String path) { - return withOptionalFile(getExecutionDirectory() + path); - } - - Config build() { - // Resolve substitutions. - conf = conf.resolve(); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Logging properties. Make sure sensitive data such as passwords or secrets are not logged!"); - LOGGER.debug(conf.root().render()); - } - return conf; - } - - } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/BackEnd.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/BackEnd.java deleted file mode 100644 index 3584bcdac..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/BackEnd.java +++ /dev/null @@ -1,108 +0,0 @@ -package org.jembi.jempi.em; - -import akka.actor.typed.ActorRef; -import akka.actor.typed.Behavior; -import akka.actor.typed.DispatcherSelector; -import akka.actor.typed.javadsl.*; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jembi.jempi.AppConfig; -import org.jembi.jempi.shared.models.InteractionEnvelop; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executor; - -public final class BackEnd extends AbstractBehavior { - - private static final Logger LOGGER = LogManager.getLogger(BackEnd.class); - - private final Executor ec; - private long receivedCount = 0L; - private long processedCount = 0L; - private boolean taskBusy = false; - - private BackEnd(final 
ActorContext context) { - super(context); - ec = context - .getSystem() - .dispatchers() - .lookup(DispatcherSelector.fromConfig("my-blocking-dispatcher")); - } - - public static Behavior create() { - return Behaviors.setup(BackEnd::new); - } - - @Override - public Receive createReceive() { - ReceiveBuilder builder = newReceiveBuilder(); - return builder - .onMessage(EventPatientReq.class, this::eventPatientReqHandler) - .onMessage(EventWorkTimeReq.class, this::eventWorkTimeReqHandler) - .build(); - } - - private void doWork(final boolean newRecord) { - if (newRecord) { - receivedCount += 1; - } - if (receivedCount - processedCount >= AppConfig.BACKEND_N_NEW_VALUES && !taskBusy) { - final var startOffset = Math.max(0, processedCount - AppConfig.BACKEND_N_OLD_VALUES); - final var count = AppConfig.BACKEND_N_NEW_VALUES + (processedCount - startOffset); - LOGGER.debug("receivedCount({}), startOffset({}), count({})", receivedCount, startOffset, count); - - taskBusy = true; - var cf = CompletableFuture.supplyAsync( - () -> { - LOGGER.info("START EM"); - final var emTask = new CustomEMTask(); - var rc = emTask.doIt(startOffset, count); - LOGGER.info("END EM {}", rc); - return rc; - }, - ec); - - cf.whenComplete((event, exception) -> { - LOGGER.debug("Done: {}", event); - taskBusy = false; - processedCount += AppConfig.BACKEND_N_NEW_VALUES; - if (receivedCount - processedCount >= AppConfig.BACKEND_N_NEW_VALUES) { - getContext().getSelf().tell(EventWorkTimeReq.INSTANCE); - } - }); - - } - } - - private Behavior eventPatientReqHandler(final EventPatientReq request) { - doWork(true); - request.replyTo.tell(new BackEnd.EventPatientRsp(true)); - return Behaviors.same(); - } - - private Behavior eventWorkTimeReqHandler(final EventWorkTimeReq request) { - doWork(false); - return Behaviors.same(); - } - - - private enum EventWorkTimeReq implements Event { - INSTANCE - } - - interface Event { - } - - interface EventResponse { - } - - public record EventPatientReq( - String key, - 
InteractionEnvelop batchInteraction, - ActorRef replyTo) implements Event { - } - - public record EventPatientRsp(boolean result) implements EventResponse { - } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMPatient.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMPatient.java deleted file mode 100644 index 4567bdc5f..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMPatient.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.jembi.jempi.em; - -import org.apache.commons.lang3.StringUtils; -import org.jembi.jempi.shared.models.CustomDemographicData; - -record CustomEMPatient( - String col1, - String col1Phonetic, - String col2, - String col2Phonetic, - String genderAtBirth, - String dateOfBirth, - String city, - String cityPhonetic, - String phoneNumber, - String nationalID) { - - CustomEMPatient(final CustomDemographicData patient) { - this(StringUtils.EMPTY, StringUtils.EMPTY, - StringUtils.EMPTY, StringUtils.EMPTY, - StringUtils.EMPTY, - StringUtils.EMPTY, - StringUtils.EMPTY, StringUtils.EMPTY, // patient.city, CustomEMTask.getPhonetic(patient.city), - StringUtils.EMPTY, // patient.phoneNumber, - null); - } -} - - diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMTask.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMTask.java deleted file mode 100644 index a356990e3..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/CustomEMTask.java +++ /dev/null @@ -1,341 +0,0 @@ -package org.jembi.jempi.em; - -import org.apache.commons.codec.language.DoubleMetaphone; -import org.apache.commons.text.similarity.JaroWinklerSimilarity; -import org.apache.commons.text.similarity.SimilarityScore; -import org.apache.kafka.common.serialization.Deserializer; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; -import org.jembi.jempi.AppConfig; -import org.jembi.jempi.shared.kafka.MyKafkaConsumerByPartition; -import org.jembi.jempi.shared.kafka.MyKafkaProducer; -import org.jembi.jempi.shared.models.CustomMU; -import org.jembi.jempi.shared.models.GlobalConstants; -import org.jembi.jempi.shared.models.InteractionEnvelop; -import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; -import org.jembi.jempi.shared.serdes.JsonPojoSerializer; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.concurrent.ExecutionException; - -import static java.lang.Math.abs; -import static java.lang.Math.log; - -/* - * - * Implements the EM algorithm as specified in section 3 from - * https://www.ons.gov.uk/methodology/methodologicalpublications/generalmethodology/onsworkingpaperseries/developingstandardtoolsfordatalinkagefebruary2021 - * - * https://www.ons.gov.uk/methodology/methodologicalpublications/generalmethodology/onsworkingpaperseries - * /developingstandardtoolsfordatalinkagefebruary2021 - * */ - -class CustomEMTask { - - private static final Logger LOGGER = LogManager.getLogger(CustomEMTask.class); - private static final double LOG2 = log(2.0); - - private static final int IDX_GIVEN_NAME = 0; - private static final int IDX_FAMILY_NAME = 1; - private static final int IDX_GENDER = 2; - private static final int IDX_DOB = 3; - private static final int IDX_CITY = 4; - private static final int IDX_PHONE_NUMBER = 5; - private static final int IDX_NATIONAL_ID = 6; - private static final int N_LINK_FIELDS = 7; - private static final int MISSING_ELEMENT_INT = Integer.MIN_VALUE; - private static final int LEVELS = 3; - private static final DoubleMetaphone DOUBLE_METAPHONE = new DoubleMetaphone(); - - CustomEMTask() { - LOGGER.info("RUN EMTask"); - } - - private static Deserializer stringDeserializer() { - return new StringDeserializer(); - } - - private static Deserializer patientJsonValueDeserializer() { - 
return new JsonPojoDeserializer<>(InteractionEnvelop.class); - } - - static String getPhonetic(final String s) { - return s == null - ? null - : DOUBLE_METAPHONE.doubleMetaphone(s); - } - - private void sendToKafka( - final double[] mHat, - final double[] uHat) throws InterruptedException { - final CustomMU rec = new CustomMU(mHat, uHat); - LOGGER.info("{}", rec); - try { - final var myProducer = new MyKafkaProducer(AppConfig.KAFKA_BOOTSTRAP_SERVERS, - GlobalConstants.TOPIC_MU_LINKER, - new StringSerializer(), - new JsonPojoSerializer<>(), - AppConfig.KAFKA_CLIENT_ID); - myProducer.produceSync("MU", rec); - myProducer.close(); - } catch (ExecutionException e) { - LOGGER.error("{}", e.getMessage()); - } - } - - private int[] setRowLevels( - final int rowNumber, - final SimilarityScore similarityScore, - final String[] left, - final String[] right) { - final int[] row = new int[N_LINK_FIELDS + 1]; - for (int i = 0; i < N_LINK_FIELDS; i++) { - final String l = left[i]; - final String r = right[i]; - if (l == null || r == null) { - row[i] = MISSING_ELEMENT_INT; - } else { - row[i] = similarityScore.apply(l, r) >= 0.92 - ? LEVELS - 1 - : 0; - } - } - row[N_LINK_FIELDS] = rowNumber; - return row; - } - - // Assumption: consumer offset already set to postion to read from. 
- private ArrayList getGammaMatrix( - final MyKafkaConsumerByPartition consumer, - final long nRecords) { - final var jaroWinklerSimilarity = new JaroWinklerSimilarity(); - final var gamma = new ArrayList(); - final var patients = new ArrayList(); - final int[] rowNumber = {0}; - - boolean busy = true; - final int[] count = {0}; - LOGGER.debug("{} {} {}", busy, count, nRecords); - while (busy && count[0] < nRecords) { - var records = consumer.poll(Duration.ofMillis(200)); - if (records.isEmpty()) { - LOGGER.info("No records"); - busy = false; - } else { - records.forEach(r -> { - if (r.value().contentType() == InteractionEnvelop.ContentType.BATCH_INTERACTION && count[0] < nRecords) { - count[0] += 1; - final var v = r.value(); - final var patient = new CustomEMPatient(v.interaction().demographicData()); - patients.forEach(p -> { - var k = 0; - k += (patient.col1Phonetic() == null || !patient.col1Phonetic().equals(p.col1Phonetic())) - ? 0 - : 1; - k += (patient.col2Phonetic() == null || !patient.col2Phonetic().equals(p.col2Phonetic())) - ? 0 - : 1; - k += (patient.cityPhonetic() == null || !patient.cityPhonetic().equals(p.cityPhonetic())) - ? 
0 - : 1; - if (k >= 1) { - final String[] left = {patient.col1(), patient.col2(), - patient.genderAtBirth(), patient.dateOfBirth(), patient.city(), - patient.phoneNumber(), patient.nationalID()}; - final String[] right = {p.col1(), p.col2(), p.genderAtBirth(), - p.dateOfBirth(), p.city(), p.phoneNumber(), p.nationalID()}; - gamma.add(setRowLevels(rowNumber[0]++, jaroWinklerSimilarity, left, right)); - } - }); - patients.add(patient); - } - }); - } - } - return gamma; - } - - private double[] expectation( - final double[] mHat, - final double[] uHat, - final double pHat, - final ArrayList gammaMatrix) { - final long startTime = System.currentTimeMillis(); - final int nRecords = gammaMatrix.size(); - final int nFields = gammaMatrix.get(0).length - 1; - final double[] gHat = new double[nRecords]; - for (int j = 0; j < nRecords; j++) { - final var gRow = gammaMatrix.get(j); - double numerator = pHat; - double denominator = 1.0 - pHat; - for (int i = 0; i < nFields; i++) { - final int gamma = gRow[i]; - if (gamma != MISSING_ELEMENT_INT) { - final double m = mHat[i]; - final double u = uHat[i]; - numerator *= (gamma == (LEVELS - 1) - ? m - : (1.0 - m)); - denominator *= (gamma == (LEVELS - 1) - ? 
u - : (1.0 - u)); - } - } - gHat[j] = numerator / (numerator + denominator); - } - final long endTime = System.currentTimeMillis(); - final long totalTime = endTime - startTime; - LOGGER.debug("Expectation step : {} ms", totalTime); - return gHat; - } - - private double maximization( - final double[] mHat, - final double[] uHat, - final double[] gHat, - final ArrayList gammaMatrix) { - final long startTime = System.currentTimeMillis(); - final int nRecords = gammaMatrix.size(); - final int nLinkFields = gammaMatrix.get(0).length - 1; - final double[] mNumerator = new double[nLinkFields]; - final double[] mDenominator = new double[nLinkFields]; - final double[] uNumerator = new double[nLinkFields]; - final double[] uDenominator = new double[nLinkFields]; - for (int j = 0; j < nRecords; j++) { - final var rowGammas = gammaMatrix.get(j); - final var gJ = gHat[j]; - for (int i = 0; i < nLinkFields; i++) { - final int gamma = rowGammas[i]; - if (gamma != MISSING_ELEMENT_INT) { - if (gamma == LEVELS - 1) { - mNumerator[i] += gJ; - uNumerator[i] += (1.0 - gJ); - } - mDenominator[i] += gJ; - uDenominator[i] += (1.0 - gJ); - } - } - } - for (int i = 0; i < nLinkFields; i++) { - mHat[i] = mNumerator[i] / mDenominator[i]; - uHat[i] = uNumerator[i] / uDenominator[i]; - } - final var gHatSum = Arrays.stream(gHat).sum(); - final long endTime = System.currentTimeMillis(); - final long totalTime = endTime - startTime; - LOGGER.debug("Maximization step : {} ms", totalTime); - return gHatSum / nRecords; - } - - private double calcLogLikelihood( - final ArrayList gammaMatrix, - final double[] gHat, - final double[] mHat, - final double[] uHat, - final double pHat) { - final long startTime = System.currentTimeMillis(); - final int nRecords = gammaMatrix.size(); - double logLikelihood = 0.0; - for (int j = 0; j < nRecords; j++) { - final var gRow = gammaMatrix.get(j); - var mProduct = pHat; - var uProduct = 1.0 - pHat; - for (int i = 0; i < N_LINK_FIELDS; i++) { - final int gamma = 
gRow[i]; - if (gamma != MISSING_ELEMENT_INT) { - final var m = mHat[i]; - final var u = uHat[i]; - mProduct *= (gamma == (LEVELS - 1) - ? m - : 1.0 - m); - uProduct *= (gamma == (LEVELS - 1) - ? u - : 1.0 - u); - } - } - logLikelihood += (gHat[j] * (log(mProduct) / LOG2) + (1.0 - gHat[j] * (log(uProduct) / LOG2))); - } - final long endTime = System.currentTimeMillis(); - final long totalTime = endTime - startTime; - LOGGER.debug("LogLikelihood step: {} ms", totalTime); - return logLikelihood; - } - - public boolean doIt( - final long startOffset, - final long nRecords) { - LOGGER.debug("doIt: {} {}", startOffset, nRecords); - - var topic = GlobalConstants.TOPIC_INTERACTION_EM; - var consumer = new MyKafkaConsumerByPartition<>(AppConfig.KAFKA_BOOTSTRAP_SERVERS, - topic, stringDeserializer(), patientJsonValueDeserializer(), - AppConfig.KAFKA_CLIENT_ID + topic, - AppConfig.KAFKA_GROUP_ID + topic, 500, 10); - try { - consumer.setOffset(0, startOffset); - } catch (Exception e) { - LOGGER.error(e.getLocalizedMessage(), e); - return false; - } - - final var gammaMatrix = getGammaMatrix(consumer, nRecords); - consumer.close(); - - if (gammaMatrix.isEmpty()) { - LOGGER.warn("Empty gamma matrix"); - return false; - } - - LOGGER.debug("gammaMatrix[{}][{}]", gammaMatrix.size(), gammaMatrix.get(0).length); - - final double[] mHat = new double[N_LINK_FIELDS]; - final double[] uHat = new double[N_LINK_FIELDS]; - - mHat[IDX_GIVEN_NAME] = 0.78; - uHat[IDX_GIVEN_NAME] = 0.05; - mHat[IDX_FAMILY_NAME] = 0.84; - uHat[IDX_FAMILY_NAME] = 0.07; - mHat[IDX_GENDER] = 0.90; - uHat[IDX_GENDER] = 0.50; - mHat[IDX_DOB] = 0.97; - uHat[IDX_DOB] = 0.01; - mHat[IDX_CITY] = 0.88; - uHat[IDX_CITY] = 0.79; - mHat[IDX_PHONE_NUMBER] = 0.99; - uHat[IDX_PHONE_NUMBER] = 0.01; - mHat[IDX_NATIONAL_ID] = 0.97; - uHat[IDX_NATIONAL_ID] = 0.01; - - double pHat = 0.5; - - final double[] logLikelihood = {1.0, 2.0}; - for (int loop = 0; abs((logLikelihood[0] - logLikelihood[1])) > (0.00001 * logLikelihood[0]) && 
loop < 30; loop++) { - final var gHat = expectation(mHat, uHat, pHat, gammaMatrix); - pHat = maximization(mHat, uHat, gHat, gammaMatrix); - logLikelihood[0] = logLikelihood[1]; - logLikelihood[1] = calcLogLikelihood(gammaMatrix, gHat, mHat, uHat, pHat); - LOGGER.debug("pHat: {}", pHat); - LOGGER.debug("mHat: {}", mHat); - LOGGER.debug("uHat: {}", uHat); - LOGGER.debug("{}", logLikelihood); - } - - LOGGER.debug("pHat: {}", pHat); - LOGGER.debug("mHat: {}", mHat); - LOGGER.debug("uHat: {}", uHat); - LOGGER.debug("{}", gammaMatrix.size()); - try { - sendToKafka(mHat, uHat); - return true; - } catch (InterruptedException e) { - LOGGER.error(e.getLocalizedMessage()); - } - return false; - - } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/EM.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/EM.java deleted file mode 100644 index b58e9e5e6..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/EM.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.jembi.jempi.em; - -import akka.actor.typed.ActorRef; -import akka.actor.typed.ActorSystem; -import akka.actor.typed.Behavior; -import akka.actor.typed.javadsl.Behaviors; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jembi.jempi.AppConfig; - -public final class EM { - - private static final Logger LOGGER = LogManager.getLogger(EM.class); - - private EM() { - } - - public static void main(final String[] args) { - new EM().run(); - } - - public static Behavior create() { - return Behaviors.setup(context -> { - ActorRef backEnd = context.spawn(BackEnd.create(), "BackEnd"); - context.watch(backEnd); - final FrontEndStream frontEndStream = new FrontEndStream(); - frontEndStream.open(context.getSystem(), backEnd); - return Behaviors.receive(Void.class) - .onSignal(akka.actor.typed.Terminated.class, sig -> Behaviors.stopped()) - .build(); - }); - } - - private void run() { - LOGGER.info("KAFKA: {} {} {}", - 
AppConfig.KAFKA_BOOTSTRAP_SERVERS, - AppConfig.KAFKA_APPLICATION_ID, - AppConfig.KAFKA_CLIENT_ID); - ActorSystem.create(EM.create(), "EMApp"); - } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/FrontEndStream.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/FrontEndStream.java deleted file mode 100644 index 4beccaf27..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/FrontEndStream.java +++ /dev/null @@ -1,96 +0,0 @@ -package org.jembi.jempi.em; - -import akka.actor.typed.ActorRef; -import akka.actor.typed.ActorSystem; -import akka.actor.typed.javadsl.AskPattern; -import org.apache.kafka.common.serialization.Serde; -import org.apache.kafka.common.serialization.Serdes; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsBuilder; -import org.apache.kafka.streams.StreamsConfig; -import org.apache.kafka.streams.kstream.Consumed; -import org.apache.kafka.streams.kstream.KStream; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jembi.jempi.AppConfig; -import org.jembi.jempi.shared.models.GlobalConstants; -import org.jembi.jempi.shared.models.InteractionEnvelop; -import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; -import org.jembi.jempi.shared.serdes.JsonPojoSerializer; - -import java.util.Properties; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -public final class FrontEndStream { - - private static final Logger LOGGER = LogManager.getLogger(FrontEndStream.class); - private KafkaStreams patientKafkaStreams; - - FrontEndStream() { - LOGGER.info("FrontEndStream constructor"); - } - - void addPatient( - final ActorSystem system, - final ActorRef backEnd, - final String key, - final InteractionEnvelop batchInteraction) { - if (batchInteraction.contentType() == 
InteractionEnvelop.ContentType.BATCH_INTERACTION) { - final CompletionStage result = - AskPattern.ask( - backEnd, - replyTo -> new BackEnd.EventPatientReq(key, batchInteraction, replyTo), - java.time.Duration.ofSeconds(3), - system.scheduler()); - final var completableFuture = result.toCompletableFuture(); - try { - final var reply = completableFuture.get(5, TimeUnit.SECONDS); - if (reply != null) { - if (!reply.result()) { - LOGGER.error("BACK END RESPONSE(ERROR)"); - } - } else { - LOGGER.error("Incorrect class response"); - } - } catch (InterruptedException | ExecutionException | TimeoutException e) { - LOGGER.error(e.getMessage()); - } - } - } - - public void open( - final ActorSystem system, - final ActorRef backEnd) { - LOGGER.info("EM Stream Processor"); - final Properties props = loadConfig(); - final Serde stringSerde = Serdes.String(); - final Serde batchPatientRecordSerde = Serdes.serdeFrom(new JsonPojoSerializer<>(), - new JsonPojoDeserializer<>(InteractionEnvelop.class)); - final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream patientRecordKStream = streamsBuilder.stream( - GlobalConstants.TOPIC_INTERACTION_EM, - Consumed.with(stringSerde, batchPatientRecordSerde)); - patientRecordKStream.foreach((key, patient) -> addPatient(system, backEnd, key, patient)); - patientKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); - patientKafkaStreams.cleanUp(); - patientKafkaStreams.start(); - LOGGER.info("KafkaStreams started"); - } - - public void close() { - LOGGER.warn("Stream closed"); - patientKafkaStreams.close(); - } - - private Properties loadConfig() { - final Properties props = new Properties(); - props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfig.KAFKA_APPLICATION_ID); - props.put(StreamsConfig.CLIENT_ID_CONFIG, AppConfig.KAFKA_CLIENT_ID); - props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfig.KAFKA_BOOTSTRAP_SERVERS); - return props; - } - -} diff --git 
a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/FieldComparator.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/FieldComparator.java deleted file mode 100644 index 1b2f4ce3c..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/FieldComparator.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.jembi.jempi.em.util; - -import org.apache.commons.text.similarity.JaroWinklerDistance; -import org.apache.commons.text.similarity.LevenshteinDistance; -import org.jembi.jempi.shared.models.GoldenRecord; -import org.jembi.jempi.shared.models.Interaction; - -public class FieldComparator { - - private boolean compareField(final String field1, final String field2, final FieldType fieldType) { - double similarity = 0.0; - switch (fieldType) { - case GIVEN_NAME: - case FAMILY_NAME: - similarity = new JaroWinklerDistance().apply(field1, field2); - break; - case NATIONAL_ID: - similarity = field1.equals(field2) ? 1.0 : 0.0; - break; - case DATE_OF_BIRTH: - // Use a suitable date comparison library to compare the datesbreak; - case CITY: - similarity = LevenshteinDistance.getDefaultInstance().apply(field1, field2); - break; - case PHONE_NUMBER: - similarity = LevenshteinDistance.getDefaultInstance().apply(field1, field2); - break; - default: - //to be handled - break; - } - - return similarity >= 0.5; - } - - public final boolean compareFields(final Interaction patient, final GoldenRecord candidate) { -// boolean givenNameAgreement = compareField(patient.demographicData().getGivenName(), candidate.demographicData().getGivenName(), FieldType.GIVEN_NAME); -// boolean familyNameAgreement = compareField(patient.demographicData().getFamilyName(), candidate.demographicData().getFamilyName(), FieldType.FAMILY_NAME); -// boolean dobAgreement = compareField(patient.demographicData().getDob(), candidate.demographicData().getDob(), FieldType.DATE_OF_BIRTH); -// boolean cityAgreement = compareField(patient.demographicData().getCity(), 
candidate.demographicData().getCity(), FieldType.CITY); -// boolean nationalIdAgreement = compareField(patient.demographicData().nationalId, candidate.demographicData().getNationalId(), FieldType.NATIONAL_ID); - - return false; // givenNameAgreement && familyNameAgreement && dobAgreement; // && cityAgreement && nationalIdAgreement; - } - public enum FieldType { - GIVEN_NAME("given_name"), - FAMILY_NAME("family_name"), - NATIONAL_ID("national_id"), - DATE_OF_BIRTH("date_of_birth"), - CITY("city"), - PHONE_NUMBER("phone_number"); - - private final String fieldName; - - FieldType(final String someFieldName) { - this.fieldName = someFieldName; - } - - public String getFieldName() { - return fieldName; - } - } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/GammaMatrixGenerator.java b/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/GammaMatrixGenerator.java deleted file mode 100644 index 6f6d90d24..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/java/org/jembi/jempi/em/util/GammaMatrixGenerator.java +++ /dev/null @@ -1,72 +0,0 @@ -package org.jembi.jempi.em.util; - - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jembi.jempi.AppConfig; -import org.jembi.jempi.libmpi.LibMPI; - -import java.util.Locale; -import java.util.UUID; - -public class GammaMatrixGenerator { - private static final Logger LOGGER = LogManager.getLogger(GammaMatrixGenerator.class); - private static LibMPI libMPI = null; - private final FieldComparator fieldComparator = new FieldComparator(); - - public GammaMatrixGenerator() { - if (libMPI == null) { - openMPI(true); - } - LOGGER.debug(libMPI); - } - - private static void openMPI(final boolean useDGraph) { - if (useDGraph) { - final var host = AppConfig.DGRAPH_ALPHA_HOSTS; - final var port = AppConfig.DGRAPH_ALPHA_PORTS; - libMPI = new LibMPI(AppConfig.GET_LOG_LEVEL, - host, - port, - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - "CLIENT_ID_EM-" + UUID.randomUUID()); - } else 
{ - libMPI = new LibMPI(String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/notifications", AppConfig.POSTGRESQL_IP, AppConfig.POSTGRESQL_PORT), - "postgres", - null, - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - "CLIENT_ID_EM-" + UUID.randomUUID()); - } - } - -// public final List getRandomPatients(final int scale) { -// -// try { -// LOGGER.debug("I just got Patients from libMPI "); -// return libMPI.getRandomPatients(); -// -// } catch (Exception e) { -// LOGGER.debug(e.getMessage()); -// return null; -// } -// } -// public final List> generateGammaMatrix() { -// List randomPatients = libMPI.getRandomPatients(); -// -// List> gammaMatrix = new ArrayList<>(); -// -// for (Interaction patient : randomPatients) { -// List agreementVector = new ArrayList<>(); -// List candidateGoldenRecords = libMPI.getCandidates(patient.demographicData(), true); -// for (GoldenRecord candidate : candidateGoldenRecords) { -// boolean agreement = fieldComparator.compareFields(patient, candidate); -// agreementVector.add(agreement); -// } -// -// gammaMatrix.add(agreementVector); -// } -// -// return gammaMatrix; -// } - -} diff --git a/JeMPI_Apps/JeMPI_EM/src/main/resources/application.conf b/JeMPI_Apps/JeMPI_EM/src/main/resources/application.conf deleted file mode 100644 index bc62d008b..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/resources/application.conf +++ /dev/null @@ -1,46 +0,0 @@ -akka.http { - server { - idle-timeout = 10 s - request-timeout = 5 s - linger-timeout = 5 s - } -} - -http-server { - host = "0.0.0.0" - port = 50000 -} - -worker { - max-post-records = 20 -} - -backend { - n-old-values=10000 - n-new-values=10000 -} - -kafka { - bootstrap.servers = ${KAFKA_SERVERS} - application-id = "em-app-id" - client-id = "em-client-id" - group-id = "em-group-id" -} -dgraph { - hosts = ${DGRAPH_HOSTS} - ports = ${DGRAPH_PORTS} -} - -postgres { - server = "postgresql:5432" -} - -my-blocking-dispatcher { - type = Dispatcher - executor = "thread-pool-executor" - thread-pool-executor { - 
fixed-pool-size = 512 - } - throughput = 1 -} - diff --git a/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j.properties b/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j.properties deleted file mode 100644 index 5ee72e6dd..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j.properties +++ /dev/null @@ -1,10 +0,0 @@ -# Root logger option -log4j.rootLogger=DEBUG, stdout - -# Direct log messages to stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.err -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n - -#log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=DEBUG diff --git a/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j2.properties b/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j2.properties deleted file mode 100644 index 687120e00..000000000 --- a/JeMPI_Apps/JeMPI_EM/src/main/resources/log4j2.properties +++ /dev/null @@ -1,14 +0,0 @@ -status = warn - -appender.console.type = Console -appender.console.name = LogToConsole -appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} %c{1}:%L - %m%n - -logger.app.name = org.jembi.jempi -logger.app.level = debug -logger.app.additivity = false -logger.app.appenderRef.console.ref = LogToConsole - -rootLogger.level = info -rootLogger.appenderRef.stdout.ref = LogToConsoles diff --git a/JeMPI_Apps/JeMPI_EM_Scala/.gitignore b/JeMPI_Apps/JeMPI_EM_Scala/.gitignore new file mode 100644 index 000000000..5ecfca1f8 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/.gitignore @@ -0,0 +1,24 @@ +### SBT template +# Simple Build Tool +# http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control + +.idea +dist/* +target/ +lib_managed/ +src_managed/ +project/target/ +project/project/ +project/boot/ +project/plugins/project/ +.history +.cache +.lib/ + +### Scala template +*.class +*.log 
+ +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* + diff --git a/JeMPI_Apps/JeMPI_EM_Scala/.scalafmt.conf b/JeMPI_Apps/JeMPI_EM_Scala/.scalafmt.conf new file mode 100644 index 000000000..259f078cf --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/.scalafmt.conf @@ -0,0 +1,2 @@ +version = 3.7.17 +runner.dialect = scala213 \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_EM_Scala/build.sbt b/JeMPI_Apps/JeMPI_EM_Scala/build.sbt new file mode 100644 index 000000000..5eda3af29 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/build.sbt @@ -0,0 +1,53 @@ +ThisBuild / version := "0.1.0-SNAPSHOT" + +ThisBuild / scalaVersion := "2.13.12" + +lazy val root = (project in file(".")) + .settings( + name := "JeMPI_EM_Scala", + scalacOptions += "-deprecation", + libraryDependencies ++= Seq( + // https://mvnrepository.com/artifact/org.scala-lang.modules/scala-parallel-collections + "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4", + // https://mvnrepository.com/artifact/com.typesafe.scala-logging/scala-logging + "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5", + // https://mvnrepository.com/artifact/org.apache.kafka/kafka-streams + "org.apache.kafka" % "kafka-streams" % "3.6.1", + // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients + "org.apache.kafka" % "kafka-clients" % "3.6.1", + // https://mvnrepository.com/artifact/org.apache.kafka/kafka-streams-scala + "org.apache.kafka" %% "kafka-streams-scala" % "3.6.1", + // https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310 + "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.16.1", + "com.fasterxml.jackson.core" % "jackson-databind" % "2.16.1", + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.16.1", + // https://mvnrepository.com/artifact/ch.qos.logback/logback-classic + "ch.qos.logback" % "logback-classic" % "1.4.14" + ), + assembly / assemblyJarName := 
"em-scala-fatjar-1.0.jar", + assembly / assemblyMergeStrategy := { + case x if Assembly.isConfigFile(x) => MergeStrategy.concat + case PathList(ps @ _*) + if Assembly.isReadme(ps.last) || Assembly.isLicenseFile(ps.last) => + MergeStrategy.rename + case PathList("META-INF", xs @ _*) => + (xs map { + _.toLowerCase + }) match { + case ("manifest.mf" :: Nil) | ("index.list" :: Nil) | + ("dependencies" :: Nil) => + MergeStrategy.discard + case ps @ (x :: xs) + if ps.last.endsWith(".sf") || ps.last.endsWith(".dsa") => + MergeStrategy.discard + case "plexus" :: xs => + MergeStrategy.discard + case "services" :: xs => + MergeStrategy.filterDistinctLines + case ("spring.schemas" :: Nil) | ("spring.handlers" :: Nil) => + MergeStrategy.filterDistinctLines + case _ => MergeStrategy.first + } + case _ => MergeStrategy.first + } + ) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/build.sh b/JeMPI_Apps/JeMPI_EM_Scala/build.sh new file mode 100755 index 000000000..f4c496b23 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/build.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e +set -u + +source "$PROJECT_DEVOPS_DIR"/conf/images/conf-app-images.sh + +JAR_FILE=${EM_SCALA_JAR} +APP_IMAGE=${EM_SCALA_IMAGE} +APP=em_scala + +source ../build-scala-app-image.sh diff --git a/devops/JeMPI_TestData/Zambia/metadata/private/.gitignore b/JeMPI_Apps/JeMPI_EM_Scala/docker/.gitignore similarity index 53% rename from devops/JeMPI_TestData/Zambia/metadata/private/.gitignore rename to JeMPI_Apps/JeMPI_EM_Scala/docker/.gitignore index a5baada18..c9d5c946a 100644 --- a/devops/JeMPI_TestData/Zambia/metadata/private/.gitignore +++ b/JeMPI_Apps/JeMPI_EM_Scala/docker/.gitignore @@ -1,3 +1,3 @@ * !.gitignore - +!Dockerfile diff --git a/JeMPI_Apps/JeMPI_EM_Scala/docker/Dockerfile b/JeMPI_Apps/JeMPI_EM_Scala/docker/Dockerfile new file mode 100644 index 000000000..34fa9ae0b --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/docker/Dockerfile @@ -0,0 +1,13 @@ +ARG JAVA_VERSION + +FROM eclipse-temurin:${JAVA_VERSION}-jre + +ADD 
em-scala-fatjar-1.0.jar /app/em-scala-fatjar-1.0.jar + +RUN printf "#!/bin/bash\n\ +cd /app\n\ +java -server -XX:MaxRAMPercentage=80 -jar /app/em-scala-fatjar-1.0.jar\n" > /entrypoint.sh + +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/JeMPI_Apps/JeMPI_EM_Scala/project/build.properties b/JeMPI_Apps/JeMPI_EM_Scala/project/build.properties new file mode 100644 index 000000000..0aa5c39b8 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/project/build.properties @@ -0,0 +1 @@ +sbt.version = 1.9.8 diff --git a/JeMPI_Apps/JeMPI_EM_Scala/project/plugins.sbt b/JeMPI_Apps/JeMPI_EM_Scala/project/plugins.sbt new file mode 100644 index 000000000..9f6562b02 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/project/plugins.sbt @@ -0,0 +1,3 @@ +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.5") +addSbtPlugin("org.jmotor.sbt" % "sbt-dependency-updates" % "1.2.7") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") diff --git a/JeMPI_Apps/JeMPI_EM/push.sh b/JeMPI_Apps/JeMPI_EM_Scala/push.sh similarity index 90% rename from JeMPI_Apps/JeMPI_EM/push.sh rename to JeMPI_Apps/JeMPI_EM_Scala/push.sh index c031003e8..7188dae0f 100755 --- a/JeMPI_Apps/JeMPI_EM/push.sh +++ b/JeMPI_Apps/JeMPI_EM_Scala/push.sh @@ -6,9 +6,9 @@ set -u source $PROJECT_DEVOPS_DIR/conf.env source $PROJECT_DEVOPS_DIR/conf/images/conf-app-images.sh -APP_IMAGE=$EM_IMAGE +APP_IMAGE=$EM_SCALA_IMAGE docker tag ${APP_IMAGE} ${REGISTRY_NODE_IP}/${APP_IMAGE} docker push ${REGISTRY_NODE_IP}/${APP_IMAGE} docker rmi ${REGISTRY_NODE_IP}/${APP_IMAGE} - \ No newline at end of file + diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/resources/logback.xml b/JeMPI_Apps/JeMPI_EM_Scala/src/main/resources/logback.xml new file mode 100644 index 000000000..7109b58ae --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/resources/logback.xml @@ -0,0 +1,21 @@ + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/ContributionSplit.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/ContributionSplit.scala new file mode 100644 index 000000000..b7e1adf20 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/ContributionSplit.scala @@ -0,0 +1,3 @@ +package org.jembi.jempi.em + +case class ContributionSplit(matched: Double, unmatched: Double) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomFields.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomFields.scala new file mode 100644 index 000000000..f828844ec --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomFields.scala @@ -0,0 +1,18 @@ +package org.jembi.jempi.em + +import scala.collection.immutable.ArraySeq + +object CustomFields { + + val FIELDS: ArraySeq[Field] = ArraySeq( + Field("givenName", 0), + Field("familyName", 1), + Field("gender", 2), + Field("dob", 3), + Field("city", 4), + Field("phoneNumber", 5), + Field("nationalId", 6) + ) + +} + diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomInteractionEnvelop.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomInteractionEnvelop.scala new file mode 100644 index 000000000..f4fec43de --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/CustomInteractionEnvelop.scala @@ -0,0 +1,45 @@ +package org.jembi.jempi.em + + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties + + +@JsonIgnoreProperties(ignoreUnknown = true) +case class CustomInteractionEnvelop( + contentType: String, + tag: Option[String], + stan: Option[String], + interaction: Option[Interaction] +) {} + +@JsonIgnoreProperties(ignoreUnknown = true) +case class Interaction( + uniqueInteractionData: UniqueInteractionData, + demographicData: DemographicData +) + +@JsonIgnoreProperties(ignoreUnknown = true) +case class 
UniqueInteractionData(auxId: String) + +@JsonIgnoreProperties(ignoreUnknown = true) +case class DemographicData( + givenName: String, + familyName: String, + gender: String, + dob: String, + city: String, + phoneNumber: String, + nationalId: String +) { + + def toArray: Array[String] = + Array(givenName, + familyName, + gender, + dob, + city, + phoneNumber, + nationalId) + +} + diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Scala.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Scala.scala new file mode 100644 index 000000000..d581ba0f6 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Scala.scala @@ -0,0 +1,97 @@ +package org.jembi.jempi.em + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule} +import com.typesafe.scalalogging.LazyLogging +import org.apache.kafka.common.serialization.{Serde, Serdes} +import org.apache.kafka.streams.kstream.{Consumed, KStream} +import org.apache.kafka.streams.{KafkaStreams, StreamsBuilder, StreamsConfig} +import org.jembi.jempi.em.kafka.Config.{CFG_KAFKA_APPLICATION_ID, CFG_KAFKA_BOOTSTRAP_SERVERS, CFG_KAFKA_CLIENT_ID, CFG_KAFKA_TOPIC_INTERACTION_EM} +import org.jembi.jempi.em.kafka.Producer + +import java.util.Properties +import scala.collection.immutable.ArraySeq +import scala.collection.mutable.ArrayBuffer +import scala.collection.parallel.immutable.ParVector +import scala.util.Random + +object EM_Scala extends LazyLogging { + + private val mapper = new ObjectMapper() with ClassTagExtensions + mapper.registerModule(DefaultScalaModule) + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + + private val buffer = new ArrayBuffer[Array[String]]() + + def main(args: Array[String]): Unit = { + + val props = loadConfig() + val stringSerde: Serde[String] = Serdes.String() + val streamsBuilder: StreamsBuilder = new 
StreamsBuilder() + val patientRecordKStream: KStream[String, String] = streamsBuilder.stream( + CFG_KAFKA_TOPIC_INTERACTION_EM, + Consumed.`with`(stringSerde, stringSerde) + ) + patientRecordKStream.foreach((_, json) => { + val interactionEnvelop = + mapper.readValue(json, classOf[CustomInteractionEnvelop]) + interactionEnvelop.contentType match { + case "BATCH_START_SENTINEL" => buffer.clearAndShrink() + case "BATCH_END_SENTINEL" => + val parVector = new ParVector( + if (buffer.length <= 50_000) buffer.toVector + else Random.shuffle(buffer.toVector).take(50_000) + ) + buffer.clearAndShrink() + val emRunnable: EM_Runnable = + new EM_Runnable(interactionEnvelop.tag.get, parVector) + val thread: Thread = new Thread(emRunnable) + thread.start() + case "BATCH_INTERACTION" => + if (interactionEnvelop.interaction.isDefined) { + val interaction = + interactionEnvelop.interaction.get.demographicData.toArray + buffer += interaction + } + } + }) + val patientKafkaStreams: KafkaStreams = + new KafkaStreams(streamsBuilder.build(), props) + patientKafkaStreams.cleanUp() + patientKafkaStreams.start() + + } + + private def loadConfig(): Properties = { + val props = new Properties() + props.put(StreamsConfig.APPLICATION_ID_CONFIG, CFG_KAFKA_APPLICATION_ID) + props.put(StreamsConfig.CLIENT_ID_CONFIG, CFG_KAFKA_CLIENT_ID) + props.put( + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, + CFG_KAFKA_BOOTSTRAP_SERVERS + ) + props + } + + private class EM_Runnable( + val tag: String, + val interactions: ParVector[Array[String]] + ) extends Runnable { + + def run(): Unit = { + val interactions_ : ParVector[ArraySeq[String]] = + interactions.map((fields: Array[String]) => + ArraySeq.unsafeWrapArray(fields) + ) + val (mu, ms) = Profile.profile(EM_Task.run(interactions_)) + + CustomFields.FIELDS.zipWithIndex.foreach(x => + Utils.printMU(x._1.name, mu(x._2)) + ) + logger.info(s"$ms ms") + Producer.send(tag, mu); + } + + } + +} diff --git 
a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Task.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Task.scala new file mode 100644 index 000000000..b0242b2a9 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/EM_Task.scala @@ -0,0 +1,215 @@ +package org.jembi.jempi.em + +import com.typesafe.scalalogging.LazyLogging +import org.jembi.jempi.em.CustomFields.FIELDS +import org.jembi.jempi.em.Utils._ + +import java.lang.Math.log +import scala.annotation.tailrec +import scala.collection.immutable.ArraySeq +import scala.collection.parallel.immutable.ParVector +import scala.util.Random + +object EM_Task extends LazyLogging { + + def run(interactions: ParVector[ArraySeq[String]]): ArraySeq[MU] = { + + val (gamma, ms2) = Profile.profile( + Gamma.getGamma( + Map[String, Long](), + interactions.head, + interactions.tail + ) + ) + logger.info(s"$ms2 ms") + + if (LOCK_U) { + @tailrec + def randomlyChooseIndexes( + size: Int, + soFar: Set[Int], + remaining: Int + ): Set[Int] = { + if (remaining == 0) soFar + else { + val nextValue = Random.nextInt(size) + if (soFar.contains(nextValue)) { + randomlyChooseIndexes(size, soFar, remaining) + } else { + randomlyChooseIndexes(size, soFar + nextValue, remaining - 1) + } + } + } + + val randIndexes = randomlyChooseIndexes( + interactions.size, + Set[Int](), + Math.min(20_000, (interactions.size * 2) / 4) + ) + val randInteractions: ParVector[ArraySeq[String]] = new ParVector( + randIndexes.map(idx => interactions(idx)).toVector + ) + val (tallies2, ms1) = Profile.profile( + scan(isPairMatch2(0.92), randInteractions) + ) + val lockedU = computeMU(tallies2) + FIELDS.zipWithIndex.foreach(x => + printTalliesAndMU( + x._1.name, + tallies2.colTally(x._2), + lockedU(x._2) + ) + ) + logger.info(s"$ms1 ms") + runEM(0, lockedU.map(x => MU(0.8, x.u)), gamma) + } else { + runEM(0, for { _ <- FIELDS } yield MU(m = 0.8, u = 0.0001), gamma) + } + } + + @tailrec + private def runEM( + 
iterations: Int, + currentMU: ArraySeq[MU], + gamma: Map[String, Long] + ): ArraySeq[MU] = { + + case class GammaMetrics( + matches: Array[Int], + count: Long, + weight: Double, + odds: Double, + probability: Double, + tallies: Tallies + ) {} + + def computeGammaMetrics(matches: Array[Int], count: Long): GammaMetrics = { + val w = matches.zipWithIndex + .map(matchResult => { + val m = currentMU.apply(matchResult._2).m + val u = currentMU.apply(matchResult._2).u + matchResult._1 match { + case GAMMA_TAG_NOT_EQUAL => log((1.0 - m) / (1.0 - u)) / LOG_BASE + case GAMMA_TAG_EQUAL => log(m / u) / LOG_BASE + case _ => 0.0 + } + }) + .fold(LOG_LAMBDA)(_ + _) + val odds = Math.pow(BASE, w) + val probability = Math.max(1e-10, odds / (1.0 + odds)) + val tallies: Tallies = Tallies( + ArraySeq.unsafeWrapArray( + matches.zipWithIndex.map(m => + m._1 match { + case GAMMA_TAG_NOT_EQUAL => + Tally(b = probability * count, d = (1.0 - probability) * count) + case GAMMA_TAG_EQUAL => + Tally(a = probability * count, c = (1.0 - probability) * count) + case _ => Tally() + } + ) + ) + ) + GammaMetrics(matches, count, w, odds, probability, tallies) + } + + def matchAsInts(x: String): Array[Int] = { + x.slice(1, x.length - 1) + .split(',') + .map(y => + y.trim() match { + case GAMMA_TAG_EQUAL_STR => GAMMA_TAG_EQUAL + case GAMMA_TAG_NOT_EQUAL_STR => GAMMA_TAG_NOT_EQUAL + case GAMMA_TAG_MISSING_STR => GAMMA_TAG_MISSING + } + ) + } + + logger.info(s"iteration: $iterations") + if (iterations >= MAX_EM_ITERATIONS) { + currentMU + } else { + if (iterations == 2) { + logger.info("break") + } + val gamma_ = + gamma.toVector + .map(x => x._1 -> (matchAsInts(x._1), x._2)) + .toMap + val mapGammaMetrics = + gamma_.map(x => x._1 -> computeGammaMetrics(x._2._1, x._2._2)) + val tallies = mapGammaMetrics.values + .map(x => x.tallies) + .fold(Tallies())((x, y) => addTallies(x, y)) + val newMU = computeMU(tallies) + FIELDS.zipWithIndex.foreach(x => + printTalliesAndMU(x._1.name, tallies.colTally(x._2), 
newMU(x._2)) + ) + if (LOCK_U) { + runEM(iterations + 1, mergeMU(newMU, currentMU), gamma) + } else { + runEM(iterations + 1, newMU, gamma) + } + } + } + + private def scan( + isMatch: (ArraySeq[String], ArraySeq[String]) => ContributionSplit, + interactions: ParVector[ArraySeq[String]] + ): Tallies = { + + def tallyFieldsContribution( + left: ArraySeq[String], + right: ArraySeq[String] + ): Tallies = { + + def tallyFieldContribution(split: ContributionSplit, col: Int): Tally = { + if (left.apply(col).isEmpty || right.apply(col).isEmpty) { + Tally(b = split.matched, d = split.unmatched) + } else { + val score = Jaro.jaro(left.apply(col), right.apply(col)) + if (score > JARO_THRESHOLD) + Tally(a = split.matched, c = split.unmatched) + else + Tally(b = split.matched, d = split.unmatched) + } + } + + val split = isMatch(left, right) + Tallies( + FIELDS.map(field => tallyFieldContribution(split, field.csvCol)) + ) + } + + @tailrec + def outerLoop( + acc: Tallies, + left: ArraySeq[String], + right: ParVector[ArraySeq[String]] + ): Tallies = { + + def innerLoop( + left: ArraySeq[String], + interactions: ParVector[ArraySeq[String]] + ): Tallies = { + interactions + .map(right => tallyFieldsContribution(left, right)) + .fold(Tallies()) { (x, y) => addTallies(x, y) } + } + + if (right.isEmpty) { + acc + } else { + outerLoop( + addTallies(acc, innerLoop(left, right)), + right.head, + right.tail + ) + } + + } + + outerLoop(new Tallies, interactions.head, interactions.tail) + } + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Field.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Field.scala new file mode 100644 index 000000000..d4ac39c49 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Field.scala @@ -0,0 +1,3 @@ +package org.jembi.jempi.em + +case class Field(name: String, csvCol: Int) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Gamma.scala 
b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Gamma.scala new file mode 100644 index 000000000..2f3f0ddd6 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Gamma.scala @@ -0,0 +1,69 @@ +package org.jembi.jempi.em + +import scala.annotation.tailrec +import scala.collection.immutable.ArraySeq +import scala.collection.parallel.immutable.ParVector + +object Gamma { + + @tailrec + def getGamma( + gamma: Map[String, Long], + left: ArraySeq[String], + right: ParVector[ArraySeq[String]] + ): Map[String, Long] = { + + def innerLoop( + left: ArraySeq[String], + interactions: ParVector[ArraySeq[String]] + ): Map[String, Long] = { + + def combineOp( + m1: Map[String, Long], + m2: Map[String, Long] + ): Map[String, Long] = { + m1 ++ m2.map { case (k: String, v: Long) => + k -> (v + m1.getOrElse(k, 0L)) + } + } + + def sequenceOp(m1: Map[String, Long], t: String): Map[String, Long] = { + m1 ++ Map(t -> (1L + m1.getOrElse(t, 0L))) + } + + val gamma: ParVector[String] = + interactions.map(right => gammaKey(left, right)) + gamma.aggregate(Map[String, Long]())(sequenceOp, combineOp) + } + + if (right.isEmpty) { + gamma + } else { + getGamma( + gamma ++ innerLoop(left, right) + .map { case (k: String, v: Long) => + k -> (v + gamma.getOrElse(k, 0L)) + }, + right.head, + right.tail + ) + } + } + + private def gammaKey( + left: ArraySeq[String], + right: ArraySeq[String] + ): String = { + val key: ArraySeq[Int] = (left zip right).map { case (l, r) => + if (l.isEmpty || r.isEmpty) { + Utils.GAMMA_TAG_MISSING + } else if (l.equals(r)) { + Utils.GAMMA_TAG_EQUAL + } else { + Utils.GAMMA_TAG_NOT_EQUAL + } + } + key.mkString("<", ",", ">") + } + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Jaro.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Jaro.scala new file mode 100644 index 000000000..870b684a2 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Jaro.scala @@ -0,0 
+1,37 @@ +package org.jembi.jempi.em + +object Jaro { + + def jaro(s1: String, s2: String): Double = { + val s1_len = s1.length + val s2_len = s2.length + if (s1_len == 0 && s2_len == 0) return 1.0 + val match_distance = Math.max(s1_len, s2_len) / 2 - 1 + val s1_matches = Array.ofDim[Boolean](s1_len) + val s2_matches = Array.ofDim[Boolean](s2_len) + var matches = 0 + for (i <- 0 until s1_len) { + val start = Math.max(0, i - match_distance) + val end = Math.min(i + match_distance + 1, s2_len) + start until end find { j => !s2_matches(j) && s1(i) == s2(j) } match { + case Some(j) => + s1_matches(i) = true + s2_matches(j) = true + matches += 1 + case None => + } + } + if (matches == 0) return 0.0 + var t = 0.0 + var k = 0 + 0 until s1_len filter s1_matches foreach { i => + while (!s2_matches(k)) k += 1 + if (s1(i) != s2(k)) t += 0.5 + k += 1 + } + + val m = matches.toDouble + (m / s1_len + m / s2_len + (m - t) / m) / 3.0 + } + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/MU.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/MU.scala new file mode 100644 index 000000000..14051843a --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/MU.scala @@ -0,0 +1,3 @@ +package org.jembi.jempi.em + +case class MU(m: Double, u: Double) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Profile.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Profile.scala new file mode 100644 index 000000000..77a1f2132 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Profile.scala @@ -0,0 +1,8 @@ +package org.jembi.jempi.em + +object Profile { + + def profile[R](code: => R, t: Long = System.nanoTime): (R, Double) = + (code, ((System.nanoTime - t) / 1000) / 1000.0) + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tallies.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tallies.scala new file mode 100644 
index 000000000..83c09becf --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tallies.scala @@ -0,0 +1,6 @@ +package org.jembi.jempi.em + +import CustomFields.FIELDS +import scala.collection.immutable.ArraySeq + +case class Tallies(colTally: ArraySeq[Tally] = FIELDS.map(_ => Tally())) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tally.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tally.scala new file mode 100644 index 000000000..8d154db28 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Tally.scala @@ -0,0 +1,8 @@ +package org.jembi.jempi.em + +case class Tally( + a: Double = 0.0, + b: Double = 0.0, + c: Double = 0.0, + d: Double = 0.0 +) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Utils.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Utils.scala new file mode 100644 index 000000000..c27d42c3a --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/Utils.scala @@ -0,0 +1,116 @@ +package org.jembi.jempi.em + +import com.typesafe.scalalogging.LazyLogging +import Jaro.jaro +import org.jembi.jempi.em.CustomFields.FIELDS + +import scala.collection.immutable.ArraySeq + +object Utils extends LazyLogging { + + val BASE: Double = 2.0 + val LOG_BASE: Double = Math.log(BASE) + val LAMBDA: Double = 1.0 / 2_000_000.0 + val LOG_LAMBDA: Double = Math.log(LAMBDA / (1.0 - LAMBDA)) / LOG_BASE + val JARO_THRESHOLD: Double = 0.92 + val MAX_EM_ITERATIONS = 100 + val GAMMA_TAG_MISSING: Int = 0 + val GAMMA_TAG_NOT_EQUAL: Int = 1 + val GAMMA_TAG_EQUAL: Int = 2 + val GAMMA_TAG_MISSING_STR: String = GAMMA_TAG_MISSING.toString + val GAMMA_TAG_NOT_EQUAL_STR: String = GAMMA_TAG_NOT_EQUAL.toString + val GAMMA_TAG_EQUAL_STR: String = GAMMA_TAG_EQUAL.toString + val LOCK_U = false + private val MIN_U: Double = 1e-10 + private val MAX_M: Double = 1.0 - 1e-10 + + def printTalliesAndMU(label: String, tally: Tally, mu: 
MU): Unit = { + logger.info( + f"$label%-15s ${tally.a}%15.1f ${tally.b}%15.1f ${tally.c}%15.1f ${tally.d}%15.1f -> ${mu.m}%9.7f, ${mu.u}%9.7f" + ) + } + + def printMU(label: String, mu: MU): Unit = { + logger.info(f"$label%-15s -> ${mu.m}%9.7f, ${mu.u}%9.7f") + } + + def mergeMU(mSource: ArraySeq[MU], uSource: ArraySeq[MU]): ArraySeq[MU] = { + mSource.zipWithIndex.map(x => + MU(mSource.apply(x._2).m, uSource.apply(x._2).u) + ) + } + + def computeMU(tallies: Tallies): ArraySeq[MU] = { + tallies.colTally.map(tally => + MU( + m = Math.min(tally.a / (tally.a + tally.b), MAX_M), + u = Math.max(tally.c / (tally.c + tally.d), MIN_U) + ) + ) + } + + /* + def isPairMatch1( + left: ArraySeq[String], + right: ArraySeq[String] + ): ContributionSplit = { + if ( + left + .apply(Utils.COL_REC_NUM) + .regionMatches(true, 4, right.apply(Utils.COL_REC_NUM), 4, 10) + ) { + ContributionSplit(1.0, 0.0) + } else { + ContributionSplit(0.0, 1.0) + } + } + */ + + def isPairMatch2( + fieldThreshold: Double + )(left: ArraySeq[String], right: ArraySeq[String]): ContributionSplit = { + if ( + Array + .range(0, left.length) + .map(idx => + if (jaro(left.apply(idx), right.apply(idx)) > fieldThreshold) 1 else 0 + ) + .sum >= 4 + ) { + ContributionSplit(1.0, 0.0) + } else { + ContributionSplit(0.0, 1.0) + } + } + + def isPairMatch3( + muSeq: ArraySeq[MU], + fieldThreshold: Double + )(left: ArraySeq[String], right: ArraySeq[String]): ContributionSplit = { + + val omega = muSeq.zipWithIndex.foldLeft(LOG_LAMBDA)((acc, v) => + acc + (if (jaro(left.apply(v._2), right.apply(v._2)) > fieldThreshold) { + Math.log(v._1.m / v._1.u) / LOG_BASE + } else { + Math.log((1.0 - v._1.m) / (1.0 - v._1.u)) / LOG_BASE + }) + ) + val odds = Math.pow(Utils.BASE, omega) // anti log + val probability = odds / (1.0 + odds) + ContributionSplit(probability, 1.0 - probability) + } + + def addTallies(x: Tallies, y: Tallies): Tallies = { + + def addTally(x: Tally, y: Tally): Tally = { + Tally(x.a + y.a, x.b + y.b, x.c + y.c, 
x.d + y.d) + } + + Tallies( + ArraySeq + .range(0, FIELDS.length) // x.colTally.length) + .map(idx => addTally(x.colTally(idx), y.colTally(idx))) + ) + } + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Config.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Config.scala new file mode 100644 index 000000000..dd34be5ff --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Config.scala @@ -0,0 +1,12 @@ +package org.jembi.jempi.em.kafka + +object Config { + + val CFG_KAFKA_APPLICATION_ID = "AppID_EM_Scala" + val CFG_KAFKA_CLIENT_ID = "ClientID_EM_Scala" + val CFG_KAFKA_BOOTSTRAP_SERVERS = "kafka-01:9092" + val CFG_KAFKA_TOPIC_INTERACTION_EM = "JeMPI-interaction-em" + val CFG_KAFKA_TOPIC_MU_LINKER = "JeMPI-mu-linker" + val CFG_KAFKA_TOPIC_MU_CONTROLLER = "JeMPI-mu-controller" + +} diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/CustomMU.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/CustomMU.scala new file mode 100644 index 000000000..e08e6ae4b --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/CustomMU.scala @@ -0,0 +1,34 @@ +package org.jembi.jempi.em.kafka + + +import org.jembi.jempi.em.MU + +import scala.collection.immutable.ArraySeq + +case class CustomMU( + tag: String, + givenName: Probability, + familyName: Probability, + gender: Probability, + dob: Probability, + city: Probability, + phoneNumber: Probability, + nationalId: Probability +) + +object CustomMU { + + def fromArraySeq(tag: String, muSeq: ArraySeq[MU]): CustomMU = + CustomMU( + tag, + Probability(muSeq.apply(0).m, muSeq.apply(0).u), + Probability(muSeq.apply(1).m, muSeq.apply(1).u), + Probability(muSeq.apply(2).m, muSeq.apply(2).u), + Probability(muSeq.apply(3).m, muSeq.apply(3).u), + Probability(muSeq.apply(4).m, muSeq.apply(4).u), + Probability(muSeq.apply(5).m, muSeq.apply(5).u), + Probability(muSeq.apply(6).m, 
muSeq.apply(6).u) + ) + +} + diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Probability.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Probability.scala new file mode 100644 index 000000000..1ef9e8957 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Probability.scala @@ -0,0 +1,3 @@ +package org.jembi.jempi.em.kafka + +case class Probability(m: Double, u: Double) diff --git a/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Producer.scala b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Producer.scala new file mode 100644 index 000000000..c27a95679 --- /dev/null +++ b/JeMPI_Apps/JeMPI_EM_Scala/src/main/scala/org/jembi/jempi/em/kafka/Producer.scala @@ -0,0 +1,48 @@ +package org.jembi.jempi.em.kafka + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.{ + ClassTagExtensions, + DefaultScalaModule +} +import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} +import org.jembi.jempi.em.MU +import org.jembi.jempi.em.kafka.Config.{ + CFG_KAFKA_BOOTSTRAP_SERVERS, + CFG_KAFKA_TOPIC_MU_CONTROLLER, + CFG_KAFKA_TOPIC_MU_LINKER +} + +import java.util.Properties +import scala.collection.immutable.ArraySeq + +object Producer { + + def send(tag: String, muSeq: ArraySeq[MU]): Unit = { + val mapper = new ObjectMapper() with ClassTagExtensions + mapper.registerModule(DefaultScalaModule) + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + + val props = new Properties() + props.put("bootstrap.servers", CFG_KAFKA_BOOTSTRAP_SERVERS) + props.put( + "key.serializer", + "org.apache.kafka.common.serialization.StringSerializer" + ) + props.put( + "value.serializer", + "org.apache.kafka.common.serialization.StringSerializer" + ) + + val producer = new KafkaProducer[String, String](props) + + val customMU = CustomMU.fromArraySeq(tag, muSeq) + + val json = 
mapper.writeValueAsString(customMU) + + val record = new ProducerRecord(CFG_KAFKA_TOPIC_MU_CONTROLLER, "key", json) + producer.send(record) + producer.close() + } + +} diff --git a/JeMPI_Apps/JeMPI_ETL/docker/Dockerfile b/JeMPI_Apps/JeMPI_ETL/docker/Dockerfile index b7519fee5..24fcf06f8 100644 --- a/JeMPI_Apps/JeMPI_ETL/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_ETL/docker/Dockerfile @@ -6,7 +6,7 @@ ADD ETL-1.0-SNAPSHOT-spring-boot.jar /app/ETL-1.0-SNAPSHOT-spring-boot.jar RUN printf "#!/bin/bash\n\ cd /app\n\ -java -server --enable-preview -XX:+UseZGC -jar /app/ETL-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh +java -server -jar /app/ETL-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh RUN chmod +x /entrypoint.sh diff --git a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/AppConfig.java index 6a8d68ad4..6eab23753 100644 --- a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/AppConfig.java +++ b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/AppConfig.java @@ -13,15 +13,14 @@ public final class AppConfig { private static final Logger LOGGER = LogManager.getLogger(AppConfig.class); private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - private static final Config CONFIG = new Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); + private static final Config CONFIG = new Builder().withSystemEnvironment() + .withSystemProperties() + .withOptionalRelativeFile("/conf/server.production.conf") + .withOptionalRelativeFile("/conf/server.staging.conf") + .withOptionalRelativeFile("/conf/server.test.conf") + 
.withResource("application.local.conf") + .withResource("application.conf") + .build(); public static final String KAFKA_BOOTSTRAP_SERVERS = CONFIG.getString("KAFKA_BOOTSTRAP_SERVERS"); public static final String KAFKA_APPLICATION_ID = CONFIG.getString("KAFKA_APPLICATION_ID"); public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); diff --git a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/CustomSourceRecordStream.java b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/CustomSourceRecordStream.java index 85106f227..0fcdd74fd 100644 --- a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/CustomSourceRecordStream.java +++ b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/CustomSourceRecordStream.java @@ -40,34 +40,29 @@ public void open() { final Properties props = loadConfig(); final Serde stringSerde = Serdes.String(); final Serializer interactionEnvelopSerializer = new JsonPojoSerializer<>(); - final Deserializer interactionEnvelopDeserializer = new JsonPojoDeserializer<>( - InteractionEnvelop.class); - final Serde interactionEnvelopSerde = Serdes.serdeFrom(interactionEnvelopSerializer, - interactionEnvelopDeserializer); + final Deserializer interactionEnvelopDeserializer = + new JsonPojoDeserializer<>(InteractionEnvelop.class); + final Serde interactionEnvelopSerde = + Serdes.serdeFrom(interactionEnvelopSerializer, interactionEnvelopDeserializer); final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream sourceKStream = streamsBuilder.stream( - GlobalConstants.TOPIC_INTERACTION_ASYNC_ETL, - Consumed.with(stringSerde, - interactionEnvelopSerde)); - sourceKStream - .map((key, rec) -> { - if (rec.contentType() == InteractionEnvelop.ContentType.BATCH_INTERACTION) { - final var interaction = rec.interaction(); - final var demographicData = interaction.demographicData(); - final var newEnvelop = new InteractionEnvelop( - rec.contentType(), - rec.tag(), - rec.stan(), - new Interaction(null, 
- rec.interaction().sourceId(), - interaction.uniqueInteractionData(), - demographicData.clean())); - return KeyValue.pair(key, newEnvelop); - } else { - return KeyValue.pair(key, rec); - } - }) - .to(GlobalConstants.TOPIC_INTERACTION_CONTROLLER, Produced.with(stringSerde, interactionEnvelopSerde)); + final KStream sourceKStream = + streamsBuilder.stream(GlobalConstants.TOPIC_INTERACTION_ETL, Consumed.with(stringSerde, interactionEnvelopSerde)); + sourceKStream.map((key, rec) -> { + if (rec.contentType() == InteractionEnvelop.ContentType.BATCH_INTERACTION) { + final var interaction = rec.interaction(); + final var demographicData = interaction.demographicData(); + final var newEnvelop = new InteractionEnvelop(rec.contentType(), + rec.tag(), + rec.stan(), + new Interaction(null, + rec.interaction().sourceId(), + interaction.uniqueInteractionData(), + demographicData.clean())); + return KeyValue.pair(key, newEnvelop); + } else { + return KeyValue.pair(key, rec); + } + }).to(GlobalConstants.TOPIC_INTERACTION_CONTROLLER, Produced.with(stringSerde, interactionEnvelopSerde)); interactionKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); interactionKafkaStreams.cleanUp(); interactionKafkaStreams.start(); diff --git a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/Main.java b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/Main.java index 935534ee1..0a0515e0b 100644 --- a/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/Main.java +++ b/JeMPI_Apps/JeMPI_ETL/src/main/java/org/jembi/jempi/etl/Main.java @@ -16,22 +16,16 @@ public static void main(final String[] args) { } public Behavior create() { - return Behaviors.setup( - context -> { - final var customSourceRecordStream = new CustomSourceRecordStream(); - customSourceRecordStream.open(); - return Behaviors.receive(Void.class) - .onSignal(Terminated.class, - sig -> Behaviors.stopped()) - .build(); - }); + return Behaviors.setup(context -> { + final var customSourceRecordStream = new 
CustomSourceRecordStream(); + customSourceRecordStream.open(); + return Behaviors.receive(Void.class).onSignal(Terminated.class, sig -> Behaviors.stopped()).build(); + }); } private void run() { LOGGER.info("ETL"); - LOGGER.info("KAFKA: {} {}", - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - AppConfig.KAFKA_APPLICATION_ID); - ActorSystem.create(this.create(), "ETL"); + LOGGER.info("KAFKA: {} {}", AppConfig.KAFKA_BOOTSTRAP_SERVERS, AppConfig.KAFKA_APPLICATION_ID); + ActorSystem.create(this.create(), "ETL"); } } diff --git a/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Cache.java b/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Cache.java index d38c04d44..524224734 100644 --- a/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Cache.java +++ b/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Cache.java @@ -55,7 +55,7 @@ private static final class GoldenRecordBuffer { private static String[] getGoldenRecordVector(final ApiModels.ApiExpandedGoldenRecord expandedGoldenRecord) { final var demographicFields = CustomDemographicData.class.getDeclaredFields(); final String[] vector = new String[3 + demographicFields.length + 1]; - vector[0] = expandedGoldenRecord.goldenRecord().uniqueGoldenRecordData().auxId(); + vector[0] = ""; // expandedGoldenRecord.goldenRecord().uniqueGoldenRecordData().auxId(); vector[1] = expandedGoldenRecord.goldenRecord().uid(); vector[2] = expandedGoldenRecord.goldenRecord().uniqueGoldenRecordData().auxDateCreated().toString(); for (int i = 0; i < demographicFields.length; i++) { @@ -72,7 +72,7 @@ private static String[] getGoldenRecordVector(final ApiModels.ApiExpandedGoldenR private static String[] getInteractionVector(final ApiModels.ApiInteractionWithScore interactionWithScore) { final var demographicFields = CustomDemographicData.class.getDeclaredFields(); final String[] vector = new String[3 + demographicFields.length + 1]; - vector[0] = interactionWithScore.interaction().uniqueInteractionData().auxId(); + vector[0] = ""; 
// interactionWithScore.interaction().uniqueInteractionData().auxId(); vector[1] = interactionWithScore.interaction().uid(); vector[2] = interactionWithScore.interaction().uniqueInteractionData().auxDateCreated().toString(); for (int i = 0; i < demographicFields.length; i++) { @@ -103,9 +103,8 @@ private void bufferFillPrev() { expandedGoldenRecord.interactionsWithScore() .forEach(interactionWithScore -> rowData.add(getInteractionVector(interactionWithScore))); base = (base + (BUFFER_SIZE - 1)) % BUFFER_SIZE; - buffer[base] = new BufferItem(fromGidIndex[0] + i, - buffer[(base + 1) % BUFFER_SIZE].rowNumber - rowData.size(), - rowData); + buffer[base] = + new BufferItem(fromGidIndex[0] + i, buffer[(base + 1) % BUFFER_SIZE].rowNumber - rowData.size(), rowData); } synchronized (LOCK) { Cache.totalFetched -= fillSize; @@ -141,13 +140,11 @@ private void bufferFill() { int rowNumber = 0; for (int i = 0; i < BUFFER_SIZE; i++) { int startRow = rowNumber; - final var expandedGoldenRecord = - API_CLIENT.getGoldenRecordsInteractions(List.of(GID_BUFFER.get(i))).get(0); + final var expandedGoldenRecord = API_CLIENT.getGoldenRecordsInteractions(List.of(GID_BUFFER.get(i))).get(0); final ArrayList rowData = new ArrayList<>(); rowData.add(getGoldenRecordVector(expandedGoldenRecord)); - expandedGoldenRecord - .interactionsWithScore() - .forEach(interactionWithScore -> rowData.add(getInteractionVector(interactionWithScore))); + expandedGoldenRecord.interactionsWithScore() + .forEach(interactionWithScore -> rowData.add(getInteractionVector(interactionWithScore))); rowNumber += (1 + expandedGoldenRecord.interactionsWithScore().size()); buffer[i] = new BufferItem(i, startRow, rowData); } diff --git a/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Main.java b/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Main.java index cfc4c78cd..f3477c40b 100644 --- a/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Main.java +++ 
b/JeMPI_Apps/JeMPI_GUI/src/main/java/org/jembi/jempi/gui/Main.java @@ -53,8 +53,7 @@ private static class MyTableModel extends AbstractTableModel { Collections.addAll(colNamesList, new ArrayList<>(Arrays.stream(DEMOGRAPHIC_FIELDS) .map(Field::getName) - .toList()) - .toArray(new String[0])); + .toList()).toArray(new String[0])); Collections.addAll(colNamesList, "Score"); colNames = colNamesList.toArray(new String[0]); } @@ -151,9 +150,9 @@ private void setUp() { totalWidth.addAndGet(setColWidth("Aux ID", charWidth, 17)); totalWidth.addAndGet(setColWidth("UID", charWidth, 10)); totalWidth.addAndGet(setColWidth("Created", charWidth, 28)); - Arrays.stream(DEMOGRAPHIC_FIELDS).sequential().forEach(x -> totalWidth.addAndGet(setColWidth(x.getName(), - charWidth, - 15))); + Arrays.stream(DEMOGRAPHIC_FIELDS) + .sequential() + .forEach(x -> totalWidth.addAndGet(setColWidth(x.getName(), charWidth, 15))); totalWidth.addAndGet(setColWidth("Score", charWidth, 10)); this.setPreferredScrollableViewportSize(new Dimension(totalWidth.get(), 30 * Math.round((metrics.getHeight() * 1.4F)))); this.setFillsViewportHeight(true); diff --git a/JeMPI_Apps/JeMPI_LibAPI/checkstyle/suppression.xml b/JeMPI_Apps/JeMPI_LibAPI/checkstyle/suppression.xml index 92666b726..ae92a113b 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/checkstyle/suppression.xml +++ b/JeMPI_Apps/JeMPI_LibAPI/checkstyle/suppression.xml @@ -28,7 +28,7 @@ diff --git a/JeMPI_Apps/JeMPI_LibAPI/pom.xml b/JeMPI_Apps/JeMPI_LibAPI/pom.xml index e7e66dd24..95ffcc809 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/pom.xml +++ b/JeMPI_Apps/JeMPI_LibAPI/pom.xml @@ -14,8 +14,8 @@ jar - 17 - 17 + ${java.version} + ${java.version} UTF-8 @@ -194,9 +194,8 @@ org.apache.maven.plugins maven-compiler-plugin - 17 - 17 - --enable-preview + ${maven.compiler.source} + ${maven.compiler.source} diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Ask.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Ask.java index 15a8f12ba..2675b815b 
100644 --- a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Ask.java +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Ask.java @@ -9,7 +9,7 @@ import org.jembi.jempi.shared.models.*; import java.io.File; -import java.time.LocalDate; +import java.sql.Timestamp; import java.util.List; import java.util.concurrent.CompletionStage; @@ -23,22 +23,20 @@ private Ask() { static CompletionStage countGoldenRecords( final ActorSystem actorSystem, final ActorRef backEnd) { - CompletionStage stage = AskPattern - .ask(backEnd, - BackEnd.CountGoldenRecordsRequest::new, - java.time.Duration.ofSeconds(10), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + BackEnd.CountGoldenRecordsRequest::new, + java.time.Duration.ofSeconds(10), + actorSystem.scheduler()); return stage.thenApply(response -> response); } static CompletionStage countInteractions( final ActorSystem actorSystem, final ActorRef backEnd) { - CompletionStage stage = AskPattern - .ask(backEnd, - BackEnd.CountInteractionsRequest::new, - java.time.Duration.ofSeconds(10), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + BackEnd.CountInteractionsRequest::new, + java.time.Duration.ofSeconds(10), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -46,22 +44,16 @@ static CompletionStage countInteractions( static CompletionStage countRecords( final ActorSystem actorSystem, final ActorRef backEnd) { - CompletionStage stage = AskPattern - .ask(backEnd, - BackEnd.CountRecordsRequest::new, - java.time.Duration.ofSeconds(10), - actorSystem.scheduler()); + CompletionStage stage = + AskPattern.ask(backEnd, BackEnd.CountRecordsRequest::new, java.time.Duration.ofSeconds(10), actorSystem.scheduler()); return stage.thenApply(response -> response); } static CompletionStage getGidsAll( final ActorSystem actorSystem, final ActorRef backEnd) { - CompletionStage stage = AskPattern - .ask(backEnd, - 
BackEnd.GetGidsAllRequest::new, - java.time.Duration.ofSeconds(30), - actorSystem.scheduler()); + CompletionStage stage = + AskPattern.ask(backEnd, BackEnd.GetGidsAllRequest::new, java.time.Duration.ofSeconds(30), actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -70,11 +62,12 @@ static CompletionStage getNotifications( final ActorRef backEnd, final int limit, final int offset, - final LocalDate date, - final String state) { + final Timestamp startDate, + final Timestamp endDate, + final List states) { CompletionStage stage = AskPattern .ask(backEnd, - replyTo -> new BackEnd.GetNotificationsRequest(replyTo, limit, offset, date, state), + replyTo -> new BackEnd.GetNotificationsRequest(replyTo, limit, offset, startDate, endDate, states), java.time.Duration.ofSeconds(30), actorSystem.scheduler()); return stage.thenApply(response -> response); @@ -84,11 +77,12 @@ static CompletionStage getExpandedGolde final ActorSystem actorSystem, final ActorRef backEnd, final String gid) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetExpandedGoldenRecordRequest(replyTo, gid), - java.time.Duration.ofSeconds(5), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetExpandedGoldenRecordRequest( + replyTo, + gid), + java.time.Duration.ofSeconds(5), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -96,11 +90,12 @@ static CompletionStage getInteraction( final ActorSystem actorSystem, final ActorRef backEnd, final String iid) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetInteractionRequest(replyTo, iid), - java.time.Duration.ofSeconds(5), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetInteractionRequest( + replyTo, + iid), + java.time.Duration.ofSeconds(5), + actorSystem.scheduler()); return stage.thenApply(response -> response); 
} @@ -123,11 +118,12 @@ static CompletionStage getExpandedGold final ActorSystem actorSystem, final ActorRef backEnd, final List gidList) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetExpandedGoldenRecordsRequest(replyTo, gidList), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetExpandedGoldenRecordsRequest( + replyTo, + gidList), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -135,11 +131,12 @@ static CompletionStage getExpandedInter final ActorSystem actorSystem, final ActorRef backEnd, final List uidList) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetExpandedInteractionsRequest(replyTo, uidList), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetExpandedInteractionsRequest( + replyTo, + uidList), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -148,11 +145,13 @@ static CompletionStage patchGoldenRecord( final ActorRef backEnd, final String goldenId, final GoldenRecordUpdateRequestPayload payload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PatchGoldenRecordRequest(replyTo, goldenId, payload.fields()), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PatchGoldenRecordRequest( + replyTo, + goldenId, + payload.fields()), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -163,15 +162,15 @@ static CompletionStage patchIidGidLink( final String newGoldenId, final String patientId, final Float score) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new 
BackEnd.PatchIidGidLinkRequest(replyTo, - currentGoldenId, - newGoldenId, - patientId, - score), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PatchIidGidLinkRequest( + replyTo, + currentGoldenId, + newGoldenId, + patientId, + score), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -180,11 +179,26 @@ static CompletionStage patchIidNewGidLink( final ActorRef backEnd, final String currentGoldenId, final String patientId) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PatchIidNewGidLinkRequest(replyTo, currentGoldenId, patientId, 2.0F), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PatchIidNewGidLinkRequest( + replyTo, + currentGoldenId, + patientId, + 2.0F), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); + return stage.thenApply(response -> response); + } + + static CompletionStage getSQLDashboardData( + final ActorSystem actorSystem, + final ActorRef backEnd + ) { + final CompletionStage stage = AskPattern + .ask(backEnd, + replyTo -> new BackEnd.SQLDashboardDataRequest(replyTo), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -193,11 +207,13 @@ static CompletionStage getGidsPaged( final ActorRef backEnd, final long offset, final long length) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetGidsPagedRequest(replyTo, offset, length), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetGidsPagedRequest( + replyTo, + offset, + length), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -205,11 
+221,12 @@ static CompletionStage getGoldenRecor final ActorSystem actorSystem, final ActorRef backEnd, final String gid) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetGoldenRecordAuditTrailRequest(replyTo, gid), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetGoldenRecordAuditTrailRequest( + replyTo, + gid), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -217,11 +234,12 @@ static CompletionStage getInteractionA final ActorSystem actorSystem, final ActorRef backEnd, final String uid) { - final CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.GetInteractionAuditTrailRequest(replyTo, uid), - java.time.Duration.ofSeconds(6), - actorSystem.scheduler()); + final CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.GetInteractionAuditTrailRequest( + replyTo, + uid), + java.time.Duration.ofSeconds(6), + actorSystem.scheduler()); return stage.thenApply(response -> response); } @@ -229,11 +247,12 @@ static CompletionStage postSimpleSearchGoldenRe final ActorSystem actorSystem, final ActorRef backEnd, final ApiModels.ApiSimpleSearchRequestPayload searchRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostSimpleSearchGoldenRecordsRequest(replyTo, searchRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostSimpleSearchGoldenRecordsRequest( + replyTo, + searchRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); return stage.thenApply(response -> ApiModels.ApiExpandedGoldenRecordsPaginatedResultSet.fromLibMPIPaginatedResultSet( response.records())); } @@ -242,23 +261,24 @@ static CompletionStage postFilterGids( final ActorSystem actorSystem, final 
ActorRef backEnd, final FilterGidsRequestPayload filterRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostFilterGidsRequest(replyTo, filterRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); - return stage.thenApply(response -> ApiModels.ApiFiteredGidsPaginatedResultSet.fromLibMPIPaginatedResultSet( - response.goldenIds())); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostFilterGidsRequest(replyTo, + filterRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); + return stage.thenApply(response -> ApiModels.ApiFiteredGidsPaginatedResultSet.fromLibMPIPaginatedResultSet(response.goldenIds())); } + static CompletionStage postFilterGidsWithInteractionCount( final ActorSystem actorSystem, final ActorRef backEnd, final FilterGidsRequestPayload filterRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostFilterGidsWithInteractionCountRequest(replyTo, filterRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostFilterGidsWithInteractionCountRequest( + replyTo, + filterRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); return stage.thenApply(response -> ApiModels.ApiFiteredGidsWithInteractionCountPaginatedResultSet.fromPaginatedGidsWithInteractionCount( response.goldenIds())); } @@ -267,39 +287,40 @@ static CompletionStage postSimpleSearchInteract final ActorSystem actorSystem, final ActorRef backEnd, final ApiModels.ApiSimpleSearchRequestPayload simpleSearchRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostSimpleSearchInteractionsRequest(replyTo, simpleSearchRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); - return stage.thenApply(response -> 
ApiModels.ApiInteractionsPaginatedResultSet.fromLibMPIPaginatedResultSet( - response.records())); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostSimpleSearchInteractionsRequest( + replyTo, + simpleSearchRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); + return stage.thenApply(response -> ApiModels.ApiInteractionsPaginatedResultSet.fromLibMPIPaginatedResultSet(response.records())); } static CompletionStage postCustomSearchGoldenRecords( final ActorSystem actorSystem, final ActorRef backEnd, final CustomSearchRequestPayload customSearchRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostCustomSearchGoldenRecordsRequest(replyTo, customSearchRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); - return stage.thenApply( - response -> ApiModels.ApiExpandedGoldenRecordsPaginatedResultSet.fromLibMPIPaginatedResultSet(response.records())); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostCustomSearchGoldenRecordsRequest( + replyTo, + customSearchRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); + return stage.thenApply(response -> ApiModels.ApiExpandedGoldenRecordsPaginatedResultSet.fromLibMPIPaginatedResultSet( + response.records())); } static CompletionStage postCustomSearchInteractions( final ActorSystem actorSystem, final ActorRef backEnd, final CustomSearchRequestPayload customSearchRequestPayload) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostCustomSearchInteractionsRequest(replyTo, customSearchRequestPayload), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); - return stage.thenApply( - response -> ApiModels.ApiInteractionsPaginatedResultSet.fromLibMPIPaginatedResultSet(response.records())); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostCustomSearchInteractionsRequest( + replyTo, + 
customSearchRequestPayload), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); + return stage.thenApply(response -> ApiModels.ApiInteractionsPaginatedResultSet.fromLibMPIPaginatedResultSet(response.records())); } static CompletionStage postUpdateNotification( @@ -309,8 +330,7 @@ static CompletionStage postUpdateNotific CompletionStage stage = AskPattern .ask(backEnd, replyTo -> new BackEnd.PostUpdateNotificationRequest(replyTo, - notificationRequest.notificationId(), - notificationRequest.state()), + notificationRequest.notificationId()), java.time.Duration.ofSeconds(11), actorSystem.scheduler()); return stage.thenApply(response -> response); @@ -321,11 +341,13 @@ public static CompletionStage postUploadCsvFi final ActorRef backEnd, final FileInfo info, final File file) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.PostUploadCsvFileRequest(replyTo, info, file), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.PostUploadCsvFileRequest( + replyTo, + info, + file), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); return stage.thenApply(response -> response); } diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/BackEnd.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/BackEnd.java index 6bc435a3a..5095587ca 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/BackEnd.java +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/BackEnd.java @@ -13,6 +13,8 @@ import org.jembi.jempi.libmpi.MpiGeneralError; import org.jembi.jempi.libmpi.MpiServiceError; import org.jembi.jempi.shared.models.*; +import org.jembi.jempi.shared.models.dashboard.NotificationStats; +import org.jembi.jempi.shared.models.dashboard.SQLDashboardData; import org.jembi.jempi.shared.utils.AppUtils; import java.io.File; @@ -21,7 +23,7 @@ import java.nio.file.NoSuchFileException; 
import java.nio.file.Paths; import java.sql.SQLException; -import java.time.LocalDate; +import java.sql.Timestamp; import java.time.LocalDateTime; import java.util.*; @@ -32,7 +34,8 @@ public final class BackEnd extends AbstractBehavior { private final Integer pgPort; private final String pgUser; private final String pgPassword; - private final String pgDatabase; + private final String pgNotificationsDb; + private final String pgAuditDb; private final PsqlNotifications psqlNotifications; private final PsqlAuditTrail psqlAuditTrail; private LibMPI libMPI = null; @@ -49,21 +52,29 @@ private BackEnd( final int sqlPort, final String sqlUser, final String sqlPassword, - final String sqlDatabase, + final String sqlNotificationsDb, + final String sqlAuditDb, final String kafkaBootstrapServers, final String kafkaClientId) { super(context); - this.libMPI = null; - this.dgraphHosts = dgraphHosts; - this.dgraphPorts = dgraphPorts; - this.pgIP = sqlIP; - this.pgPort = sqlPort; - this.pgUser = sqlUser; - this.pgPassword = sqlPassword; - this.pgDatabase = sqlDatabase; - psqlNotifications = new PsqlNotifications(sqlIP, sqlPort, sqlDatabase, sqlUser, sqlPassword); - psqlAuditTrail = new PsqlAuditTrail(sqlIP, sqlPort, sqlDatabase, sqlUser, sqlPassword); - openMPI(kafkaBootstrapServers, kafkaClientId, debugLevel); + try { + this.libMPI = null; + this.dgraphHosts = dgraphHosts; + this.dgraphPorts = dgraphPorts; + this.pgIP = sqlIP; + this.pgPort = sqlPort; + this.pgUser = sqlUser; + this.pgPassword = sqlPassword; + this.pgNotificationsDb = sqlNotificationsDb; + this.pgAuditDb = sqlAuditDb; + psqlNotifications = new PsqlNotifications(sqlIP, sqlPort, sqlNotificationsDb, sqlUser, sqlPassword); + psqlAuditTrail = new PsqlAuditTrail(sqlIP, sqlPort, sqlAuditDb, sqlUser, sqlPassword); + openMPI(kafkaBootstrapServers, kafkaClientId, debugLevel); + } catch (Exception e) { + LOGGER.error(e.getMessage(), e); + throw e; + } + } public static Behavior create( @@ -74,7 +85,8 @@ public static 
Behavior create( final int sqlPort, final String sqlUser, final String sqlPassword, - final String sqlDatabase, + final String sqlNotificationsDb, + final String sqlAuditDb, final String kafkaBootstrapServers, final String kafkaClientId) { return Behaviors.setup(context -> new BackEnd(level, @@ -85,7 +97,8 @@ public static Behavior create( sqlPort, sqlUser, sqlPassword, - sqlDatabase, + sqlNotificationsDb, + sqlAuditDb, kafkaBootstrapServers, kafkaClientId)); } @@ -97,7 +110,7 @@ private void openMPI( if (!AppUtils.isNullOrEmpty(Arrays.stream(dgraphHosts).toList())) { libMPI = new LibMPI(debugLevel, dgraphHosts, dgraphPorts, kafkaBootstrapServers, kafkaClientId); } else { - libMPI = new LibMPI(String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", pgIP, pgPort, pgDatabase), + libMPI = new LibMPI(String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", pgIP, pgPort, pgAuditDb), pgUser, pgPassword, kafkaBootstrapServers, @@ -112,31 +125,31 @@ public Receive createReceive() { public Receive actor() { ReceiveBuilder builder = newReceiveBuilder(); - return builder - .onMessage(CountGoldenRecordsRequest.class, this::countGoldenRecordsHandler) - .onMessage(CountInteractionsRequest.class, this::countInteractionsHandler) - .onMessage(CountRecordsRequest.class, this::countRecordsHandler) - .onMessage(GetGidsAllRequest.class, this::getGidsAllHandler) - .onMessage(GetGidsPagedRequest.class, this::getGidsPagedHandler) - .onMessage(GetInteractionRequest.class, this::getInteractionHandler) - .onMessage(GetExpandedInteractionsRequest.class, this::getExpandedInteractionsHandler) - .onMessage(GetExpandedGoldenRecordRequest.class, this::getExpandedGoldenRecordHandler) - .onMessage(GetExpandedGoldenRecordsRequest.class, this::getExpandedGoldenRecordsHandler) - .onMessage(GetGoldenRecordAuditTrailRequest.class, this::getGoldenRecordAuditTrailHandler) - .onMessage(GetInteractionAuditTrailRequest.class, this::getInteractionAuditTrailHandler) - .onMessage(GetNotificationsRequest.class, 
this::getNotificationsHandler) - .onMessage(PatchGoldenRecordRequest.class, this::patchGoldenRecordHandler) - .onMessage(PatchIidGidLinkRequest.class, this::patchIidGidLinkHandler) - .onMessage(PatchIidNewGidLinkRequest.class, this::patchIidNewGidLinkHandler) - .onMessage(PostUpdateNotificationRequest.class, this::postUpdateNotificationHandler) - .onMessage(PostSimpleSearchGoldenRecordsRequest.class, this::postSimpleSearchGoldenRecordsHandler) - .onMessage(PostCustomSearchGoldenRecordsRequest.class, this::postCustomSearchGoldenRecordsHandler) - .onMessage(PostSimpleSearchInteractionsRequest.class, this::postSimpleSearchInteractionsHandler) - .onMessage(PostCustomSearchInteractionsRequest.class, this::postCustomSearchInteractionsHandler) - .onMessage(PostFilterGidsRequest.class, this::postFilterGidsHandler) - .onMessage(PostFilterGidsWithInteractionCountRequest.class, this::postFilterGidsWithInteractionCountHandler) - .onMessage(PostUploadCsvFileRequest.class, this::postUploadCsvFileHandler) - .build(); + return builder.onMessage(CountGoldenRecordsRequest.class, this::countGoldenRecordsHandler) + .onMessage(CountInteractionsRequest.class, this::countInteractionsHandler) + .onMessage(CountRecordsRequest.class, this::countRecordsHandler) + .onMessage(GetGidsAllRequest.class, this::getGidsAllHandler) + .onMessage(GetGidsPagedRequest.class, this::getGidsPagedHandler) + .onMessage(GetInteractionRequest.class, this::getInteractionHandler) + .onMessage(GetExpandedInteractionsRequest.class, this::getExpandedInteractionsHandler) + .onMessage(GetExpandedGoldenRecordRequest.class, this::getExpandedGoldenRecordHandler) + .onMessage(GetExpandedGoldenRecordsRequest.class, this::getExpandedGoldenRecordsHandler) + .onMessage(GetGoldenRecordAuditTrailRequest.class, this::getGoldenRecordAuditTrailHandler) + .onMessage(GetInteractionAuditTrailRequest.class, this::getInteractionAuditTrailHandler) + .onMessage(GetNotificationsRequest.class, this::getNotificationsHandler) + 
.onMessage(PatchGoldenRecordRequest.class, this::patchGoldenRecordHandler) + .onMessage(PatchIidGidLinkRequest.class, this::patchIidGidLinkHandler) + .onMessage(PatchIidNewGidLinkRequest.class, this::patchIidNewGidLinkHandler) + .onMessage(PostUpdateNotificationRequest.class, this::postUpdateNotificationHandler) + .onMessage(PostSimpleSearchGoldenRecordsRequest.class, this::postSimpleSearchGoldenRecordsHandler) + .onMessage(PostCustomSearchGoldenRecordsRequest.class, this::postCustomSearchGoldenRecordsHandler) + .onMessage(PostSimpleSearchInteractionsRequest.class, this::postSimpleSearchInteractionsHandler) + .onMessage(PostCustomSearchInteractionsRequest.class, this::postCustomSearchInteractionsHandler) + .onMessage(PostFilterGidsRequest.class, this::postFilterGidsHandler) + .onMessage(PostFilterGidsWithInteractionCountRequest.class, this::postFilterGidsWithInteractionCountHandler) + .onMessage(PostUploadCsvFileRequest.class, this::postUploadCsvFileHandler) + .onMessage(SQLDashboardDataRequest.class, this::getSqlDashboardDataHandler) + .build(); } private Behavior postSimpleSearchGoldenRecordsHandler(final PostSimpleSearchGoldenRecordsRequest request) { @@ -228,8 +241,8 @@ private Behavior postFilterGidsWithInteractionCountHandler(final PostFilt } private Behavior getNotificationsHandler(final GetNotificationsRequest request) { - MatchesForReviewResult result = - psqlNotifications.getMatchesForReview(request.limit(), request.offset(), request.date(), request.state); + MatchesForReviewResult result = psqlNotifications.getMatchesForReview(request.limit(), request.offset(), + request.startDate(), request.endDate(), request.states()); request.replyTo.tell(new GetNotificationsResponse(result.getCount(), result.getSkippedRecords(), result.getNotifications())); @@ -422,7 +435,7 @@ private Behavior getGidsPagedHandler(final GetGidsPagedRequest request) { private Behavior postUpdateNotificationHandler(final PostUpdateNotificationRequest request) { try { - 
psqlNotifications.updateNotificationState(request.notificationId, request.state); + psqlNotifications.updateNotificationState(request.notificationId); } catch (SQLException exception) { LOGGER.error(exception.getMessage()); } @@ -433,7 +446,8 @@ private Behavior postUpdateNotificationHandler(final PostUpdateNotificati private Behavior postUploadCsvFileHandler(final PostUploadCsvFileRequest request) { File file = request.file(); try { - Files.copy(file.toPath(), Paths.get("/app/csv/" + file.getName())); + String userCSVPath = System.getenv("UPLOAD_CSV_PATH"); + Files.copy(file.toPath(), Paths.get((userCSVPath != null ? userCSVPath : "/app/csv") + "/" + file.getName())); Files.delete(file.toPath()); } catch (NoSuchFileException e) { LOGGER.error("No such file"); @@ -444,6 +458,13 @@ private Behavior postUploadCsvFileHandler(final PostUploadCsvFileRequest return Behaviors.same(); } + private Behavior getSqlDashboardDataHandler(final SQLDashboardDataRequest request) { + int openNotifications = psqlNotifications.getNotificationCount("OPEN"); + int closedNotifications = psqlNotifications.getNotificationCount("CLOSED"); + request.replyTo.tell(new SQLDashboardDataResponse(new SQLDashboardData(new NotificationStats(openNotifications, closedNotifications)))); + return Behaviors.same(); + } + public interface Event { } @@ -492,6 +513,12 @@ public record GetInteractionAuditTrailRequest( String uid) implements Event { } + public record SQLDashboardDataResponse(SQLDashboardData dashboardData) { } + + public record SQLDashboardDataRequest( + ActorRef replyTo) implements Event { + } + public record GetInteractionAuditTrailResponse(List auditTrail) { } @@ -504,8 +531,7 @@ public record GetGidsAllResponse(List records) implements EventResponse public record GetExpandedGoldenRecordRequest( ActorRef replyTo, - String goldenId) - implements Event { + String goldenId) implements Event { } public record GetExpandedGoldenRecordResponse(Either goldenRecord) implements EventResponse { @@ 
-516,8 +542,7 @@ public record GetExpandedGoldenRecordsRequest( List goldenIds) implements Event { } - public record GetExpandedGoldenRecordsResponse(Either> expandedGoldenRecords) - implements EventResponse { + public record GetExpandedGoldenRecordsResponse(Either> expandedGoldenRecords) implements EventResponse { } public record GetExpandedInteractionsRequest( @@ -525,8 +550,7 @@ public record GetExpandedInteractionsRequest( List patientIds) implements Event { } - public record GetExpandedInteractionsResponse(Either> expandedPatientRecords) - implements EventResponse { + public record GetExpandedInteractionsResponse(Either> expandedPatientRecords) implements EventResponse { } public record GetInteractionRequest( @@ -534,16 +558,16 @@ public record GetInteractionRequest( String iid) implements Event { } - public record GetInteractionResponse(Either patient) - implements EventResponse { + public record GetInteractionResponse(Either patient) implements EventResponse { } public record GetNotificationsRequest( ActorRef replyTo, int limit, int offset, - LocalDate date, - String state) implements Event { + Timestamp startDate, + Timestamp endDate, + List states) implements Event { } public record GetNotificationsResponse( @@ -569,8 +593,7 @@ public record PatchIidGidLinkRequest( Float score) implements Event { } - public record PatchIidGidLinkResponse(Either linkInfo) - implements EventResponse { + public record PatchIidGidLinkResponse(Either linkInfo) implements EventResponse { } public record PatchIidNewGidLinkRequest( @@ -580,14 +603,12 @@ public record PatchIidNewGidLinkRequest( float score) implements Event { } - public record PatchIidNewGidLinkResponse(Either linkInfo) - implements EventResponse { + public record PatchIidNewGidLinkResponse(Either linkInfo) implements EventResponse { } public record PostUpdateNotificationRequest( ActorRef replyTo, - String notificationId, - String state) implements Event { + String notificationId) implements Event { } public record 
PostUpdateNotificationResponse() implements EventResponse { @@ -645,8 +666,7 @@ public record PostSearchInteractionsResponse( public record PostUploadCsvFileRequest( ActorRef replyTo, FileInfo info, - File file) - implements Event { + File file) implements Event { } public record PostUploadCsvFileResponse() implements EventResponse { diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlAuditTrail.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlAuditTrail.java index cb9177250..cdc530246 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlAuditTrail.java +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlAuditTrail.java @@ -28,12 +28,12 @@ final class PsqlAuditTrail { List goldenRecordAuditTrail(final String uid) { psqlClient.connect(); final var list = new ArrayList(); - try (PreparedStatement preparedStatement = psqlClient.prepareStatement( - String.format( - Locale.ROOT, - """ - SELECT * FROM %s where goldenID = ?; - """, PSQL_TABLE_AUDIT_TRAIL).stripIndent())) { + try (PreparedStatement preparedStatement = psqlClient.prepareStatement(String.format(Locale.ROOT, + """ + SELECT * FROM %s where goldenID = ?; + """, + PSQL_TABLE_AUDIT_TRAIL) + .stripIndent())) { preparedStatement.setString(1, uid); ResultSet rs = preparedStatement.executeQuery(); while (rs.next()) { @@ -53,12 +53,12 @@ List goldenRecordAuditTrail(final String uid) { List interactionRecordAuditTrail(final String uid) { psqlClient.connect(); final var list = new ArrayList(); - try (PreparedStatement preparedStatement = psqlClient.prepareStatement( - String.format( - Locale.ROOT, - """ - SELECT * FROM %s where interactionID = ?; - """, PSQL_TABLE_AUDIT_TRAIL).stripIndent())) { + try (PreparedStatement preparedStatement = psqlClient.prepareStatement(String.format(Locale.ROOT, + """ + SELECT * FROM %s where interactionID = ?; + """, + PSQL_TABLE_AUDIT_TRAIL) + .stripIndent())) { preparedStatement.setString(1, 
uid); ResultSet rs = preparedStatement.executeQuery(); while (rs.next()) { diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlNotifications.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlNotifications.java index d643655ca..aa8e4391c 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlNotifications.java +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/PsqlNotifications.java @@ -5,14 +5,15 @@ import org.jembi.jempi.shared.models.MatchesForReviewResult; import java.sql.*; -import java.time.LocalDate; import java.util.*; final class PsqlNotifications { + + private static final String NOTIFICATION_TABLE_NAME = "notification"; private static final String QUERY = """ SELECT patient_id, id, names, created, state,type, score, golden_id FROM notification - WHERE created <= ? AND state = ? + WHERE created BETWEEN ? AND ? AND state IN (?, ?) ORDER BY created LIMIT ? OFFSET ? """; @@ -34,29 +35,35 @@ final class PsqlNotifications { * @param limit The maximum number of matches to retrieve. * @param offset The number of matches to skip from the beginning. * @param date The date threshold for match creation. - * @param state The state of notification. + * @param states The state of notification. * @return A {@link MatchesForReviewResult} object containing the matches and related information. 
*/ MatchesForReviewResult getMatchesForReview( final int limit, final int offset, - final LocalDate date, - final String state) { + final Timestamp startDate, + final Timestamp endDate, + final List states) { final var list = new ArrayList>(); MatchesForReviewResult result = new MatchesForReviewResult(); int skippedRows = 0; psqlClient.connect(); try (PreparedStatement preparedStatement = psqlClient.prepareStatement(QUERY); - PreparedStatement countStatement = psqlClient.prepareStatement("SELECT COUNT(*) FROM notification")) { + PreparedStatement countStatement = psqlClient.prepareStatement( + "SELECT COUNT(*) FROM notification WHERE created BETWEEN ? AND ? AND state IN (?, ?)")) { + countStatement.setTimestamp(1, startDate); + countStatement.setTimestamp(2, endDate); + countStatement.setString(3, extractState(0, states)); + countStatement.setString(4, extractState(1, states)); ResultSet countRs = countStatement.executeQuery(); countRs.next(); int totalCount = countRs.getInt(1); - - preparedStatement.setDate(1, java.sql.Date.valueOf(date)); - preparedStatement.setString(2, state); - preparedStatement.setInt(3, limit); - preparedStatement.setInt(4, offset); - LOGGER.debug("{}", preparedStatement); + preparedStatement.setTimestamp(1, startDate); + preparedStatement.setTimestamp(2, endDate); + preparedStatement.setString(3, extractState(0, states)); + preparedStatement.setString(4, extractState(1, states)); + preparedStatement.setInt(5, limit); + preparedStatement.setInt(6, offset); ResultSet rs = preparedStatement.executeQuery(); ResultSetMetaData md = rs.getMetaData(); int columns = md.getColumnCount(); @@ -67,7 +74,13 @@ MatchesForReviewResult getMatchesForReview( if (md.getColumnName(i).equals("id")) { notificationID = rs.getObject(i, UUID.class); } - row.put(md.getColumnName(i), (rs.getObject(i))); + final var name = md.getColumnName(i); + final var obj = rs.getObject(i); + if (obj == null && "names".equals(name)) { + row.put(name, ""); + } else { + row.put(name, 
(obj)); + } } list.add(row); row.put("candidates", getCandidates(notificationID)); @@ -82,6 +95,33 @@ MatchesForReviewResult getMatchesForReview( return result; } + public int getNotificationCount(final String status) { + String queryStatement = status == null + ? String.format("SELECT COUNT(*) FROM %s", NOTIFICATION_TABLE_NAME) + : String.format("SELECT COUNT(*) FROM %s WHERE state = '%s'", NOTIFICATION_TABLE_NAME, status); + + psqlClient.connect(); + try (PreparedStatement preparedStatement = psqlClient.prepareStatement(queryStatement); + ResultSet resultSet = preparedStatement.executeQuery()) { + if (resultSet.next()) { + return resultSet.getInt(1); + } + return 0; + } catch (SQLException e) { + LOGGER.error(e); + } + return -1; + } + + String extractState( + final int index, + final List states) { + if (index + 1 > states.size()) { + return null; + } + return states.get(index); + } + List> getCandidates(final UUID nID) { final var list = new ArrayList>(); String candidates = "select notification_id, score, golden_id from candidates where notification_id IN ('" + nID + "')"; @@ -117,8 +157,7 @@ void insertCandidates( try (Statement stmt = psqlClient.createStatement()) { psqlClient.setAutoCommit(false); String sql = - "INSERT INTO candidates (notification_id, score, golden_id)" + " VALUES ('" + id + "','" + score + "', '" + gID - + "')"; + "INSERT INTO candidates (notification_id, score, golden_id)" + " VALUES ('" + id + "','" + score + "', '" + gID + "')"; stmt.addBatch(sql); @@ -128,12 +167,13 @@ void insertCandidates( } void updateNotificationState( - final String id, - final String state) throws SQLException { + final String id) throws SQLException { psqlClient.connect(); try (Statement stmt = psqlClient.createStatement()) { ResultSet rs = stmt.executeQuery(String.format(Locale.ROOT, - "update notification set state = \'%s\' where id = \'%s\'", state, id)); + "update notification set state = '%s' where id = '%s'", + "CLOSED", + id)); psqlClient.commit(); } } 
diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Routes.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Routes.java index aa5cbcd07..66b753be4 100644 --- a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Routes.java +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/Routes.java @@ -7,6 +7,7 @@ import akka.http.javadsl.marshalling.Marshaller; import akka.http.javadsl.model.*; import akka.http.javadsl.server.Route; +import akka.http.javadsl.unmarshalling.Unmarshaller; import akka.japi.Pair; import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.logging.log4j.LogManager; @@ -16,14 +17,16 @@ import org.jembi.jempi.shared.models.*; import java.io.File; -import java.time.LocalDate; +import java.sql.Timestamp; import java.util.Locale; import java.util.Map; -import java.util.concurrent.CompletionStage; +import java.util.concurrent.*; import java.util.function.Function; +import java.util.regex.Pattern; import java.util.stream.Stream; import static akka.http.javadsl.server.Directives.*; +import static akka.http.javadsl.server.PathMatchers.segment; import static org.jembi.jempi.shared.utils.AppUtils.OBJECT_MAPPER; public final class Routes { @@ -52,19 +55,18 @@ public static Route patchGoldenRecord( final String goldenId) { return entity(Jackson.unmarshaller(GoldenRecordUpdateRequestPayload.class), payload -> payload != null - ? onComplete(Ask.patchGoldenRecord(actorSystem, backEnd, goldenId, payload), - result -> { - if (result.isSuccess()) { - final var updatedFields = result.get().fields(); - if (updatedFields.isEmpty()) { - return complete(StatusCodes.BAD_REQUEST); - } else { - return complete(StatusCodes.OK, result.get(), JSON_MARSHALLER); - } - } else { - return complete(StatusCodes.INTERNAL_SERVER_ERROR); - } - }) + ? 
onComplete(Ask.patchGoldenRecord(actorSystem, backEnd, goldenId, payload), result -> { + if (result.isSuccess()) { + final var updatedFields = result.get().fields(); + if (updatedFields.isEmpty()) { + return complete(StatusCodes.BAD_REQUEST); + } else { + return complete(StatusCodes.OK, result.get(), JSON_MARSHALLER); + } + } else { + return complete(StatusCodes.INTERNAL_SERVER_ERROR); + } + }) : complete(StatusCodes.NO_CONTENT)); } @@ -77,7 +79,7 @@ public static Route countRecords( new ApiModels.ApiNumberOfRecords(result.get().goldenRecords(), result.get().patientRecords()), JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route getGidsPaged( @@ -90,10 +92,8 @@ public static Route getGidsPaged( Long.parseLong(offset), Long.parseLong(length)), result -> result.isSuccess() - ? complete(StatusCodes.OK, - result.get(), - JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT)))); + ? complete(StatusCodes.OK, result.get(), JSON_MARSHALLER) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))))); } public static Route getGoldenRecordAuditTrail( @@ -105,7 +105,7 @@ public static Route getGoldenRecordAuditTrail( ? complete(StatusCodes.OK, ApiModels.ApiAuditTrail.fromAuditTrail(result.get().auditTrail()), JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT))); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)))); } public static Route getInteractionAuditTrail( @@ -117,57 +117,72 @@ public static Route getInteractionAuditTrail( ? 
complete(StatusCodes.OK, ApiModels.ApiAuditTrail.fromAuditTrail(result.get().auditTrail()), JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT))); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)))); } public static Route patchIidNewGidLink( - final ActorSystem actorSystem, - final ActorRef backEnd) { - return parameter("goldenID", - currentGoldenId -> parameter("patientID", - patientId -> onComplete(Ask.patchIidNewGidLink(actorSystem, - backEnd, - currentGoldenId, - patientId), - result -> result.isSuccess() - ? result.get() - .linkInfo() - .mapLeft(Routes::mapError) - .fold(error -> error, - linkInfo -> complete(StatusCodes.OK, - linkInfo, - JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)))); + final ActorSystem actorSystem, + final ActorRef backEnd, + final String controllerIp, + final Integer controllerPort, + final Http http) { + + return entity(Jackson.unmarshaller(NotificationResolution.class), + obj -> onComplete(Ask.patchIidNewGidLink(actorSystem, + backEnd, + obj.newGoldenId(), + obj.interactionId()), + result -> result.isSuccess() + ? result.get() + .linkInfo() + .mapLeft(Routes::mapError) + .fold(error -> error, + linkInfo -> onComplete(processOnNotificationResolution( + controllerIp, + controllerPort, + http, + new NotificationResolutionProcessorData(obj, linkInfo)), + r -> complete( + StatusCodes.OK, + linkInfo, + JSON_MARSHALLER)) + ) + : complete(StatusCodes.IM_A_TEAPOT)) + ); } public static Route patchIidGidLink( - final ActorSystem actorSystem, - final ActorRef backEnd) { - return parameter("goldenID", - currentGoldenId -> - parameter("newGoldenID", - newGoldenId -> - parameter("patientID", - patientId -> - parameter("score", - score -> onComplete( - Ask.patchIidGidLink( - actorSystem, - backEnd, - currentGoldenId, - newGoldenId, - patientId, - Float.parseFloat(score)), - result -> result.isSuccess() - ? 
result.get() - .linkInfo() - .mapLeft(Routes::mapError) - .fold(error -> error, - linkInfo -> complete( - StatusCodes.OK, - linkInfo, - JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)))))); + final ActorSystem actorSystem, + final ActorRef backEnd, + final String controllerIp, + final Integer controllerPort, + final Http http) { + + return entity(Jackson.unmarshaller(NotificationResolution.class), + obj -> onComplete(Ask.patchIidGidLink( + actorSystem, + backEnd, + obj.currentGoldenId(), + obj.newGoldenId(), + obj.interactionId(), + obj.score()), + result -> result.isSuccess() + ? result.get() + .linkInfo() + .mapLeft(Routes::mapError) + .fold(error -> error, + linkInfo -> onComplete(processOnNotificationResolution( + controllerIp, + controllerPort, + http, + new NotificationResolutionProcessorData(obj, linkInfo)), + r -> complete( + StatusCodes.OK, + linkInfo, + JSON_MARSHALLER)) + ) + : complete(StatusCodes.IM_A_TEAPOT)) + ); } public static Route countGoldenRecords( @@ -182,7 +197,7 @@ public static Route countGoldenRecords( count -> complete(StatusCodes.OK, new ApiModels.ApiGoldenRecordCount(count), JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route countInteractions( @@ -197,7 +212,7 @@ public static Route countInteractions( count -> complete(StatusCodes.OK, new ApiModels.ApiInteractionCount(count), JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route getGidsAll( @@ -206,7 +221,7 @@ public static Route getGidsAll( return onComplete(Ask.getGidsAll(actorSystem, backEnd), result -> result.isSuccess() ? 
complete(StatusCodes.OK, result.get(), JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route getNotifications( @@ -215,19 +230,21 @@ public static Route getNotifications( return parameter("limit", limit -> parameter("offset", offset -> - parameter("date", date -> - parameter("state", state -> - onComplete(Ask.getNotifications(actorSystem, + parameter("startDate", startDate -> + parameter("endDate", endDate -> + parameter("states", states -> + onComplete(Ask.getNotifications(actorSystem, backEnd, Integer.parseInt(limit), Integer.parseInt(offset), - LocalDate.parse(date), - state), + Timestamp.valueOf(startDate), + Timestamp.valueOf(endDate), + Stream.of(states.split(",")).map(String::trim).toList()), result -> result.isSuccess() ? complete(StatusCodes.OK, result.get(), JSON_MARSHALLER) - : complete(StatusCodes.IM_A_TEAPOT)))))); + : complete(StatusCodes.IM_A_TEAPOT))))))); } public static Route getExpandedGoldenRecordsUsingParameterList( @@ -246,7 +263,7 @@ public static Route getExpandedGoldenRecordsUsingParameterList( .map(ApiModels.ApiExpandedGoldenRecord::fromExpandedGoldenRecord) .toList(), JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); }); } @@ -255,19 +272,18 @@ public static Route getExpandedGoldenRecordsFromUsingCSV( final ActorRef backEnd) { return parameter("uidList", items -> { final var uidList = Stream.of(items.split(",")).map(String::trim).toList(); - return onComplete( - Ask.getExpandedGoldenRecords(actorSystem, backEnd, uidList), - result -> result.isSuccess() - ? 
result.get() - .expandedGoldenRecords() - .mapLeft(Routes::mapError) - .fold(error -> error, - expandedGoldenRecords -> complete(StatusCodes.OK, - expandedGoldenRecords.stream() - .map(ApiModels.ApiExpandedGoldenRecord::fromExpandedGoldenRecord) - .toList(), - JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + return onComplete(Ask.getExpandedGoldenRecords(actorSystem, backEnd, uidList), + result -> result.isSuccess() + ? result.get() + .expandedGoldenRecords() + .mapLeft(Routes::mapError) + .fold(error -> error, + expandedGoldenRecords -> complete(StatusCodes.OK, + expandedGoldenRecords.stream() + .map(ApiModels.ApiExpandedGoldenRecord::fromExpandedGoldenRecord) + .toList(), + JSON_MARSHALLER)) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); }); } @@ -287,7 +303,7 @@ public static Route getExpandedInteractionsUsingCSV( .map(ApiModels.ApiExpandedInteraction::fromExpandedInteraction) .toList(), JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); }); } @@ -305,7 +321,7 @@ public static Route getExpandedGoldenRecord( ApiModels.ApiExpandedGoldenRecord.fromExpandedGoldenRecord( goldenRecord), Jackson.marshaller(OBJECT_MAPPER))) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route getInteraction( @@ -321,7 +337,7 @@ public static Route getInteraction( patientRecord -> complete(StatusCodes.OK, ApiModels.ApiInteraction.fromInteraction(patientRecord), JSON_MARSHALLER)) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } public static Route postUpdateNotification( @@ -333,7 +349,7 @@ public static Route postUpdateNotification( final var updateResponse = response.get(); return complete(StatusCodes.OK, updateResponse, JSON_MARSHALLER); } else { - return complete(StatusCodes.IM_A_TEAPOT); + return 
complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } @@ -341,21 +357,23 @@ public static Route postUpdateNotification( public static Route postUploadCsvFile( final ActorSystem actorSystem, final ActorRef backEnd) { - return withSizeLimit( - 1024 * 1024 * 6, - () -> storeUploadedFile("csv", - (info) -> { - try { - return File.createTempFile("import-", ".csv"); - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - return null; - } - }, - (info, file) -> onComplete(Ask.postUploadCsvFile(actorSystem, backEnd, info, file), - response -> response.isSuccess() - ? complete(StatusCodes.OK) - : complete(StatusCodes.IM_A_TEAPOT)))); + return withSizeLimit(1024 * 1024 * 2048, () -> storeUploadedFile("csv", + (info) -> { + try { + return File.createTempFile("import-", ".csv"); + } catch (Exception e) { + LOGGER.error(e.getMessage(), e); + return null; + } + }, + (info, file) -> onComplete(Ask.postUploadCsvFile(actorSystem, + backEnd, + info, + file), + response -> response.isSuccess() + ? 
complete(StatusCodes.OK) + : complete(ApiModels.getHttpErrorResponse( + StatusCodes.IM_A_TEAPOT))))); } public static Route postSimpleSearch( @@ -363,23 +381,20 @@ public static Route postSimpleSearch( final ActorRef backEnd, final RecordType recordType) { LOGGER.info("Simple search on {}", recordType); - return entity(Jackson.unmarshaller(ApiModels.ApiSimpleSearchRequestPayload.class), - searchParameters -> onComplete( - () -> { - if (recordType == RecordType.GoldenRecord) { - return Ask.postSimpleSearchGoldenRecords(actorSystem, backEnd, searchParameters); - } else { - return Ask.postSimpleSearchInteractions(actorSystem, backEnd, searchParameters); - } - }, - response -> { - if (response.isSuccess()) { - final var eventSearchRsp = response.get(); - return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); - } else { - return complete(StatusCodes.IM_A_TEAPOT); - } - })); + return entity(Jackson.unmarshaller(ApiModels.ApiSimpleSearchRequestPayload.class), searchParameters -> onComplete(() -> { + if (recordType == RecordType.GoldenRecord) { + return Ask.postSimpleSearchGoldenRecords(actorSystem, backEnd, searchParameters); + } else { + return Ask.postSimpleSearchInteractions(actorSystem, backEnd, searchParameters); + } + }, response -> { + if (response.isSuccess()) { + final var eventSearchRsp = response.get(); + return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); + } else { + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + })); } public static Route postFilterGids( @@ -389,16 +404,14 @@ public static Route postFilterGids( // final ObjectMapper objectMapper = new ObjectMapper(); // objectMapper.registerModule(new JavaTimeModule()); return entity(Jackson.unmarshaller(OBJECT_MAPPER, FilterGidsRequestPayload.class), - searchParameters -> onComplete( - () -> Ask.postFilterGids(actorSystem, backEnd, searchParameters), - response -> { - if (response.isSuccess()) { - final var eventSearchRsp = response.get(); - return 
complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); - } else { - return complete(StatusCodes.IM_A_TEAPOT); - } - })); + searchParameters -> onComplete(() -> Ask.postFilterGids(actorSystem, backEnd, searchParameters), response -> { + if (response.isSuccess()) { + final var eventSearchRsp = response.get(); + return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); + } else { + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + })); } public static Route postFilterGidsWithInteractionCount( @@ -408,16 +421,16 @@ public static Route postFilterGidsWithInteractionCount( // final ObjectMapper objectMapper = new ObjectMapper(); // objectMapper.registerModule(new JavaTimeModule()); return entity(Jackson.unmarshaller(OBJECT_MAPPER, FilterGidsRequestPayload.class), - searchParameters -> onComplete( - () -> Ask.postFilterGidsWithInteractionCount(actorSystem, backEnd, searchParameters), - response -> { - if (response.isSuccess()) { - final var eventSearchRsp = response.get(); - return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); - } else { - return complete(StatusCodes.IM_A_TEAPOT); - } - })); + searchParameters -> onComplete(() -> Ask.postFilterGidsWithInteractionCount(actorSystem, + backEnd, + searchParameters), response -> { + if (response.isSuccess()) { + final var eventSearchRsp = response.get(); + return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); + } else { + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + })); } public static Route postCustomSearch( @@ -435,7 +448,7 @@ public static Route postCustomSearch( final var eventSearchRsp = response.get(); return complete(StatusCodes.OK, eventSearchRsp, JSON_MARSHALLER); } else { - return complete(StatusCodes.IM_A_TEAPOT); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } @@ -445,14 +458,13 @@ public static CompletionStage proxyPostCalculateScores( final Integer linkerPort, final Http http, 
final ApiModels.ApiCalculateScoresRequest body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_POST_CALCULATE_SCORES)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_CALCULATE_SCORES)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } @@ -461,56 +473,130 @@ public static Route proxyPostCalculateScores( final String linkerIp, final Integer linkerPort, final Http http) { - return entity(Jackson.unmarshaller(ApiModels.ApiCalculateScoresRequest.class), - obj -> { - try { - return onComplete(proxyPostCalculateScores(linkerIp, - linkerPort, - http, obj), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(ApiModels.ApiCalculateScoresRequest.class), obj -> { + try { + return onComplete(proxyPostCalculateScores(linkerIp, linkerPort, http, obj), + response -> response.isSuccess() + ? 
complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + }); } + public static Route getDashboardData( + final ActorSystem actorSystem, + final ActorRef backEnd, + final String controllerIp, + final Integer controllerPort, + final Http http) { + + final var request = HttpRequest + .create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + controllerIp, + controllerPort, + GlobalConstants.SEGMENT_PROXY_GET_DASHBOARD_DATA)) + .withMethod(HttpMethods.GET); + + CompletableFuture sqlDashboardDataFuture = Ask.getSQLDashboardData(actorSystem, backEnd).toCompletableFuture(); + CompletableFuture dashboardDataFuture = http.singleRequest(request).toCompletableFuture(); + return onComplete( + CompletableFuture.allOf( + sqlDashboardDataFuture, + dashboardDataFuture + ), + result -> { + if (result.isSuccess()) { + HttpResponse dashboardDataResponse = dashboardDataFuture.join(); + if (dashboardDataResponse.status() != StatusCodes.OK) { + LOGGER.error("Error getting dashboard data "); + return complete(StatusCodes.INTERNAL_SERVER_ERROR); + } + + String responseBody = null; + try { + responseBody = Unmarshaller.entityToString().unmarshal(dashboardDataResponse.entity(), actorSystem). 
+ toCompletableFuture().get(2, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + LOGGER.error("Error getting dashboard data ", e); + return complete(StatusCodes.INTERNAL_SERVER_ERROR); + } + + Map dashboardDataResults = Map.ofEntries(Map.entry("sqlDashboardData", sqlDashboardDataFuture.join()), + Map.entry("dashboardData", responseBody)); + + return complete(StatusCodes.OK, dashboardDataResults, JSON_MARSHALLER); + } else { + LOGGER.error("Error getting dashboard data ", result.failed().get()); + return complete(StatusCodes.INTERNAL_SERVER_ERROR); + } + }); + + } private static CompletionStage proxyGetCandidatesWithScore( final String linkerIP, final Integer linkerPort, final Http http, final String iid) throws JsonProcessingException { - final var uri = Uri - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES)) - .query(Query.create(Pair.create("iid", iid))); + final var uri = Uri.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES)) + .query(Query.create(Pair.create("iid", iid))); final var request = HttpRequest.GET(uri.path()); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } + private static CompletionStage processOnNotificationResolution( + final String linkerIP, + final Integer linkerPort, + final Http http, + final NotificationResolutionProcessorData body) { + try { + final var request = HttpRequest + .create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_ON_NOTIFICATION_RESOLUTION)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var stage = http.singleRequest(request); + return stage.thenApply(response -> { + if (response.status() != 
StatusCodes.OK) { + LOGGER.error(String.format("An error occurred while processing the notification resolution. Notification id: %s", body.notificationResolution().notificationId())); + } + return true; + }); + } catch (Exception e) { + LOGGER.error(String.format("An error occurred while processing the notification resolution. Notification id: %s", body.notificationResolution().notificationId()), e); + return CompletableFuture.completedFuture(true); + } + + + } + public static Route proxyGetCandidatesWithScore( final String linkerIP, final Integer linkerPort, final Http http) { - return parameter("iid", - iid -> { - try { - return onComplete(proxyGetCandidatesWithScore(linkerIP, linkerPort, http, iid), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return parameter("iid", iid -> { + try { + return onComplete(proxyGetCandidatesWithScore(linkerIP, linkerPort, http, iid), + response -> response.isSuccess() + ? 
complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + }); } private static CompletionStage patchCrUpdateFieldsProxy( @@ -518,14 +604,13 @@ private static CompletionStage patchCrUpdateFieldsProxy( final Integer linkerPort, final Http http, final ApiModels.ApiCrUpdateFieldsRequest body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_CR_UPDATE_FIELDS)) - .withMethod(HttpMethods.PATCH) - .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_PATCH_CR_UPDATE_FIELDS)) + .withMethod(HttpMethods.PATCH) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } @@ -535,14 +620,45 @@ private static CompletionStage postCrRegisterProxy( final Integer linkerPort, final Http http, final ApiModels.ApiCrRegisterRequest body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_CR_REGISTER)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_CR_REGISTER)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var stage 
= http.singleRequest(request); + return stage.thenApply(response -> response); + } + + private static CompletionStage postLinkInteractionProxy( + final String linkerIP, + final Integer linkerPort, + final Http http, + final ApiModels.LinkInteractionSyncBody body) throws JsonProcessingException { + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var stage = http.singleRequest(request); + return stage.thenApply(response -> response); + } + + private static CompletionStage postLinkInteractionToGidProxy( + final String linkerIP, + final Integer linkerPort, + final Http http, + final ApiModels.LinkInteractionToGidSyncBody body) throws JsonProcessingException { + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> response); } @@ -552,14 +668,13 @@ private static CompletionStage postCrCandidatesProxy( final Integer linkerPort, final Http http, final ApiModels.ApiCrCandidatesRequest body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_CR_CANDIDATES)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_CR_CANDIDATES)) + 
.withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> { LOGGER.debug("{}", response); @@ -572,14 +687,13 @@ private static CompletionStage postCrFindProxy( final Integer linkerPort, final Http http, final ApiModels.ApiCrFindRequest body) throws JsonProcessingException { - final var request = HttpRequest - .create(String.format(Locale.ROOT, - "http://%s:%d/JeMPI/%s", - linkerIP, - linkerPort, - GlobalConstants.SEGMENT_PROXY_CR_FIND)) - .withMethod(HttpMethods.POST) - .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); + final var request = HttpRequest.create(String.format(Locale.ROOT, + "http://%s:%d/JeMPI/%s", + linkerIP, + linkerPort, + GlobalConstants.SEGMENT_PROXY_POST_CR_FIND)) + .withMethod(HttpMethods.POST) + .withEntity(ContentTypes.APPLICATION_JSON, OBJECT_MAPPER.writeValueAsBytes(body)); final var stage = http.singleRequest(request); return stage.thenApply(response -> { LOGGER.debug("{}", response); @@ -591,72 +705,102 @@ public static Route patchCrUpdateFields( final String linkerIP, final Integer linkerPort, final Http http) { - return entity(Jackson.unmarshaller(ApiModels.ApiCrUpdateFieldsRequest.class), - apiCrUpdateFields -> { - LOGGER.debug("{}", apiCrUpdateFields); - try { - return onComplete(patchCrUpdateFieldsProxy(linkerIP, linkerPort, http, apiCrUpdateFields), - response -> response.isSuccess() - ? 
complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(ApiModels.ApiCrUpdateFieldsRequest.class), apiCrUpdateFields -> { + LOGGER.debug("{}", apiCrUpdateFields); + try { + return onComplete(patchCrUpdateFieldsProxy(linkerIP, linkerPort, http, apiCrUpdateFields), + response -> response.isSuccess() + ? complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + }); } public static Route postCrFind( final String linkerIP, final Integer linkerPort, final Http http) { - return entity( - Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrFindRequest.class), - apiCrFind -> { - LOGGER.debug("{}", apiCrFind); - try { - return onComplete(postCrFindProxy(linkerIP, linkerPort, http, apiCrFind), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrFindRequest.class), apiCrFind -> { + LOGGER.debug("{}", apiCrFind); + try { + return onComplete(postCrFindProxy(linkerIP, linkerPort, http, apiCrFind), + response -> response.isSuccess() + ? 
complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(StatusCodes.IM_A_TEAPOT); + } + }); } public static Route postCrCandidates( final String linkerIP, final Integer linkerPort, final Http http) { - return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrCandidatesRequest.class), - apiCrCandidates -> { - LOGGER.debug("{}", apiCrCandidates); - try { - return onComplete(postCrCandidatesProxy(linkerIP, linkerPort, http, apiCrCandidates), - response -> response.isSuccess() - ? complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); - } catch (JsonProcessingException e) { - LOGGER.error(e.getLocalizedMessage(), e); - return complete(StatusCodes.IM_A_TEAPOT); - } - }); + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrCandidatesRequest.class), apiCrCandidates -> { + LOGGER.debug("{}", apiCrCandidates); + try { + return onComplete(postCrCandidatesProxy(linkerIP, linkerPort, http, apiCrCandidates), + response -> response.isSuccess() + ? complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + }); } public static Route postCrRegister( final String linkerIP, final Integer linkerPort, final Http http) { - return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrRegisterRequest.class), - apiCrRegister -> { - LOGGER.debug("{}", apiCrRegister); + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.ApiCrRegisterRequest.class), apiCrRegister -> { + LOGGER.debug("{}", apiCrRegister); + try { + return onComplete(postCrRegisterProxy(linkerIP, linkerPort, http, apiCrRegister), + response -> response.isSuccess() + ? 
complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(StatusCodes.NO_CONTENT); + } + }); + } + + public static Route postLinkInteraction( + final String linkerIP, + final Integer linkerPort, + final Http http) { + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.LinkInteractionSyncBody.class), linkInteractionSyncBody -> { + try { + return onComplete(postLinkInteractionProxy(linkerIP, linkerPort, http, linkInteractionSyncBody), + response -> response.isSuccess() + ? complete(response.get()) + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return complete(StatusCodes.NO_CONTENT); + } + }); + } + + public static Route postLinkInteractionToGid( + final String linkerIP, + final Integer linkerPort, + final Http http) { + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.LinkInteractionToGidSyncBody.class), + apiLinkInteractionToGid -> { try { - return onComplete(postCrRegisterProxy(linkerIP, linkerPort, http, apiCrRegister), + return onComplete(postLinkInteractionToGidProxy(linkerIP, linkerPort, http, apiLinkInteractionToGid), response -> response.isSuccess() ? 
complete(response.get()) - : complete(StatusCodes.IM_A_TEAPOT)); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))); } catch (JsonProcessingException e) { LOGGER.error(e.getLocalizedMessage(), e); return complete(StatusCodes.NO_CONTENT); @@ -664,4 +808,90 @@ public static Route postCrRegister( }); } + public static Route createCoreAPIRoutes( + final ActorSystem actorSystem, + final ActorRef backEnd, + final String jsonFields, + final String linkerIP, + final Integer linkerPort, + final String controllerIP, + final Integer controllerPort, + final Http http) { + return concat(post(() -> concat(path(GlobalConstants.SEGMENT_POST_UPDATE_NOTIFICATION, + () -> Routes.postUpdateNotification(actorSystem, backEnd)), + path(segment(GlobalConstants.SEGMENT_POST_SIMPLE_SEARCH).slash(segment(Pattern.compile( + "^(golden|patient)$"))), + type -> Routes.postSimpleSearch(actorSystem, + backEnd, + type.equals("golden") + ? RecordType.GoldenRecord + : RecordType.Interaction)), + path(segment(GlobalConstants.SEGMENT_POST_CUSTOM_SEARCH).slash(segment(Pattern.compile( + "^(golden|patient)$"))), + type -> Routes.postCustomSearch(actorSystem, + backEnd, + type.equals("golden") + ? 
RecordType.GoldenRecord + : RecordType.Interaction)), + path(GlobalConstants.SEGMENT_POST_UPLOAD_CSV_FILE, + () -> Routes.postUploadCsvFile(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_PROXY_POST_CALCULATE_SCORES, + () -> Routes.proxyPostCalculateScores(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_POST_FILTER_GIDS, + () -> Routes.postFilterGids(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION, + () -> Routes.postLinkInteraction(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID, + () -> Routes.postLinkInteractionToGid(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_PROXY_POST_CR_REGISTER, + () -> Routes.postCrRegister(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_PROXY_POST_CR_FIND, + () -> Routes.postCrFind(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_PROXY_POST_CR_CANDIDATES, + () -> Routes.postCrCandidates(linkerIP, linkerPort, http)), + path(GlobalConstants.SEGMENT_POST_FILTER_GIDS_WITH_INTERACTION_COUNT, + () -> Routes.postFilterGidsWithInteractionCount(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_POST_IID_NEW_GID_LINK, + () -> Routes.patchIidNewGidLink(actorSystem, backEnd, controllerIP, controllerPort, http)), + path(GlobalConstants.SEGMENT_POST_IID_GID_LINK, + () -> Routes.patchIidGidLink(actorSystem, backEnd, controllerIP, controllerPort, http)))), + patch(() -> concat(path(segment(GlobalConstants.SEGMENT_PATCH_GOLDEN_RECORD).slash(segment(Pattern.compile( + "^[A-z0-9]+$"))), gid -> Routes.patchGoldenRecord(actorSystem, backEnd, gid)), + path(GlobalConstants.SEGMENT_PROXY_PATCH_CR_UPDATE_FIELDS, + () -> Routes.patchCrUpdateFields(linkerIP, linkerPort, http)))), + get(() -> concat(path(GlobalConstants.SEGMENT_COUNT_GOLDEN_RECORDS, + () -> Routes.countGoldenRecords(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_COUNT_INTERACTIONS, + () -> Routes.countInteractions(actorSystem, backEnd)), + 
path(GlobalConstants.SEGMENT_COUNT_RECORDS, () -> Routes.countRecords(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_GIDS_ALL, () -> Routes.getGidsAll(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_GIDS_PAGED, + () -> Routes.getGidsPaged(actorSystem, backEnd)), + path(segment(GlobalConstants.SEGMENT_GET_INTERACTION).slash(segment(Pattern.compile( + "^[A-z0-9]+$"))), iid -> Routes.getInteraction(actorSystem, backEnd, iid)), + path(segment(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORD).slash(segment(Pattern.compile( + "^[A-z0-9]+$"))), gid -> Routes.getExpandedGoldenRecord(actorSystem, backEnd, gid)), + path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_PARAMETER_LIST, + () -> Routes.getExpandedGoldenRecordsUsingParameterList(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORDS_USING_CSV, + () -> Routes.getExpandedGoldenRecordsFromUsingCSV(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_EXPANDED_INTERACTIONS_USING_CSV, + () -> Routes.getExpandedInteractionsUsingCSV(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_GOLDEN_RECORD_AUDIT_TRAIL, + () -> Routes.getGoldenRecordAuditTrail(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_INTERACTION_AUDIT_TRAIL, + () -> Routes.getInteractionAuditTrail(actorSystem, backEnd)), + path(GlobalConstants.SEGMENT_GET_NOTIFICATIONS, + () -> Routes.getNotifications(actorSystem, backEnd)), + path(segment(GlobalConstants.SEGMENT_GET_INTERACTION).slash(segment(Pattern.compile( + "^[A-z0-9]+$"))), iid -> Routes.getInteraction(actorSystem, backEnd, iid)), + path(segment(GlobalConstants.SEGMENT_GET_EXPANDED_GOLDEN_RECORD).slash(segment(Pattern.compile( + "^[A-z0-9]+$"))), gid -> Routes.getExpandedGoldenRecord(actorSystem, backEnd, gid)), + path(GlobalConstants.SEGMENT_GET_FIELDS_CONFIG, () -> complete(StatusCodes.OK, jsonFields)), + path(GlobalConstants.SEGMENT_PROXY_GET_DASHBOARD_DATA, + () -> Routes.getDashboardData(actorSystem, backEnd, 
controllerIP, controllerPort, http)), + path(GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES, + () -> Routes.proxyGetCandidatesWithScore(linkerIP, linkerPort, http))))); + } + } diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/HttpServerRouteEntries.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/HttpServerRouteEntries.java new file mode 100644 index 000000000..3543789fd --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/HttpServerRouteEntries.java @@ -0,0 +1,13 @@ +package org.jembi.jempi.libapi.httpServer; + +public abstract class HttpServerRouteEntries implements IHttpServerRouteEntries { + + protected S httpServer = null; + + public HttpServerRouteEntries(final S ihttpServer) { + this.httpServer = ihttpServer; + } + + @Override + public abstract R getRouteEntries(); +} diff --git a/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/IHttpServerRouteEntries.java b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/IHttpServerRouteEntries.java new file mode 100644 index 000000000..d17238275 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibAPI/src/main/java/org/jembi/jempi/libapi/httpServer/IHttpServerRouteEntries.java @@ -0,0 +1,5 @@ +package org.jembi.jempi.libapi.httpServer; + +public interface IHttpServerRouteEntries { + R getRouteEntries(); +} diff --git a/JeMPI_Apps/JeMPI_LibMPI/pom.xml b/JeMPI_Apps/JeMPI_LibMPI/pom.xml index 263bea4e2..50efd5aca 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/pom.xml +++ b/JeMPI_Apps/JeMPI_LibMPI/pom.xml @@ -14,8 +14,8 @@ jar - 17 - 17 + ${java.version} + ${java.version} UTF-8 diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/LibMPI.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/LibMPI.java index dc7dfe32d..2251cab57 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/LibMPI.java +++ 
b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/LibMPI.java @@ -236,13 +236,9 @@ public boolean setScore( final float newScore) { final var result = client.setScore(interactionID, goldenID, newScore); if (result) { - sendAuditEvent(interactionID, - goldenID, - String.format(Locale.ROOT, "score: %.5f -> %.5f", oldScore, newScore)); + sendAuditEvent(interactionID, goldenID, String.format(Locale.ROOT, "score: %.5f -> %.5f", oldScore, newScore)); } else { - sendAuditEvent(interactionID, - goldenID, - String.format(Locale.ROOT, "set score error: %.5f -> %.5f", oldScore, newScore)); + sendAuditEvent(interactionID, goldenID, String.format(Locale.ROOT, "set score error: %.5f -> %.5f", oldScore, newScore)); } return result; @@ -264,9 +260,7 @@ public boolean updateGoldenRecordField( final String newValue) { final var result = client.updateGoldenRecordField(goldenId, fieldName, newValue); if (result) { - sendAuditEvent(interactionId, - goldenId, - String.format(Locale.ROOT, "%s: '%s' -> '%s'", fieldName, oldValue, newValue)); + sendAuditEvent(interactionId, goldenId, String.format(Locale.ROOT, "%s: '%s' -> '%s'", fieldName, oldValue, newValue)); } else { sendAuditEvent(interactionId, goldenId, @@ -305,11 +299,16 @@ public Either updateLink( if (result.isRight()) { sendAuditEvent(interactionID, newGoldenID, - String.format(Locale.ROOT, "Interaction -> update GoldenID: old(%s) new(%s) [%f]", goldenID, newGoldenID, score)); + String.format(Locale.ROOT, + "Interaction -> update GoldenID: old(%s) new(%s) [%f]", + goldenID, + newGoldenID, + score)); } else { sendAuditEvent(interactionID, newGoldenID, - String.format(Locale.ROOT, "Interaction -> update GoldenID error: old(%s) new(%s) [%f]", + String.format(Locale.ROOT, + "Interaction -> update GoldenID error: old(%s) new(%s) [%f]", goldenID, newGoldenID, score)); @@ -326,12 +325,17 @@ public LinkInfo createInteractionAndLinkToExistingGoldenRecord( if (result != null) { sendAuditEvent(result.interactionUID(), 
result.goldenUID(), - String.format(Locale.ROOT, "Interaction -> Existing GoldenRecord (%.5f) / Validation: Deterministic(%s), Probabilistic(%.3f)", result.score(), - deterministicValidation, probabilisticValidation)); + String.format(Locale.ROOT, + "Interaction -> Existing GoldenRecord (%.5f) / Validation: Deterministic(%s), " + + "Probabilistic(%.3f)", + result.score(), + deterministicValidation, + probabilisticValidation)); } else { sendAuditEvent(interaction.interactionId(), goldenIdScore.goldenId(), - String.format(Locale.ROOT, "Interaction -> error linking to existing GoldenRecord (%.5f)", + String.format(Locale.ROOT, + "Interaction -> error linking to existing GoldenRecord (%.5f)", goldenIdScore.score())); } return result; diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/MpiServiceError.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/MpiServiceError.java index 84283670a..bc76b20ee 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/MpiServiceError.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/MpiServiceError.java @@ -32,8 +32,7 @@ record DeletePredicateError( } record CRMissingFieldError( - String field - ) implements MpiServiceError { + String field) implements MpiServiceError { } record CRClientExistsError( diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphClient.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphClient.java index e95c573ef..0d02fc78e 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphClient.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphClient.java @@ -13,7 +13,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -final class DgraphClient { +public final class DgraphClient { private static final Logger LOGGER = LogManager.getLogger(DgraphClient.class); private io.dgraph.DgraphClient dgraphClient; 
diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphCount.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphCount.java index 50671434a..370f40fc7 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphCount.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphCount.java @@ -4,5 +4,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; @JsonInclude(JsonInclude.Include.NON_NULL) -record DgraphCount(@JsonProperty("count") Integer count) {} +record DgraphCount(@JsonProperty("count") Integer count) { +} diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphMutations.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphMutations.java index 1eb8a4e17..b475290a0 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphMutations.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphMutations.java @@ -15,7 +15,6 @@ import org.jembi.jempi.shared.models.*; import org.jembi.jempi.shared.utils.AppUtils; -import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -39,15 +38,12 @@ LinkInfo addNewDGraphInteraction(final Interaction interaction) { result.interactionUID, result.sourceUID, 1.0F, - new CustomUniqueGoldenRecordData(LocalDateTime.now(), - true, - interaction.uniqueInteractionData() - .auxId())); + new CustomUniqueGoldenRecordData(interaction.uniqueInteractionData())); if (grUID == null) { LOGGER.error("Failed to insert golden record"); return null; } - return new LinkInfo(grUID, result.interactionUID, 1.0F); + return new LinkInfo(grUID, result.interactionUID, result.sourceUID, 1.0F); } boolean updateGoldenRecordField( @@ -65,31 +61,32 @@ private String createSourceIdTriple(final CustomSourceId sourceId) { _:%s %s . _:%s %s . _:%s "SourceId" . 
- """, uuid, AppUtils.quotedValue(sourceId.facility()), uuid, AppUtils.quotedValue(sourceId.patient()), + """, + uuid, + AppUtils.quotedValue(sourceId.facility()), + uuid, + AppUtils.quotedValue(sourceId.patient()), uuid); } private DgraphSourceIds getSourceId(final CustomSourceId sourceId) { - if (StringUtils.isBlank(sourceId.facility()) - || StringUtils.isBlank(sourceId.patient())) { + if (StringUtils.isBlank(sourceId.facility()) || StringUtils.isBlank(sourceId.patient())) { return new DgraphSourceIds(List.of()); } - final String query = String.format( - Locale.ROOT, - """ - query query_source_id() { - var(func: eq(SourceId.facility, "%s")) { - A as uid - } - var(func: eq(SourceId.patient, "%s")) { - B as uid - } - all(func: uid(A,B)) @filter (uid(A) AND uid(B)) { - uid - expand(SourceId) - } - } - """, sourceId.facility(), sourceId.patient()); + final String query = String.format(Locale.ROOT, """ + query query_source_id() { + var(func: eq(SourceId.facility, "%s")) { + A as uid + } + var(func: eq(SourceId.patient, "%s")) { + B as uid + } + all(func: uid(A,B)) @filter (uid(A) AND uid(B)) { + uid + expand(SourceId) + } + } + """, sourceId.facility(), sourceId.patient()); return DgraphQueries.runSourceIdQuery(query); } @@ -98,12 +95,15 @@ private boolean updateGoldenRecordPredicate( final String predicate, final String value) { final var mutation = DgraphProto.Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - """ - <%s> <%s> "%s"^^ . - <%s> "GoldenRecord" . - """, goldenId, predicate, value, goldenId))) + .setSetNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> <%s> "%s"^^ . + <%s> "GoldenRecord" . 
+ """, + goldenId, + predicate, + value, + goldenId))) .build(); return DgraphClient.getInstance().doMutateTransaction(mutation) != null; } @@ -114,18 +114,17 @@ private boolean updateGoldenRecordPredicate( final String predicate, final Boolean value) { final var mutation = DgraphProto.Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - """ - <%s> <%s> "%s"^^ . - <%s> "GoldenRecord" . - """, - goldenId, - predicate, - Boolean.TRUE.equals(value) - ? "true" - : "false", - goldenId))) + .setSetNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> <%s> "%s"^^ . + <%s> "GoldenRecord" . + """, + goldenId, + predicate, + Boolean.TRUE.equals(value) + ? "true" + : "false", + goldenId))) .build(); return DgraphClient.getInstance().doMutateTransaction(mutation) != null; } @@ -135,12 +134,15 @@ private boolean updateGoldenRecordPredicate( final String predicate, final Double value) { final var mutation = DgraphProto.Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - """ - <%s> <%s> "%f"^^ . - <%s> "GoldenRecord" . - """, goldenId, predicate, value, goldenId))) + .setSetNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> <%s> "%f"^^ . + <%s> "GoldenRecord" . + """, + goldenId, + predicate, + value, + goldenId))) .build(); return DgraphClient.getInstance().doMutateTransaction(mutation) != null; } @@ -150,12 +152,15 @@ private boolean updateGoldenRecordPredicate( final String predicate, final Long value) { final var mutation = DgraphProto.Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - """ - <%s> <%s> "%d"^^ . - <%s> "GoldenRecord" . - """, goldenId, predicate, value, goldenId))) + .setSetNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> <%s> "%d"^^ . + <%s> "GoldenRecord" . 
+ """, + goldenId, + predicate, + value, + goldenId))) .build(); return DgraphClient.getInstance().doMutateTransaction(mutation) != null; } @@ -164,28 +169,28 @@ private boolean deletePredicate( final String uid, final String predicate, final String value) { - final var mutation = DgraphProto.Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - """ - <%s> <%s> <%s> . - """, uid, predicate, value))) - .build(); + final var mutation = DgraphProto.Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> <%s> <%s> . + """, + uid, + predicate, + value))).build(); return DgraphClient.getInstance().doMutateTransaction(mutation) != null; } private void addScoreFacets(final List interactionScoreList) { StringBuilder simWeightFacet = new StringBuilder(); for (DgraphPairWithScore interactionScore : interactionScoreList) { - simWeightFacet.append( - String.format(Locale.ROOT, - "<%s> <%s> (score=%f) .%n", - interactionScore.goldenUID(), interactionScore.interactionUID(), interactionScore.score())); + simWeightFacet.append(String.format(Locale.ROOT, + "<%s> <%s> (score=%f) .%n", + interactionScore.goldenUID(), + interactionScore.interactionUID(), + interactionScore.score())); } final var s = simWeightFacet.toString(); - final DgraphProto.Mutation mu = DgraphProto.Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(s)) - .build(); + final DgraphProto.Mutation mu = DgraphProto.Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(s)).build(); DgraphClient.getInstance().doMutateTransaction(mu); } @@ -194,8 +199,7 @@ private void addSourceId( final String uid, final String sourceId) { final var mutation = String.format(Locale.ROOT, "<%s> <%s> .%n", uid, sourceId); - final DgraphProto.Mutation mu = DgraphProto.Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(mutation)) - .build(); + final DgraphProto.Mutation mu = 
DgraphProto.Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(mutation)).build(); DgraphClient.getInstance().doMutateTransaction(mu); } @@ -208,14 +212,12 @@ private InsertInteractionResult insertInteraction(final Interaction interaction) final var sourceIdUid = !sourceId.isEmpty() ? sourceId.get(0).uid() : DgraphClient.getInstance().doMutateTransaction(sourceIdMutation); - final DgraphProto.Mutation mutation = DgraphProto - .Mutation - .newBuilder() - .setSetNquads(ByteString.copyFromUtf8(CustomDgraphMutations - .createInteractionTriple(interaction.uniqueInteractionData(), - interaction.demographicData(), - sourceIdUid))) - .build(); + final DgraphProto.Mutation mutation = DgraphProto.Mutation.newBuilder() + .setSetNquads(ByteString.copyFromUtf8(CustomDgraphMutations.createInteractionTriple( + interaction.uniqueInteractionData(), + interaction.demographicData(), + sourceIdUid))) + .build(); return new InsertInteractionResult(DgraphClient.getInstance().doMutateTransaction(mutation), sourceIdUid); } @@ -230,20 +232,18 @@ private String cloneGoldenRecordFromInteraction( interactionUID, sourceUID, score); - final DgraphProto.Mutation mutation = DgraphProto.Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(command)) - .build(); + final DgraphProto.Mutation mutation = + DgraphProto.Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(command)).build(); return DgraphClient.getInstance().doMutateTransaction(mutation); } private void deleteGoldenRecord(final String goldenId) { final var mutation = DgraphProto.Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8( - String.format(Locale.ROOT, - """ - <%s> * * . - """, - goldenId))) + .setDelNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + """ + <%s> * * . 
+ """, + goldenId))) .build(); DgraphClient.getInstance().doMutateTransaction(mutation); } @@ -283,10 +283,9 @@ Either linkToNewGoldenRecord( final var goldenUidInteractionUidList = DgraphQueries.findExpandedGoldenIds(currentGoldenId); if (goldenUidInteractionUidList.isEmpty() || !goldenUidInteractionUidList.contains(interactionId)) { - return Either.left( - new MpiServiceError.GoldenIdInteractionConflictError("Interaction not linked to GoldenRecord", - currentGoldenId, - interactionId)); + return Either.left(new MpiServiceError.GoldenIdInteractionConflictError("Interaction not linked to GoldenRecord", + currentGoldenId, + interactionId)); } final var count = goldenUidInteractionUidList.size(); @@ -306,11 +305,12 @@ Either linkToNewGoldenRecord( if (count == 1) { deleteGoldenRecord(currentGoldenId); } - final var newGoldenID = cloneGoldenRecordFromInteraction( - interaction.demographicData(), interaction.interactionId(), - interaction.sourceId().uid(), - score, new CustomUniqueGoldenRecordData(LocalDateTime.now(), true, interaction.uniqueInteractionData().auxId())); - return Either.right(new LinkInfo(newGoldenID, interactionId, score)); + final var newGoldenID = cloneGoldenRecordFromInteraction(interaction.demographicData(), + interaction.interactionId(), + interaction.sourceId().uid(), + score, + new CustomUniqueGoldenRecordData(interaction.uniqueInteractionData())); + return Either.right(new LinkInfo(newGoldenID, interactionId, interaction.sourceId().uid(), score)); } Either updateLink( @@ -320,9 +320,9 @@ Either updateLink( final float score) { final var goldenUidInteractionUidList = DgraphQueries.findExpandedGoldenIds(goldenId); if (goldenUidInteractionUidList.isEmpty() || !goldenUidInteractionUidList.contains(interactionId)) { - return Either.left( - new MpiServiceError.GoldenIdInteractionConflictError("Interaction not linked to GoldenRecord", goldenId, - interactionId)); + return Either.left(new MpiServiceError.GoldenIdInteractionConflictError("Interaction 
not linked to GoldenRecord", + goldenId, + interactionId)); } final var count = DgraphQueries.countGoldenRecordEntities(goldenId); @@ -334,7 +334,7 @@ Either updateLink( final var scoreList = new ArrayList(); scoreList.add(new DgraphPairWithScore(newGoldenId, interactionId, score)); addScoreFacets(scoreList); - return Either.right(new LinkInfo(newGoldenId, interactionId, score)); + return Either.right(new LinkInfo(newGoldenId, interactionId, null, score)); // FIX: need to return the source id } LinkInfo linkDGraphInteraction( @@ -352,16 +352,12 @@ LinkInfo linkDGraphInteraction( addSourceId(interactionScoreList.get(0).goldenUID(), result.sourceUID); final var grUID = interactionScoreList.get(0).goldenUID(); final var theScore = interactionScoreList.get(0).score(); - return new LinkInfo(grUID, result.interactionUID, theScore); + return new LinkInfo(grUID, result.interactionUID, result.sourceUID, theScore); } Option createSchema() { - final var schema = CustomDgraphConstants.MUTATION_CREATE_SOURCE_ID_TYPE - + CustomDgraphConstants.MUTATION_CREATE_GOLDEN_RECORD_TYPE - + CustomDgraphConstants.MUTATION_CREATE_INTERACTION_TYPE - + CustomDgraphConstants.MUTATION_CREATE_SOURCE_ID_FIELDS - + CustomDgraphConstants.MUTATION_CREATE_GOLDEN_RECORD_FIELDS - + CustomDgraphConstants.MUTATION_CREATE_INTERACTION_FIELDS; + final var schema = + CustomDgraphConstants.MUTATION_CREATE_SOURCE_ID_TYPE + CustomDgraphConstants.MUTATION_CREATE_GOLDEN_RECORD_TYPE + CustomDgraphConstants.MUTATION_CREATE_INTERACTION_TYPE + CustomDgraphConstants.MUTATION_CREATE_SOURCE_ID_FIELDS + CustomDgraphConstants.MUTATION_CREATE_GOLDEN_RECORD_FIELDS + CustomDgraphConstants.MUTATION_CREATE_INTERACTION_FIELDS; try { final DgraphProto.Operation operation = DgraphProto.Operation.newBuilder().setSchema(schema).build(); DgraphClient.getInstance().alter(operation); @@ -380,12 +376,13 @@ boolean setScore( final String goldenRecordUid, final float score) { final var mutation = DgraphProto.Mutation.newBuilder() - 
.setSetNquads(ByteString.copyFromUtf8(String.format( - Locale.ROOT, - "<%s> <%s> (score=%f) .%n", - goldenRecordUid, - interactionUid, - score))) + .setSetNquads(ByteString.copyFromUtf8(String.format(Locale.ROOT, + "<%s> <%s> " + + "(score=%f) .%n", + goldenRecordUid, + interactionUid, + score))) .build(); final var result = DgraphClient.getInstance().doMutateTransaction(mutation); if (LOGGER.isTraceEnabled()) { diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginatedUidList.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginatedUidList.java index 1dedcab91..909375851 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginatedUidList.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginatedUidList.java @@ -7,8 +7,9 @@ import java.util.List; @JsonInclude(JsonInclude.Include.NON_NULL) -public record DgraphPaginatedUidList(@JsonProperty("all") List all, - @JsonProperty("pagination") List pagination) { +public record DgraphPaginatedUidList( + @JsonProperty("all") List all, + @JsonProperty("pagination") List pagination) { DgraphPaginatedUidList(@JsonProperty("all") final List all) { this(all, List.of(new LibMPIPagination(all.size()))); } diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginationUidListWithInteractionCount.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginationUidListWithInteractionCount.java index b60a9cf46..8cf84be6a 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginationUidListWithInteractionCount.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPaginationUidListWithInteractionCount.java @@ -6,11 +6,15 @@ import org.jembi.jempi.shared.models.LibMPIPagination; import java.util.List; + @JsonInclude(JsonInclude.Include.NON_NULL) -public record 
DgraphPaginationUidListWithInteractionCount(@JsonProperty("all") List all, - @JsonProperty("pagination") List pagination, - @JsonProperty("interactionCount") List interactionCount) { - DgraphPaginationUidListWithInteractionCount(@JsonProperty("all") final List all, @JsonProperty("interactionCount") final List interactionCount) { +public record DgraphPaginationUidListWithInteractionCount( + @JsonProperty("all") List all, + @JsonProperty("pagination") List pagination, + @JsonProperty("interactionCount") List interactionCount) { + DgraphPaginationUidListWithInteractionCount( + @JsonProperty("all") final List all, + @JsonProperty("interactionCount") final List interactionCount) { this(all, List.of(new LibMPIPagination(all.size())), interactionCount); } } diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPairWithScore.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPairWithScore.java index c542178a0..9ebc00b2b 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPairWithScore.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphPairWithScore.java @@ -6,5 +6,6 @@ record DgraphPairWithScore( String goldenUID, String interactionUID, - float score) {} + float score) { +} diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphQueries.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphQueries.java index 7643f3ddc..c84d0c87d 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphQueries.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphQueries.java @@ -131,16 +131,15 @@ static CustomDgraphGoldenRecord findDgraphGoldenRecord(final String goldenId) { } static List findExpandedGoldenIds(final String goldenId) { - final String query = String.format(Locale.ROOT, - """ - query recordGoldenUidInteractionUidList() { - 
list(func: uid(%s)) { - uid - list: GoldenRecord.interactions { - uid - } - } - }""", goldenId); + final String query = String.format(Locale.ROOT, """ + query recordGoldenUidInteractionUidList() { + list(func: uid(%s)) { + uid + list: GoldenRecord.interactions { + uid + } + } + }""", goldenId); try { final var json = DgraphClient.getInstance().executeReadOnlyTransaction(query, null); final var response = AppUtils.OBJECT_MAPPER.readValue(json, DgraphUidUidList.class); @@ -177,13 +176,12 @@ query recordGoldenId() { static List fetchGoldenIds( final long offset, final long length) { - final String query = String.format(Locale.ROOT, - """ - query recordGoldenIds() { - list(func: type(GoldenRecord), offset: %d, first: %d) { - uid - } - }""", offset, length); + final String query = String.format(Locale.ROOT, """ + query recordGoldenIds() { + list(func: type(GoldenRecord), offset: %d, first: %d) { + uid + } + }""", offset, length); try { final var json = DgraphClient.getInstance().executeReadOnlyTransaction(query, null); final var response = AppUtils.OBJECT_MAPPER.readValue(json, DgraphUidList.class); @@ -221,13 +219,12 @@ query recordCount() { } static long countGoldenRecordEntities(final String goldenId) { - final var query = String.format(Locale.ROOT, - """ - query recordCount() { - list(func: uid(%s)) { - count: count(GoldenRecord.interactions) - } - }""", goldenId); + final var query = String.format(Locale.ROOT, """ + query recordCount() { + list(func: uid(%s)) { + count: count(GoldenRecord.interactions) + } + }""", goldenId); return getCount(query); } @@ -245,8 +242,7 @@ static LinkedList deterministicFilter( final List> listFunction, final CustomDemographicData interaction) { final LinkedList candidateGoldenRecords = new LinkedList<>(); - for (Function1 deterministicFunction : listFunction) { + for (Function1 deterministicFunction : listFunction) { final var block = deterministicFunction.apply(interaction); if (!block.all().isEmpty()) { final var list = 
block.all(); @@ -367,8 +363,8 @@ private static HashMap getCustomSearchQueryVariables(final List< } private static String getSimpleSearchQueryFilters( - final RecordType recordType, - final List parameters) { + final RecordType recordType, + final List parameters) { List gqlFilters = new ArrayList<>(); for (ApiModels.ApiSearchParameter param : parameters) { if (!param.value().isEmpty()) { @@ -376,9 +372,20 @@ private static String getSimpleSearchQueryFilters( Integer distance = param.distance(); String value = param.value(); if (distance == -1) { - gqlFilters.add("le(" + recordType + "." + fieldName + ", \"" + value + "\")"); + if (value.contains("_")) { + gqlFilters.add("ge(" + recordType + "." + fieldName + ", \"" + value.substring(0, value.indexOf("_")) + + "\") AND le(" + + recordType + "." + fieldName + ", \"" + value.substring(value.indexOf("_") + 1) + "\")"); + } else { + gqlFilters.add("le(" + recordType + "." + fieldName + ", \"" + value + "\")"); + } } else if (distance == 0) { - gqlFilters.add("eq(" + recordType + "." + fieldName + ", \"" + value + "\")"); + if (value.contains("_")) { + gqlFilters.add( + "eq(" + recordType + "." + fieldName + ", \"" + value.substring(0, value.indexOf("_")) + "\")"); + } else { + gqlFilters.add("eq(" + recordType + "." + fieldName + ", \"" + value + "\")"); + } } else { gqlFilters.add("match(" + recordType + "." 
+ fieldName + ", $" + fieldName + ", " + distance + ")"); } @@ -530,14 +537,13 @@ private static Either(); map.put("$" + camelToSnake(op.name()), op.value()); diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUid.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUid.java index d0b4bafa8..f1d81ec9c 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUid.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUid.java @@ -4,5 +4,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; @JsonInclude(JsonInclude.Include.NON_NULL) -record DgraphUid(@JsonProperty("uid") String uid) {} +record DgraphUid(@JsonProperty("uid") String uid) { +} diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidList.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidList.java index 3a26f8139..c61164468 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidList.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidList.java @@ -6,4 +6,5 @@ import java.util.List; @JsonInclude(JsonInclude.Include.NON_NULL) -record DgraphUidList(@JsonProperty("list") List list) {} +record DgraphUidList(@JsonProperty("list") List list) { +} diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidUidList.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidUidList.java index 09ba3fbea..f388baa9a 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidUidList.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/dgraph/DgraphUidUidList.java @@ -9,5 +9,6 @@ record DgraphUidUidList(@JsonProperty("list") List list) { record RecUidList( @JsonProperty("uid") String uid, - @JsonProperty("list") List list) {} + @JsonProperty("list") List 
list) { + } } diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Edge.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Edge.java index 4b0c8b697..7cdf08286 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Edge.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Edge.java @@ -24,16 +24,15 @@ static void createEdge( final UUID uid1, final UUID uid2, final EdgeName edgeName) { - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - INSERT INTO %s (source, dest, name) VALUES ('%s', '%s', '%s'); - """, - TABLE_EDGES, - uid1.toString(), uid2.toString(), edgeName.name()).stripIndent() - .stripIndent(), - Statement.RETURN_GENERATED_KEYS) - ) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, + """ + INSERT INTO %s (source, dest, name) VALUES ('%s', '%s', '%s'); + """, + TABLE_EDGES, + uid1.toString(), + uid2.toString(), + edgeName.name()).stripIndent().stripIndent(), + Statement.RETURN_GENERATED_KEYS)) { stmt.executeUpdate(); try (ResultSet keys = stmt.getGeneratedKeys()) { keys.next(); @@ -48,19 +47,16 @@ static void createEdge( final UUID uid2, final EdgeName edgeName, final Facet facet) { - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - INSERT INTO %s (source, dest, name, facet) VALUES ('%s', '%s', '%s', '%s'); - """, - TABLE_EDGES, - uid1.toString(), - uid2.toString(), - edgeName.name(), - OBJECT_MAPPER.writeValueAsString(facet)) - .stripIndent(), - Statement.RETURN_GENERATED_KEYS) - ) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, + """ + INSERT INTO %s (source, dest, name, facet) VALUES ('%s', '%s', '%s', '%s'); + """, + TABLE_EDGES, + uid1.toString(), + uid2.toString(), + edgeName.name(), + OBJECT_MAPPER.writeValueAsString(facet)) + 
.stripIndent(), Statement.RETURN_GENERATED_KEYS)) { stmt.executeUpdate(); try (ResultSet keys = stmt.getGeneratedKeys()) { keys.next(); diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/FacetScore.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/FacetScore.java index 2b3765e9f..d0b89f929 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/FacetScore.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/FacetScore.java @@ -1,6 +1,5 @@ package org.jembi.jempi.libmpi.postgresql; record FacetScore( - Float score -) implements Facet { + Float score) implements Facet { } diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/LibPostgresql.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/LibPostgresql.java index 96c7a7b78..6caa0a811 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/LibPostgresql.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/LibPostgresql.java @@ -72,10 +72,7 @@ private ExpandedInteraction findExpandedInteraction(final String eid) { .patient())) .toList(), new CustomUniqueGoldenRecordData( - LocalDateTime.now(), - true, - interaction.uniqueInteractionData() - .auxId()), + interaction.uniqueInteractionData()), goldenRecord.data()), PostgresqlQueries.getScore(goldenRecord.uid(), UUID.fromString(eid))))); @@ -92,7 +89,7 @@ public GoldenRecord findGoldenRecord(final String goldenId) { sourceIds.stream() .map(x -> new CustomSourceId(x.id().toString(), x.data().facility(), x.data().patient())) .toList(), - new CustomUniqueGoldenRecordData(LocalDateTime.now(), true, "AUX_ID"), + new CustomUniqueGoldenRecordData(null), goldenRecord.data()); } @@ -278,7 +275,7 @@ public LinkInfo createInteractionAndLinkToExistingGoldenRecord( eid, Edge.EdgeName.GID2IID, new FacetScore(goldenIdScore.score())); - return new 
LinkInfo(goldenIdScore.goldenId(), eid.toString(), goldenIdScore.score()); + return new LinkInfo(goldenIdScore.goldenId(), eid.toString(), null, goldenIdScore.score()); } public LinkInfo createInteractionAndLinkToClonedGoldenRecord( @@ -291,7 +288,7 @@ public LinkInfo createInteractionAndLinkToClonedGoldenRecord( Edge.createEdge(gid, sid, Edge.EdgeName.GID2SID); Edge.createEdge(gid, iid, Edge.EdgeName.GID2IID, new FacetScore(score)); - return new LinkInfo(gid.toString(), iid.toString(), score); + return new LinkInfo(gid.toString(), iid.toString(), null, score); } public void startTransaction() { diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Node.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Node.java index 9e1418761..6411cec85 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Node.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/Node.java @@ -23,16 +23,15 @@ interface Node { default UUID createNode() { UUID uid; - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - insert into %s (type, fields) - values ('%s', '%s'); - """, - TABLE_NODES, - this.getType().name(), - OBJECT_MAPPER.writeValueAsString(this.getNodeData())).stripIndent(), - Statement.RETURN_GENERATED_KEYS)) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, + """ + insert into %s (type, fields) + values ('%s', '%s'); + """, + TABLE_NODES, + this.getType().name(), + OBJECT_MAPPER.writeValueAsString(this.getNodeData())) + .stripIndent(), Statement.RETURN_GENERATED_KEYS)) { stmt.executeUpdate(); try (ResultSet keys = stmt.getGeneratedKeys()) { keys.next(); diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlMutations.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlMutations.java index 
7bfbaa647..002560bce 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlMutations.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlMutations.java @@ -29,29 +29,21 @@ static boolean dropAll() { LOGGER.debug("Drop All"); try (var stmt = PostgresqlClient.getInstance().createStatement()) { - stmt.executeUpdate( - String.format(Locale.ROOT, - """ - DROP TABLE IF EXISTS %s - """, TABLE_EDGES).stripIndent()); - - stmt.executeUpdate( - String.format(Locale.ROOT, - """ - DROP TABLE IF EXISTS %s - """, TABLE_NODES).stripIndent()); - - stmt.executeUpdate( - String.format(Locale.ROOT, - """ - DROP TYPE IF EXISTS %s - """, TYPE_NODE_TYPE).stripIndent()); - - stmt.executeUpdate( - String.format(Locale.ROOT, - """ - DROP TYPE IF EXISTS %s - """, TYPE_EDGE_NAME).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + DROP TABLE IF EXISTS %s + """, TABLE_EDGES).stripIndent()); + + stmt.executeUpdate(String.format(Locale.ROOT, """ + DROP TABLE IF EXISTS %s + """, TABLE_NODES).stripIndent()); + + stmt.executeUpdate(String.format(Locale.ROOT, """ + DROP TYPE IF EXISTS %s + """, TYPE_NODE_TYPE).stripIndent()); + + stmt.executeUpdate(String.format(Locale.ROOT, """ + DROP TYPE IF EXISTS %s + """, TYPE_EDGE_NAME).stripIndent()); } catch (SQLException e) { LOGGER.error(e.getLocalizedMessage(), e); return false; @@ -66,122 +58,84 @@ static boolean createSchema() { stmt.executeUpdate("CREATE EXTENSION pg_trgm;"); stmt.executeUpdate("CREATE EXTENSION fuzzystrmatch;"); stmt.executeUpdate("CREATE EXTENSION btree_gist;"); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TYPE %s AS ENUM ('%s','%s','%s'); - """, - TYPE_NODE_TYPE, - Node.NodeType.GOLDEN_RECORD.name(), - Node.NodeType.INTERACTION.name(), - Node.NodeType.SOURCE_ID.name()).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TYPE %s AS ENUM ('%s','%s','%s'); - """, - TYPE_EDGE_NAME, - 
Edge.EdgeName.IID2SID.name(), - Edge.EdgeName.GID2SID.name(), - Edge.EdgeName.GID2IID.name()).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s ( - type %s NOT NULL, - id UUID NOT NULL DEFAULT gen_random_uuid(), - fields JSONB NOT NULL, - CREATED_AT TIMESTAMPTZ NOT NULL DEFAULT now(), - UPDATED_AT TIMESTAMPTZ NOT NULL DEFAULT now(), - CONSTRAINT PKEY_NODES PRIMARY KEY (id, type) - ) PARTITION BY LIST(type); - """, - TABLE_NODES, - TYPE_NODE_TYPE).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_NODE_GOLDEN_RECORDS, - TABLE_NODES, - Node.NodeType.GOLDEN_RECORD).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_NODE_INTERACTIONS, - TABLE_NODES, - Node.NodeType.INTERACTION).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_NODE_SOURCE_IDS, - TABLE_NODES, - Node.NodeType.SOURCE_ID).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s ( - name %s NOT NULL, - source UUID NOT NULL, - dest UUID NOT NULL, - facet JSONB, - CONSTRAINT PKEY_EDGES PRIMARY KEY (name, source, dest) - ) PARTITION BY LIST(name); - """, - TABLE_EDGES, - TYPE_EDGE_NAME).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_EDGES_GID2EID, - TABLE_EDGES, - Edge.EdgeName.GID2IID).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_EDGES_GID2SID, - TABLE_EDGES, - Edge.EdgeName.GID2SID).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - 
""" - CREATE TABLE IF NOT EXISTS %s - PARTITION OF %s - FOR VALUES IN ('%s'); - """, - TABLE_EDGES_EID2SID, - TABLE_EDGES, - Edge.EdgeName.IID2SID).stripIndent()); - - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE INDEX IF NOT EXISTS idx_gin_gr_a ON %s USING gin (fields jsonb_ops); - """, TABLE_NODE_GOLDEN_RECORDS).stripIndent()); - stmt.executeUpdate(String.format( - Locale.ROOT, - """ - CREATE INDEX IF NOT EXISTS idx_gin_gr_b ON %s USING gin (fields jsonb_path_ops); - """, TABLE_NODE_GOLDEN_RECORDS).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, + """ + CREATE TYPE %s AS ENUM ('%s','%s','%s'); + """, + TYPE_NODE_TYPE, + Node.NodeType.GOLDEN_RECORD.name(), + Node.NodeType.INTERACTION.name(), + Node.NodeType.SOURCE_ID.name()).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, + """ + CREATE TYPE %s AS ENUM ('%s','%s','%s'); + """, + TYPE_EDGE_NAME, + Edge.EdgeName.IID2SID.name(), + Edge.EdgeName.GID2SID.name(), + Edge.EdgeName.GID2IID.name()).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s ( + type %s NOT NULL, + id UUID NOT NULL DEFAULT gen_random_uuid(), + fields JSONB NOT NULL, + CREATED_AT TIMESTAMPTZ NOT NULL DEFAULT now(), + UPDATED_AT TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT PKEY_NODES PRIMARY KEY (id, type) + ) PARTITION BY LIST(type); + """, TABLE_NODES, TYPE_NODE_TYPE).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, TABLE_NODE_GOLDEN_RECORDS, TABLE_NODES, Node.NodeType.GOLDEN_RECORD) + .stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, TABLE_NODE_INTERACTIONS, TABLE_NODES, Node.NodeType.INTERACTION) + .stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, 
TABLE_NODE_SOURCE_IDS, TABLE_NODES, Node.NodeType.SOURCE_ID) + .stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s ( + name %s NOT NULL, + source UUID NOT NULL, + dest UUID NOT NULL, + facet JSONB, + CONSTRAINT PKEY_EDGES PRIMARY KEY (name, source, dest) + ) PARTITION BY LIST(name); + """, TABLE_EDGES, TYPE_EDGE_NAME).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, TABLE_EDGES_GID2EID, TABLE_EDGES, Edge.EdgeName.GID2IID) + .stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, TABLE_EDGES_GID2SID, TABLE_EDGES, Edge.EdgeName.GID2SID) + .stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE TABLE IF NOT EXISTS %s + PARTITION OF %s + FOR VALUES IN ('%s'); + """, TABLE_EDGES_EID2SID, TABLE_EDGES, Edge.EdgeName.IID2SID) + .stripIndent()); + + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE INDEX IF NOT EXISTS idx_gin_gr_a ON %s USING gin (fields jsonb_ops); + """, TABLE_NODE_GOLDEN_RECORDS).stripIndent()); + stmt.executeUpdate(String.format(Locale.ROOT, """ + CREATE INDEX IF NOT EXISTS idx_gin_gr_b ON %s USING gin (fields jsonb_path_ops); + """, TABLE_NODE_GOLDEN_RECORDS).stripIndent()); CustomMutations.createSchema(stmt); } catch (SQLException e) { LOGGER.error(e.getLocalizedMessage(), e); @@ -194,16 +148,11 @@ static boolean updateGoldenRecordField( final String goldenId, final String fieldName, final String val) { - final var sql = String.format( - Locale.ROOT, - """ - UPDATE %s - SET "fields" = JSONB_SET("fields"::JSONB, '{%s}', TO_JSONB('%s'::TEXT)) - WHERE id = ?; - """, - TABLE_NODE_GOLDEN_RECORDS, - fieldName, - val).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + UPDATE %s + SET "fields" = JSONB_SET("fields"::JSONB, '{%s}', TO_JSONB('%s'::TEXT)) + WHERE id = ?; + """, 
TABLE_NODE_GOLDEN_RECORDS, fieldName, val).stripIndent(); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setObject(1, goldenId, Types.OTHER); final var rs = stmt.executeUpdate(); @@ -218,17 +167,11 @@ static boolean setScore( final String interactionUID, final String goldenRecordUid, final float score) { - final var sql = String.format( - Locale.ROOT, - """ - UPDATE %s - SET facet = JSONB_SET(facet, '{score}', to_jsonb(%f)) - WHERE source = '%s' AND dest = '%s'; - """, - TABLE_EDGES_GID2EID, - score, - goldenRecordUid, - interactionUID).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + UPDATE %s + SET facet = JSONB_SET(facet, '{score}', to_jsonb(%f)) + WHERE source = '%s' AND dest = '%s'; + """, TABLE_EDGES_GID2EID, score, goldenRecordUid, interactionUID).stripIndent(); try (var stmt = PostgresqlClient.getInstance().createStatement()) { final var rs = stmt.executeUpdate(sql); return rs == 1; diff --git a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlQueries.java b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlQueries.java index 32e441e2b..5c4c36a4b 100644 --- a/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlQueries.java +++ b/JeMPI_Apps/JeMPI_LibMPI/src/main/java/org/jembi/jempi/libmpi/postgresql/PostgresqlQueries.java @@ -10,7 +10,6 @@ import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Types; -import java.time.LocalDateTime; import java.util.*; import static org.jembi.jempi.libmpi.postgresql.PostgresqlMutations.*; @@ -80,7 +79,8 @@ public static Long countGoldenRecords() { public static List findSourceId( final String facility, final String patient) { - final var sql = String.format(Locale.ROOT, "select * from %s where fields->>'facility' = ? and fields->>'patient' = ?;", + final var sql = String.format(Locale.ROOT, + "select * from %s where fields->>'facility' = ? 
and fields->>'patient' = ?;", TABLE_NODE_SOURCE_IDS); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setString(1, facility); @@ -114,13 +114,10 @@ private static List runQuery(final PreparedStatement stmt) throws } public static List getInteractionSourceIds(final UUID eid) { - final var sql = String.format(Locale.ROOT, - """ - SELECT * FROM %s - WHERE id IN (SELECT dest FROM %s WHERE source = ?); - """, - TABLE_NODE_SOURCE_IDS, - TABLE_EDGES_EID2SID).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + SELECT * FROM %s + WHERE id IN (SELECT dest FROM %s WHERE source = ?); + """, TABLE_NODE_SOURCE_IDS, TABLE_EDGES_EID2SID).stripIndent(); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setObject(1, eid, Types.OTHER); return runQuery(stmt); @@ -131,13 +128,10 @@ public static List getInteractionSourceIds(final UUID eid) { } public static List getGoldenRecordSourceIds(final UUID gid) { - final var sql = String.format(Locale.ROOT, - """ - select * from %s - where id in (select dest from %s where source = ?); - """, - TABLE_NODE_SOURCE_IDS, - TABLE_EDGES_GID2SID).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + select * from %s + where id in (select dest from %s where source = ?); + """, TABLE_NODE_SOURCE_IDS, TABLE_EDGES_GID2SID).stripIndent(); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setObject(1, gid, Types.OTHER); return runQuery(stmt); @@ -148,13 +142,10 @@ public static List getGoldenRecordSourceIds(final UUID gid) { } public static List getGoldenRecordsOfInteraction(final UUID eid) { - final var sql = String.format(Locale.ROOT, - """ - select * from %s - where id in (select source from %s where dest = ?); - """, - TABLE_NODE_GOLDEN_RECORDS, - TABLE_EDGES_GID2EID).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + select * from %s + where id in (select source from %s where dest = ?); + """, TABLE_NODE_GOLDEN_RECORDS, 
TABLE_EDGES_GID2EID).stripIndent(); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setObject(1, eid, Types.OTHER); final var rs = stmt.executeQuery(); @@ -162,11 +153,8 @@ public static List getGoldenRecordsOfInteraction(final UUID ei while (rs.next()) { final var id = rs.getString("id"); final var json = rs.getString("fields"); - final var goldenRecordData = new CustomGoldenRecordData(OBJECT_MAPPER.readValue(json, - CustomDemographicData.class)); - list.add(new NodeGoldenRecord(Node.NodeType.valueOf(rs.getString("type")), - UUID.fromString(id), - goldenRecordData)); + final var goldenRecordData = new CustomGoldenRecordData(OBJECT_MAPPER.readValue(json, CustomDemographicData.class)); + list.add(new NodeGoldenRecord(Node.NodeType.valueOf(rs.getString("type")), UUID.fromString(id), goldenRecordData)); } return list; } catch (SQLException | JsonProcessingException e) { @@ -176,13 +164,10 @@ public static List getGoldenRecordsOfInteraction(final UUID ei } public static List getGoldenRecordInteractions(final UUID gid) { - final var sql = String.format(Locale.ROOT, - """ - select * from %s - where id in (select dest from %s where source = ?); - """, - TABLE_NODE_INTERACTIONS, - TABLE_EDGES_GID2EID).stripIndent(); + final var sql = String.format(Locale.ROOT, """ + select * from %s + where id in (select dest from %s where source = ?); + """, TABLE_NODE_INTERACTIONS, TABLE_EDGES_GID2EID).stripIndent(); try (var stmt = PostgresqlClient.getInstance().prepareStatement(sql)) { stmt.setObject(1, gid, Types.OTHER); final var rs = stmt.executeQuery(); @@ -207,14 +192,8 @@ private static List findCandidatesWorker(final String sql) { while (rs.next()) { final var id = rs.getString("id"); final var json = rs.getString("fields"); - final var goldenRecordData = new CustomGoldenRecordData(OBJECT_MAPPER.readValue(json, - CustomDemographicData.class)); - list.add(new GoldenRecord(id, - null, - new CustomUniqueGoldenRecordData(LocalDateTime.now(), - true, - 
"AUX_ID"), - goldenRecordData)); + final var goldenRecordData = new CustomGoldenRecordData(OBJECT_MAPPER.readValue(json, CustomDemographicData.class)); + list.add(new GoldenRecord(id, null, new CustomUniqueGoldenRecordData(null), goldenRecordData)); } return list; } catch (SQLException | JsonProcessingException e) { @@ -240,18 +219,16 @@ public static List findCandidates(final CustomDemographicData cust } public static NodeGoldenRecord getGoldenRecord(final UUID gid) { - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - select * from %s where id = ?; - """, - TABLE_NODE_GOLDEN_RECORDS).stripIndent())) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, """ + select * from %s where id = ?; + """, TABLE_NODE_GOLDEN_RECORDS) + .stripIndent())) { stmt.setObject(1, gid, Types.OTHER); final var rs = stmt.executeQuery(); if (rs.next()) { final var id = rs.getString("id"); - final var goldenRecordData = new CustomGoldenRecordData(OBJECT_MAPPER.readValue(rs.getString("fields"), - CustomDemographicData.class)); + final var goldenRecordData = + new CustomGoldenRecordData(OBJECT_MAPPER.readValue(rs.getString("fields"), CustomDemographicData.class)); return new NodeGoldenRecord(Node.NodeType.valueOf(rs.getString("type")), UUID.fromString(id), goldenRecordData); } return null; @@ -262,19 +239,16 @@ public static NodeGoldenRecord getGoldenRecord(final UUID gid) { } static NodeInteraction getInteraction(final UUID iid) { - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - select * from %s - where id = ?; - """, - TABLE_NODE_INTERACTIONS))) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, """ + select * from %s + where id = ?; + """, TABLE_NODE_INTERACTIONS))) { stmt.setObject(1, iid, Types.OTHER); final var rs = stmt.executeQuery(); if (rs.next()) { final var id = rs.getString("id"); - final 
var interactionData = new CustomInteractionData(OBJECT_MAPPER.readValue(rs.getString("fields"), - CustomDemographicData.class)); + final var interactionData = + new CustomInteractionData(OBJECT_MAPPER.readValue(rs.getString("fields"), CustomDemographicData.class)); return new NodeInteraction(Node.NodeType.valueOf(rs.getString("type")), UUID.fromString(id), interactionData); } return null; @@ -285,18 +259,15 @@ static NodeInteraction getInteraction(final UUID iid) { } public static NodeSourceId getSourceId(final UUID sid) { - try (var stmt = PostgresqlClient.getInstance().prepareStatement( - String.format(Locale.ROOT, - """ - select * from %s where id = ?; - """, - TABLE_NODE_SOURCE_IDS).stripIndent())) { + try (var stmt = PostgresqlClient.getInstance().prepareStatement(String.format(Locale.ROOT, """ + select * from %s where id = ?; + """, TABLE_NODE_SOURCE_IDS) + .stripIndent())) { stmt.setObject(1, sid, Types.OTHER); final var rs = stmt.executeQuery(); if (rs.next()) { final var id = rs.getString("id"); - final var sourceIdData = - OBJECT_MAPPER.readValue(rs.getString("fields"), NodeSourceId.SourceIdData.class); + final var sourceIdData = OBJECT_MAPPER.readValue(rs.getString("fields"), NodeSourceId.SourceIdData.class); return new NodeSourceId(Node.NodeType.valueOf(rs.getString("type")), UUID.fromString(id), sourceIdData); } return null; diff --git a/JeMPI_Apps/JeMPI_LibShared/checkstyle/suppression.xml b/JeMPI_Apps/JeMPI_LibShared/checkstyle/suppression.xml index e8f93a8b9..01df86651 100644 --- a/JeMPI_Apps/JeMPI_LibShared/checkstyle/suppression.xml +++ b/JeMPI_Apps/JeMPI_LibShared/checkstyle/suppression.xml @@ -36,5 +36,26 @@ files="CustomDemographicData.java" /> + + + + + + + + + diff --git a/JeMPI_Apps/JeMPI_LibShared/pom.xml b/JeMPI_Apps/JeMPI_LibShared/pom.xml index 8f01f34d0..9c64b9077 100644 --- a/JeMPI_Apps/JeMPI_LibShared/pom.xml +++ b/JeMPI_Apps/JeMPI_LibShared/pom.xml @@ -13,12 +13,6 @@ JeMPI_LibShared jar - - 17 - 17 - UTF-8 - - @@ -31,6 +25,26 @@ 
commons-text + + com.typesafe.akka + akka-stream_${scala.tools.version} + + + + com.typesafe.akka + akka-http_${scala.tools.version} + + + + com.typesafe.akka + akka-http-jackson_${scala.tools.version} + + + + ch.megard + akka-http-cors_${scala.tools.version} + + com.fasterxml.jackson.core jackson-annotations @@ -75,6 +89,12 @@ org.apache.logging.log4j log4j-slf4j-impl + + org.junit.jupiter + junit-jupiter-api + 5.10.0 + test + @@ -98,6 +118,15 @@ + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.source} + ${maven.compiler.target} + ${maven.compiler.target} + + @@ -132,4 +161,4 @@ - \ No newline at end of file + diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/KafkaTopicManager.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/KafkaTopicManager.java index 0b11a12d1..4d7d9afd6 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/KafkaTopicManager.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/KafkaTopicManager.java @@ -1,18 +1,21 @@ package org.jembi.jempi.shared.kafka; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.DeleteTopicsOptions; -import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.admin.*; import org.apache.kafka.common.KafkaFuture; import org.apache.kafka.common.config.TopicConfig; +import org.apache.kafka.common.errors.UnknownTopicOrPartitionException; import org.apache.kafka.streams.StreamsConfig; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; -import java.util.Collections; -import java.util.Properties; +import java.util.*; import java.util.concurrent.ExecutionException; +import java.util.function.Function; public final class KafkaTopicManager { + private static final Logger LOGGER = LogManager.getLogger(KafkaTopicManager.class); + private final AdminClient adminClient; public 
KafkaTopicManager(final String bootStrapServers) { @@ -21,21 +24,63 @@ public KafkaTopicManager(final String bootStrapServers) { adminClient = AdminClient.create(properties); } + public void close() { + adminClient.close(); + } + + public Collection getAllTopics() throws ExecutionException, InterruptedException { + return adminClient.listTopics(new ListTopicsOptions().listInternal(false)).listings().get(); + } + + public Boolean hasTopic(final String name) throws ExecutionException, InterruptedException { + return getAllTopics().stream().anyMatch(r -> r.name().equals(name)); + } + + public void checkTopicsWithWait(final Function, Boolean> checkFunc, final Integer timeoutMs) { + boolean isComplete = false; + int count = 0; + while (!isComplete) { + try { + Thread.sleep(200); + isComplete = checkFunc.apply(this.getAllTopics()) || count > timeoutMs; + count += 200; + } catch (ExecutionException | InterruptedException e) { + isComplete = true; + } + } + } + public Map describeTopic(final String topic) throws ExecutionException, InterruptedException { + return adminClient.describeTopics(Collections.singletonList(topic)).allTopicNames().get(); + } + public void createTopic( final String topicName, final int partitions, - final short replicationFactor) throws ExecutionException, InterruptedException { + final short replicationFactor, + final int retention_ms, + final int segments_bytes) throws ExecutionException, InterruptedException { NewTopic newTopic = new NewTopic(topicName, partitions, replicationFactor); - newTopic.configs(Collections.singletonMap(TopicConfig.RETENTION_MS_CONFIG, "86400000")); + + HashMap config = new HashMap<>(); + config.put(TopicConfig.RETENTION_MS_CONFIG, Integer.toString(retention_ms)); + config.put(TopicConfig.SEGMENT_BYTES_CONFIG, Integer.toString(segments_bytes)); + + newTopic.configs(config); KafkaFuture createFuture = adminClient.createTopics(Collections.singleton(newTopic)).all(); - createFuture.get(); // Wait for the topic creation to 
complete + createFuture.get(); } public void deleteTopic(final String topicName) throws ExecutionException, InterruptedException { KafkaFuture deleteFuture = adminClient.deleteTopics(Collections.singleton(topicName), new DeleteTopicsOptions()).all(); - deleteFuture.get(); // Wait for the topic deletion to complete + try { + deleteFuture.get(); // Wait for the topic deletion to complete + } catch (ExecutionException e) { + if (!(e.getCause() instanceof UnknownTopicOrPartitionException)) { + LOGGER.error(e.getLocalizedMessage(), e); + throw (e); + } + } } } - diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaConsumerByPartition.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaConsumerByPartition.java index 347eba596..43c365b49 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaConsumerByPartition.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaConsumerByPartition.java @@ -90,11 +90,10 @@ public void setOffset( TopicPartition tp = new TopicPartition(topic, partition); LOGGER.info("Set offset {} {}", tp, offset); // Get topic partitions - List partitions = consumer - .partitionsFor(topic) - .stream() - .map(partitionInfo -> new TopicPartition(topic, partitionInfo.partition())) - .collect(Collectors.toList()); + List partitions = consumer.partitionsFor(topic) + .stream() + .map(partitionInfo -> new TopicPartition(topic, partitionInfo.partition())) + .collect(Collectors.toList()); // Explicitly assign the partitions to our consumer consumer.assign(partitions); //seek, query offsets, or poll diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaProducer.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaProducer.java index 9e6cbea82..1a7a49b8f 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaProducer.java +++ 
b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/MyKafkaProducer.java @@ -48,8 +48,7 @@ public void commitTransaction() { public RecordMetadata produceSync( final KEY_TYPE key, - final VAL_TYPE item) throws ExecutionException, - InterruptedException { + final VAL_TYPE item) throws ExecutionException, InterruptedException { final ProducerRecord rec = new ProducerRecord<>(topic, key, item); return producer.send(rec).get(); } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessor.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessor.java new file mode 100644 index 000000000..49e06d05d --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessor.java @@ -0,0 +1,124 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.streams.*; +import org.apache.kafka.streams.errors.InvalidStateStoreException; +import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.state.*; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.shared.kafka.MyKafkaProducer; +import org.jembi.jempi.shared.kafka.global_context.store_processor.serde.StoreValueSerde; + +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +public class StoreProcessor { + private static final Logger LOGGER = LogManager.getLogger(StoreProcessor.class); + private final String topicName; + private final String sinkTopicName; + private final String topicStoreName; + private final ReadOnlyKeyValueStore keyValueStore; + private 
final MyKafkaProducer updater; + KafkaStreams streams; + protected StoreProcessor(final String bootStrapServers, final String topicNameIn, final String sinkTopicNameIn, final Class serializeCls) throws InterruptedException, ExecutionException { + + this.topicName = topicNameIn; + this.sinkTopicName = sinkTopicNameIn; + + String uniqueId = Utilities.getUniqueAppId(topicName, "" + ProcessHandle.current().pid()); + + this.topicStoreName = String.format("%s-store", topicName); + + StreamsBuilder builder = new StreamsBuilder(); + builder.addGlobalStore(Stores.keyValueStoreBuilder( + Stores.inMemoryKeyValueStore(topicStoreName), + Serdes.String(), + new StoreValueSerde(serializeCls)), + topicName, + Consumed.with(Serdes.String(), new StoreValueSerde(serializeCls)), + () -> new StoreProcessorValuesUpdater<>(getValueUpdater(), + topicName, + topicStoreName, + new StoreProcessorSinkManager<>(topicName, sinkTopicName, bootStrapServers, serializeCls))); + + streams = new KafkaStreams(builder.build(), this.getProperties(bootStrapServers, uniqueId)); + + streams.setUncaughtExceptionHandler(exception -> { + LOGGER.error(String.format("A error occurred on the global KTable stream %s", topicName), exception); + return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT; + }); + + streams.start(); + keyValueStore = streams.store(StoreQueryParameters.fromNameAndType(topicStoreName, + QueryableStoreTypes.keyValueStore())); + + waitUntilStoreIsQueryable().get(); + + updater = Utilities.getTopicProducer(topicName, bootStrapServers); + + Runtime.getRuntime().addShutdownHook(new Thread(streams::close)); + } + + public boolean validateIsAlive() { + if (!streams.state().isRunningOrRebalancing()) { + return false; + } + + try { + getValue(); + } catch (InvalidStateStoreException e) { + return false; + } + + return true; + } + private CompletableFuture waitUntilStoreIsQueryable() { + CompletableFuture future = new CompletableFuture<>(); + + 
CompletableFuture.runAsync(() -> { + while (true) { + try { + getValue(); + future.complete(true); + break; + } catch (InvalidStateStoreException ignored) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + future.completeExceptionally(e); + } + } catch (Exception e) { + future.completeExceptionally(e); + } + } + }); + + future.orTimeout(5000, TimeUnit.MILLISECONDS); + + return future; + } + + private Properties getProperties(final String bootStrapServers, final String uniqueName) { + Properties properties = new Properties(); + properties.put(StreamsConfig.APPLICATION_ID_CONFIG, String.format("%s-app.id", uniqueName)); + properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers); + + return properties; + } + protected StoreUpdaterProcessor getValueUpdater() { + return (T globalValue, T currentValue) -> currentValue; + } + public T getValue() { + return keyValueStore.get(topicName); + } + + public void updateValue(final T value) throws ExecutionException, InterruptedException { + updater.produceSync(this.topicName, value); + } + +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactory.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactory.java new file mode 100644 index 000000000..2d3412228 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactory.java @@ -0,0 +1,67 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.common.errors.TopicExistsException; +import org.apache.kafka.common.errors.UnknownTopicIdException; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.shared.kafka.KafkaTopicManager; + +import java.util.HashMap; +import java.util.List; 
+import java.util.Objects; +import java.util.concurrent.ExecutionException; + +public class StoreProcessorFactory { + private static final Logger LOGGER = LogManager.getLogger(StoreProcessorFactory.class); + private final HashMap> tables = new HashMap<>(); + private final KafkaTopicManager topicManager; + protected final String bootStrapServers; + + public StoreProcessorFactory(final String bootStrapServers) { + topicManager = new KafkaTopicManager(bootStrapServers); + this.bootStrapServers = bootStrapServers; + } + + private Boolean checkRequireTopicsExist(final String topicName) throws ExecutionException, InterruptedException { + Utilities.TopicStoreNames pTopicName = Utilities.getStoreNames(topicName); + return Boolean.TRUE.equals(topicManager.hasTopic(pTopicName.topicName())) && Boolean.TRUE.equals(topicManager.hasTopic(pTopicName.topicSinkName())); + } + public StoreProcessor getCreate(final String name, final Class serializeCls) throws TopicExistsException, ExecutionException, InterruptedException { + Utilities.TopicStoreNames pTopicName = Utilities.getStoreNames(name); + for (String t : List.of(pTopicName.topicName(), pTopicName.topicSinkName())) { + if (Boolean.FALSE.equals(topicManager.hasTopic(t))) { + topicManager.createTopic(t, + 1, + (short) 1, + 86400000, + 4194304); + } + } + topicManager.checkTopicsWithWait(topics -> topics.stream().filter(t -> Objects.equals(t.name(), pTopicName.topicName()) + || Objects.equals(t.name(), pTopicName.topicSinkName())).count() == 2, + 5000); + + return get(name, serializeCls); + } + public StoreProcessor get(final String name, final Class serializeCls) throws TopicExistsException, ExecutionException, InterruptedException { + if (Boolean.FALSE.equals(this.checkRequireTopicsExist(name))) { + throw new UnknownTopicIdException(String.format("Could not find the global KTable with the name '%s'. 
Try running getCreate instead.", name)); + } + Utilities.TopicStoreNames pTopicName = Utilities.getStoreNames(name); + if (!tables.containsKey(name) || (tables.containsKey(name) && !tables.get(name).validateIsAlive())) { + StoreProcessor instance; + try { + instance = getInstanceClass(pTopicName.topicName(), pTopicName.topicSinkName(), serializeCls); + } catch (Exception e) { + LOGGER.error(String.format("Failed to create global kTable with the name %s. Reason: %s", name, e.getMessage()), e); + throw e; + } + tables.put(name, instance); + } + return tables.get(name); + } + + protected StoreProcessor getInstanceClass(final String name, final String sinkName, final Class serializeCls) throws ExecutionException, InterruptedException { + return new StoreProcessor<>(bootStrapServers, name, sinkName, serializeCls); + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorSinkManager.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorSinkManager.java new file mode 100644 index 000000000..c9a80a7ef --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorSinkManager.java @@ -0,0 +1,66 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.PartitionInfo; +import org.apache.kafka.common.TopicPartition; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jembi.jempi.shared.kafka.MyKafkaProducer; + +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +public final class 
StoreProcessorSinkManager { + private static final Logger LOGGER = LogManager.getLogger(StoreProcessorSinkManager.class); + private final String topicName; + private final String sinkTopicName; + private final Consumer sinkReader; + private final MyKafkaProducer sinkUpdater; + + StoreProcessorSinkManager(final String topicName, final String sinkTopicName, final String bootStrapServers, final Class serializeCls) { + this.topicName = topicName; + this.sinkTopicName = sinkTopicName; + this.sinkUpdater = Utilities.getTopicProducer(sinkTopicName, bootStrapServers); + this.sinkReader = Utilities.getTopicReader(sinkTopicName, bootStrapServers, serializeCls); + } + public void updateSink(final T updatedValue) throws ExecutionException, InterruptedException { + this.sinkUpdater.produceSync(sinkTopicName, updatedValue); + } + + public T readSink() { + try { + Map> topics = this.sinkReader.listTopics(); + List partitions = topics.get(sinkTopicName); + + if (partitions != null) { + int lastPartition = partitions.size() - 1; + TopicPartition topicPartition = new TopicPartition(sinkTopicName, lastPartition); + this.sinkReader.assign(Collections.singletonList(topicPartition)); + this.sinkReader.seekToEnd(Collections.singletonList(topicPartition)); + long lastOffset = this.sinkReader.position(topicPartition); + if (lastOffset == 0) { + return null; + } + this.sinkReader.seek(topicPartition, lastOffset - 1); + + + ConsumerRecords records = this.sinkReader.poll(Duration.ofMillis(1000)); + + T lastRecord = null; + for (ConsumerRecord r: records) { + lastRecord = r.value(); + } + + return lastRecord; + } + } catch (Exception e) { + LOGGER.error(String.format("An error occurred trying to get the global store %s last value. 
Defaulting to null", this.topicName), e); + } + return null; + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdater.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdater.java new file mode 100644 index 000000000..54233e1f4 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdater.java @@ -0,0 +1,67 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.streams.state.KeyValueStore; +import org.apache.kafka.streams.processor.api.Processor; +import org.apache.kafka.streams.processor.api.Record; +import org.apache.kafka.streams.processor.api.ProcessorContext; + +import java.util.concurrent.ExecutionException; + +public final class StoreProcessorValuesUpdater implements Processor { + + private ProcessorContext context; + private KeyValueStore topicStore; + private final StoreUpdaterProcessor valuesUpdater; + private final String topicStoreName; + private final String topicName; + private final StoreProcessorSinkManager sinkManager; + + public StoreProcessorValuesUpdater(final StoreUpdaterProcessor valuesUpdater, + final String topicName, + final String topicStoreName, + final StoreProcessorSinkManager sinkManager + ) { + this.valuesUpdater = valuesUpdater; + this.topicName = topicName; + this.topicStoreName = topicStoreName; + this.sinkManager = sinkManager; + } + + private T readLastValue(final Record recordToProcess) { + T lastValue = null; + + if (recordToProcess != null) { + lastValue = this.topicStore.get(recordToProcess.key()); + if (lastValue != null) { + return lastValue; + } + } + // This only happens on process starts to prime the global store + return this.sinkManager.readSink(); + + } + @Override + public void init(final ProcessorContext context) { + 
this.context = context; + this.topicStore = context.getStateStore(topicStoreName); + T lastValue = readLastValue(null); + if (lastValue != null) { + this.topicStore.put(topicName, lastValue); + } + } + + @Override + public void process(final Record recordToProcess) { + T updatedValue = this.valuesUpdater.apply(readLastValue(recordToProcess), recordToProcess.value()); + try { + this.sinkManager.updateSink(updatedValue); + this.topicStore.put(recordToProcess.key(), updatedValue); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + this.context.commit(); + } + +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreUpdaterProcessor.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreUpdaterProcessor.java new file mode 100644 index 000000000..2c83c497f --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreUpdaterProcessor.java @@ -0,0 +1,6 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +@FunctionalInterface +public interface StoreUpdaterProcessor { + R apply(G globalValue, C currentValue); +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/Utilities.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/Utilities.java new file mode 100644 index 000000000..802b523ec --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/Utilities.java @@ -0,0 +1,48 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.Consumer; +import 
org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.jembi.jempi.shared.kafka.MyKafkaProducer; +import org.jembi.jempi.shared.kafka.global_context.store_processor.serde.StoreValueDeserializer; +import org.jembi.jempi.shared.kafka.global_context.store_processor.serde.StoreValueSerializer; + +import java.util.Properties; +import java.util.UUID; + +public class Utilities { + + protected Utilities() { } + + public record TopicStoreNames(String topicName, String topicSinkName) { } + public static final String JEMPI_GLOBAL_STORE_PREFIX = "jempi-global-store-topic"; + + private static String getTopicWithPrefix(final String topicName) { + return String.format("%s-%s", JEMPI_GLOBAL_STORE_PREFIX, topicName); + } + public static TopicStoreNames getStoreNames(final String topicName) { + String topicNameWithPrefix = Utilities.getTopicWithPrefix(topicName); + return new TopicStoreNames(topicNameWithPrefix, String.format("%s-sink", topicNameWithPrefix)); + } + public static String getUniqueAppId(final String topicName, final String idSuffix) { + return String.format("jempi-global-store-app-%s-%s", topicName, idSuffix == null ? 
UUID.randomUUID() : idSuffix); + } + + public static MyKafkaProducer getTopicProducer(final String topicName, final String bootStrapServers) { + return new MyKafkaProducer<>(bootStrapServers, + topicName, + new StringSerializer(), + new StoreValueSerializer<>(), + String.format("%s-producer", Utilities.getUniqueAppId(topicName, null))); + } + + public static Consumer getTopicReader(final String topicName, final String bootStrapServers, final Class serializeCls) { + Properties properties = new Properties(); + properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers); + properties.put(ConsumerConfig.GROUP_ID_CONFIG, String.format("%s-group", topicName)); + + return new KafkaConsumer<>(properties, new StringDeserializer(), new StoreValueDeserializer<>(serializeCls)); + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueDeserializer.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueDeserializer.java new file mode 100644 index 000000000..722c88cc3 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueDeserializer.java @@ -0,0 +1,25 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.serde; + +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.serialization.Deserializer; +import org.jembi.jempi.shared.utils.AppUtils; + +public final class StoreValueDeserializer implements Deserializer { + private final Class serializeCls; + public StoreValueDeserializer(final Class serializeCls) { + this.serializeCls = serializeCls; + } + @Override + public T deserialize(final String topic, final byte[] bytes) { + if (bytes == null) { + return null; + } + T data = null; + try { + data = AppUtils.OBJECT_MAPPER.readValue(bytes, this.serializeCls); + } catch (Exception ex) { + 
throw new SerializationException(ex); + } + return data; + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerde.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerde.java new file mode 100644 index 000000000..4c9588b80 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerde.java @@ -0,0 +1,9 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.serde; + +import org.apache.kafka.common.serialization.Serdes; + +public class StoreValueSerde extends Serdes.WrapperSerde { + public StoreValueSerde(final Class serializeCls) { + super(new StoreValueSerializer<>(), new StoreValueDeserializer<>(serializeCls)); + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerializer.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerializer.java new file mode 100644 index 000000000..38c8147f1 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/kafka/global_context/store_processor/serde/StoreValueSerializer.java @@ -0,0 +1,20 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.serde; + +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.serialization.Serializer; +import org.jembi.jempi.shared.utils.AppUtils; + +public final class StoreValueSerializer implements Serializer { + @Override + public byte[] serialize(final String topic, final T data) { + if (data == null) { + return new byte[0]; + } + + try { + return AppUtils.OBJECT_MAPPER.writeValueAsBytes(data); + } catch (Exception e) { + throw new SerializationException("Error serializing JSON message", e); + } + } +} diff 
--git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/ApiModels.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/ApiModels.java index 8f3920d4a..978325c6a 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/ApiModels.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/ApiModels.java @@ -1,20 +1,54 @@ package org.jembi.jempi.shared.models; +import akka.http.javadsl.model.HttpResponse; +import akka.http.javadsl.model.StatusCode; +import akka.http.javadsl.model.StatusCodes; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.commons.lang3.ObjectUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.List; +import static org.jembi.jempi.shared.utils.AppUtils.OBJECT_MAPPER; + public abstract class ApiModels { + private static final Logger LOGGER = LogManager.getLogger(ApiModels.class); private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss.SSSSSS"; + public static HttpResponse getHttpErrorResponse(final StatusCode statusCode) { + try { + var entity = OBJECT_MAPPER.writeValueAsBytes(new ApiError()); + return HttpResponse.create().withStatus(statusCode).withEntity(entity); + } catch (JsonProcessingException e) { + LOGGER.error(e.getLocalizedMessage(), e); + return HttpResponse.create().withStatus(StatusCodes.INTERNAL_SERVER_ERROR); + } + } + public interface ApiPaginatedResultSet { } + public record ApiError( + + @JsonProperty("module") String module, + @JsonProperty("class") String klass, + @JsonProperty("line_number") Integer lineNumber) { + + public ApiError() { + this(Thread.currentThread().getStackTrace()[3].getModuleName(), + Thread.currentThread().getStackTrace()[3].getClassName(), + 
Thread.currentThread().getStackTrace()[3].getLineNumber()); + } + + + } + public record ApiGoldenRecordCount(Long count) { } @@ -69,6 +103,27 @@ public record ApiCrRegisterRequest( CustomDemographicData demographicData) { } + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record LinkInteractionSyncBody( + String stan, + ExternalLinkRange externalLinkRange, + Float matchThreshold, + CustomSourceId sourceId, + CustomUniqueInteractionData uniqueInteractionData, + CustomDemographicData demographicData) { + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record LinkInteractionToGidSyncBody( + String stan, + CustomSourceId sourceId, + CustomUniqueInteractionData uniqueInteractionData, + CustomDemographicData demographicData, + String gid) { + } + + @JsonInclude(JsonInclude.Include.NON_NULL) public record ApiCrRegisterResponse(LinkInfo linkInfo) { } @@ -134,10 +189,7 @@ public record ApiExpandedGoldenRecordsPaginatedResultSet( ApiPagination pagination) implements ApiPaginatedResultSet { public static ApiExpandedGoldenRecordsPaginatedResultSet fromLibMPIPaginatedResultSet( final LibMPIPaginatedResultSet resultSet) { - final var data = resultSet.data() - .stream() - .map(ApiExpandedGoldenRecord::fromExpandedGoldenRecord) - .toList(); + final var data = resultSet.data().stream().map(ApiExpandedGoldenRecord::fromExpandedGoldenRecord).toList(); return new ApiExpandedGoldenRecordsPaginatedResultSet(data, ApiPagination.fromLibMPIPagination(resultSet.pagination())); } } @@ -147,10 +199,7 @@ public record ApiInteractionsPaginatedResultSet( ApiPagination pagination) implements ApiPaginatedResultSet { public static ApiInteractionsPaginatedResultSet fromLibMPIPaginatedResultSet( final LibMPIPaginatedResultSet resultSet) { - final var data = resultSet.data() - .stream() - .map(ApiInteraction::fromInteraction) - .toList(); + final var data = resultSet.data().stream().map(ApiInteraction::fromInteraction).toList(); return new ApiInteractionsPaginatedResultSet(data, 
ApiPagination.fromLibMPIPagination(resultSet.pagination())); } } @@ -160,8 +209,7 @@ public record ApiFiteredGidsPaginatedResultSet( ApiPagination pagination) implements ApiPaginatedResultSet { public static ApiFiteredGidsPaginatedResultSet fromLibMPIPaginatedResultSet( final LibMPIPaginatedResultSet resultSet) { - final var data = resultSet.data() - .stream().toList(); + final var data = resultSet.data().stream().toList(); return new ApiFiteredGidsPaginatedResultSet(data, ApiPagination.fromLibMPIPagination(resultSet.pagination())); } } @@ -169,12 +217,10 @@ public static ApiFiteredGidsPaginatedResultSet fromLibMPIPaginatedResultSet( public record ApiFiteredGidsWithInteractionCountPaginatedResultSet( List data, InteractionCount interactionCount, - ApiPagination pagination - ) implements ApiPaginatedResultSet { + ApiPagination pagination) implements ApiPaginatedResultSet { public static ApiFiteredGidsWithInteractionCountPaginatedResultSet fromPaginatedGidsWithInteractionCount( final PaginatedGIDsWithInteractionCount resultSet) { - final var data = resultSet.data() - .stream().toList(); + final var data = resultSet.data().stream().toList(); return new ApiFiteredGidsWithInteractionCountPaginatedResultSet(data, InteractionCount.fromInteractionCount(resultSet.interactionCount()), ApiPagination.fromLibMPIPagination(resultSet.pagination())); @@ -263,11 +309,12 @@ public record ApiAuditTrail( List entries) { public static ApiAuditTrail fromAuditTrail(final List trail) { final var apiDateFormat = new SimpleDateFormat(DATE_PATTERN); - return new ApiAuditTrail(trail.stream().map(x -> new AuditEntry(apiDateFormat.format(x.insertedAt()), - apiDateFormat.format(x.createdAt()), - x.interactionID(), - x.goldenID(), - x.event())) + return new ApiAuditTrail(trail.stream() + .map(x -> new AuditEntry(apiDateFormat.format(x.insertedAt()), + apiDateFormat.format(x.createdAt()), + x.interactionID(), + x.goldenID(), + x.event())) .toList()); } diff --git 
a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomDemographicData.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomDemographicData.java index 7e655df40..3971ad7e9 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomDemographicData.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomDemographicData.java @@ -62,13 +62,13 @@ public CustomDemographicData( } public CustomDemographicData clean() { - return new CustomDemographicData(this.givenName.toLowerCase().replaceAll("\\W", ""), - this.familyName.toLowerCase().replaceAll("\\W", ""), - this.gender.toLowerCase().replaceAll("\\W", ""), - this.dob.toLowerCase().replaceAll("\\W", ""), - this.city.toLowerCase().replaceAll("\\W", ""), - this.phoneNumber.toLowerCase().replaceAll("\\W", ""), - this.nationalId.toLowerCase().replaceAll("\\W", "")); + return new CustomDemographicData(this.givenName.trim().toLowerCase().replaceAll("\\W", ""), + this.familyName.trim().toLowerCase().replaceAll("\\W", ""), + this.gender.trim().toLowerCase().replaceAll("\\W", ""), + this.dob.trim().toLowerCase().replaceAll("\\W", ""), + this.city.trim().toLowerCase().replaceAll("\\W", ""), + this.phoneNumber.trim().toLowerCase().replaceAll("\\W", ""), + this.nationalId.trim().toLowerCase().replaceAll("\\W", "")); } } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomFieldTallies.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomFieldTallies.java new file mode 100644 index 000000000..63dd47874 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomFieldTallies.java @@ -0,0 +1,114 @@ +package org.jembi.jempi.shared.models; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.text.similarity.JaroWinklerSimilarity; +import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; + +public record CustomFieldTallies( + FieldTally givenName, + FieldTally familyName, + FieldTally gender, + FieldTally dob, + FieldTally city, + FieldTally phoneNumber, + FieldTally nationalId) { + + private static final Logger LOGGER = LogManager.getFormatterLogger(CustomFieldTallies.class); + private static final JaroWinklerSimilarity JARO_WINKLER_SIMILARITY = new JaroWinklerSimilarity(); + private static final FieldTally A = new FieldTally(1L, 0L, 0L, 0L); + private static final FieldTally B = new FieldTally(0L, 1L, 0L, 0L); + private static final FieldTally C = new FieldTally(0L, 0L, 1L, 0L); + private static final FieldTally D = new FieldTally(0L, 0L, 0L, 1L); + public static final CustomFieldTallies.FieldTally FIELD_TALLY_SUM_IDENTITY = new CustomFieldTallies.FieldTally(0L, 0L, 0L, 0L); + public static final CustomFieldTallies CUSTOM_FIELD_TALLIES_SUM_IDENTITY = new CustomFieldTallies( + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY, + FIELD_TALLY_SUM_IDENTITY); + + private static FieldTally getFieldTally( + final boolean recordsMatch, + final String left, + final String right) { + if (StringUtils.isEmpty(left) || StringUtils.isEmpty(right)) { + return FIELD_TALLY_SUM_IDENTITY; + } + final var fieldMatches = JARO_WINKLER_SIMILARITY.apply(left.toLowerCase(), right.toLowerCase()) >= 0.97; + if (recordsMatch) { + if (fieldMatches) { + return A; + } else { + return B; + } + } else { + if (fieldMatches) { + return C; + } else { + return D; + } + } + } + + private static void logMU( + final String tag, + final CustomFieldTallies.FieldTally fieldTally) { + LOGGER.debug("%-15s %,.5f %,.5f", + tag, + fieldTally.a().doubleValue() / (fieldTally.a().doubleValue() + fieldTally.b().doubleValue()), + fieldTally.c().doubleValue() / (fieldTally.c().doubleValue() + fieldTally.d().doubleValue())); + } + + public static 
CustomFieldTallies map( + final boolean recordsMatch, + final CustomDemographicData left, + final CustomDemographicData right) { + return new CustomFieldTallies(getFieldTally(recordsMatch, left.givenName, right.givenName), + getFieldTally(recordsMatch, left.familyName, right.familyName), + getFieldTally(recordsMatch, left.gender, right.gender), + getFieldTally(recordsMatch, left.dob, right.dob), + getFieldTally(recordsMatch, left.city, right.city), + getFieldTally(recordsMatch, left.phoneNumber, right.phoneNumber), + getFieldTally(recordsMatch, left.nationalId, right.nationalId)); + } + + public void logFieldMU() { + LOGGER.debug("Tally derived M&U's"); + logMU("givenName", givenName); + logMU("familyName", familyName); + logMU("gender", gender); + logMU("dob", dob); + logMU("city", city); + logMU("phoneNumber", phoneNumber); + logMU("nationalId", nationalId); + } + + public CustomFieldTallies sum(final CustomFieldTallies r) { + return new CustomFieldTallies(this.givenName.sum(r.givenName), + this.familyName.sum(r.familyName), + this.gender.sum(r.gender), + this.dob.sum(r.dob), + this.city.sum(r.city), + this.phoneNumber.sum(r.phoneNumber), + this.nationalId.sum(r.nationalId)); + } + + public record FieldTally( + Long a, + Long b, + Long c, + Long d) { + + FieldTally sum(final FieldTally r) { + return new FieldTally(this.a + r.a, + this.b + r.b, + this.c + r.c, + this.d + r.d); + } + + } + +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomMU.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomMU.java index 9b05a7e74..412bf46e9 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomMU.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomMU.java @@ -3,7 +3,8 @@ import com.fasterxml.jackson.annotation.JsonInclude; @JsonInclude(JsonInclude.Include.NON_NULL) -public record CustomMU(Probability givenName, +public record 
CustomMU(String tag, + Probability givenName, Probability familyName, Probability gender, Probability dob, @@ -11,15 +12,7 @@ public record CustomMU(Probability givenName, Probability phoneNumber, Probability nationalId) { - public CustomMU(final double[] mHat, final double[] uHat) { - this(new CustomMU.Probability((float) mHat[0], (float) uHat[0]), - new CustomMU.Probability((float) mHat[1], (float) uHat[1]), - new CustomMU.Probability((float) mHat[2], (float) uHat[2]), - new CustomMU.Probability((float) mHat[3], (float) uHat[3]), - new CustomMU.Probability((float) mHat[4], (float) uHat[4]), - new CustomMU.Probability((float) mHat[5], (float) uHat[5]), - new CustomMU.Probability((float) mHat[6], (float) uHat[6])); - } + public static final Boolean SEND_INTERACTIONS_TO_EM = true; public record Probability(float m, float u) { } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomUniqueGoldenRecordData.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomUniqueGoldenRecordData.java index 2bad72850..8c895ed52 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomUniqueGoldenRecordData.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/CustomUniqueGoldenRecordData.java @@ -2,8 +2,18 @@ import com.fasterxml.jackson.annotation.JsonInclude; +import java.time.LocalDateTime; + @JsonInclude(JsonInclude.Include.NON_NULL) public record CustomUniqueGoldenRecordData(java.time.LocalDateTime auxDateCreated, Boolean auxAutoUpdateEnabled, String auxId) { + + public CustomUniqueGoldenRecordData(final CustomUniqueInteractionData uniqueInteractionData) { + this(LocalDateTime.now(), + true, + uniqueInteractionData.auxId() + ); + } + } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/FilterGidsRequestPayload.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/FilterGidsRequestPayload.java index 
fe55d878c..d79ff1bce 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/FilterGidsRequestPayload.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/FilterGidsRequestPayload.java @@ -1,34 +1,33 @@ package org.jembi.jempi.shared.models; +import com.fasterxml.jackson.annotation.JsonInclude; +import org.apache.commons.lang3.ObjectUtils; + import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; -import org.apache.commons.lang3.ObjectUtils; - -import com.fasterxml.jackson.annotation.JsonInclude; - @JsonInclude(JsonInclude.Include.NON_NULL) public record FilterGidsRequestPayload( List parameters, - LocalDateTime createdAt, - Integer offset, - Integer limit, - String sortBy, - Boolean sortAsc) { + LocalDateTime createdAt, + Integer offset, + Integer limit, + String sortBy, + Boolean sortAsc) { - public FilterGidsRequestPayload( - final List parameters, - final LocalDateTime createdAt, - final Integer offset, - final Integer limit, - final String sortBy, - final Boolean sortAsc) { + public FilterGidsRequestPayload( + final List parameters, + final LocalDateTime createdAt, + final Integer offset, + final Integer limit, + final String sortBy, + final Boolean sortAsc) { this.parameters = ObjectUtils.defaultIfNull(parameters, new ArrayList<>()); - this.createdAt = ObjectUtils.defaultIfNull(createdAt, LocalDateTime.now()); - this.offset = ObjectUtils.defaultIfNull(offset, 0); - this.limit = ObjectUtils.defaultIfNull(limit, 10); - this.sortBy = ObjectUtils.defaultIfNull(sortBy, "uid"); - this.sortAsc = ObjectUtils.defaultIfNull(sortAsc, false); + this.createdAt = ObjectUtils.defaultIfNull(createdAt, LocalDateTime.now()); + this.offset = ObjectUtils.defaultIfNull(offset, 0); + this.limit = ObjectUtils.defaultIfNull(limit, 10); + this.sortBy = ObjectUtils.defaultIfNull(sortBy, "uid"); + this.sortAsc = ObjectUtils.defaultIfNull(sortAsc, false); } } diff --git 
a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GlobalConstants.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GlobalConstants.java index 6fc224db6..e8903c317 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GlobalConstants.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GlobalConstants.java @@ -2,10 +2,12 @@ public final class GlobalConstants { - public static final String TOPIC_INTERACTION_ASYNC_ETL = "JeMPI-async-etl"; + public static final String TOPIC_INTERACTION_ETL = "JeMPI-interaction-etl"; public static final String TOPIC_INTERACTION_CONTROLLER = "JeMPI-interaction-controller"; + public static final String TOPIC_INTERACTION_PROCESSOR_CONTROLLER = "JeMPI-interaction-processor-controller"; public static final String TOPIC_INTERACTION_EM = "JeMPI-interaction-em"; public static final String TOPIC_INTERACTION_LINKER = "JeMPI-interaction-linker"; + public static final String TOPIC_MU_CONTROLLER = "JeMPI-mu-controller"; public static final String TOPIC_MU_LINKER = "JeMPI-mu-linker"; public static final String TOPIC_AUDIT_TRAIL = "JeMPI-audit-trail"; public static final String TOPIC_NOTIFICATIONS = "JeMPI-notifications"; @@ -37,8 +39,8 @@ public final class GlobalConstants { public static final String SEGMENT_GET_NOTIFICATIONS = "MatchesForReview"; public static final String SEGMENT_PATCH_GOLDEN_RECORD = "golden-record"; - public static final String SEGMENT_PATCH_IID_GID_LINK = "Link"; - public static final String SEGMENT_PATCH_IID_NEW_GID_LINK = "Unlink"; + public static final String SEGMENT_POST_IID_GID_LINK = "Link"; + public static final String SEGMENT_POST_IID_NEW_GID_LINK = "Unlink"; public static final String SEGMENT_POST_UPDATE_NOTIFICATION = "NotificationRequest"; public static final String SEGMENT_POST_SIMPLE_SEARCH = "search"; @@ -47,14 +49,18 @@ public final class GlobalConstants { public static final String 
SEGMENT_POST_FILTER_GIDS = "filter-gids"; public static final String SEGMENT_POST_FILTER_GIDS_WITH_INTERACTION_COUNT = "filter-gids-interaction"; - public static final String SEGMENT_PROXY_CR_REGISTER = "cr-register"; - public static final String SEGMENT_PROXY_CR_FIND = "cr-find"; - public static final String SEGMENT_PROXY_CR_CANDIDATES = "cr-candidates"; - public static final String SEGMENT_PROXY_CR_UPDATE_FIELDS = "cr-update-fields"; + public static final String SEGMENT_PROXY_POST_CR_REGISTER = "cr-register"; + public static final String SEGMENT_PROXY_POST_CR_FIND = "cr-find"; + public static final String SEGMENT_PROXY_POST_CR_CANDIDATES = "cr-candidates"; + public static final String SEGMENT_PROXY_PATCH_CR_UPDATE_FIELDS = "cr-update-fields"; public static final String SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES = "candidate-golden-records"; public static final String SEGMENT_PROXY_POST_CALCULATE_SCORES = "calculate-scores"; + + public static final String SEGMENT_PROXY_GET_DASHBOARD_DATA = "dashboard-data"; + + public static final String SEGMENT_PROXY_ON_NOTIFICATION_RESOLUTION = "on-notification-resolution"; public static final String SEGMENT_PROXY_POST_LINK_INTERACTION = "link-interaction"; public static final String SEGMENT_PROXY_POST_LINK_INTERACTION_TO_GID = "link-interaction-to-gid"; @@ -67,6 +73,7 @@ public final class GlobalConstants { // SEGMENT_CURRENT_USER: '/current-user', + public static final String DEFAULT_LINKER_GLOBAL_STORE_NAME = "linker"; private GlobalConstants() { } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GoldenRecord.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GoldenRecord.java index 1dc0bced9..802a72c5c 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GoldenRecord.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/GoldenRecord.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; 
-import java.time.LocalDateTime; import java.util.List; @JsonInclude(JsonInclude.Include.NON_NULL) @@ -15,9 +14,7 @@ public record GoldenRecord( public GoldenRecord(final Interaction interaction) { this(null, List.of(interaction.sourceId()), - new CustomUniqueGoldenRecordData(LocalDateTime.now(), - true, - interaction.uniqueInteractionData().auxId()), + new CustomUniqueGoldenRecordData(interaction.uniqueInteractionData()), interaction.demographicData()); } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/InteractionEnvelop.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/InteractionEnvelop.java index 987468b76..d29255855 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/InteractionEnvelop.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/InteractionEnvelop.java @@ -6,7 +6,8 @@ public record InteractionEnvelop( ContentType contentType, String tag, - String /* System Trace Audit Number */ stan, + String stan, + // System Trace Audit Number Interaction interaction) { public enum ContentType { diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LibMPIInteractionCount.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LibMPIInteractionCount.java index 66cec26a1..e9a391968 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LibMPIInteractionCount.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LibMPIInteractionCount.java @@ -2,6 +2,7 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; + @JsonInclude(JsonInclude.Include.NON_NULL) public record LibMPIInteractionCount(@JsonProperty("total") Integer total) { } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInfo.java 
b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInfo.java index 9bdc91a04..2c20e6243 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInfo.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInfo.java @@ -3,5 +3,6 @@ public record LinkInfo( String goldenUID, String interactionUID, + String sourceUID, float score) { } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionSyncBody.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionSyncBody.java deleted file mode 100644 index ff7b24c0e..000000000 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionSyncBody.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.jembi.jempi.shared.models; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonInclude(JsonInclude.Include.NON_NULL) -public record LinkInteractionSyncBody( - @JsonProperty("stan") String stan, - @JsonProperty("externalLinkRange") ExternalLinkRange externalLinkRange, - @JsonProperty("matchThreshold") Float matchThreshold, - @JsonProperty("patientRecord") Interaction interaction) { -} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionToGidSyncBody.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionToGidSyncBody.java deleted file mode 100644 index 9ea4bcccf..000000000 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkInteractionToGidSyncBody.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.jembi.jempi.shared.models; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; - -@JsonInclude(JsonInclude.Include.NON_NULL) -public record LinkInteractionToGidSyncBody( - @JsonProperty("stan") String stan, - 
@JsonProperty("patientRecord") Interaction interaction, - @JsonProperty("gid") String gid) { -} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkStatsMeta.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkStatsMeta.java new file mode 100644 index 000000000..99c047bfc --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/LinkStatsMeta.java @@ -0,0 +1,28 @@ +package org.jembi.jempi.shared.models; + +import com.fasterxml.jackson.annotation.JsonInclude; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public record LinkStatsMeta( + ConfusionMatrix confusionMatrix, + CustomFieldTallies customFieldTallies) { + + public static final ConfusionMatrix CONFUSION_MATRIX_IDENTITY = new ConfusionMatrix(0.0, 0.0, 0.0, 0.0); + + @JsonInclude(JsonInclude.Include.NON_NULL) + public record ConfusionMatrix( + Double TP, + Double FP, + Double TN, + Double FN) { + + public ConfusionMatrix sum(final ConfusionMatrix right) { + return new ConfusionMatrix(this.TP + right.TP, + this.FP + right.FP, + this.TN + right.TN, + this.FN + right.FN); + } + + } + +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/Notification.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/Notification.java index 765ea69c0..762c21735 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/Notification.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/Notification.java @@ -12,7 +12,8 @@ public record Notification( MatchData linkedTo, List candidates) { public enum NotificationType { - THRESHOLD("Threshold"), + ABOVE_THRESHOLD("Above Threshold"), + BELOW_THRESHOLD("Below Threshold"), MARGIN("Margin"), UPDATE("Update"); diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolution.java 
b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolution.java new file mode 100644 index 000000000..c12478f64 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolution.java @@ -0,0 +1,25 @@ +package org.jembi.jempi.shared.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.ArrayList; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public record NotificationResolution( + @JsonProperty(value = "notificationId", required = true) String notificationId, + @JsonProperty(value = "interactionId", required = true) String interactionId, + @JsonProperty(value = "resolutionState", required = true) String resolutionState, + @JsonProperty(value = "currentGoldenId", required = true) String currentGoldenId, + @JsonProperty(value = "currentCandidates", required = true) ArrayList currentCandidates, + @JsonProperty(value = "newGoldenId", required = true) String newGoldenId, + @JsonProperty(value = "notificationType", required = true) String notificationType, + @JsonProperty("score") Float score +) { + +} + + + + + diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolutionProcessorData.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolutionProcessorData.java new file mode 100644 index 000000000..d1c874765 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/NotificationResolutionProcessorData.java @@ -0,0 +1,8 @@ +package org.jembi.jempi.shared.models; + + +public record NotificationResolutionProcessorData( + NotificationResolution notificationResolution, + LinkInfo linkInfo + ) { +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerProgressStats.java 
b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerProgressStats.java new file mode 100644 index 000000000..1ddd6476c --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerProgressStats.java @@ -0,0 +1,4 @@ +package org.jembi.jempi.shared.models.dashboard; + +public record LinkerProgressStats(long totalCompleted, long sizeCompleted, long toFileSize, String filename) { +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerStats.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerStats.java new file mode 100644 index 000000000..eac79817f --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/LinkerStats.java @@ -0,0 +1,4 @@ +package org.jembi.jempi.shared.models.dashboard; + +public record LinkerStats(long goldenRecordCount, long interactionsCount, LinkerProgressStats linkerProgressStats) { +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/NotificationStats.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/NotificationStats.java new file mode 100644 index 000000000..3197a3993 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/NotificationStats.java @@ -0,0 +1,4 @@ +package org.jembi.jempi.shared.models.dashboard; + +public record NotificationStats(int openNotifications, int closedNotifications) { +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/SQLDashboardData.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/SQLDashboardData.java new file mode 100644 index 000000000..41a17623f --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/models/dashboard/SQLDashboardData.java @@ -0,0 +1,4 @@ +package 
org.jembi.jempi.shared.models.dashboard; + +public record SQLDashboardData(NotificationStats notificationStats) { +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/utils/AppUtils.java b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/utils/AppUtils.java index 3c2ccc582..f2925b362 100644 --- a/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/utils/AppUtils.java +++ b/JeMPI_Apps/JeMPI_LibShared/src/main/java/org/jembi/jempi/shared/utils/AppUtils.java @@ -20,8 +20,8 @@ public final class AppUtils implements Serializable { - public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) - .registerModule(new JavaTimeModule()); + public static final ObjectMapper OBJECT_MAPPER = + new ObjectMapper().disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS).registerModule(new JavaTimeModule()); private static final Logger LOGGER = LogManager.getLogger(AppUtils.class); @Serial @@ -39,8 +39,7 @@ static String getResourceFileAsString(final String fileName) throws IOException if (is == null) { return null; } - try (InputStreamReader isr = new InputStreamReader(is); - BufferedReader reader = new BufferedReader(isr)) { + try (InputStreamReader isr = new InputStreamReader(is); BufferedReader reader = new BufferedReader(isr)) { return reader.lines().collect(Collectors.joining(System.lineSeparator())); } } diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/README.md b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/README.md new file mode 100644 index 000000000..4f597e4a1 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/README.md @@ -0,0 +1,44 @@ +GlobalKTable Tests +================== + +To run these test you need to have a kafka instance running on you machine on port 9097 
+Below is a docker-compose file you can use as a sample kafka instance + +```yaml +version: "3.8" + +services: + kafka: + image: docker.io/bitnami/kafka:3.6 + hostname: kafka0 + container_name: kafka0 + ports: + - "9097:9097" + volumes: + - "kafka_data:/bitnami" + environment: + # KRaft settings + - KAFKA_CFG_NODE_ID=0 + - KAFKA_CFG_PROCESS_ROLES=controller,broker + - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@localhost:9093 + # Listeners + - KAFKA_CFG_LISTENERS=PLAINTEXT://:9097,CONTROLLER://:9093,PLAINTEXT_OTHER://:29092 + - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT_OTHER://kafka0:29092,PLAINTEXT://localhost:9097 + - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_OTHER:PLAINTEXT + - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER + - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=PLAINTEXT + kafka-ui: + container_name: kafka-ui + image: provectuslabs/kafka-ui:latest + ports: + - 7474:8080 + environment: + DYNAMIC_CONFIG_ENABLED: 'true' + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 + depends_on: + - kafka +volumes: + kafka_data: + driver: local +``` \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactoryTest.java b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactoryTest.java new file mode 100644 index 000000000..b45662de7 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorFactoryTest.java @@ -0,0 +1,49 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.apache.kafka.clients.admin.DeleteTopicsOptions; +import org.apache.kafka.clients.admin.ListTopicsOptions; +import org.apache.kafka.clients.admin.TopicListing; +import org.apache.kafka.common.errors.UnknownTopicIdException; + +import java.util.Collection; +import 
java.util.ArrayList; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +import org.jembi.jempi.shared.kafka.global_context.store_processor.utils.TestUtils; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; + +import static org.junit.jupiter.api.Assertions.*; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class StoreProcessorFactoryTest { + TestUtils testUtils; + + @BeforeAll + void prepareForTests(){ + testUtils = new TestUtils("localhost:9097"); + } + + @Test + void testCanCreateNewInstance() throws ExecutionException, InterruptedException { + StoreProcessorFactory factory = testUtils.getGlobalKTableWrapperInstance(true); + StoreProcessor sampleInstance = factory.getCreate(testUtils.getTestTopicName("sample-table"), TestUtils.MockTableData.class); + assertInstanceOf(StoreProcessor.class, sampleInstance); + } + @Test + void testItErrorsOutWhenGlobalKTableDoesNotExists() throws ExecutionException, InterruptedException { + assertThrows(UnknownTopicIdException.class, () -> { + testUtils.getGlobalKTableWrapperInstance(true).get(testUtils.getTestTopicName("sample-table"), TestUtils.MockTableData.class); + }); + } + @Test + void testDoesNotRecreateIfGlobalKTableAlreadyExists() throws ExecutionException, InterruptedException { + StoreProcessorFactory factory = testUtils.getGlobalKTableWrapperInstance(true); + StoreProcessor sampleInstance = factory.getCreate(testUtils.getTestTopicName("sample-table"), TestUtils.MockTableData.class); + assertInstanceOf(StoreProcessor.class, sampleInstance); + + assertEquals(sampleInstance.hashCode(), factory.get(testUtils.getTestTopicName("sample-table"), TestUtils.MockTableData.class).hashCode()); + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorTest.java 
b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorTest.java new file mode 100644 index 000000000..2c8ee878f --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorTest.java @@ -0,0 +1,62 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.jembi.jempi.shared.kafka.global_context.store_processor.utils.TestUtils; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; + +import java.util.concurrent.ExecutionException; +import static org.junit.jupiter.api.Assertions.*; +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class StoreProcessorTest { + TestUtils testUtils; + + @BeforeAll + void prepareForTests(){ + testUtils = new TestUtils("localhost:9097"); + } + + @Test + void testCanGetValue() throws ExecutionException, InterruptedException { + StoreProcessor kTableInstance = testUtils.getGlobalKTableWrapperInstance(true).getCreate(testUtils.getTestTopicName("sample-topic"), TestUtils.MockTableData.class); + Thread.sleep(2000); + assertNull(kTableInstance.getValue()); + } + @Test + void testCanUpdateAndGetTableValue() throws ExecutionException, InterruptedException { + StoreProcessor kTableInstance = testUtils. 
getGlobalKTableWrapperInstance(true).getCreate(testUtils.getTestTopicName("sample-topic"), TestUtils.MockTableData.class); + + TestUtils.MockTableData updateInstance = new TestUtils.MockTableData(); + updateInstance.totalValues = 1; + + kTableInstance.updateValue(updateInstance); + Thread.sleep(2000); + assertNotNull(kTableInstance.getValue()); + kTableInstance.getValue(); + assertEquals(1, kTableInstance.getValue().getTotalValues()); + } + + @Test + void testCanHaveMultipleInstance() throws ExecutionException, InterruptedException { + StoreProcessor instance1 = testUtils.getGlobalKTableWrapperInstance(true).getCreate(testUtils.getTestTopicName("sample-topic"), TestUtils.MockTableData.class); + + StoreProcessor instance2 = testUtils.getGlobalKTableWrapperInstance(false).getCreate(testUtils.getTestTopicName("sample-topic"), TestUtils.MockTableData.class); + + TestUtils.MockTableData updateInstance = new TestUtils.MockTableData(); + updateInstance.totalValues = 1; + instance1.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(1, instance2.getValue().getTotalValues()); + + updateInstance.totalValues += 1; + instance2.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(2, instance1.getValue().getTotalValues()); + + StoreProcessor instance3 = testUtils.getGlobalKTableWrapperInstance(false).getCreate(testUtils.getTestTopicName("sample-topic"), TestUtils.MockTableData.class); + assertEquals(2, instance3.getValue().getTotalValues()); + + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdaterTest.java b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdaterTest.java new file mode 100644 index 000000000..5f4956111 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/StoreProcessorValuesUpdaterTest.java @@ -0,0 
+1,72 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor; + +import org.jembi.jempi.shared.kafka.global_context.store_processor.utils.TestUtils; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; + +import java.util.concurrent.ExecutionException; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class StoreProcessorValuesUpdaterTest { + TestUtils testUtils; + + @BeforeAll + void prepareForTests(){ + testUtils = new TestUtils("localhost:9097"); + } + + @Test + void itCanHaveCustomValueUpdater() throws ExecutionException, InterruptedException { + StoreProcessor processor = testUtils.getMockStoreProcessor(true); + + TestUtils.MockTableData updateInstance = new TestUtils.MockTableData(); + updateInstance.totalValues = 1; + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(1, processor.getValue().getTotalValues()); + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(3, processor.getValue().getTotalValues()); + } + @Test + void itKeepsTrackOfAggregatedData() throws ExecutionException, InterruptedException { + + StoreProcessor processor = testUtils.getMockStoreProcessor(true); + + TestUtils.MockTableData updateInstance = new TestUtils.MockTableData(); + updateInstance.totalValues = 1; + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(1, processor.getValue().getTotalValues()); + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + processor.updateValue(updateInstance); + Thread.sleep(2000); + + assertEquals(3, processor.getValue().getTotalValues()); + + StoreProcessor processorNew = testUtils.getMockStoreProcessor(false); + assertEquals(3, processorNew.getValue().getTotalValues()); + + processorNew.updateValue(updateInstance); + Thread.sleep(2000); + 
+ assertEquals(4, processorNew.getValue().getTotalValues()); + assertEquals(4, processor.getValue().getTotalValues()); + + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessor.java b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessor.java new file mode 100644 index 000000000..3b972d25d --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessor.java @@ -0,0 +1,20 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.utils; + +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreProcessor; +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreUpdaterProcessor; + +import java.util.concurrent.ExecutionException; + +public class MockStoreProcessor extends StoreProcessor { + public MockStoreProcessor(String bootStrapServers, String topicName, String sinkName, Class serializeCls) throws InterruptedException, ExecutionException { + super(bootStrapServers, topicName, sinkName, serializeCls); + } + @Override + protected StoreUpdaterProcessor getValueUpdater(){ + return (TestUtils.MockTableData globalValue,TestUtils.MockTableData currentValue) -> { + TestUtils.MockTableData updateMockTable = new TestUtils.MockTableData(); + updateMockTable.totalValues = globalValue == null ? 
currentValue.totalValues : globalValue.totalValues + currentValue.totalValues; + return updateMockTable; + }; + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessorFactor.java b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessorFactor.java new file mode 100644 index 000000000..51d57f863 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/MockStoreProcessorFactor.java @@ -0,0 +1,16 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.utils; + +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreProcessor; +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreProcessorFactory; + +import java.util.concurrent.ExecutionException; + +public class MockStoreProcessorFactor extends StoreProcessorFactory { + public MockStoreProcessorFactor(String bootStrapServers) { + super(bootStrapServers); + } + @Override + protected StoreProcessor getInstanceClass(final String name, final String sinkName, Class serializeCls) throws ExecutionException, InterruptedException { + return new MockStoreProcessor(bootStrapServers, name, sinkName, serializeCls); + } +} diff --git a/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/TestUtils.java b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/TestUtils.java new file mode 100644 index 000000000..b7b914c12 --- /dev/null +++ b/JeMPI_Apps/JeMPI_LibShared/src/test/java/org/jembi/jempi/shared/kafka/global_context/store_processor/utils/TestUtils.java @@ -0,0 +1,65 @@ +package org.jembi.jempi.shared.kafka.global_context.store_processor.utils; + +import org.apache.kafka.clients.admin.AdminClient; +import 
org.apache.kafka.clients.admin.DeleteTopicsOptions; +import org.apache.kafka.clients.admin.ListTopicsOptions; +import org.apache.kafka.clients.admin.TopicListing; +import org.apache.kafka.streams.StreamsConfig; +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreProcessor; +import org.jembi.jempi.shared.kafka.global_context.store_processor.StoreProcessorFactory; +import org.jembi.jempi.shared.kafka.global_context.store_processor.Utilities; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Properties; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +public class TestUtils { + + public static class MockTableData{ + public int totalValues; + + public int getTotalValues() { + return totalValues; + } + } + + public final AdminClient kafkaAdminClient; + public final String bootStrapServer; + + public TestUtils(final String bootStrapServer){ + this.bootStrapServer = bootStrapServer; + Properties properties = new Properties(); + properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServer); + kafkaAdminClient = AdminClient.create(properties); + } + + public String getTestTopicName(final String topicName){ + return String.format("testTopic-%s", topicName); + } + + public void resetAllTopics() throws ExecutionException, InterruptedException { + Collection collection = kafkaAdminClient.listTopics(new ListTopicsOptions().listInternal(false)).listings().get().stream() + .map(TopicListing::name) + .filter(name -> name.startsWith(Utilities.JEMPI_GLOBAL_STORE_PREFIX) && name.contains("testTopic")) + .collect(Collectors.toCollection(ArrayList::new)); + + + kafkaAdminClient.deleteTopics(collection, new DeleteTopicsOptions()).all().get(); + Thread.sleep(1000); + } + public StoreProcessorFactory getGlobalKTableWrapperInstance(Boolean restAll) throws ExecutionException, InterruptedException { + if (restAll){ + this.resetAllTopics(); + } + return new 
StoreProcessorFactory(bootStrapServer); + } + + public StoreProcessor getMockStoreProcessor(Boolean restAll) throws ExecutionException, InterruptedException{ + if (restAll){ + this.resetAllTopics(); + } + return new MockStoreProcessorFactor(bootStrapServer).getCreate("testTopic", TestUtils.MockTableData.class); + } +} diff --git a/JeMPI_Apps/JeMPI_Linker/checkstyle/suppression.xml b/JeMPI_Apps/JeMPI_Linker/checkstyle/suppression.xml index 642c7e3e9..72dcc0be1 100644 --- a/JeMPI_Apps/JeMPI_Linker/checkstyle/suppression.xml +++ b/JeMPI_Apps/JeMPI_Linker/checkstyle/suppression.xml @@ -36,4 +36,15 @@ files="LinkerDWH.java" /> + + + + + diff --git a/JeMPI_Apps/JeMPI_Linker/docker/Dockerfile b/JeMPI_Apps/JeMPI_Linker/docker/Dockerfile index d3e37765f..81818f37a 100644 --- a/JeMPI_Apps/JeMPI_Linker/docker/Dockerfile +++ b/JeMPI_Apps/JeMPI_Linker/docker/Dockerfile @@ -6,7 +6,7 @@ ADD Linker-1.0-SNAPSHOT-spring-boot.jar /app/Linker-1.0-SNAPSHOT-spring-boot.jar RUN printf "#!/bin/bash\n\ cd /app\n\ -java -server --enable-preview -XX:MaxRAMPercentage=80 -XX:+UseZGC -jar /app/Linker-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh +java -server -XX:MaxRAMPercentage=80 -jar /app/Linker-1.0-SNAPSHOT-spring-boot.jar\n" > /entrypoint.sh RUN chmod +x /entrypoint.sh diff --git a/JeMPI_Apps/JeMPI_Linker/pom.xml b/JeMPI_Apps/JeMPI_Linker/pom.xml index 064c53b31..b95936044 100644 --- a/JeMPI_Apps/JeMPI_Linker/pom.xml +++ b/JeMPI_Apps/JeMPI_Linker/pom.xml @@ -162,6 +162,10 @@ test + + org.apache.commons + commons-csv + @@ -207,9 +211,9 @@ org.apache.maven.plugins maven-compiler-plugin - 17 - 17 - --enable-preview + ${java.version} + ${java.version} + diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/AppConfig.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/AppConfig.java index fdfd1d6f2..1a3976ca4 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/AppConfig.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/AppConfig.java @@ 
-14,21 +14,20 @@ public final class AppConfig { private static final Logger LOGGER = LogManager.getLogger(AppConfig.class); private static final Config SYSTEM_PROPERTIES = ConfigFactory.systemProperties(); private static final Config SYSTEM_ENVIRONMENT = ConfigFactory.systemEnvironment(); - private static final Config CONFIG = new Builder() - .withSystemEnvironment() - .withSystemProperties() - .withOptionalRelativeFile("/conf/server.production.conf") - .withOptionalRelativeFile("/conf/server.staging.conf") - .withOptionalRelativeFile("/conf/server.test.conf") - .withResource("application.local.conf") - .withResource("application.conf") - .build(); - - public static final String POSTGRESQL_IP = CONFIG.getString("POSTGRESQL_IP"); - public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); - public static final String POSTGRESQL_USER = CONFIG.getString("POSTGRESQL_USER"); - public static final String POSTGRESQL_PASSWORD = CONFIG.getString("POSTGRESQL_PASSWORD"); - public static final String POSTGRESQL_DATABASE = CONFIG.getString("POSTGRESQL_DATABASE"); + private static final Config CONFIG = new Builder().withSystemEnvironment() + .withSystemProperties() + .withOptionalRelativeFile("/conf/server.production.conf") + .withOptionalRelativeFile("/conf/server.staging.conf") + .withOptionalRelativeFile("/conf/server.test.conf") + .withResource("application.local.conf") + .withResource("application.conf") + .build(); + + // public static final String POSTGRESQL_IP = CONFIG.getString("POSTGRESQL_IP"); +// public static final Integer POSTGRESQL_PORT = CONFIG.getInt("POSTGRESQL_PORT"); +// public static final String POSTGRESQL_USER = CONFIG.getString("POSTGRESQL_USER"); +// public static final String POSTGRESQL_PASSWORD = CONFIG.getString("POSTGRESQL_PASSWORD"); +// public static final String POSTGRESQL_NOTIFICATIONS_DB = CONFIG.getString("POSTGRESQL_NOTIFICATIONS_DB"); public static final String KAFKA_BOOTSTRAP_SERVERS = 
CONFIG.getString("KAFKA_BOOTSTRAP_SERVERS"); public static final String KAFKA_APPLICATION_ID_INTERACTIONS = CONFIG.getString("KAFKA_APPLICATION_ID_INTERACTIONS"); public static final String KAFKA_APPLICATION_ID_MU = CONFIG.getString("KAFKA_APPLICATION_ID_MU"); @@ -46,19 +45,19 @@ public final class AppConfig { public static final String API_HTTP_PORT = CONFIG.getString("API_HTTP_PORT"); public static final Float LINKER_MATCH_THRESHOLD = (float) CONFIG.getDouble("LINKER_MATCH_THRESHOLD"); public static final Float LINKER_MATCH_THRESHOLD_MARGIN = (float) CONFIG.getDouble("LINKER_MATCH_THRESHOLD_MARGIN"); + public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); + + private AppConfig() { + } public static String[] getDGraphHosts() { return DGRAPH_ALPHA_HOSTS; } + public static int[] getDGraphPorts() { return DGRAPH_ALPHA_PORTS; } - public static final Level GET_LOG_LEVEL = Level.toLevel(CONFIG.getString("LOG4J2_LEVEL")); - - private AppConfig() { - } - private static class Builder { private Config conf = ConfigFactory.empty(); diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Ask.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Ask.java index 3df6907a0..5c050e258 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Ask.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Ask.java @@ -6,10 +6,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jembi.jempi.linker.backend.BackEnd; -import org.jembi.jempi.shared.models.ApiModels; -import org.jembi.jempi.shared.models.InteractionEnvelop; -import org.jembi.jempi.shared.models.LinkInteractionSyncBody; -import org.jembi.jempi.shared.models.LinkInteractionToGidSyncBody; +import org.jembi.jempi.shared.models.*; import java.util.concurrent.CompletionStage; @@ -93,7 +90,7 @@ static CompletionStage patchCrUpdateField( static CompletionStage postLinkInteraction( final 
ActorSystem actorSystem, final ActorRef backEnd, - final LinkInteractionSyncBody body) { + final ApiModels.LinkInteractionSyncBody body) { CompletionStage stage = AskPattern.ask(backEnd, replyTo -> new BackEnd.SyncLinkInteractionRequest( body, @@ -118,18 +115,19 @@ static CompletionStage findCandidates( final ActorSystem actorSystem, final ActorRef backEnd, final String iid) { - CompletionStage stage = AskPattern - .ask(backEnd, - replyTo -> new BackEnd.FindCandidatesWithScoreRequest(replyTo, iid), - java.time.Duration.ofSeconds(5), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.FindCandidatesWithScoreRequest( + replyTo, + iid), + java.time.Duration.ofSeconds(5), + actorSystem.scheduler()); return stage.thenApply(response -> response); } static CompletionStage postLinkPatientToGid( final ActorSystem actorSystem, final ActorRef backEnd, - final LinkInteractionToGidSyncBody body) { + final ApiModels.LinkInteractionToGidSyncBody body) { CompletionStage stage = AskPattern.ask(backEnd, replyTo -> new BackEnd.SyncLinkInteractionToGidRequest( body, @@ -143,14 +141,16 @@ static CompletionStage postCalculateScores( final ActorSystem actorSystem, final ActorRef backEnd, final ApiModels.ApiCalculateScoresRequest body) { - CompletionStage stage = AskPattern.ask( - backEnd, - replyTo -> new BackEnd.CalculateScoresRequest(body, replyTo), - java.time.Duration.ofSeconds(11), - actorSystem.scheduler()); + CompletionStage stage = AskPattern.ask(backEnd, + replyTo -> new BackEnd.CalculateScoresRequest(body, + replyTo), + java.time.Duration.ofSeconds(11), + actorSystem.scheduler()); return stage.thenApply(response -> response); } + + // static CompletionStage getMU( // final ActorSystem actorSystem, // final ActorRef backEnd) { diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/HttpServer.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/HttpServer.java index 1c1eda3d3..1a881b099 100644 
--- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/HttpServer.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/HttpServer.java @@ -43,7 +43,7 @@ private Route createRoute( final ActorSystem actorSystem, final ActorRef backEnd) { return pathPrefix("JeMPI", - () -> concat(patch(() -> path(GlobalConstants.SEGMENT_PROXY_CR_UPDATE_FIELDS, + () -> concat(patch(() -> path(GlobalConstants.SEGMENT_PROXY_PATCH_CR_UPDATE_FIELDS, () -> Routes.proxyPatchCrUpdateField(actorSystem, backEnd))), post(() -> concat(path(GlobalConstants.SEGMENT_PROXY_POST_LINK_INTERACTION, () -> Routes.proxyPostLinkInteraction(actorSystem, backEnd)), @@ -51,15 +51,16 @@ private Route createRoute( () -> Routes.proxyPostLinkInteractionToGID(actorSystem, backEnd)), path(GlobalConstants.SEGMENT_PROXY_POST_CALCULATE_SCORES, () -> Routes.proxyPostCalculateScores(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_CR_CANDIDATES, + path(GlobalConstants.SEGMENT_PROXY_POST_CR_CANDIDATES, () -> Routes.proxyGetCrCandidates(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_CR_FIND, + path(GlobalConstants.SEGMENT_PROXY_POST_CR_FIND, () -> Routes.proxyGetCrFind(actorSystem, backEnd)), - path(GlobalConstants.SEGMENT_PROXY_CR_REGISTER, + path(GlobalConstants.SEGMENT_PROXY_POST_CR_REGISTER, () -> Routes.proxyPostCrRegister(actorSystem, backEnd)))), get(() -> concat(// path("mu", () -> Routes.routeMU(actorSystem, backEnd)), path(GlobalConstants.SEGMENT_PROXY_GET_CANDIDATES_WITH_SCORES, () -> Routes.proxyGetCandidatesWithScore(actorSystem, backEnd)))))); } + } diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Main.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Main.java index cbd6b68e8..9ea2f81f0 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Main.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Main.java @@ -9,6 +9,8 @@ import org.apache.logging.log4j.Logger; import 
org.jembi.jempi.AppConfig; import org.jembi.jempi.linker.backend.BackEnd; +import org.jembi.jempi.shared.models.CustomMU; +import org.jembi.jempi.shared.models.GlobalConstants; public final class Main { @@ -23,25 +25,23 @@ public static void main(final String[] args) { } public Behavior create() { - return Behaviors.setup( - context -> { - final var system = context.getSystem(); - final ActorRef backEnd = context.spawn(BackEnd.create(), "BackEnd"); - context.watch(backEnd); - final SPInteractions spInteractions = SPInteractions.create(); - spInteractions.open(system, backEnd); - final SPMU spMU = new SPMU(); - spMU.open(system, backEnd); - httpServer = HttpServer.create(); - httpServer.open(system, backEnd); - return Behaviors.receive(Void.class) - .onSignal(Terminated.class, - sig -> { - httpServer.close(system); - return Behaviors.stopped(); - }) - .build(); - }); + return Behaviors.setup(context -> { + final var system = context.getSystem(); + final ActorRef backEnd = context.spawn(BackEnd.create(), "BackEnd"); + context.watch(backEnd); + if (!CustomMU.SEND_INTERACTIONS_TO_EM) { + final SPInteractions spInteractions = SPInteractions.create(GlobalConstants.TOPIC_INTERACTION_LINKER); + spInteractions.open(system, backEnd); + } + final SPMU spMU = new SPMU(); + spMU.open(system, backEnd); + httpServer = HttpServer.create(); + httpServer.open(system, backEnd); + return Behaviors.receive(Void.class).onSignal(Terminated.class, sig -> { + httpServer.close(system); + return Behaviors.stopped(); + }).build(); + }); } private void run() { diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Routes.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Routes.java index b12247628..8202a600c 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Routes.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/Routes.java @@ -3,24 +3,33 @@ import akka.actor.typed.ActorRef; import akka.actor.typed.ActorSystem; 
import akka.http.javadsl.marshallers.jackson.Jackson; +import akka.http.javadsl.model.StatusCode; import akka.http.javadsl.model.StatusCodes; import akka.http.javadsl.server.Route; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.jembi.jempi.libmpi.MpiGeneralError; import org.jembi.jempi.libmpi.MpiServiceError; import org.jembi.jempi.linker.backend.BackEnd; -import org.jembi.jempi.shared.models.ApiModels; -import org.jembi.jempi.shared.models.CustomMU; -import org.jembi.jempi.shared.models.LinkInteractionSyncBody; -import org.jembi.jempi.shared.models.LinkInteractionToGidSyncBody; +import org.jembi.jempi.shared.models.*; import static akka.http.javadsl.server.Directives.*; import static org.jembi.jempi.shared.utils.AppUtils.OBJECT_MAPPER; final class Routes { + private static final Logger LOGGER = LogManager.getLogger(Routes.class); + private Routes() { } + static StatusCode logHttpError( + final StatusCode code, + final String log) { + LOGGER.debug("{}", log); + return code; + } + static Route mapError(final MpiGeneralError obj) { return switch (obj) { case MpiServiceError.InteractionIdDoesNotExistError e -> complete(StatusCodes.BAD_REQUEST, e, Jackson.marshaller()); @@ -49,35 +58,17 @@ static Route proxyGetCandidatesWithScore( candidateList -> complete(StatusCodes.OK, candidateList, Jackson.marshaller())) - : complete(StatusCodes.IM_A_TEAPOT)))); - } - - static Route proxyPostLinkInteraction( - final ActorSystem actorSystem, - final ActorRef backEnd) { - return entity(Jackson.unmarshaller(LinkInteractionSyncBody.class), - obj -> onComplete(Ask.postLinkInteraction(actorSystem, backEnd, obj), response -> { - if (response.isSuccess()) { - final var eventLinkPatientSyncRsp = response.get(); - return complete(StatusCodes.OK, - new ApiModels.ApiExtendedLinkInfo(eventLinkPatientSyncRsp.stan(), - eventLinkPatientSyncRsp.linkInfo(), - eventLinkPatientSyncRsp.externalLinkCandidateList()), - Jackson.marshaller()); - } else { - 
return complete(StatusCodes.IM_A_TEAPOT); - } - })); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT))))); } static Route proxyPostLinkInteractionToGID( final ActorSystem actorSystem, final ActorRef backEnd) { - return entity(Jackson.unmarshaller(LinkInteractionToGidSyncBody.class), + return entity(Jackson.unmarshaller(ApiModels.LinkInteractionToGidSyncBody.class), obj -> onComplete(Ask.postLinkPatientToGid(actorSystem, backEnd, obj), response -> response.isSuccess() ? complete(StatusCodes.OK, response.get(), Jackson.marshaller()) - : complete(StatusCodes.IM_A_TEAPOT))); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)))); } static Route proxyPostCalculateScores( @@ -87,18 +78,9 @@ static Route proxyPostCalculateScores( obj -> onComplete(Ask.postCalculateScores(actorSystem, backEnd, obj), response -> response.isSuccess() ? complete(StatusCodes.OK, response.get(), Jackson.marshaller()) - : complete(StatusCodes.IM_A_TEAPOT))); + : complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)))); } -// static Route routeMU( -// final ActorSystem actorSystem, -// final ActorRef backEnd) { -// return onComplete(Ask.getMU(actorSystem, backEnd), -// response -> response.isSuccess() -// ? 
complete(StatusCodes.OK, response.get().mu(), Jackson.marshaller()) -// : complete(StatusCodes.IM_A_TEAPOT)); -// } - static Route proxyGetCrCandidates( final ActorSystem actorSystem, final ActorRef backEnd) { @@ -113,7 +95,7 @@ static Route proxyGetCrCandidates( new ApiModels.ApiCrCandidatesResponse(rsp.goldenRecords().get()), Jackson.marshaller(OBJECT_MAPPER)); } else { - return complete(StatusCodes.IM_A_TEAPOT); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } @@ -132,7 +114,7 @@ static Route proxyGetCrFind( new ApiModels.ApiCrCandidatesResponse(rsp.goldenRecords().get()), Jackson.marshaller(OBJECT_MAPPER)); } else { - return complete(StatusCodes.IM_A_TEAPOT); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } @@ -153,7 +135,25 @@ static Route proxyPostCrRegister( Jackson.marshaller(OBJECT_MAPPER)); } } else { - return complete(StatusCodes.IM_A_TEAPOT); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); + } + })); + } + + static Route proxyPostLinkInteraction( + final ActorSystem actorSystem, + final ActorRef backEnd) { + return entity(Jackson.unmarshaller(OBJECT_MAPPER, ApiModels.LinkInteractionSyncBody.class), + obj -> onComplete(Ask.postLinkInteraction(actorSystem, backEnd, obj), response -> { + if (response.isSuccess()) { + final var eventLinkPatientSyncRsp = response.get(); + return complete(StatusCodes.OK, + new ApiModels.ApiExtendedLinkInfo(eventLinkPatientSyncRsp.stan(), + eventLinkPatientSyncRsp.linkInfo(), + eventLinkPatientSyncRsp.externalLinkCandidateList()), + Jackson.marshaller()); + } else { + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } @@ -174,7 +174,7 @@ static Route proxyPatchCrUpdateField( Jackson.marshaller()); } } else { - return complete(StatusCodes.IM_A_TEAPOT); + return complete(ApiModels.getHttpErrorResponse(StatusCodes.IM_A_TEAPOT)); } })); } diff --git 
a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPInteractions.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPInteractions.java index f487e8aec..e44e4634f 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPInteractions.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPInteractions.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import org.jembi.jempi.AppConfig; import org.jembi.jempi.linker.backend.BackEnd; -import org.jembi.jempi.shared.models.GlobalConstants; +import org.jembi.jempi.shared.models.CustomMU; import org.jembi.jempi.shared.models.InteractionEnvelop; import org.jembi.jempi.shared.serdes.JsonPojoDeserializer; import org.jembi.jempi.shared.serdes.JsonPojoSerializer; @@ -22,28 +22,33 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import static org.jembi.jempi.shared.models.InteractionEnvelop.ContentType.BATCH_END_SENTINEL; +import static org.jembi.jempi.shared.models.InteractionEnvelop.ContentType.BATCH_INTERACTION; + public final class SPInteractions { private static final Logger LOGGER = LogManager.getLogger(SPInteractions.class); - private KafkaStreams patientKafkaStreams; + private final String topic; + private KafkaStreams interactionEnvelopKafkaStreams; - private SPInteractions() { + private SPInteractions(final String topic_) { LOGGER.info("SPInteractions constructor"); + this.topic = topic_; } - public static SPInteractions create() { - return new SPInteractions(); + public static SPInteractions create(final String topic_) { + return new SPInteractions(topic_); } private void linkPatient( final ActorSystem system, final ActorRef backEnd, final String key, - final InteractionEnvelop batchInteraction) { - if (batchInteraction.contentType() != InteractionEnvelop.ContentType.BATCH_INTERACTION) { + final InteractionEnvelop interactionEnvelop) { + if (interactionEnvelop.contentType() != BATCH_INTERACTION) { 
return; } - final var completableFuture = Ask.linkInteraction(system, backEnd, key, batchInteraction).toCompletableFuture(); + final var completableFuture = Ask.linkInteraction(system, backEnd, key, interactionEnvelop).toCompletableFuture(); try { final var reply = completableFuture.get(65, TimeUnit.SECONDS); if (reply.linkInfo() == null) { @@ -51,37 +56,43 @@ private void linkPatient( } } catch (InterruptedException | ExecutionException | TimeoutException ex) { LOGGER.error(ex.getLocalizedMessage(), ex); - close(); + this.close(); } + } public void open( final ActorSystem system, final ActorRef backEnd) { - LOGGER.info("EM Stream Processor"); + LOGGER.info("SPInteractions Stream Processor"); final Properties props = loadConfig(); final var stringSerde = Serdes.String(); - final var batchPatientRecordSerde = - Serdes.serdeFrom(new JsonPojoSerializer<>(), new JsonPojoDeserializer<>(InteractionEnvelop.class)); + final var interactionEnvelopSerde = Serdes.serdeFrom(new JsonPojoSerializer<>(), + new JsonPojoDeserializer<>(InteractionEnvelop.class)); final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream patientsStream = - streamsBuilder.stream(GlobalConstants.TOPIC_INTERACTION_LINKER, Consumed.with(stringSerde, batchPatientRecordSerde)); - patientsStream.foreach((key, patient) -> linkPatient(system, backEnd, key, patient)); - patientKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); - patientKafkaStreams.cleanUp(); - patientKafkaStreams.start(); + final KStream interactionStream = + streamsBuilder.stream(topic, Consumed.with(stringSerde, interactionEnvelopSerde)); + interactionStream.foreach((key, interactionEnvelop) -> { + linkPatient(system, backEnd, key, interactionEnvelop); + if (!CustomMU.SEND_INTERACTIONS_TO_EM && interactionEnvelop.contentType() == BATCH_END_SENTINEL) { + this.close(); + } + }); + interactionEnvelopKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); + interactionEnvelopKafkaStreams.cleanUp(); + 
interactionEnvelopKafkaStreams.start(); LOGGER.info("KafkaStreams started"); } - public void close() { - LOGGER.warn("Stream closed"); - patientKafkaStreams.close(); + private void close() { + LOGGER.info("Stream closed"); + interactionEnvelopKafkaStreams.close(new KafkaStreams.CloseOptions().leaveGroup(true)); } private Properties loadConfig() { final Properties props = new Properties(); props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfig.KAFKA_BOOTSTRAP_SERVERS); - props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfig.KAFKA_APPLICATION_ID_INTERACTIONS); + props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfig.KAFKA_APPLICATION_ID_INTERACTIONS + topic); return props; } diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPMU.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPMU.java index e50fae76b..1139c8498 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPMU.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/SPMU.java @@ -40,12 +40,15 @@ void installMU( final String key, final CustomMU mu) { LOGGER.info("New MU set: {}/{}", key, mu); - final CompletionStage result = - AskPattern.ask( - backEnd, - replyTo -> new BackEnd.EventUpdateMUReq(mu, replyTo), - java.time.Duration.ofSeconds(5), - system.scheduler()); + + final SPInteractions spInteractions = SPInteractions.create(mu.tag()); + spInteractions.open(system, backEnd); + + final CompletionStage result = AskPattern.ask(backEnd, + replyTo -> new BackEnd.EventUpdateMUReq(mu, + replyTo), + java.time.Duration.ofSeconds(5), + system.scheduler()); final var completableFuture = result.toCompletableFuture(); try { final var reply = completableFuture.get(6, TimeUnit.SECONDS); @@ -64,12 +67,10 @@ public void open( LOGGER.info("MY Stream Processor"); final Properties props = loadConfig(); final Serde stringSerde = Serdes.String(); - final Serde muSerde = Serdes.serdeFrom(new JsonPojoSerializer<>(), - new 
JsonPojoDeserializer<>(CustomMU.class)); + final Serde muSerde = Serdes.serdeFrom(new JsonPojoSerializer<>(), new JsonPojoDeserializer<>(CustomMU.class)); final StreamsBuilder streamsBuilder = new StreamsBuilder(); - final KStream muStream = streamsBuilder.stream( - GlobalConstants.TOPIC_MU_LINKER, - Consumed.with(stringSerde, muSerde)); + final KStream muStream = + streamsBuilder.stream(GlobalConstants.TOPIC_MU_LINKER, Consumed.with(stringSerde, muSerde)); muStream.foreach((key, mu) -> installMU(system, backEnd, key, mu)); muKafkaStreams = new KafkaStreams(streamsBuilder.build(), props); muKafkaStreams.cleanUp(); diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/BackEnd.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/BackEnd.java index 90385bf93..68a0a60e3 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/BackEnd.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/BackEnd.java @@ -8,14 +8,12 @@ import akka.actor.typed.javadsl.Behaviors; import akka.actor.typed.javadsl.Receive; import io.vavr.control.Either; -import org.apache.commons.lang3.StringUtils; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; import org.jembi.jempi.AppConfig; import org.jembi.jempi.libmpi.LibMPI; -import org.jembi.jempi.libmpi.LibMPIClientInterface; import org.jembi.jempi.libmpi.MpiGeneralError; import org.jembi.jempi.shared.kafka.MyKafkaProducer; import org.jembi.jempi.shared.models.*; @@ -25,7 +23,6 @@ import java.time.Duration; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; @@ -92,29 +89,27 @@ private void openMPI(final boolean useDGraph) { AppConfig.KAFKA_BOOTSTRAP_SERVERS, 
"CLIENT_ID_LINKER-" + UUID.randomUUID()); } else { - libMPI = new LibMPI(String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", AppConfig.POSTGRESQL_IP, AppConfig.POSTGRESQL_PORT, AppConfig.POSTGRESQL_DATABASE), - AppConfig.POSTGRESQL_USER, - AppConfig.POSTGRESQL_PASSWORD, - AppConfig.KAFKA_BOOTSTRAP_SERVERS, - "CLIENT_ID_LINKER-" + UUID.randomUUID()); + libMPI = null; +// new LibMPI(String.format(Locale.ROOT, "jdbc:postgresql://%s:%d/%s", AppConfig.POSTGRESQL_IP, AppConfig +// .POSTGRESQL_PORT, AppConfig.POSTGRESQL_DATABASE), +// AppConfig.POSTGRESQL_USER, +// AppConfig.POSTGRESQL_PASSWORD, +// AppConfig.KAFKA_BOOTSTRAP_SERVERS, +// "CLIENT_ID_LINKER-" + UUID.randomUUID()); } libMPI.startTransaction(); - if (!(libMPI.dropAll().isEmpty() && libMPI.createSchema().isEmpty())) { - LOGGER.error("Create Schema Error"); - } - libMPI.closeTransaction(); } @Override public Receive createReceive() { return newReceiveBuilder().onMessage(AsyncLinkInteractionRequest.class, this::asyncLinkInteractionHandler) .onMessage(SyncLinkInteractionRequest.class, this::syncLinkInteractionHandler) - .onMessage(SyncLinkInteractionToGidRequest.class, this::syncLinkInteractionToGidHandler) +// .onMessage(SyncLinkInteractionToGidRequest.class, this::syncLinkInteractionToGidHandler) .onMessage(CalculateScoresRequest.class, this::calculateScoresHandler) .onMessage(TeaTimeRequest.class, this::teaTimeHandler) .onMessage(WorkTimeRequest.class, this::workTimeHandler) .onMessage(EventUpdateMUReq.class, this::eventUpdateMUReqHandler) -// .onMessage(EventGetMUReq.class, this::eventGetMUReqHandler) +// .onMessage(EventGetMUReq.class, this::eventGetMUReqHandler) .onMessage(CrCandidatesRequest.class, this::crCandidates) .onMessage(CrFindRequest.class, this::crFind) .onMessage(CrRegisterRequest.class, this::crRegister) @@ -138,22 +133,40 @@ private Behavior crFind(final CrFindRequest req) { return Behaviors.same(); } + private Behavior crUpdateField(final CrUpdateFieldRequest req) { + final var result = 
LinkerCR.crUpdateField(libMPI, req.crUpdateFields); + req.replyTo.tell(new CrUpdateFieldResponse(result)); + return Behaviors.same(); + } + private Behavior crRegister(final CrRegisterRequest req) { final var result = LinkerCR.crRegister(libMPI, req.crRegister); req.replyTo.tell(new CrRegisterResponse(result)); return Behaviors.same(); } - private Behavior crUpdateField(final CrUpdateFieldRequest req) { - final var result = LinkerCR.crUpdateField(libMPI, req.crUpdateFields); - req.replyTo.tell(new CrUpdateFieldResponse(result)); + private Behavior syncLinkInteractionHandler(final SyncLinkInteractionRequest request) { + final var listLinkInfo = LinkerDWH.linkInteraction(libMPI, + new Interaction(null, + request.link.sourceId(), + request.link.uniqueInteractionData(), + request.link.demographicData()), + request.link.externalLinkRange(), + request.link.matchThreshold() == null + ? AppConfig.LINKER_MATCH_THRESHOLD + : request.link.matchThreshold(), + request.link.stan()); + request.replyTo.tell(new SyncLinkInteractionResponse(request.link.stan(), + listLinkInfo.isLeft() + ? listLinkInfo.getLeft() + : null, + listLinkInfo.isRight() + ? 
listLinkInfo.get() + : null)); return Behaviors.same(); } private Behavior asyncLinkInteractionHandler(final AsyncLinkInteractionRequest req) { - if (LOGGER.isTraceEnabled()) { - LOGGER.trace("{}", req.batchInteraction.stan()); - } if (req.batchInteraction.contentType() != InteractionEnvelop.ContentType.BATCH_INTERACTION) { return Behaviors.withTimers(timers -> { timers.startSingleTimer(SINGLE_TIMER_TIMEOUT_KEY, TeaTimeRequest.INSTANCE, Duration.ofSeconds(5)); @@ -162,7 +175,11 @@ private Behavior asyncLinkInteractionHandler(final AsyncLinkInteraction }); } final var linkInfo = - LinkerDWH.linkInteraction(libMPI, req.batchInteraction.interaction(), null, AppConfig.LINKER_MATCH_THRESHOLD); + LinkerDWH.linkInteraction(libMPI, + req.batchInteraction.interaction(), + null, + AppConfig.LINKER_MATCH_THRESHOLD, + req.batchInteraction.stan()); if (linkInfo.isLeft()) { req.replyTo.tell(new AsyncLinkInteractionResponse(linkInfo.getLeft())); } else { @@ -174,22 +191,7 @@ private Behavior asyncLinkInteractionHandler(final AsyncLinkInteraction }); } - private Behavior syncLinkInteractionHandler(final SyncLinkInteractionRequest request) { - final var listLinkInfo = - LinkerDWH.linkInteraction(libMPI, - request.link.interaction(), - request.link.externalLinkRange(), - request.link.matchThreshold()); - request.replyTo.tell(new SyncLinkInteractionResponse(request.link.stan(), - listLinkInfo.isLeft() - ? listLinkInfo.getLeft() - : null, - listLinkInfo.isRight() - ? 
listLinkInfo.get() - : null)); - return Behaviors.same(); - } - +/* private Behavior syncLinkInteractionToGidHandler(final SyncLinkInteractionToGidRequest request) { final LinkInfo linkInfo; final var interaction = request.link.interaction(); @@ -207,12 +209,10 @@ private Behavior syncLinkInteractionToGidHandler(final SyncLinkInteract LOGGER.error("Golden Record for GID {} is null", gid); linkInfo = null; } else { - final var validated1 = - CustomLinkerDeterministic.validateDeterministicMatch(goldenRecord.demographicData(), - interaction.demographicData()); - final var validated2 = - CustomLinkerProbabilistic.validateProbabilisticScore(goldenRecord.demographicData(), - interaction.demographicData()); + final var validated1 = CustomLinkerDeterministic.validateDeterministicMatch(goldenRecord.demographicData(), + interaction.demographicData()); + final var validated2 = CustomLinkerProbabilistic.validateProbabilisticScore(goldenRecord.demographicData(), + interaction.demographicData()); linkInfo = libMPI.createInteractionAndLinkToExistingGoldenRecord(interaction, new LibMPIClientInterface.GoldenIdScore(gid, @@ -229,6 +229,7 @@ private Behavior syncLinkInteractionToGidHandler(final SyncLinkInteract request.replyTo.tell(new SyncLinkInteractionToGidResponse(request.link.stan(), linkInfo)); return Behaviors.same(); } +*/ private Behavior workTimeHandler(final WorkTimeRequest request) { LOGGER.info("WORK TIME"); @@ -271,19 +272,19 @@ private Behavior calculateScoresHandler(final CalculateScoresRequest re interaction.demographicData()))) .sorted((o1, o2) -> Float.compare(o2.score(), o1.score())) .collect(Collectors.toCollection(ArrayList::new)); - request.replyTo.tell( - new CalculateScoresResponse( - new ApiModels.ApiCalculateScoresResponse(request.calculateScoresRequest.interactionId(), - scores))); + request.replyTo.tell(new CalculateScoresResponse(new ApiModels.ApiCalculateScoresResponse(request.calculateScoresRequest.interactionId(), + scores))); return 
Behaviors.same(); } + private Behavior eventUpdateMUReqHandler(final EventUpdateMUReq req) { CustomLinkerProbabilistic.updateMU(req.mu); req.replyTo.tell(new EventUpdateMURsp(true)); return Behaviors.same(); } + // private Behavior eventGetMUReqHandler(final EventGetMUReq req) { // req.replyTo.tell(new EventGetMURsp(CustomLinkerProbabilistic.getMU())); // return Behaviors.same(); @@ -343,7 +344,7 @@ public record CalculateScoresResponse( } public record SyncLinkInteractionRequest( - LinkInteractionSyncBody link, + ApiModels.LinkInteractionSyncBody link, ActorRef replyTo) implements Request { } @@ -354,7 +355,7 @@ public record SyncLinkInteractionResponse( } public record SyncLinkInteractionToGidRequest( - LinkInteractionToGidSyncBody link, + ApiModels.LinkInteractionToGidSyncBody link, ActorRef replyTo) implements Request { } @@ -416,4 +417,5 @@ public record UpdateFieldResponse( } } + } diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/CustomLinkerProbabilistic.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/CustomLinkerProbabilistic.java index 9deea079b..34e97b75b 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/CustomLinkerProbabilistic.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/CustomLinkerProbabilistic.java @@ -1,5 +1,7 @@ package org.jembi.jempi.linker.backend; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.jembi.jempi.shared.models.CustomDemographicData; import org.jembi.jempi.shared.models.CustomMU; @@ -13,6 +15,7 @@ final class CustomLinkerProbabilistic { + private static final Logger LOGGER = LogManager.getLogger(CustomLinkerProbabilistic.class); static final int METRIC_MIN = 0; static final int METRIC_MAX = 1; static final int METRIC_SCORE = 2; @@ -28,17 +31,6 @@ final class CustomLinkerProbabilistic { private CustomLinkerProbabilistic() { } - static CustomMU getMU() { - return 
new CustomMU( - LinkerProbabilistic.getProbability(currentLinkFields.givenName), - LinkerProbabilistic.getProbability(currentLinkFields.familyName), - LinkerProbabilistic.getProbability(currentLinkFields.gender), - LinkerProbabilistic.getProbability(currentLinkFields.dob), - LinkerProbabilistic.getProbability(currentLinkFields.city), - LinkerProbabilistic.getProbability(currentLinkFields.phoneNumber), - LinkerProbabilistic.getProbability(currentLinkFields.nationalId)); - } - private record LinkFields( LinkerProbabilistic.Field givenName, LinkerProbabilistic.Field familyName, @@ -112,4 +104,12 @@ public static void updateMU(final CustomMU mu) { } } + public static void checkUpdatedLinkMU() { + if (updatedLinkFields != null) { + LOGGER.info("Using updated Link MU values: {}", updatedLinkFields); + CustomLinkerProbabilistic.currentLinkFields = updatedLinkFields; + updatedLinkFields = null; + } + } + } diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerCR.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerCR.java index 975c57a11..aee93b4cd 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerCR.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerCR.java @@ -69,14 +69,10 @@ static Either crRegister( if (crRegister.uniqueInteractionData().auxDateCreated() == null) { return Either.left(new MpiServiceError.CRMissingFieldError("auxDateCreated")); } else { - final var matchedCandidates = crMatchedCandidates(libMPI, - crRegister.candidateThreshold(), - crRegister.demographicData()); + final var matchedCandidates = crMatchedCandidates(libMPI, crRegister.candidateThreshold(), crRegister.demographicData()); if (matchedCandidates.isEmpty()) { - final var interaction = new Interaction(null, - crRegister.sourceId(), - crRegister.uniqueInteractionData(), - crRegister.demographicData()); + final var interaction = + new Interaction(null, 
crRegister.sourceId(), crRegister.uniqueInteractionData(), crRegister.demographicData()); final var linkInfo = libMPI.createInteractionAndLinkToClonedGoldenRecord(CustomLinkerBackEnd.applyAutoCreateFunctions(interaction), 1.0F); @@ -84,8 +80,7 @@ static Either crRegister( } else { return Either.left(new MpiServiceError.CRClientExistsError(matchedCandidates.stream() .map(GoldenRecord::demographicData) - .toList(), - crRegister.demographicData())); + .toList(), crRegister.demographicData())); } } } @@ -112,12 +107,9 @@ static Either linkStatsMetaProducer = null; + private LinkerDWH() { } @@ -82,21 +88,10 @@ static void helperUpdateInteractionsScore( final LibMPI libMPI, final float threshold, final ExpandedGoldenRecord expandedGoldenRecord) { - if (LOGGER.isTraceEnabled()) { - expandedGoldenRecord.interactionsWithScore().forEach(interactionWithScore -> LOGGER.trace("{} -> {} : {}", - interactionWithScore.interaction() - .uniqueInteractionData() - .auxId(), - expandedGoldenRecord.goldenRecord() - .customUniqueGoldenRecordData() - .auxId(), - interactionWithScore.score())); - } expandedGoldenRecord.interactionsWithScore().forEach(interactionWithScore -> { final var interaction = interactionWithScore.interaction(); - final var score = - LinkerUtils.calcNormalizedScore(expandedGoldenRecord.goldenRecord().demographicData(), - interaction.demographicData()); + final var score = LinkerUtils.calcNormalizedScore(expandedGoldenRecord.goldenRecord().demographicData(), + interaction.demographicData()); if (LOGGER.isTraceEnabled()) { LOGGER.trace("{} -- {} : {}", interactionWithScore.score(), score, abs(interactionWithScore.score() - score) > 1E-2); @@ -123,11 +118,29 @@ static void helperUpdateInteractionsScore( }); } - static Either> linkInteraction( + // + + public static Either> linkInteraction( final LibMPI libMPI, final Interaction interaction, final ExternalLinkRange externalLinkRange, - final float matchThreshold_) { + final float matchThreshold_, + final String 
envelopStan) { + +// if (LOGGER.isTraceEnabled()) { +// LOGGER.trace("{}", envelopStan); +// } + + LinkStatsMeta.ConfusionMatrix confusionMatrix; + CustomFieldTallies customFieldTallies = CUSTOM_FIELD_TALLIES_SUM_IDENTITY; + + if (linkStatsMetaProducer == null) { + linkStatsMetaProducer = new MyKafkaProducer<>(AppConfig.KAFKA_BOOTSTRAP_SERVERS, + GlobalConstants.TOPIC_INTERACTION_PROCESSOR_CONTROLLER, + stringSerializer(), + linkStatsMetaSerializer(), + "LinkerDWH-MU-TALLIES"); + } + if (!CustomLinkerDeterministic.canApplyLinking(interaction.demographicData())) { libMPI.startTransaction(); if (CustomLinkerDeterministic.DETERMINISTIC_DO_MATCHING || CustomLinkerProbabilistic.PROBABILISTIC_DO_MATCHING) { @@ -136,32 +149,29 @@ static Either> linkInteraction( if (candidates.isEmpty()) { try { final var i = OBJECT_MAPPER.writeValueAsString(interaction.demographicData()); - final var f = - """ - MATCH NOTIFICATION NO CANDIDATE - {}"""; + final var f = """ + MATCH NOTIFICATION NO CANDIDATE + {}"""; LOGGER.info(f, i); } catch (JsonProcessingException e) { LOGGER.error(e.getLocalizedMessage(), e); } } else { - final var workCandidate = - candidates.parallelStream() - .unordered() - .map(candidate -> new WorkCandidate(candidate, - LinkerUtils.calcNormalizedScore(candidate.demographicData(), - interaction.demographicData()))) - .sorted((o1, o2) -> Float.compare(o2.score(), o1.score())) - .collect(Collectors.toCollection(ArrayList::new)) - .get(0); + final var workCandidate = candidates.parallelStream() + .unordered() + .map(candidate -> new WorkCandidate(candidate, + LinkerUtils.calcNormalizedScore(candidate.demographicData(), + interaction.demographicData()))) + .sorted((o1, o2) -> Float.compare(o2.score(), o1.score())) + .collect(Collectors.toCollection(ArrayList::new)) + .getFirst(); try { final var i = OBJECT_MAPPER.writeValueAsString(interaction.demographicData()); final var g = OBJECT_MAPPER.writeValueAsString(workCandidate.goldenRecord().demographicData()); - final var 
f = - """ - MATCH NOTIFICATION - {} - {}"""; + final var f = """ + MATCH NOTIFICATION + {} + {}"""; LOGGER.info(f, i, g); } catch (JsonProcessingException e) { LOGGER.error(e.getLocalizedMessage(), e); @@ -178,19 +188,44 @@ static Either> linkInteraction( : matchThreshold_; try { libMPI.startTransaction(); - LinkerProbabilistic.checkUpdatedMU(); + CustomLinkerProbabilistic.checkUpdatedLinkMU(); final var candidateGoldenRecords = libMPI.findLinkCandidates(interaction.demographicData()); + LOGGER.debug("{} : {}", envelopStan, candidateGoldenRecords.size()); if (candidateGoldenRecords.isEmpty()) { linkInfo = libMPI.createInteractionAndLinkToClonedGoldenRecord(interaction, 1.0F); + confusionMatrix = new LinkStatsMeta.ConfusionMatrix(0.0, 0.0, 1.0, 0.0); } else { - final var allCandidateScores = - candidateGoldenRecords.parallelStream() - .unordered() - .map(candidate -> new WorkCandidate(candidate, - LinkerUtils.calcNormalizedScore(candidate.demographicData(), - interaction.demographicData()))) - .sorted((o1, o2) -> Float.compare(o2.score(), o1.score())) - .collect(Collectors.toCollection(ArrayList::new)); + final var allCandidateScores = candidateGoldenRecords + .parallelStream() + .unordered() + .map(candidate -> new WorkCandidate(candidate, + LinkerUtils.calcNormalizedScore( + candidate.demographicData(), + interaction.demographicData()))) + .sorted((o1, o2) -> Float.compare(o2.score(), o1.score())) + .collect(Collectors.toCollection(ArrayList::new)); + + // DO SOME TALLYING + customFieldTallies = IntStream + .range(0, allCandidateScores.size()) + .parallel() + .mapToObj(i -> { + final var workCandidate = allCandidateScores.get(i); + return CustomFieldTallies.map(i == 0 && workCandidate.score >= matchThreshold, + interaction.demographicData(), + workCandidate.goldenRecord.demographicData()); + }) + .reduce(CUSTOM_FIELD_TALLIES_SUM_IDENTITY, CustomFieldTallies::sum); + final var score = allCandidateScores.getFirst().score; + if (score >= matchThreshold + 0.1) { + 
confusionMatrix = new LinkStatsMeta.ConfusionMatrix(1.0, 0.0, 0.0, 0.0); + } else if (score >= matchThreshold) { + confusionMatrix = new LinkStatsMeta.ConfusionMatrix(0.80, 0.20, 0.0, 0.0); + } else if (score >= matchThreshold - 0.1) { + confusionMatrix = new LinkStatsMeta.ConfusionMatrix(0.0, 0.0, 0.20, 0.80); + } else { + confusionMatrix = new LinkStatsMeta.ConfusionMatrix(0.0, 0.0, 1.0, 0.0); + } // Get a list of candidates withing the supplied for external link range final var candidatesInExternalLinkRange = externalLinkRange == null @@ -202,26 +237,19 @@ static Either> linkInteraction( // Get a list of candidates above the supplied threshold final var belowThresholdNotifications = new ArrayList(); final var aboveThresholdNotifications = new ArrayList(); - final var candidatesAboveMatchThreshold = - allCandidateScores - .stream() - .peek(v -> { - if (v.score() > matchThreshold - 0.1 && v.score() < matchThreshold) { - belowThresholdNotifications.add(new Notification.MatchData(v.goldenRecord().goldenId(), - v.score())); - } else if (v.score() >= matchThreshold && v.score() < matchThreshold + 0.1) { - aboveThresholdNotifications.add(new Notification.MatchData(v.goldenRecord().goldenId(), - v.score())); - } - }) - .filter(v -> v.score() >= matchThreshold) - .collect(Collectors.toCollection(ArrayList::new)); + final var candidatesAboveMatchThreshold = allCandidateScores.stream().peek(v -> { + if (v.score() > matchThreshold - 0.1 && v.score() < matchThreshold) { + belowThresholdNotifications.add(new Notification.MatchData(v.goldenRecord().goldenId(), v.score())); + } else if (v.score() >= matchThreshold && v.score() < matchThreshold + 0.1) { + aboveThresholdNotifications.add(new Notification.MatchData(v.goldenRecord().goldenId(), v.score())); + } + }).filter(v -> v.score() >= matchThreshold).collect(Collectors.toCollection(ArrayList::new)); if (candidatesAboveMatchThreshold.isEmpty()) { if (candidatesInExternalLinkRange.isEmpty()) { linkInfo = 
libMPI.createInteractionAndLinkToClonedGoldenRecord(interaction, 1.0F); if (!belowThresholdNotifications.isEmpty()) { - sendNotification(Notification.NotificationType.THRESHOLD, + sendNotification(Notification.NotificationType.BELOW_THRESHOLD, linkInfo.interactionUID(), AppUtils.getNames(interaction.demographicData()), new Notification.MatchData(linkInfo.goldenUID(), linkInfo.score()), @@ -233,7 +261,7 @@ static Either> linkInteraction( candidate.score))); } } else { - final var firstCandidate = candidatesAboveMatchThreshold.get(0); + final var firstCandidate = candidatesAboveMatchThreshold.getFirst(); final var linkToGoldenId = new LibMPIClientInterface.GoldenIdScore(firstCandidate.goldenRecord.goldenId(), firstCandidate.score); final var validated1 = @@ -242,18 +270,20 @@ static Either> linkInteraction( final var validated2 = CustomLinkerProbabilistic.validateProbabilisticScore(firstCandidate.goldenRecord.demographicData(), interaction.demographicData()); - linkInfo = - libMPI.createInteractionAndLinkToExistingGoldenRecord(interaction, - linkToGoldenId, - validated1, - validated2); + linkInfo = libMPI.createInteractionAndLinkToExistingGoldenRecord(interaction, + linkToGoldenId, + validated1, + validated2); if (linkToGoldenId.score() <= matchThreshold + 0.1) { - sendNotification(Notification.NotificationType.THRESHOLD, + sendNotification(Notification.NotificationType.ABOVE_THRESHOLD, linkInfo.interactionUID(), AppUtils.getNames(interaction.demographicData()), new Notification.MatchData(linkInfo.goldenUID(), linkInfo.score()), - aboveThresholdNotifications); + aboveThresholdNotifications.stream() + .filter(m -> !Objects.equals(m.gID(), + firstCandidate.goldenRecord.goldenId())) + .collect(Collectors.toCollection(ArrayList::new))); } if (Boolean.TRUE.equals(firstCandidate.goldenRecord.customUniqueGoldenRecordData().auxAutoUpdateEnabled())) { CustomLinkerBackEnd.updateGoldenRecordFields(libMPI, @@ -284,6 +314,14 @@ static Either> linkInteraction( } finally { 
libMPI.closeTransaction(); } + linkStatsMetaProducer.produceAsync("123", + new LinkStatsMeta(confusionMatrix, customFieldTallies), + ((metadata, exception) -> { + if (exception != null) { + LOGGER.error(exception.toString()); + } + })); + return linkInfo == null ? Either.right(externalLinkCandidateList) : Either.left(linkInfo); @@ -304,7 +342,15 @@ private static void sendNotification( } } - private record WorkCandidate( + private static Serializer stringSerializer() { + return new StringSerializer(); + } + + private static Serializer linkStatsMetaSerializer() { + return new JsonPojoSerializer<>(); + } + + public record WorkCandidate( GoldenRecord goldenRecord, float score) { } diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerProbabilistic.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerProbabilistic.java index 7f9e8a3b7..e940ba69f 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerProbabilistic.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerProbabilistic.java @@ -17,7 +17,7 @@ import static java.lang.Math.log; import static org.jembi.jempi.linker.backend.CustomLinkerProbabilistic.*; -final class LinkerProbabilistic { +public final class LinkerProbabilistic { static final JaroWinklerSimilarity JARO_WINKLER_SIMILARITY = new JaroWinklerSimilarity(); static final JaccardSimilarity JACCARD_SIMILARITY = new JaccardSimilarity(); @@ -26,7 +26,6 @@ final class LinkerProbabilistic { private static final Logger LOGGER = LogManager.getLogger(LinkerProbabilistic.class); private static final double LOG2 = java.lang.Math.log(2.0); private static final float MISSING_PENALTY = 0.925F; - private LinkerProbabilistic() { } @@ -49,7 +48,7 @@ private static float fieldScore( return (float) (log((1.0 - m) / (1.0 - u)) / LOG2); } - private static float fieldScore( + public static float fieldScore( final String left, final String right, final 
Field field) { @@ -62,19 +61,35 @@ private static float fieldScore( return fieldScore(false, field.m, field.u); } - static CustomMU.Probability getProbability(final Field field) { + public static FieldScoreInfo fieldScoreInfo( + final String left, + final String right, + final Field field) { + final var score = field.similarityScore.apply(left, right); + for (int i = 0; i < field.weights.size(); i++) { + if (score >= field.comparisonLevels.get(i)) { + return new FieldScoreInfo(i <= field.comparisonLevels.size() / 2, + fieldScore(i <= field.comparisonLevels.size() / 2, + field.m, field.u) * field.weights.get(i)); + } + } + return new FieldScoreInfo(false, fieldScore(false, field.m, field.u)); + + } + + public static CustomMU.Probability getProbability(final Field field) { return new CustomMU.Probability(field.m(), field.u()); } - static void checkUpdatedMU() { -// if (CustomLinkerProbabilistic.updatedFields != null) { -// LOGGER.info("Using updated MU values: {}", CustomLinkerProbabilistic.updatedFields); -// CustomLinkerProbabilistic.currentLinkFields = CustomLinkerProbabilistic.updatedFields; -// CustomLinkerProbabilistic.updatedFields = null; +// public static void checkUpdatedMU() { +// if (updatedLinkFields != null) { +// LOGGER.info("Using updated MU values: {}", updatedLinkFields); +// CustomLinkerProbabilistic.currentLinkFields = updatedLinkFields; +// updatedLinkFields = null; // } - } +// } - static void updateMetricsForStringField( + public static void updateMetricsForStringField( final float[] metrics, final String left, final String right, @@ -93,6 +108,11 @@ static void updateMetricsForStringField( } } + public record FieldScoreInfo( + Boolean isMatch, + Float score) { + } + static class ExactSimilarity implements SimilarityScore { @Override @@ -174,7 +194,7 @@ public Double apply( } - record Field( + public record Field( SimilarityScore similarityScore, List comparisonLevels, List weights, @@ -182,14 +202,14 @@ record Field( float u, float min, float 
max) { - Field { + public Field { m = limitProbability(m); u = limitProbability(u); min = fieldScore(false, m, u); max = fieldScore(true, m, u); } - Field( + public Field( final SimilarityScore func_, final List comparisonLevels_, final float m_, @@ -202,11 +222,7 @@ private static List computeWeights(final int n) { if (n % 2 == 0) { final var k = n / 2; final var z = 1.0F / k; - final var w = IntStream.range(0, n) - .mapToDouble(i -> abs(1.0 - (z * i))) - .boxed() - .map(Double::floatValue) - .toList(); + final var w = IntStream.range(0, n).mapToDouble(i -> abs(1.0 - (z * i))).boxed().map(Double::floatValue).toList(); if (LOGGER.isDebugEnabled()) { try { LOGGER.debug("{}", AppUtils.OBJECT_MAPPER.writeValueAsString(w)); diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerUtils.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerUtils.java index 9458aaa7a..606458700 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerUtils.java +++ b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/linker/backend/LinkerUtils.java @@ -4,14 +4,14 @@ import org.apache.logging.log4j.Logger; import org.jembi.jempi.shared.models.CustomDemographicData; -final class LinkerUtils { +public final class LinkerUtils { private static final Logger LOGGER = LogManager.getLogger(LinkerUtils.class); private LinkerUtils() { } - static float calcNormalizedScore( + public static float calcNormalizedScore( final CustomDemographicData goldenRecord, final CustomDemographicData interaction) { if (CustomLinkerDeterministic.linkDeterministicMatch(goldenRecord, interaction)) { diff --git a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/stats/StatsTask.java b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/stats/StatsTask.java index ee172a2e7..87523c8a7 100644 --- a/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/stats/StatsTask.java +++ 
b/JeMPI_Apps/JeMPI_Linker/src/main/java/org/jembi/jempi/stats/StatsTask.java @@ -156,6 +156,7 @@ public StatsResults run() { LOGGER.info("Sub Lists: {}", subLists); LOGGER.info("Final Sub List Size: {}", finalSubListSize); } + int fromIdx; int toIdx; for (long i = 0; i < subLists; i++) { @@ -196,19 +197,21 @@ public StatsResults run() { if (LOGGER.isInfoEnabled()) { LOGGER.info("Golden Records Found: {}", dataSet.size()); LOGGER.info("TP:{} FP:{} FN:{} Precision:{} Recall:{} F-score:{}", - truePositives[0], falsePositives[0], falseNegatives[0], - precision, recall, fScore); + truePositives[0], + falsePositives[0], + falseNegatives[0], + precision, + recall, + fScore); } - return new StatsResults( - interactionCount, - goldenRecords, - truePositives[0], - falsePositives[0], - falseNegatives[0], - precision, - recall, - fScore); - + return new StatsResults(interactionCount, + goldenRecords, + truePositives[0], + falsePositives[0], + falseNegatives[0], + precision, + recall, + fScore); } catch (IOException e) { LOGGER.error(e.getLocalizedMessage(), e); } diff --git a/JeMPI_Apps/JeMPI_Linker/src/test/java/org/jembi/jempi/linker/LinkerTest.java b/JeMPI_Apps/JeMPI_Linker/src/test/java/org/jembi/jempi/linker/LinkerTest.java deleted file mode 100644 index c94d54cea..000000000 --- a/JeMPI_Apps/JeMPI_Linker/src/test/java/org/jembi/jempi/linker/LinkerTest.java +++ /dev/null @@ -1,81 +0,0 @@ -package org.jembi.jempi.linker; - -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -@ExtendWith(MockitoExtension.class) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class LinkerTests { -/* - - ActorTestKit testKit; - ActorRef myActorRef; - CustomDemographicData goldenRecordDemographicData; - List patientRecords; - PatientRecordWithScore patientRecordWithScore1; - - @Mock - private LibMPI libMPI; - private 
ExpandedGoldenRecord expandedGoldenRecord; - - private PatientRecord patientRecord; - - private List goldenRecords; - - @BeforeEach - public void init(){ - testKit = ActorTestKit.create(); - myActorRef = testKit.spawn(BackEnd.create(libMPI)); - goldenRecordDemographicData = new CustomDemographicData("1", "Jane", "Doe", "F", "1985-05-05", "New York", "555-1234", "123456789"); - patientRecord = new PatientRecord("2", new SourceId("1", null, null), new CustomDemographicData("2", "Jane", "Doe", "F", "1985-05-05", "Boston", "555-5678", "987654321")); - patientRecords = new ArrayList<>(); - patientRecordWithScore1 = new PatientRecordWithScore(patientRecord, 0.14604087F); - patientRecords.add(patientRecordWithScore1); - expandedGoldenRecord = new ExpandedGoldenRecord(new GoldenRecord("1", null, goldenRecordDemographicData), patientRecords); - goldenRecords = new ArrayList<>(); - goldenRecords.add(expandedGoldenRecord.goldenRecord()); - } - - @AfterAll - public void tearDown() { - testKit = null; - myActorRef = null; - goldenRecordDemographicData = null; - patientRecord = null; - patientRecords = null; - patientRecordWithScore1 = null; - expandedGoldenRecord = null; - goldenRecords = null; - - } - - @Test - public void updateGoldenRecordField_Success(){ - - // Arrange - when(libMPI.updateGoldenRecordField(eq("1"), eq("city"), anyString())).thenReturn(true); - - BackEnd.updateGoldenRecordField(expandedGoldenRecord, "city", "New York", CustomDemographicData::city); - // Assert - verify(libMPI).updateGoldenRecordField(eq("1"), eq("city"), anyString()); - } - - @Test - public void updateGoldenRecordField_expandedGoldenRecordNull_returnSuccess() { - BackEnd.updateGoldenRecordField(null, "city", "New York", CustomDemographicData::city); - - verify(libMPI, never()).updateGoldenRecordField(anyString(), anyString(), anyString()); - } - - @Test - public void testCandidatesForReview() throws Exception{ - when(libMPI.getCandidates(any(CustomDemographicData.class), 
anyBoolean())).thenReturn(goldenRecords); - ArrayList list = BackEnd.getCandidatesMatchDataForPatientRecord(patientRecord); - - assertEquals(1, list.size()); - } -*/ -} diff --git a/JeMPI_Apps/JeMPI_UI/.dockerignore b/JeMPI_Apps/JeMPI_UI/.dockerignore index bb649288b..5547148a2 100644 --- a/JeMPI_Apps/JeMPI_UI/.dockerignore +++ b/JeMPI_Apps/JeMPI_UI/.dockerignore @@ -5,6 +5,7 @@ node_modules/ Dockerfile # Env +.env .env.local .env.test diff --git a/JeMPI_Apps/JeMPI_UI/.env.local b/JeMPI_Apps/JeMPI_UI/.env.local index e32912c0c..b3ee26ef2 100644 --- a/JeMPI_Apps/JeMPI_UI/.env.local +++ b/JeMPI_Apps/JeMPI_UI/.env.local @@ -1,9 +1,11 @@ -REACT_APP_JEMPI_BASE_URL=http://localhost:50000/JeMPI +REACT_APP_JEMPI_BASE_API_HOST=http://localhost +REACT_APP_JEMPI_BASE_API_PORT=50000 REACT_APP_MAX_UPLOAD_CSV_SIZE_IN_MEGABYTES="128" - +REACT_APP_SHOW_BRAND_LOGO=false REACT_APP_MOCK_BACKEND=false REACT_APP_ENABLE_SSO=false -KC_FRONTEND_URL=http://localhost:9088 -KC_REALM_NAME=platform-realm -KC_JEMPI_CLIENT_ID=jempi-oauth + +KC_FRONTEND_URL=http://localhost:8080 +KC_REALM_NAME=jempi-dev +KC_JEMPI_CLIENT_ID=jempi-oauth \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_UI/.gitignore b/JeMPI_Apps/JeMPI_UI/.gitignore index 75e109c9b..b8e55a36d 100644 --- a/JeMPI_Apps/JeMPI_UI/.gitignore +++ b/JeMPI_Apps/JeMPI_UI/.gitignore @@ -31,3 +31,5 @@ platform.exe instant-linux instant-macos instant.exe + +public/config.json \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_UI/Dockerfile b/JeMPI_Apps/JeMPI_UI/Dockerfile index 9422aa82f..d2428a7da 100644 --- a/JeMPI_Apps/JeMPI_UI/Dockerfile +++ b/JeMPI_Apps/JeMPI_UI/Dockerfile @@ -36,10 +36,24 @@ RUN yarn build FROM node:18-alpine as production-stage +ENV BUILD_DEPS="gettext" \ + RUNTIME_DEPS="libintl" + +RUN set -x && \ + apk add --update $RUNTIME_DEPS && \ + apk add --virtual build_deps $BUILD_DEPS && \ + cp /usr/bin/envsubst /usr/local/bin/envsubst && \ + apk del build_deps + WORKDIR /app RUN yarn global add serve -COPY 
--chown=node:node --from=build-stage /app/build ./build +COPY --chown=node:node --from=build-stage /app/build ./ + +COPY ./docker-entrypoint.sh /usr/local/bin/ + +RUN chmod +x /usr/local/bin/docker-entrypoint.sh + +ENTRYPOINT [ "/bin/sh", "/usr/local/bin/docker-entrypoint.sh" ] -CMD ["serve" , "-s", "build"] \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_UI/build-image.sh b/JeMPI_Apps/JeMPI_UI/build-image.sh index f76dde96a..f85e9a6f3 100755 --- a/JeMPI_Apps/JeMPI_UI/build-image.sh +++ b/JeMPI_Apps/JeMPI_UI/build-image.sh @@ -12,4 +12,4 @@ envsubst < $PROJECT_DEVOPS_DIR/conf/ui/.env > ./.env [ -z $(docker images -q ${UI_IMAGE}) ] || docker rmi ${UI_IMAGE} docker system prune --volumes -f -docker build --tag $UI_IMAGE --target production-stage . +docker build --tag $UI_IMAGE --target $NODE_ENV-stage . diff --git a/JeMPI_Apps/JeMPI_UI/docker-entrypoint.sh b/JeMPI_Apps/JeMPI_UI/docker-entrypoint.sh new file mode 100644 index 000000000..00c4be49b --- /dev/null +++ b/JeMPI_Apps/JeMPI_UI/docker-entrypoint.sh @@ -0,0 +1,16 @@ +# Set default variables + +export REACT_APP_NODE_ENV=${REACT_APP_NODE_ENV:-"development"} +export REACT_APP_MOCK_BACKEND=${REACT_APP_MOCK_BACKEND:-"false"} +export REACT_APP_JEMPI_BASE_API_HOST=${REACT_APP_JEMPI_BASE_API_HOST:-""} +export REACT_APP_JEMPI_BASE_API_PORT=${REACT_APP_JEMPI_BASE_API_PORT:-"50000"} +export REACT_APP_ENABLE_SSO=${REACT_APP_ENABLE_SSO:-"false"} +export REACT_APP_MAX_UPLOAD_CSV_SIZE_IN_MEGABYTES=${REACT_APP_MAX_UPLOAD_CSV_SIZE_IN_MEGABYTES:-"128"} +export KC_FRONTEND_URL=${KC_FRONTEND_URL:-""} +export KC_REALM_NAME=${KC_REALM_NAME:-""} +export KC_JEMPI_CLIENT_ID=${KC_JEMPI_CLIENT_ID:-""} +export REACT_APP_SHOW_BRAND_LOGO=${REACT_APP_SHOW_BRAND_LOGO:-"false"} + +cat /app/config-template.json | envsubst | tee /app/config.json + +serve -s /app diff --git a/JeMPI_Apps/JeMPI_UI/jest.config.js b/JeMPI_Apps/JeMPI_UI/jest.config.js new file mode 100644 index 000000000..bdb11ff21 --- /dev/null +++ 
b/JeMPI_Apps/JeMPI_UI/jest.config.js @@ -0,0 +1,22 @@ +const { pathsToModuleNameMapper } = require('ts-jest') +const { compilerOptions } = require('./tsconfig') + +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + coveragePathIgnorePatterns: [ + "/tests/", + "/node_modules/", + "tsconfig.json", + ], + collectCoverageFrom: [ + "**/src/**" + ], + transform: { + '^.+\\.(ts|tsx)?$': ['ts-jest', { isolatedModules:true }] + }, + modulePaths: [compilerOptions.baseUrl], + moduleNameMapper: { ...pathsToModuleNameMapper(compilerOptions.paths), + ".+\\.(css|styl|less|sass|scss|ttf|woff|woff2)$": "identity-obj-proxy" + }, + }; \ No newline at end of file diff --git a/JeMPI_Apps/JeMPI_UI/package.json b/JeMPI_Apps/JeMPI_UI/package.json index f852c02bd..79c06b421 100644 --- a/JeMPI_Apps/JeMPI_UI/package.json +++ b/JeMPI_Apps/JeMPI_UI/package.json @@ -11,25 +11,31 @@ "lint": "eslint src/**/*.{js,jsx,ts,tsx}", "lint:fix": "eslint ./src/ --ext ts,js,tsx,jsx --fix", "format": "prettier 'src/**/*.{js,jsx,ts,tsx,json,css}' --write", - "type-check": "tsc" + "type-check": "tsc", + "mock:startJeMPIAPIServer": "npx ts-node --compilerOptions '{\"module\":\"commonjs\"}' ./tests/test.utils/mocks/enviroments/MockJeMPI_API/MockJeMPI_API.ts", + "mock:startKeycloakServer": "npx ts-node --compilerOptions '{\"module\":\"commonjs\"}' ./tests/test.utils/mocks/enviroments/MockKeyCloak/MockKeyCloak.ts", + "mock:enviroments": "(start npm run mock:startKeycloakServer && start npm run mock:startJeMPIAPIServer -- 3000) || (npm run mock:startKeycloakServer & npm run mock:startJeMPIAPIServer -- 3000)" }, "dependencies": { + "@babel/core": "^7.0.0-0", + "@babel/plugin-proposal-private-property-in-object": "^7.21.11", + "@babel/plugin-syntax-flow": "^7.14.5", + "@babel/plugin-transform-private-property-in-object": "^7.23.4", + "@babel/plugin-transform-react-jsx": "^7.14.9", "@emotion/react": "^11.10.5", "@emotion/styled": "^11.10.5", "@fontsource/roboto": "^4.5.8", "@mui/icons-material": 
"^5.10.9", "@mui/material": "^5.13.4", + "@mui/system": "^5.4.1", "@mui/x-data-grid": "^6.6.0", "@mui/x-date-pickers": "^6.6.0", "@tanstack/react-query": "^4.16.1", "@tanstack/react-query-devtools": "^4.16.1", + "@testing-library/dom": ">=7.21.4", "@testing-library/jest-dom": "^5.14.1", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.2.1", - "@types/jest": "^27.0.1", - "@types/node": "^16.7.13", - "@types/react": "^18.0.0", - "@types/react-dom": "^18.0.0", "axios": "^1.1.3", "axios-mock-adapter": "^1.21.2", "cross-env": "^7.0.3", @@ -46,8 +52,14 @@ "web-vitals": "^2.1.0" }, "devDependencies": { + "@types/dockerode": "^3.3.20", + "@types/jest": "^29.5.5", + "@types/node": "^16.7.13", + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", "@typescript-eslint/eslint-plugin": "^5.51.0", "@typescript-eslint/parser": "^5.51.0", + "dockerode": "^4.0.0", "eslint": "^8.33.0", "eslint-config-prettier": "^8.6.0", "eslint-plugin-jsx-a11y": "^6.7.1", @@ -55,8 +67,14 @@ "eslint-plugin-react": "^7.32.2", "eslint-plugin-react-hooks": "^4.6.0", "husky": "^8.0.0", + "identity-obj-proxy": "^3.0.0", + "jest": "^29.7.0", + "jest-environment-jsdom": "^29.7.0", + "jest-fetch-mock": "^3.0.3", "lint-staged": "^13.1.1", - "prettier": "^2.8.4" + "prettier": "^2.8.4", + "process": "^0.11.10", + "ts-jest": "^29.1.1" }, "lint-staged": { "*.{js,jsx,ts,tsx}": [ diff --git a/JeMPI_Apps/JeMPI_UI/public/config-template.json b/JeMPI_Apps/JeMPI_UI/public/config-template.json new file mode 100644 index 000000000..1ff8c18cc --- /dev/null +++ b/JeMPI_Apps/JeMPI_UI/public/config-template.json @@ -0,0 +1,12 @@ +{ + "nodeEnv": "${REACT_APP_NODE_ENV}", + "shouldMockBackend": ${REACT_APP_MOCK_BACKEND}, + "apiHost": "${REACT_APP_JEMPI_BASE_API_HOST}", + "apiPort": ${REACT_APP_JEMPI_BASE_API_PORT}, + "useSso": ${REACT_APP_ENABLE_SSO}, + "maxUploadCsvSize": ${REACT_APP_MAX_UPLOAD_CSV_SIZE_IN_MEGABYTES}, + "KeyCloakUrl": "${KC_FRONTEND_URL}", + "KeyCloakRealm": "${KC_REALM_NAME}", 
+ "KeyCloakClientId": "${KC_JEMPI_CLIENT_ID}", + "showBrandLogo": ${REACT_APP_SHOW_BRAND_LOGO} +} diff --git a/JeMPI_Apps/JeMPI_UI/src/App.test.tsx b/JeMPI_Apps/JeMPI_UI/src/App.test.tsx deleted file mode 100644 index 233c6e381..000000000 --- a/JeMPI_Apps/JeMPI_UI/src/App.test.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { act, render, screen } from '@testing-library/react' -import App from './App' - -test('renders JeMPI logo', async () => { - await act(() => { - render() - }) - const linkElement = await screen.findAllByText(/MPI/i) - expect(linkElement[0]).toBeInTheDocument() -}) diff --git a/JeMPI_Apps/JeMPI_UI/src/App.tsx b/JeMPI_Apps/JeMPI_UI/src/App.tsx index 48cfb1cee..56738550d 100644 --- a/JeMPI_Apps/JeMPI_UI/src/App.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/App.tsx @@ -4,12 +4,20 @@ import { CssBaseline, ThemeProvider } from '@mui/material' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' import baseTheme from './themes/baseTheme' import baseRouter from 'router/BaseRouter' -import { ReactQueryDevtools } from 'DevTools' import { RouterProvider } from 'react-router-dom' +import { ConfigProvider } from 'hooks/useConfig' +import { ReactQueryDevtools } from '@tanstack/react-query-devtools' +import ScrollBackButtons from 'components/shared/ScrollBackButtons' +import { AuthProvider } from 'hooks/useAuth' +import { SnackbarProvider } from 'notistack' +import React from 'react' const queryClient = new QueryClient({ defaultOptions: { - queries: {} + queries: { + staleTime: 5 * (60 * 1000), + cacheTime: 10 * (60 * 1000) + } } }) @@ -17,10 +25,17 @@ const App = () => { return ( - - - - + + + + + + + + + + + ) } diff --git a/JeMPI_Apps/JeMPI_UI/src/DevTools.tsx b/JeMPI_Apps/JeMPI_UI/src/DevTools.tsx deleted file mode 100644 index a0103ece1..000000000 --- a/JeMPI_Apps/JeMPI_UI/src/DevTools.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { config } from 'config' -import { lazy } from 'react' - -export const ReactQueryDevtools = !config.isDev - ? 
() => <> - : lazy(() => - import('@tanstack/react-query-devtools').then(res => ({ - default: res.ReactQueryDevtools - })) - ) diff --git a/JeMPI_Apps/JeMPI_UI/src/components/browseRecords/BrowseRecords.tsx b/JeMPI_Apps/JeMPI_UI/src/components/browseRecords/BrowseRecords.tsx index d87034885..99ac3c754 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/browseRecords/BrowseRecords.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/browseRecords/BrowseRecords.tsx @@ -2,6 +2,7 @@ import { Accordion, AccordionDetails, AccordionSummary, + Box, Container, Divider, FormControlLabel, @@ -10,7 +11,12 @@ import { Switch, Typography } from '@mui/material' -import { DataGrid, GridColDef, GridRenderCellParams } from '@mui/x-data-grid' +import { + DataGrid, + GridColDef, + GridRenderCellParams, + gridClasses +} from '@mui/x-data-grid' import ApiErrorMessage from 'components/error/ApiErrorMessage' import { useAppConfig } from 'hooks/useAppConfig' import { @@ -29,16 +35,22 @@ import { useEffect, useMemo, useState } from 'react' import { isPatientCorresponding } from 'hooks/useSearch' import { useQuery } from '@tanstack/react-query' import { AxiosError } from 'axios' -import ApiClient from 'services/ApiClient' import ExpandMoreIcon from '@mui/icons-material/ExpandMore' import PageHeader from 'components/shell/PageHeader' -import { LocalizationProvider, DesktopDatePicker } from '@mui/x-date-pickers' +import { + LocalizationProvider, + DesktopDatePicker, + DateTimePicker +} from '@mui/x-date-pickers' import { AdapterDayjs } from '@mui/x-date-pickers/AdapterDayjs' import dayjs, { Dayjs } from 'dayjs' import getCellComponent from 'components/shared/getCellComponent' import { useNavigate, useSearchParams } from 'react-router-dom' import { Search } from '@mui/icons-material' +import { useConfig } from 'hooks/useConfig' +import CustomPagination from 'components/shared/CustomDataGridPagination' +// TODO: Later - We can update this at a later stage, such the field configuration info can contain the 
getAlignment, since this can be dynamic const getAlignment = (fieldName: string) => fieldName === 'givenName' || fieldName === 'familyName' || @@ -49,19 +61,20 @@ const getAlignment = (fieldName: string) => const Records = () => { const navigate = useNavigate() + const { apiClient } = useConfig() const { getFieldsByGroup } = useAppConfig() - + const [startDateFilter, setStartDateFilter] = useState( + dayjs().startOf('day') + ) + const [endDateFilter, setEndDateFilter] = useState( + dayjs().endOf('day') + ) const [searchQuery, setSearchQuery] = useState>([]) - - const [dateFilter, setDateFilter] = useState(dayjs()) - const [dateSearch, setDateSearch] = useState(dayjs()) - const [searchParams, setSearchParams] = useSearchParams() - - const [isFetchingInteractions, setIsFetchingInteractions] = useState( + const [isFetchingInteractions, setIsFetchingInteractions] = useState( searchParams.get('isFetchingInteractions') - ? JSON.parse(searchParams.get('isFetchingInteractions') as string) + ? JSON.parse(searchParams.get('isFetchingInteractions') as string) == "true" : false ) @@ -71,7 +84,7 @@ const Records = () => { : [], limit: searchParams.get('limit') ? JSON.parse(searchParams.get('limit') as string) - : 10, + : 25, offset: searchParams.get('offset') ? 
JSON.parse(searchParams.get('offset') as string) : 0, @@ -108,22 +121,14 @@ const Records = () => { ApiSearchResult, AxiosError >({ - queryKey: [ - 'golden-records', - JSON.stringify(filterPayload.parameters), - filterPayload.offset, - filterPayload.limit, - filterPayload.sortAsc, - filterPayload.sortBy - ], + queryKey: ['golden-records', JSON.stringify(filterPayload)], queryFn: async () => - (await ApiClient.searchQuery( + (await apiClient.searchQuery( filterPayload, true )) as ApiSearchResult, refetchOnWindowFocus: false, - keepPreviousData: true, - staleTime: 1000 * 60 + keepPreviousData: true }) const rows = useMemo(() => { @@ -137,16 +142,15 @@ const Records = () => { return acc }, []) }, [isFetchingInteractions, data]) - useEffect(() => { setSearchParams( Object.entries(filterPayload).reduce( (acc, [k, v]) => { acc[k] = JSON.stringify(v) return acc - // eslint-disable-next-line @typescript-eslint/no-explicit-any }, - { isFetchingInteractions: isFetchingInteractions } as any + // eslint-disable-next-line @typescript-eslint/no-explicit-any + { isFetchingInteractions } as any ) ) // eslint-disable-next-line react-hooks/exhaustive-deps @@ -161,11 +165,13 @@ const Records = () => { } const onFilter = (query: SearchParameter[]) => { + const startDate = startDateFilter.toJSON() + const endDate = endDateFilter.toJSON() setFilterPayload({ ...filterPayload, parameters: [ { - value: dateFilter.toJSON(), + value: `${startDate}_${endDate}`, distance: -1, fieldName: 'auxDateCreated' }, @@ -180,12 +186,6 @@ const Records = () => { : '' } - const changeSelectedFileterDate = (date: Dayjs | null) => { - if (date) { - setDateFilter(date) - } - } - const changeSelectedSearchDate = (date: Dayjs | null) => { if (date) { setDateSearch(date) @@ -196,7 +196,6 @@ const Records = () => { { - changeSelectedFileterDate(value)} - slotProps={{ - textField: { - variant: 'outlined', - label: 'Date' - } - }} - /> + + value && setStartDateFilter(value)} + slotProps={{ + textField: { + variant: 
'outlined', + label: 'Start Date' + } + }} + /> + value && setEndDateFilter(value)} + slotProps={{ + textField: { + variant: 'outlined', + label: 'End Date' + } + }} + /> + { - - } - aria-controls="panel1a-content" - id="panel1a-header" - > - Search within filtered results - - - - - changeSelectedSearchDate(value)} - slotProps={{ - textField: { - variant: 'outlined', - label: 'Date' - } - }} - /> - + {/* Search will be refactored in the future to be part of filter */} + {false && ( + + } + aria-controls="panel1a-content" + id="panel1a-header" + > + + Search within filtered results + + + + + + changeSelectedSearchDate(value)} + slotProps={{ + textField: { + variant: 'outlined', + label: 'Date' + } + }} + /> + - setSearchQuery([])} - /> - - - + setSearchQuery([])} + /> + + + + )} Search result uid} @@ -304,9 +339,10 @@ const Records = () => { page: filterPayload.offset / filterPayload.limit, pageSize: filterPayload.limit }} + slots={{ pagination: CustomPagination }} columns={columns} rows={rows} - pageSizeOptions={[10, 25, 50, 100]} + pageSizeOptions={[25, 50, 100]} onRowDoubleClick={params => { if ('linkRecords' in params.row) { navigate({ diff --git a/JeMPI_Apps/JeMPI_UI/src/components/common/LoadingSpinner.tsx b/JeMPI_Apps/JeMPI_UI/src/components/common/LoadingSpinner.tsx index f91035992..697161b12 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/common/LoadingSpinner.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/common/LoadingSpinner.tsx @@ -1,10 +1,11 @@ import { Backdrop, CircularProgress } from '@mui/material' -const LoadingSpinner = () => { +const LoadingSpinner = ({ id }: { id?: string }) => { return ( theme.zIndex.drawer + 1 }} open={true} + id={id || 'loading-spinner'} > diff --git a/JeMPI_Apps/JeMPI_UI/src/components/customSearch/AddFieldOrGroupButton.tsx b/JeMPI_Apps/JeMPI_UI/src/components/customSearch/AddFieldOrGroupButton.tsx index 888b3f238..bb9ba1d2b 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/customSearch/AddFieldOrGroupButton.tsx +++ 
b/JeMPI_Apps/JeMPI_UI/src/components/customSearch/AddFieldOrGroupButton.tsx @@ -3,6 +3,7 @@ import { Button } from '@mui/material' import { SearchParameter } from '../../types/SimpleSearch' interface AddFieldOrGroupButtonProps { + // eslint-disable-next-line @typescript-eslint/no-explicit-any onClick: (obj: any) => void initialCustomSearchValues: | SearchParameter diff --git a/JeMPI_Apps/JeMPI_UI/src/components/customSearch/CustomSearch.tsx b/JeMPI_Apps/JeMPI_UI/src/components/customSearch/CustomSearch.tsx index ddd369192..0aeaced89 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/customSearch/CustomSearch.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/customSearch/CustomSearch.tsx @@ -32,7 +32,6 @@ const CustomSearch: React.FC = () => { { + const { children, value, index, ...other } = props + return ( + + ) +} + +const tabProps = (index: number) => { + return { + id: `dashboard-tab-${index}`, + 'aria-controls': `dashboard-tabpanel-${index}` + } +} + +const Dashboard = () => { + const dashboardData = useDashboardData() + + console.log(dashboardData) + const [currentTabIndex, setCurrentTabIndex] = useState(0) + const handleChangeTab = (event: React.SyntheticEvent, newValue: number) => { + setCurrentTabIndex(newValue) + } + return ( + + + + Confusion Matrix} + {...tabProps(0)} + /> + M & U Values} + {...tabProps(1)} + /> + Import Process Status} + {...tabProps(2)} + /> + + + + + + + + + Records + + + } + iconBackgroundColor={'#FFD700'} + /> + + + } + iconBackgroundColor={'primary.main'} + /> + + + + + + + Notifications + + + + } + iconBackgroundColor={'#76ff03'} + /> + + + + } + iconBackgroundColor={pink[600]} + /> + + + + + + + + + + + + Confusion Matrix + + + + + + + + + + + + + + + + + + + ) +} + +export default Dashboard diff --git a/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/BetaFscoreWidget.tsx b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/BetaFscoreWidget.tsx new file mode 100644 index 000000000..1d584cd7b --- /dev/null +++ 
b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/BetaFscoreWidget.tsx @@ -0,0 +1,29 @@ +import { Box, Grid } from '@mui/material' +import CountWidget from './CountWidgetWidget' + +function BetaFscoreWidget({data, ...rest}: any) { + return ( + + Beta F-scores + + + + + + + + + + + + + ) +} + +export default BetaFscoreWidget diff --git a/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ConfusionMatrixWidget.tsx b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ConfusionMatrixWidget.tsx new file mode 100644 index 000000000..12b8b40a2 --- /dev/null +++ b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ConfusionMatrixWidget.tsx @@ -0,0 +1,198 @@ +import { Box, Divider, Grid, Typography } from '@mui/material' + +const ConfusionMatrix = ({data, ...rest}: any) => { + return ( + <> + + + + False Negatives + {data && data.tptnMatrix.falseNegative} + + + + + True Positives + + + + {data && data.tptnMatrix.truePositive.toLocaleString()} + + + + + + + True Negatives + {data && data.tptnMatrix.trueNegative} + + + + + {'False Positives'} + + + {data && data.tptnMatrix.falsePositive.toLocaleString()} + + + + + + + + + + Precision + + + + + + {data && `${data.tptnMatrix.truePositive} / ${data.tptnMatrix.truePositive + data.tptnMatrix.falsePositive} =`} + + + + + + {data && (data.tptnMatrix.truePositive / (data.tptnMatrix.truePositive + data.tptnMatrix.falsePositive)).toFixed(5)} + + + + + + + + + + Recall + + + + + + {data && `${data.tptnMatrix.truePositive} / ${data.tptnMatrix.truePositive + data.tptnMatrix.falseNegative} =`} + + + + + + {data && (data.tptnMatrix.truePositive / (data.tptnMatrix.truePositive + data.tptnMatrix.falseNegative)).toFixed(5) } + + + + + + + + + + + + + ) +} + +export default ConfusionMatrix diff --git a/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/CountWidgetWidget.tsx b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/CountWidgetWidget.tsx new file mode 100644 index 000000000..1d77296e2 --- /dev/null +++ 
b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/CountWidgetWidget.tsx @@ -0,0 +1,67 @@ +import { Box, Avatar, Typography } from '@mui/material' +import React from 'react' + +type countWidgetType = { + label: string + value: number + icon?: React.ReactElement + iconBackgroundColor?: string + secondValue?: number +} +function CountWidget({ + label, + value, + icon, + iconBackgroundColor, + secondValue +}: countWidgetType) { + return ( + + {icon && ( + + {icon} + + )} + + {label} + + + {value} + + {!!secondValue && ( + + {secondValue} + + )} + + + + ) +} + +export default CountWidget diff --git a/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ImportProcessWidget.tsx b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ImportProcessWidget.tsx new file mode 100644 index 000000000..2cd0e3fe6 --- /dev/null +++ b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/ImportProcessWidget.tsx @@ -0,0 +1,79 @@ +import { CircularProgressProps, CircularProgress, Typography, LinearProgressProps, LinearProgress } from '@mui/material' +import { Box } from '@mui/system' +import React, { useEffect, useState } from 'react' + +function CircularProgressWithLabel( + props: CircularProgressProps & { value: number } +) { + return ( + + + + {`${Math.round(props.value)}%`} + + + ) +} + +function LinearProgressWithLabel( + props: LinearProgressProps & { value: number } +) { + return ( + + + + + + {`${Math.round( + props.value + )}%`} + + + ) +} +export const ImportProcessWidget = ({data, ...rest}: any) => { + + let progress = 0 + + if (data){ + // TODO: data.toFileSize * 4 is just rough estimate. Need to this properly later + progress = data.sizeCompleted >= (data.toFileSize * 4) ? 100 : (data.sizeCompleted/(data.toFileSize * 4)) * 100 + } + + return ( + + Import Process + + {data && data.filename} + + + + + {(data ? 
data.totalCompleted : 0) + ' Processed'} + + + + ) +} diff --git a/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/MandUWidget.tsx b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/MandUWidget.tsx new file mode 100644 index 000000000..ba8da2f6f --- /dev/null +++ b/JeMPI_Apps/JeMPI_UI/src/components/dashboard/widgets/MandUWidget.tsx @@ -0,0 +1,56 @@ +import { Box } from '@mui/material' +import Table from '@mui/material/Table' +import TableBody from '@mui/material/TableBody' +import TableCell from '@mui/material/TableCell' +import TableContainer from '@mui/material/TableContainer' +import TableHead from '@mui/material/TableHead' +import TableRow from '@mui/material/TableRow' +import Paper from '@mui/material/Paper' + +function createData(fieldName: string, mValue: number, uValue: number) { + return { fieldName, mValue, uValue } +} +const rows = [ + createData('Given Name', 75, 6.0), + createData('Family Name', 237, 9.0), + createData('Gender', 0.5, 0.7), + createData('Birthdate', 305, 3.7), + createData('Phone', 305, 3.7), + createData('National Id ', 305, 3.7), + createData('City ', 305, 3.7) +] +function MandU({data, ...rest}: any) { + + + return ( + + M & U Values + + + + + Field Name + M value + U value + + + + {data && Object.entries(data).map(([fieldId, m_and_u]) => ( + + + {fieldId} + + + {parseFloat((m_and_u as any).m).toFixed(7)} + + {parseFloat((m_and_u as any).u).toFixed(7)} + + ))} + +
+
+
+ ) +} + +export default MandU diff --git a/JeMPI_Apps/JeMPI_UI/src/components/error/ErrorBoundary.tsx b/JeMPI_Apps/JeMPI_UI/src/components/error/ErrorBoundary.tsx index 9d2c10ab3..7739feb48 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/error/ErrorBoundary.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/error/ErrorBoundary.tsx @@ -29,6 +29,7 @@ class ErrorBoundary extends Component { } public componentDidCatch(error: Error, errorInfo: ErrorInfo) { + // eslint-disable-next-line no-console console.error('Uncaught error:', error, errorInfo) } diff --git a/JeMPI_Apps/JeMPI_UI/src/components/import/DropZone.tsx b/JeMPI_Apps/JeMPI_UI/src/components/import/DropZone.tsx index 7dbeee06c..c96522f88 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/import/DropZone.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/import/DropZone.tsx @@ -1,13 +1,14 @@ import { UploadFile as UploadFileIcon } from '@mui/icons-material' import { + Avatar, Box, - CardActions, - Checkbox, - Container, - FormControl, + Card, + CardContent, FormControlLabel, + Grid, Radio, RadioGroup, + Slider, TextField, Typography } from '@mui/material' @@ -16,50 +17,67 @@ import { AxiosError, AxiosProgressEvent, AxiosRequestConfig } from 'axios' import { useSnackbar } from 'notistack' import { FC, useRef, useState } from 'react' import { FileRejection, useDropzone } from 'react-dropzone' -import ApiClient from '../../services/ApiClient' -import { FileObj, UploadStatus } from '../../types/FileUpload' +import { + FileObj, + UploadStatus, + importQueriesType +} from '../../types/FileUpload' import Button from '../shared/Button' -import './Import.css' import UploadFileListItem from './UploadFileListItem' import { formatBytesSize, megabytesToBytes } from 'utils/formatters' +import { useConfig } from 'hooks/useConfig' +import { useFormik } from 'formik' -const MAX_UPLOAD_FILE_SIZE_IN_BYTES = megabytesToBytes( - +(process.env.REACT_APP_MAX_UPLOAD_CSV_SIZE_IN_MEGABYTES ?? 
128) -) const DropZone: FC = () => { + const { enqueueSnackbar } = useSnackbar() const [fileObjs, setFilesObj] = useState() const abortControllerRef = useRef(new AbortController()) - const { enqueueSnackbar } = useSnackbar() + const { apiClient, config } = useConfig() + const MAX_UPLOAD_FILE_SIZE_IN_BYTES = megabytesToBytes( + config.maxUploadCsvSize + ) + const { + handleChange: handleImportFormChange, + handleSubmit, + values: FormValues, + setFieldValue + } = useFormik({ + initialValues: { + reporting: false, + computing: 0, + leftMargin: 0.65, + threshold: 0.7, + rightMargin: 0.75, + windowSize: 0.1 + }, + onSubmit: () => { + if (fileObjs?.file) { + uploadFileMutation.mutate(fileObjs) + } + } + }) const onDrop = ( acceptedFiles: File[], fileRejections: FileRejection[] ): void => { - validate(acceptedFiles, fileRejections) - setFilesObj({ - file: acceptedFiles[0], - progress: 0, - status: UploadStatus.Pending - }) + if (validate(fileRejections)) { + setFilesObj({ + file: acceptedFiles[0], + progress: 0, + status: UploadStatus.Pending + }) + } } - const validate = ( - acceptedFiles: File[], - fileRejections: FileRejection[] - ): void => { + const validate = (fileRejections: FileRejection[]): boolean => { if (fileRejections.length > 0) { enqueueSnackbar(fileRejections[0].errors[0].message, { variant: 'error' }) - return - } - - if (uploadFileMutation.isLoading) { - enqueueSnackbar('Please wait for current import to be completed', { - variant: 'warning' - }) - return + return false } + return true } const { getRootProps, getInputProps } = useDropzone({ @@ -69,15 +87,22 @@ const DropZone: FC = () => { maxSize: MAX_UPLOAD_FILE_SIZE_IN_BYTES }) - const uploadFile = async (fileObj: FileObj) => { - return await ApiClient.uploadFile(createFileUploadAxiosConfig(fileObj)) + const uploadFile = async ( + fileObj: FileObj, + importQueries: importQueriesType + ) => { + return await apiClient.uploadFile( + createFileUploadAxiosConfig(fileObj, importQueries) + ) } const 
createFileUploadAxiosConfig = ( - fileObj: FileObj + fileObj: FileObj, + importQueries: importQueriesType ): AxiosRequestConfig => { const formData = new FormData() formData.set('csv', fileObj.file) + formData.set('queries', JSON.stringify(importQueries)) return { signal: abortControllerRef.current.signal, headers: { @@ -85,54 +110,27 @@ const DropZone: FC = () => { }, data: formData, onUploadProgress: (progressEvent: AxiosProgressEvent) => { - if (progressEvent.total) { - const progress = (progressEvent.loaded / progressEvent.total) * 100 - updateFileUploadProgress(fileObj, progress) - } - } - } - } - - const updateFileUploadProgress = ( - fileUploadObj: FileObj, - progress: number - ) => { - if (fileObjs) - setFilesObj((prev: FileObj | undefined) => { - if (prev?.file.name === fileUploadObj.file.name) { - return { ...prev, progress, status: getFileUploadStatus(fileObjs) } - } - }) - } - - const setUploadStatus = (fileUploadObj: FileObj, status: UploadStatus) => { - setFilesObj((prev: FileObj | undefined) => { - if (prev?.file.name === fileUploadObj.file.name) { - prev.status = status - if (status === UploadStatus.Failed) { - prev.progress = 0 - } + setFilesObj((prev: FileObj | undefined) => { + if (prev?.file.name === fileObj.file.name && progressEvent.total) { + return { + ...prev, + progress: Math.round( + (progressEvent.loaded * 100) / progressEvent.total + ), + status: UploadStatus.Loading + } + } + }) } - return prev - }) - } - - const getFileUploadStatus = (fileObj: FileObj) => { - if (fileObj.progress === 0) { - return UploadStatus.Pending - } else if (fileObj.progress > 0 && fileObj.progress < 100) { - return UploadStatus.Loading - } else if (fileObj.progress === 100) { - return UploadStatus.Complete - } else { - return UploadStatus.Failed } } const uploadFileMutation = useMutation({ - mutationFn: uploadFile, - onSuccess: (data, fileObj) => { - setUploadStatus(fileObj, UploadStatus.Complete) + mutationFn: (fileObjs: FileObj) => uploadFile(fileObjs, 
FormValues), + onSuccess: (_, fileObj) => { + setFilesObj((prev: FileObj | undefined) => + prev ? { ...prev, status: UploadStatus.Complete } : undefined + ) enqueueSnackbar(`${fileObj.file.name} file imported`, { variant: 'success' }) @@ -145,7 +143,9 @@ const DropZone: FC = () => { variant: 'error' } ) - setUploadStatus(data, UploadStatus.Failed) + setFilesObj((prev: FileObj | undefined) => + prev ? { ...prev, status: UploadStatus.Failed } : undefined + ) } }) @@ -156,21 +156,11 @@ const DropZone: FC = () => { setFilesObj(undefined) } - const handleUpload = () => { - if ( - fileObjs?.status === UploadStatus.Complete || - fileObjs?.status === UploadStatus.Failed - ) { - return - } - if (fileObjs) uploadFileMutation.mutate(fileObjs) - } - const handleRemoveFile = (): void => { setFilesObj(undefined) } - const uploadList = ( + const uploadList: JSX.Element = ( <> {fileObjs && ( { ) return ( - - -
- - - - - - Click to upload or drag and drop - - - CSV (max. {formatBytesSize(MAX_UPLOAD_FILE_SIZE_IN_BYTES)}) - -
-
- {uploadList} - - - TB 1 - - - - - - CB - - } - label="CB1" - labelPlacement="start" - /> - } - label="CB2" - labelPlacement="start" - /> - - - - RB - - - + + + +
+ + + + Machine Learning Configuration + + + + { + handleImportFormChange({ + target: { name: 'computing', value: 0 } + }) + }} + checked={FormValues.computing === 0} + /> + } + label={ + + { + " Use current M & U's (computed periodically, only using the Client Registry)." + } + + } + /> + + handleImportFormChange({ + target: { name: 'computing', value: 1 } + }) + } + checked={FormValues.computing === 1} + /> + } + label={ + + { + ' Before linking, compute M & U values using the interactions from the CSV file.' + } + + } + /> + { + handleImportFormChange({ + target: { name: 'computing', value: 2 } + }) + }} + checked={FormValues.computing === 2} + /> + } + label={ + + { + " Before linking, compute M & U values using the interactions from the CSV file & the client registry's golden records." + } + + } + /> + + + + Threshold + + + + { + if (!Array.isArray(value)) return + const [leftMargin, threshold, rightMargin] = value + if ( + 0 < threshold && + threshold < 1 && + threshold > leftMargin && + threshold < rightMargin + ) { + setFieldValue('threshold', threshold) + } + if (threshold > leftMargin) + setFieldValue('leftMargin', leftMargin) + + if (threshold < rightMargin) + setFieldValue('rightMargin', rightMargin) + }} + getAriaValueText={(e: number) => e.toString()} + valueLabelDisplay="auto" + step={0.05} + marks + min={0.19} + max={0.96} + value={[ + FormValues.leftMargin, + FormValues.threshold, + FormValues.rightMargin + ]} + defaultValue={[ + FormValues.leftMargin, + FormValues.threshold, + FormValues.rightMargin + ]} + sx={{ + '& .MuiSlider-thumb': { + "&[data-index='0']": { + backgroundColor: 'red' + }, + "&[data-index='1']": { + backgroundColor: 'green' + } + } + }} + track={false} + /> + + + { + if (+e.target.value < FormValues.threshold) { + handleImportFormChange(e) + } + }} + inputProps={{ + min: 0.19, + max: FormValues.threshold, + step: 0.01 + }} + InputLabelProps={{ + style: { color: 'red' } + }} + fullWidth + /> + + + { + if ( + +e.target.value > 
FormValues.leftMargin && + +e.target.value < FormValues.rightMargin + ) { + handleImportFormChange(e) + } + }} + inputProps={{ min: 0.2, max: 0.95, step: 0.01 }} + InputLabelProps={{ + style: { color: 'green' } + }} + fullWidth + /> + + + { + if (+e.target.value > FormValues.threshold) { + handleImportFormChange(e) + } + }} + inputProps={{ + min: FormValues.threshold, + max: 0.96, + step: 0.01 + }} + InputLabelProps={{ + style: { color: '#1976D2' } + }} + fullWidth + /> + + + e.toString()} + valueLabelDisplay="auto" + onChange={handleImportFormChange} + name="windowSize" + step={0.01} + marks + min={0} + max={0.2} + /> + + + + + + + Reports + + + + + } + label={ + + {' Link records only (do not generate report).'} + + } + /> + } + label={ + + { + ' Create CSV report and send notification when input file is created.' + } + + } + /> + + + +
+
+ - } label="Opt1" /> - } label="Opt2" /> - } label="Opt3" /> -
-
-
- - - - -
+ + {!fileObjs?.file ? ( + + + + + + + Click to upload or drag and drop + + + CSV (max. {formatBytesSize(MAX_UPLOAD_FILE_SIZE_IN_BYTES)}) + + + ) : ( + uploadList + )} + + + + + + + +
+
+ + ) } export default DropZone diff --git a/JeMPI_Apps/JeMPI_UI/src/components/import/Import.css b/JeMPI_Apps/JeMPI_UI/src/components/import/Import.css deleted file mode 100644 index b0d901505..000000000 --- a/JeMPI_Apps/JeMPI_UI/src/components/import/Import.css +++ /dev/null @@ -1,71 +0,0 @@ -/* Drop Zone Component */ -.dropzone { - color: rgba(0, 0, 0, 0.87); - margin: auto; - border: 1px dashed rgba(0, 0, 0, 0.12); - border-radius: 4px; - max-width: 600px; - max-height: 152px; - text-align: center; - margin-top: 5%; - margin-bottom: 5%; -} - -.dropzone-inner { - margin: 5%; -} - -.dropzone.is-drag-accept { - border-color: #00e676; -} - -.dropzone.is-drag-reject { - border-color: #ff1744; -} - -.dropzone.is-focused { - border-color: #64b5f6; -} - -/* Upload File List Item Component */ -.import__upload-list-item { - max-width: 575px; - margin: 5% auto; - width: 100%; -} - -.import__upload-list-item.failed { - color: red !important; -} - -.import__upload-icon { - border-radius: 100%; - background-color: #e3eef9; - width: 36px; - height: 36px; - font-size: 15px; - text-align: center; - padding: 5px; - margin: auto; - color: #1976d2; -} - -.failed .import__upload-icon { - background-color: #f9e3e3; - color: red; -} - -.import__close-icon { - color: currentColor; - background: none; - cursor: pointer; -} - -.import__upload-progress-bar { - max-width: 100%; - margin-top: 10px; -} - -.failed .import__upload-progress-bar span { - background-color: red !important; -} diff --git a/JeMPI_Apps/JeMPI_UI/src/components/import/Import.tsx b/JeMPI_Apps/JeMPI_UI/src/components/import/Import.tsx index 9fb52d7ac..ba8c431bf 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/import/Import.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/import/Import.tsx @@ -1,12 +1,5 @@ import { UploadFile as UploadFileIcon } from '@mui/icons-material' -import { - Card, - CardContent, - Container, - Divider, - Stack, - Typography -} from '@mui/material' +import { Container, Divider, Stack } from 
'@mui/material' import PageHeader from '../shell/PageHeader' import DropZone from './DropZone' @@ -22,18 +15,10 @@ const Import = () => { title: 'Import' } ]} - description={'Import or submit Patient records to MPI'} /> - - - - - Bulk Upload - - - - + +
) diff --git a/JeMPI_Apps/JeMPI_UI/src/components/import/UploadFileListItem.tsx b/JeMPI_Apps/JeMPI_UI/src/components/import/UploadFileListItem.tsx index 1078de7d3..a48555b54 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/import/UploadFileListItem.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/import/UploadFileListItem.tsx @@ -1,13 +1,11 @@ import { - Circle as CircleIcon, Close as CloseIcon, UploadFile as UploadFileIcon } from '@mui/icons-material' -import { Box, Grid, LinearProgress, Typography } from '@mui/material' +import { Avatar, Box, Grid, LinearProgress, Typography } from '@mui/material' import { FC } from 'react' import { FileObj, UploadStatus } from '../../types/FileUpload' -import './Import.css' -import { truncateString } from 'utils/formatters' +import { formatBytesSize } from 'utils/formatters' interface FileObjProps { fileObj: FileObj @@ -15,22 +13,12 @@ interface FileObjProps { } const UploadFileListItem: FC = (props: FileObjProps) => { - const addStatusClass = (status: UploadStatus): string => { - switch (status) { - case UploadStatus.Failed: - return 'failed' - default: - return '' - } - } - return ( = (props: FileObjProps) => { textAlign="center" padding={'0.5rem'} > - + - + = (props: FileObjProps) => { justifyContent="center" alignItems="center" > - - {truncateString(props.fileObj.file.name, 30)} - - {Math.round(props.fileObj.file.size / 1000)}kb - - {UploadStatus[props.fileObj.status]} + {props.fileObj.file.name} + + Size: + + {formatBytesSize(props.fileObj.file.size)} + + Status: + + {UploadStatus[props.fileObj.status]} + {props.fileObj.progress > 0 && ` ${props.fileObj.progress}%`} + + = (props: FileObjProps) => { > props.handleRemoveFile(props.fileObj)} > @@ -82,7 +78,6 @@ const UploadFileListItem: FC = (props: FileObjProps) => { { const navigate = useNavigate() - const selectedDate = dayjs().locale({ - ...locale - }) - const [date, setDate] = React.useState(selectedDate) + const { apiClient } = useConfig() + const [selectedStates, 
setSelectedStates] = useState([NotificationState.OPEN]) + const [startDateFilter, setStartDateFilter] = useState( + dayjs().startOf('day') + ) + const [endDateFilter, setEndDateFilter] = useState( + dayjs().endOf('day') + ) const [paginationModel, setPaginationModel] = useState({ page: 0, - pageSize: 10 + pageSize: 25 }) const [filterModel, setFilterModel] = useState({ - items: [{ field: 'state', value: 'New', operator: 'contains' }] + items: [{ field: 'state', value: 'OPEN', operator: 'contains' }] }) - const { data, error, isLoading, isFetching } = useQuery< + + const { data, error, isLoading, isFetching, refetch } = useQuery< Notifications, AxiosError >({ queryKey: [ 'notifications', - date.format('YYYY-MM-DD'), paginationModel.page, paginationModel.pageSize, filterModel ], queryFn: () => - ApiClient.getMatches( + apiClient.getMatches( paginationModel.pageSize, paginationModel.page * paginationModel.pageSize, - date.format('YYYY-MM-DD'), - filterModel.items[0].value ? filterModel.items[0].value : '' + startDateFilter.format('YYYY-MM-DD HH:mm:ss'), + endDateFilter.format('YYYY-MM-DD HH:mm:ss'), + selectedStates ), - refetchOnWindowFocus: false + refetchOnWindowFocus: false, + keepPreviousData: true }) const onFilterChange = useCallback((filterModel: GridFilterModel) => { setFilterModel({ ...filterModel }) }, []) - if (isLoading || isFetching) { - return - } - - if (error) { - return - } - - if (!data) { - return - } - - const changeSelectedDate = (date: Dayjs | null) => { - if (date) { - setDate(date) - } - } return ( { /> - - - changeSelectedDate(value)} + + + value && setStartDateFilter(value)} slotProps={{ textField: { variant: 'outlined', - label: 'Date' + label: 'Start Date' } }} /> - - + value && setEndDateFilter(value)} + slotProps={{ + textField: { + variant: 'outlined', + label: 'End Date' + } + }} + /> + + + + - } + {!data && !isLoading && !isFetching && } + setPaginationModel(model)} - paginationMode="server" - rowCount={data.pagination.total || 0} - 
filterMode="server" - filterModel={filterModel} - onFilterModelChange={debounce(onFilterChange, 3000)} - onRowDoubleClick={params => - navigate( - { - pathname: 'match-details' - }, - { - state: { - payload: { - notificationId: params.row.id, - patient_id: params.row.patient_id, - golden_id: params.row.golden_id, - score: params.row.score, - candidates: params.row.candidates - } + > + {data && ( + setPaginationModel(model)} + paginationMode="server" + rowCount={data.pagination.total || 0} + filterMode="server" + filterModel={filterModel} + loading={isLoading} + onFilterModelChange={debounce(onFilterChange, 3000)} + onRowDoubleClick={params => + navigate( + { + pathname: 'match-details' + }, + { + state: { + payload: { + notificationId: params.row.id, + notificationType: params.row.type, + patient_id: params.row.patient_id, + golden_id: params.row.golden_id, + score: params.row.score, + candidates: params.row.candidates + } + } + } + ) } - ) - } - /> + /> + )} + diff --git a/JeMPI_Apps/JeMPI_UI/src/components/notificationWorklist/notificationsColumns.tsx b/JeMPI_Apps/JeMPI_UI/src/components/notificationWorklist/notificationsColumns.tsx index 893ad0eba..a093e24c9 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/notificationWorklist/notificationsColumns.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/notificationWorklist/notificationsColumns.tsx @@ -24,8 +24,6 @@ const NOTIFICATIONS_COLUMNS: GridColDef[] = [ headerName: 'Date', type: 'date', flex: 1, - sortable: true, - sortingOrder: ['desc'], filterable: false, headerClassName: 'super-app-theme--header', renderCell: (params: GridRenderCellParams) => diff --git a/JeMPI_Apps/JeMPI_UI/src/components/recordDetails/RecordDetails.tsx b/JeMPI_Apps/JeMPI_UI/src/components/recordDetails/RecordDetails.tsx index 8e59290b8..60bee4b90 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/recordDetails/RecordDetails.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/recordDetails/RecordDetails.tsx @@ -25,7 +25,6 @@ import PageHeader from 
'components/shell/PageHeader' import { useAppConfig } from 'hooks/useAppConfig' import { useSnackbar } from 'notistack' import { useState } from 'react' -import ApiClient from 'services/ApiClient' import { DisplayField, FieldChangeReq, FieldType } from 'types/Fields' import { PatientRecord, GoldenRecord, AnyRecord } from 'types/PatientRecord' import { sortColumns } from 'utils/helpers' @@ -33,6 +32,7 @@ import getCellComponent from 'components/shared/getCellComponent' import { AUDIT_TRAIL_COLUMNS } from 'utils/constants' import { AuditTrail } from 'types/AuditTrail' import { useLoaderData, useNavigate } from 'react-router-dom' +import { useConfig } from 'hooks/useConfig' export interface UpdatedFields { [fieldName: string]: { oldValue: unknown; newValue: unknown } @@ -42,6 +42,7 @@ const RecordDetails = () => { const uid = useLoaderData() const navigate = useNavigate() const { enqueueSnackbar } = useSnackbar() + const { apiClient } = useConfig() const { availableFields } = useAppConfig() const [isEditMode, setIsEditMode] = useState(false) const [updatedFields, setUpdatedFields] = useState({}) @@ -120,7 +121,7 @@ const RecordDetails = () => { queryKey: ['record-details', uid], queryFn: async () => { const recordId = uid as string - return await ApiClient.getFlatExpandedGoldenRecords([recordId]) + return await apiClient.getFlatExpandedGoldenRecords([recordId]) }, onSuccess: data => { setPatientRecord(data[0]) @@ -139,9 +140,9 @@ const RecordDetails = () => { queryFn: async () => { if (record) { if ('linkRecords' in record) { - return await ApiClient.getGoldenRecordAuditTrail(record.uid || '') + return await apiClient.getGoldenRecordAuditTrail(record.uid || '') } else { - return await ApiClient.getInteractionAuditTrail(record.uid || '') + return await apiClient.getInteractionAuditTrail(record.uid || '') } } throw new Error('Empty record') @@ -153,7 +154,7 @@ const RecordDetails = () => { const updateRecord = useMutation({ mutationKey: ['golden-record', record?.uid], 
mutationFn: async (req: FieldChangeReq) => { - return await ApiClient.updatedGoldenRecord(record?.uid as string, req) + return await apiClient.updatedGoldenRecord(record?.uid as string, req) }, onSuccess: () => { enqueueSnackbar(`Successfully saved patient records`, { @@ -254,7 +255,6 @@ const RecordDetails = () => { { icon: , title: 'Browse Records', link: '/browse-records' } ]} title={`Patient interactions`} - description={`Browse patient interactions for GID ${uid}`} /> = Pick> type PartialBy = Omit & Partial> -interface CustomDataGridProps extends PartialBy { +interface CustomDataGridProps extends PartialBy, 'columns'> { action?: (uid: string) => void } @@ -35,12 +36,12 @@ const getCellClassName = ( } else return '' } -const CustomDataGrid: React.FC = ({ +const CustomDataGrid = ({ sx, rows, action, ...props -}) => { +}: CustomDataGridProps) => { const { availableFields } = useAppConfig() const fieldColumns: GridColDef[] = availableFields.map( @@ -59,7 +60,7 @@ const CustomDataGrid: React.FC = ({ cellClassName: (params: GridCellParams) => fieldName === 'recordType' ? getRecordTypeClassName(params) - : getCellClassName(params, groups, rows[0]), + : getCellClassName(params, groups, rows[0] as any as AnyRecord), renderCell: (params: GridRenderCellParams) => getCellComponent(fieldName, params) }) @@ -67,26 +68,24 @@ const CustomDataGrid: React.FC = ({ const columns: GridColDef[] = useMemo( () => - action - ? [ - ...fieldColumns, - { - field: 'action', - type: 'action', - headerName: 'Action', - flex: 1, - sortable: false, - filterable: false, - align: 'center', - headerAlign: 'center', - headerClassName: 'super-app-theme--linkHeader', - renderCell: (params: GridRenderCellParams) => - getCellComponent('actions', params, () => { - if (action) action(params.row.uid) - }) - } - ] - : fieldColumns, + [ + ...fieldColumns, + { + field: 'action', + type: 'action', + headerName: action ? 
'Action' : '', + flex: 1, + sortable: false, + filterable: false, + align: 'center', + headerAlign: 'center', + headerClassName: 'super-app-theme--linkHeader', + renderCell: (params: GridRenderCellParams) => + action ? getCellComponent('actions', params, () => { + if (action) action(params.row.uid) + }) : null + } + ], [action, fieldColumns] ) diff --git a/JeMPI_Apps/JeMPI_UI/src/components/reviewLink/ReviewLink.tsx b/JeMPI_Apps/JeMPI_UI/src/components/reviewLink/ReviewLink.tsx index 606c2306f..70b0c43f7 100644 --- a/JeMPI_Apps/JeMPI_UI/src/components/reviewLink/ReviewLink.tsx +++ b/JeMPI_Apps/JeMPI_UI/src/components/reviewLink/ReviewLink.tsx @@ -13,9 +13,11 @@ import { useLinkReview } from 'hooks/useLinkReview' import { useSnackbar } from 'notistack' import { useState } from 'react' import { CustomSearchQuery, SearchQuery } from 'types/SimpleSearch' -import ApiClient from '../../services/ApiClient' -import { NotificationState } from '../../types/Notification' -import { AnyRecord } from '../../types/PatientRecord' +import { + AnyRecord, + GoldenRecord, + PatientRecord +} from '../../types/PatientRecord' import Loading from '../common/Loading' import ApiErrorMessage from '../error/ApiErrorMessage' import NotFound from '../error/NotFound' @@ -28,8 +30,11 @@ import LinkRecordsDialog from './LinkRecordsDialog' import CloseNotificationDialog from './CloseNotificationDialog' import UnlinkingDialog from './UnlinkingDialog' import { useLocation, useNavigate } from 'react-router-dom' +import { useConfig } from 'hooks/useConfig' +import { NotificationRequest } from 'types/BackendResponse' +import { RESOLUTION_TYPES } from 'utils/constants' -const getRowClassName = (type: string) => { +const getRowClassName = (type?: string) => { switch (type) { case 'Current': return 'super-app-theme--Current' @@ -57,6 +62,7 @@ const ReviewLink = () => { const [refineSearchQuery, setRefineSearchQuery] = useState< SearchQuery | CustomSearchQuery | undefined >(undefined) + const { apiClient } 
= useConfig() const { patientRecord, @@ -70,7 +76,8 @@ const ReviewLink = () => { const { linkRecords, createNewGoldenRecord } = useRelink() const mutateNotification = useMutation({ - mutationFn: ApiClient.updateNotification, + mutationFn: (request: NotificationRequest) => + apiClient.updateNotification(request), onError: (error: AxiosError) => { enqueueSnackbar(`Error updating notification: ${error.message}`, { variant: 'error' @@ -79,44 +86,40 @@ const ReviewLink = () => { } }) - const updateNotification = (state: NotificationState) => { + const updateNotification = () => { mutateNotification.mutate( { - notificationId: payload?.notificationId ? payload.notificationId : '', - state: state + notificationId: payload?.notificationId }, { onSuccess: () => { - if (state === NotificationState.Pending) { - enqueueSnackbar( - 'Notification kept as pending. Golden Record remains linked', - { - variant: 'warning' - } - ) - navigate('/notifications') - } + navigate('/notifications') } } ) } - const createGoldenRecord = (id: string) => { + const createGoldenRecord = (id: string, resolutionState:string) => { createNewGoldenRecord.mutate( { - patientID: payload?.patient_id || '', - goldenID: goldenRecord ? goldenRecord.uid : '', - newGoldenID: id + notificationId: payload?.notificationId, + notificationType: payload?.notificationType, + interactionId: payload.patient_id || '', + currentGoldenId: goldenRecord ? goldenRecord.uid : '', + resolutionState: resolutionState, + currentCandidates: payload?.notificationId ? 
(candidateGoldenRecords || []).map(c => c.uid) : [], + newGoldenId: id, + score: payload?.score }, { onSuccess: data => { if (payload?.notificationId) { - updateNotification(NotificationState.Actioned) + updateNotification() } enqueueSnackbar('New record linked', { variant: 'success' }) - navigate(`/record-details/${data.goldenUID}`) + navigate(`/record-details/${(data as any).goldenUID}`) }, onError: (error: AxiosError) => { enqueueSnackbar( @@ -131,24 +134,30 @@ const ReviewLink = () => { ) } - const linkToCandidateRecord = (id: string, status?: NotificationState) => { - linkRecords.mutate( - { - patientID: payload?.patient_id || '', - goldenID: goldenRecord ? goldenRecord.uid : '', - newGoldenID: id - }, - { - onSuccess: () => { - if (payload?.notificationId) { - updateNotification(status ?? NotificationState.Actioned) - navigate('/notifications') - } else { - navigate(`/record-details/${id}`) + const linkToCandidateRecord = (id: string, resolutionState: string) => { + goldenRecord && + linkRecords.mutate( + { + notificationId: payload?.notificationId, + notificationType: payload?.notificationType, + interactionId: payload.patient_id, + currentGoldenId: goldenRecord.uid, + resolutionState: resolutionState, + currentCandidates: payload?.notificationId ? (candidateGoldenRecords || []).map(c => c.uid) : [], + newGoldenId: id, + score: payload?.score + }, + { + onSuccess: () => { + if (payload?.notificationId) { + updateNotification() + navigate('/notifications') + } else { + navigate(`/record-details/${id}`) + } } } - } - ) + ) } const handleModalCancel = () => { @@ -180,16 +189,15 @@ const ReviewLink = () => { ...goldenRecord.linkRecords.filter( record => record.uid !== patientRecord?.uid ), - patientRecord, + ...(patientRecord ? [patientRecord] : []), goldenRecord ] : [] const handleOpenLinkedRecordDialog = (uid: string) => { const tableDataTemp: AnyRecord[] | undefined = payload?.notificationId - ? 
candidateGoldenRecords?.filter(d => d.uid === uid) - : thresholdCandidates?.filter(d => d.uid === uid) - + ? candidateGoldenRecords?.filter(ele => ele.uid === uid) + : thresholdCandidates?.filter(ele => ele.uid === uid) if (patientRecord && tableDataTemp) setTableData([patientRecord, ...tableDataTemp]) @@ -198,11 +206,7 @@ const ReviewLink = () => { } const handleCancel = () => { - if (payload?.notificationId) { - updateNotification(NotificationState.Pending) - } else { - navigate(`/record-details/${goldenRecord?.uid}`) - } + navigate(`/record-details/${goldenRecord?.uid}`) } return ( @@ -212,7 +216,6 @@ const ReviewLink = () => { breadcrumbs={[ { icon: , title: 'Browse Records', link: '/browse-records' } ]} - description="Review the patient record and possible matches in detail." buttons={[