diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index b4eb094389..ff9bd2a6c4 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -2,110 +2,57 @@ env:
RETENTION_DAYS: "10"
steps:
- - label: ":white_check_mark: Check Shell"
- key: "check-shell"
- command: ./ops/check.sh shell
-
- - label: ":lock: Check Security"
- key: "check-security"
- command: ./ops/check.sh security
-
- - label: ":lock: Check CSS px"
- key: "check-px"
- command: ./ops/check.sh px
-
- - label: ":lock: Deny CSS hex"
- key: "deny-css-hex-check"
- command: ./ops/check.sh hex
-
- - label: ":lock: Deny CSS rgba"
- key: "deny-css-rgba-check"
- command: ./ops/check.sh rgba
-
- - label: ":lock: Check .* in backend"
- key: "check-dot-star"
- command: ./ops/check.sh dot-star
-
- - label: ":white_check_mark: Check Backend"
- if: build.branch == "main" && build.message =~ /(?i)\[backend\]/
- key: "check-backend"
- command: ./ops/check.sh backend
-
- - label: ":white_check_mark: Check Frontend"
- if: build.branch == "main" && build.message =~ /(?i)\[frontend\]/
- key: "check-frontend"
- command: ./ops/check.sh frontend
-
- - label: ":mag: Check Frontend License"
- key: "check-frontend-license"
- commands: ./ops/check.sh frontend-license
-
- - label: ":mag: Check Backend License"
- key: "check-backend-license"
- commands: ./ops/check.sh backend-license
- plugins:
- - artifacts#v1.9.0:
- upload:
- - "backend/build/reports/dependency-license/**/*"
- name: "backend-license-report"
- expire_in: "${RETENTION_DAYS} days"
-
- label: ":cloudformation: Deploy infra"
if: build.branch == "main" && build.message =~ /(?i)\[infra\]/
key: "deploy-infra"
- depends_on:
- - "check-shell"
- - "check-security"
- - "check-frontend"
- - "check-px"
- - deny-css-rgba-check
- - deny-css-hex-check
- - "check-backend"
- - "check-frontend-license"
- - "check-backend-license"
env:
AWSHost: "$AWS_HOST"
AWSAccountId: "$AWS_ACCOUNT_ID"
AWSRegion: "$AWS_REGION"
command: ./ops/deploy.sh infra
+ - label: ":white_check_mark: GitHub Basic Check"
+ if: build.branch == "main"
+ key: "check-github-basic"
+ command: ./ops/check.sh github-basic-passed
+ env:
+ COMMIT_SHA: "$BUILDKITE_COMMIT"
+ GITHUB_TOKEN: "$E2E_TOKEN_GITHUB"
+ BRANCH: "$BUILDKITE_BRANCH"
+ depends_on:
+ - "deploy-infra"
+
- label: ":react: Build Frontend"
if: build.branch == "main" && build.message =~ /(?i)\[frontend\]/
key: "build-frontend"
- depends_on: "deploy-infra"
+ depends_on:
+ - "check-github-basic"
command: ./ops/build.sh frontend
- label: ":java: Build Backend"
if: build.branch == "main" && build.message =~ /(?i)\[backend\]/
key: "build-backend"
- depends_on: "deploy-infra"
+ depends_on:
+ - "check-github-basic"
command: ./ops/build.sh backend
- label: ":rocket: Deploy e2e"
- if: build.branch == "main" && (build.message =~ /(?i)\[frontend\]/ || build.message =~ /(?i)\[backend\]/)
+ if: build.branch == "main"
key: "deploy-e2e"
depends_on:
- "build-frontend"
- "build-backend"
+ - "check-github-basic"
command: ./ops/deploy.sh e2e
- label: ":rocket: Run e2e"
- branches: main
+ if: build.branch == "main"
key: "check-e2e"
depends_on:
- "deploy-e2e"
- - "check-shell"
- - "check-security"
- - "check-frontend"
- - "check-px"
- - deny-css-rgba-check
- - deny-css-hex-check
- - "check-backend"
- - "check-frontend-license"
- - "check-backend-license"
command: ./ops/check.sh e2e-container
plugins:
- - artifacts#v1.9.0:
+ - artifacts#v1.9.3:
upload: "./e2e-reports.tar.gz"
expire_in: "${RETENTION_DAYS} days"
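The new `:white_check_mark: GitHub Basic Check` step above gates the rest of the BuildKite flow on the GitHub Actions checks for the same commit; per the pipeline strategy diagram added to the README, it polls every 30 seconds, up to 40 times. The actual logic lives in `ops/check.sh github-basic-passed`, which is not part of this diff. A minimal sketch of such a poll, assuming the GitHub check-runs API, the `au-heartbeat/Heartbeat` repository, and the `COMMIT_SHA`/`GITHUB_TOKEN` variables passed to the step (the real script would also need to ignore the checks that themselves wait on BuildKite), could look like:

```bash
# Hypothetical sketch only; the real ops/check.sh github-basic-passed is not shown in this diff.
set -euo pipefail

for attempt in $(seq 1 40); do
  # List the check runs for the commit and count those that are not yet successful.
  pending=$(curl -s \
    -H "Authorization: Bearer ${GITHUB_TOKEN}" \
    -H "Accept: application/vnd.github+json" \
    "https://api.github.com/repos/au-heartbeat/Heartbeat/commits/${COMMIT_SHA}/check-runs" \
    | jq '[.check_runs[] | select(.conclusion != "success")] | length')

  if [ "${pending}" -eq 0 ]; then
    echo "GitHub basic checks passed for ${COMMIT_SHA}"
    exit 0
  fi

  echo "Attempt ${attempt}/40: ${pending} check(s) not green yet, retrying in 30s"
  sleep 30
done

echo "GitHub basic checks did not pass in time"
exit 1
```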
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index 46b1c6c1a6..969401ce55 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -6,6 +6,8 @@ body:
- type: markdown
attributes:
value: |
+ ## Request Detail
+
The issue list is reserved exclusively for bug reports and feature requests.
For usage questions, please use the following resources:
@@ -54,8 +56,51 @@ body:
description: What tools will support your request feature?
multiple: true
options:
- - Board
- - Pipeline Tool
- - Source Control
+ - Board (like Jira)
+ - Pipeline Tool (like buildkite)
+ - Source Control (like github)
+ validations:
+ required: true
+
+ - type: markdown
+ attributes:
+ value: |
+ ## Account Detail
+
+        Let us know more about you and your account. We will evaluate all received requests against each other to adjust the priority.
+
+        **The information below is important for prioritization.**
+
+ - type: input
+ id: account_info
+ attributes:
+ label: Account name
+ description: What's your account name?
+ placeholder: Make sure it could be found in jigsaw
+ validations:
+ required: true
+
+ - type: input
+ id: account_location
+ attributes:
+ label: Account location
+      description: Which country is your account located in?
+ validations:
+ required: true
+
+ - type: input
+ id: account_size
+ attributes:
+ label: Teams in Account
+      description: How many teams will adopt Heartbeat after the feature is released?
+ validations:
+ required: true
+
+ - type: input
+ id: expected_date
+ attributes:
+ label: Expected launch date
+ description: What is the latest possible launch date you can accept?
+ placeholder: 2024-12
validations:
- required: true
\ No newline at end of file
+ required: false
diff --git a/.github/workflows/Docs.yaml b/.github/workflows/Docs.yaml
index 9507aebc86..f691316b16 100644
--- a/.github/workflows/Docs.yaml
+++ b/.github/workflows/Docs.yaml
@@ -28,7 +28,7 @@ jobs:
- name: Build docs
run: pnpm run build
- name: Deploy to github pages
- uses: peaceiris/actions-gh-pages@v3
+ uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./docs/dist
diff --git a/.github/workflows/Release.yaml b/.github/workflows/Release.yaml
index 7314fcb29d..4319387ab5 100644
--- a/.github/workflows/Release.yaml
+++ b/.github/workflows/Release.yaml
@@ -24,7 +24,7 @@ jobs:
- name: Validate Gradle wrapper
uses: gradle/wrapper-validation-action@v2
- name: Set up Gradle
- uses: gradle/gradle-build-action@v3.1.0
+ uses: gradle/gradle-build-action@v3.2.1
- name: Build
run: ./gradlew clean build
- uses: actions/upload-artifact@v4
@@ -91,10 +91,23 @@ jobs:
tags: |
ghcr.io/${{ env.LOWCASE_REPO_NAME }}_backend:${{ env.TAG_NAME }}
ghcr.io/${{ env.LOWCASE_REPO_NAME }}_backend:latest
- release:
+
+ build-sbom:
runs-on: ubuntu-latest
needs:
- build_and_push_image
+ steps:
+ - uses: actions/checkout@v4
+ - uses: anchore/sbom-action@v0
+ with:
+ path: ./
+ artifact-name: ${{ env.REPO_NAME }}.${{ env.TAG_NAME }}.sbom.spdx.json
+ - uses: anchore/sbom-action/publish-sbom@v0
+
+ release:
+ runs-on: ubuntu-latest
+ needs:
+ - build-sbom
steps:
- uses: actions/checkout@v4
- name: Download frontend artifact
@@ -119,7 +132,7 @@ jobs:
ls
echo "TAG_NAME=$(git tag --sort version:refname | tail -n 1)" >> "$GITHUB_ENV"
- name: Upload zip file
- uses: softprops/action-gh-release@v1
+ uses: softprops/action-gh-release@v2
with:
files: ${{ env.REPO_NAME }}-${{ env.TAG_NAME }}.zip
diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml
index 8b30ba63a4..9fd7814e0d 100644
--- a/.github/workflows/build-and-deploy.yml
+++ b/.github/workflows/build-and-deploy.yml
@@ -91,7 +91,7 @@ jobs:
- name: Validate Gradle wrapper
uses: gradle/wrapper-validation-action@v2
- name: Set up Gradle
- uses: gradle/gradle-build-action@v3.1.0
+ uses: gradle/gradle-build-action@v3.2.1
- name: Test and check
run: ./gradlew clean check
- name: Build
@@ -120,7 +120,7 @@ jobs:
- name: Validate Gradle wrapper
uses: gradle/wrapper-validation-action@v2
- name: Set up Gradle
- uses: gradle/gradle-build-action@v3.1.0
+ uses: gradle/gradle-build-action@v3.2.1
- name: License check
run: ./gradlew clean checkLicense
- uses: actions/upload-artifact@v4
@@ -228,21 +228,23 @@ jobs:
run: |
./ops/check.sh frontend-license
- # check-buildkite-status:
- # if: ${{ github.event_name == 'pull_request' }}
- # runs-on: ubuntu-latest
- # steps:
- # - name: Checkout code
- # uses: actions/checkout@v4
- #
- # - name: Check BuildKite status
- # run: |
- # buildkite_status=$(curl -H "Authorization: Bearer ${{ secrets.BUILDKITE_TOKEN }}" "https://api.buildkite.com/v2/organizations/thoughtworks-Heartbeat/pipelines/heartbeat/builds?branch=main"| jq -r '.[0].state')
- #
- # if [ "$buildkite_status" != "passed" ]; then
- # echo "BuildKite build failed. Cannot merge the PR."
- # exit 1
- # fi
+ check-buildkite-status:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ - name: Check BuildKite status
+ env:
+ BUILDKITE_TOKEN: ${{ secrets.BUILDKITE_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_REPOSITORY: ${{ github.repository }}
+ CURRENT_ACTOR: ${{ github.actor }}
+ EVENT_NAME: ${{ github.event_name }}
+ CURRENT_BRANCH_NAME: ${{ github.ref }}
+ PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }}
+ run: |
+ ./ops/check.sh buildkite-status
+
images-check:
runs-on: ubuntu-latest
steps:
@@ -289,6 +291,7 @@ jobs:
- credential-check
- frontend-license-check
- backend-license-check
+ - check-buildkite-status
runs-on: ubuntu-latest
permissions:
id-token: write
@@ -459,13 +462,22 @@ jobs:
npm install -g pnpm
- name: Set env
run: echo "HOME=/root" >> $GITHUB_ENV
+ - name: Install shell deps
+ run: |
+ apt-get update && apt-get install -y jq
+ jq --version
+ - name: Check e2e deployment
+ env:
+ BUILDKITE_TOKEN: ${{ secrets.BUILDKITE_TOKEN }}
+ COMMIT_SHA: ${{ github.sha }}
+ run: ./ops/check.sh buildkite-e2e-deployed
- name: Run E2E
env:
APP_ORIGIN: ${{ vars.APP_HTTP_SCHEDULE }}://${{ secrets.AWS_EC2_IP_E2E }}:${{ secrets.AWS_EC2_IP_E2E_FRONTEND_PORT }}
E2E_TOKEN_JIRA: ${{ secrets.E2E_TOKEN_JIRA }}
E2E_TOKEN_BUILD_KITE: ${{ secrets.E2E_TOKEN_BUILD_KITE }}
E2E_TOKEN_GITHUB: ${{ secrets.E2E_TOKEN_GITHUB }}
- E2E_TOKEN_FLAG_AS_BLOCK_JIRA: ${{ secrets.E2E_TOKEN_FLAG_AS_BLOCK_JIRA }}
+ E2E_TOKEN_PIPELINE_NO_ORG_CONFIG_BUILDKITE: ${{ secrets.E2E_TOKEN_PIPELINE_NO_ORG_CONFIG_BUILDKITE }}
shell: bash {0}
run: ./ops/check.sh e2e
- uses: actions/upload-artifact@v4
@@ -474,6 +486,14 @@ jobs:
name: playwright-report
path: frontend/e2e/reports/
retention-days: 30
+ - name: Slack Notification
+ uses: rtCamp/action-slack-notify@v2
+ if: always()
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
+ SLACK_ICON_EMOJI: ":heart-beat:"
+ SLACK_COLOR: ${{ job.status }}
+ SLACK_USERNAME: "Heartbeat E2E Status"
deploy:
runs-on: ubuntu-latest
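The commented-out inline BuildKite status check earlier in this file is replaced by a `buildkite-status` case in `ops/check.sh`, which is not shown in this diff. Based on the removed inline version, a minimal sketch could look like the following; the extra GitHub-related variables passed to the step (actor, event name, branch, PR title) are presumably used by the real script for branch- and PR-specific handling, which this sketch omits:

```bash
# Hypothetical sketch based on the previously commented-out inline check;
# the real ops/check.sh buildkite-status may differ.
set -euo pipefail

buildkite_status=$(curl -s \
  -H "Authorization: Bearer ${BUILDKITE_TOKEN}" \
  "https://api.buildkite.com/v2/organizations/thoughtworks-Heartbeat/pipelines/heartbeat/builds?branch=main" \
  | jq -r '.[0].state')

if [ "${buildkite_status}" != "passed" ]; then
  echo "BuildKite build failed. Cannot merge the PR."
  exit 1
fi
```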
diff --git a/.gitignore b/.gitignore
index 09186b0ace..d0e1e07fa9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,7 @@
/out-tsc
/logs
/app
+/stubs/logs/*
frontend/cypress/
# Only exists if Bazel was run
/bazel-out
@@ -51,3 +52,4 @@ volume
csv
gitleaks-report.json
+*.sbom.spdx.json
diff --git a/.gitleaksignore b/.gitleaksignore
index 27eafd5816..7307666c36 100644
--- a/.gitleaksignore
+++ b/.gitleaksignore
@@ -19,3 +19,4 @@ e001f3e4dc70deb4638d106d2ebfab520b9a2745:docs/src/components/Header/DocSearch.ts
6cff3275f5fcff29462e33b0508359b5d619ffec:docs/src/components/Header/DocSearch.tsx:generic-api-key:54
9102192bbe6790a348e5558cefbb051caa092411:_astro/DocSearch.d9740404.js:generic-api-key:13
a3fe6c206ca324e9e5e9a0e1422fd8c72845d855:_astro/DocSearch.d5fd0ff0.js:generic-api-key:13
+cb693e0c6117cb8f383b72e4bb1c8f2635b7b041:_astro/DocSearch.E1RdsI6d.js:generic-api-key:13
diff --git a/.trivyignore b/.trivyignore
index 048fbfd363..b693001686 100644
--- a/.trivyignore
+++ b/.trivyignore
@@ -12,3 +12,5 @@ CVE-2023-49468
CVE-2024-0553
CVE-2024-0567
CVE-2024-22201
+CVE-2024-22259
+CVE-2024-28085
diff --git a/README.md b/README.md
index 2106ab99b9..4dc2aa139a 100644
--- a/README.md
+++ b/README.md
@@ -1,45 +1,57 @@
-# Heartbeat Project(2023/07)
+# Heartbeat Project
[![Build status](https://badge.buildkite.com/62f2d9def796f9bf8d79dc67e548341b6e3e3ad07631164b07.svg)](https://buildkite.com/heartbeat-backup/heartbeat)[![Codacy Badge](https://app.codacy.com/project/badge/Grade/2e19839055d3429598b2141884496c49)](https://www.codacy.com/gh/au-heartbeat/HeartBeat/dashboard?utm_source=github.com&utm_medium=referral&utm_content=au-heartbeat/HeartBeat&utm_campaign=Badge_Grade)[![Codacy Badge](https://app.codacy.com/project/badge/Coverage/2e19839055d3429598b2141884496c49)](https://www.codacy.com/gh/au-heartbeat/HeartBeat/dashboard?utm_source=github.com&utm_medium=referral&utm_content=au-heartbeat/HeartBeat&utm_campaign=Badge_Coverage)
-[![Docs](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Docs.yaml/badge.svg)](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Docs.yaml) [![Frontend](https://github.com/au-heartbeat/HeartBeat/actions/workflows/frontend.yml/badge.svg)](https://github.com/au-heartbeat/HeartBeat/actions/workflows/frontend.yml) [![Backend](https://github.com/au-heartbeat/HeartBeat/actions/workflows/backend.yml/badge.svg)](https://github.com/au-heartbeat/HeartBeat/actions/workflows/backend.yml) [![Security](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Security.yml/badge.svg)](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Security.yml) [![Build and Deploy](https://github.com/au-heartbeat/Heartbeat/actions/workflows/build-and-deploy.yml/badge.svg)](https://github.com/au-heartbeat/Heartbeat/actions/workflows/build-and-deploy.yml)
+[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=au-heartbeat-heartbeat-frontend&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=au-heartbeat-heartbeat-frontend)
+[![Frontend Coverage](https://sonarcloud.io/api/project_badges/measure?project=au-heartbeat-heartbeat-frontend&metric=coverage)](https://sonarcloud.io/summary/new_code?id=au-heartbeat-heartbeat-frontend)
+[![Backend Coverage](https://sonarcloud.io/api/project_badges/measure?project=au-heartbeat-heartbeat-backend&metric=coverage)](https://sonarcloud.io/summary/new_code?id=au-heartbeat-heartbeat-backend)
+
+[![Docs](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Docs.yaml/badge.svg)](https://github.com/au-heartbeat/HeartBeat/actions/workflows/Docs.yaml) [![Build and Deploy](https://github.com/au-heartbeat/Heartbeat/actions/workflows/build-and-deploy.yml/badge.svg)](https://github.com/au-heartbeat/Heartbeat/actions/workflows/build-and-deploy.yml)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![FOSSA Status](https://app.fossa.com/api/projects/custom%2B23211%2Fgithub.com%2Fau-heartbeat%2FHeartbeat.svg?type=large)](https://app.fossa.com/projects/custom%2B23211%2Fgithub.com%2Fau-heartbeat%2FHeartbeat?ref=badge_large)
-- [Heartbeat Project(2023/07)](#heartbeat-project202307)
+- [Heartbeat Project](#heartbeat-project)
- [News](#news)
- [1 About Heartbeat](#1-about-heartbeat)
- [2 Support tools](#2-support-tools)
- [3 Product Features](#3-product-features)
-
- [3.1 Config project info](#31-config-project-info)
- [3.1.1 Config Board/Pipeline/Source data](#311-config-boardpipelinesource-data)
- [3.1.2 Config search data](#312-config-search-data)
+ - [3.1.2.1 Date picker validation rules](#3121-date-picker-validation-rules)
- [3.1.3 Config project account](#313-config-project-account)
+ - [3.1.3.1 Guideline for generating Jira token](#3131-guideline-for-generating-jira-token)
+ - [3.1.3.2 Guideline for generating Buildkite token](#3132-guideline-for-generating-buildkite-token)
+ - [3.1.3.3 Guideline for generating GitHub token](#3133-guideline-for-generating-github-token)
+ - [3.1.3.4 Authorize GitHub token with correct organization](#3134-authorize-github-token-with-correct-organization)
- [3.2 Config Metrics data](#32-config-metrics-data)
- - [3.2.1 Config Crews/Cycle Time](#321-config-crewscycle-time)
+ - [3.2.1 Config Crews/Board Mappings](#321-config-crewsboard-mappings)
- [3.2.2 Setting Classification](#322-setting-classification)
- - [3.2.3 Setting advanced settings](#323-setting-advanced-setting)
- - [3.2.4 Pipeline configuration](#324-pipeline-configuration)
+ - [3.2.3 Rework times Setting](#323-rework-times-setting)
+ - [3.2.4 Setting advanced Setting](#324-setting-advanced-setting)
+ - [3.2.5 Pipeline configuration](#325-pipeline-configuration)
- [3.3 Export and import config info](#33-export-and-import-config-info)
- [3.3.1 Export Config Json File](#331-export-config-json-file)
- [3.3.2 Import Config Json File](#332-import-config-json-file)
- - [3.4 Generate Metrics Data](#34-generate-metrics-data)
+ - [3.4 Generate Metrics report](#34-generate-metrics-report)
- [3.4.1 Velocity](#341-velocity)
- [3.4.2 Cycle Time](#342-cycle-time)
- [3.4.3 Classification](#343-classification)
- - [3.4.4 Deployment Frequency](#344-deployment-frequency)
- - [3.4.5 Lead time for changes Data](#345-lead-time-for-changes-data)
- - [3.4.6 Change Failure Rate](#346-change-failure-rate)
- - [3.4.7 Mean time to recovery](#347-mean-time-to-recovery)
+ - [3.4.4 Rework](#344-rework)
+ - [3.4.5 Deployment Frequency](#345-deployment-frequency)
+ - [3.4.6 Lead time for changes Data](#346-lead-time-for-changes-data)
+ - [3.4.7 Dev Change Failure Rate](#347-dev-change-failure-rate)
+ - [3.4.8 Dev Mean time to recovery](#348-dev-mean-time-to-recovery)
- [3.5 Export original data](#35-export-original-data)
- [3.5.1 Export board data](#351-export-board-data)
+ - [3.5.1.1 Done card exporting](#3511-done-card-exporting)
+ - [3.5.1.1 Undone card exporting](#3511-undone-card-exporting)
- [3.5.2 Export pipeline data](#352-export-pipeline-data)
- [3.6 Caching data](#36-caching-data)
- [4 Known issues](#4-known-issues)
- - [4.1 Change status name in Jira board](#41-change-status-name-in-jira-board-setting-when-there-are-cards-in-this-status)
+ - [4.1 Change status name in Jira board setting when there are cards in this status](#41--change-status-name-in-jira-board-setting-when-there-are-cards-in-this-status)
- [5 Instructions](#5-instructions)
- [5.1 Prepare for Jira Project](#51-prepare-for-jira-project)
- [5.2 Prepare env to use Heartbeat tool](#52-prepare-env-to-use-heartbeat-tool)
@@ -48,16 +60,19 @@
- [6.1.1 How to build and local preview](#611-how-to-build-and-local-preview)
- [6.1.2 How to run unit tests](#612-how-to-run-unit-tests)
- [6.1.3 How to generate a test report](#613-how-to-generate-a-test-report)
- - [6.1.4 How to run e2e tests locally](#614-how-to-run-e2e-tests-locally)
+ - [6.1.4 How to run E2E tests locally](#614-how-to-run-e2e-tests-locally)
+ - [6.2 How to run backend](#62-how-to-run-backend)
- [7 How to trigger BuildKite Pipeline](#7-how-to-trigger-buildkite-pipeline)
- [Release](#release)
- [Release command in main branch](#release-command-in-main-branch)
-- [7 How to use](#7-how-to-use)
- - [7.1 Docker-compose](#71-docker-compose)
- - [7.1.1 Customize story point field in Jira](#711-customize-story-point-field-in-jira)
- - [7.1.2 Multiple instance deployment](#712-multiple-instance-deployment)
- - [7.2 K8S](#72-k8s)
- - [7.2.1 Multiple instance deployment](#721-multiple-instance-deployment)
+- [8 How to use](#8-how-to-use)
+ - [8.1 Docker-compose](#81-docker-compose)
+ - [8.1.1 Customize story point field in Jira](#811-customize-story-point-field-in-jira)
+ - [8.1.2 Multiple instance deployment](#812-multiple-instance-deployment)
+ - [8.2 K8S](#82-k8s)
+ - [8.2.1 Multiple instance deployment](#821-multiple-instance-deployment)
+- [9 Contribution](#9-contribution)
+- [10 Pipeline Strategy](#10-pipeline-strategy)
# News
@@ -67,24 +82,27 @@
- [Nov 6 2023 - Release Heartbeat - 1.1.2](release-notes/20231106.md)
- [Nov 21 2023 - Release Heartbeat - 1.1.3](release-notes/20231121.md)
- [Dev 4 2023 - Release Heartbeat - 1.1.4](release-notes/20231204.md)
- - [Feb 29 2024 - Release Heartbeat - 1.1.5](release-notes/20240229.md)
+- [Feb 29 2024 - Release Heartbeat - 1.1.5](release-notes/20240229.md)
+- [Apr 2 2024 - Release Heartbeat - 1.1.6](release-notes/20240402.md)
# 1 About Heartbeat
Heartbeat is a tool for tracking project delivery metrics that can help you get a better understanding of delivery performance. This product allows you easily get all aspects of source data faster and more accurate to analyze team delivery performance which enables delivery teams and team leaders focusing on driving continuous improvement and enhancing team productivity and efficiency.
-State of DevOps Report is launching in 2019. In this webinar, The 4 key metrics research team and Google Cloud share key metrics to measure DevOps performance, measure the effectiveness of development and delivery practices. They searching about six years, developed four metrics that provide a high-level systems view of software delivery and performance.
+The State of DevOps Report launched in 2019. In this webinar, the four key metrics research team and Google Cloud share key metrics to measure DevOps performance and the effectiveness of development and delivery practices. Over about six years of research, they developed four metrics that provide a high-level systems view of software delivery and performance. Based on that, Heartbeat introduces the metrics below.
-**Here are the four Key meterics:**
+**8 metrics supported by Heartbeat:**
-1. Deployment Frequency (DF)
-2. Lead Time for changes (LTC)
-3. Mean Time To Recover (MTTR)
-4. Change Failure Rate (CFR)
-In Heartbeat tool, we also have some other metrics, like: Velocity, Cycle Time and Classification. So we can collect DF, LTC, CFR, Velocity, Cycle Time and Classification.
+1. [Velocity](#341-velocity)
+2. [Cycle time](#342-cycle-time)
+3. [Classification](#343-classification)
+4. [Rework](#344-rework)
+5. [Deployment Frequency](#345-deployment-frequency)
+6. [Lead Time for changes](#346-lead-time-for-changes-data)
+7. [Dev Change Failure Rate](#347-dev-change-failure-rate)
+8. [Dev Mean Time To Recovery](#348-dev-mean-time-to-recovery)
-For MTTR meter, specifically, if the pipeline stay in failed status during the selected period, the unfixed part will not be included for MTTR calculation.
# 2 Support tools
@@ -129,21 +147,30 @@ All need to select which data you want to get, for now, we support seven metrics
![Image 3-3](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/3.png)\
_Image 3-3,Metrics Data_
+##### 3.1.2.1 Date picker validation rules
+
+Users cannot select a future time in the calendar (for both start time and end time). The maximum interval between start time and end time is 31 days (e.g. 01/01/2024 - 01/31/2024).
+
+Invalid dates include, for example, future dates, an interval between start time and end time of more than 31 days, or an end time that is before the start time.
+
+If the user selects an invalid date, a warning may be shown.
+
#### 3.1.3 Config project account
Because all metrics data from different tools that your projects use. Need to have the access to these tools then you can get the data. So after select time period and metrics data, then you need to input the config for different tools(Image 3-4).
According to your selected required data, you need to input account settings for the respective data source. Below is the mapping between your selected data to data source.
-| Required Data | Datasource |
-| --------------------- | -------------- |
-| Velocity | Board |
-| Cycle time | Board |
-| Classification | Board |
-| Lead time for changes | Repo,Pipeline |
-| Deployment frequency | Pipeline |
-| Change failure rate | Pipeline |
-| Mean time to recovery | Pipeline |
+| Required Data | Datasource |
+|---------------------------| -------------- |
+| Velocity | Board |
+| Cycle time | Board |
+| Classification | Board |
+| Rework times | Board |
+| Lead time for changes | Repo,Pipeline |
+| Deployment frequency | Pipeline |
+| Dev change failure rate | Pipeline |
+| Dev mean time to recovery | Pipeline |
![Image 3-4](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/4.png)\
Image 3-4,Project config
@@ -157,27 +184,44 @@ Image 3-4,Project config
|Site|Site is the domain for your jira board, like below URL, `dorametrics` is the site
https://dorametrics.atlassian.net/jira/software/projects/ADM/boards/2 |
|Email|The email can access to the Jira board |
|Token|Generate a new token with below link, https://id.atlassian.com/manage-profile/security/api-tokens |
+##### 3.1.3.1 Guideline for generating Jira token
+![Image 3-5](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/generate-jira-token.png)
+_Image 3-5, create Jira token_
**The details for Pipeline:**
|Items|Description|
|---|---|
|PipelineTool| The pipeline tool you team use, currently heartbeat only support buildkite|
|Token|Generate buildkite token with below link, https://buildkite.com/user/api-access-tokens|
+##### 3.1.3.2 Guideline for generating Buildkite token
+Select the organization for your pipeline
+![Image 3-6](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/generate-buildkite-token-org.png)
+Choose "Read Builds", "Read Organizations" and "Read Pipelines".
+![Image 3-6](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/generate-buildkite-token.png)
+_Image 3-6, generate Buildkite token_
**The details for SourceControl:**
|Items|Description|
|---|---|
|SourceControl|The source control tool you team use, currently heartbeat only support Github|
|Token|Generate Github token with below link(classic one), https://github.com/settings/tokens|
-
+##### 3.1.3.3 Guideline for generating GitHub token
+Generate new token (classic)
+![Image 3-7](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/generate-github-token-entry.png)
+Select repo from scopes
+![Image 3-7](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/generate-github-token.png)
+_Image 3-7, generate classic GitHub token_
+##### 3.1.3.4 Authorize GitHub token with correct organization
+![Image 3-8](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/guideline-for-generating-token/unauthorized.png)
+_Image 3-8, authorize GitHub token with correct organization_
### 3.2 Config Metrics data
After inputting the details info, users need to click the `Verify` button to verify if can access to these tool. Once verified, they could click the `Next` button go to next page -- Config Metrics page(Image 3-5,Image 3-6,Image 3-7)
-#### 3.2.1 Config Crews/Cycle Time
+#### 3.2.1 Config Crews/Board Mappings
-![Image 3-5](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/5.png)\
-_Image 3-5, Crews/Cycle Time config_
+![Image 3-9](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/5.png)\
+_Image 3-9, Crews/Board Mappings config_
**Crew Settings:** You could select your team members from a list get from board source. The list will include the assignees for those tickets that finished in the time period selected in the last step.
@@ -195,46 +239,57 @@ _Image 3-5, Crews/Cycle Time config_
| Done | It means the tickets are already done. Cycle time doesn't include this time. |
| -- | If you don't need to map, you can select -- |
+**By Status**: the user can click the toggle button to choose whether the mapping relationship is by column or by status. It supports mapping multiple statuses into one column; as the picture shows, the TODO and INPROGRESS board statuses can be mapped to different Heartbeat states.
+
+![Image 3-10](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/16.png)\
+_Image 3-10,By Status_
+
#### 3.2.2 Setting Classification
-![Image 3-6](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/6.png)\
-_Image 3-6,Classification Settings_
+![Image 3-11](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/6.png)\
+_Image 3-11,Classification Settings_
In classification settings, it will list all Context fields for your jira board. Users can select anyone to get the data for them. And according to your selection, in the export page, you will see the classification report to provide more insight with your board data.
-#### 3.2.3 Setting advanced Setting
+#### 3.2.3 Rework times Setting
+![Image 3-12](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/rework-setting-image/rework-times-settings.png)\
+_Image 3-12,Rework times Settings_
+
+The Rework times settings contain a Rework to which state input and an Exclude which states (optional) input. The options in the Rework to which state input all come from the Board mappings and are ordered; when an option is selected, the rework information for that option and all subsequent options will be counted on the report page and in the export file. The Exclude which states (optional) input can help you exclude certain subsequent options (Image 3-12).
-![Image 3-7](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/advance-settings.png)\
-_Image 3-7,advanced Settings_
+#### 3.2.4 Setting advanced Setting
+
+![Image 3-13](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/advance-settings.png)\
+_Image 3-13,advanced Settings_
In advanced settings, it contains story points Input and Flagged Input. Users can input story points and Flagged custom-field on their own when the jira board has permission restriction . And according to these input, in the export page, user can get correct story points and block days
how to find the story points and Flagged custom-field?
-![Image 3-8](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/devtool-network.png)\
-_Image 3-8,devTool-network-part_
+![Image 3-14](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/devtool-network.png)\
+_Image 3-14,devTool-network-part_
-![Image 3-9](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/card-history.png)\
-_Image 3-9,card-history_
+![Image 3-15](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/card-history.png)\
+_Image 3-15,card-history_
-![Image 3-10](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/find-custom-field-api.png)\
-_Image 3-10,find-custom-field-api_
+![Image 3-16](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/find-custom-field-api.png)\
+_Image 3-16,find-custom-field-api_
-![Image 3-11](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/story-point-custom-field.png)\
-_Image 3-11,story-point-custom-field_
+![Image 3-17](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/story-point-custom-field.png)\
+_Image 3-17,story-point-custom-field_
-![Image 3-12](https://jsd.cdn.zzko.cn/gh/au-heartbeat/data-hosting@main/advanced-setting-image/flagged-custom-field.png)\
-_Image 3-12,flagged-custom-field_
+![Image 3-18](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/advanced-setting-image/flagged-custom-field.png)\
+_Image 3-18,flagged-custom-field_
1. user need to go to the jira board and click one card , then open dev tool switch to network part.
2. then click card's history part.
3. at that time, user can see one api call which headers request URL is https://xxx.atlassian.net/rest/gira/1/ .
-4. then go to review part, find fieldDisplayName which show Flagged and story point estimate and get the fieldId as the custom-field that user need to input in advanced settings. from image 3-11 and 3-12 we can find that flagged custom field is customfield_10021, story points custom field is customfield_10016.
+4. then go to the review part, find the fieldDisplayName which shows Flagged and Story point estimate, and get the fieldId as the custom-field that the user needs to input in advanced settings. From Image 3-17 and Image 3-18 we can see that the flagged custom field is customfield_10021 and the story points custom field is customfield_10016.
-#### 3.2.4 Pipeline configuration
+#### 3.2.5 Pipeline configuration
-![Image 3-13](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/7.png)\
-_Image 3-13,Settings for Pipeline_
+![Image 3-19](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/7.png)\
+_Image 3-19,Settings for Pipeline_
They are sharing the similar settings which you need to specify the pipeline step so that Heartbeat will know in which pipeline and step, team consider it as deploy to PROD. So that we could use it to calculate metrics.
@@ -248,69 +303,116 @@ They are sharing the similar settings which you need to specify the pipeline ste
### 3.3.1 Export Config Json File
-When user first use this tool, need to create a project, and do some config. To avoid the user entering configuration information repeatedly every time, we provide a “Save” button in the config and metrics pages. In config page, click the save button, it will save all items in config page in a Json file. If you click the save button in the metrics page, it will save all items in config and metrics settings in a Json file. Here is the json file (Image 3-8)。Note: Below screenshot just contains a part of data.
+When users first use this tool, they need to create a project and do some configuration. To avoid entering configuration information repeatedly every time, we provide a “Save” button on the config and metrics pages. On the config page, clicking the save button saves all items on the config page into a JSON file. If you click the save button on the metrics page, it saves all items from the config and metrics settings into a JSON file. Here is the JSON file (Image 3-20). Note: the screenshot below contains only part of the data.
-![Image 3-14](https://user-images.githubusercontent.com/995849/89784710-b4c41180-db4b-11ea-9bc4-db14ce98ef69.png)\
-_Image 3-14, Config Json file_
+![Image 3-20](https://user-images.githubusercontent.com/995849/89784710-b4c41180-db4b-11ea-9bc4-db14ce98ef69.png)\
+_Image 3-20, Config Json file_
### 3.3.2 Import Config Json File
-When user already saved config file before, then you don’t need to create a new project. In the home page, can click Import Project from File button(Image 3-1) to select the config file. If your config file is too old, and the tool already have some new feature change, then if you import the config file, it will get some warning info(Image 3-9). You need to re-select some info, then go to the next page.
+If you have already saved a config file, you don’t need to create a new project. On the home page, you can click the Import Project from File button (Image 3-1) to select the config file. If your config file is too old and the tool already has some new feature changes, importing the config file will show some warning info (Image 3-21). You need to re-select some info, then go to the next page.
-![Image 3-15](https://user-images.githubusercontent.com/995849/89784267-f902e200-db4a-11ea-9d0b-a8ab29a8819e.png)\
-_Image 3-15, Warning message_
+![Image 3-21](https://user-images.githubusercontent.com/995849/89784267-f902e200-db4a-11ea-9d0b-a8ab29a8819e.png)\
+_Image 3-21, Warning message_
## 3.4 Generate Metrics report
After setup and configuration, then it will generate the heartbeat dashboard.
-![Image 3-16](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/8.png)
+![Image 3-22](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/8.png)
+_Image 3-22, Report page_
You could find the drill down from `show more >` link from dashboard.
### 3.4.1 Velocity
-In Velocity Report, it will list the corresponding data by Story Point and the number of story tickets. (image 3-10)
-![Image 3-16](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/9.png)\
-_Image 3-16,Velocity Report_
+In the Velocity report, it lists the corresponding data by story points and by the number of story tickets (Image 3-23).
+- `Velocity`: how many story points and cards we have completed within the selected time period.
+- Definition for 'Velocity (Story Point)': how many story points we have completed within the selected time period.
+- Formula for 'Velocity (Story Point)': sum of story points of done cards in the selected time period
+- Definition for 'Throughput (Cards Count)': how many story cards we have completed within the selected time period.
+- Formula for 'Throughput (Cards Count)': count of done cards in the selected time period
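+
+For example (illustrative numbers): if the team completed 10 done cards totalling 25 story points in the selected time period, then Velocity (Story Point) = 25 and Throughput (Cards Count) = 10.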
+
+
+![Image 3-23](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/9.png)\
+_Image 3-23,Velocity Report_
### 3.4.2 Cycle Time
The calculation process data and final result of Cycle Time are calculated by rounding method, and two digits are kept after the decimal point. Such as: 3.567... Is 3.56; 3.564... Is 3.56.
+- `Cycle time`: the time it takes for each card to move from 'to do' to 'done'.
+- Definition for 'Average Cycle Time (Days/SP)': how many days it takes on average to complete one story point.
+- Formula for 'Average Cycle Time (Days/SP)': sum of cycle time of done cards / total story points of done cards
+- Definition for 'Average Cycle Time (Days/Card)': how many days it takes on average to complete one card.
+- Formula for 'Average Cycle Time (Days/Card)': sum of cycle time of done cards / count of done cards
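+
+For example (illustrative numbers): if the sum of cycle time of the done cards is 50 days, for 10 done cards totalling 25 story points, then Average Cycle Time = 50/25 = 2 Days/SP and 50/10 = 5 Days/Card.
+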
-![Image 3-17](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/10.png)\
-_Image 3-17,Cycle Time Report_
+![Image 3-24](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/10.png)\
+_Image 3-24,Cycle Time Report_
### 3.4.3 Classification
It will show the classification data of Board based on your selection on `Classification Settings` in metrics page.
The percentage value represent the count of that type tickets vs total count of tickets.
+- `Classification`: provides different dimensions to view how much effort the team spent within the selected time period.
+- For example: spike cards account for 17.65% of the total completed cards.
+
+![Image 3-25](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/11.png)\
+_Image 3-25,Classification Report_
+
+### 3.4.4 Rework
+
+- Definition for 'Rework': a card rolls back from a later state to a previous state; for example, a card moving from the 'testing' state to the 'in dev' state means this card is reworked.
+- Formula for 'Total rework times': the total number of rework times across all done cards
+- Formula for 'Total rework cards': the total number of reworked cards among all done cards
+- Formula for 'Rework cards ratio': total rework cards / throughput
+
+It will show the rework data of the board based on your selection in `Rework times settings` on the metrics page (Image 3-26).
+
+If "to do" is selected in "Rework to which state", we will count the number of times cards are reworked back from the subsequent states to the "to do" state.
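+
+For example (illustrative numbers): if 3 of 10 done cards were reworked, 5 times in total, then Total rework times = 5, Total rework cards = 3, and Rework cards ratio = 3/10 = 30%.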
+
-![Image 3-18](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/11.png)\
-_Image 3-18,Classification Report_
+
+![Image 3-26](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/rework-setting-image/rework-detail.png)\
+_Image 3-26,Rework Report_
-### 3.4.4 Deployment Frequency
+### 3.4.5 Deployment Frequency
+- Definition for 'Deployment Frequency': this metric records how often you deploy code to production on a daily basis.
+- Formula for 'Deployment Frequency': number of builds with (Status = passed & Valid = true) / working days
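+
+For example (illustrative numbers): 10 builds with (Status = passed & Valid = true) over 20 working days give a Deployment Frequency of 10/20 = 0.5 per day.
+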
+![Image 3-27](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/export/export-pipline-data.png)\
+_Image 3-27, export pipeline data_
+![Image 3-28](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/12.png)\
+_Image 3-28,Deployment Frequency Report_
-![Image 3-19](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/12.png)\
-_Image 3-19,Deployment Frequency Report_
+### 3.4.6 Lead time for changes Data
+- Formula for 'PR lead time':
+  - if a PR exists: PR lead time = PR merged time - first code committed time
+  - if there is no PR, or the PR is a revert: PR lead time = 0
-### 3.4.5 Lead time for changes Data
+- Formula for 'Pipeline lead time':
+  - if a PR exists: Pipeline lead time = Job Complete Time - PR merged time
+  - if there is no PR: Pipeline lead time = Job Complete Time - Job Start Time
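+
+For example (illustrative times): if the first code commit was at 09:00, the PR was merged at 11:00, and the deployment job completed at 11:30, then PR lead time = 2 hours and Pipeline lead time = 0.5 hours.
+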
-![Image 3-20](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/13.png)\
-_Image 3-20,Lead time for changes Report_
-### 3.4.6 Change Failure Rate
-![Image 3-21](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/14.png)\
-_Image 3-21,Change Failure Rate Report_
+![Image 3-29](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/13.png)\
+_Image 3-29,Lead time for changes Report_
-### 3.4.7 Mean time to recovery
+### 3.4.7 Dev Change Failure Rate
+- Definition for 'Dev Change Failure Rate': this metric is different from the official definition of change failure rate. In Heartbeat we define it based on development: it is the percentage of failed pipeline builds out of the total pipeline builds. You can select a different pipeline step as your final step; a lower value means fewer failed pipelines.
+- Formula for 'Dev Change Failure Rate': number of builds with (Status = failed) / [number of builds with (Status = passed & Valid = true) + number of builds with (Status = failed)]
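+
+For example (illustrative numbers): 2 failed builds plus 8 builds with (Status = passed & Valid = true) give a Dev Change Failure Rate of 2 / (8 + 2) = 20%.
+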
-![Image 3-22](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/15.png)\
-_Image 3-22,mean time to recovery
+![Image 3-30](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/14.png)\
+_Image 3-30,Dev Change Failure Rate Report_
+
+### 3.4.8 Dev Mean time to recovery
+- Definition for 'Dev Mean time to recovery': this metric is also different from the official definition of mean time to recovery. It comes from the pipeline and records how long it generally takes to recover when the pipeline fails. If this value is less than 8 hours, it means 'red does not last overnight', i.e. our repair speed is relatively good.
+- Formula for 'Dev Mean time to recovery': sum of [the time difference from the first fail to the first pass, by deployment completed time] / the number of repairs
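+
+For example (illustrative times): if the pipeline first failed at 10:00 and first passed again at 12:00, that single repair gives a Dev Mean time to recovery of 2 hours.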
+
+![Image 3-31](https://cdn.jsdelivr.net/gh/au-heartbeat/data-hosting@main/readme/15.png)\
+_Image 3-31, Dev Mean time to recovery Report_
## 3.5 Export original data
-After generating the report, you can export the original data for your board and pipeline (Image 3-15). Users can click the “Export board data” or “Export pipeline data” button to export the original data.
+After generating the report, you can export the original data for your board and pipeline (Image 3-18). Users can click the “Export board data” or “Export pipeline data” button to export the original data.
### 3.5.1 Export board data
@@ -318,14 +420,14 @@ It will export a csv file for board data
#### 3.5.1.1 Done card exporting
-Export the all done tickets during the time period(Image 1)
+Export all the done tickets during the time period (Image 3-18)
#### 3.5.1.1 Undone card exporting
-Export the latest updated 50 non-done tickets in your current active board. And it will order by heartbeat state and then last status change date(Image 3-16)
+Export the 50 most recently updated non-done tickets in your current active board, ordered by Heartbeat state and then by last status change date (Image 3-32)
-![Image 3-22](https://user-images.githubusercontent.com/995849/89784291-01f3b380-db4b-11ea-8f5a-d475e80014fb.png)\
-_Image 3-22,Exported Board Data_
+![Image 3-32](https://user-images.githubusercontent.com/995849/89784291-01f3b380-db4b-11ea-8f5a-d475e80014fb.png)\
+_Image 3-32,Exported Board Data_
**All columns for Jira board:**
|Column name |Description|
@@ -352,13 +454,14 @@ _Image 3-22,Exported Board Data_
|Block Days|Blocked days for each ticket|
|Review Days|--|
|Original Cycle Time: {Column Name}|The data for Jira board original data |
-
+|Rework: total - {rework state} | The total number of rework times |
+|Rework: from {subsequent status} | The number of rework times from this status |
### 3.5.2 Export pipeline data
+It will export a CSV file with pipeline data (Image 3-33).
+It will export a csv file for pipeline data (image 3-29).
-![Image 3-23](https://user-images.githubusercontent.com/995849/89784293-0324e080-db4b-11ea-975d-6609024aac49.png)\
-_Image 3-23,Exported Pipeline Data_
+![Image 3-33](https://user-images.githubusercontent.com/995849/89784293-0324e080-db4b-11ea-975d-6609024aac49.png)\
+_Image 3-33,Exported Pipeline Data_
**All columns for pipeline data:**
|Column name |Description|
@@ -443,7 +546,7 @@ pnpm test
pnpm coverage
```
-## 6.1.4 How to run e2e tests locally
+## 6.1.4 How to run E2E tests locally
2. Start the backend service
@@ -459,12 +562,14 @@ cd HearBeat/frontend
pnpm start
```
-4. Run the e2e tests
+4. Run the E2E tests
```
cd HearBeat/frontend
-pnpm e2e
+pnpm run e2e:headed
```
+## 6.2 How to run backend
+Refer to [run backend](backend/README.md#1-how-to-start-backend-application)
# 7 How to trigger BuildKite Pipeline
@@ -491,9 +596,9 @@ git tag -d {tag name}
git push origin :refs/tags/{tag name}
```
-# 7 How to use
+# 8 How to use
-## 7.1 Docker-compose
+## 8.1 Docker-compose
First, create a `docker-compose.yml` file, and copy below code into the file.
@@ -523,7 +628,7 @@ Then, execute this command
docker-compose up -d frontend
```
-### 7.1.1 Customize story point field in Jira
+### 8.1.1 Customize story point field in Jira
Specifically, story point field can be indicated in `docker-compose.yml`. You can do it as below.
@@ -548,7 +653,7 @@ services:
restart: always
```
-### 7.1.2 Multiple instance deployment
+### 8.1.2 Multiple instance deployment
Specifically, if you want to run with multiple instances. You can do it with below docker compose file.
@@ -579,7 +684,7 @@ volumes:
file_volume:
```
-## 7.2 K8S
+## 8.2 K8S
First, create a `k8s-heartbeat.yml` file, and copy below code into the file.
@@ -634,7 +739,7 @@ spec:
apiVersion: v1
kind: Service
metadata:
  name: frontend
spec:
selector:
app: frontend
@@ -651,6 +756,50 @@ Then, execute this command
kubectl apply -f k8s-heartbeat.yml
```
-### 7.2.1 Multiple instance deployment
+### 8.2.1 Multiple instance deployment
You also can deploy Heartbeats in multiple instances using K8S through the following [documentation](https://au-heartbeat.github.io/Heartbeat/en/devops/how-to-deploy-heartbeat-in-multiple-instances-by-k8s/).
+
+# 9 Contribution
+
+We love your input! Please see our [contributing guide](contribution.md) to get started. Thank you 🙏 to all our contributors!
+
+# 10 Pipeline Strategy
+
+Heartbeat currently uses `GitHub Actions` and `BuildKite` to build and deploy the Heartbeat application.
+
+But there are some constraints, such as dependencies between pipelines.
+
+So committers should pay attention to the flow below when there are pipeline issues.
+
+```mermaid
+ sequenceDiagram
+ actor Committer
+ participant GitHub_Actions as GitHub Actions
+ participant BuildKite
+
+ Committer ->> GitHub_Actions : Push code
+ Committer ->> BuildKite : Push code
+ loop 30s/40 times
+    BuildKite->> GitHub_Actions: Check whether the basic checks (all checks before the 'deploy-infra' job) have passed
+    GitHub_Actions -->> BuildKite: Have the basic checks passed?
+ alt Yes
+ BuildKite ->> BuildKite: Build and deploy e2e env
+ Note over BuildKite, GitHub_Actions: Some times passed
+ loop 30s/60 times
+ GitHub_Actions ->> BuildKite: Request to check if the e2e has been deployed
+    BuildKite -->> GitHub_Actions: e2e deployment status (has the e2e env been deployed?)
+ alt Yes
+ GitHub_Actions ->> GitHub_Actions: Run e2e check on GitHub actions
+ Note over BuildKite, GitHub_Actions: Some times passed
+    GitHub_Actions -->> Committer: Return the pipeline result to the committer
+    BuildKite -->> Committer: Return the pipeline result to the committer
+ else No
+ GitHub_Actions -->> Committer: Break the pipeline
+ end
+ end
+ else No
+ BuildKite -->> Committer: Break the pipeline
+ end
+ end
+```
diff --git a/backend/build.gradle b/backend/build.gradle
index 46692478e5..25fe3a9b70 100644
--- a/backend/build.gradle
+++ b/backend/build.gradle
@@ -3,9 +3,9 @@ plugins {
id 'jacoco'
id 'pmd'
id 'org.springframework.boot' version '3.1.9'
- id 'io.spring.dependency-management' version '1.1.0'
- id "io.spring.javaformat" version "0.0.38"
- id 'com.github.jk1.dependency-license-report' version '2.1'
+ id 'io.spring.dependency-management' version '1.1.4'
+ id "io.spring.javaformat" version "0.0.41"
+ id 'com.github.jk1.dependency-license-report' version '2.6'
id "org.sonarqube" version "4.4.1.3373"
}
@@ -29,7 +29,7 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-actuator'
implementation 'org.springframework.boot:spring-boot-starter-log4j2'
implementation 'org.springframework.boot:spring-boot-starter-validation'
- implementation 'org.springframework:spring-core:6.1.3'
+ implementation 'org.springframework:spring-core:6.1.5'
implementation("org.springframework.cloud:spring-cloud-starter-openfeign:4.0.2") {
exclude group: 'commons-fileupload', module: 'commons-fileupload'
}
@@ -41,22 +41,23 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-cache'
implementation 'org.ehcache:ehcache:3.10.8'
implementation 'javax.annotation:javax.annotation-api:1.3.2'
- implementation 'com.google.code.gson:gson:2.8.9'
+ implementation 'com.google.code.gson:gson:2.10.1'
testImplementation 'junit:junit:4.13.2'
- compileOnly 'org.projectlombok:lombok:1.18.26'
- annotationProcessor 'org.projectlombok:lombok:1.18.26'
+ compileOnly 'org.projectlombok:lombok:1.18.32'
+ annotationProcessor 'org.projectlombok:lombok:1.18.32'
testImplementation 'org.springframework.boot:spring-boot-starter-test'
- testImplementation 'org.junit.jupiter:junit-jupiter:5.9.2'
- testCompileOnly 'org.projectlombok:lombok:1.18.26'
- testAnnotationProcessor 'org.projectlombok:lombok:1.18.26'
- implementation 'com.opencsv:opencsv:5.5.2'
- implementation 'org.apache.commons:commons-text:1.10.0'
+ testImplementation 'org.junit.jupiter:junit-jupiter:5.10.2'
+ testCompileOnly 'org.projectlombok:lombok:1.18.32'
+ testAnnotationProcessor 'org.projectlombok:lombok:1.18.32'
+ implementation 'com.opencsv:opencsv:5.9'
+ implementation 'org.apache.commons:commons-text:1.11.0'
+ implementation 'org.awaitility:awaitility:3.1.6'
}
tasks.named('test') {
useJUnitPlatform()
testLogging {
- events "passed", "skipped", "failed"
+ events "skipped", "failed"
}
finalizedBy jacocoTestReport
}
@@ -72,6 +73,7 @@ sonar {
property "sonar.projectKey", "au-heartbeat-heartbeat-backend"
property "sonar.organization", "au-heartbeat"
property "sonar.host.url", "https://sonarcloud.io"
+ property "sonar.exclusions", "src/main/java/heartbeat/HeartbeatApplication.java,src/main/java/heartbeat/config/**,src/main/java/heartbeat/util/SystemUtil.java"
}
}
@@ -100,6 +102,36 @@ jacocoTestCoverageVerification {
violationRules {
rule {
limit {
+ counter = 'INSTRUCTION'
+ value = 'COVEREDRATIO'
+ minimum = 1.0
+ }
+ }
+ rule {
+ limit {
+ counter = 'LINE'
+ value = 'COVEREDRATIO'
+ minimum = 1.0
+ }
+ }
+ rule {
+ limit {
+ counter = 'METHOD'
+ value = 'COVEREDRATIO'
+ minimum = 1.0
+ }
+ }
+ rule {
+ limit {
+ counter = 'BRANCH'
+ value = 'COVEREDRATIO'
+ minimum = 0.90
+ }
+ }
+ rule {
+ limit {
+ counter = 'CLASS'
+ value = 'COVEREDRATIO'
minimum = 1.0
}
}
diff --git a/backend/gradle/wrapper/gradle-wrapper.jar b/backend/gradle/wrapper/gradle-wrapper.jar
index c1962a79e2..e6441136f3 100644
Binary files a/backend/gradle/wrapper/gradle-wrapper.jar and b/backend/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/backend/gradle/wrapper/gradle-wrapper.properties b/backend/gradle/wrapper/gradle-wrapper.properties
index 37aef8d3f0..b82aa23a4f 100644
--- a/backend/gradle/wrapper/gradle-wrapper.properties
+++ b/backend/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.1.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
networkTimeout=10000
+validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/backend/gradlew b/backend/gradlew
index aeb74cbb43..1aa94a4269 100755
--- a/backend/gradlew
+++ b/backend/gradlew
@@ -83,7 +83,8 @@ done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
@@ -130,10 +131,13 @@ location of your Java installation."
fi
else
JAVACMD=java
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+ if ! command -v java >/dev/null 2>&1
+ then
+ die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
+ fi
fi
# Increase the maximum file descriptors if we can.
@@ -141,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
- # shellcheck disable=SC3045
+ # shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
@@ -149,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
- # shellcheck disable=SC3045
+ # shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@@ -198,11 +202,11 @@ fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-# Collect all arguments for the java command;
-# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-# shell script including quotes and variable substitutions, so put them in
-# double quotes to make sure that they get re-expanded; and
-# * put everything else in single quotes, so that it's not re-expanded.
+# Collect all arguments for the java command:
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+# and any embedded shellness will be escaped.
+# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
diff --git a/backend/gradlew.bat b/backend/gradlew.bat
index 6689b85bee..7101f8e467 100644
--- a/backend/gradlew.bat
+++ b/backend/gradlew.bat
@@ -43,11 +43,11 @@ set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
@@ -57,11 +57,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
diff --git a/backend/src/main/java/heartbeat/client/BuildKiteFeignClient.java b/backend/src/main/java/heartbeat/client/BuildKiteFeignClient.java
index c129c531be..1d1a8fdd2f 100644
--- a/backend/src/main/java/heartbeat/client/BuildKiteFeignClient.java
+++ b/backend/src/main/java/heartbeat/client/BuildKiteFeignClient.java
@@ -3,11 +3,8 @@
import heartbeat.client.decoder.BuildKiteFeignClientDecoder;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteBuildInfo;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteOrganizationsInfo;
-import heartbeat.client.dto.pipeline.buildkite.BuildKiteTokenInfo;
import heartbeat.client.dto.pipeline.buildkite.BuildKitePipelineDTO;
-
-import java.util.List;
-
+import heartbeat.client.dto.pipeline.buildkite.BuildKiteTokenInfo;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.HttpStatus;
@@ -19,6 +16,8 @@
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
+import java.util.List;
+
@FeignClient(name = "buildKiteFeignClient", url = "${buildKite.url}", configuration = BuildKiteFeignClientDecoder.class)
public interface BuildKiteFeignClient {
@@ -32,10 +31,9 @@ public interface BuildKiteFeignClient {
@ResponseStatus(HttpStatus.OK)
List<BuildKiteOrganizationsInfo> getBuildKiteOrganizationsInfo(@RequestHeader("Authorization") String token);
- @Cacheable(cacheNames = "pipelineInfo", key = "#token+'-'+#organizationId+'-'+#page+'-'+#perPage")
@GetMapping(path = "v2/organizations/{organizationId}/pipelines?page={page}&per_page={perPage}")
@ResponseStatus(HttpStatus.OK)
- List<BuildKitePipelineDTO> getPipelineInfo(@RequestHeader("Authorization") String token,
+ ResponseEntity<List<BuildKitePipelineDTO>> getPipelineInfo(@RequestHeader("Authorization") String token,
@PathVariable String organizationId, @PathVariable String page, @PathVariable String perPage);
@GetMapping(path = "v2/organizations/{organizationId}/pipelines/{pipelineId}/builds",
diff --git a/backend/src/main/java/heartbeat/client/JiraFeignClient.java b/backend/src/main/java/heartbeat/client/JiraFeignClient.java
index 402f47008b..c9c8c24e64 100644
--- a/backend/src/main/java/heartbeat/client/JiraFeignClient.java
+++ b/backend/src/main/java/heartbeat/client/JiraFeignClient.java
@@ -51,4 +51,8 @@ CardHistoryResponseDTO getJiraCardHistoryByCount(URI baseUrl, @PathVariable Stri
@GetMapping(path = "rest/api/2/project/{projectIdOrKey}")
JiraBoardProject getProject(URI baseUrl, @PathVariable String projectIdOrKey, @RequestHeader String authorization);
+ // This api is solely used for site url checking
+ @GetMapping(path = "/rest/api/3/dashboard")
+ String getDashboard(URI baseUrl, @RequestHeader String authorization);
+
}
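
Note: the getDashboard endpoint added above is deliberately lightweight; its only purpose is to confirm that a configured Jira site URL is reachable with the supplied credentials. A minimal caller sketch, assuming a hypothetical JiraSiteUrlChecker wrapper that is not part of this change:

```java
import heartbeat.client.JiraFeignClient;

import java.net.URI;

// Hypothetical helper (not part of this PR) showing the intended use of getDashboard:
// a cheap call whose only purpose is to confirm the configured Jira site URL is
// reachable with the given credentials.
class JiraSiteUrlChecker {

	private final JiraFeignClient jiraFeignClient;

	JiraSiteUrlChecker(JiraFeignClient jiraFeignClient) {
		this.jiraFeignClient = jiraFeignClient;
	}

	boolean isSiteUrlValid(URI baseUrl, String authorization) {
		try {
			jiraFeignClient.getDashboard(baseUrl, authorization); // response body is ignored; success is all that matters
			return true;
		}
		catch (RuntimeException e) { // the Feign error decoder turns HTTP failures into runtime exceptions
			return false;
		}
	}

}
```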
diff --git a/backend/src/main/java/heartbeat/client/decoder/BuildKiteFeignClientDecoder.java b/backend/src/main/java/heartbeat/client/decoder/BuildKiteFeignClientDecoder.java
index 8ec1c38811..c7706925ae 100644
--- a/backend/src/main/java/heartbeat/client/decoder/BuildKiteFeignClientDecoder.java
+++ b/backend/src/main/java/heartbeat/client/decoder/BuildKiteFeignClientDecoder.java
@@ -1,6 +1,5 @@
package heartbeat.client.decoder;
-import feign.FeignException;
import feign.Response;
import feign.codec.ErrorDecoder;
import heartbeat.util.ExceptionUtil;
@@ -12,12 +11,17 @@ public class BuildKiteFeignClientDecoder implements ErrorDecoder {
@Override
public Exception decode(String methodKey, Response response) {
+ String errorMessage = switch (methodKey) {
+ case "getTokenInfo" -> "Failed to get token info";
+ case "getBuildKiteOrganizationsInfo" -> "Failed to get BuildKite OrganizationsInfo info";
+ case "getPipelineInfo" -> "Failed to get pipeline info";
+ case "getPipelineSteps" -> "Failed to get pipeline steps";
+ case "getPipelineStepsInfo" -> "Failed to get pipeline steps info";
+ default -> "Failed to get buildkite info";
+ };
+
log.error("Failed to get BuildKite info_response status: {}, method key: {}", response.status(), methodKey);
HttpStatus statusCode = HttpStatus.valueOf(response.status());
- FeignException exception = FeignException.errorStatus(methodKey, response);
- String errorMessage = String.format("Failed to get BuildKite info_status: %s, reason: %s", statusCode,
- exception.getMessage());
-
return ExceptionUtil.handleCommonFeignClientException(statusCode, errorMessage);
}
diff --git a/backend/src/main/java/heartbeat/client/decoder/GitHubFeignClientDecoder.java b/backend/src/main/java/heartbeat/client/decoder/GitHubFeignClientDecoder.java
index cd0dbb235d..a71b1c8b57 100644
--- a/backend/src/main/java/heartbeat/client/decoder/GitHubFeignClientDecoder.java
+++ b/backend/src/main/java/heartbeat/client/decoder/GitHubFeignClientDecoder.java
@@ -1,6 +1,5 @@
package heartbeat.client.decoder;
-import feign.FeignException;
import feign.Response;
import feign.codec.ErrorDecoder;
import heartbeat.util.ExceptionUtil;
@@ -12,13 +11,18 @@ public class GitHubFeignClientDecoder implements ErrorDecoder {
@Override
public Exception decode(String methodKey, Response response) {
+ String errorMessage = switch (methodKey) {
+ case "verifyToken" -> "Failed to verify token";
+ case "verifyCanReadTargetBranch" -> "Failed to verify canRead target branch";
+ case "getCommitInfo" -> "Failed to get commit info";
+ case "getPullRequestCommitInfo" -> "Failed to get pull request commit info";
+ case "getPullRequestListInfo" -> "Failed to get pull request list info";
+ default -> "Failed to get github info";
+ };
+
log.error("Failed to get GitHub info_response status: {}, method key: {}", response.status(), methodKey);
HttpStatus statusCode = HttpStatus.valueOf(response.status());
- FeignException exception = FeignException.errorStatus(methodKey, response);
- String errorMessage = String.format("Failed to get GitHub info_status: %s, reason: %s", statusCode,
- exception.getMessage());
return ExceptionUtil.handleCommonFeignClientException(statusCode, errorMessage);
-
}
}
diff --git a/backend/src/main/java/heartbeat/client/decoder/JiraFeignClientDecoder.java b/backend/src/main/java/heartbeat/client/decoder/JiraFeignClientDecoder.java
index a858ad4b97..8af55eed41 100644
--- a/backend/src/main/java/heartbeat/client/decoder/JiraFeignClientDecoder.java
+++ b/backend/src/main/java/heartbeat/client/decoder/JiraFeignClientDecoder.java
@@ -1,6 +1,5 @@
package heartbeat.client.decoder;
-import feign.FeignException;
import feign.Response;
import feign.codec.ErrorDecoder;
import heartbeat.util.ExceptionUtil;
@@ -12,11 +11,19 @@ public class JiraFeignClientDecoder implements ErrorDecoder {
@Override
public Exception decode(String methodKey, Response response) {
+ String errorMessage = switch (methodKey) {
+ case "getJiraBoardConfiguration" -> "Failed to get jira board configuration";
+ case "getColumnStatusCategory" -> "Failed to get column status category";
+ case "getJiraCards" -> "Failed to get jira cards";
+ case "getJiraCardHistoryByCount" -> "Failed to get jira card history by count";
+ case "getTargetField" -> "Failed to get target field";
+ case "getBoard" -> "Failed to get board";
+ case "getProject" -> "Failed to get project";
+ default -> "Failed to get jira info";
+ };
+
log.error("Failed to get Jira info_response status: {}, method key: {}", response.status(), methodKey);
HttpStatus statusCode = HttpStatus.valueOf(response.status());
- FeignException exception = FeignException.errorStatus(methodKey, response);
- String errorMessage = String.format("Failed to get Jira info_status: %s, reason: %s", statusCode,
- exception.getMessage());
return ExceptionUtil.handleCommonFeignClientException(statusCode, errorMessage);
}
diff --git a/backend/src/main/java/heartbeat/client/dto/board/jira/HistoryDetail.java b/backend/src/main/java/heartbeat/client/dto/board/jira/HistoryDetail.java
index 2ce8c78b28..bed9f960b1 100644
--- a/backend/src/main/java/heartbeat/client/dto/board/jira/HistoryDetail.java
+++ b/backend/src/main/java/heartbeat/client/dto/board/jira/HistoryDetail.java
@@ -22,6 +22,8 @@ public class HistoryDetail implements Serializable {
private Actor actor;
+ private String fieldDisplayName;
+
@Getter
@Setter
@Builder
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/AuthorOuter.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/AuthorOuter.java
deleted file mode 100644
index 566a81519c..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/AuthorOuter.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class AuthorOuter implements Serializable {
-
- private String login;
-
- private String id;
-
- @JsonProperty("node_id")
- private String nodeId;
-
- @JsonProperty("avatar_url")
- private String avatarUrl;
-
- @JsonProperty("gravatar_id")
- private String gravatarId;
-
- private String url;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
- @JsonProperty("followers_url")
- private String followersUrl;
-
- @JsonProperty("following_url")
- private String followingUrl;
-
- @JsonProperty("gists_url")
- private String gistsUrl;
-
- @JsonProperty("starred_url")
- private String starredUrl;
-
- @JsonProperty("subscriptions_url")
- private String subscriptionsUrl;
-
- @JsonProperty("organizations_url")
- private String organizationsUrl;
-
- @JsonProperty("repos_url")
- private String reposUrl;
-
- @JsonProperty("events_url")
- private String eventsUrl;
-
- @JsonProperty("received_events_url")
- private String receivedEventsUrl;
-
- private String type;
-
- private Boolean siteAdmin;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Base.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Base.java
deleted file mode 100644
index e2bc62608b..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Base.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Base implements Serializable {
-
- private String label;
-
- private String ref;
-
- private String sha;
-
- private User user;
-
- private Repo repo;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Comment.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Comment.java
deleted file mode 100644
index 84d4dfbfcb..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Comment.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Comment implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Commits.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Commits.java
deleted file mode 100644
index 6a6087d8fc..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Commits.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Commits implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/CommitterOuter.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/CommitterOuter.java
deleted file mode 100644
index 23eb7d5cef..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/CommitterOuter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class CommitterOuter implements Serializable {
-
- private String login;
-
- private String id;
-
- @JsonProperty("node_id")
- private String nodeId;
-
- @JsonProperty("avatar_url")
- private String avatarUrl;
-
- @JsonProperty("gravatar_id")
- private String gravatarId;
-
- private String url;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
- @JsonProperty("followers_url")
- private String followersUrl;
-
- @JsonProperty("following_url")
- private String followingUrl;
-
- @JsonProperty("gists_url")
- private String gistsUrl;
-
- @JsonProperty("starred_url")
- private String starredUrl;
-
- @JsonProperty("subscriptions_url")
- private String subscriptionsUrl;
-
- @JsonProperty("organizations_url")
- private String organizationsUrl;
-
- @JsonProperty("repos_url")
- private String reposUrl;
-
- @JsonProperty("events_url")
- private String eventsUrl;
-
- @JsonProperty("received_events_url")
- private String receivedEventsUrl;
-
- private String type;
-
- @JsonProperty("site_admin")
- private Boolean siteAdmin;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/File.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/File.java
deleted file mode 100644
index 5279069781..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/File.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class File implements Serializable {
-
- private String sha;
-
- private String filename;
-
- private String status;
-
- private Integer additions;
-
- private Integer deletions;
-
- private Integer changes;
-
- @JsonProperty("blob_url")
- private String blobUrl;
-
- @JsonProperty("raw_url")
- private String rawUrl;
-
- @JsonProperty("contents_url")
- private String contentsUrl;
-
- private String patch;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/GitHubPull.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/GitHubPull.java
deleted file mode 100644
index 5ff0372e2b..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/GitHubPull.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class GitHubPull {
-
- private String createdAt;
-
- private String mergedAt;
-
- private Integer number;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Head.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Head.java
deleted file mode 100644
index dab44a8da7..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Head.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Head implements Serializable {
-
- private String label;
-
- private String ref;
-
- private String sha;
-
- private User user;
-
- private Repo repo;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Html.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Html.java
deleted file mode 100644
index c9f7b0d42c..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Html.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Html implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Issue.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Issue.java
deleted file mode 100644
index b935061bb3..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Issue.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Issue implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/License.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/License.java
deleted file mode 100644
index 46ce69b932..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/License.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class License implements Serializable {
-
- private String key;
-
- private String name;
-
- @JsonProperty("spdx_id")
- private String spdxId;
-
- private String url;
-
- @JsonProperty("node_id")
- private String nodeId;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/LinkCollection.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/LinkCollection.java
deleted file mode 100644
index 5caf031822..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/LinkCollection.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class LinkCollection implements Serializable {
-
- private Self self;
-
- private Html html;
-
- private Issue issue;
-
- private Comment comment;
-
- @JsonProperty("review_comments")
- private ReviewComments reviewComments;
-
- @JsonProperty("review_comment")
- private ReviewComment reviewComment;
-
- private Commits commits;
-
- private Status status;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Owner.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Owner.java
deleted file mode 100644
index 24fd8a1a92..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Owner.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Owner implements Serializable {
-
- private String login;
-
- private Integer id;
-
- @JsonProperty("node_id")
- private String nodeId;
-
- @JsonProperty("avatar_url")
- private String avatarUrl;
-
- @JsonProperty("gravatar_id")
- private String gravatarId;
-
- private String url;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
- @JsonProperty("followers_url")
- private String followersUrl;
-
- @JsonProperty("following_url")
- private String followingUrl;
-
- @JsonProperty("gists_url")
- private String gistsUrl;
-
- @JsonProperty("starred_url")
- private String starredUrl;
-
- @JsonProperty("subscriptions_url")
- private String subscriptionsUrl;
-
- @JsonProperty("organizations_url")
- private String organizationsUrl;
-
- @JsonProperty("repos_url")
- private String reposUrl;
-
- @JsonProperty("events_url")
- private String eventsUrl;
-
- @JsonProperty("received_events_url")
- private String receivedEventsUrl;
-
- private String type;
-
- @JsonProperty("site_admin")
- private Boolean siteAdmin;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Parent.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Parent.java
deleted file mode 100644
index 291a39b40f..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Parent.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Parent implements Serializable {
-
- private String sha;
-
- private String url;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/PullRequestInfo.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/PullRequestInfo.java
index 2b2f06ee99..5572cf6cd9 100644
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/PullRequestInfo.java
+++ b/backend/src/main/java/heartbeat/client/dto/codebase/github/PullRequestInfo.java
@@ -3,13 +3,13 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
-import lombok.Data;
import lombok.Getter;
import lombok.NoArgsConstructor;
+import lombok.Setter;
import java.io.Serializable;
-@Data
+@Setter
@Builder
@NoArgsConstructor
@AllArgsConstructor
@@ -18,6 +18,8 @@ public class PullRequestInfo implements Serializable {
private Integer number;
+ private String url;
+
@JsonProperty("created_at")
private String createdAt;
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Repo.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Repo.java
deleted file mode 100644
index ee55d42092..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Repo.java
+++ /dev/null
@@ -1,240 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-import java.util.List;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Repo implements Serializable {
-
- private Integer id;
-
- @JsonProperty("node_id")
- private String nodeId;
-
- private String name;
-
- @JsonProperty("full_name")
- private String fullName;
-
- @JsonProperty("private")
- private Boolean _private;
-
- private Owner owner;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
- private String description;
-
- private Boolean fork;
-
- private String url;
-
- @JsonProperty("forks_url")
- private String forksUrl;
-
- @JsonProperty("keys_url")
- private String keysUrl;
-
- @JsonProperty("collaborators_url")
- private String collaboratorsUrl;
-
- @JsonProperty("teams_url")
- private String teamsUrl;
-
- @JsonProperty("hooks_url")
- private String hooksUrl;
-
- @JsonProperty("issue_events_url")
- private String issueEventsUrl;
-
- @JsonProperty("events_url")
- private String eventsUrl;
-
- @JsonProperty("assignees_url")
- private String assigneesUrl;
-
- @JsonProperty("branches_url")
- private String branchesUrl;
-
- @JsonProperty("tags_url")
- private String tagsUrl;
-
- @JsonProperty("blobs_url")
- private String blobsUrl;
-
- @JsonProperty("git_tags_url")
- private String gitTagsUrl;
-
- @JsonProperty("git_refs_url")
- private String gitRefsUrl;
-
- @JsonProperty("trees_url")
- private String treesUrl;
-
- @JsonProperty("statuses_url")
- private String statusesUrl;
-
- @JsonProperty("languages_url")
- private String languagesUrl;
-
- @JsonProperty("stargazers_url")
- private String stargazersUrl;
-
- @JsonProperty("contributors_url")
- private String contributorsUrl;
-
- @JsonProperty("subscribers_url")
- private String subscribersUrl;
-
- @JsonProperty("subscription_url")
- private String subscriptionUrl;
-
- @JsonProperty("commits_url")
- private String commitsUrl;
-
- @JsonProperty("git_commits_url")
- private String gitCommitsUrl;
-
- @JsonProperty("comments_url")
- private String commentsUrl;
-
- @JsonProperty("issue_comment_url")
- private String issueCommentUrl;
-
- @JsonProperty("contents_url")
- private String contentsUrl;
-
- @JsonProperty("compare_url")
- private String compareUrl;
-
- @JsonProperty("merges_url")
- private String mergesUrl;
-
- @JsonProperty("archive_url")
- private String archiveUrl;
-
- @JsonProperty("downloads_url")
- private String downloadsUrl;
-
- @JsonProperty("issues_url")
- private String issuesUrl;
-
- @JsonProperty("pulls_url")
- private String pullsUrl;
-
- @JsonProperty("milestones_url")
- private String milestonesUrl;
-
- @JsonProperty("notifications_url")
- private String notificationsUrl;
-
- @JsonProperty("labels_url")
- private String labelsUrl;
-
- @JsonProperty("releases_url")
- private String releasesUrl;
-
- @JsonProperty("deployments_url")
- private String deploymentsUrl;
-
- @JsonProperty("created_at")
- private String createdAt;
-
- @JsonProperty("updated_at")
- private String updatedAt;
-
- @JsonProperty("pushed_at")
- private String pushedAt;
-
- @JsonProperty("git_url")
- private String gitUrl;
-
- @JsonProperty("ssh_url")
- private String sshUrl;
-
- @JsonProperty("clone_url")
- private String cloneUrl;
-
- @JsonProperty("svn_url")
- private String svnUrl;
-
- private String homepage;
-
- private Integer size;
-
- @JsonProperty("stargazers_count")
- private Integer stargazersCount;
-
- @JsonProperty("watchers_count")
- private Integer watchersCount;
-
- private String language;
-
- @JsonProperty("has_issues")
- private Boolean hasIssues;
-
- @JsonProperty("has_projects")
- private Boolean hasProjects;
-
- @JsonProperty("has_downloads")
- private Boolean hasDownloads;
-
- @JsonProperty("has_wiki")
- private Boolean hasWiki;
-
- @JsonProperty("has_pages")
- private Boolean hasPages;
-
- @JsonProperty("has_discussions")
- private Boolean hasDiscussions;
-
- @JsonProperty("forks_count")
- private Integer forksCount;
-
- @JsonProperty("mirror_url")
- private Object mirrorUrl;
-
- @JsonProperty("archived")
- private Boolean archived;
-
- private Boolean disabled;
-
- @JsonProperty("open_issues_count")
- private Integer openIssuesCount;
-
- private License license;
-
- @JsonProperty("allow_forking")
- private Boolean allowForking;
-
- @JsonProperty("is_template")
- private Boolean isTemplate;
-
- @JsonProperty("web_commit_signoff_required")
- private Boolean webCommitSignoffRequired;
-
- private List topics;
-
- private String visibility;
-
- private Integer forks;
-
- @JsonProperty("open_issues")
- private Integer openIssues;
-
- private Integer watchers;
-
- @JsonProperty("default_branch")
- private String defaultBranch;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComment.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComment.java
deleted file mode 100644
index 52d50eff3e..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComment.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class ReviewComment implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComments.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComments.java
deleted file mode 100644
index b040cf3f29..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/ReviewComments.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class ReviewComments implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Self.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Self.java
deleted file mode 100644
index 20aa3b6afb..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Self.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Self implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Stats.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Stats.java
deleted file mode 100644
index ce1dd29918..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Stats.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Stats implements Serializable {
-
- private Integer total;
-
- private Integer additions;
-
- private Integer deletions;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Status.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Status.java
deleted file mode 100644
index 59d115d7f9..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Status.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Status implements Serializable {
-
- private String href;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Tree.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Tree.java
deleted file mode 100644
index 2bb563eed6..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Tree.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Tree implements Serializable {
-
- private String sha;
-
- private String url;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/User.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/User.java
deleted file mode 100644
index 4d82094d20..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/User.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class User implements Serializable {
-
- private String login;
-
- private Integer id;
-
- @JsonProperty("node_id")
- private String nodeId;
-
- @JsonProperty("avatar_url")
- private String avatarUrl;
-
- @JsonProperty("gravatar_id")
- private String gravatarId;
-
- private String url;
-
- @JsonProperty("html_url")
- private String htmlUrl;
-
- @JsonProperty("followers_url")
- private String followersUrl;
-
- @JsonProperty("following_url")
- private String followingUrl;
-
- @JsonProperty("gists_url")
- private String gistsUrl;
-
- @JsonProperty("starred_url")
- private String starredUrl;
-
- @JsonProperty("subscriptions_url")
- private String subscriptionsUrl;
-
- @JsonProperty("organizations_url")
- private String organizationsUrl;
-
- @JsonProperty("repos_url")
- private String reposUrl;
-
- @JsonProperty("events_url")
- private String eventsUrl;
-
- @JsonProperty("received_events_url")
- private String receivedEventsUrl;
-
- private String type;
-
- @JsonProperty("site_admin")
- private Boolean siteAdmin;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/codebase/github/Verification.java b/backend/src/main/java/heartbeat/client/dto/codebase/github/Verification.java
deleted file mode 100644
index 5991419b87..0000000000
--- a/backend/src/main/java/heartbeat/client/dto/codebase/github/Verification.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package heartbeat.client.dto.codebase.github;
-
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.io.Serializable;
-
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class Verification implements Serializable {
-
- private Boolean verified;
-
- private String reason;
-
- private String signature;
-
- private String payload;
-
-}
diff --git a/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/BuildKitePipelineDTO.java b/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/BuildKitePipelineDTO.java
index 4c6e0ac6bf..0d9e765205 100644
--- a/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/BuildKitePipelineDTO.java
+++ b/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/BuildKitePipelineDTO.java
@@ -1,14 +1,12 @@
package heartbeat.client.dto.pipeline.buildkite;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
-import java.util.Date;
import java.util.List;
@Data
@@ -18,85 +16,12 @@
@JsonIgnoreProperties(ignoreUnknown = true)
public class BuildKitePipelineDTO implements Serializable {
- private String id;
-
- @JsonProperty("graphql_id")
- private String graphqlId;
-
- private String url;
-
- @JsonProperty("web_url")
- private String webUrl;
-
private String name;
- private String description;
-
private String slug;
private String repository;
- @JsonProperty("cluster_id")
- private String clusterId;
-
- @JsonProperty("branch_configuration")
- private String branchConfiguration;
-
- @JsonProperty("default_branch")
- private String defaultBranch;
-
- @JsonProperty("skip_queued_branch_builds")
- private String skipQueuedBranchBuilds;
-
- @JsonProperty("skip_queued_branch_builds_filter")
- private String skipQueuedBranchBuildsFilter;
-
- @JsonProperty("cancel_running_branch_builds")
- private String cancelRunningBranchBuilds;
-
- @JsonProperty("cancel_running_branch_builds_filter")
- private String cancelRunningBranchBuildsFilter;
-
- @JsonProperty("allow_rebuilds")
- private String allowRebuilds;
-
- private ProviderDTO provider;
-
- @JsonProperty("builds_url")
- private String buildsUrl;
-
- @JsonProperty("badge_url")
- private String badgeUrl;
-
- private CreatedByDTO createdBy;
-
- @JsonProperty("created_at")
- private Date createdAt;
-
- @JsonProperty("archived_at")
- private Date archivedAt;
-
- private EnvDTO env;
-
- @JsonProperty("scheduled_builds_count")
- private int scheduledBuildsCount;
-
- @JsonProperty("running_builds_count")
- private int runningBuildsCount;
-
- @JsonProperty("scheduled_jobs_count")
- private int scheduledJobsCount;
-
- @JsonProperty("running_jobs_count")
- private int runningJobsCount;
-
- @JsonProperty("waiting_jobs_count")
- private int waitingJobsCount;
-
- private String visibility;
-
- private List tags;
-
private List steps;
}
diff --git a/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/PageBuildKitePipelineInfoDTO.java b/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/PageBuildKitePipelineInfoDTO.java
new file mode 100644
index 0000000000..1f0071b361
--- /dev/null
+++ b/backend/src/main/java/heartbeat/client/dto/pipeline/buildkite/PageBuildKitePipelineInfoDTO.java
@@ -0,0 +1,23 @@
+package heartbeat.client.dto.pipeline.buildkite;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.List;
+
+@Data
+@JsonIgnoreProperties(ignoreUnknown = true)
+@AllArgsConstructor
+@NoArgsConstructor
+@Builder
+public class PageBuildKitePipelineInfoDTO implements Serializable {
+
+ private int totalPage;
+
+ private List<BuildKitePipelineDTO> firstPageInfo;
+
+}
diff --git a/backend/src/main/java/heartbeat/config/CacheConfig.java b/backend/src/main/java/heartbeat/config/CacheConfig.java
index 5f0f03c7c4..3ef122d294 100644
--- a/backend/src/main/java/heartbeat/config/CacheConfig.java
+++ b/backend/src/main/java/heartbeat/config/CacheConfig.java
@@ -2,19 +2,14 @@
import heartbeat.client.dto.board.jira.CardHistoryResponseDTO;
import heartbeat.client.dto.board.jira.FieldResponseDTO;
+import heartbeat.client.dto.board.jira.HolidaysResponseDTO;
import heartbeat.client.dto.board.jira.JiraBoardConfigDTO;
import heartbeat.client.dto.board.jira.JiraBoardProject;
import heartbeat.client.dto.board.jira.JiraBoardVerifyDTO;
import heartbeat.client.dto.board.jira.StatusSelfDTO;
-import java.time.Duration;
-import java.util.List;
-import javax.cache.CacheManager;
-import javax.cache.Caching;
-import javax.cache.spi.CachingProvider;
-
-import heartbeat.client.dto.board.jira.HolidaysResponseDTO;
import heartbeat.client.dto.codebase.github.CommitInfo;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteTokenInfo;
+import heartbeat.client.dto.pipeline.buildkite.PageBuildKitePipelineInfoDTO;
import heartbeat.client.dto.pipeline.buildkite.PageStepsInfoDto;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ExpiryPolicyBuilder;
@@ -25,6 +20,12 @@
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import javax.cache.CacheManager;
+import javax.cache.Caching;
+import javax.cache.spi.CachingProvider;
+import java.time.Duration;
+import java.util.List;
+
@Configuration
@EnableCaching
public class CacheConfig {
@@ -45,7 +46,7 @@ public CacheManager ehCacheManager() {
cacheManager.createCache("holidayResult", getCacheConfiguration(HolidaysResponseDTO.class));
cacheManager.createCache("tokenInfo", getCacheConfiguration(BuildKiteTokenInfo.class));
cacheManager.createCache("buildKiteOrganizationInfo", getCacheConfiguration(List.class));
- cacheManager.createCache("pipelineInfo", getCacheConfiguration(List.class));
+ cacheManager.createCache("pagePipelineInfo", getCacheConfiguration(PageBuildKitePipelineInfoDTO.class));
cacheManager.createCache("pageStepsInfo", getCacheConfiguration(PageStepsInfoDto.class));
cacheManager.createCache("pipelineStepsInfo", getCacheConfiguration(List.class));
cacheManager.createCache("githubOrganizationInfo", getCacheConfiguration(List.class));
diff --git a/backend/src/main/java/heartbeat/config/SwaggerConfig.java b/backend/src/main/java/heartbeat/config/SwaggerConfig.java
index 2c6fe9c26e..b3a7e323b6 100644
--- a/backend/src/main/java/heartbeat/config/SwaggerConfig.java
+++ b/backend/src/main/java/heartbeat/config/SwaggerConfig.java
@@ -4,6 +4,8 @@
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.servers.Server;
+import org.springframework.beans.factory.annotation.Value;
+
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -12,11 +14,14 @@
@Configuration
public class SwaggerConfig {
+ @Value("${heartbeat.swagger.host}")
+ private String swaggerHost;
+
@Bean
public OpenAPI customOpenAPI() {
return new OpenAPI().components(new Components())
.info(new Info().title("Backend API").version("1.0"))
- .servers(List.of(new Server().url("http://13.214.14.43:4321/api/v1")));
+ .servers(List.of(new Server().url(String.format("%s/api/v1", this.swaggerHost))));
}
}
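
Note: the Swagger server URL is now injected from the heartbeat.swagger.host property instead of being hard-coded. A sketch of the backing configuration, assuming a standard Spring application.yml (file location and example value are assumptions, not part of this diff):

```yaml
# Assumed application.yml entry read by @Value("${heartbeat.swagger.host}")
heartbeat:
  swagger:
    host: http://localhost:4321   # example only; Swagger will advertise <host>/api/v1
```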
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/request/CardStepsEnum.java b/backend/src/main/java/heartbeat/controller/board/dto/request/CardStepsEnum.java
index e41b4d5383..7ee6bc3394 100644
--- a/backend/src/main/java/heartbeat/controller/board/dto/request/CardStepsEnum.java
+++ b/backend/src/main/java/heartbeat/controller/board/dto/request/CardStepsEnum.java
@@ -1,21 +1,32 @@
package heartbeat.controller.board.dto.request;
+import java.util.Map;
+import java.util.Set;
+
public enum CardStepsEnum {
- TODO("To do"), ANALYSE("Analysis"), DEVELOPMENT("In Dev"), BLOCK("Block"), TESTING("Testing"), REVIEW("Review"),
- DONE("Done"), CLOSED("Closed"), WAITING("Waiting for testing"), FLAG("FLAG"), REMOVEFLAG("removeFlag"),
- UNKNOWN("UNKNOWN");
+ TODO("To do", "To do"), ANALYSE("Analysis", "Analysis"), DEVELOPMENT("In Dev", "In dev"), BLOCK("Block", "Block"),
+ FLAG("FLAG", "Flag"), REMOVEFLAG("removeFlag", "Remove flag"), REVIEW("Review", "Review"),
+ WAITING("Waiting for testing", "Waiting for testing"), TESTING("Testing", "Testing"), DONE("Done", "Done"),
+ CLOSED("Closed", "Closed"), UNKNOWN("UNKNOWN", "Unknown");
private final String value;
- CardStepsEnum(String value) {
+ private final String alias;
+
+ CardStepsEnum(String value, String alias) {
this.value = value;
+ this.alias = alias;
}
public String getValue() {
return value;
}
+ public String getAlias() {
+ return alias;
+ }
+
public static CardStepsEnum fromValue(String type) {
for (CardStepsEnum cardStepsEnum : values()) {
if (cardStepsEnum.value.equals(type)) {
@@ -25,4 +36,10 @@ public static CardStepsEnum fromValue(String type) {
throw new IllegalArgumentException("Type does not find!");
}
+ public static final Map<CardStepsEnum, Set<CardStepsEnum>> reworkJudgmentMap = Map.of(TODO,
+ Set.of(ANALYSE, DEVELOPMENT, BLOCK, FLAG, REVIEW, WAITING, TESTING, DONE), ANALYSE,
+ Set.of(DEVELOPMENT, BLOCK, FLAG, REVIEW, WAITING, TESTING, DONE), DEVELOPMENT,
+ Set.of(BLOCK, FLAG, REVIEW, WAITING, TESTING, DONE), BLOCK, Set.of(REVIEW, WAITING, TESTING, DONE), REVIEW,
+ Set.of(WAITING, TESTING, DONE), WAITING, Set.of(TESTING, DONE), TESTING, Set.of(DONE));
+
}
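
Note: reworkJudgmentMap reads as "moving back into the key state from any state in its mapped set counts as one rework". A minimal lookup sketch, assuming direct use of the map (the real calculator may apply extra filtering such as excludedStates):

```java
import heartbeat.controller.board.dto.request.CardStepsEnum;

import java.util.Set;

import static heartbeat.controller.board.dto.request.CardStepsEnum.DEVELOPMENT;
import static heartbeat.controller.board.dto.request.CardStepsEnum.TESTING;
import static heartbeat.controller.board.dto.request.CardStepsEnum.TODO;

// Minimal sketch (not the project's calculator): moving *back* into `reworkState`
// from any state in its mapped set counts as one rework.
class ReworkJudgmentExample {

	static boolean isRework(CardStepsEnum from, CardStepsEnum reworkState) {
		return CardStepsEnum.reworkJudgmentMap.getOrDefault(reworkState, Set.of()).contains(from);
	}

	public static void main(String[] args) {
		System.out.println(isRework(TESTING, DEVELOPMENT)); // true: Testing -> In dev is rework
		System.out.println(isRework(TODO, DEVELOPMENT)); // false: moving forward is not rework
	}

}
```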
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/request/ReworkTimesSetting.java b/backend/src/main/java/heartbeat/controller/board/dto/request/ReworkTimesSetting.java
new file mode 100644
index 0000000000..599fa54907
--- /dev/null
+++ b/backend/src/main/java/heartbeat/controller/board/dto/request/ReworkTimesSetting.java
@@ -0,0 +1,26 @@
+package heartbeat.controller.board.dto.request;
+
+import lombok.Builder;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+@Getter
+@Setter
+@Builder
+public class ReworkTimesSetting {
+
+ private String reworkState;
+
+ private List<String> excludedStates;
+
+ public CardStepsEnum getEnumReworkState() {
+ return CardStepsEnum.fromValue(reworkState);
+ }
+
+ public List<CardStepsEnum> getEnumExcludeStates() {
+ return excludedStates.stream().map(CardStepsEnum::fromValue).toList();
+ }
+
+}
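
Note: ReworkTimesSetting carries plain strings from the request and maps them onto CardStepsEnum via fromValue. A usage sketch (the string values follow the enum's `value` field shown earlier in this diff):

```java
import heartbeat.controller.board.dto.request.ReworkTimesSetting;

import java.util.List;

// Usage sketch: the two getEnum* helpers convert request strings into CardStepsEnum values.
class ReworkTimesSettingExample {

	public static void main(String[] args) {
		ReworkTimesSetting setting = ReworkTimesSetting.builder()
			.reworkState("In Dev")
			.excludedStates(List.of("Block", "Waiting for testing"))
			.build();

		System.out.println(setting.getEnumReworkState()); // DEVELOPMENT
		System.out.println(setting.getEnumExcludeStates()); // [BLOCK, WAITING]
	}

}
```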
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/request/StoryPointsAndCycleTimeRequest.java b/backend/src/main/java/heartbeat/controller/board/dto/request/StoryPointsAndCycleTimeRequest.java
index 62d208f250..51226a9d14 100644
--- a/backend/src/main/java/heartbeat/controller/board/dto/request/StoryPointsAndCycleTimeRequest.java
+++ b/backend/src/main/java/heartbeat/controller/board/dto/request/StoryPointsAndCycleTimeRequest.java
@@ -36,4 +36,6 @@ public class StoryPointsAndCycleTimeRequest {
private boolean treatFlagCardAsBlock;
+ private ReworkTimesSetting reworkTimesSetting;
+
}
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/response/CardCollection.java b/backend/src/main/java/heartbeat/controller/board/dto/response/CardCollection.java
index 29902daaf4..95d0d467b5 100644
--- a/backend/src/main/java/heartbeat/controller/board/dto/response/CardCollection.java
+++ b/backend/src/main/java/heartbeat/controller/board/dto/response/CardCollection.java
@@ -19,4 +19,8 @@ public class CardCollection {
private List<JiraCardDTO> jiraCardDTOList;
+ private int reworkCardNumber;
+
+ private double reworkRatio;
+
}
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/response/JiraCardDTO.java b/backend/src/main/java/heartbeat/controller/board/dto/response/JiraCardDTO.java
index 4c39dc2798..0e07792d13 100644
--- a/backend/src/main/java/heartbeat/controller/board/dto/response/JiraCardDTO.java
+++ b/backend/src/main/java/heartbeat/controller/board/dto/response/JiraCardDTO.java
@@ -8,9 +8,12 @@
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
+import org.apache.commons.collections4.CollectionUtils;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
@Data
@Builder
@@ -26,8 +29,14 @@ public class JiraCardDTO {
private CardCycleTime cardCycleTime;
+ private List<ReworkTimesInfo> reworkTimesInfos;
+
+ private Integer totalReworkTimes;
+
private Object cycleTimeFlat;
+ private Object reworkTimesFlat;
+
@Nullable
private String totalCycleTimeDivideStoryPoints;
@@ -56,4 +65,22 @@ public Object buildCycleTimeFlatObject() {
return cycleTimeFlat;
}
+ @JsonIgnore
+ public Object buildReworkTimesFlatObject() {
+ if (CollectionUtils.isEmpty(this.getReworkTimesInfos())) {
+ return null;
+ }
+ Map<String, Integer> reworkTimesMap = this.getReworkTimesInfos()
+ .stream()
+ .collect(Collectors.toMap(reworkTimesInfo -> reworkTimesInfo.getState().getAlias(),
+ ReworkTimesInfo::getTimes));
+ reworkTimesMap.put("totalReworkTimes", totalReworkTimes);
+ return reworkTimesMap;
+ }
+
+ @JsonIgnore
+ public void calculateTotalReworkTimes() {
+ this.totalReworkTimes = reworkTimesInfos.stream().mapToInt(ReworkTimesInfo::getTimes).sum();
+ }
+
}
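
Note: buildReworkTimesFlatObject flattens per-state rework counts into a map keyed by each state's alias, plus a totalReworkTimes entry produced by calculateTotalReworkTimes. A self-contained sketch of that behaviour (field and type names follow the diff above; the printed ordering is illustrative only):

```java
import heartbeat.controller.board.dto.request.CardStepsEnum;
import heartbeat.controller.board.dto.response.JiraCardDTO;
import heartbeat.controller.board.dto.response.ReworkTimesInfo;

import java.util.List;

// Sketch of the flattening added above: per-state counts keyed by the state alias,
// plus a "totalReworkTimes" entry summed by calculateTotalReworkTimes().
class ReworkFlatteningExample {

	public static void main(String[] args) {
		JiraCardDTO card = JiraCardDTO.builder()
			.reworkTimesInfos(List.of(new ReworkTimesInfo(CardStepsEnum.DEVELOPMENT, 2),
					new ReworkTimesInfo(CardStepsEnum.TESTING, 1)))
			.build();

		card.calculateTotalReworkTimes();
		System.out.println(card.buildReworkTimesFlatObject()); // e.g. {In dev=2, Testing=1, totalReworkTimes=3}
	}

}
```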
diff --git a/backend/src/main/java/heartbeat/controller/board/dto/response/ReworkTimesInfo.java b/backend/src/main/java/heartbeat/controller/board/dto/response/ReworkTimesInfo.java
new file mode 100644
index 0000000000..3318620be4
--- /dev/null
+++ b/backend/src/main/java/heartbeat/controller/board/dto/response/ReworkTimesInfo.java
@@ -0,0 +1,19 @@
+package heartbeat.controller.board.dto.response;
+
+import heartbeat.controller.board.dto.request.CardStepsEnum;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+@AllArgsConstructor
+@Builder
+public class ReworkTimesInfo {
+
+ private CardStepsEnum state;
+
+ private Integer times;
+
+}
diff --git a/backend/src/main/java/heartbeat/controller/report/ReportController.java b/backend/src/main/java/heartbeat/controller/report/ReportController.java
index 7f28eaa25b..cfb7f506cc 100644
--- a/backend/src/main/java/heartbeat/controller/report/ReportController.java
+++ b/backend/src/main/java/heartbeat/controller/report/ReportController.java
@@ -2,7 +2,6 @@
import heartbeat.controller.report.dto.request.GenerateReportRequest;
import heartbeat.controller.report.dto.request.ReportType;
-import heartbeat.controller.report.dto.request.MetricType;
import heartbeat.controller.report.dto.response.CallbackResponse;
import heartbeat.controller.report.dto.response.ReportResponse;
import heartbeat.service.report.GenerateReporterService;
@@ -54,23 +53,15 @@ public InputStreamResource exportCSV(
public ResponseEntity<ReportResponse> generateReport(@PathVariable String reportId) {
log.info("Start to generate report_reportId: {}", reportId);
ReportResponse reportResponse = generateReporterService.getComposedReportResponse(reportId);
- if (reportResponse.isAllMetricsCompleted()) {
- log.info("Successfully generate Report_reportId: {}, reports: {}", reportId, reportResponse);
- generateReporterService.generateCSVForMetric(reportResponse, reportId);
- return ResponseEntity.status(HttpStatus.CREATED).body(reportResponse);
- }
return ResponseEntity.status(HttpStatus.OK).body(reportResponse);
}
- @PostMapping("{metricType}")
- public ResponseEntity<CallbackResponse> generateReport(
- @Schema(type = "string", allowableValues = { "board", "dora" },
- accessMode = Schema.AccessMode.READ_ONLY) @PathVariable MetricType metricType,
- @RequestBody GenerateReportRequest request) {
- log.info("Start to generate report_metricType: {}", metricType);
- reportService.generateReportByType(request, metricType);
+ @PostMapping
+ public ResponseEntity<CallbackResponse> generateReport(@RequestBody GenerateReportRequest request) {
+ log.info("Start to generate report");
+ reportService.generateReport(request);
String callbackUrl = "/reports/" + request.getCsvTimeStamp();
- log.info("Successfully generate report_metricsType: {}", metricType);
+ log.info("Successfully generate report");
return ResponseEntity.status(HttpStatus.ACCEPTED)
.body(CallbackResponse.builder().callbackUrl(callbackUrl).interval(interval).build());
}
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/request/GenerateReportRequest.java b/backend/src/main/java/heartbeat/controller/report/dto/request/GenerateReportRequest.java
index 96c273cd5e..5444a78e7e 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/request/GenerateReportRequest.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/request/GenerateReportRequest.java
@@ -29,6 +29,8 @@ public class GenerateReportRequest {
private List<String> metrics;
+ private List<MetricType> metricTypes;
+
private JiraBoardSetting jiraBoardSetting;
private BuildKiteSetting buildKiteSetting;
@@ -40,7 +42,10 @@ public class GenerateReportRequest {
@JsonIgnore
public List<String> getPipelineMetrics() {
- return this.metrics.stream().map(String::toLowerCase).filter(MetricsUtil.buildKiteMetrics::contains).toList();
+ return this.metrics.stream()
+ .map(String::toLowerCase)
+ .filter(MetricsUtil.BUILDKITE_METRICS.getValue()::contains)
+ .toList();
}
public List<String> getMetrics() {
@@ -49,12 +54,18 @@ public List getMetrics() {
@JsonIgnore
public List<String> getSourceControlMetrics() {
- return this.metrics.stream().map(String::toLowerCase).filter(MetricsUtil.codebaseMetrics::contains).toList();
+ return this.metrics.stream()
+ .map(String::toLowerCase)
+ .filter(MetricsUtil.CODEBASE_METRICS.getValue()::contains)
+ .toList();
}
@JsonIgnore
public List<String> getBoardMetrics() {
- return this.metrics.stream().map(String::toLowerCase).filter(MetricsUtil.kanbanMetrics::contains).toList();
+ return this.metrics.stream()
+ .map(String::toLowerCase)
+ .filter(MetricsUtil.KANBAN_METRICS.getValue()::contains)
+ .toList();
}
@JsonIgnore
@@ -72,11 +83,6 @@ public String getBoardReportId() {
return IdUtil.getBoardReportId(this.csvTimeStamp);
}
- @JsonIgnore
- public String getDoraReportId() {
- return IdUtil.getDoraReportId(this.csvTimeStamp);
- }
-
@JsonIgnore
public GenerateReportRequest toPipelineRequest() {
return GenerateReportRequest.builder()
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/request/JiraBoardSetting.java b/backend/src/main/java/heartbeat/controller/report/dto/request/JiraBoardSetting.java
index 6b3a1ae0d7..7a05c9061c 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/request/JiraBoardSetting.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/request/JiraBoardSetting.java
@@ -1,6 +1,7 @@
package heartbeat.controller.report.dto.request;
import heartbeat.controller.board.dto.request.RequestJiraBoardColumnSetting;
+import heartbeat.controller.board.dto.request.ReworkTimesSetting;
import heartbeat.controller.board.dto.response.TargetField;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -39,4 +40,6 @@ public class JiraBoardSetting {
private List<TargetField> overrideFields;
+ private ReworkTimesSetting reworkTimesSetting;
+
}
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/request/MetricEnum.java b/backend/src/main/java/heartbeat/controller/report/dto/request/MetricEnum.java
index 82139aae22..caf477f831 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/request/MetricEnum.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/request/MetricEnum.java
@@ -3,8 +3,9 @@
public enum MetricEnum {
VELOCITY("velocity"), CYCLE_TIME("cycle time"), CLASSIFICATION("classification"),
- DEPLOYMENT_FREQUENCY("deployment frequency"), CHANGE_FAILURE_RATE("change failure rate"),
- MEAN_TIME_TO_RECOVERY("mean time to recovery"), LEAD_TIME_FOR_CHANGES("lead time for changes");
+ DEPLOYMENT_FREQUENCY("deployment frequency"), DEV_CHANGE_FAILURE_RATE("dev change failure rate"),
+ DEV_MEAN_TIME_TO_RECOVERY("dev mean time to recovery"), LEAD_TIME_FOR_CHANGES("lead time for changes"),
+ REWORK_TIMES("rework times");
private final String value;
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/AvgChangeFailureRate.java b/backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevChangeFailureRate.java
similarity index 90%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/AvgChangeFailureRate.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevChangeFailureRate.java
index ecfc2826c4..fa3c4c6b10 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/AvgChangeFailureRate.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevChangeFailureRate.java
@@ -9,7 +9,7 @@
@Builder
@NoArgsConstructor
@AllArgsConstructor
-public class AvgChangeFailureRate {
+public class AvgDevChangeFailureRate {
@Builder.Default
private String name = "Average";
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/AvgMeanTimeToRecovery.java b/backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevMeanTimeToRecovery.java
similarity index 75%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/AvgMeanTimeToRecovery.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevMeanTimeToRecovery.java
index e828c92c10..093955b721 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/AvgMeanTimeToRecovery.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/AvgDevMeanTimeToRecovery.java
@@ -10,9 +10,10 @@
@NoArgsConstructor
@AllArgsConstructor
@Builder
-public class AvgMeanTimeToRecovery {
+public class AvgDevMeanTimeToRecovery {
- private final String name = "Average";
+ @Builder.Default
+ private String name = "Average";
private BigDecimal timeToRecovery;
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRate.java b/backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRate.java
similarity index 58%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRate.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRate.java
index 0faf01c901..8b9ee253b6 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRate.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRate.java
@@ -11,10 +11,10 @@
@Builder
@NoArgsConstructor
@AllArgsConstructor
-public class ChangeFailureRate {
+public class DevChangeFailureRate {
- private AvgChangeFailureRate avgChangeFailureRate;
+ private AvgDevChangeFailureRate avgDevChangeFailureRate;
- private List<ChangeFailureRateOfPipeline> changeFailureRateOfPipelines;
+ private List<DevChangeFailureRateOfPipeline> devChangeFailureRateOfPipelines;
}
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRateOfPipeline.java b/backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRateOfPipeline.java
similarity index 88%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRateOfPipeline.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRateOfPipeline.java
index 04cb80a73e..bd74a96871 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/ChangeFailureRateOfPipeline.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/DevChangeFailureRateOfPipeline.java
@@ -9,7 +9,7 @@
@Builder
@NoArgsConstructor
@AllArgsConstructor
-public class ChangeFailureRateOfPipeline {
+public class DevChangeFailureRateOfPipeline {
private String name;
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecovery.java b/backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecovery.java
similarity index 57%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecovery.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecovery.java
index 812236594d..8c0a30270c 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecovery.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecovery.java
@@ -10,10 +10,10 @@
@NoArgsConstructor
@AllArgsConstructor
@Builder
-public class MeanTimeToRecovery {
+public class DevMeanTimeToRecovery {
- private AvgMeanTimeToRecovery avgMeanTimeToRecovery;
+ private AvgDevMeanTimeToRecovery avgDevMeanTimeToRecovery;
- private List<MeanTimeToRecoveryOfPipeline> meanTimeRecoveryPipelines;
+ private List<DevMeanTimeToRecoveryOfPipeline> devMeanTimeToRecoveryOfPipelines;
}
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecoveryOfPipeline.java b/backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecoveryOfPipeline.java
similarity index 58%
rename from backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecoveryOfPipeline.java
rename to backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecoveryOfPipeline.java
index b5a1df9d00..2462864884 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/MeanTimeToRecoveryOfPipeline.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/DevMeanTimeToRecoveryOfPipeline.java
@@ -1,23 +1,21 @@
package heartbeat.controller.report.dto.response;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.math.BigDecimal;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
+import java.math.BigDecimal;
+
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
-public class MeanTimeToRecoveryOfPipeline {
+public class DevMeanTimeToRecoveryOfPipeline {
- @JsonProperty("name")
- private String pipelineName;
+ private String name;
- @JsonProperty("step")
- private String pipelineStep;
+ private String step;
private BigDecimal timeToRecovery;
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/MetricsDataCompleted.java b/backend/src/main/java/heartbeat/controller/report/dto/response/MetricsDataCompleted.java
index bea9b9ef2c..c3fc9fe488 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/MetricsDataCompleted.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/MetricsDataCompleted.java
@@ -6,6 +6,8 @@
import lombok.NoArgsConstructor;
import lombok.Setter;
+import java.util.Optional;
+
@Builder
@NoArgsConstructor
@AllArgsConstructor
@@ -17,6 +19,10 @@ public class MetricsDataCompleted {
private Boolean doraMetricsCompleted;
+ private Boolean overallMetricCompleted;
+
+ private Boolean isSuccessfulCreateCsvFile;
+
public Boolean boardMetricsCompleted() {
return boardMetricsCompleted;
}
@@ -25,4 +31,17 @@ public Boolean doraMetricsCompleted() {
return doraMetricsCompleted;
}
+ public Boolean overallMetricCompleted() {
+ return overallMetricCompleted;
+ }
+
+ public Boolean isSuccessfulCreateCsvFile() {
+ return isSuccessfulCreateCsvFile;
+ }
+
+ public Boolean allMetricsCompleted() {
+ return Optional.ofNullable(boardMetricsCompleted).orElse(true)
+ && Optional.ofNullable(doraMetricsCompleted).orElse(true) && overallMetricCompleted;
+ }
+
}
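
A minimal sketch (not part of the patch) of how the new completion flags are expected to combine in allMetricsCompleted(), assuming the Lombok builder above: a null board or dora flag reads as "that metric type was not requested", while overallMetricCompleted is unboxed directly and is therefore expected to be initialised before the check.

    // Board metrics never requested: only the dora and overall flags matter.
    MetricsDataCompleted onlyDora = MetricsDataCompleted.builder()
            .doraMetricsCompleted(true)
            .overallMetricCompleted(true)
            .build();
    boolean done = onlyDora.allMetricsCompleted();         // true: null board flag defaults to true

    // Dora metrics still running: the overall result stays false.
    MetricsDataCompleted pending = MetricsDataCompleted.builder()
            .doraMetricsCompleted(false)
            .overallMetricCompleted(true)
            .build();
    boolean stillPending = pending.allMetricsCompleted();  // false
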
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/ReportResponse.java b/backend/src/main/java/heartbeat/controller/report/dto/response/ReportResponse.java
index 9088295488..2ea152555c 100644
--- a/backend/src/main/java/heartbeat/controller/report/dto/response/ReportResponse.java
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/ReportResponse.java
@@ -21,21 +21,27 @@ public class ReportResponse {
private DeploymentFrequency deploymentFrequency;
- private ChangeFailureRate changeFailureRate;
+ private DevChangeFailureRate devChangeFailureRate;
- private MeanTimeToRecovery meanTimeToRecovery;
+ private DevMeanTimeToRecovery devMeanTimeToRecovery;
private LeadTimeForChanges leadTimeForChanges;
private ReportMetricsError reportMetricsError;
+ private Rework rework;
+
private Long exportValidityTime;
- private boolean boardMetricsCompleted;
+ private Boolean boardMetricsCompleted;
+
+ private Boolean doraMetricsCompleted;
+
+ private Boolean overallMetricsCompleted;
- private boolean doraMetricsCompleted;
+ private Boolean allMetricsCompleted;
- private boolean allMetricsCompleted;
+ private Boolean isSuccessfulCreateCsvFile;
public ReportResponse(Long exportValidityTime) {
this.exportValidityTime = exportValidityTime;
diff --git a/backend/src/main/java/heartbeat/controller/report/dto/response/Rework.java b/backend/src/main/java/heartbeat/controller/report/dto/response/Rework.java
new file mode 100644
index 0000000000..c3f462d278
--- /dev/null
+++ b/backend/src/main/java/heartbeat/controller/report/dto/response/Rework.java
@@ -0,0 +1,36 @@
+package heartbeat.controller.report.dto.response;
+
+import lombok.Builder;
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+@Builder
+public class Rework {
+
+ private Integer totalReworkTimes;
+
+ private String reworkState;
+
+ private Integer fromAnalysis;
+
+ private Integer fromInDev;
+
+ private Integer fromBlock;
+
+ private Integer fromWaitingForTesting;
+
+ private Integer fromTesting;
+
+ private Integer fromReview;
+
+ private Integer fromDone;
+
+ private Integer totalReworkCards;
+
+ private Integer throughput;
+
+ private Double reworkCardsRatio;
+
+}
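
For context (illustrative values only, field names from the class above), the new Rework block is a plain holder whose ratio corresponds to the CSV label added later in this patch, "Rework cards ratio(Total rework cards/Throughput)".

    Rework rework = Rework.builder()
            .reworkState("In Dev")            // hypothetical selected rework state
            .totalReworkTimes(5)
            .totalReworkCards(3)
            .throughput(12)
            .reworkCardsRatio(3 / 12.0)       // 0.25
            .build();
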
diff --git a/backend/src/main/java/heartbeat/handler/AsyncExceptionHandler.java b/backend/src/main/java/heartbeat/handler/AsyncExceptionHandler.java
index fd51251e4a..3cb5c4e8c8 100644
--- a/backend/src/main/java/heartbeat/handler/AsyncExceptionHandler.java
+++ b/backend/src/main/java/heartbeat/handler/AsyncExceptionHandler.java
@@ -16,14 +16,14 @@
public class AsyncExceptionHandler extends AsyncDataBaseHandler {
public void put(String reportId, BaseException e) {
- createFileByType(ERROR, reportId, new Gson().toJson(e));
+ createFileByType(ERROR, reportId, new Gson().toJson(new AsyncExceptionDTO(e)));
}
- public BaseException get(String reportId) {
+ public AsyncExceptionDTO get(String reportId) {
return readFileByType(ERROR, reportId, AsyncExceptionDTO.class);
}
- public BaseException remove(String reportId) {
+ public AsyncExceptionDTO remove(String reportId) {
return readAndRemoveFileByType(ERROR, reportId, AsyncExceptionDTO.class);
}
diff --git a/backend/src/main/java/heartbeat/handler/AsyncMetricsDataHandler.java b/backend/src/main/java/heartbeat/handler/AsyncMetricsDataHandler.java
index 8d8b0ac166..0cfc91a082 100644
--- a/backend/src/main/java/heartbeat/handler/AsyncMetricsDataHandler.java
+++ b/backend/src/main/java/heartbeat/handler/AsyncMetricsDataHandler.java
@@ -5,18 +5,12 @@
import heartbeat.controller.report.dto.response.MetricsDataCompleted;
import heartbeat.exception.GenerateReportException;
import heartbeat.handler.base.AsyncDataBaseHandler;
-import heartbeat.service.report.MetricsDataDTO;
-import heartbeat.util.IdUtil;
-import heartbeat.util.ValueUtil;
-import jakarta.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.Synchronized;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Component;
import java.io.File;
-import java.util.Objects;
-import java.util.stream.Stream;
import static heartbeat.handler.base.FIleType.METRICS_DATA_COMPLETED;
@@ -25,6 +19,8 @@
@RequiredArgsConstructor
public class AsyncMetricsDataHandler extends AsyncDataBaseHandler {
+ private static final String GENERATE_REPORT_ERROR = "Failed to update metrics data completed through this timestamp.";
+
public void putMetricsDataCompleted(String timeStamp, MetricsDataCompleted metricsDataCompleted) {
try {
acquireLock(METRICS_DATA_COMPLETED, timeStamp);
@@ -44,11 +40,15 @@ public void deleteExpireMetricsDataCompletedFile(long currentTimeStamp, File dir
}
@Synchronized
- public void updateMetricsDataCompletedInHandler(String metricDataFileId, MetricType metricType) {
+ public void updateMetricsDataCompletedInHandler(String metricDataFileId, MetricType metricType,
+ boolean isCreateCsvSuccess) {
MetricsDataCompleted previousMetricsCompleted = getMetricsDataCompleted(metricDataFileId);
if (previousMetricsCompleted == null) {
- log.error("Failed to update metrics data completed through this timestamp.");
- throw new GenerateReportException("Failed to update metrics data completed through this timestamp.");
+ log.error(GENERATE_REPORT_ERROR);
+ throw new GenerateReportException(GENERATE_REPORT_ERROR);
+ }
+ if (isCreateCsvSuccess) {
+ previousMetricsCompleted.setIsSuccessfulCreateCsvFile(true);
}
switch (metricType) {
case BOARD -> previousMetricsCompleted.setBoardMetricsCompleted(true);
@@ -59,31 +59,14 @@ public void updateMetricsDataCompletedInHandler(String metricDataFileId, MetricT
putMetricsDataCompleted(metricDataFileId, previousMetricsCompleted);
}
- public MetricsDataDTO getReportReadyStatusByTimeStamp(String timeStamp) {
- Boolean boardReadyStatus = getReadyStatus(IdUtil.getBoardReportId(timeStamp), MetricType.BOARD);
- boolean isBoardReady = ValueUtil.valueOrDefault(false, boardReadyStatus);
-
- Boolean doraReadyStatus = getReadyStatus(IdUtil.getDoraReportId(timeStamp), MetricType.DORA);
- boolean isDoraReady = ValueUtil.valueOrDefault(false, doraReadyStatus);
-
- boolean isReportReady = Stream.of(boardReadyStatus, doraReadyStatus)
- .filter(Objects::nonNull)
- .allMatch(Boolean::booleanValue);
- return new MetricsDataDTO(isBoardReady, isDoraReady, isReportReady);
- }
-
- @Nullable
- private Boolean getReadyStatus(String fileId, MetricType metricType) {
- MetricsDataCompleted metricsDataCompleted = getMetricsDataCompleted(fileId);
- if (metricsDataCompleted == null) {
- return null;
- }
- else if (metricType == MetricType.BOARD) {
- return metricsDataCompleted.boardMetricsCompleted();
- }
- else {
- return metricsDataCompleted.doraMetricsCompleted();
+ public void updateOverallMetricsCompletedInHandler(String metricDataFileId) {
+ MetricsDataCompleted previousMetricsCompleted = getMetricsDataCompleted(metricDataFileId);
+ if (previousMetricsCompleted == null) {
+ log.error(GENERATE_REPORT_ERROR);
+ throw new GenerateReportException(GENERATE_REPORT_ERROR);
}
+ previousMetricsCompleted.setOverallMetricCompleted(true);
+ putMetricsDataCompleted(metricDataFileId, previousMetricsCompleted);
}
}
diff --git a/backend/src/main/java/heartbeat/handler/base/AsyncDataBaseHandler.java b/backend/src/main/java/heartbeat/handler/base/AsyncDataBaseHandler.java
index aff961671c..9d382729af 100644
--- a/backend/src/main/java/heartbeat/handler/base/AsyncDataBaseHandler.java
+++ b/backend/src/main/java/heartbeat/handler/base/AsyncDataBaseHandler.java
@@ -16,7 +16,6 @@
import java.util.Objects;
import java.util.Optional;
-import static heartbeat.handler.base.FIleType.METRICS_DATA_COMPLETED;
import static heartbeat.service.report.scheduler.DeleteExpireCSVScheduler.EXPORT_CSV_VALIDITY_TIME;
@Log4j2
@@ -28,7 +27,7 @@ public class AsyncDataBaseHandler {
public static final String SUFFIX_LOCK = ".lock";
- public static final String FILENAME_SPLIT_PATTERN = "\\s*\\-|\\.\\s*";
+ public static final String FILENAME_SPLIT_PATTERN = "[-.]";
protected synchronized void createFileByType(FIleType fIleType, String fileId, String json) {
createDirToConvertData(fIleType);
@@ -95,12 +94,12 @@ protected T readAndRemoveFileByType(FIleType fIleType, String fileId, Class<
}
catch (IOException | RuntimeException e) {
log.info("Failed remove file type: {}, file name: {}", fIleType.getType(), fileId);
- throw new GenerateReportException("Failed remove " + fIleType.getType() + " file " + fileId);
+ throw new GenerateReportException("Failed remove " + fIleType.getType() + " file with file:" + fileId);
}
}
else {
throw new GenerateReportException(
- "Failed remove " + fIleType.getType() + " file " + fileId + "invalid file name");
+ "Failed read and remove " + fIleType.getType() + " file with file name :" + fileId);
}
}
@@ -126,8 +125,7 @@ public void acquireLock(FIleType fIleType, String fileId) {
}
}
else {
- throw new GenerateReportException(
- "Failed locked " + fIleType.getType() + " file " + fileId + "invalid file name");
+ throw new GenerateReportException("Failed locked " + fIleType.getType() + " lock :" + fileId);
}
}
@@ -149,8 +147,7 @@ protected void unLock(FIleType fIleType, String fileId) {
}
}
else {
- throw new GenerateReportException(
- "Failed unlocked " + fIleType.getType() + " file " + fileId + "invalid file name");
+ throw new GenerateReportException("Failed unlocked " + fIleType.getType() + " lock :" + fileId);
}
}
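
Both the old and the new FILENAME_SPLIT_PATTERN split a stored file id into its name, timestamp and extension; the new character class simply drops the whitespace handling. A quick sketch, assuming ids shaped like "board-1700000000000.json" (the exact naming convention is not shown in this hunk):

    String fileName = "board-1700000000000.json";              // assumed shape
    String[] oldParts = fileName.split("\\s*\\-|\\.\\s*");     // [board, 1700000000000, json]
    String[] newParts = fileName.split("[-.]");                // [board, 1700000000000, json]
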
diff --git a/backend/src/main/java/heartbeat/handler/base/AsyncExceptionDTO.java b/backend/src/main/java/heartbeat/handler/base/AsyncExceptionDTO.java
index 8e39fb6e25..53a8095d84 100644
--- a/backend/src/main/java/heartbeat/handler/base/AsyncExceptionDTO.java
+++ b/backend/src/main/java/heartbeat/handler/base/AsyncExceptionDTO.java
@@ -1,11 +1,20 @@
package heartbeat.handler.base;
import heartbeat.exception.BaseException;
+import lombok.AllArgsConstructor;
+import lombok.Data;
-public class AsyncExceptionDTO extends BaseException {
+@Data
+@AllArgsConstructor
+public class AsyncExceptionDTO {
- public AsyncExceptionDTO(String message, int status) {
- super(message, status);
+ private String message;
+
+ private int status;
+
+ public AsyncExceptionDTO(BaseException e) {
+ this.message = e.getMessage();
+ this.status = e.getStatus();
}
}
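
Taken together with the AsyncExceptionHandler change above, the DTO no longer needs to extend BaseException: only the message and status have to survive the Gson round trip, and get()/remove() now return the type they actually deserialize. A hedged sketch of that round trip (the 404 status is an illustrative value; in the handler the DTO is built from the caught BaseException via new AsyncExceptionDTO(e)):

    AsyncExceptionDTO stored = new AsyncExceptionDTO("site is incorrect", 404);
    String json = new Gson().toJson(stored);                               // persists message + status only
    AsyncExceptionDTO restored = new Gson().fromJson(json, AsyncExceptionDTO.class);
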
diff --git a/backend/src/main/java/heartbeat/service/board/jira/JiraService.java b/backend/src/main/java/heartbeat/service/board/jira/JiraService.java
index c9fc4b5830..b22b29a3dd 100644
--- a/backend/src/main/java/heartbeat/service/board/jira/JiraService.java
+++ b/backend/src/main/java/heartbeat/service/board/jira/JiraService.java
@@ -28,6 +28,7 @@
import heartbeat.controller.board.dto.request.BoardVerifyRequestParam;
import heartbeat.controller.board.dto.request.CardStepsEnum;
import heartbeat.controller.board.dto.request.RequestJiraBoardColumnSetting;
+import heartbeat.controller.board.dto.request.ReworkTimesSetting;
import heartbeat.controller.board.dto.request.StoryPointsAndCycleTimeRequest;
import heartbeat.controller.board.dto.response.BoardConfigDTO;
import heartbeat.controller.board.dto.response.CardCollection;
@@ -38,6 +39,7 @@
import heartbeat.controller.board.dto.response.CycleTimeInfoDTO;
import heartbeat.controller.board.dto.response.JiraCardDTO;
import heartbeat.controller.board.dto.response.JiraColumnDTO;
+import heartbeat.controller.board.dto.response.ReworkTimesInfo;
import heartbeat.controller.board.dto.response.StatusChangedItem;
import heartbeat.controller.board.dto.response.StepsDay;
import heartbeat.controller.board.dto.response.TargetField;
@@ -58,24 +60,35 @@
import org.springframework.util.CollectionUtils;
import java.lang.reflect.Type;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
import java.net.URI;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
+import java.util.EnumMap;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
+import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.BLOCK;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.FLAG;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.fromValue;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.reworkJudgmentMap;
import static java.lang.Long.parseLong;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
@@ -98,6 +111,10 @@ public class JiraService {
private static final String NONE_DONE_CARD_TAG = "nonDone";
+ public static final String FLAGGED = "flagged";
+
+ public static final String IMPEDIMENT = "impediment";
+
private final ThreadPoolTaskExecutor customTaskExecutor;
private final JiraFeignClient jiraFeignClient;
@@ -117,13 +134,18 @@ public void shutdownExecutor() {
public String verify(BoardType boardType, BoardVerifyRequestParam boardVerifyRequestParam) {
URI baseUrl = urlGenerator.getUri(boardVerifyRequestParam.getSite());
- if (!BoardType.JIRA.equals(boardType)) {
- throw new BadRequestException("boardType param is not correct");
- }
+ verifyBoardTypeIsJira(boardType);
+ try {
+ jiraFeignClient.getDashboard(baseUrl, boardVerifyRequestParam.getToken());
+ }
+ catch (NotFoundException e) {
+ throw new NotFoundException("site is incorrect");
+ }
try {
JiraBoardVerifyDTO jiraBoardVerifyDTO = jiraFeignClient.getBoard(baseUrl,
boardVerifyRequestParam.getBoardId(), boardVerifyRequestParam.getToken());
+
return jiraBoardVerifyDTO.getLocation().getProjectKey();
}
catch (NotFoundException e) {
@@ -145,9 +167,7 @@ public String verify(BoardType boardType, BoardVerifyRequestParam boardVerifyReq
public BoardConfigDTO getInfo(BoardType boardType, BoardRequestParam boardRequestParam) {
URI baseUrl = urlGenerator.getUri(boardRequestParam.getSite());
try {
- if (!BoardType.JIRA.equals(boardType)) {
- throw new BadRequestException("boardType param is not correct");
- }
+ verifyBoardTypeIsJira(boardType);
String jiraBoardStyle = jiraFeignClient
.getProject(baseUrl, boardRequestParam.getProjectKey(), boardRequestParam.getToken())
.getStyle();
@@ -216,11 +236,17 @@ public BoardConfigDTO getJiraConfiguration(BoardType boardType, BoardRequestPara
}
}
+ private static void verifyBoardTypeIsJira(BoardType boardType) {
+ if (!BoardType.JIRA.equals(boardType)) {
+ throw new BadRequestException("boardType param is not correct");
+ }
+ }
+
private boolean isIgnoredTargetField(TargetField targetField) {
return (FIELDS_IGNORE.contains(targetField.getKey())) || FIELDS_IGNORE.contains(targetField.getName());
}
- public CardCollection getStoryPointsAndCycleTimeForDoneCards(StoryPointsAndCycleTimeRequest request,
+ public CardCollection getStoryPointsAndCycleTimeAndReworkInfoForDoneCards(StoryPointsAndCycleTimeRequest request,
List<RequestJiraBoardColumnSetting> boardColumns, List<String> users, String assigneeFilter) {
BoardType boardType = BoardType.fromValue(request.getType());
URI baseUrl = urlGenerator.getUri(request.getSite());
@@ -242,13 +268,27 @@ public CardCollection getStoryPointsAndCycleTimeForDoneCards(StoryPointsAndCycle
}
List<JiraCardDTO> realDoneCards = getRealDoneCards(request, boardColumns, users, baseUrl, allDoneCards,
jiraCardWithFields.getTargetFields(), assigneeFilter);
+
double storyPointSum = realDoneCards.stream()
.mapToDouble(card -> card.getBaseInfo().getFields().getStoryPoints())
.sum();
+ int reworkCardNumber = realDoneCards.stream()
+ .filter(realDoneCard -> realDoneCard.getReworkTimesInfos()
+ .stream()
+ .anyMatch(reworkTimesInfo -> reworkTimesInfo.getTimes() != 0))
+ .toList()
+ .size();
+ double reworkRatio = realDoneCards.isEmpty() ? 0
+ : BigDecimal.valueOf(reworkCardNumber)
+ .divide(BigDecimal.valueOf(realDoneCards.size()), 4, RoundingMode.HALF_UP)
+ .doubleValue();
+
return CardCollection.builder()
.storyPointSum(storyPointSum)
.cardsNumber(realDoneCards.size())
+ .reworkCardNumber(reworkCardNumber)
+ .reworkRatio(reworkRatio)
.jiraCardDTOList(realDoneCards)
.build();
}
@@ -380,8 +420,7 @@ private JiraCardWithFields getAllCards(BoardType boardType, URI baseUrl, BoardRe
private AllCardsResponseDTO formatAllCards(String allCardResponse, List targetFields,
List overrideFields) {
- Gson gson = new Gson();
- AllCardsResponseDTO allCardsResponseDTO = gson.fromJson(allCardResponse, AllCardsResponseDTO.class);
+ AllCardsResponseDTO allCardsResponseDTO = new Gson().fromJson(allCardResponse, AllCardsResponseDTO.class);
List jiraCards = allCardsResponseDTO.getIssues();
JsonArray elements = JsonParser.parseString(allCardResponse).getAsJsonObject().get("issues").getAsJsonArray();
@@ -408,41 +447,40 @@ private AllCardsResponseDTO formatAllCards(String allCardResponse, List {
+ Sprint sprint = sprintMap.get(jiraCard.getKey());
+ jiraCard.getFields().setSprint(sprint);
+ });
return allCardsResponseDTO;
}
- private static Map<String, JsonElement> getCustomfieldMap(Gson gson, Map<String, Sprint> sprintMap,
- Map<String, String> resultMap, JsonElement element, JsonObject jsonElement) {
+ private Map<String, JsonElement> getCustomFieldMap(JsonElement element, Map<String, String> resultMap,
+ JsonObject jsonElement, Map<String, Sprint> sprintMap) {
Map<String, JsonElement> customFieldMap = new HashMap<>();
- for (Map.Entry<String, String> entry : resultMap.entrySet()) {
- String customFieldKey = entry.getKey();
- String customFieldValue = entry.getValue();
+ resultMap.forEach((customFieldKey, customFieldValue) -> {
if (jsonElement.has(customFieldKey)) {
JsonElement fieldValue = jsonElement.get(customFieldKey);
- if (customFieldValue.equals("Sprint") && !fieldValue.isJsonNull() && fieldValue.isJsonArray()) {
- JsonArray jsonArray = fieldValue.getAsJsonArray();
- if (!jsonArray.isJsonNull() && !jsonArray.isEmpty()) {
- Type listType = new TypeToken<List<Sprint>>() {
- }.getType();
- List<Sprint> sprints = gson.fromJson(jsonArray, listType);
- sprints.sort(Comparator.comparing(Sprint::getCompleteDate,
- Comparator.nullsLast(Comparator.comparing(ZonedDateTime::parse))));
- sprintMap.put(element.getAsJsonObject().get("key").getAsString(),
- sprints.get(sprints.size() - 1));
- }
- }
- else if (customFieldValue.equals("Story point estimate") && !fieldValue.isJsonNull()
- && fieldValue.isJsonPrimitive()) {
+ fieldValue = mapFieldValue(element, sprintMap, customFieldValue, fieldValue);
+ customFieldMap.put(customFieldKey, fieldValue);
+ }
+ });
+ return customFieldMap;
+ }
+
+ private JsonElement mapFieldValue(JsonElement element, Map<String, Sprint> sprintMap, String customFieldValue,
+ JsonElement fieldValue) {
+ switch (customFieldValue) {
+ case "Sprint" -> Optional.ofNullable(getSprint(fieldValue))
+ .ifPresentOrElse(it -> sprintMap.put(element.getAsJsonObject().get("key").getAsString(), it), () -> {
+ });
+ case "Story point estimate" -> {
+ if (!fieldValue.isJsonNull() && fieldValue.isJsonPrimitive()) {
JsonPrimitive jsonPrimitive = fieldValue.getAsJsonPrimitive();
if (jsonPrimitive.isNumber()) {
Number numberValue = jsonPrimitive.getAsNumber();
@@ -450,17 +488,35 @@ else if (customFieldValue.equals("Story point estimate") && !fieldValue.isJsonNu
fieldValue = new JsonPrimitive(doubleValue);
}
}
- else if (customFieldValue.equals("Flagged") && !fieldValue.isJsonNull() && fieldValue.isJsonArray()) {
+ }
+ case "Flagged" -> {
+ if (!fieldValue.isJsonNull() && fieldValue.isJsonArray()) {
JsonArray jsonArray = fieldValue.getAsJsonArray();
if (!jsonArray.isJsonNull() && !jsonArray.isEmpty()) {
JsonElement targetField = jsonArray.get(jsonArray.size() - 1);
fieldValue = targetField.getAsJsonObject().get("value");
}
}
- customFieldMap.put(customFieldKey, fieldValue);
+ }
+ default -> {
}
}
- return customFieldMap;
+ return fieldValue;
+ }
+
+ private Sprint getSprint(JsonElement fieldValue) {
+ if (!fieldValue.isJsonNull() && fieldValue.isJsonArray()) {
+ JsonArray jsonArray = fieldValue.getAsJsonArray();
+ if (!jsonArray.isJsonNull() && !jsonArray.isEmpty()) {
+ Type listType = new TypeToken<List<Sprint>>() {
+ }.getType();
+ List<Sprint> sprints = new Gson().fromJson(jsonArray, listType);
+ sprints.sort(Comparator.comparing(Sprint::getCompleteDate,
+ Comparator.nullsLast(Comparator.comparing(ZonedDateTime::parse))));
+ return sprints.get(sprints.size() - 1);
+ }
+ }
+ return null;
}
private String parseJiraJql(BoardType boardType, List doneColumns, BoardRequestParam boardRequestParam) {
@@ -554,9 +610,9 @@ private List getRealDoneCards(StoryPointsAndCycleTimeRequest reques
jiraCards.forEach(doneCard -> {
CardHistoryResponseDTO cardHistoryResponseDTO = getJiraCardHistory(baseUrl, doneCard.getKey(), 0,
request.getToken());
+ List<String> assigneeSet = getAssigneeSet(cardHistoryResponseDTO, filterMethod, doneCard);
CycleTimeInfoDTO cycleTimeInfoDTO = getCycleTime(cardHistoryResponseDTO, request.isTreatFlagCardAsBlock(),
keyFlagged, request.getStatus());
- List<String> assigneeSet = getAssigneeSet(cardHistoryResponseDTO, filterMethod, doneCard);
if (users.stream().anyMatch(assigneeSet::contains)) {
JiraCardDTO jiraCardDTO = JiraCardDTO.builder()
.baseInfo(doneCard)
@@ -564,13 +620,135 @@ private List getRealDoneCards(StoryPointsAndCycleTimeRequest reques
.originCycleTime(cycleTimeInfoDTO.getOriginCycleTimeInfos())
.cardCycleTime(calculateCardCycleTime(doneCard.getKey(), cycleTimeInfoDTO.getCycleTimeInfos(),
boardColumns))
+ .reworkTimesInfos(getReworkTimesInfo(cardHistoryResponseDTO, request.getReworkTimesSetting(),
+ request.isTreatFlagCardAsBlock(), boardColumns))
.build();
+ jiraCardDTO.calculateTotalReworkTimes();
realDoneCards.add(jiraCardDTO);
}
});
return realDoneCards;
}
+ private List<ReworkTimesInfo> getReworkTimesInfo(CardHistoryResponseDTO jiraCardHistory,
+ ReworkTimesSetting reworkTimesSetting, boolean considerFlagAsBlock,
+ List<RequestJiraBoardColumnSetting> boardColumns) {
+ if (Objects.isNull(reworkTimesSetting)) {
+ return List.of();
+ }
+ Map<String, CardStepsEnum> stateMap = buildBoardStateMap(boardColumns);
+ if (considerFlagAsBlock) {
+ return getReworkTimesInfoWhenConsiderFlagAsBlock(jiraCardHistory, reworkTimesSetting.getEnumReworkState(),
+ new HashSet<>(reworkTimesSetting.getEnumExcludeStates()), stateMap);
+ }
+ else {
+ return getReworkTimesInfoWhenNotConsiderFlagAsBlock(jiraCardHistory,
+ reworkTimesSetting.getEnumReworkState(), new HashSet<>(reworkTimesSetting.getEnumExcludeStates()),
+ stateMap);
+ }
+ }
+
+ private List<ReworkTimesInfo> getReworkTimesInfoWhenConsiderFlagAsBlock(CardHistoryResponseDTO jiraCardHistory,
+ CardStepsEnum reworkState, Set<CardStepsEnum> excludedStates, Map<String, CardStepsEnum> stateMap) {
+ Map<CardStepsEnum, Integer> reworkTimesMap = initializeReworkTimesMap(reworkState, excludedStates, stateMap);
+ reworkTimesMap.put(FLAG, 0);
+ AtomicReference<CardStepsEnum> currentState = new AtomicReference<>();
+ AtomicBoolean hasFlag = new AtomicBoolean(false);
+ jiraCardHistory.getItems()
+ .stream()
+ .filter(jiraCardHistoryItem -> STATUS_FIELD_ID.equalsIgnoreCase(jiraCardHistoryItem.getFieldId())
+ || FLAGGED.equalsIgnoreCase(jiraCardHistoryItem.getFieldDisplayName()))
+ .forEach(jiraCardHistoryItem -> {
+ if (STATUS_FIELD_ID.equalsIgnoreCase(jiraCardHistoryItem.getFieldId())) {
+ currentState
+ .set(convertBoardStateToEnumState(jiraCardHistoryItem.getTo().getDisplayName(), stateMap));
+ if (!hasFlag.get()) {
+ calculateReworkTimesMap(reworkState, excludedStates, reworkTimesMap, jiraCardHistoryItem,
+ stateMap);
+ }
+ }
+ else {
+ if (IMPEDIMENT.equalsIgnoreCase(jiraCardHistoryItem.getTo().getDisplayName())) {
+ hasFlag.set(true);
+ CardStepsEnum from = Objects.requireNonNull(currentState).get();
+ calculateTimes(reworkState, excludedStates, reworkTimesMap, from, FLAG);
+ }
+ else {
+ hasFlag.set(false);
+ CardStepsEnum to = Objects.requireNonNull(currentState).get();
+ calculateTimes(reworkState, excludedStates, reworkTimesMap, FLAG, to);
+ }
+ }
+ });
+ if (reworkJudgmentMap.get(fromValue(reworkState.getValue())).contains(BLOCK)) {
+ reworkTimesMap.put(BLOCK, reworkTimesMap.getOrDefault(BLOCK, 0) + reworkTimesMap.get(FLAG));
+ }
+ reworkTimesMap.remove(FLAG);
+ return reworkTimesMap.entrySet()
+ .stream()
+ .map(entry -> new ReworkTimesInfo(entry.getKey(), entry.getValue()))
+ .toList();
+ }
+
+ private static Map<CardStepsEnum, Integer> initializeReworkTimesMap(CardStepsEnum reworkState,
+ Set<CardStepsEnum> excludedStates, Map<String, CardStepsEnum> stateMap) {
+ Map<CardStepsEnum, Integer> reworkTimesMap = new EnumMap<>(CardStepsEnum.class);
+ Set<CardStepsEnum> stateReworkEnums = new HashSet<>(reworkJudgmentMap.get(reworkState));
+ stateReworkEnums.removeAll(excludedStates);
+ stateReworkEnums.stream().filter(stateMap.values()::contains).forEach(state -> reworkTimesMap.put(state, 0));
+ return reworkTimesMap;
+ }
+
+ private Map<String, CardStepsEnum> buildBoardStateMap(List<RequestJiraBoardColumnSetting> boardColumns) {
+ return boardColumns.stream()
+ .collect(Collectors.toMap(boardColumn -> boardColumn.getName().toUpperCase(),
+ boardColumn -> CardStepsEnum.fromValue(boardColumn.getValue())));
+ }
+
+ private boolean isRework(CardStepsEnum from, CardStepsEnum to, Set<CardStepsEnum> excludedStates) {
+ return !excludedStates.contains(from) && reworkJudgmentMap.get(to).contains(from);
+ }
+
+ private CardStepsEnum convertBoardStateToEnumState(String value, Map<String, CardStepsEnum> stateMap) {
+ if (stateMap.containsKey(value.toUpperCase())) {
+ return stateMap.get(value.toUpperCase());
+ }
+ return CardStepsEnum.UNKNOWN;
+ }
+
+ private List<ReworkTimesInfo> getReworkTimesInfoWhenNotConsiderFlagAsBlock(CardHistoryResponseDTO jiraCardHistory,
+ CardStepsEnum reworkState, Set<CardStepsEnum> excludedStates, Map<String, CardStepsEnum> stateMap) {
+ Map<CardStepsEnum, Integer> reworkTimesMap = initializeReworkTimesMap(reworkState, excludedStates, stateMap);
+ reworkTimesMap.remove(FLAG);
+ jiraCardHistory.getItems()
+ .stream()
+ .filter(jiraCardHistoryItem -> STATUS_FIELD_ID.equalsIgnoreCase(jiraCardHistoryItem.getFieldId()))
+ .forEach(jiraCardHistoryItem -> calculateReworkTimesMap(reworkState, excludedStates, reworkTimesMap,
+ jiraCardHistoryItem, stateMap));
+ return reworkTimesMap.entrySet()
+ .stream()
+ .map(entry -> new ReworkTimesInfo(entry.getKey(), entry.getValue()))
+ .toList();
+ }
+
+ private void calculateReworkTimesMap(CardStepsEnum reworkState, Set<CardStepsEnum> excludedStates,
+ Map<CardStepsEnum, Integer> reworkTimesMap, HistoryDetail jiraCardHistoryItem,
+ Map<String, CardStepsEnum> stateMap) {
+ CardStepsEnum from = convertBoardStateToEnumState(jiraCardHistoryItem.getFrom().getDisplayName(), stateMap);
+ CardStepsEnum to = convertBoardStateToEnumState(jiraCardHistoryItem.getTo().getDisplayName(), stateMap);
+ calculateTimes(reworkState, excludedStates, reworkTimesMap, from, to);
+ }
+
+ private void calculateTimes(CardStepsEnum reworkState, Set<CardStepsEnum> excludedStates,
+ Map<CardStepsEnum, Integer> reworkTimesMap, CardStepsEnum from, CardStepsEnum to) {
+ if (!to.equals(reworkState)) {
+ return;
+ }
+ if (isRework(from, to, excludedStates)) {
+ reworkTimesMap.computeIfPresent(from, (key, value) -> value + 1);
+ }
+ }
+
private List<String> getAssigneeSet(CardHistoryResponseDTO jiraCardHistory, String assigneeFilter,
JiraCard doneCard) {
List<String> assigneeSet = new ArrayList<>();
@@ -771,7 +949,7 @@ private CardCustomFieldKey covertCustomFieldKey(List model, List cardCustomFieldKey.setStoryPoints(value.getKey());
case "sprint" -> cardCustomFieldKey.setSprint(value.getKey());
- case "flagged" -> cardCustomFieldKey.setFlagged(value.getKey());
+ case FLAGGED -> cardCustomFieldKey.setFlagged(value.getKey());
default -> {
}
}
@@ -786,7 +964,7 @@ private CardCustomFieldKey covertCustomFieldKey(List model, List ("flagged").equalsIgnoreCase(targetField.getName()))
+ .filter(targetField -> FLAGGED.equalsIgnoreCase(targetField.getName()))
.map(TargetField::getKey)
.filter(key -> !key.isEmpty())
.findFirst()
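
The rework rules introduced above count a status change as rework only when its target equals the selected rework state and its source is in reworkJudgmentMap.get(target) and not excluded; the card-level ratio is then the share of real done cards with at least one such transition, rounded to four decimals. A small sketch of that arithmetic with illustrative numbers:

    int reworkCardNumber = 3;        // cards with at least one rework transition
    int realDoneCardCount = 8;
    double reworkRatio = BigDecimal.valueOf(reworkCardNumber)
            .divide(BigDecimal.valueOf(realDoneCardCount), 4, RoundingMode.HALF_UP)
            .doubleValue();          // 0.375
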
diff --git a/backend/src/main/java/heartbeat/service/pipeline/buildkite/BuildKiteService.java b/backend/src/main/java/heartbeat/service/pipeline/buildkite/BuildKiteService.java
index 6c4a733db7..7d4ba73e58 100644
--- a/backend/src/main/java/heartbeat/service/pipeline/buildkite/BuildKiteService.java
+++ b/backend/src/main/java/heartbeat/service/pipeline/buildkite/BuildKiteService.java
@@ -4,6 +4,7 @@
import heartbeat.client.dto.pipeline.buildkite.BuildKiteBuildInfo;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteJob;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteOrganizationsInfo;
+import heartbeat.client.dto.pipeline.buildkite.BuildKitePipelineDTO;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteTokenInfo;
import heartbeat.client.dto.pipeline.buildkite.DeployInfo;
import heartbeat.client.dto.pipeline.buildkite.DeployTimes;
@@ -29,10 +30,10 @@
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
-import java.util.Comparator;
import java.util.concurrent.CompletableFuture;
import java.util.stream.IntStream;
@@ -45,6 +46,8 @@ public class BuildKiteService {
private static final String CANCELED_STATE = "canceled";
+ public static final String BEARER_TITLE = "Bearer ";
+
private final CachePageService cachePageService;
private final ThreadPoolTaskExecutor customTaskExecutor;
@@ -148,7 +151,7 @@ private List fetchPipelineStepsByPage(String token, Deployme
log.info(
"Start to paginated pipeline steps pagination info, orgId: {}, pipelineId: {}, stepsParam: {}, page:{}",
orgId, pipelineId, stepsParam, page);
- String realToken = "Bearer " + token;
+ String realToken = BEARER_TITLE + token;
stepsParam.setStartTime(TimeUtil.convertToISOFormat(stepsParam.getStartTime()));
stepsParam.setEndTime(TimeUtil.convertToISOFormat(stepsParam.getEndTime()));
@@ -175,6 +178,18 @@ private List fetchPipelineStepsByPage(String token, Deployme
return pageStepsInfo;
}
+ private CompletableFuture<List<BuildKitePipelineDTO>> getBuildKitePipelineInfoAsync(String orgSlug,
+ String buildKiteToken, int page, String perPage) {
+ return CompletableFuture.supplyAsync(() -> {
+ log.info("Start to paginated pipeline info, orgId: {}, page:{}, perPage:{}", orgSlug, page, perPage);
+ var pipelineInfo = buildKiteFeignClient.getPipelineInfo(buildKiteToken, orgSlug, String.valueOf(page),
+ perPage);
+ log.info("Successfully get paginated pipeline info, orgSlug: {}, page:{}, perPage:{}", orgSlug, page,
+ perPage);
+ return pipelineInfo.getBody();
+ }, customTaskExecutor);
+ }
+
private CompletableFuture<List<BuildKiteBuildInfo>> getBuildKiteStepsAsync(String token, String organizationId,
String pipelineId, PipelineStepsParam stepsParam, String perPage, int page, List<String> branch) {
return CompletableFuture.supplyAsync(() -> {
@@ -244,7 +259,7 @@ private List getBuildsByState(List buildInfos,
public void verifyToken(String token) {
try {
- String buildKiteToken = "Bearer " + token;
+ String buildKiteToken = BEARER_TITLE + token;
log.info("Start to query token permissions by token");
BuildKiteTokenInfo buildKiteTokenInfo = buildKiteFeignClient.getTokenInfo(buildKiteToken);
log.info("Successfully query token permissions by token, token info scopes: {}",
@@ -264,7 +279,7 @@ public void verifyToken(String token) {
public BuildKiteResponseDTO getBuildKiteInfo(TokenParam tokenParam) {
try {
- String buildKiteToken = "Bearer " + tokenParam.getToken();
+ String buildKiteToken = BEARER_TITLE + tokenParam.getToken();
log.info("Start to query BuildKite organizations by token");
List<BuildKiteOrganizationsInfo> buildKiteOrganizationsInfo = buildKiteFeignClient
.getBuildKiteOrganizationsInfo(buildKiteToken);
@@ -272,8 +287,7 @@ public BuildKiteResponseDTO getBuildKiteInfo(TokenParam tokenParam) {
log.info("Start to query BuildKite pipelineInfo by organizations slug: {}", buildKiteOrganizationsInfo);
List buildKiteInfoList = buildKiteOrganizationsInfo.stream()
- .flatMap(org -> buildKiteFeignClient.getPipelineInfo(buildKiteToken, org.getSlug(), "1", "100")
- .stream()
+ .flatMap(org -> getPipelineInfoList(org, buildKiteToken).stream()
.map(pipeline -> PipelineTransformer.fromBuildKitePipelineDto(pipeline, org.getSlug(),
org.getName())))
.toList();
@@ -294,6 +308,32 @@ public BuildKiteResponseDTO getBuildKiteInfo(TokenParam tokenParam) {
}
}
+ private List<BuildKitePipelineDTO> getPipelineInfoList(BuildKiteOrganizationsInfo org, String buildKiteToken) {
+ String firstPage = "1";
+ String perPage = "100";
+ var pipelineInfoResponse = cachePageService.getPipelineInfoList(org.getSlug(), buildKiteToken, firstPage,
+ perPage);
+ var firstPageStepsInfo = pipelineInfoResponse.getFirstPageInfo();
+ int totalPage = pipelineInfoResponse.getTotalPage();
+ List<BuildKitePipelineDTO> pagePipelineInfo = new ArrayList<>();
+ if (Objects.nonNull(firstPageStepsInfo)) {
+ pagePipelineInfo.addAll(firstPageStepsInfo);
+ }
+ if (totalPage > 1) {
+ List<CompletableFuture<List<BuildKitePipelineDTO>>> futures = IntStream
+ .range(Integer.parseInt(firstPage) + 1, totalPage + 1)
+ .mapToObj(page -> getBuildKitePipelineInfoAsync(org.getSlug(), buildKiteToken, page, perPage))
+ .toList();
+
+ var buildKiteBuildInfos = futures.stream()
+ .map(CompletableFuture::join)
+ .flatMap(Collection::stream)
+ .toList();
+ pagePipelineInfo.addAll(buildKiteBuildInfos);
+ }
+ return pagePipelineInfo;
+ }
+
public BuildKiteJob getBuildKiteJob(List jobs, List steps, List states,
String startTime, String endTime) {
Instant startDate = Instant.ofEpochMilli(Long.parseLong(startTime));
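
getPipelineInfoList follows the same fan-out pattern already used for pipeline steps: page 1 comes from the cache-backed CachePageService call, and pages 2..totalPage are fetched concurrently and joined. A simplified sketch of that pattern (element type reduced to String for illustration; java.util and java.util.concurrent/stream imports assumed):

    int totalPage = 3;
    List<CompletableFuture<List<String>>> futures = IntStream.range(2, totalPage + 1)
            .mapToObj(page -> CompletableFuture.supplyAsync(() -> List.of("pipelines of page " + page)))
            .toList();
    List<String> remainingPages = futures.stream()
            .map(CompletableFuture::join)
            .flatMap(Collection::stream)
            .toList();               // results of pages 2 and 3, in page order
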
diff --git a/backend/src/main/java/heartbeat/service/pipeline/buildkite/CachePageService.java b/backend/src/main/java/heartbeat/service/pipeline/buildkite/CachePageService.java
index 90a8625aa1..59bac169bd 100644
--- a/backend/src/main/java/heartbeat/service/pipeline/buildkite/CachePageService.java
+++ b/backend/src/main/java/heartbeat/service/pipeline/buildkite/CachePageService.java
@@ -2,6 +2,7 @@
import heartbeat.client.BuildKiteFeignClient;
import heartbeat.client.dto.pipeline.buildkite.BuildKiteBuildInfo;
+import heartbeat.client.dto.pipeline.buildkite.PageBuildKitePipelineInfoDTO;
import heartbeat.client.dto.pipeline.buildkite.PageStepsInfoDto;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
@@ -41,16 +42,43 @@ public PageStepsInfoDto fetchPageStepsInfo(String realToken, String orgId, Strin
return PageStepsInfoDto.builder().firstPageStepsInfo(firstPageStepsInfo).totalPage(totalPage).build();
}
+ @Cacheable(cacheNames = "pagePipelineInfo", key = "#buildKiteToken+'-'+#orgSlug+'-'+#page+'-'+#perPage")
+ public PageBuildKitePipelineInfoDTO getPipelineInfoList(String orgSlug, String buildKiteToken, String page,
+ String perPage) {
+ var pipelineInfoResponse = buildKiteFeignClient.getPipelineInfo(buildKiteToken, orgSlug, page, perPage);
+ log.info("Successfully get paginated pipeline info pagination info, orgSlug: {}, page:{}", orgSlug, 1);
+
+ int totalPage = parseTotalPage(pipelineInfoResponse.getHeaders().get(BUILD_KITE_LINK_HEADER));
+ log.info("Successfully parse the total page_total page: {}", totalPage);
+
+ return PageBuildKitePipelineInfoDTO.builder()
+ .firstPageInfo(pipelineInfoResponse.getBody())
+ .totalPage(totalPage)
+ .build();
+ }
+
private int parseTotalPage(@Nullable List<String> linkHeader) {
if (linkHeader == null) {
return 1;
}
String lastLink = linkHeader.stream().map(link -> link.replaceAll("per_page=\\d+", "")).findFirst().orElse("");
- Matcher matcher = Pattern.compile("page=(\\d+)[^>]*>;\\s*rel=\"last\"").matcher(lastLink);
- if (matcher.find()) {
- return Integer.parseInt(matcher.group(1));
+ int lastIndex = lastLink.indexOf("rel=\"last\"");
+ if (lastIndex == -1) {
+ return 1;
+ }
+ String beforeLastRel = lastLink.substring(0, lastIndex);
+ Matcher matcher = Pattern.compile("page=(\\d+)").matcher(beforeLastRel);
+
+ String lastNumber = null;
+ while (matcher.find()) {
+ lastNumber = matcher.group(1);
+ }
+ if (lastNumber != null) {
+ return Integer.parseInt(lastNumber);
+ }
+ else {
+ return 1;
}
- return 1;
}
}
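
The rewritten parseTotalPage no longer matches the page number and rel="last" in one regex; it truncates the Link header at rel="last" and keeps the last page=N seen before it, falling back to 1. With an illustrative BuildKite-style header (per_page already stripped by the caller) this yields 4:

    String lastLink = "<https://api.buildkite.com/v2/organizations/org/pipelines?page=2>; rel=\"next\", "
            + "<https://api.buildkite.com/v2/organizations/org/pipelines?page=4>; rel=\"last\"";
    String beforeLastRel = lastLink.substring(0, lastLink.indexOf("rel=\"last\""));
    Matcher matcher = Pattern.compile("page=(\\d+)").matcher(beforeLastRel);
    String lastNumber = null;
    while (matcher.find()) {
        lastNumber = matcher.group(1);                    // ends as "4"
    }
    int totalPage = lastNumber == null ? 1 : Integer.parseInt(lastNumber);
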
diff --git a/backend/src/main/java/heartbeat/service/report/BoardSheetGenerator.java b/backend/src/main/java/heartbeat/service/report/BoardSheetGenerator.java
new file mode 100644
index 0000000000..453713b72f
--- /dev/null
+++ b/backend/src/main/java/heartbeat/service/report/BoardSheetGenerator.java
@@ -0,0 +1,69 @@
+package heartbeat.service.report;
+
+import heartbeat.controller.board.dto.response.JiraCardDTO;
+import heartbeat.controller.report.dto.response.BoardCSVConfig;
+import lombok.Builder;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.ArrayUtils;
+
+import java.util.List;
+
+@Builder
+public class BoardSheetGenerator {
+
+ private List<JiraCardDTO> jiraCardDTOList;
+
+ private List<BoardCSVConfig> fields;
+
+ private List<BoardCSVConfig> extraFields;
+
+ private List<BoardCSVConfig> reworkFields;
+
+ private CSVFileGenerator csvFileGenerator;
+
+ private String[][] sheet;
+
+ String[][] generate() {
+ return sheet;
+ }
+
+ BoardSheetGenerator mergeBaseInfoAndCycleTimeSheet() {
+ String[][] baseInfoAndCycleTimeSheet = csvFileGenerator.assembleBoardData(jiraCardDTOList, fields, extraFields);
+ sheet = mergeSheetHorizontally(sheet, baseInfoAndCycleTimeSheet);
+ return this;
+ }
+
+ BoardSheetGenerator mergeReworkTimesSheet() {
+ if (CollectionUtils.isEmpty(reworkFields)) {
+ return this;
+ }
+ int columnCount = reworkFields.size();
+ String[][] reworkTimesSheet = new String[jiraCardDTOList.size() + 1][columnCount];
+
+ for (int column = 0; column < columnCount; column++) {
+ reworkTimesSheet[0][column] = reworkFields.get(column).getLabel();
+ }
+ for (int row = 0; row < jiraCardDTOList.size(); row++) {
+ JiraCardDTO cardDTO = jiraCardDTOList.get(row);
+ for (int column = 0; column < columnCount; column++) {
+ reworkTimesSheet[row + 1][column] = csvFileGenerator.getExtraDataPerRow(cardDTO.getReworkTimesFlat(),
+ reworkFields.get(column));
+ }
+ }
+ sheet = mergeSheetHorizontally(sheet, reworkTimesSheet);
+ return this;
+ }
+
+ private String[][] mergeSheetHorizontally(String[][] sheet, String[][] sheetToMerge) {
+ int rows = jiraCardDTOList.size() + 1;
+ String[][] combinedArray = new String[rows][];
+ if (ArrayUtils.isEmpty(sheet)) {
+ return sheetToMerge;
+ }
+ for (int i = 0; i < rows; i++) {
+ combinedArray[i] = ArrayUtils.addAll(sheet[i], sheetToMerge[i]);
+ }
+ return combinedArray;
+ }
+
+}
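
A hedged usage sketch for the new builder-chained generator (the real call site lives in the report service and is not part of this hunk): the sheet is assembled step by step and then handed to the writeDataToCSV overload added below in CSVFileGenerator.

    String[][] sheet = BoardSheetGenerator.builder()
            .jiraCardDTOList(cardDTOList)                 // List<JiraCardDTO> of real done cards
            .fields(fields)                               // List<BoardCSVConfig> of fixed + extra fields
            .extraFields(extraFields)
            .reworkFields(reworkFields)                   // may be empty; mergeReworkTimesSheet is then a no-op
            .csvFileGenerator(csvFileGenerator)
            .build()
            .mergeBaseInfoAndCycleTimeSheet()
            .mergeReworkTimesSheet()
            .generate();
    csvFileGenerator.writeDataToCSV(csvTimeStamp, sheet);
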
diff --git a/backend/src/main/java/heartbeat/service/report/CSVFileGenerator.java b/backend/src/main/java/heartbeat/service/report/CSVFileGenerator.java
index b5abcd2097..cf7788abec 100644
--- a/backend/src/main/java/heartbeat/service/report/CSVFileGenerator.java
+++ b/backend/src/main/java/heartbeat/service/report/CSVFileGenerator.java
@@ -5,27 +5,28 @@
import com.opencsv.CSVWriter;
import heartbeat.controller.board.dto.response.JiraCardDTO;
import heartbeat.controller.report.dto.request.ReportType;
-import heartbeat.controller.report.dto.response.AvgChangeFailureRate;
import heartbeat.controller.report.dto.response.AvgDeploymentFrequency;
+import heartbeat.controller.report.dto.response.AvgDevChangeFailureRate;
+import heartbeat.controller.report.dto.response.AvgDevMeanTimeToRecovery;
import heartbeat.controller.report.dto.response.AvgLeadTimeForChanges;
-import heartbeat.controller.report.dto.response.AvgMeanTimeToRecovery;
import heartbeat.controller.report.dto.response.BoardCSVConfig;
import heartbeat.controller.report.dto.response.BoardCSVConfigEnum;
-import heartbeat.controller.report.dto.response.ChangeFailureRate;
-import heartbeat.controller.report.dto.response.ChangeFailureRateOfPipeline;
import heartbeat.controller.report.dto.response.Classification;
import heartbeat.controller.report.dto.response.ClassificationNameValuePair;
import heartbeat.controller.report.dto.response.CycleTime;
import heartbeat.controller.report.dto.response.CycleTimeForSelectedStepItem;
import heartbeat.controller.report.dto.response.DeploymentFrequency;
import heartbeat.controller.report.dto.response.DeploymentFrequencyOfPipeline;
+import heartbeat.controller.report.dto.response.DevChangeFailureRate;
+import heartbeat.controller.report.dto.response.DevChangeFailureRateOfPipeline;
+import heartbeat.controller.report.dto.response.DevMeanTimeToRecovery;
+import heartbeat.controller.report.dto.response.DevMeanTimeToRecoveryOfPipeline;
import heartbeat.controller.report.dto.response.LeadTimeForChanges;
import heartbeat.controller.report.dto.response.LeadTimeForChangesOfPipelines;
import heartbeat.controller.report.dto.response.LeadTimeInfo;
-import heartbeat.controller.report.dto.response.MeanTimeToRecovery;
-import heartbeat.controller.report.dto.response.MeanTimeToRecoveryOfPipeline;
import heartbeat.controller.report.dto.response.PipelineCSVInfo;
import heartbeat.controller.report.dto.response.ReportResponse;
+import heartbeat.controller.report.dto.response.Rework;
import heartbeat.controller.report.dto.response.Velocity;
import heartbeat.exception.FileIOException;
import heartbeat.exception.GenerateReportException;
@@ -42,6 +43,8 @@
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -49,6 +52,7 @@
import java.util.stream.Stream;
import static heartbeat.service.report.calculator.ClassificationCalculator.pickDisplayNameFromObj;
+import static heartbeat.util.DecimalUtil.formatDecimalFour;
import static heartbeat.util.TimeUtil.convertToSimpleISOFormat;
import static java.util.concurrent.TimeUnit.HOURS;
@@ -65,6 +69,8 @@ public class CSVFileGenerator {
private static final String CANCELED_STATUS = "canceled";
+ private static final String REWORK_FIELD = "Rework";
+
private static InputStreamResource readStringFromCsvFile(String fileName) {
try {
InputStream inputStream = new FileInputStream(fileName);
@@ -101,54 +107,54 @@ public void convertPipelineDataToCSV(List leadTimeData, String
csvWriter.writeNext(headers);
- for (PipelineCSVInfo csvInfo : leadTimeData) {
- String committerName = null;
- String commitDate = null;
- String creatorName = null;
- String organization = csvInfo.getOrganizationName();
- String pipelineName = csvInfo.getPipeLineName();
- String stepName = csvInfo.getStepName();
- String valid = String.valueOf(csvInfo.getValid()).toLowerCase();
- String buildNumber = String.valueOf(csvInfo.getBuildInfo().getNumber());
- String state = csvInfo.getPiplineStatus().equals(CANCELED_STATUS) ? CANCELED_STATUS
- : csvInfo.getDeployInfo().getState();
- String branch = csvInfo.getBuildInfo().getBranch();
- if (csvInfo.getCommitInfo() != null) {
- committerName = csvInfo.getCommitInfo().getCommit().getAuthor().getName();
- commitDate = csvInfo.getCommitInfo().getCommit().getAuthor().getDate();
- }
-
- if (csvInfo.getBuildInfo().getCreator() != null
- && csvInfo.getBuildInfo().getCreator().getName() != null) {
- creatorName = csvInfo.getBuildInfo().getCreator().getName();
- }
-
- LeadTimeInfo leadTimeInfo = csvInfo.getLeadTimeInfo();
- String firstCommitTimeInPr = leadTimeInfo.getFirstCommitTimeInPr();
- String prCreatedTime = leadTimeInfo.getPrCreatedTime();
- String prMergedTime = leadTimeInfo.getPrMergedTime();
- String jobFinishTime = csvInfo.getDeployInfo().getJobFinishTime();
- String totalTime = leadTimeInfo.getTotalTime();
- String prLeadTime = leadTimeInfo.getPrLeadTime();
- String pipelineLeadTime = leadTimeInfo.getPipelineLeadTime();
-
- String[] rowData = { organization, pipelineName, stepName, valid, buildNumber, committerName,
- creatorName, firstCommitTimeInPr, commitDate, prCreatedTime, prMergedTime, jobFinishTime,
- totalTime, prLeadTime, pipelineLeadTime, state, branch };
-
- csvWriter.writeNext(rowData);
- }
+ leadTimeData.stream().map(this::getRowData).forEach(csvWriter::writeNext);
}
catch (IOException e) {
- log.error("Failed to write file", e);
+ log.error("Failed to write pipeline file", e);
throw new FileIOException(e);
}
}
else {
- throw new GenerateReportException("Failed to generate csv file,invalid csvTimestamp");
+ throw new GenerateReportException("Failed to generate pipeline csv file, invalid csvTimestamp");
}
}
+ private String[] getRowData(PipelineCSVInfo csvInfo) {
+ String committerName = null;
+ String commitDate = null;
+ if (csvInfo.getCommitInfo() != null) {
+ committerName = csvInfo.getCommitInfo().getCommit().getAuthor().getName();
+ commitDate = csvInfo.getCommitInfo().getCommit().getAuthor().getDate();
+ }
+
+ String creatorName = null;
+ if (csvInfo.getBuildInfo().getCreator() != null && csvInfo.getBuildInfo().getCreator().getName() != null) {
+ creatorName = csvInfo.getBuildInfo().getCreator().getName();
+ }
+
+ String organization = csvInfo.getOrganizationName();
+ String pipelineName = csvInfo.getPipeLineName();
+ String stepName = csvInfo.getStepName();
+ String valid = String.valueOf(csvInfo.getValid()).toLowerCase();
+ String buildNumber = String.valueOf(csvInfo.getBuildInfo().getNumber());
+ String state = csvInfo.getPiplineStatus().equals(CANCELED_STATUS) ? CANCELED_STATUS
+ : csvInfo.getDeployInfo().getState();
+ String branch = csvInfo.getBuildInfo().getBranch();
+
+ LeadTimeInfo leadTimeInfo = csvInfo.getLeadTimeInfo();
+ String firstCommitTimeInPr = leadTimeInfo.getFirstCommitTimeInPr();
+ String prCreatedTime = leadTimeInfo.getPrCreatedTime();
+ String prMergedTime = leadTimeInfo.getPrMergedTime();
+ String jobFinishTime = csvInfo.getDeployInfo().getJobFinishTime();
+ String totalTime = leadTimeInfo.getTotalTime();
+ String prLeadTime = leadTimeInfo.getPrLeadTime();
+ String pipelineLeadTime = leadTimeInfo.getPipelineLeadTime();
+
+ return new String[] { organization, pipelineName, stepName, valid, buildNumber, committerName, creatorName,
+ firstCommitTimeInPr, commitDate, prCreatedTime, prMergedTime, jobFinishTime, totalTime, prLeadTime,
+ pipelineLeadTime, state, branch };
+ }
+
public InputStreamResource getDataFromCSV(ReportType reportDataType, long csvTimeStamp) {
return switch (reportDataType) {
case METRIC -> readStringFromCsvFile(
@@ -170,37 +176,44 @@ private void createCsvDirToConvertData() {
public void convertBoardDataToCSV(List<JiraCardDTO> cardDTOList, List<BoardCSVConfig> fields,
List<BoardCSVConfig> extraFields, String csvTimeStamp) {
log.info("Start to create board csv directory");
+ String[][] mergedArrays = assembleBoardData(cardDTOList, fields, extraFields);
+ writeDataToCSV(csvTimeStamp, mergedArrays);
+ }
+
+ public void writeDataToCSV(String csvTimeStamp, String[][] mergedArrays) {
createCsvDirToConvertData();
String fileName = CSVFileNameEnum.BOARD.getValue() + FILENAME_SEPARATOR + csvTimeStamp + CSV_EXTENSION;
if (!fileName.contains("..") && fileName.startsWith(FILE_LOCAL_PATH)) {
try (CSVWriter writer = new CSVWriter(new FileWriter(fileName))) {
- List<BoardCSVConfig> fixedFields = new ArrayList<>(fields);
- fixedFields.removeAll(extraFields);
-
- String[][] fixedFieldsData = getFixedFieldsData(cardDTOList, fixedFields);
- String[][] extraFieldsData = getExtraFieldsData(cardDTOList, extraFields);
-
- String[] fixedFieldsRow = fixedFieldsData[0];
- String targetElement = "Cycle Time";
- List<String> fixedFieldsRowList = Arrays.asList(fixedFieldsRow);
- int targetIndex = fixedFieldsRowList.indexOf(targetElement) + 1;
-
- String[][] mergedArrays = mergeArrays(fixedFieldsData, extraFieldsData, targetIndex);
-
writer.writeAll(Arrays.asList(mergedArrays));
-
}
catch (IOException e) {
- log.error("Failed to write file", e);
+ log.error("Failed to write board file", e);
throw new FileIOException(e);
}
}
else {
- throw new GenerateReportException("Failed to generate csv file,invalid csvTimestamp");
+ throw new GenerateReportException("Failed to generate board csv file, invalid csvTimestamp");
}
}
+ public String[][] assembleBoardData(List<JiraCardDTO> cardDTOList, List<BoardCSVConfig> fields,
+ List<BoardCSVConfig> extraFields) {
+ List<BoardCSVConfig> fixedFields = new ArrayList<>(fields);
+ fixedFields.removeAll(extraFields);
+
+ String[][] fixedFieldsData = getFixedFieldsData(cardDTOList, fixedFields);
+ String[][] extraFieldsData = getExtraFieldsData(cardDTOList, extraFields);
+
+ String[] fixedFieldsRow = fixedFieldsData[0];
+ String targetElement = "Cycle Time";
+ List<String> fixedFieldsRowList = Arrays.asList(fixedFieldsRow);
+ int targetIndex = fixedFieldsRowList.indexOf(targetElement) + 1;
+
+ return mergeArrays(fixedFieldsData, extraFieldsData, targetIndex);
+ }
+
public String[][] mergeArrays(String[][] fixedFieldsData, String[][] extraFieldsData, int fixedColumnCount) {
int mergedColumnLength = fixedFieldsData[0].length + extraFieldsData[0].length;
String[][] mergedArray = new String[fixedFieldsData.length][mergedColumnLength];
@@ -289,33 +302,7 @@ private String[] getFixedDataPerRow(JiraCardDTO cardDTO, int columnCount) {
rowData[0] = cardDTO.getBaseInfo().getKey();
if (cardDTO.getBaseInfo().getFields() != null) {
- rowData[1] = cardDTO.getBaseInfo().getFields().getSummary();
- rowData[2] = cardDTO.getBaseInfo().getFields().getIssuetype().getName();
- rowData[3] = cardDTO.getBaseInfo().getFields().getStatus().getName();
- if (cardDTO.getBaseInfo().getFields().getLastStatusChangeDate() != null) {
- rowData[4] = convertToSimpleISOFormat(cardDTO.getBaseInfo().getFields().getLastStatusChangeDate());
- }
- rowData[5] = String.valueOf(cardDTO.getBaseInfo().getFields().getStoryPoints());
- if (cardDTO.getBaseInfo().getFields().getAssignee() != null) {
- rowData[6] = cardDTO.getBaseInfo().getFields().getAssignee().getDisplayName();
- }
- if (cardDTO.getBaseInfo().getFields().getReporter() != null) {
- rowData[7] = cardDTO.getBaseInfo().getFields().getReporter().getDisplayName();
- }
-
- rowData[8] = cardDTO.getBaseInfo().getFields().getProject().getKey();
- rowData[9] = cardDTO.getBaseInfo().getFields().getProject().getName();
- rowData[10] = cardDTO.getBaseInfo().getFields().getPriority().getName();
-
- if (cardDTO.getBaseInfo().getFields().getParent() != null) {
- rowData[11] = cardDTO.getBaseInfo().getFields().getParent().getFields().getSummary();
- }
-
- if (cardDTO.getBaseInfo().getFields().getSprint() != null) {
- rowData[12] = cardDTO.getBaseInfo().getFields().getSprint().getName();
- }
-
- rowData[13] = String.join(",", cardDTO.getBaseInfo().getFields().getLabels());
+ fixDataWithFields(cardDTO, rowData);
}
}
@@ -332,7 +319,37 @@ private String[] getFixedDataPerRow(JiraCardDTO cardDTO, int columnCount) {
return rowData;
}
- private String getExtraDataPerRow(Object object, BoardCSVConfig extraField) {
+ private void fixDataWithFields(JiraCardDTO cardDTO, String[] rowData) {
+ rowData[1] = cardDTO.getBaseInfo().getFields().getSummary();
+ rowData[2] = cardDTO.getBaseInfo().getFields().getIssuetype().getName();
+ rowData[3] = cardDTO.getBaseInfo().getFields().getStatus().getName();
+ if (cardDTO.getBaseInfo().getFields().getLastStatusChangeDate() != null) {
+ rowData[4] = convertToSimpleISOFormat(cardDTO.getBaseInfo().getFields().getLastStatusChangeDate());
+ }
+ rowData[5] = String.valueOf(cardDTO.getBaseInfo().getFields().getStoryPoints());
+ if (cardDTO.getBaseInfo().getFields().getAssignee() != null) {
+ rowData[6] = cardDTO.getBaseInfo().getFields().getAssignee().getDisplayName();
+ }
+ if (cardDTO.getBaseInfo().getFields().getReporter() != null) {
+ rowData[7] = cardDTO.getBaseInfo().getFields().getReporter().getDisplayName();
+ }
+
+ rowData[8] = cardDTO.getBaseInfo().getFields().getProject().getKey();
+ rowData[9] = cardDTO.getBaseInfo().getFields().getProject().getName();
+ rowData[10] = cardDTO.getBaseInfo().getFields().getPriority().getName();
+
+ if (cardDTO.getBaseInfo().getFields().getParent() != null) {
+ rowData[11] = cardDTO.getBaseInfo().getFields().getParent().getFields().getSummary();
+ }
+
+ if (cardDTO.getBaseInfo().getFields().getSprint() != null) {
+ rowData[12] = cardDTO.getBaseInfo().getFields().getSprint().getName();
+ }
+
+ rowData[13] = String.join(",", cardDTO.getBaseInfo().getFields().getLabels());
+ }
+
+ public String getExtraDataPerRow(Object object, BoardCSVConfig extraField) {
Map elementMap = (Map) object;
if (elementMap == null) {
return null;
@@ -381,12 +398,12 @@ public void convertMetricDataToCSV(ReportResponse reportResponse, String csvTime
csvWriter.writeAll(convertReportResponseToCSVRows(reportResponse));
}
catch (IOException e) {
- log.error("Failed to write file", e);
+ log.error("Failed to write metric file", e);
throw new FileIOException(e);
}
}
else {
- throw new GenerateReportException("Failed to generate csv file,invalid csvTimestamp");
+ throw new GenerateReportException("Failed to generate metric csv file, invalid csvTimestamp");
}
}
@@ -405,6 +422,11 @@ private List convertReportResponseToCSVRows(ReportResponse reportRespo
if (classificationList != null)
classificationList.forEach(classification -> rows.addAll(getRowsFormClassification(classification)));
+ Rework rework = reportResponse.getRework();
+ if (rework != null) {
+ rows.addAll(getRowFromRework(rework));
+ }
+
DeploymentFrequency deploymentFrequency = reportResponse.getDeploymentFrequency();
if (deploymentFrequency != null)
rows.addAll(getRowsFromDeploymentFrequency(deploymentFrequency));
@@ -413,13 +435,13 @@ private List convertReportResponseToCSVRows(ReportResponse reportRespo
if (leadTimeForChanges != null)
rows.addAll(getRowsFromLeadTimeForChanges(leadTimeForChanges));
- ChangeFailureRate changeFailureRate = reportResponse.getChangeFailureRate();
- if (changeFailureRate != null)
- rows.addAll(getRowsFromChangeFailureRate(changeFailureRate));
+ DevChangeFailureRate devChangeFailureRate = reportResponse.getDevChangeFailureRate();
+ if (devChangeFailureRate != null)
+ rows.addAll(getRowsFromDevChangeFailureRate(devChangeFailureRate));
- MeanTimeToRecovery meanTimeToRecovery = reportResponse.getMeanTimeToRecovery();
- if (meanTimeToRecovery != null)
- rows.addAll(getRowsFromMeanTimeToRecovery(meanTimeToRecovery));
+ DevMeanTimeToRecovery devMeanTimeToRecovery = reportResponse.getDevMeanTimeToRecovery();
+ if (devMeanTimeToRecovery != null)
+ rows.addAll(getRowsFromDevMeanTimeToRecovery(devMeanTimeToRecovery));
return rows;
}
@@ -433,24 +455,25 @@ private List getRowsFormVelocity(Velocity velocity) {
}
private List getRowsFromCycleTime(CycleTime cycleTime) {
+ String cycleTimeTitle = "Cycle time";
List rows = new ArrayList<>();
List rowsForSelectedStepItemAverageTime = new ArrayList<>();
- rows.add(new String[] { "Cycle time", "Average cycle time(days/storyPoint)",
+ rows.add(new String[] { cycleTimeTitle, "Average cycle time(days/storyPoint)",
String.valueOf(cycleTime.getAverageCycleTimePerSP()) });
- rows.add(new String[] { "Cycle time", "Average cycle time(days/card)",
+ rows.add(new String[] { cycleTimeTitle, "Average cycle time(days/card)",
String.valueOf(cycleTime.getAverageCycleTimePerCard()) });
List swimlaneList = cycleTime.getSwimlaneList();
swimlaneList.forEach(cycleTimeForSelectedStepItem -> {
- String StepName = formatStepName(cycleTimeForSelectedStepItem);
+ String stepName = formatStepName(cycleTimeForSelectedStepItem);
double proportion = cycleTimeForSelectedStepItem.getTotalTime() / cycleTime.getTotalTimeForCards();
- rows.add(new String[] { "Cycle time", "Total " + StepName + " time / Total cycle time",
+ rows.add(new String[] { cycleTimeTitle, "Total " + stepName + " time / Total cycle time",
DecimalUtil.formatDecimalTwo(proportion * 100) });
rowsForSelectedStepItemAverageTime
- .add(new String[] { "Cycle time", "Average " + StepName + " time(days/storyPoint)",
+ .add(new String[] { cycleTimeTitle, "Average " + stepName + " time(days/storyPoint)",
DecimalUtil.formatDecimalTwo(cycleTimeForSelectedStepItem.getAverageTimeForSP()) });
rowsForSelectedStepItemAverageTime
- .add(new String[] { "Cycle time", "Average " + StepName + " time(days/card)",
+ .add(new String[] { cycleTimeTitle, "Average " + stepName + " time(days/card)",
DecimalUtil.formatDecimalTwo(cycleTimeForSelectedStepItem.getAverageTimeForCards()) });
});
rows.addAll(rowsForSelectedStepItemAverageTime);
@@ -458,6 +481,15 @@ private List getRowsFromCycleTime(CycleTime cycleTime) {
return rows;
}
+ private List<String[]> getRowFromRework(Rework rework) {
+ List<String[]> rows = new ArrayList<>();
+ rows.add(new String[] { REWORK_FIELD, "Total rework times", String.valueOf(rework.getTotalReworkTimes()) });
+ rows.add(new String[] { REWORK_FIELD, "Total rework cards", String.valueOf(rework.getTotalReworkCards()) });
+ rows.add(new String[] { REWORK_FIELD, "Rework cards ratio(Total rework cards/Throughput)",
+ formatDecimalFour(rework.getReworkCardsRatio()) });
+ return rows;
+ }
+
private String formatStepName(CycleTimeForSelectedStepItem cycleTimeForSelectedStepItem) {
return switch (cycleTimeForSelectedStepItem.getOptionalItemName()) {
case "In Dev" -> "development";
@@ -483,7 +515,7 @@ private List getRowsFromDeploymentFrequency(DeploymentFrequency deploy
List deploymentFrequencyOfPipelines = deploymentFrequency
.getDeploymentFrequencyOfPipelines();
deploymentFrequencyOfPipelines.forEach(pipeline -> rows.add(new String[] { "Deployment frequency",
- pipeline.getName() + " / " + pipeline.getStep().replaceAll(":\\w+: ", "")
+ pipeline.getName() + " / " + extractPipelineStep(pipeline.getStep())
+ " / Deployment frequency(Deployments/Day)",
DecimalUtil.formatDecimalTwo(pipeline.getDeploymentFrequency()) }));
@@ -496,32 +528,38 @@ private List getRowsFromDeploymentFrequency(DeploymentFrequency deploy
return rows;
}
+ private String extractPipelineStep(String step) {
+ return step.replaceAll(":\\w+: ", "");
+ }
+
private List<String[]> getRowsFromLeadTimeForChanges(LeadTimeForChanges leadTimeForChanges) {
List<String[]> rows = new ArrayList<>();
+
List leadTimeForChangesOfPipelines = leadTimeForChanges
.getLeadTimeForChangesOfPipelines();
+ String leadTimeForChangesTitle = "Lead time for changes";
leadTimeForChangesOfPipelines.forEach(pipeline -> {
- String pipelineStep = pipeline.getStep().replaceAll(":\\w+: ", "");
- rows.add(new String[] { "Lead time for changes",
+ String pipelineStep = extractPipelineStep(pipeline.getStep());
+ rows.add(new String[] { leadTimeForChangesTitle,
pipeline.getName() + " / " + pipelineStep + " / PR Lead Time",
DecimalUtil.formatDecimalTwo(TimeUtils.minutesToUnit(pipeline.getPrLeadTime(), HOURS)) });
- rows.add(new String[] { "Lead time for changes",
+ rows.add(new String[] { leadTimeForChangesTitle,
pipeline.getName() + " / " + pipelineStep + " / Pipeline Lead Time",
DecimalUtil.formatDecimalTwo(TimeUtils.minutesToUnit(pipeline.getPipelineLeadTime(), HOURS)) });
- rows.add(new String[] { "Lead time for changes",
+ rows.add(new String[] { leadTimeForChangesTitle,
pipeline.getName() + " / " + pipelineStep + " / Total Lead Time",
DecimalUtil.formatDecimalTwo(TimeUtils.minutesToUnit(pipeline.getTotalDelayTime(), HOURS)) });
});
AvgLeadTimeForChanges avgLeadTimeForChanges = leadTimeForChanges.getAvgLeadTimeForChanges();
if (leadTimeForChangesOfPipelines.size() > 1) {
- rows.add(new String[] { "Lead time for changes", avgLeadTimeForChanges.getName() + " / PR Lead Time",
+ rows.add(new String[] { leadTimeForChangesTitle, avgLeadTimeForChanges.getName() + " / PR Lead Time",
DecimalUtil
.formatDecimalTwo(TimeUtils.minutesToUnit(avgLeadTimeForChanges.getPrLeadTime(), HOURS)) });
- rows.add(new String[] { "Lead time for changes", avgLeadTimeForChanges.getName() + " / Pipeline Lead Time",
+ rows.add(new String[] { leadTimeForChangesTitle, avgLeadTimeForChanges.getName() + " / Pipeline Lead Time",
DecimalUtil.formatDecimalTwo(
TimeUtils.minutesToUnit(avgLeadTimeForChanges.getPipelineLeadTime(), HOURS)) });
- rows.add(new String[] { "Lead time for changes", avgLeadTimeForChanges.getName() + " / Total Lead Time",
+ rows.add(new String[] { leadTimeForChangesTitle, avgLeadTimeForChanges.getName() + " / Total Lead Time",
DecimalUtil
.formatDecimalTwo(TimeUtils.minutesToUnit(avgLeadTimeForChanges.getTotalDelayTime(), HOURS)) });
}
@@ -529,37 +567,38 @@ private List getRowsFromLeadTimeForChanges(LeadTimeForChanges leadTime
return rows;
}
- private List<String[]> getRowsFromChangeFailureRate(ChangeFailureRate changeFailureRate) {
+ private List<String[]> getRowsFromDevChangeFailureRate(DevChangeFailureRate devChangeFailureRate) {
List<String[]> rows = new ArrayList<>();
- List changeFailureRateOfPipelines = changeFailureRate
- .getChangeFailureRateOfPipelines();
- changeFailureRateOfPipelines.forEach(pipeline -> rows.add(new String[] { "Change failure rate",
- pipeline.getName() + " / " + pipeline.getStep().replaceAll(":\\w+: ", "") + " / Failure rate",
- DecimalUtil.formatDecimalTwo(pipeline.getFailureRate() * 100) }));
-
- AvgChangeFailureRate avgChangeFailureRate = changeFailureRate.getAvgChangeFailureRate();
- if (changeFailureRateOfPipelines.size() > 1)
- rows.add(new String[] { "Change failure rate", avgChangeFailureRate.getName() + " / Failure rate",
- DecimalUtil.formatDecimalTwo(avgChangeFailureRate.getFailureRate() * 100) });
+ List devChangeFailureRateOfPipelines = devChangeFailureRate
+ .getDevChangeFailureRateOfPipelines();
+ devChangeFailureRateOfPipelines.forEach(pipeline -> rows.add(new String[] { "Dev change failure rate",
+ pipeline.getName() + " / " + extractPipelineStep(pipeline.getStep()) + " / Dev change failure rate",
+ DecimalUtil.formatDecimalFour(pipeline.getFailureRate()) }));
+
+ AvgDevChangeFailureRate avgDevChangeFailureRate = devChangeFailureRate.getAvgDevChangeFailureRate();
+ if (devChangeFailureRateOfPipelines.size() > 1)
+ rows.add(new String[] { "Dev change failure rate",
+ avgDevChangeFailureRate.getName() + " / Dev change failure rate",
+ DecimalUtil.formatDecimalTwo(avgDevChangeFailureRate.getFailureRate() * 100) });
return rows;
}
- private List<String[]> getRowsFromMeanTimeToRecovery(MeanTimeToRecovery meanTimeToRecovery) {
+ private List<String[]> getRowsFromDevMeanTimeToRecovery(DevMeanTimeToRecovery devMeanTimeToRecovery) {
List<String[]> rows = new ArrayList<>();
- List meanTimeRecoveryPipelines = meanTimeToRecovery
- .getMeanTimeRecoveryPipelines();
- meanTimeRecoveryPipelines.forEach(pipeline -> rows.add(new String[] { "Mean Time To Recovery",
- pipeline.getPipelineName() + " / " + pipeline.getPipelineStep().replaceAll(":\\w+: ", "")
- + " / Mean Time To Recovery",
+ List devMeanTimeToRecoveryOfPipelines = devMeanTimeToRecovery
+ .getDevMeanTimeToRecoveryOfPipelines();
+ devMeanTimeToRecoveryOfPipelines.forEach(pipeline -> rows.add(new String[] { "Dev mean time to recovery",
+ pipeline.getName() + " / " + extractPipelineStep(pipeline.getStep()) + " / Dev mean time to recovery",
DecimalUtil
.formatDecimalTwo(TimeUtils.millisToUnit(pipeline.getTimeToRecovery().doubleValue(), HOURS)) }));
- AvgMeanTimeToRecovery avgMeanTimeToRecovery = meanTimeToRecovery.getAvgMeanTimeToRecovery();
- if (meanTimeRecoveryPipelines.size() > 1)
- rows.add(new String[] { "Mean Time To Recovery",
- avgMeanTimeToRecovery.getName() + " / Mean Time To Recovery", DecimalUtil.formatDecimalTwo(
- TimeUtils.millisToUnit(avgMeanTimeToRecovery.getTimeToRecovery().doubleValue(), HOURS)) });
+ AvgDevMeanTimeToRecovery avgDevMeanTimeToRecovery = devMeanTimeToRecovery.getAvgDevMeanTimeToRecovery();
+ if (devMeanTimeToRecoveryOfPipelines.size() > 1)
+ rows.add(new String[] { "Dev mean time to recovery",
+ avgDevMeanTimeToRecovery.getName() + " / Dev mean time to recovery",
+ DecimalUtil.formatDecimalTwo(TimeUtils
+ .millisToUnit(avgDevMeanTimeToRecovery.getTimeToRecovery().doubleValue(), HOURS)) });
return rows;
}
diff --git a/backend/src/main/java/heartbeat/service/report/GenerateReporterService.java b/backend/src/main/java/heartbeat/service/report/GenerateReporterService.java
index 8791757500..ca2a75f973 100644
--- a/backend/src/main/java/heartbeat/service/report/GenerateReporterService.java
+++ b/backend/src/main/java/heartbeat/service/report/GenerateReporterService.java
@@ -1,9 +1,11 @@
package heartbeat.service.report;
-import heartbeat.client.dto.codebase.github.PipelineLeadTime;
+import heartbeat.controller.board.dto.request.CardStepsEnum;
+import heartbeat.controller.board.dto.response.CardCollection;
import heartbeat.controller.report.dto.request.GenerateReportRequest;
import heartbeat.controller.report.dto.request.JiraBoardSetting;
import heartbeat.controller.report.dto.response.ErrorInfo;
+import heartbeat.controller.report.dto.response.MetricsDataCompleted;
import heartbeat.controller.report.dto.response.PipelineCSVInfo;
import heartbeat.controller.report.dto.response.ReportMetricsError;
import heartbeat.controller.report.dto.response.ReportResponse;
@@ -15,12 +17,14 @@
import heartbeat.handler.AsyncExceptionHandler;
import heartbeat.handler.AsyncMetricsDataHandler;
import heartbeat.handler.AsyncReportRequestHandler;
-import heartbeat.service.report.calculator.ChangeFailureRateCalculator;
+import heartbeat.handler.base.AsyncExceptionDTO;
+import heartbeat.service.report.calculator.DevChangeFailureRateCalculator;
import heartbeat.service.report.calculator.ClassificationCalculator;
import heartbeat.service.report.calculator.CycleTimeCalculator;
import heartbeat.service.report.calculator.DeploymentFrequencyCalculator;
import heartbeat.service.report.calculator.LeadTimeForChangesCalculator;
import heartbeat.service.report.calculator.MeanToRecoveryCalculator;
+import heartbeat.service.report.calculator.ReworkCalculator;
import heartbeat.service.report.calculator.VelocityCalculator;
import heartbeat.service.report.calculator.model.FetchedData;
import heartbeat.service.report.calculator.model.FetchedData.BuildKiteData;
@@ -35,11 +39,13 @@
import java.util.List;
import java.util.Objects;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import static heartbeat.controller.report.dto.request.MetricType.BOARD;
import static heartbeat.controller.report.dto.request.MetricType.DORA;
import static heartbeat.service.report.scheduler.DeleteExpireCSVScheduler.EXPORT_CSV_VALIDITY_TIME;
import static heartbeat.util.ValueUtil.getValueOrNull;
+import static java.util.Objects.isNull;
@Service
@RequiredArgsConstructor
@@ -48,6 +54,8 @@ public class GenerateReporterService {
private final KanbanService kanbanService;
+ private final KanbanCsvService kanbanCsvService;
+
private final PipelineService pipelineService;
private final WorkDay workDay;
@@ -56,7 +64,7 @@ public class GenerateReporterService {
private final DeploymentFrequencyCalculator deploymentFrequency;
- private final ChangeFailureRateCalculator changeFailureRate;
+ private final DevChangeFailureRateCalculator devChangeFailureRate;
private final MeanToRecoveryCalculator meanToRecoveryCalculator;
@@ -68,6 +76,8 @@ public class GenerateReporterService {
private final LeadTimeForChangesCalculator leadTimeForChangesCalculator;
+ private final ReworkCalculator reworkCalculator;
+
private final AsyncReportRequestHandler asyncReportRequestHandler;
private final AsyncMetricsDataHandler asyncMetricsDataHandler;
@@ -83,7 +93,6 @@ public void generateBoardReport(GenerateReportRequest request) {
boardReportId);
try {
saveReporterInHandler(generateBoardReporter(request), boardReportId);
- asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(boardReportId, BOARD);
log.info(
"Successfully generate board report, _metrics: {}, _considerHoliday: {}, _startTime: {}, _endTime: {}, _boardReportId: {}",
request.getMetrics(), request.getConsiderHoliday(), request.getStartTime(), request.getEndTime(),
@@ -92,7 +101,9 @@ public void generateBoardReport(GenerateReportRequest request) {
catch (BaseException e) {
asyncExceptionHandler.put(boardReportId, e);
if (List.of(401, 403, 404).contains(e.getStatus()))
- asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(boardReportId, BOARD);
+ asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(
+ IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()), BOARD, false);
+
}
}
@@ -102,16 +113,18 @@ public void generateDoraReport(GenerateReportRequest request) {
FetchedData fetchedData = new FetchedData();
if (CollectionUtils.isNotEmpty(request.getPipelineMetrics())) {
GenerateReportRequest pipelineRequest = request.toPipelineRequest();
- fetchOriginalData(pipelineRequest, fetchedData);
generatePipelineReport(pipelineRequest, fetchedData);
}
if (CollectionUtils.isNotEmpty(request.getSourceControlMetrics())) {
GenerateReportRequest sourceControlRequest = request.toSourceControlRequest();
- fetchOriginalData(sourceControlRequest, fetchedData);
generateSourceControlReport(sourceControlRequest, fetchedData);
}
- generateCSVForPipeline(request, fetchedData.getBuildKiteData());
- asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(request.getDoraReportId(), DORA);
+
+ MetricsDataCompleted previousMetricsCompleted = asyncMetricsDataHandler
+ .getMetricsDataCompleted(IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()));
+ if (Boolean.FALSE.equals(previousMetricsCompleted.doraMetricsCompleted())) {
+ CompletableFuture.runAsync(() -> generateCSVForPipeline(request, fetchedData.getBuildKiteData()));
+ }
}
private void generatePipelineReport(GenerateReportRequest request, FetchedData fetchedData) {
@@ -121,6 +134,7 @@ private void generatePipelineReport(GenerateReportRequest request, FetchedData f
request.getPipelineMetrics(), request.getConsiderHoliday(), request.getStartTime(),
request.getEndTime(), pipelineReportId);
try {
+ fetchBuildKiteData(request, fetchedData);
saveReporterInHandler(generatePipelineReporter(request, fetchedData), pipelineReportId);
log.info(
"Successfully generate pipeline report, _metrics: {}, _considerHoliday: {}, _startTime: {}, _endTime: {}, _pipelineReportId: {}",
@@ -130,7 +144,8 @@ private void generatePipelineReport(GenerateReportRequest request, FetchedData f
catch (BaseException e) {
asyncExceptionHandler.put(pipelineReportId, e);
if (List.of(401, 403, 404).contains(e.getStatus()))
- asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(request.getDoraReportId(), DORA);
+ asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(
+ IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()), DORA, false);
}
}
@@ -141,6 +156,7 @@ private void generateSourceControlReport(GenerateReportRequest request, FetchedD
request.getSourceControlMetrics(), request.getConsiderHoliday(), request.getStartTime(),
request.getEndTime(), sourceControlReportId);
try {
+ fetchGitHubData(request, fetchedData);
saveReporterInHandler(generateSourceControlReporter(request, fetchedData), sourceControlReportId);
log.info(
"Successfully generate source control report, _metrics: {}, _considerHoliday: {}, _startTime: {}, _endTime: {}, _sourceControlReportId: {}",
@@ -150,7 +166,8 @@ private void generateSourceControlReport(GenerateReportRequest request, FetchedD
catch (BaseException e) {
asyncExceptionHandler.put(sourceControlReportId, e);
if (List.of(401, 403, 404).contains(e.getStatus()))
- asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(request.getDoraReportId(), DORA);
+ asyncMetricsDataHandler.updateMetricsDataCompletedInHandler(
+ IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()), DORA, false);
}
}
@@ -169,9 +186,9 @@ private synchronized ReportResponse generatePipelineReporter(GenerateReportReque
case "deployment frequency" -> reportResponse.setDeploymentFrequency(
deploymentFrequency.calculate(fetchedData.getBuildKiteData().getDeployTimesList(),
Long.parseLong(request.getStartTime()), Long.parseLong(request.getEndTime())));
- case "change failure rate" -> reportResponse.setChangeFailureRate(
- changeFailureRate.calculate(fetchedData.getBuildKiteData().getDeployTimesList()));
- case "mean time to recovery" -> reportResponse.setMeanTimeToRecovery(
+ case "dev change failure rate" -> reportResponse.setDevChangeFailureRate(
+ devChangeFailureRate.calculate(fetchedData.getBuildKiteData().getDeployTimesList()));
+ case "dev mean time to recovery" -> reportResponse.setDevMeanTimeToRecovery(
meanToRecoveryCalculator.calculate(fetchedData.getBuildKiteData().getDeployTimesList()));
default -> {
// TODO
@@ -184,30 +201,61 @@ private synchronized ReportResponse generatePipelineReporter(GenerateReportReque
private synchronized ReportResponse generateBoardReporter(GenerateReportRequest request) {
workDay.changeConsiderHolidayMode(request.getConsiderHoliday());
- FetchedData fetchedData = fetchOriginalData(request, new FetchedData());
+ FetchedData fetchedData = fetchJiraBoardData(request, new FetchedData());
ReportResponse reportResponse = new ReportResponse(EXPORT_CSV_VALIDITY_TIME);
JiraBoardSetting jiraBoardSetting = request.getJiraBoardSetting();
request.getBoardMetrics().forEach(metric -> {
switch (metric) {
- case "velocity" -> reportResponse.setVelocity(velocityCalculator
- .calculateVelocity(fetchedData.getCardCollectionInfo().getRealDoneCardCollection()));
- case "cycle time" -> reportResponse.setCycleTime(cycleTimeCalculator.calculateCycleTime(
- fetchedData.getCardCollectionInfo().getRealDoneCardCollection(),
- jiraBoardSetting.getBoardColumns()));
- case "classification" -> reportResponse
- .setClassificationList(classificationCalculator.calculate(jiraBoardSetting.getTargetFields(),
- fetchedData.getCardCollectionInfo().getRealDoneCardCollection()));
+ case "velocity" -> assembleVelocity(fetchedData, reportResponse);
+ case "cycle time" -> assembleCycleTime(fetchedData, reportResponse, jiraBoardSetting);
+ case "classification" -> assembleClassification(fetchedData, reportResponse, jiraBoardSetting);
+ case "rework times" -> assembleReworkInfo(request, fetchedData, reportResponse);
default -> {
// TODO
}
}
});
+ CompletableFuture.runAsync(() -> generateCsvForBoard(request, fetchedData));
return reportResponse;
}
+ private void generateCsvForBoard(GenerateReportRequest request, FetchedData fetchedData) {
+ kanbanCsvService.generateCsvInfo(request, fetchedData.getCardCollectionInfo().getRealDoneCardCollection(),
+ fetchedData.getCardCollectionInfo().getNonDoneCardCollection());
+ asyncMetricsDataHandler
+ .updateMetricsDataCompletedInHandler(IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()), BOARD, true);
+ }
+
+ private void assembleVelocity(FetchedData fetchedData, ReportResponse reportResponse) {
+ CardCollection cardCollection = fetchedData.getCardCollectionInfo().getRealDoneCardCollection();
+ reportResponse.setVelocity(velocityCalculator.calculateVelocity(cardCollection));
+ }
+
+ private void assembleCycleTime(FetchedData fetchedData, ReportResponse reportResponse,
+ JiraBoardSetting jiraBoardSetting) {
+ reportResponse.setCycleTime(cycleTimeCalculator.calculateCycleTime(
+ fetchedData.getCardCollectionInfo().getRealDoneCardCollection(), jiraBoardSetting.getBoardColumns()));
+ }
+
+ private void assembleClassification(FetchedData fetchedData, ReportResponse reportResponse,
+ JiraBoardSetting jiraBoardSetting) {
+ reportResponse.setClassificationList(classificationCalculator.calculate(jiraBoardSetting.getTargetFields(),
+ fetchedData.getCardCollectionInfo().getRealDoneCardCollection()));
+ }
+
+ private void assembleReworkInfo(GenerateReportRequest request, FetchedData fetchedData,
+ ReportResponse reportResponse) {
+ if (isNull(request.getJiraBoardSetting().getReworkTimesSetting())) {
+ return;
+ }
+ CardCollection realDoneCardCollection = fetchedData.getCardCollectionInfo().getRealDoneCardCollection();
+ CardStepsEnum enumReworkState = request.getJiraBoardSetting().getReworkTimesSetting().getEnumReworkState();
+ reportResponse.setRework(reworkCalculator.calculateRework(realDoneCardCollection, enumReworkState));
+ }
+
private synchronized ReportResponse generateSourceControlReporter(GenerateReportRequest request,
FetchedData fetchedData) {
workDay.changeConsiderHolidayMode(request.getConsiderHoliday());
@@ -227,25 +275,24 @@ private synchronized ReportResponse generateSourceControlReporter(GenerateReport
return reportResponse;
}
- private FetchedData fetchOriginalData(GenerateReportRequest request, FetchedData fetchedData) {
+ private void fetchBuildKiteData(GenerateReportRequest request, FetchedData fetchedData) {
+ if (request.getBuildKiteSetting() == null)
+ throw new BadRequestException("Failed to fetch BuildKite info due to BuildKite setting is null.");
+ fetchedData.setBuildKiteData(pipelineService.fetchBuildKiteInfo(request));
+ }
+
+ private void fetchGitHubData(GenerateReportRequest request, FetchedData fetchedData) {
+ if (request.getCodebaseSetting() == null)
+ throw new BadRequestException("Failed to fetch Github info due to code base setting is null.");
+ fetchedData.setBuildKiteData(pipelineService.fetchGitHubData(request));
+ }
+
+ private FetchedData fetchJiraBoardData(GenerateReportRequest request, FetchedData fetchedData) {
if (CollectionUtils.isNotEmpty(request.getBoardMetrics())) {
if (request.getJiraBoardSetting() == null)
throw new BadRequestException("Failed to fetch Jira info due to Jira board setting is null.");
fetchedData.setCardCollectionInfo(kanbanService.fetchDataFromKanban(request));
}
-
- if (CollectionUtils.isNotEmpty(request.getSourceControlMetrics())) {
- if (request.getCodebaseSetting() == null)
- throw new BadRequestException("Failed to fetch Github info due to code base setting is null.");
- fetchedData.setBuildKiteData(pipelineService.fetchGithubData(request));
- }
-
- if (CollectionUtils.isNotEmpty(request.getPipelineMetrics())) {
- if (request.getBuildKiteSetting() == null)
- throw new BadRequestException("Failed to fetch BuildKite info due to BuildKite setting is null.");
- fetchedData.setBuildKiteData(pipelineService.fetchBuildKiteInfo(request));
- }
-
return fetchedData;
}
@@ -255,6 +302,8 @@ private void generateCSVForPipeline(GenerateReportRequest request, BuildKiteData
request.getBuildKiteSetting().getDeploymentEnvList());
csvFileGenerator.convertPipelineDataToCSV(pipelineData, request.getCsvTimeStamp());
+ asyncMetricsDataHandler
+ .updateMetricsDataCompletedInHandler(IdUtil.getDataCompletedPrefix(request.getCsvTimeStamp()), DORA, true);
}
public void generateCSVForMetric(ReportResponse reportContent, String csvTimeStamp) {
@@ -265,7 +314,7 @@ private void saveReporterInHandler(ReportResponse reportContent, String reportId
asyncReportRequestHandler.putReport(reportId, reportContent);
}
- private ErrorInfo handleAsyncExceptionAndGetErrorInfo(BaseException exception) {
+ private ErrorInfo handleAsyncExceptionAndGetErrorInfo(AsyncExceptionDTO exception) {
if (Objects.nonNull(exception)) {
int status = exception.getStatus();
final String errorMessage = exception.getMessage();
@@ -286,7 +335,7 @@ private void deleteOldCSV(long currentTimeStamp, File directory) {
if (!ObjectUtils.isEmpty(files)) {
for (File file : files) {
String fileName = file.getName();
- String[] splitResult = fileName.split("\\s*\\-|\\.\\s*");
+ String[] splitResult = fileName.split("[-.]");
String timeStamp = splitResult[1];
if (validateExpire(currentTimeStamp, Long.parseLong(timeStamp)) && !file.delete() && file.exists()) {
log.error("Failed to deleted expired CSV file, file name: {}", fileName);
@@ -317,15 +366,15 @@ private ReportResponse getReportFromHandler(String reportId) {
return asyncReportRequestHandler.getReport(reportId);
}
- public MetricsDataDTO checkReportReadyStatus(String reportTimeStamp) {
+ public MetricsDataCompleted checkReportReadyStatus(String reportTimeStamp) {
if (validateExpire(System.currentTimeMillis(), Long.parseLong(reportTimeStamp))) {
throw new GenerateReportException("Failed to get report due to report time expires");
}
- return asyncMetricsDataHandler.getReportReadyStatusByTimeStamp(reportTimeStamp);
+ return asyncMetricsDataHandler.getMetricsDataCompleted(IdUtil.getDataCompletedPrefix(reportTimeStamp));
}
public ReportResponse getComposedReportResponse(String reportId) {
- MetricsDataDTO reportReadyStatus = checkReportReadyStatus(reportId);
+ MetricsDataCompleted reportReadyStatus = checkReportReadyStatus(reportId);
ReportResponse boardReportResponse = getReportFromHandler(IdUtil.getBoardReportId(reportId));
ReportResponse pipleineReportResponse = getReportFromHandler(IdUtil.getPipelineReportId(reportId));
@@ -336,22 +385,25 @@ public ReportResponse getComposedReportResponse(String reportId) {
.velocity(getValueOrNull(boardReportResponse, ReportResponse::getVelocity))
.classificationList(getValueOrNull(boardReportResponse, ReportResponse::getClassificationList))
.cycleTime(getValueOrNull(boardReportResponse, ReportResponse::getCycleTime))
+ .rework(getValueOrNull(boardReportResponse, ReportResponse::getRework))
.exportValidityTime(EXPORT_CSV_VALIDITY_TIME)
.deploymentFrequency(getValueOrNull(pipleineReportResponse, ReportResponse::getDeploymentFrequency))
- .changeFailureRate(getValueOrNull(pipleineReportResponse, ReportResponse::getChangeFailureRate))
- .meanTimeToRecovery(getValueOrNull(pipleineReportResponse, ReportResponse::getMeanTimeToRecovery))
+ .devChangeFailureRate(getValueOrNull(pipleineReportResponse, ReportResponse::getDevChangeFailureRate))
+ .devMeanTimeToRecovery(getValueOrNull(pipleineReportResponse, ReportResponse::getDevMeanTimeToRecovery))
.leadTimeForChanges(getValueOrNull(sourceControlReportResponse, ReportResponse::getLeadTimeForChanges))
- .boardMetricsCompleted(reportReadyStatus.isBoardReady)
- .doraMetricsCompleted(reportReadyStatus.isDoraReady)
- .allMetricsCompleted(reportReadyStatus.isAllMetricsReady)
+ .boardMetricsCompleted(reportReadyStatus.boardMetricsCompleted())
+ .doraMetricsCompleted(reportReadyStatus.doraMetricsCompleted())
+ .overallMetricsCompleted(reportReadyStatus.overallMetricCompleted())
+ .allMetricsCompleted(reportReadyStatus.allMetricsCompleted())
+ .isSuccessfulCreateCsvFile(reportReadyStatus.isSuccessfulCreateCsvFile())
.reportMetricsError(reportMetricsError)
.build();
}
private ReportMetricsError getReportErrorAndHandleAsyncException(String reportId) {
- BaseException boardException = asyncExceptionHandler.get(IdUtil.getBoardReportId(reportId));
- BaseException pipelineException = asyncExceptionHandler.get(IdUtil.getPipelineReportId(reportId));
- BaseException sourceControlException = asyncExceptionHandler.get(IdUtil.getSourceControlReportId(reportId));
+ AsyncExceptionDTO boardException = asyncExceptionHandler.get(IdUtil.getBoardReportId(reportId));
+ AsyncExceptionDTO pipelineException = asyncExceptionHandler.get(IdUtil.getPipelineReportId(reportId));
+ AsyncExceptionDTO sourceControlException = asyncExceptionHandler.get(IdUtil.getSourceControlReportId(reportId));
return ReportMetricsError.builder()
.boardMetricsError(handleAsyncExceptionAndGetErrorInfo(boardException))
.pipelineMetricsError(handleAsyncExceptionAndGetErrorInfo(pipelineException))
diff --git a/backend/src/main/java/heartbeat/service/report/KanbanCsvService.java b/backend/src/main/java/heartbeat/service/report/KanbanCsvService.java
index 359f7d12f5..c9a0d3e6b2 100644
--- a/backend/src/main/java/heartbeat/service/report/KanbanCsvService.java
+++ b/backend/src/main/java/heartbeat/service/report/KanbanCsvService.java
@@ -11,11 +11,12 @@
import heartbeat.client.dto.board.jira.Status;
import heartbeat.controller.board.dto.request.BoardRequestParam;
import heartbeat.controller.board.dto.request.CardStepsEnum;
+import heartbeat.controller.board.dto.request.RequestJiraBoardColumnSetting;
import heartbeat.controller.board.dto.response.CardCollection;
+import heartbeat.controller.board.dto.response.CycleTimeInfo;
import heartbeat.controller.board.dto.response.JiraCardDTO;
import heartbeat.controller.board.dto.response.JiraColumnDTO;
import heartbeat.controller.board.dto.response.TargetField;
-import heartbeat.controller.board.dto.response.CycleTimeInfo;
import heartbeat.controller.report.dto.request.GenerateReportRequest;
import heartbeat.controller.report.dto.request.JiraBoardSetting;
import heartbeat.controller.report.dto.response.BoardCSVConfig;
@@ -24,18 +25,25 @@
import heartbeat.service.board.jira.JiraService;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
+import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Service;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.BLOCK;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.FLAG;
+import static heartbeat.controller.board.dto.request.CardStepsEnum.reworkJudgmentMap;
+
@Service
@Log4j2
@RequiredArgsConstructor
@@ -69,62 +77,51 @@ public void generateCsvInfo(GenerateReportRequest request, CardCollection realDo
boardRequestParam.getToken());
JiraColumnResult jiraColumns = jiraService.getJiraColumns(boardRequestParam, baseUrl, jiraBoardConfigDTO);
+ List<String> reworkFromStates = null;
+ CardStepsEnum reworkState = null;
+ if (request.getJiraBoardSetting().getReworkTimesSetting() != null) {
+ reworkState = request.getJiraBoardSetting().getReworkTimesSetting().getEnumReworkState();
+ List<CardStepsEnum> reworkExcludeStates = request.getJiraBoardSetting()
+ .getReworkTimesSetting()
+ .getEnumExcludeStates();
+ Set<CardStepsEnum> mappedColumns = request.getJiraBoardSetting()
+ .getBoardColumns()
+ .stream()
+ .map(RequestJiraBoardColumnSetting::getValue)
+ .map(CardStepsEnum::fromValue)
+ .collect(Collectors.toSet());
+ if (Boolean.TRUE.equals(request.getJiraBoardSetting().getTreatFlagCardAsBlock())) {
+ mappedColumns.add(BLOCK);
+ }
+ reworkFromStates = reworkJudgmentMap.get(reworkState)
+ .stream()
+ .sorted()
+ .filter(state -> !reworkExcludeStates.contains(state))
+ .filter(mappedColumns::contains)
+ .map(CardStepsEnum::getAlias)
+ .toList();
+
+ }
this.generateCSVForBoard(realDoneCardCollection.getJiraCardDTOList(),
nonDoneCardCollection.getJiraCardDTOList(), jiraColumns.getJiraColumnResponse(),
- jiraBoardSetting.getTargetFields(), request.getCsvTimeStamp());
+ jiraBoardSetting.getTargetFields(), request.getCsvTimeStamp(), reworkState, reworkFromStates);
}
private void generateCSVForBoard(List<JiraCardDTO> allDoneCards, List<JiraCardDTO> nonDoneCards,
- List<JiraColumnDTO> jiraColumns, List<TargetField> targetFields, String csvTimeStamp) {
+ List<JiraColumnDTO> jiraColumns, List<TargetField> targetFields, String csvTimeStamp,
+ CardStepsEnum reworkState, List<String> reworkFromStates) {
List<JiraCardDTO> cardDTOList = new ArrayList<>();
List<JiraCardDTO> emptyJiraCard = List.of(JiraCardDTO.builder().build());
if (allDoneCards != null) {
- if (allDoneCards.size() > 1) {
- allDoneCards.sort((preCard, nextCard) -> {
- Status preStatus = preCard.getBaseInfo().getFields().getStatus();
- Status nextStatus = nextCard.getBaseInfo().getFields().getStatus();
- Long preDateTimeStamp = preCard.getBaseInfo().getFields().getLastStatusChangeDate();
- Long nextDateTimeStamp = nextCard.getBaseInfo().getFields().getLastStatusChangeDate();
- if (Objects.isNull(preStatus) || Objects.isNull(nextStatus) || Objects.isNull(preDateTimeStamp)
- || Objects.isNull(nextDateTimeStamp)) {
- return jiraColumns.size() + 1;
- }
- else {
- return nextDateTimeStamp.compareTo(preDateTimeStamp);
- }
- });
- }
+ sortAllDoneCardsByTime(allDoneCards, jiraColumns);
cardDTOList.addAll(allDoneCards);
}
cardDTOList.addAll(emptyJiraCard);
if (nonDoneCards != null) {
- if (nonDoneCards.size() > 1) {
- nonDoneCards.sort((preCard, nextCard) -> {
- Status preStatus = preCard.getBaseInfo().getFields().getStatus();
- Status nextStatus = nextCard.getBaseInfo().getFields().getStatus();
- Long preDateTimeStamp = preCard.getBaseInfo().getFields().getLastStatusChangeDate();
- Long nextDateTimeStamp = nextCard.getBaseInfo().getFields().getLastStatusChangeDate();
- if (Objects.isNull(preStatus) || Objects.isNull(nextStatus)) {
- return jiraColumns.size() + 1;
- }
- else {
- String preCardStatusName = preStatus.getName();
- String nextCardStatusName = nextStatus.getName();
- int statusIndexComparison = getIndexForStatus(jiraColumns, nextCardStatusName)
- - getIndexForStatus(jiraColumns, preCardStatusName);
-
- if (statusIndexComparison == 0 && Objects.nonNull(preDateTimeStamp)
- && Objects.nonNull(nextDateTimeStamp)) {
- return nextDateTimeStamp.compareTo(preDateTimeStamp);
- }
-
- return statusIndexComparison;
- }
- });
- }
+ sortNonDoneCardsByStatusAndTime(nonDoneCards, jiraColumns);
cardDTOList.addAll(nonDoneCards);
}
@@ -154,12 +151,82 @@ private void generateCSVForBoard(List<JiraCardDTO> allDoneCards, List<JiraCardDTO> nonDoneCards,
+ List<BoardCSVConfig> reworkFields = new ArrayList<>();
+ if (reworkState != null) {
+ reworkFields.add(BoardCSVConfig.builder()
+ .label("Rework: total - " + reworkState.getAlias())
+ .value("totalReworkTimes")
+ .build());
+ reworkFields.addAll(reworkFromStates.stream()
+ .map(state -> BoardCSVConfig.builder()
+ .label("Rework: from " + state)
+ .value("reworkTimesFlat." + state)
+ .build())
+ .toList());
+ }
cardDTOList.forEach(card -> {
card.setCycleTimeFlat(card.buildCycleTimeFlatObject());
card.setTotalCycleTimeDivideStoryPoints(card.getTotalCycleTimeDivideStoryPoints());
+ card.setReworkTimesFlat(card.buildReworkTimesFlatObject());
});
- csvFileGenerator.convertBoardDataToCSV(cardDTOList, allBoardFields, newExtraFields, csvTimeStamp);
+ String[][] sheet = BoardSheetGenerator.builder()
+ .csvFileGenerator(csvFileGenerator)
+ .jiraCardDTOList(cardDTOList)
+ .fields(allBoardFields)
+ .extraFields(newExtraFields)
+ .reworkFields(reworkFields)
+ .build()
+ .mergeBaseInfoAndCycleTimeSheet()
+ .mergeReworkTimesSheet()
+ .generate();
+ csvFileGenerator.writeDataToCSV(csvTimeStamp, sheet);
+ }
+
+ private void sortNonDoneCardsByStatusAndTime(List<JiraCardDTO> nonDoneCards, List<JiraColumnDTO> jiraColumns) {
+ if (nonDoneCards.size() > 1) {
+ nonDoneCards.sort((preCard, nextCard) -> {
+ Status preStatus = preCard.getBaseInfo().getFields().getStatus();
+ Status nextStatus = nextCard.getBaseInfo().getFields().getStatus();
+ Long preDateTimeStamp = preCard.getBaseInfo().getFields().getLastStatusChangeDate();
+ Long nextDateTimeStamp = nextCard.getBaseInfo().getFields().getLastStatusChangeDate();
+ if (Objects.isNull(preStatus) || Objects.isNull(nextStatus)) {
+ return jiraColumns.size() + 1;
+ }
+ else {
+ String preCardStatusName = preStatus.getName();
+ String nextCardStatusName = nextStatus.getName();
+ int statusIndexComparison = getIndexForStatus(jiraColumns, nextCardStatusName)
+ - getIndexForStatus(jiraColumns, preCardStatusName);
+
+ if (statusIndexComparison == 0 && Objects.nonNull(preDateTimeStamp)
+ && Objects.nonNull(nextDateTimeStamp)) {
+ return nextDateTimeStamp.compareTo(preDateTimeStamp);
+ }
+
+ return statusIndexComparison;
+ }
+ });
+ }
+ }
+
+ private void sortAllDoneCardsByTime(List<JiraCardDTO> allDoneCards, List<JiraColumnDTO> jiraColumns) {
+ if (allDoneCards.size() > 1) {
+ allDoneCards.sort((preCard, nextCard) -> {
+ Status preStatus = preCard.getBaseInfo().getFields().getStatus();
+ Status nextStatus = nextCard.getBaseInfo().getFields().getStatus();
+ Long preDateTimeStamp = preCard.getBaseInfo().getFields().getLastStatusChangeDate();
+ Long nextDateTimeStamp = nextCard.getBaseInfo().getFields().getLastStatusChangeDate();
+ if (Objects.isNull(preStatus) || Objects.isNull(nextStatus) || Objects.isNull(preDateTimeStamp)
+ || Objects.isNull(nextDateTimeStamp)) {
+ return jiraColumns.size() + 1;
+ }
+ else {
+ return nextDateTimeStamp.compareTo(preDateTimeStamp);
+ }
+ });
+ }
}
private List<BoardCSVConfig> insertExtraFieldsAfterCycleTime(final List<BoardCSVConfig> extraFields,
@@ -255,7 +322,7 @@ private List getExtraFields(List targetFields, List
private List<BoardCSVConfig> getFixedBoardFields() {
return Arrays.stream(BoardCSVConfigEnum.values())
.map(field -> BoardCSVConfig.builder().label(field.getLabel()).value(field.getValue()).build())
- .collect(Collectors.toList());
+ .toList();
}
private String getFieldDisplayValue(Object object) {
diff --git a/backend/src/main/java/heartbeat/service/report/KanbanService.java b/backend/src/main/java/heartbeat/service/report/KanbanService.java
index 9e44d35652..bdea34cc43 100644
--- a/backend/src/main/java/heartbeat/service/report/KanbanService.java
+++ b/backend/src/main/java/heartbeat/service/report/KanbanService.java
@@ -17,12 +17,9 @@ public class KanbanService {
private final JiraService jiraService;
- private final KanbanCsvService kanbanCsvService;
-
public FetchedData.CardCollectionInfo fetchDataFromKanban(GenerateReportRequest request) {
CardCollection nonDoneCardCollection = fetchNonDoneCardCollection(request);
CardCollection realDoneCardCollection = fetchRealDoneCardCollection(request);
- kanbanCsvService.generateCsvInfo(request, realDoneCardCollection, nonDoneCardCollection);
return FetchedData.CardCollectionInfo.builder()
.realDoneCardCollection(realDoneCardCollection)
@@ -34,7 +31,7 @@ private CardCollection fetchRealDoneCardCollection(GenerateReportRequest request
JiraBoardSetting jiraBoardSetting = request.getJiraBoardSetting();
StoryPointsAndCycleTimeRequest storyPointsAndCycleTimeRequest = buildStoryPointsAndCycleTimeRequest(
jiraBoardSetting, request.getStartTime(), request.getEndTime());
- return jiraService.getStoryPointsAndCycleTimeForDoneCards(storyPointsAndCycleTimeRequest,
+ return jiraService.getStoryPointsAndCycleTimeAndReworkInfoForDoneCards(storyPointsAndCycleTimeRequest,
jiraBoardSetting.getBoardColumns(), jiraBoardSetting.getUsers(), jiraBoardSetting.getAssigneeFilter());
}
@@ -60,6 +57,7 @@ private static StoryPointsAndCycleTimeRequest buildStoryPointsAndCycleTimeReques
.targetFields(jiraBoardSetting.getTargetFields())
.overrideFields(jiraBoardSetting.getOverrideFields())
.treatFlagCardAsBlock(jiraBoardSetting.getTreatFlagCardAsBlock())
+ .reworkTimesSetting(jiraBoardSetting.getReworkTimesSetting())
.build();
}
diff --git a/backend/src/main/java/heartbeat/service/report/PipelineService.java b/backend/src/main/java/heartbeat/service/report/PipelineService.java
index 66b4e1d3a3..7ba6350cbe 100644
--- a/backend/src/main/java/heartbeat/service/report/PipelineService.java
+++ b/backend/src/main/java/heartbeat/service/report/PipelineService.java
@@ -38,7 +38,7 @@ public class PipelineService {
private final GitHubService gitHubService;
- public FetchedData.BuildKiteData fetchGithubData(GenerateReportRequest request) {
+ public FetchedData.BuildKiteData fetchGitHubData(GenerateReportRequest request) {
FetchedData.BuildKiteData buildKiteData = fetchBuildKiteInfo(request);
Map<String, String> repoMap = getRepoMap(request.getBuildKiteSetting().getDeploymentEnvList());
List<PipelineLeadTime> pipelineLeadTimes = Collections.emptyList();
diff --git a/backend/src/main/java/heartbeat/service/report/ReportService.java b/backend/src/main/java/heartbeat/service/report/ReportService.java
index e85b5d73ba..3f355ede28 100644
--- a/backend/src/main/java/heartbeat/service/report/ReportService.java
+++ b/backend/src/main/java/heartbeat/service/report/ReportService.java
@@ -1,18 +1,28 @@
package heartbeat.service.report;
import heartbeat.controller.report.dto.request.GenerateReportRequest;
-import heartbeat.controller.report.dto.request.ReportType;
import heartbeat.controller.report.dto.request.MetricType;
+import heartbeat.controller.report.dto.request.ReportType;
import heartbeat.controller.report.dto.response.MetricsDataCompleted;
+import heartbeat.controller.report.dto.response.ReportMetricsError;
+import heartbeat.controller.report.dto.response.ReportResponse;
import heartbeat.exception.NotFoundException;
import heartbeat.handler.AsyncMetricsDataHandler;
+import heartbeat.service.report.calculator.ReportGenerator;
import heartbeat.util.IdUtil;
import lombok.RequiredArgsConstructor;
import org.springframework.core.io.InputStreamResource;
import org.springframework.stereotype.Service;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
import java.util.concurrent.CompletableFuture;
+import java.util.function.Consumer;
+import static heartbeat.controller.report.dto.request.MetricType.BOARD;
+import static heartbeat.controller.report.dto.request.MetricType.DORA;
import static heartbeat.service.report.scheduler.DeleteExpireCSVScheduler.EXPORT_CSV_VALIDITY_TIME;
@Service
@@ -25,6 +35,8 @@ public class ReportService {
private final GenerateReporterService generateReporterService;
+ private final ReportGenerator reportGenerator;
+
public InputStreamResource exportCsv(ReportType reportDataType, long csvTimestamp) {
if (isExpiredTimeStamp(csvTimestamp)) {
throw new NotFoundException("Failed to fetch CSV data due to CSV not found");
@@ -36,28 +48,54 @@ private boolean isExpiredTimeStamp(long timeStamp) {
return timeStamp < System.currentTimeMillis() - EXPORT_CSV_VALIDITY_TIME;
}
- public void generateReportByType(GenerateReportRequest request, MetricType metricType) {
- initializeMetricsDataCompletedInHandler(request.getCsvTimeStamp(), metricType);
+ public void generateReport(GenerateReportRequest request) {
+ List<MetricType> metricTypes = request.getMetricTypes();
+ String timeStamp = request.getCsvTimeStamp();
+ initializeMetricsDataCompletedInHandler(metricTypes, timeStamp);
+ Map<MetricType, Consumer<GenerateReportRequest>> reportGeneratorMap = reportGenerator
+ .getReportGenerator(generateReporterService);
+ List<CompletableFuture<Void>> threadList = new ArrayList<>();
+ for (MetricType metricType : metricTypes) {
+ CompletableFuture<Void> metricTypeThread = CompletableFuture
+ .runAsync(() -> reportGeneratorMap.get(metricType).accept(request));
+ threadList.add(metricTypeThread);
+ }
+
CompletableFuture.runAsync(() -> {
- switch (metricType) {
- case BOARD -> generateReporterService.generateBoardReport(request);
- case DORA -> generateReporterService.generateDoraReport(request);
- default -> {
- // TODO
- }
+ for (CompletableFuture<Void> thread : threadList) {
+ thread.join();
}
+
+ ReportResponse reportResponse = generateReporterService.getComposedReportResponse(timeStamp);
+ if (isNotGenerateMetricError(reportResponse.getReportMetricsError())) {
+ generateReporterService.generateCSVForMetric(reportResponse, timeStamp);
+ }
+ asyncMetricsDataHandler.updateOverallMetricsCompletedInHandler(IdUtil.getDataCompletedPrefix(timeStamp));
});
}
- public void initializeMetricsDataCompletedInHandler(String timeStamp, MetricType metricType) {
- if (metricType == MetricType.BOARD) {
- asyncMetricsDataHandler.putMetricsDataCompleted(IdUtil.getBoardReportId(timeStamp),
- MetricsDataCompleted.builder().boardMetricsCompleted(false).build());
- }
- else {
- asyncMetricsDataHandler.putMetricsDataCompleted(IdUtil.getDoraReportId(timeStamp),
- MetricsDataCompleted.builder().doraMetricsCompleted(false).build());
+ private boolean isNotGenerateMetricError(ReportMetricsError reportMetricsError) {
+ return Objects.isNull(reportMetricsError.getBoardMetricsError())
+ && Objects.isNull(reportMetricsError.getSourceControlMetricsError())
+ && Objects.isNull(reportMetricsError.getPipelineMetricsError());
+ }
+
+ private void initializeMetricsDataCompletedInHandler(List<MetricType> metricTypes, String timeStamp) {
+ MetricsDataCompleted previousMetricsDataCompleted = asyncMetricsDataHandler
+ .getMetricsDataCompleted(IdUtil.getDataCompletedPrefix(timeStamp));
+ Boolean initializeBoardMetricsCompleted = null;
+ Boolean initializeDoraMetricsCompleted = null;
+ if (!Objects.isNull(previousMetricsDataCompleted)) {
+ initializeBoardMetricsCompleted = previousMetricsDataCompleted.boardMetricsCompleted();
+ initializeDoraMetricsCompleted = previousMetricsDataCompleted.doraMetricsCompleted();
}
+ asyncMetricsDataHandler
+ .putMetricsDataCompleted(IdUtil.getDataCompletedPrefix(timeStamp), MetricsDataCompleted.builder()
+ .boardMetricsCompleted(metricTypes.contains(BOARD) ? Boolean.FALSE : initializeBoardMetricsCompleted)
+ .doraMetricsCompleted(metricTypes.contains(DORA) ? Boolean.FALSE : initializeDoraMetricsCompleted)
+ .overallMetricCompleted(Boolean.FALSE)
+ .isSuccessfulCreateCsvFile(Boolean.FALSE)
+ .build());
}
}
diff --git a/backend/src/main/java/heartbeat/service/report/WorkDay.java b/backend/src/main/java/heartbeat/service/report/WorkDay.java
index 162b4871ab..94eee3c74f 100644
--- a/backend/src/main/java/heartbeat/service/report/WorkDay.java
+++ b/backend/src/main/java/heartbeat/service/report/WorkDay.java
@@ -21,7 +21,7 @@
@RequiredArgsConstructor
public class WorkDay {
- private static final long ONE_DAY = 1000 * 60 * 60 * 24;
+ private static final long ONE_DAY = 1000L * 60 * 60 * 24;
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
diff --git a/backend/src/main/java/heartbeat/service/report/calculator/ClassificationCalculator.java b/backend/src/main/java/heartbeat/service/report/calculator/ClassificationCalculator.java
index d190b81d2a..c9f4255dc5 100644
--- a/backend/src/main/java/heartbeat/service/report/calculator/ClassificationCalculator.java
+++ b/backend/src/main/java/heartbeat/service/report/calculator/ClassificationCalculator.java
@@ -46,54 +46,52 @@ public List calculate(List targetFields, CardCollec
JiraCardField jiraCardFields = jiraCardResponse.getBaseInfo().getFields();
Map<String, Object> tempFields = extractFields(jiraCardFields);
- for (String tempFieldsKey : tempFields.keySet()) {
- Object object = tempFields.get(tempFieldsKey);
- if (object instanceof JsonArray objectArray) {
- List<JsonObject> objectList = new ArrayList<>();
- for (JsonElement element : objectArray) {
- if (element.isJsonObject()) {
- JsonObject jsonObject = element.getAsJsonObject();
- objectList.add(jsonObject);
- }
- }
- mapArrayField(resultMap, tempFieldsKey, (List.of(objectList)));
- }
- else if (object instanceof List) {
- mapArrayField(resultMap, tempFieldsKey, (List.of(object)));
- }
- else if (object != null) {
- Map<String, Integer> countMap = resultMap.get(tempFieldsKey);
- if (countMap != null) {
- String displayName = pickDisplayNameFromObj(object);
- Integer count = countMap.getOrDefault(displayName, 0);
- countMap.put(displayName, count > 0 ? count + 1 : 1);
- countMap.put(NONE_KEY, countMap.get(NONE_KEY) - 1);
- }
- }
- }
+ mapFields(tempFields, resultMap);
}
- for (Map.Entry<String, Map<String, Integer>> entry : resultMap.entrySet()) {
- String fieldName = entry.getKey();
- Map<String, Integer> valueMap = entry.getValue();
+ resultMap.forEach((fieldName, valueMap) -> {
List<ClassificationNameValuePair> classificationNameValuePair = new ArrayList<>();
if (valueMap.get(NONE_KEY) == 0) {
valueMap.remove(NONE_KEY);
}
- for (Map.Entry<String, Integer> mapEntry : valueMap.entrySet()) {
- String displayName = mapEntry.getKey();
- Integer count = mapEntry.getValue();
- classificationNameValuePair
- .add(new ClassificationNameValuePair(displayName, (double) count / cards.getCardsNumber()));
- }
+ valueMap.forEach((displayName, count) -> classificationNameValuePair
+ .add(new ClassificationNameValuePair(displayName, (double) count / cards.getCardsNumber())));
classificationFields.add(new Classification(nameMap.get(fieldName), classificationNameValuePair));
- }
+ });
+
return classificationFields;
}
+ private void mapFields(Map<String, Object> tempFields, Map<String, Map<String, Integer>> resultMap) {
+ tempFields.forEach((tempFieldsKey, object) -> {
+ if (object instanceof JsonArray objectArray) {
+ List<JsonObject> objectList = new ArrayList<>();
+ objectArray.forEach(element -> {
+ if (element.isJsonObject()) {
+ JsonObject jsonObject = element.getAsJsonObject();
+ objectList.add(jsonObject);
+ }
+ });
+ mapArrayField(resultMap, tempFieldsKey, (List.of(objectList)));
+ }
+ else if (object instanceof List) {
+ mapArrayField(resultMap, tempFieldsKey, (List.of(object)));
+ }
+ else if (object != null) {
+ Map<String, Integer> countMap = resultMap.get(tempFieldsKey);
+ if (countMap != null) {
+ String displayName = pickDisplayNameFromObj(object);
+ Integer count = countMap.getOrDefault(displayName, 0);
+ countMap.put(displayName, count > 0 ? count + 1 : 1);
+ countMap.put(NONE_KEY, countMap.get(NONE_KEY) - 1);
+ }
+ }
+ });
+ }
+
+ private void mapArrayField(Map<String, Map<String, Integer>> resultMap, String fieldsKey, List