diff --git a/.github/workflows/build-images.yaml b/.github/workflows/build-images.yaml index dae02057a17..2a413b5e7dc 100644 --- a/.github/workflows/build-images.yaml +++ b/.github/workflows/build-images.yaml @@ -19,25 +19,24 @@ jobs: name: Build runs-on: ubuntu-20.04 steps: - - name: Docker meta + - name: Container meta for default (distroless) image id: docker_meta uses: docker/metadata-action@v3 with: images: ${{ env.IMAGES }} tags: | type=match,pattern=image-(.*),group=1,enable=${{github.event_name != 'pull_request'}} - type=sha + - - name: Docker distroless meta - id: docker_distroless_meta + - name: Container meta for tomcat image + id: docker_tomcat_meta uses: docker/metadata-action@v3 with: images: ${{ env.IMAGES }} tags: | type=match,pattern=image-(.*),group=1,enable=${{github.event_name != 'pull_request'}} - type=sha flavor: | - suffix=-distroless,onlatest=true + suffix=-tomcat,onlatest=true - name: Set up QEMU uses: docker/setup-qemu-action@v1 @@ -60,7 +59,7 @@ jobs: restore-keys: | ${{ runner.os }}-buildx- - - name: Build and push + - name: Build and push default (distroless) image id: docker_build uses: docker/build-push-action@v2 with: @@ -70,15 +69,16 @@ jobs: tags: ${{ steps.docker_meta.outputs.tags }} labels: ${{ steps.docker_meta.outputs.labels }} platforms: ${{ env.PLATFORMS }} + target: default - - name: Build and push distroless - id: docker_build_distroless + - name: Build and push tomcat image + id: docker_build_tomcat uses: docker/build-push-action@v2 with: cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.docker_distroless_meta.outputs.tags }} - labels: ${{ steps.docker_distroless_meta.outputs.labels }} + tags: ${{ steps.docker_tomcat_meta.outputs.tags }} + labels: ${{ steps.docker_tomcat_meta.outputs.labels }} platforms: ${{ env.PLATFORMS }} - target: release-distroless + target: tomcat diff --git a/.github/workflows/chart-release.yaml b/.github/workflows/chart-release.yaml index 5d30c2f47e7..ae2045ceb05 100644 --- a/.github/workflows/chart-release.yaml +++ b/.github/workflows/chart-release.yaml @@ -21,11 +21,6 @@ jobs: git config user.name "$GITHUB_ACTOR" git config user.email "$GITHUB_ACTOR@users.noreply.github.com" - - name: Install Helm - uses: azure/setup-helm@v1 - with: - version: v3.7.0 - - name: Add bitnami repo run: helm repo add bitnami https://charts.bitnami.com/bitnami diff --git a/.github/workflows/chart-test.yaml b/.github/workflows/chart-test.yaml index 90bb32a8208..1d32194682f 100644 --- a/.github/workflows/chart-test.yaml +++ b/.github/workflows/chart-test.yaml @@ -15,7 +15,7 @@ jobs: - name: Install helm-docs working-directory: /tmp env: - HELM_DOCS_URL: https://github.com/norwoodj/helm-docs/releases/download/v1.5.0/helm-docs_1.5.0_Linux_x86_64.tar.gz + HELM_DOCS_URL: https://github.com/norwoodj/helm-docs/releases/download/v1.9.1/helm-docs_1.9.1_Linux_x86_64.tar.gz run: | curl -LSs $HELM_DOCS_URL | tar xz && \ mv ./helm-docs /usr/local/bin/helm-docs && \ @@ -35,21 +35,19 @@ jobs: test: runs-on: ubuntu-20.04 + strategy: + matrix: + k8s-version: [1.22.9, 1.23.6, 1.24.1] needs: - lint steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Set up Helm - uses: azure/setup-helm@v1 - with: - version: v3.7.0 - - name: Set up chart-testing - uses: helm/chart-testing-action@v2.1.0 + uses: helm/chart-testing-action@v2.2.1 - name: Run chart-testing (list-changed) id: list-changed @@ -62,6 +60,10 
@@ jobs: - name: Create k8s Kind Cluster uses: helm/kind-action@v1.2.0 if: steps.list-changed.outputs.changed == 'true' + with: + version: v0.14.0 + cluster_name: kind-cluster-k8s-${{ matrix.k8s-version }} + node_image: kindest/node:v${{ matrix.k8s-version }} - name: Run chart-testing (install) run: ct install --config .github/ct/config.yaml diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 435d58b3dfd..9919452b887 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -17,9 +17,9 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v1 with: - java-version: 11 + java-version: 17 - name: Build with Maven run: mvn -B package --file pom.xml diff --git a/Dockerfile b/Dockerfile index 550a55e6597..3a8ea7a44f1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,19 +1,39 @@ -FROM maven:3.8.2-jdk-11-slim as build-hapi -WORKDIR /usr/src/app/hapi-fhir-jpaserver-starter +FROM maven:3.8-openjdk-17-slim as build-hapi +WORKDIR /tmp/hapi-fhir-jpaserver-starter COPY pom.xml . COPY server.xml . RUN mvn -ntp dependency:go-offline -COPY src/ /usr/src/app/hapi-fhir-jpaserver-starter/src/ -RUN mvn clean install -DskipTests +COPY src/ /tmp/hapi-fhir-jpaserver-starter/src/ +RUN mvn clean install -DskipTests -Djdk.lang.Process.launchMechanism=vfork FROM build-hapi AS build-distroless RUN mvn package spring-boot:repackage -Pboot -RUN mkdir /app && \ - cp /usr/src/app/hapi-fhir-jpaserver-starter/target/ROOT.war /app/main.war +RUN mkdir /app && cp /tmp/hapi-fhir-jpaserver-starter/target/ROOT.war /app/main.war -FROM gcr.io/distroless/java-debian11:11 AS release-distroless + +########### bitnami tomcat version is suitable for debugging and comes with a shell +########### it can be built using eg. 
`docker build --target tomcat .` +FROM bitnami/tomcat:9.0 as tomcat + +RUN rm -rf /opt/bitnami/tomcat/webapps/ROOT && \ + rm -rf /opt/bitnami/tomcat/webapps_default/ROOT && \ + mkdir -p /opt/bitnami/hapi/data/hapi/lucenefiles && \ + chmod 775 /opt/bitnami/hapi/data/hapi/lucenefiles + +USER root +RUN mkdir -p /target && chown -R 1001:1001 target +USER 1001 + +COPY --chown=1001:1001 catalina.properties /opt/bitnami/tomcat/conf/catalina.properties +COPY --chown=1001:1001 server.xml /opt/bitnami/tomcat/conf/server.xml +COPY --from=build-hapi --chown=1001:1001 /tmp/hapi-fhir-jpaserver-starter/target/ROOT.war /opt/bitnami/tomcat/webapps_default/ROOT.war + +ENV ALLOW_EMPTY_PASSWORD=yes + +########### distroless brings focus on security and runs on plain spring boot - this is the default image +FROM gcr.io/distroless/java17:nonroot as default COPY --chown=nonroot:nonroot --from=build-distroless /app /app # 65532 is the nonroot user's uid # used here instead of the name to allow Kubernetes to easily detect that the container @@ -21,13 +41,3 @@ COPY --chown=nonroot:nonroot --from=build-distroless /app /app USER 65532:65532 WORKDIR /app CMD ["/app/main.war"] - -FROM tomcat:9.0.53-jdk11-openjdk-slim-bullseye - -RUN mkdir -p /data/hapi/lucenefiles && chmod 775 /data/hapi/lucenefiles -COPY --from=build-hapi /usr/src/app/hapi-fhir-jpaserver-starter/target/*.war /usr/local/tomcat/webapps/ - -COPY catalina.properties /usr/local/tomcat/conf/catalina.properties -COPY server.xml /usr/local/tomcat/conf/server.xml - -CMD ["catalina.sh", "run"] diff --git a/README.md b/README.md index 2a2a14fb450..79d5060e501 100644 --- a/README.md +++ b/README.md @@ -291,7 +291,7 @@ You can use a custom property file that utilizes environment variables for many -e OAUTH_ENABLED= \ -e OAUTH_URL= \ -e reuse_cached_search_results_millis= \ --e spring.config.location='' \ +-e spring.config.location='' \ -e subscription.resthook.enabled= \ -e subscription.websocket.enabled= \ -e url_pattern= \ @@ -316,6 +316,13 @@ spring: password: admin driverClassName: com.mysql.jdbc.Driver ``` + +Also, make sure you are not setting the Hibernate dialect explicitly; in other words, remove any lines similar to: + +``` +hibernate.dialect: {some non-MySQL dialect} +``` + On some systems, it might be necessary to override hibernate's default naming strategy. The naming strategy must be set using spring.jpa.hibernate.physical_naming_strategy. ```yaml @@ -342,6 +349,26 @@ spring: Because the integration tests within the project rely on the default H2 database configuration, it is important to either explicitly skip the integration tests during the build process, i.e., `mvn install -DskipTests`, or delete the tests altogether. Failure to skip or delete the tests once you've configured PostgreSQL for the datasource.driver, datasource.url, and hibernate.dialect as outlined above will result in build errors and compilation failure. +### Microsoft SQL Server configuration + +To configure the starter app to use MS SQL Server, instead of the default H2, update the application.yaml file to have the following: + +```yaml +spring: + datasource: + url: 'jdbc:sqlserver://:;databaseName=' + username: admin + password: admin + driverClassName: com.microsoft.sqlserver.jdbc.SQLServerDriver +``` + + +Because the integration tests within the project rely on the default H2 database configuration, it is important to either explicitly skip the integration tests during the build process, i.e., `mvn install -DskipTests`, or delete the tests altogether. 
Failure to skip or delete the tests once you've configured MS SQL Server for the datasource.driver, datasource.url, and hibernate.dialect as outlined above will result in build errors and compilation failure. + + +NOTE: MS SQL Server uses a case-insensitive collation by default. This will cause errors with some operations, such as expanding case-sensitive value sets (e.g. UCUM), because unique indexes are defined on the terminology tables for codes. +It is recommended to deploy a case-sensitive database prior to running HAPI FHIR when using MS SQL Server to avoid these and potentially other issues. + ## Customizing The Web Testpage UI The UI that comes with this server is an exact clone of the server available at [http://hapi.fhir.org](http://hapi.fhir.org). You may skin this UI if you'd like. For example, you might change the introductory text or replace the logo with your own. @@ -406,6 +433,8 @@ spring: driverClassName: com.mysql.jdbc.Driver ``` +Also, make sure you are not setting the Hibernate dialect explicitly; see the MySQL section above for more details. + ## Running hapi-fhir-jpaserver directly from IntelliJ as Spring Boot Make sure you run with the maven profile called ```boot``` and NOT also ```jetty```. Then you are ready to press debug the project directly without any extra Application Servers. @@ -465,6 +494,7 @@ Set `hapi.fhir.mdm_enabled=true` in the [application.yaml](https://github.com/ha Set `empi.enabled=true` in the [hapi.properties](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/hapi.properties) file to enable EMPI on this server. The EMPI matching rules are configured in [empi-rules.json](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/empi-rules.json). The rules in this example file should be replaced with actual matching rules appropriate to your data. Note that EMPI relies on subscriptions, so for EMPI to work, subscriptions must be enabled. + ## Enabling EMPI Set `empi.enabled=true` in the [hapi.properties](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/hapi.properties) file to enable EMPI on this server. The EMPI matching rules are configured in [empi-rules.json](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/empi-rules.json). The rules in this example file should be replaced with actual matching rules appropriate to your data. Note that EMPI relies on subscriptions, so for EMPI to work, subscriptions must be enabled. @@ -489,10 +519,14 @@ elasticsearch.schema_management_strategy=CREATE Set `hapi.fhir.lastn_enabled=true` in the [application.yaml](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/application.yaml) file to enable the $lastn operation on this server. Note that the $lastn operation relies on Elasticsearch, so for $lastn to work, indexing must be enabled using Elasticsearch. +## Enabling Resource to be stored in Lucene Index + +Set `hapi.fhir.store_resource_in_lucene_index_enabled=true` in the [application.yaml](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/application.yaml) file to enable storing the resource JSON along with the Lucene/Elasticsearch index mappings. + ## Changing cached search results time -It is possible to change the cached search results time. The option `reuse_cached_search_results_millis` in the [application.yaml] is 6000 miliseconds by default. 
-Set `reuse_cached_search_results_millis: -1` in the [application.yaml] file to ignore the cache time every search. +It is possible to change the cached search results time. The option `reuse_cached_search_results_millis` in the [application.yaml](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/application.yaml) is 6000 milliseconds by default. +Set `reuse_cached_search_results_millis: -1` in the [application.yaml](https://github.com/hapifhir/hapi-fhir-jpaserver-starter/blob/master/src/main/resources/application.yaml) file to ignore the cache time every search. ## Build the distroless variant of the image (for lower footprint and improved security) @@ -503,7 +537,7 @@ using the `gcr.io/distroless/java-debian10:11` base image: docker build --target=release-distroless -t hapi-fhir:distroless . ``` -Note that distroless images are also automatically build and pushed to the container registry, +Note that distroless images are also automatically built and pushed to the container registry, see the `-distroless` suffix in the image tags. ## Adding custom operations diff --git a/charts/hapi-fhir-jpaserver/Chart.lock b/charts/hapi-fhir-jpaserver/Chart.lock index 0db0f3a7b87..e8c97e8edcc 100644 --- a/charts/hapi-fhir-jpaserver/Chart.lock +++ b/charts/hapi-fhir-jpaserver/Chart.lock @@ -1,6 +1,6 @@ dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami - version: 10.12.2 -digest: sha256:38ee315eae1af3e3f6eb20e1dd8ffd60d4ab7ee0c51bf26941b56c8bcb376c11 -generated: "2021-10-07T00:19:18.9743522+02:00" + version: 11.6.2 +digest: sha256:1b96efc47b5dbe28bf34bcb694697325f3d2755a39ce2f1c371b2c9de9fac9d3 +generated: "2022-06-03T11:48:19.1684784+02:00" diff --git a/charts/hapi-fhir-jpaserver/Chart.yaml b/charts/hapi-fhir-jpaserver/Chart.yaml index 0b839154c3d..9cebc38656e 100644 --- a/charts/hapi-fhir-jpaserver/Chart.yaml +++ b/charts/hapi-fhir-jpaserver/Chart.yaml @@ -7,17 +7,39 @@ sources: - https://github.com/hapifhir/hapi-fhir-jpaserver-starter dependencies: - name: postgresql - version: 10.12.2 + version: 11.6.2 repository: https://charts.bitnami.com/bitnami condition: postgresql.enabled +appVersion: v6.0.1 +version: 0.9.0 annotations: artifacthub.io/license: Apache-2.0 - artifacthub.io/prerelease: "true" artifacthub.io/changes: | # When using the list of objects option the valid supported kinds are # added, changed, deprecated, removed, fixed, and security. - kind: changed description: | - updated HAPI FHIR starter image to 5.5.1 -appVersion: v5.5.1 -version: 0.6.0 + BREAKING CHANGE: updated HAPI FHIR starter image to v6.0.1. + See for all application changes. 
+ - kind: changed + description: | + updated included PostgreSQL-subchart to v11.6.2 + - kind: fixed + description: | + use a fixed image for the wait-for-database container (docker.io/bitnami/postgresql:14.3.0-debian-10-r20) + instead of relying on the PostgreSQL sub-chart values + - kind: changed + description: | + expose actuator/metrics endpoint on a separate port (8081) + - kind: added + description: | + support for monitoring metrics using ServiceMonitor CRDs + - kind: changed + description: | + switched liveness and readiness probes to Spring Boot actuator endpoints + - kind: changed + description: | + BREAKING CHANGE: removed included `NetworkPolicy`, which is subject to more thorough rework + - kind: added + description: | + allow configuring `topologySpreadConstraints` for the deployment diff --git a/charts/hapi-fhir-jpaserver/README.md b/charts/hapi-fhir-jpaserver/README.md index e1e549606a8..20d0d6f9410 100644 --- a/charts/hapi-fhir-jpaserver/README.md +++ b/charts/hapi-fhir-jpaserver/README.md @@ -1,6 +1,6 @@ # HAPI FHIR JPA Server Starter Helm Chart -![Version: 0.6.0](https://img.shields.io/badge/Version-0.6.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v5.5.1](https://img.shields.io/badge/AppVersion-v5.5.1-informational?style=flat-square) +![Version: 0.9.0](https://img.shields.io/badge/Version-0.9.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v6.0.1](https://img.shields.io/badge/AppVersion-v6.0.1-informational?style=flat-square) This helm chart will help you install the HAPI FHIR JPA Server in a Kubernetes environment. @@ -11,6 +11,9 @@ helm repo add hapifhir https://hapifhir.github.io/hapi-fhir-jpaserver-starter/ helm install --render-subchart-notes hapi-fhir-jpaserver hapifhir/hapi-fhir-jpaserver ``` +> ⚠ By default, the included [PostgreSQL Helm chart](https://github.com/bitnami/charts/tree/master/bitnami/postgresql#upgrading) +> auto-generates a random password for the database which may cause problems when upgrading the chart (see [here for details](https://github.com/bitnami/charts/tree/master/bitnami/postgresql#upgrading)). + ## Values | Key | Type | Default | Description | @@ -24,12 +27,12 @@ helm install --render-subchart-notes hapi-fhir-jpaserver hapifhir/hapi-fhir-jpas | externalDatabase.password | string | `""` | database password | | externalDatabase.port | int | `5432` | database port number | | externalDatabase.user | string | `"fhir"` | username for the external database | +| extraEnv | list | `[]` | extra environment variables to set on the server container | | fullnameOverride | string | `""` | override the chart fullname | -| image.flavor | string | `"distroless"` | the flavor or variant of the image to use. appended to the image tag by `-`. | -| image.pullPolicy | string | `"IfNotPresent"` | | -| image.registry | string | `"docker.io"` | | -| image.repository | string | `"hapiproject/hapi"` | | -| image.tag | string | `""` | defaults to `Chart.appVersion` | +| image.pullPolicy | string | `"IfNotPresent"` | image pullPolicy to use | +| image.registry | string | `"docker.io"` | registry where the HAPI FHIR server image is hosted | +| image.repository | string | `"hapiproject/hapi"` | the path inside the repository | +| image.tag | string | `""` | defaults to `Chart.appVersion`. 
As of v5.7.0, this is the `distroless` flavor | | imagePullSecrets | list | `[]` | image pull secrets to use when pulling the image | | ingress.annotations | object | `{}` | provide any additional annotations which may be required. Evaluated as a template. | | ingress.enabled | bool | `false` | whether to create an Ingress to expose the FHIR server HTTP endpoint | @@ -37,18 +40,26 @@ helm install --render-subchart-notes hapi-fhir-jpaserver hapifhir/hapi-fhir-jpas | ingress.hosts[0].pathType | string | `"ImplementationSpecific"` | | | ingress.hosts[0].paths[0] | string | `"/"` | | | ingress.tls | list | `[]` | ingress TLS config | +| livenessProbe.failureThreshold | int | `5` | | +| livenessProbe.initialDelaySeconds | int | `30` | | +| livenessProbe.periodSeconds | int | `20` | | +| livenessProbe.successThreshold | int | `1` | | +| livenessProbe.timeoutSeconds | int | `30` | | +| metrics.service.port | int | `8081` | | +| metrics.serviceMonitor.additionalLabels | object | `{}` | additional labels to apply to the ServiceMonitor object, e.g. `release: prometheus` | +| metrics.serviceMonitor.enabled | bool | `false` | if enabled, creates a ServiceMonitor instance for Prometheus Operator-based monitoring | | nameOverride | string | `""` | override the chart name | -| networkPolicy.allowedFrom | list | `[]` | Additional allowed NetworkPolicyPeer specs Evaluated as a template so you could do: Example: allowedFrom: - podSelector: matchLabels: app.kubernetes.io/name: {{ $.Release.Name }} | -| networkPolicy.enabled | bool | `false` | enable NetworkPolicy | -| networkPolicy.explicitNamespacesSelector | object | `{}` | a Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed | | nodeSelector | object | `{}` | node selector for the pod | | podAnnotations | object | `{}` | annotations applied to the server pod | +| podDisruptionBudget.enabled | bool | `false` | Enable PodDisruptionBudget for the server pods. uses policy/v1/PodDisruptionBudget thus requiring k8s 1.21+ | +| podDisruptionBudget.maxUnavailable | string | `""` | maximum unavailable instances | +| podDisruptionBudget.minAvailable | int | `1` | minimum available instances | | podSecurityContext | object | `{}` | pod security context | -| postgresql.containerSecurityContext.allowPrivilegeEscalation | bool | `false` | | -| postgresql.containerSecurityContext.capabilities.drop[0] | string | `"ALL"` | | +| postgresql.auth.database | string | `"fhir"` | name for a custom database to create | +| postgresql.auth.existingSecret | string | `""` | Name of existing secret to use for PostgreSQL credentials `auth.postgresPassword`, `auth.password`, and `auth.replicationPassword` will be ignored and picked up from this secret The secret must contain the keys `postgres-password` (which is the password for "postgres" admin user), `password` (which is the password for the custom user to create when `auth.username` is set), and `replication-password` (which is the password for replication user). The secret might also contains the key `ldap-password` if LDAP is enabled. `ldap.bind_password` will be ignored and picked from this secret in this case. The value is evaluated as a template. | | postgresql.enabled | bool | `true` | enable an included PostgreSQL DB. see for details if set to `false`, the values under `externalDatabase` are used | -| postgresql.existingSecret | string | `""` | Name of existing secret to use for PostgreSQL passwords. 
The secret has to contain the keys `postgresql-password` which is the password for `postgresqlUsername` when it is different of `postgres`, `postgresql-postgres-password` which will override `postgresqlPassword`, `postgresql-replication-password` which will override `replication.password` and `postgresql-ldap-password` which will be sed to authenticate on LDAP. The value is evaluated as a template. | -| postgresql.postgresqlDatabase | string | `"fhir"` | name of the database to create see: | +| postgresql.primary.containerSecurityContext.allowPrivilegeEscalation | bool | `false` | | +| postgresql.primary.containerSecurityContext.capabilities.drop[0] | string | `"ALL"` | | | readinessProbe.failureThreshold | int | `5` | | | readinessProbe.initialDelaySeconds | int | `30` | | | readinessProbe.periodSeconds | int | `20` | | @@ -61,14 +72,27 @@ helm install --render-subchart-notes hapi-fhir-jpaserver hapifhir/hapi-fhir-jpas | securityContext.readOnlyRootFilesystem | bool | `true` | | | securityContext.runAsNonRoot | bool | `true` | | | securityContext.runAsUser | int | `65532` | | -| service.port | int | `8080` | | -| service.type | string | `"ClusterIP"` | | +| service.port | int | `8080` | port where the server will be exposed at | +| service.type | string | `"ClusterIP"` | service type | | startupProbe.failureThreshold | int | `10` | | | startupProbe.initialDelaySeconds | int | `60` | | | startupProbe.periodSeconds | int | `30` | | | startupProbe.successThreshold | int | `1` | | | startupProbe.timeoutSeconds | int | `30` | | | tolerations | list | `[]` | pod tolerations | +| topologySpreadConstraints | list | `[]` | pod topology spread configuration see: https://kubernetes.io/docs/concepts/workloads/pods/pod-topology-spread-constraints/#api | + +## Development + +To update the Helm chart when a new version of the `hapiproject/hapi` image is released, the [Chart.yaml](Chart.yaml)'s +`appVersion` and `version` fields need to be updated accordingly. Afterwards, re-generate the [README.md](README.md) +by running: + +```sh +$ helm-docs +INFO[2021-11-20T12:38:04Z] Found Chart directories [charts/hapi-fhir-jpaserver] +INFO[2021-11-20T12:38:04Z] Generating README Documentation for chart /usr/src/app/charts/hapi-fhir-jpaserver +``` ---------------------------------------------- -Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0) +Autogenerated from chart metadata using [helm-docs v1.9.1](https://github.com/norwoodj/helm-docs/releases/v1.9.1) diff --git a/charts/hapi-fhir-jpaserver/README.md.gotmpl b/charts/hapi-fhir-jpaserver/README.md.gotmpl index c599d14392e..e345f8be8cb 100644 --- a/charts/hapi-fhir-jpaserver/README.md.gotmpl +++ b/charts/hapi-fhir-jpaserver/README.md.gotmpl @@ -11,6 +11,21 @@ helm repo add hapifhir https://hapifhir.github.io/hapi-fhir-jpaserver-starter/ helm install --render-subchart-notes hapi-fhir-jpaserver hapifhir/hapi-fhir-jpaserver ``` +> ⚠ By default, the included [PostgreSQL Helm chart](https://github.com/bitnami/charts/tree/master/bitnami/postgresql#upgrading) +> auto-generates a random password for the database which may cause problems when upgrading the chart (see [here for details](https://github.com/bitnami/charts/tree/master/bitnami/postgresql#upgrading)). + {{ template "chart.valuesSection" . }} +## Development + +To update the Helm chart when a new version of the `hapiproject/hapi` image is released, the [Chart.yaml](Chart.yaml)'s +`appVersion` and `version` fields need to be updated accordingly. 
Afterwards, re-generate the [README.md](README.md) +by running: + +```sh +$ helm-docs +INFO[2021-11-20T12:38:04Z] Found Chart directories [charts/hapi-fhir-jpaserver] +INFO[2021-11-20T12:38:04Z] Generating README Documentation for chart /usr/src/app/charts/hapi-fhir-jpaserver +``` + {{ template "helm-docs.versionFooter" . }} diff --git a/charts/hapi-fhir-jpaserver/ci/enabled-ingress-values.yaml b/charts/hapi-fhir-jpaserver/ci/enabled-ingress-values.yaml new file mode 100644 index 00000000000..f28063f19a0 --- /dev/null +++ b/charts/hapi-fhir-jpaserver/ci/enabled-ingress-values.yaml @@ -0,0 +1,6 @@ +ingress: + enabled: true + +postgresql: + auth: + postgresPassword: secretpassword diff --git a/charts/hapi-fhir-jpaserver/templates/_helpers.tpl b/charts/hapi-fhir-jpaserver/templates/_helpers.tpl index 178d84028bd..eee1ed59867 100644 --- a/charts/hapi-fhir-jpaserver/templates/_helpers.tpl +++ b/charts/hapi-fhir-jpaserver/templates/_helpers.tpl @@ -30,18 +30,6 @@ Create chart name and version as used by the chart label. {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} {{- end }} -{{/* -Create image tag -*/}} -{{- define "hapi-fhir-jpaserver.imageTag" -}} -{{- $version := default .Chart.AppVersion .Values.image.tag -}} -{{- if .Values.image.flavor }} -{{- printf "%s-%s" $version .Values.image.flavor }} -{{- else }} -{{- printf "%s" $version }} -{{- end }} -{{- end }} - {{/* Common labels */}} @@ -75,10 +63,10 @@ We truncate at 63 chars because some Kubernetes name fields are limited to this Get the Postgresql credentials secret name. */}} {{- define "hapi-fhir-jpaserver.postgresql.secretName" -}} -{{- if and (.Values.postgresql.enabled) (not .Values.postgresql.existingSecret) -}} +{{- if and (.Values.postgresql.enabled) (not .Values.postgresql.auth.existingSecret) -}} {{- printf "%s" (include "hapi-fhir-jpaserver.postgresql.fullname" .) -}} -{{- else if and (.Values.postgresql.enabled) (.Values.postgresql.existingSecret) -}} - {{- printf "%s" .Values.postgresql.existingSecret -}} +{{- else if and (.Values.postgresql.enabled) (.Values.postgresql.auth.existingSecret) -}} + {{- printf "%s" .Values.postgresql.auth.existingSecret -}} {{- else }} {{- if .Values.externalDatabase.existingSecret -}} {{- printf "%s" .Values.externalDatabase.existingSecret -}} @@ -95,7 +83,7 @@ Get the Postgresql credentials secret key. 
{{- if (.Values.externalDatabase.existingSecret) -}} {{- printf "%s" .Values.externalDatabase.existingSecretKey -}} {{- else }} - {{- printf "postgresql-password" -}} + {{- printf "postgres-password" -}} {{- end -}} {{- end -}} @@ -110,14 +98,14 @@ Add environment variables to configure database values Add environment variables to configure database values */}} {{- define "hapi-fhir-jpaserver.database.user" -}} -{{- ternary .Values.postgresql.postgresqlUsername .Values.externalDatabase.user .Values.postgresql.enabled -}} +{{- ternary "postgres" .Values.externalDatabase.user .Values.postgresql.enabled -}} {{- end -}} {{/* Add environment variables to configure database values */}} {{- define "hapi-fhir-jpaserver.database.name" -}} -{{- ternary .Values.postgresql.postgresqlDatabase .Values.externalDatabase.database .Values.postgresql.enabled -}} +{{- ternary .Values.postgresql.auth.database .Values.externalDatabase.database .Values.postgresql.enabled -}} {{- end -}} {{/* diff --git a/charts/hapi-fhir-jpaserver/templates/deployment.yaml b/charts/hapi-fhir-jpaserver/templates/deployment.yaml index a58024c82e3..741eb71add2 100644 --- a/charts/hapi-fhir-jpaserver/templates/deployment.yaml +++ b/charts/hapi-fhir-jpaserver/templates/deployment.yaml @@ -30,7 +30,7 @@ spec: {{- toYaml .Values.podSecurityContext | nindent 8 }} initContainers: - name: wait-for-db-to-be-ready - image: "{{ .Values.postgresql.image.registry }}/{{ .Values.postgresql.image.repository }}:{{ .Values.postgresql.image.tag }}" + image: docker.io/bitnami/postgresql:14.3.0-debian-10-r20 imagePullPolicy: IfNotPresent securityContext: allowPrivilegeEscalation: false @@ -60,15 +60,29 @@ spec: - name: {{ .Chart.Name }} securityContext: {{- toYaml .Values.securityContext | nindent 12 }} - image: {{ .Values.image.registry }}/{{ .Values.image.repository }}:{{ include "hapi-fhir-jpaserver.imageTag" . }} + image: {{ .Values.image.registry }}/{{ .Values.image.repository }}:{{ default .Chart.AppVersion .Values.image.tag }} imagePullPolicy: {{ .Values.image.pullPolicy }} ports: - name: http containerPort: 8080 protocol: TCP + - name: metrics + containerPort: 8081 + protocol: TCP + startupProbe: + httpGet: + path: /readyz + port: http + {{- with .Values.startupProbe }} + initialDelaySeconds: {{ .initialDelaySeconds }} + periodSeconds: {{ .periodSeconds }} + timeoutSeconds: {{ .timeoutSeconds }} + successThreshold: {{ .successThreshold }} + failureThreshold: {{ .failureThreshold }} + {{- end }} readinessProbe: httpGet: - path: / + path: /readyz port: http {{- with .Values.readinessProbe }} initialDelaySeconds: {{ .initialDelaySeconds }} @@ -77,11 +91,11 @@ spec: successThreshold: {{ .successThreshold }} failureThreshold: {{ .failureThreshold }} {{- end }} - startupProbe: + livenessProbe: httpGet: - path: /fhir/metadata + path: /livez port: http - {{- with .Values.startupProbe }} + {{- with .Values.livenessProbe }} initialDelaySeconds: {{ .initialDelaySeconds }} periodSeconds: {{ .periodSeconds }} timeoutSeconds: {{ .timeoutSeconds }} @@ -102,12 +116,14 @@ spec: key: {{ include "hapi-fhir-jpaserver.postgresql.secretKey" . 
}} - name: SPRING_DATASOURCE_DRIVERCLASSNAME value: org.postgresql.Driver - - name: SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT - value: org.hibernate.dialect.PostgreSQL10Dialect + - name: spring.jpa.properties.hibernate.dialect + value: ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect - name: HAPI_FHIR_USE_APACHE_ADDRESS_STRATEGY value: "true" - - name: SPRING_JPA_DATABASE_PLATFORM - value: org.hibernate.dialect.PostgreSQLDialect + - name: MANAGEMENT_ENDPOINT_HEALTH_PROBES_ADD_ADDITIONAL_PATHS + value: "true" + - name: MANAGEMENT_SERVER_PORT + value: "8081" {{- if .Values.extraEnv }} {{ toYaml .Values.extraEnv | nindent 12 }} {{- end }} @@ -128,6 +144,10 @@ spec: tolerations: {{- toYaml . | nindent 8 }} {{- end }} + {{- with .Values.topologySpreadConstraints }} + topologySpreadConstraints: + {{- toYaml . | nindent 8 }} + {{- end }} volumes: - name: tmp-volume emptyDir: {} diff --git a/charts/hapi-fhir-jpaserver/templates/externaldb-secret.yaml b/charts/hapi-fhir-jpaserver/templates/externaldb-secret.yaml index e3a35d80219..a487cb6b030 100644 --- a/charts/hapi-fhir-jpaserver/templates/externaldb-secret.yaml +++ b/charts/hapi-fhir-jpaserver/templates/externaldb-secret.yaml @@ -1,4 +1,4 @@ -{{- if and (not .Values.postgresql.enabled) (not .Values.externalDatabase.existingSecret) (not .Values.postgresql.existingSecret) }} +{{- if and (not .Values.postgresql.enabled) (not .Values.externalDatabase.existingSecret) (not .Values.postgresql.auth.existingSecret) }} apiVersion: v1 kind: Secret metadata: @@ -7,5 +7,5 @@ metadata: {{- include "hapi-fhir-jpaserver.labels" . | nindent 4 }} type: Opaque data: - postgresql-password: {{ .Values.externalDatabase.password | b64enc | quote }} + postgres-password: {{ .Values.externalDatabase.password | b64enc | quote }} {{- end }} diff --git a/charts/hapi-fhir-jpaserver/templates/networkpolicy.yaml b/charts/hapi-fhir-jpaserver/templates/networkpolicy.yaml deleted file mode 100644 index d051950e0e1..00000000000 --- a/charts/hapi-fhir-jpaserver/templates/networkpolicy.yaml +++ /dev/null @@ -1,27 +0,0 @@ -{{- if .Values.networkPolicy.enabled }} -kind: NetworkPolicy -apiVersion: networking.k8s.io/v1 -metadata: - name: {{ include "hapi-fhir-jpaserver.fullname" . }} - labels: - {{- include "hapi-fhir-jpaserver.labels" . | nindent 4 }} -spec: - podSelector: - matchLabels: - {{- include "hapi-fhir-jpaserver.selectorLabels" . | nindent 6 }} - ingress: - # Allow inbound connections from pods with the "hapi-fhir-jpaserver-client: true" label - - ports: - - port: http - from: - - podSelector: - matchLabels: - {{ include "hapi-fhir-jpaserver.fullname" . }}-client: "true" - {{- with .Values.networkPolicy.explicitNamespacesSelector }} - namespaceSelector: - {{ toYaml . | nindent 12 }} - {{- end }} - {{- with .Values.networkPolicy.allowedFrom }} - {{ tpl (toYaml .) $ | nindent 8 }} - {{- end }} -{{- end }} diff --git a/charts/hapi-fhir-jpaserver/templates/poddisruptionbudget.yaml b/charts/hapi-fhir-jpaserver/templates/poddisruptionbudget.yaml new file mode 100644 index 00000000000..bae8dad8a9a --- /dev/null +++ b/charts/hapi-fhir-jpaserver/templates/poddisruptionbudget.yaml @@ -0,0 +1,18 @@ +{{- if .Values.podDisruptionBudget.enabled }} +kind: PodDisruptionBudget +apiVersion: policy/v1 +metadata: + name: {{ include "hapi-fhir-jpaserver.fullname" . }} + labels: + {{- include "hapi-fhir-jpaserver.labels" . 
| nindent 4 }} +spec: + {{- if .Values.podDisruptionBudget.minAvailable }} + minAvailable: {{ .Values.podDisruptionBudget.minAvailable }} + {{- end }} + {{- if .Values.podDisruptionBudget.maxUnavailable }} + maxUnavailable: {{ .Values.podDisruptionBudget.maxUnavailable }} + {{- end }} + selector: + matchLabels: + {{- include "hapi-fhir-jpaserver.selectorLabels" . | nindent 6 }} +{{- end }} diff --git a/charts/hapi-fhir-jpaserver/templates/service.yaml b/charts/hapi-fhir-jpaserver/templates/service.yaml index 90a05a291e8..d7ecaa5d25e 100644 --- a/charts/hapi-fhir-jpaserver/templates/service.yaml +++ b/charts/hapi-fhir-jpaserver/templates/service.yaml @@ -11,5 +11,9 @@ spec: targetPort: http protocol: TCP name: http + - port: {{ .Values.metrics.service.port }} + targetPort: metrics + protocol: TCP + name: metrics selector: {{- include "hapi-fhir-jpaserver.selectorLabels" . | nindent 4 }} diff --git a/charts/hapi-fhir-jpaserver/templates/servicemonitor.yaml b/charts/hapi-fhir-jpaserver/templates/servicemonitor.yaml new file mode 100644 index 00000000000..e161feeb5c9 --- /dev/null +++ b/charts/hapi-fhir-jpaserver/templates/servicemonitor.yaml @@ -0,0 +1,30 @@ +{{- if .Values.metrics.serviceMonitor.enabled }} +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + name: {{ include "hapi-fhir-jpaserver.fullname" . }} + {{- if .Values.metrics.serviceMonitor.namespace }} + namespace: {{ .Values.metrics.serviceMonitor.namespace }} + {{- end }} + labels: + {{- include "hapi-fhir-jpaserver.labels" . | nindent 4 }} + {{- if .Values.metrics.serviceMonitor.additionalLabels }} + {{- toYaml .Values.metrics.serviceMonitor.additionalLabels | nindent 4 }} + {{- end }} +spec: + endpoints: + - port: metrics + path: /actuator/prometheus + {{- if .Values.metrics.serviceMonitor.interval }} + interval: {{ .Values.metrics.serviceMonitor.interval }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.scrapeTimeout }} + scrapeTimeout: {{ .Values.metrics.serviceMonitor.scrapeTimeout }} + {{- end }} + namespaceSelector: + matchNames: + - {{ .Release.Namespace }} + selector: + matchLabels: + {{- include "hapi-fhir-jpaserver.selectorLabels" . | nindent 6 }} +{{- end }} diff --git a/charts/hapi-fhir-jpaserver/templates/tests/test-connection.yaml b/charts/hapi-fhir-jpaserver/templates/tests/test-endpoints.yaml similarity index 53% rename from charts/hapi-fhir-jpaserver/templates/tests/test-connection.yaml rename to charts/hapi-fhir-jpaserver/templates/tests/test-endpoints.yaml index eac503dfbf2..911f59d6ae3 100644 --- a/charts/hapi-fhir-jpaserver/templates/tests/test-connection.yaml +++ b/charts/hapi-fhir-jpaserver/templates/tests/test-endpoints.yaml @@ -1,7 +1,7 @@ apiVersion: v1 kind: Pod metadata: - name: "{{ include "hapi-fhir-jpaserver.fullname" . }}-test-connection" + name: "{{ include "hapi-fhir-jpaserver.fullname" . }}-test-endpoints" labels: {{- include "hapi-fhir-jpaserver.labels" . | nindent 4 }} {{ include "hapi-fhir-jpaserver.fullname" . }}-client: "true" @@ -10,7 +10,32 @@ metadata: spec: restartPolicy: Never containers: - - name: wget + - name: test-metadata-endpoint + image: busybox:1 + command: ['wget', '-O', '-'] + args: ['http://{{ include "hapi-fhir-jpaserver.fullname" . 
}}:{{ .Values.service.port }}/fhir/metadata'] + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 22222 + runAsNonRoot: true + resources: + limits: + cpu: 100m + memory: 128Mi + requests: + cpu: 100m + memory: 128Mi + livenessProbe: + exec: + command: ["true"] + readinessProbe: + exec: + command: ["true"] + - name: test-patient-endpoint image: busybox:1 command: ['wget', '-O', '-'] args: ['http://{{ include "hapi-fhir-jpaserver.fullname" . }}:{{ .Values.service.port }}/fhir/Patient?_count=1'] diff --git a/charts/hapi-fhir-jpaserver/values.yaml b/charts/hapi-fhir-jpaserver/values.yaml index 5e3c63bfb3c..55863c89d23 100644 --- a/charts/hapi-fhir-jpaserver/values.yaml +++ b/charts/hapi-fhir-jpaserver/values.yaml @@ -2,13 +2,13 @@ replicaCount: 1 image: + # -- registry where the HAPI FHIR server image is hosted registry: docker.io + # -- the path inside the repository repository: hapiproject/hapi - # -- defaults to `Chart.appVersion` + # -- defaults to `Chart.appVersion`. As of v5.7.0, this is the `distroless` flavor tag: "" - # -- the flavor or variant of the image to use. - # appended to the image tag by `-`. - flavor: "distroless" + # -- image pullPolicy to use pullPolicy: IfNotPresent # -- image pull secrets to use when pulling the image @@ -42,7 +42,9 @@ securityContext: # service to expose the server service: + # -- service type type: ClusterIP + # -- port where the server will be exposed at port: 8080 ingress: @@ -86,27 +88,41 @@ tolerations: [] # -- pod affinity affinity: {} +# -- pod topology spread configuration +# see: https://kubernetes.io/docs/concepts/workloads/pods/pod-topology-spread-constraints/#api +topologySpreadConstraints: + [] + # - maxSkew: 1 + # topologyKey: topology.kubernetes.io/zone + # whenUnsatisfiable: ScheduleAnyway + # labelSelector: + # matchLabels: + # app.kubernetes.io/instance: hapi-fhir-jpaserver + # app.kubernetes.io/name: hapi-fhir-jpaserver + postgresql: # -- enable an included PostgreSQL DB. # see for details # if set to `false`, the values under `externalDatabase` are used enabled: true - # -- name of the database to create - # see: - postgresqlDatabase: "fhir" - # -- Name of existing secret to use for PostgreSQL passwords. - # The secret has to contain the keys `postgresql-password` - # which is the password for `postgresqlUsername` when it is - # different of `postgres`, `postgresql-postgres-password` which - # will override `postgresqlPassword`, `postgresql-replication-password` - # which will override `replication.password` and `postgresql-ldap-password` - # which will be sed to authenticate on LDAP. The value is evaluated as a template. - existingSecret: "" - containerSecurityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL + auth: + # -- name for a custom database to create + database: "fhir" + # -- Name of existing secret to use for PostgreSQL credentials + # `auth.postgresPassword`, `auth.password`, and `auth.replicationPassword` will be ignored and picked up from this secret + # The secret must contain the keys `postgres-password` (which is the password for "postgres" admin user), + # `password` (which is the password for the custom user to create when `auth.username` is set), + # and `replication-password` (which is the password for replication user). + # The secret might also contains the key `ldap-password` if LDAP is enabled. `ldap.bind_password` will be ignored and + # picked from this secret in this case. 
+ # The value is evaluated as a template. + existingSecret: "" + primary: + containerSecurityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL readinessProbe: failureThreshold: 5 @@ -122,6 +138,13 @@ startupProbe: successThreshold: 1 timeoutSeconds: 30 +livenessProbe: + failureThreshold: 5 + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 30 + externalDatabase: # -- external database host used with `postgresql.enabled=false` host: localhost @@ -138,22 +161,29 @@ externalDatabase: # -- database name database: fhir -networkPolicy: - # -- enable NetworkPolicy +# -- extra environment variables to set on the server container +extraEnv: + [] + # - name: SPRING_FLYWAY_BASELINE_ON_MIGRATE + # value: "true" + +podDisruptionBudget: + # -- Enable PodDisruptionBudget for the server pods. + # uses policy/v1/PodDisruptionBudget thus requiring k8s 1.21+ enabled: false - # -- a Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed - explicitNamespacesSelector: - {} - # matchLabels: - # team: one - # test: foo - - # -- Additional allowed NetworkPolicyPeer specs - # Evaluated as a template so you could do: - # - # Example: - # allowedFrom: - # - podSelector: - # matchLabels: - # app.kubernetes.io/name: {{ $.Release.Name }} - allowedFrom: [] + # -- minimum available instances + minAvailable: 1 + # -- maximum unavailable instances + maxUnavailable: "" + +metrics: + serviceMonitor: + # -- if enabled, creates a ServiceMonitor instance for Prometheus Operator-based monitoring + enabled: false + # -- additional labels to apply to the ServiceMonitor object, e.g. `release: prometheus` + additionalLabels: {} + # namespace: monitoring + # interval: 30s + # scrapeTimeout: 10s + service: + port: 8081 diff --git a/docker-compose.yml b/docker-compose.yml index 47ac688fef4..6ad9f11324b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,13 +11,15 @@ services: datasource.password: 'admin' datasource.url: 'jdbc:mysql://hapi-fhir-mysql/hapi' datasource.username: 'admin' - fhir_version: 'R4' + fhir_version: 'DSTU2' hapi.fhir.server_address: 'http://localhost:8080/fhir/' hibernate.dialect: 'org.hibernate.dialect.MySQL5InnoDBDialect' reuse_cached_search_results_millis: '0' - spring.config.location: '/usr/local/tomcat/webapps/ROOT/WEB-INF/classes/application-custom.yaml' + spring.config.location: 'classpath:/application-custom.yaml' subscription.resthook.enabled: 'true' subscription.websocket.enabled: 'true' + url_pattern: "/fhir/*" + enable-web: false hapi-fhir-mysql: image: mysql:latest container_name: hapi-fhir-mysql @@ -33,3 +35,5 @@ services: - hapi-fhir-mysql:/var/lib/mysql volumes: hapi-fhir-mysql: + + diff --git a/pom.xml b/pom.xml index b9c0714430a..f5f509f9527 100644 --- a/pom.xml +++ b/pom.xml @@ -14,17 +14,17 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.1 + 6.1.0 hapi-fhir-jpaserver-starter - 8 + 11 - 3.6.3 + 3.8.3 war @@ -55,23 +55,24 @@ mysql mysql-connector-java - 8.0.25 - org.postgresql postgresql - 42.2.23 + + + com.microsoft.sqlserver + mssql-jdbc - com.sun.mail - javax.mail + org.simplejavamail + simple-java-mail - javax.activation - activation + jakarta.annotation + jakarta.annotation-api @@ -99,6 +100,18 @@ hapi-fhir-base ${project.version} + + + ca.uhn.hapi.fhir + hapi-fhir-jpaserver-subscription + ${project.version} + + + com.zaxxer + HikariCP-java7 + + + @@ -128,6 +141,12 @@ hapi-fhir-jpaserver-mdm ${project.version} + + + ca.uhn.hapi.fhir + hapi-fhir-server-openapi + ${project.version} + 
ca.uhn.hapi.fhir @@ -167,7 +186,7 @@ org.yaml snakeyaml - 1.29 + 1.30 @@ -200,7 +219,7 @@ org.webjars bootstrap - 3.4.1 + 5.1.3 org.webjars @@ -308,6 +327,26 @@ ${spring_boot_version} + + org.springframework.boot + spring-boot-starter-actuator + ${spring_boot_version} + + + + + io.micrometer + micrometer-registry-prometheus + 1.8.5 + + + + com.zaxxer + HikariCP + 5.0.1 + + + org.junit.jupiter junit-jupiter-api @@ -382,7 +421,7 @@ maven-compiler-plugin 3.8.1 - 8 + 11 diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/AppProperties.java b/src/main/java/ca/uhn/fhir/jpa/starter/AppProperties.java index fd92001cca2..af8be7c44c3 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/AppProperties.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/AppProperties.java @@ -22,13 +22,17 @@ public class AppProperties { private Boolean cql_enabled = false; + private Boolean openapi_enabled = false; private Boolean mdm_enabled = false; + private boolean advanced_lucene_indexing = false; + private boolean enable_index_of_type = false; private Boolean allow_cascading_deletes = false; private Boolean allow_contains_searches = true; private Boolean allow_external_references = false; private Boolean allow_multiple_delete = false; private Boolean allow_override_default_search_params = true; private Boolean auto_create_placeholder_reference_targets = false; + private Boolean dao_scheduling_enabled = true; private Boolean delete_expunge_enabled = false; private Boolean enable_index_missing_fields = false; private Boolean enable_index_contained_resource = false; @@ -67,14 +71,24 @@ public class AppProperties { private Map implementationGuides = null; private Boolean lastn_enabled = false; + private boolean store_resource_in_lucene_index_enabled = false; private NormalizedQuantitySearchLevel normalized_quantity_search_level = NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED; - private Integer search_coord_core_pool_size = 20; - private Integer search_coord_max_pool_size = 100; - private Integer search_coord_queue_capacity = 200; private Boolean use_apache_address_strategy = false; private Boolean use_apache_address_strategy_https = false; + private Integer bundle_batch_pool_size = 20; + private Integer bundle_batch_pool_max_size = 100; + private List local_base_urls = new ArrayList<>(); + + public Boolean getOpenapi_enabled() { + return openapi_enabled; + } + + public void setOpenapi_enabled(Boolean openapi_enabled) { + this.openapi_enabled = openapi_enabled; + } + public Boolean getUse_apache_address_strategy() { return use_apache_address_strategy; } @@ -187,7 +201,11 @@ public void setSupported_resource_types(List supported_resource_types) { this.supported_resource_types = supported_resource_types; } - public Logger getLogger() { + public List getSupported_resource_types(List supported_resource_types) { + return this.supported_resource_types; + } + + public Logger getLogger() { return logger; } @@ -204,7 +222,15 @@ public void setClient_id_strategy( this.client_id_strategy = client_id_strategy; } - public Boolean getAllow_cascading_deletes() { + public boolean getAdvanced_lucene_indexing() { + return this.advanced_lucene_indexing; + } + + public void setAdvanced_lucene_indexing(boolean theAdvanced_lucene_indexing) { + advanced_lucene_indexing = theAdvanced_lucene_indexing; + } + + public Boolean getAllow_cascading_deletes() { return allow_cascading_deletes; } @@ -262,6 +288,14 @@ public void setDefault_page_size(Integer default_page_size) { this.default_page_size = default_page_size; } + public 
Boolean getDao_scheduling_enabled() { + return dao_scheduling_enabled; + } + + public void setDao_scheduling_enabled(Boolean dao_scheduling_enabled) { + this.dao_scheduling_enabled = dao_scheduling_enabled; + } + public Boolean getDelete_expunge_enabled() { return delete_expunge_enabled; } @@ -446,7 +480,15 @@ public void setLastn_enabled(Boolean lastn_enabled) { this.lastn_enabled = lastn_enabled; } - public NormalizedQuantitySearchLevel getNormalized_quantity_search_level() { + public boolean getStore_resource_in_lucene_index_enabled() { + return store_resource_in_lucene_index_enabled; + } + + public void setStore_resource_in_lucene_index_enabled(Boolean store_resource_in_lucene_index_enabled) { + this.store_resource_in_lucene_index_enabled = store_resource_in_lucene_index_enabled; + } + + public NormalizedQuantitySearchLevel getNormalized_quantity_search_level() { return this.normalized_quantity_search_level; } @@ -454,30 +496,32 @@ public void setNormalized_quantity_search_level(NormalizedQuantitySearchLevel no this.normalized_quantity_search_level = normalized_quantity_search_level; } - public Integer getSearch_coord_core_pool_size() { return search_coord_core_pool_size; } - - public void setSearch_coord_core_pool_size(Integer search_coord_core_pool_size) { - this.search_coord_core_pool_size = search_coord_core_pool_size; - } + public boolean getInstall_transitive_ig_dependencies() { + return install_transitive_ig_dependencies; + } - public Integer getSearch_coord_max_pool_size() { return search_coord_max_pool_size; } + public void setInstall_transitive_ig_dependencies(boolean install_transitive_ig_dependencies) { + this.install_transitive_ig_dependencies = install_transitive_ig_dependencies; + } - public void setSearch_coord_max_pool_size(Integer search_coord_max_pool_size) { - this.search_coord_max_pool_size = search_coord_max_pool_size; - } + public Integer getBundle_batch_pool_size() { + return this.bundle_batch_pool_size; + } - public Integer getSearch_coord_queue_capacity() { return search_coord_queue_capacity; } + public void setBundle_batch_pool_size(Integer bundle_batch_pool_size) { + this.bundle_batch_pool_size = bundle_batch_pool_size; + } - public void setSearch_coord_queue_capacity(Integer search_coord_queue_capacity) { - this.search_coord_queue_capacity = search_coord_queue_capacity; - } + public Integer getBundle_batch_pool_max_size() { + return bundle_batch_pool_max_size; + } - public boolean getInstall_transitive_ig_dependencies() { - return install_transitive_ig_dependencies; + public void setBundle_batch_pool_max_size(Integer bundle_batch_pool_max_size) { + this.bundle_batch_pool_max_size = bundle_batch_pool_max_size; } - public void setInstall_transitive_ig_dependencies(boolean install_transitive_ig_dependencies) { - this.install_transitive_ig_dependencies = install_transitive_ig_dependencies; + public List getLocal_base_urls() { + return local_base_urls; } public static class Cors { @@ -773,4 +817,12 @@ public void setQuitWait(Boolean quitWait) { private Boolean quitWait = false; } } + + public boolean getEnable_index_of_type() { + return enable_index_of_type; + } + + public void setEnable_index_of_type(boolean enable_index_of_type) { + this.enable_index_of_type = enable_index_of_type; + } } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/Application.java b/src/main/java/ca/uhn/fhir/jpa/starter/Application.java index 733605b6a8e..29d40cd3dd1 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/Application.java +++ 
b/src/main/java/ca/uhn/fhir/jpa/starter/Application.java @@ -1,7 +1,9 @@ package ca.uhn.fhir.jpa.starter; -import ca.uhn.fhir.jpa.starter.mdm.MdmConfig; +import ca.uhn.fhir.batch2.jobs.config.Batch2JobsConfig; +import ca.uhn.fhir.jpa.batch2.JpaBatch2Config; import ca.uhn.fhir.jpa.starter.annotations.OnEitherVersion; +import ca.uhn.fhir.jpa.starter.mdm.MdmConfig; import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig; import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig; import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig; @@ -24,21 +26,19 @@ @ServletComponentScan(basePackageClasses = { JpaRestfulServer.class}) @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class}) -@Import({SubscriptionSubmitterConfig.class, SubscriptionProcessorConfig.class, SubscriptionChannelConfig.class, WebsocketDispatcherConfig.class, MdmConfig.class}) +@Import({ + SubscriptionSubmitterConfig.class, + SubscriptionProcessorConfig.class, + SubscriptionChannelConfig.class, + WebsocketDispatcherConfig.class, + MdmConfig.class, + JpaBatch2Config.class, + Batch2JobsConfig.class +}) public class Application extends SpringBootServletInitializer { public static void main(String[] args) { - /* - * https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/246 - * This will be allowed for a short period until we know how MDM should be configured - * or don't have multiple equal bean instantiations. - * - * This will require changes in the main project as stated in the Github comment - * */ - System.setProperty("spring.main.allow-bean-definition-overriding","true"); - - System.setProperty("spring.batch.job.enabled", "false"); SpringApplication.run(Application.class, args); //Server is now accessible at eg. 
http://localhost:8080/fhir/metadata diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/BaseJpaRestfulServer.java b/src/main/java/ca/uhn/fhir/jpa/starter/BaseJpaRestfulServer.java index 7f119d3f192..a252a21529d 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/BaseJpaRestfulServer.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/BaseJpaRestfulServer.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.starter; +import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.support.IValidationSupport; @@ -9,18 +10,15 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; +import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor; +import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc; import ca.uhn.fhir.jpa.packages.PackageInstallationSpec; import ca.uhn.fhir.jpa.partition.PartitionManagementProvider; -import ca.uhn.fhir.jpa.provider.GraphQLProvider; -import ca.uhn.fhir.jpa.provider.IJpaSystemProvider; -import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider; -import ca.uhn.fhir.jpa.provider.JpaConformanceProviderDstu2; -import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; -import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; +import ca.uhn.fhir.jpa.provider.*; import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.subscription.util.SubscriptionDebugLogInterceptor; @@ -28,17 +26,9 @@ import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator; import ca.uhn.fhir.narrative.INarrativeGenerator; import ca.uhn.fhir.narrative2.NullNarrativeGenerator; -import ca.uhn.fhir.rest.server.ApacheProxyAddressStrategy; -import ca.uhn.fhir.rest.server.ETagSupportEnum; -import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy; -import ca.uhn.fhir.rest.server.IncomingRequestAddressStrategy; -import ca.uhn.fhir.rest.server.RestfulServer; -import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor; -import ca.uhn.fhir.rest.server.interceptor.FhirPathFilterInterceptor; -import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor; -import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; -import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor; -import ca.uhn.fhir.rest.server.interceptor.ResponseValidatingInterceptor; +import ca.uhn.fhir.rest.openapi.OpenApiInterceptor; +import ca.uhn.fhir.rest.server.*; +import ca.uhn.fhir.rest.server.interceptor.*; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy; @@ -47,19 +37,16 @@ import ca.uhn.fhir.validation.ResultSeverityEnum; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import javax.servlet.ServletException; import 
org.hl7.fhir.r4.model.Bundle.BundleType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.http.HttpHeaders; import org.springframework.web.cors.CorsConfiguration; +import javax.servlet.ServletException; +import java.util.*; +import java.util.stream.Collectors; + public class BaseJpaRestfulServer extends RestfulServer { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaRestfulServer.class); @@ -77,6 +64,8 @@ public class BaseJpaRestfulServer extends RestfulServer { @Autowired IJpaSystemProvider jpaSystemProvider; @Autowired + ValueSetOperationProvider myValueSetOperationProvider; + @Autowired IInterceptorBroadcaster interceptorBroadcaster; @Autowired DatabaseBackedPagingProvider databaseBackedPagingProvider; @@ -90,9 +79,16 @@ public class BaseJpaRestfulServer extends RestfulServer { BulkDataExportProvider bulkDataExportProvider; @Autowired PartitionManagementProvider partitionManagementProvider; + + @Autowired + ValueSetOperationProvider valueSetOperationProvider; + @Autowired + ReindexProvider reindexProvider; @Autowired BinaryStorageInterceptor binaryStorageInterceptor; @Autowired + Optional binaryAccessProvider; + @Autowired IPackageInstallerSvc packageInstallerSvc; @Autowired AppProperties appProperties; @@ -124,8 +120,10 @@ protected void initialize() throws ServletException { // Customize supported resource types List supportedResourceTypes = appProperties.getSupported_resource_types(); - if (!supportedResourceTypes.isEmpty() && !supportedResourceTypes.contains("SearchParameter")) { - supportedResourceTypes.add("SearchParameter"); + if (!supportedResourceTypes.isEmpty()) { + if (!supportedResourceTypes.contains("SearchParameter")) { + supportedResourceTypes.add("SearchParameter"); + } daoRegistry.setSupportedResourceTypes(supportedResourceTypes); } @@ -140,7 +138,6 @@ protected void initialize() throws ServletException { registerProviders(resourceProviderFactory.createProviders()); registerProvider(jpaSystemProvider); - /* * The conformance provider exports the supported resources, search parameters, etc for * this server. 
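Alongside the existing JPA providers, the server now autowires a ValueSetOperationProvider, a ReindexProvider and an optional binary access provider (the generic parameters, e.g. Optional<BinaryAccessProvider>, are dropped by this rendering). A minimal sketch of that optional-injection pattern, under the assumption that Spring leaves the Optional empty when no binary storage bean is configured:

import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.rest.server.RestfulServer;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Optional;

public class OptionalProviderSketch extends RestfulServer {

  @Autowired
  Optional<BinaryAccessProvider> binaryAccessProvider; // empty when binary storage is not configured

  void registerBinaryAccessProvider(boolean binaryStorageEnabled) {
    // Register the binary access operations only when the bean actually exists,
    // mirroring the guard added in BaseJpaRestfulServer.initialize() below.
    if (binaryStorageEnabled && binaryAccessProvider.isPresent()) {
      registerProvider(binaryAccessProvider.get());
    }
  }
}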
The JPA version adds resourceProviders counts to the exported statement, so it @@ -324,7 +321,8 @@ protected void initialize() throws ServletException { } // Binary Storage - if (appProperties.getBinary_storage_enabled()) { + if (appProperties.getBinary_storage_enabled() && binaryAccessProvider.isPresent()) { + registerProvider(binaryAccessProvider.get()); getInterceptorService().registerInterceptor(binaryStorageInterceptor); } @@ -358,11 +356,21 @@ protected void initialize() throws ServletException { daoConfig.setDeferIndexingForCodesystemsOfSize(appProperties.getDefer_indexing_for_codesystems_of_size()); + if (appProperties.getOpenapi_enabled()) { + registerInterceptor(new OpenApiInterceptor()); + } + // Bulk Export if (appProperties.getBulk_export_enabled()) { registerProvider(bulkDataExportProvider); } + // valueSet Operations i.e $expand + registerProvider(myValueSetOperationProvider); + + //reindex Provider $reindex + registerProvider(reindexProvider); + // Partitioning if (appProperties.getPartitioning() != null) { registerInterceptor(new RequestTenantPartitionInterceptor()); @@ -374,6 +382,9 @@ protected void initialize() throws ServletException { daoConfig.setResourceServerIdStrategy(DaoConfig.IdStrategyEnum.UUID); daoConfig.setResourceClientIdStrategy(appProperties.getClient_id_strategy()); } + //Parallel Batch GET execution settings + daoConfig.setBundleBatchPoolSize(appProperties.getBundle_batch_pool_size()); + daoConfig.setBundleBatchPoolSize(appProperties.getBundle_batch_pool_max_size()); if (appProperties.getImplementationGuides() != null) { Map guides = appProperties.getImplementationGuides(); @@ -400,8 +411,8 @@ protected void initialize() throws ServletException { daoConfig.setLastNEnabled(true); } - daoConfig.getModelConfig().setNormalizedQuantitySearchLevel(appProperties.getNormalized_quantity_search_level()); - - daoConfig.getModelConfig().setIndexOnContainedResources(appProperties.getEnable_index_contained_resource()); + daoConfig.setStoreResourceInHSearchIndex(appProperties.getStore_resource_in_lucene_index_enabled()); + daoConfig.getModelConfig().setNormalizedQuantitySearchLevel(appProperties.getNormalized_quantity_search_level()); + daoConfig.getModelConfig().setIndexOnContainedResources(appProperties.getEnable_index_contained_resource()); } } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/CustomServerCapabilityStatementProviderR4.java b/src/main/java/ca/uhn/fhir/jpa/starter/CustomServerCapabilityStatementProviderR4.java index 67c549a8126..ab514304a82 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/CustomServerCapabilityStatementProviderR4.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/CustomServerCapabilityStatementProviderR4.java @@ -5,43 +5,43 @@ import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang3.ObjectUtils; import org.hl7.fhir.instance.model.api.IBaseConformance; import org.hl7.fhir.r4.model.CapabilityStatement; import org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestSecurityComponent; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.UriType; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.provider.ServerCapabilityStatementProvider; + public class CustomServerCapabilityStatementProviderR4 extends ServerCapabilityStatementProvider { - private static final String OAUTH_TOKEN_URL = System.getenv("OAUTH_TOKEN_URL"); - private static final String OAUTH_MANAGE_URL = System.getenv("OAUTH_MANAGE_URL"); - - private 
CapabilityStatement capabilityStatement; + public CustomServerCapabilityStatementProviderR4(RestfulServer theServer) { super(theServer); + // TODO Auto-generated constructor stub } + private static final String OAUTH_TOKEN_URL = System.getenv("OAUTH_TOKEN_URL"); + private static final String OAUTH_MANAGE_URL = System.getenv("OAUTH_MANAGE_URL"); + + private CapabilityStatement capabilityStatement; + + @Override public IBaseConformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { + // TODO Auto-generated method stub capabilityStatement = (CapabilityStatement) super.getServerConformance(theRequest, theRequestDetails); capabilityStatement.getRest().get(0).setSecurity(getSecurityComponent()); return capabilityStatement; } - + private static CapabilityStatementRestSecurityComponent getSecurityComponent() { CapabilityStatementRestSecurityComponent security = new CapabilityStatementRestSecurityComponent(); List extensions = new ArrayList(); - if (!ObjectUtils.isEmpty(OAUTH_TOKEN_URL)) { - extensions.add(new Extension("token", new UriType(OAUTH_TOKEN_URL))); - } - if (!ObjectUtils.isEmpty(OAUTH_MANAGE_URL)) { - extensions.add(new Extension("manage", new UriType(OAUTH_MANAGE_URL))); - } + extensions.add(new Extension("token", new UriType(OAUTH_TOKEN_URL))); + extensions.add(new Extension("manage", new UriType(OAUTH_MANAGE_URL))); List extensionsList = new ArrayList(); extensionsList.add((Extension) new Extension( new UriType("http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris")) diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/ElasticsearchConfig.java b/src/main/java/ca/uhn/fhir/jpa/starter/ElasticsearchConfig.java new file mode 100644 index 00000000000..21216e6dd59 --- /dev/null +++ b/src/main/java/ca/uhn/fhir/jpa/starter/ElasticsearchConfig.java @@ -0,0 +1,33 @@ +package ca.uhn.fhir.jpa.starter; + +import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.ConfigurableEnvironment; + +/** Shared configuration for Elasticsearch */ +@Configuration +public class ElasticsearchConfig { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ElasticsearchConfig.class); + + @Autowired + private ConfigurableEnvironment configurableEnvironment; + + @Bean + public ElasticsearchSvcImpl elasticsearchSvc() { + if (EnvironmentHelper.isElasticsearchEnabled(configurableEnvironment)) { + String elasticsearchUrl = EnvironmentHelper.getElasticsearchServerUrl(configurableEnvironment); + if (elasticsearchUrl.startsWith("http")) { + elasticsearchUrl =elasticsearchUrl.substring(elasticsearchUrl.indexOf("://") + 3); + } + String elasticsearchProtocol = EnvironmentHelper.getElasticsearchServerProtocol(configurableEnvironment); + String elasticsearchUsername = EnvironmentHelper.getElasticsearchServerUsername(configurableEnvironment); + String elasticsearchPassword = EnvironmentHelper.getElasticsearchServerPassword(configurableEnvironment); + ourLog.info("Configuring elasticsearch {} {}", elasticsearchProtocol, elasticsearchUrl); + return new ElasticsearchSvcImpl(elasticsearchProtocol, elasticsearchUrl, elasticsearchUsername, elasticsearchPassword); + } else { + return null; + } + } +} diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/EnvironmentHelper.java b/src/main/java/ca/uhn/fhir/jpa/starter/EnvironmentHelper.java 
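The parallel batch GET settings added to BaseJpaRestfulServer above invoke DaoConfig#setBundleBatchPoolSize for both values; given the bundle_batch_pool_size / bundle_batch_pool_max_size properties introduced in application-custom.yaml, the second call is presumably meant to be setBundleBatchMaxPoolSize, which DaoConfig also exposes. A hedged sketch of the presumed intent:

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class BundleBatchPoolSketch {
  // Assumption: pool_size maps to the core pool and pool_max_size to the maximum pool.
  static void configure(DaoConfig daoConfig, Integer poolSize, Integer maxPoolSize) {
    daoConfig.setBundleBatchPoolSize(poolSize);       // threads for BATCH'ed GETs inside a bundle
    daoConfig.setBundleBatchMaxPoolSize(maxPoolSize); // upper bound for that pool
  }
}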
index 75db0f8c957..937c95c2bf4 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/EnvironmentHelper.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/EnvironmentHelper.java @@ -1,7 +1,7 @@ package ca.uhn.fhir.jpa.starter; import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; -import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; +import ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers; import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder; import org.apache.lucene.util.Version; import org.hibernate.cfg.AvailableSettings; @@ -14,6 +14,7 @@ import org.hibernate.search.mapper.orm.automaticindexing.session.AutomaticIndexingSynchronizationStrategyNames; import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.boot.orm.jpa.hibernate.SpringImplicitNamingStrategy; import org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy; import org.springframework.core.env.CompositePropertySource; @@ -21,11 +22,15 @@ import org.springframework.core.env.EnumerablePropertySource; import org.springframework.core.env.PropertySource; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; public class EnvironmentHelper { - public static Properties getHibernateProperties(ConfigurableEnvironment environment) { + public static Properties getHibernateProperties(ConfigurableEnvironment environment, + ConfigurableListableBeanFactory myConfigurableListableBeanFactory) { Properties properties = new Properties(); Map jpaProps = getPropertiesStartingWith(environment, "spring.jpa.properties"); for (Map.Entry entry : jpaProps.entrySet()) { @@ -41,7 +46,7 @@ public static Properties getHibernateProperties(ConfigurableEnvironment environm //properties.putIfAbsent(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(beanFactory)); //hapi-fhir-jpaserver-base "sensible defaults" - Map hapiJpaPropertyMap = new HapiFhirLocalContainerEntityManagerFactoryBean().getJpaPropertyMap(); + Map hapiJpaPropertyMap = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory).getJpaPropertyMap(); hapiJpaPropertyMap.forEach(properties::putIfAbsent); //hapi-fhir-jpaserver-starter defaults @@ -66,14 +71,15 @@ public static Properties getHibernateProperties(ConfigurableEnvironment environm if (properties.get(BackendSettings.backendKey(BackendSettings.TYPE)).equals(LuceneBackendSettings.TYPE_NAME)) { properties.putIfAbsent(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), LocalFileSystemDirectoryProvider.NAME); properties.putIfAbsent(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), "target/lucenefiles"); - properties.putIfAbsent(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + properties.putIfAbsent(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); properties.putIfAbsent(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), Version.LATEST); } else if (properties.get(BackendSettings.backendKey(BackendSettings.TYPE)).equals(ElasticsearchBackendSettings.TYPE_NAME)) { ElasticsearchHibernatePropertiesBuilder builder = new ElasticsearchHibernatePropertiesBuilder(); 
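Both the new ElasticsearchConfig above and the builder change just below expect a host:port style address rather than a full URL, so a scheme prefix is stripped before the value is handed on. A small sketch of that normalization, with an assumed example value:

public class ElasticsearchUrlSketch {
  // Mirrors the normalization in ElasticsearchConfig: drop a leading http:// or https://.
  static String toHostAndPort(String elasticsearchUrl) {
    if (elasticsearchUrl.startsWith("http")) {
      return elasticsearchUrl.substring(elasticsearchUrl.indexOf("://") + 3);
    }
    return elasticsearchUrl;
  }

  public static void main(String[] args) {
    // Example value assumed for illustration; prints "localhost:9200".
    System.out.println(toHostAndPort("https://localhost:9200"));
  }
}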
IndexStatus requiredIndexStatus = environment.getProperty("elasticsearch.required_index_status", IndexStatus.class); builder.setRequiredIndexStatus(requireNonNullElse(requiredIndexStatus, IndexStatus.YELLOW)); - builder.setRestUrl(getElasticsearchServerUrl(environment)); + builder.setHosts(getElasticsearchServerUrl(environment)); builder.setUsername(getElasticsearchServerUsername(environment)); builder.setPassword(getElasticsearchServerPassword(environment)); builder.setProtocol(getElasticsearchServerProtocol(environment)); diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigCommon.java b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigCommon.java index 4ec59123ed0..ac80f093b0e 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigCommon.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigCommon.java @@ -1,15 +1,18 @@ package ca.uhn.fhir.jpa.starter; import ca.uhn.fhir.jpa.api.config.DaoConfig; +import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; import ca.uhn.fhir.jpa.binstore.DatabaseBlobBinaryStorageSvcImpl; -import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.config.PartitionSettings.CrossPartitionReferenceMode; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionDeliveryHandlerFactory; +import ca.uhn.fhir.jpa.subscription.match.deliver.email.EmailSenderImpl; import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender; -import ca.uhn.fhir.jpa.subscription.match.deliver.email.JavaMailEmailSender; +import ca.uhn.fhir.rest.server.mail.IMailSvc; +import ca.uhn.fhir.rest.server.mail.MailConfig; +import ca.uhn.fhir.rest.server.mail.MailSvc; import com.google.common.base.Strings; import org.hl7.fhir.dstu2.model.Subscription; import org.springframework.boot.env.YamlPropertySourceLoader; @@ -20,6 +23,7 @@ import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; +import java.util.HashSet; import java.util.Optional; /** @@ -36,6 +40,7 @@ public FhirServerConfigCommon(AppProperties appProperties) { ourLog.info("Server configured to " + (appProperties.getAllow_contains_searches() ? "allow" : "deny") + " contains searches"); ourLog.info("Server configured to " + (appProperties.getAllow_multiple_delete() ? "allow" : "deny") + " multiple deletes"); ourLog.info("Server configured to " + (appProperties.getAllow_external_references() ? "allow" : "deny") + " external references"); + ourLog.info("Server configured to " + (appProperties.getDao_scheduling_enabled() ? "enable" : "disable") + " DAO scheduling"); ourLog.info("Server configured to " + (appProperties.getDelete_expunge_enabled() ? "enable" : "disable") + " delete expunges"); ourLog.info("Server configured to " + (appProperties.getExpunge_enabled() ? "enable" : "disable") + " expunges"); ourLog.info("Server configured to " + (appProperties.getAllow_override_default_search_params() ? 
"allow" : "deny") + " overriding default search params"); @@ -62,7 +67,7 @@ public FhirServerConfigCommon(AppProperties appProperties) { if (appProperties.getSubscription().getEmail() != null) { ourLog.info("Email subscriptions enabled"); } - + if (appProperties.getEnable_index_contained_resource() == Boolean.TRUE) { ourLog.info("Indexed on contained resource enabled"); } @@ -71,7 +76,7 @@ public FhirServerConfigCommon(AppProperties appProperties) { /** * Configure FHIR properties around the the JPA server via this bean */ - @Bean() + @Bean public DaoConfig daoConfig(AppProperties appProperties) { DaoConfig retVal = new DaoConfig(); @@ -82,6 +87,7 @@ public DaoConfig daoConfig(AppProperties appProperties) { retVal.setAllowContainsSearches(appProperties.getAllow_contains_searches()); retVal.setAllowMultipleDelete(appProperties.getAllow_multiple_delete()); retVal.setAllowExternalReferences(appProperties.getAllow_external_references()); + retVal.setSchedulingDisabled(!appProperties.getDao_scheduling_enabled()); retVal.setDeleteExpungeEnabled(appProperties.getDelete_expunge_enabled()); retVal.setExpungeEnabled(appProperties.getExpunge_enabled()); if(appProperties.getSubscription() != null && appProperties.getSubscription().getEmail() != null) @@ -116,6 +122,8 @@ public DaoConfig daoConfig(AppProperties appProperties) { } retVal.setFilterParameterEnabled(appProperties.getFilter_search_enabled()); + retVal.setAdvancedHSearchIndexing(appProperties.getAdvanced_lucene_indexing()); + retVal.setTreatBaseUrlsAsLocal(new HashSet<>(appProperties.getLocal_base_urls())); return retVal; } @@ -136,7 +144,7 @@ public PartitionSettings partitionSettings(AppProperties appProperties) { if(appProperties.getPartitioning().getAllow_references_across_partitions()) { retVal.setAllowReferencesAcrossPartitions(CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); } else { - retVal.setAllowReferencesAcrossPartitions(CrossPartitionReferenceMode.NOT_ALLOWED); + retVal.setAllowReferencesAcrossPartitions(CrossPartitionReferenceMode.NOT_ALLOWED); } } @@ -151,8 +159,8 @@ public HibernatePropertiesProvider jpaStarterDialectProvider(LocalContainerEntit } @Bean - public ModelConfig modelConfig(AppProperties appProperties) { - ModelConfig modelConfig = new ModelConfig(); + public ModelConfig modelConfig(AppProperties appProperties, DaoConfig daoConfig) { + ModelConfig modelConfig = daoConfig.getModelConfig(); modelConfig.setAllowContainsSearches(appProperties.getAllow_contains_searches()); modelConfig.setAllowExternalReferences(appProperties.getAllow_external_references()); modelConfig.setDefaultSearchParamsCanBeOverridden(appProperties.getAllow_override_default_search_params()); @@ -169,8 +177,9 @@ public ModelConfig modelConfig(AppProperties appProperties) { } modelConfig.setNormalizedQuantitySearchLevel(appProperties.getNormalized_quantity_search_level()); - + modelConfig.setIndexOnContainedResources(appProperties.getEnable_index_contained_resource()); + modelConfig.setIndexIdentifierOfType(appProperties.getEnable_index_of_type()); return modelConfig; } @@ -204,25 +213,24 @@ public IBinaryStorageSvc binaryStorageSvc(AppProperties appProperties) { return binaryStorageSvc; } - @Bean() + @Bean public IEmailSender emailSender(AppProperties appProperties, Optional subscriptionDeliveryHandlerFactory) { if (appProperties.getSubscription() != null && appProperties.getSubscription().getEmail() != null) { - JavaMailEmailSender retVal = new JavaMailEmailSender(); + MailConfig mailConfig = new MailConfig(); AppProperties.Subscription.Email 
email = appProperties.getSubscription().getEmail(); - retVal.setSmtpServerHostname(email.getHost()); - retVal.setSmtpServerPort(email.getPort()); - retVal.setSmtpServerUsername(email.getUsername()); - retVal.setSmtpServerPassword(email.getPassword()); - retVal.setAuth(email.getAuth()); - retVal.setStartTlsEnable(email.getStartTlsEnable()); - retVal.setStartTlsRequired(email.getStartTlsRequired()); - retVal.setQuitWait(email.getQuitWait()); - - if(subscriptionDeliveryHandlerFactory.isPresent()) - subscriptionDeliveryHandlerFactory.get().setEmailSender(retVal); - - return retVal; + mailConfig.setSmtpHostname(email.getHost()); + mailConfig.setSmtpPort(email.getPort()); + mailConfig.setSmtpUsername(email.getUsername()); + mailConfig.setSmtpPassword(email.getPassword()); + mailConfig.setSmtpUseStartTLS(email.getStartTlsEnable()); + + IMailSvc mailSvc = new MailSvc(mailConfig); + IEmailSender emailSender = new EmailSenderImpl(mailSvc); + + subscriptionDeliveryHandlerFactory.ifPresent(theSubscriptionDeliveryHandlerFactory -> theSubscriptionDeliveryHandlerFactory.setEmailSender(emailSender)); + + return emailSender; } return null; diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu2.java b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu2.java index 6e26d41fafe..b8011389d20 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu2.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu2.java @@ -1,83 +1,16 @@ package ca.uhn.fhir.jpa.starter; -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2; -import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; +import ca.uhn.fhir.jpa.config.JpaDstu2Config; import ca.uhn.fhir.jpa.starter.annotations.OnDSTU2Condition; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Primary; -import org.springframework.core.env.ConfigurableEnvironment; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; - -import javax.annotation.PostConstruct; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; +import org.springframework.context.annotation.Import; @Configuration @Conditional(OnDSTU2Condition.class) -public class FhirServerConfigDstu2 extends BaseJavaConfigDstu2 { - - @Autowired - private DataSource myDataSource; - - /** - * We override the paging provider definition so that we can customize - * the default/max page sizes for search results. You can set these however - * you want, although very large page sizes will require a lot of RAM. 
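The subscription email sender above is now assembled from the server mail API (a MailConfig feeding a MailSvc, wrapped in an EmailSenderImpl) instead of the removed JavaMailEmailSender; the old auth, startTlsRequired and quitWait options have no direct counterpart in this assembly, which matches those settings being commented out in application-custom.yaml further down. A condensed, self-contained sketch with placeholder SMTP values:

import ca.uhn.fhir.jpa.subscription.match.deliver.email.EmailSenderImpl;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.rest.server.mail.IMailSvc;
import ca.uhn.fhir.rest.server.mail.MailConfig;
import ca.uhn.fhir.rest.server.mail.MailSvc;

public class EmailSenderSketch {
  // Placeholder host/port/credentials; the starter reads these from AppProperties.Subscription.Email.
  static IEmailSender buildEmailSender() {
    MailConfig mailConfig = new MailConfig();
    mailConfig.setSmtpHostname("smtp.example.org");
    mailConfig.setSmtpPort(587);
    mailConfig.setSmtpUsername("fhir");
    mailConfig.setSmtpPassword("secret");
    mailConfig.setSmtpUseStartTLS(true);

    IMailSvc mailSvc = new MailSvc(mailConfig);
    return new EmailSenderImpl(mailSvc);
  }
}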
- */ - @Autowired - AppProperties appProperties; - - @PostConstruct - public void initSettings() { - if(appProperties.getSearch_coord_core_pool_size() != null) { - setSearchCoordCorePoolSize(appProperties.getSearch_coord_core_pool_size()); - } - if(appProperties.getSearch_coord_max_pool_size() != null) { - setSearchCoordMaxPoolSize(appProperties.getSearch_coord_max_pool_size()); - } - if(appProperties.getSearch_coord_queue_capacity() != null) { - setSearchCoordQueueCapacity(appProperties.getSearch_coord_queue_capacity()); - } - } - - @Override - public DatabaseBackedPagingProvider databaseBackedPagingProvider() { - DatabaseBackedPagingProvider pagingProvider = super.databaseBackedPagingProvider(); - pagingProvider.setDefaultPageSize(appProperties.getDefault_page_size()); - pagingProvider.setMaximumPageSize(appProperties.getMax_page_size()); - return pagingProvider; - } - - @Autowired - private ConfigurableEnvironment configurableEnvironment; - - @Override - @Bean() - public LocalContainerEntityManagerFactoryBean entityManagerFactory() { - LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); - retVal.setPersistenceUnitName("HAPI_PU"); - - try { - retVal.setDataSource(myDataSource); - } catch (Exception e) { - throw new ConfigurationException("Could not set the data source due to a configuration issue", e); - } - retVal.setJpaProperties(EnvironmentHelper.getHibernateProperties(configurableEnvironment)); - return retVal; - } - - @Bean - @Primary - public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) { - JpaTransactionManager retVal = new JpaTransactionManager(); - retVal.setEntityManagerFactory(entityManagerFactory); - return retVal; - } - - +@Import({ + StarterJpaConfig.class, + JpaDstu2Config.class +}) +public class FhirServerConfigDstu2 { } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu3.java b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu3.java index 367c0ec6809..35d1dabb49c 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu3.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigDstu3.java @@ -1,106 +1,19 @@ package ca.uhn.fhir.jpa.starter; -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3; -import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; +import ca.uhn.fhir.jpa.config.dstu3.JpaDstu3Config; import ca.uhn.fhir.jpa.starter.annotations.OnDSTU3Condition; import ca.uhn.fhir.jpa.starter.cql.StarterCqlDstu3Config; -import javax.annotation.PostConstruct; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; +import ca.uhn.fhir.jpa.starter.mdm.MdmConfig; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.springframework.context.annotation.Primary; -import org.springframework.core.env.ConfigurableEnvironment; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; @Configuration @Conditional(OnDSTU3Condition.class) -@Import(StarterCqlDstu3Config.class) -public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 { - - @Autowired - private DataSource myDataSource; - - /** - * We override the 
paging provider definition so that we can customize - * the default/max page sizes for search results. You can set these however - * you want, although very large page sizes will require a lot of RAM. - */ - @Autowired - AppProperties appProperties; - - @PostConstruct - public void initSettings() { - if(appProperties.getSearch_coord_core_pool_size() != null) { - setSearchCoordCorePoolSize(appProperties.getSearch_coord_core_pool_size()); - } - if(appProperties.getSearch_coord_max_pool_size() != null) { - setSearchCoordMaxPoolSize(appProperties.getSearch_coord_max_pool_size()); - } - if(appProperties.getSearch_coord_queue_capacity() != null) { - setSearchCoordQueueCapacity(appProperties.getSearch_coord_queue_capacity()); - } - } - - - @Override - public DatabaseBackedPagingProvider databaseBackedPagingProvider() { - DatabaseBackedPagingProvider pagingProvider = super.databaseBackedPagingProvider(); - pagingProvider.setDefaultPageSize(appProperties.getDefault_page_size()); - pagingProvider.setMaximumPageSize(appProperties.getMax_page_size()); - return pagingProvider; - } - - @Autowired - private ConfigurableEnvironment configurableEnvironment; - - @Override - @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory() { - LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); - retVal.setPersistenceUnitName("HAPI_PU"); - - try { - retVal.setDataSource(myDataSource); - } catch (Exception e) { - throw new ConfigurationException("Could not set the data source due to a configuration issue", e); - } - - retVal.setJpaProperties(EnvironmentHelper.getHibernateProperties(configurableEnvironment)); - return retVal; - } - - @Bean - @Primary - public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) { - JpaTransactionManager retVal = new JpaTransactionManager(); - retVal.setEntityManagerFactory(entityManagerFactory); - return retVal; - } - - @Bean() - public ElasticsearchSvcImpl elasticsearchSvc() { - if (EnvironmentHelper.isElasticsearchEnabled(configurableEnvironment)) { - String elasticsearchUrl = EnvironmentHelper.getElasticsearchServerUrl(configurableEnvironment); - String elasticsearchHost; - if (elasticsearchUrl.startsWith("http")) { - elasticsearchHost = elasticsearchUrl.substring(elasticsearchUrl.indexOf("://") + 3, elasticsearchUrl.lastIndexOf(":")); - } else { - elasticsearchHost = elasticsearchUrl.substring(0, elasticsearchUrl.indexOf(":")); - } - String elasticsearchUsername = EnvironmentHelper.getElasticsearchServerUsername(configurableEnvironment); - String elasticsearchPassword = EnvironmentHelper.getElasticsearchServerPassword(configurableEnvironment); - int elasticsearchPort = Integer.parseInt(elasticsearchUrl.substring(elasticsearchUrl.lastIndexOf(":")+1)); - return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUsername, elasticsearchPassword); - } else { - return null; - } - } - +@Import({ + StarterJpaConfig.class, + JpaDstu3Config.class, + StarterCqlDstu3Config.class, + ElasticsearchConfig.class}) +public class FhirServerConfigDstu3 { } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR4.java b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR4.java index a53b0578711..c31cf529edd 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR4.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR4.java @@ -1,103 +1,20 @@ package ca.uhn.fhir.jpa.starter; -import ca.uhn.fhir.context.ConfigurationException; -import 
ca.uhn.fhir.jpa.config.BaseJavaConfigR4; -import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; +import ca.uhn.fhir.jpa.config.r4.JpaR4Config; import ca.uhn.fhir.jpa.starter.annotations.OnR4Condition; import ca.uhn.fhir.jpa.starter.cql.StarterCqlR4Config; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.*; -import org.springframework.core.env.ConfigurableEnvironment; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; - -import javax.annotation.PostConstruct; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; +import ca.uhn.fhir.jpa.starter.mdm.MdmConfig; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; @Configuration @Conditional(OnR4Condition.class) -@Import(StarterCqlR4Config.class) -public class FhirServerConfigR4 extends BaseJavaConfigR4 { - - @Autowired - private DataSource myDataSource; - - /** - * We override the paging provider definition so that we can customize - * the default/max page sizes for search results. You can set these however - * you want, although very large page sizes will require a lot of RAM. - */ - @Autowired - AppProperties appProperties; - - @PostConstruct - public void initSettings() { - if(appProperties.getSearch_coord_core_pool_size() != null) { - setSearchCoordCorePoolSize(appProperties.getSearch_coord_core_pool_size()); - } - if(appProperties.getSearch_coord_max_pool_size() != null) { - setSearchCoordMaxPoolSize(appProperties.getSearch_coord_max_pool_size()); - } - if(appProperties.getSearch_coord_queue_capacity() != null) { - setSearchCoordQueueCapacity(appProperties.getSearch_coord_queue_capacity()); - } - } - - @Override - public DatabaseBackedPagingProvider databaseBackedPagingProvider() { - DatabaseBackedPagingProvider pagingProvider = super.databaseBackedPagingProvider(); - pagingProvider.setDefaultPageSize(appProperties.getDefault_page_size()); - pagingProvider.setMaximumPageSize(appProperties.getMax_page_size()); - return pagingProvider; - } - - @Autowired - private ConfigurableEnvironment configurableEnvironment; - - @Override - @Bean() - public LocalContainerEntityManagerFactoryBean entityManagerFactory() { - LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); - retVal.setPersistenceUnitName("HAPI_PU"); - - try { - retVal.setDataSource(myDataSource); - } catch (Exception e) { - throw new ConfigurationException("Could not set the data source due to a configuration issue", e); - } - - retVal.setJpaProperties(EnvironmentHelper.getHibernateProperties(configurableEnvironment)); - return retVal; - } - - @Bean - @Primary - public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) { - JpaTransactionManager retVal = new JpaTransactionManager(); - retVal.setEntityManagerFactory(entityManagerFactory); - return retVal; - } - - @Bean() - public ElasticsearchSvcImpl elasticsearchSvc() { - if (EnvironmentHelper.isElasticsearchEnabled(configurableEnvironment)) { - String elasticsearchUrl = EnvironmentHelper.getElasticsearchServerUrl(configurableEnvironment); - String elasticsearchHost; - if (elasticsearchUrl.startsWith("http")) { - elasticsearchHost = elasticsearchUrl.substring(elasticsearchUrl.indexOf("://") + 3, 
elasticsearchUrl.lastIndexOf(":")); - } else { - elasticsearchHost = elasticsearchUrl.substring(0, elasticsearchUrl.indexOf(":")); - } - - String elasticsearchUsername = EnvironmentHelper.getElasticsearchServerUsername(configurableEnvironment); - String elasticsearchPassword = EnvironmentHelper.getElasticsearchServerPassword(configurableEnvironment); - int elasticsearchPort = Integer.parseInt(elasticsearchUrl.substring(elasticsearchUrl.lastIndexOf(":")+1)); - return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUsername, elasticsearchPassword); - } else { - return null; - } - } - +@Import({ + StarterJpaConfig.class, + JpaR4Config.class, + StarterCqlR4Config.class, + ElasticsearchConfig.class +}) +public class FhirServerConfigR4 { } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR5.java b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR5.java index 2c6d72eaa46..8ee03df272d 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR5.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/FhirServerConfigR5.java @@ -1,104 +1,17 @@ package ca.uhn.fhir.jpa.starter; -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.jpa.config.BaseJavaConfigR5; -import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; +import ca.uhn.fhir.jpa.config.r5.JpaR5Config; import ca.uhn.fhir.jpa.starter.annotations.OnR5Condition; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Primary; -import org.springframework.core.env.ConfigurableEnvironment; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; - -import javax.annotation.PostConstruct; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; +import org.springframework.context.annotation.Import; @Configuration @Conditional(OnR5Condition.class) -public class FhirServerConfigR5 extends BaseJavaConfigR5 { - - @Autowired - private DataSource myDataSource; - - /** - * We override the paging provider definition so that we can customize - * the default/max page sizes for search results. You can set these however - * you want, although very large page sizes will require a lot of RAM. 
- */ - @Autowired - AppProperties appProperties; - - @PostConstruct - public void initSettings() { - if(appProperties.getSearch_coord_core_pool_size() != null) { - setSearchCoordCorePoolSize(appProperties.getSearch_coord_core_pool_size()); - } - if(appProperties.getSearch_coord_max_pool_size() != null) { - setSearchCoordMaxPoolSize(appProperties.getSearch_coord_max_pool_size()); - } - if(appProperties.getSearch_coord_queue_capacity() != null) { - setSearchCoordQueueCapacity(appProperties.getSearch_coord_queue_capacity()); - } - } - - @Override - public DatabaseBackedPagingProvider databaseBackedPagingProvider() { - DatabaseBackedPagingProvider pagingProvider = super.databaseBackedPagingProvider(); - pagingProvider.setDefaultPageSize(appProperties.getDefault_page_size()); - pagingProvider.setMaximumPageSize(appProperties.getMax_page_size()); - return pagingProvider; - } - - @Autowired - private ConfigurableEnvironment configurableEnvironment; - - @Override - @Bean() - public LocalContainerEntityManagerFactoryBean entityManagerFactory() { - LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(); - retVal.setPersistenceUnitName("HAPI_PU"); - - try { - retVal.setDataSource(myDataSource); - } catch (Exception e) { - throw new ConfigurationException("Could not set the data source due to a configuration issue", e); - } - - retVal.setJpaProperties(EnvironmentHelper.getHibernateProperties(configurableEnvironment)); - return retVal; - } - - @Bean - @Primary - public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) { - JpaTransactionManager retVal = new JpaTransactionManager(); - retVal.setEntityManagerFactory(entityManagerFactory); - return retVal; - } - - @Bean() - public ElasticsearchSvcImpl elasticsearchSvc() { - if (EnvironmentHelper.isElasticsearchEnabled(configurableEnvironment)) { - String elasticsearchUrl = EnvironmentHelper.getElasticsearchServerUrl(configurableEnvironment); - String elasticsearchHost; - if (elasticsearchUrl.startsWith("http")) { - elasticsearchHost = elasticsearchUrl.substring(elasticsearchUrl.indexOf("://") + 3, elasticsearchUrl.lastIndexOf(":")); - } else { - elasticsearchHost = elasticsearchUrl.substring(0, elasticsearchUrl.indexOf(":")); - } - String elasticsearchUsername = EnvironmentHelper.getElasticsearchServerUsername(configurableEnvironment); - String elasticsearchPassword = EnvironmentHelper.getElasticsearchServerPassword(configurableEnvironment); - int elasticsearchPort = Integer.parseInt(elasticsearchUrl.substring(elasticsearchUrl.lastIndexOf(":")+1)); - return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUsername, elasticsearchPassword); - } else { - return null; - } - } - - +@Import({ + StarterJpaConfig.class, + JpaR5Config.class, + ElasticsearchConfig.class +}) +public class FhirServerConfigR5 { } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/JpaRestfulServer.java b/src/main/java/ca/uhn/fhir/jpa/starter/JpaRestfulServer.java index 95989b83ecf..18899e516cb 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/JpaRestfulServer.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/JpaRestfulServer.java @@ -12,7 +12,7 @@ @Import(AppProperties.class) public class JpaRestfulServer extends BaseJpaRestfulServer { - + private static final String FHIR_VERSION = System.getenv("fhir_version"); private static final String OAUTH_ENABLED = System.getenv("OAUTH_ENABLED"); @@ -28,13 +28,12 @@ public JpaRestfulServer() { @Override protected void initialize() throws ServletException { 
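The FHIR_VERSION and OAUTH_ENABLED environment switches declared above gate the custom capability statement provider registered in initialize() below. Because System.getenv returns null when fhir_version is unset, a defensive variant of that check (a suggestion, not what the class currently does) keeps the enum literal as the receiver:

import ca.uhn.fhir.context.FhirVersionEnum;

public class OauthToggleSketch {
  private static final String FHIR_VERSION = System.getenv("fhir_version");
  private static final String OAUTH_ENABLED = System.getenv("OAUTH_ENABLED");

  // Null-safe: an unset fhir_version simply disables the custom provider
  // instead of throwing a NullPointerException.
  static boolean useCustomCapabilityStatementProvider() {
    return FhirVersionEnum.R4.name().equals(FHIR_VERSION)
      && Boolean.parseBoolean(OAUTH_ENABLED);
  }
}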
super.initialize(); - // Add your own customization here - + /* Custom ServerConformanceProvider will be triggered when fhir version is R4 and Oauth is enabled. */ if (FHIR_VERSION.equals(FhirVersionEnum.R4.name()) && Boolean.parseBoolean(OAUTH_ENABLED)) { - CustomServerCapabilityStatementProviderR4 capabilityStatementProviderR4 = new CustomServerCapabilityStatementProviderR4(this); - setServerConformanceProvider(capabilityStatementProviderR4); + CustomServerCapabilityStatementProviderR4 customCapabilityStatementProviderR4 = new CustomServerCapabilityStatementProviderR4(this); + setServerConformanceProvider(customCapabilityStatementProviderR4); } SearchNarrowingInterceptor customSearchNarrowingInterceptor = new CustomSearchNarrowingInterceptor(); this.registerInterceptor(customSearchNarrowingInterceptor); @@ -43,5 +42,4 @@ protected void initialize() throws ServletException { AuthorizationInterceptor authorizationInterceptor = new CustomAuthorizationInterceptor(); this.registerInterceptor(authorizationInterceptor); } - } diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/OAuth2Helper.java b/src/main/java/ca/uhn/fhir/jpa/starter/OAuth2Helper.java index dea94ef3c1c..f4ec36cb41c 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/OAuth2Helper.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/OAuth2Helper.java @@ -10,7 +10,6 @@ import java.util.Base64; import java.util.HashMap; import java.util.List; - import org.apache.commons.lang3.ObjectUtils; import org.json.JSONArray; import org.json.JSONException; @@ -28,7 +27,6 @@ import com.auth0.jwt.exceptions.JWTVerificationException; import com.auth0.jwt.interfaces.Claim; import com.auth0.jwt.interfaces.DecodedJWT; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; diff --git a/src/main/java/ca/uhn/fhir/jpa/starter/StarterJpaConfig.java b/src/main/java/ca/uhn/fhir/jpa/starter/StarterJpaConfig.java new file mode 100644 index 00000000000..c0ccba70e73 --- /dev/null +++ b/src/main/java/ca/uhn/fhir/jpa/starter/StarterJpaConfig.java @@ -0,0 +1,128 @@ +package ca.uhn.fhir.jpa.starter; + +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.api.IDaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; +import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil; +import ca.uhn.fhir.jpa.config.util.ResourceCountCacheUtil; +import ca.uhn.fhir.jpa.config.util.ValidationSupportConfigUtil; +import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl; +import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; +import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDaoJpaImpl; +import ca.uhn.fhir.jpa.dao.search.HSearchSortHelperImpl; +import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper; +import ca.uhn.fhir.jpa.provider.DaoRegistryResourceSupportedSvc; +import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; +import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; +import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl; +import ca.uhn.fhir.jpa.util.ResourceCountCache; +import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain; +import ca.uhn.fhir.mdm.dao.IMdmLinkDao; +import ca.uhn.fhir.rest.api.IResourceSupportedSvc; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; +import org.springframework.batch.core.configuration.annotation.BatchConfigurer; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; + +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; + +@Configuration +public class StarterJpaConfig { + @Bean + public IFulltextSearchSvc fullTextSearchSvc() { + return new FulltextSearchSvcImpl(); + } + + @Bean + public IStaleSearchDeletingSvc staleSearchDeletingSvc() { + return new StaleSearchDeletingSvcImpl(); + } + + @Primary + @Bean + public CachingValidationSupport validationSupportChain(JpaValidationSupportChain theJpaValidationSupportChain) { + return ValidationSupportConfigUtil.newCachingValidationSupport(theJpaValidationSupportChain); + } + + @Bean + public BatchConfigurer batchConfigurer() { + return new NonPersistedBatchConfigurer(); + } + + @Autowired + AppProperties appProperties; + @Autowired + private DataSource myDataSource; + @Autowired + private ConfigurableEnvironment configurableEnvironment; + + /** + * Customize the default/max page sizes for search results. You can set these however + * you want, although very large page sizes will require a lot of RAM. + */ + @Bean + public DatabaseBackedPagingProvider databaseBackedPagingProvider() { + DatabaseBackedPagingProvider pagingProvider = new DatabaseBackedPagingProvider(); + pagingProvider.setDefaultPageSize(appProperties.getDefault_page_size()); + pagingProvider.setMaximumPageSize(appProperties.getMax_page_size()); + return pagingProvider; + } + + @Bean + public IResourceSupportedSvc resourceSupportedSvc(IDaoRegistry theDaoRegistry) { + return new DaoRegistryResourceSupportedSvc(theDaoRegistry); + } + + @Bean(name = "myResourceCountsCache") + public ResourceCountCache resourceCountsCache(IFhirSystemDao theSystemDao) { + return ResourceCountCacheUtil.newResourceCountCache(theSystemDao); + } + + @Primary + @Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory myConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(myConfigurableListableBeanFactory, theFhirContext); + retVal.setPersistenceUnitName("HAPI_PU"); + + try { + retVal.setDataSource(myDataSource); + } catch (Exception e) { + throw new ConfigurationException("Could not set the data source due to a configuration issue", e); + } + retVal.setJpaProperties(EnvironmentHelper.getHibernateProperties(configurableEnvironment, myConfigurableListableBeanFactory)); + return retVal; + } + + @Bean + @Primary + public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) { + JpaTransactionManager retVal = new JpaTransactionManager(); + retVal.setEntityManagerFactory(entityManagerFactory); + return retVal; + } + + @Autowired + private ISearchParamRegistry mySearchParamRegistry; + + @Bean + public IHSearchSortHelper hSearchSortHelper() { + return new HSearchSortHelperImpl(mySearchParamRegistry); + } + @Bean + public static IMdmLinkDao mdmLinkDao(){ + return new MdmLinkDaoJpaImpl(); + } + + +} diff --git 
a/src/main/java/ca/uhn/fhir/jpa/starter/mdm/MdmConfig.java b/src/main/java/ca/uhn/fhir/jpa/starter/mdm/MdmConfig.java index 301c8833150..50dfe9e0489 100644 --- a/src/main/java/ca/uhn/fhir/jpa/starter/mdm/MdmConfig.java +++ b/src/main/java/ca/uhn/fhir/jpa/starter/mdm/MdmConfig.java @@ -1,15 +1,12 @@ package ca.uhn.fhir.jpa.starter.mdm; -import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.mdm.config.MdmConsumerConfig; import ca.uhn.fhir.jpa.mdm.config.MdmSubmitterConfig; import ca.uhn.fhir.jpa.starter.AppProperties; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.rules.config.MdmRuleValidator; import ca.uhn.fhir.mdm.rules.config.MdmSettings; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import com.google.common.base.Charsets; -import java.io.IOException; import org.apache.commons.io.IOUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; @@ -19,16 +16,13 @@ import org.springframework.core.io.DefaultResourceLoader; import org.springframework.core.io.Resource; +import java.io.IOException; + @Configuration @Conditional(MdmConfigCondition.class) @Import({MdmConsumerConfig.class, MdmSubmitterConfig.class}) public class MdmConfig { - @Bean - MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRegistry theSearchParamRegistry) { - return new MdmRuleValidator(theFhirContext, theSearchParamRegistry); - } - @Bean IMdmSettings mdmSettings(@Autowired MdmRuleValidator theMdmRuleValidator, AppProperties appProperties) throws IOException { DefaultResourceLoader resourceLoader = new DefaultResourceLoader(); @@ -36,5 +30,4 @@ IMdmSettings mdmSettings(@Autowired MdmRuleValidator theMdmRuleValidator, AppPro String json = IOUtils.toString(resource.getInputStream(), Charsets.UTF_8); return new MdmSettings(theMdmRuleValidator).setEnabled(appProperties.getMdm_enabled()).setScriptText(json); } - } diff --git a/src/main/resources/application-custom.yaml b/src/main/resources/application-custom.yaml index 4f1686c952f..bd7f35ad9db 100644 --- a/src/main/resources/application-custom.yaml +++ b/src/main/resources/application-custom.yaml @@ -1,91 +1,119 @@ +#Adds the option to go to eg. 
http://localhost:8080/actuator/health for seeing the running configuration +#see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints +management: + endpoints: + web: + exposure: + include: "health,prometheus" spring: + main: + allow-circular-references: true + #allow-bean-definition-overriding: true + batch.job.enabled: false + flyway: + enabled: false + check-location: false + baselineOnMigrate: true datasource: url: ${datasource.url} username: ${datasource.username} password: ${datasource.password} driverClassName: ${datasource.driver} max-active: 15 + + # database connection pool size + hikari: + maximum-pool-size: 10 jpa: properties: - hibernate.dialect: ${hibernate.dialect} hibernate.format_sql: false hibernate.show_sql: false + hibernate.dialect: ${hibernate.dialect} hibernate.hbm2ddl.auto: update hibernate.jdbc.batch_size: 20 hibernate.cache.use_query_cache: false hibernate.cache.use_second_level_cache: false hibernate.cache.use_structured_entries: false hibernate.cache.use_minimal_puts: false -### These settings will enable fulltext search with lucene - hibernate.search.enabled: true - hibernate.search.backend.type: lucene - hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer - hibernate.search.backend.directory.type: local-filesystem - hibernate.search.backend.directory.root: target/lucenefiles - hibernate.search.backend.lucene_version: lucene_current + ### These settings will enable fulltext search with lucene or elastic + hibernate.search.enabled: false + ### lucene parameters + # hibernate.search.backend.type: lucene + # hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiLuceneAnalysisConfigurer + # hibernate.search.backend.directory.type: local-filesystem + # hibernate.search.backend.directory.root: target/lucenefiles + # hibernate.search.backend.lucene_version: lucene_current hibernate.physical_naming_strategy: org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl batch: job: enabled: false - main: -# TODO 5.6.0 -> Prevent duplicate bean definitions in the Spring batch config in HAPI: see: - allow-bean-definition-overriding: true + hapi: fhir: + ### This enables the swagger-ui at /fhir/swagger-ui/index.html as well as the /fhir/api-docs (see https://hapifhir.io/hapi-fhir/docs/server_plain/openapi.html) + openapi_enabled: true ### This is the FHIR version. Choose between, DSTU2, DSTU3, R4 or R5 fhir_version: ${fhir_version} -### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers -### to determine the FHIR server address -# use_apache_address_strategy: false -### forces the use of the https:// protocol for the returned server address. -### alternatively, it may be set using the X-Forwarded-Proto header. 
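The openapi_enabled: true switch above corresponds to the OpenApiInterceptor registered in BaseJpaRestfulServer; the same interceptor works on any plain HAPI FHIR server. A minimal sketch, with the /fhir/* servlet mapping and an R4 context assumed (resource providers omitted for brevity):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.openapi.OpenApiInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;

// With this mapping, swagger-ui is served under /fhir/swagger-ui/index.html and the spec under /fhir/api-docs.
@WebServlet("/fhir/*")
public class OpenApiEnabledServer extends RestfulServer {

  public OpenApiEnabledServer() {
    super(FhirContext.forR4());
  }

  @Override
  protected void initialize() throws ServletException {
    registerInterceptor(new OpenApiInterceptor());
  }
}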
-# use_apache_address_strategy_https: false -### enable to set the Server URL -# server_address: http://hapi.fhir.org/baseR4 -# defer_indexing_for_codesystems_of_size: 101 - #implementationguides: - #example from registry (packages.fhir.org) - #swiss: - #name: swiss.mednet.fhir - #version: 0.8.0 - #example not from registry - #ips_1_0_0: - #url: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz - #name: hl7.fhir.uv.ips - #version: 1.0.0 - - #supported_resource_types: - # - Patient - # - Observation + ### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers + ### to determine the FHIR server address + # use_apache_address_strategy: false + ### forces the use of the https:// protocol for the returned server address. + ### alternatively, it may be set using the X-Forwarded-Proto header. + # use_apache_address_strategy_https: false + ### enable to set the Server URL + # server_address: http://hapi.fhir.org/baseR4 + # defer_indexing_for_codesystems_of_size: 101 + # install_transitive_ig_dependencies: true + # implementationguides: + ### example from registry (packages.fhir.org) + # swiss: + # name: swiss.mednet.fhir + # version: 0.8.0 + # example not from registry + # ips_1_0_0: + # url: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz + # name: hl7.fhir.uv.ips + # version: 1.0.0 + # supported_resource_types: + # - Patient + # - Observation allow_cascading_deletes: true allow_contains_searches: true allow_external_references: true allow_multiple_delete: true allow_override_default_search_params: true - allow_placeholder_references: true auto_create_placeholder_reference_targets: false -# cql_enabled: true + # cql_enabled: true default_encoding: JSON -# default_pretty_print: true + # default_pretty_print: true default_page_size: 20 -# delete_expunge_enabled: true -# enable_repository_validating_interceptor: false - enable_index_missing_fields: false -# enable_index_contained_resource: false + # delete_expunge_enabled: true + # enable_repository_validating_interceptor: false + # enable_index_missing_fields: false + # enable_index_of_type: true + # enable_index_contained_resource: false + ### !!Extended Lucene/Elasticsearch Indexing is still a experimental feature, expect some features (e.g. _total=accurate) to not work as expected!! + ### more information here: https://hapifhir.io/hapi-fhir/docs/server_jpa/elastic.html + advanced_lucene_indexing: false + # enforce_referential_integrity_on_delete: false + # This is an experimental feature, and does not fully support _total and other FHIR features. 
enforce_referential_integrity_on_delete: false enforce_referential_integrity_on_write: false etag_support_enabled: true expunge_enabled: true -# daoconfig_client_id_strategy: null -# client_id_strategy: ALPHANUMERIC - fhirpath_interceptor_enabled: true + # daoconfig_client_id_strategy: null + # client_id_strategy: ALPHANUMERIC + # fhirpath_interceptor_enabled: false filter_search_enabled: true graphql_enabled: true -# narrative_enabled: true -# mdm_enabled: true -# partitioning: -# allow_references_across_partitions: false -# partitioning_include_in_search_hashes: false + # narrative_enabled: true + # mdm_enabled: true +# local_base_urls: +# - https://hapi.fhir.org/baseR4 + mdm_enabled: false + # partitioning: + # allow_references_across_partitions: false + # partitioning_include_in_search_hashes: false cors: allow_Credentials: true # These are allowed_origin patterns, see: https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/cors/CorsConfiguration.html#setAllowedOriginPatterns-java.util.List- @@ -97,49 +125,54 @@ hapi: search-coord-max-pool-size: 100 search-coord-queue-capacity: 200 + # Threadpool size for BATCH'ed GETs in a bundle. +# bundle_batch_pool_size: 10 +# bundle_batch_pool_max_size: 50 + # logger: -# error_format: 'ERROR - ${requestVerb} ${requestUrl}' -# format: >- -# Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] -# Operation[${operationType} ${operationName} ${idOrResourceName}] -# UA[${requestHeader.user-agent}] Params[${requestParameters}] -# ResponseEncoding[${responseEncodingNoDefault}] -# log_exceptions: true -# name: fhirtest.access + # error_format: 'ERROR - ${requestVerb} ${requestUrl}' + # format: >- + # Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] + # Operation[${operationType} ${operationName} ${idOrResourceName}] + # UA[${requestHeader.user-agent}] Params[${requestParameters}] + # ResponseEncoding[${responseEncodingNoDefault}] + # log_exceptions: true + # name: fhirtest.access max_binary_size: 104857600 max_page_size: 200 retain_cached_searches_mins: 60 reuse_cached_search_results_millis: ${reuse_cached_search_results_millis} tester: - home: - name: Local Tester - server_address: ${server_address} - refuse_to_fetch_third_party_urls: false - fhir_version: ${fhir_version} - global: - name: Global Tester - server_address: "http://hapi.fhir.org/baseR4" - refuse_to_fetch_third_party_urls: false - fhir_version: ${fhir_version} - validation: - requests_enabled: false - responses_enabled: false + home: + name: Local Tester + server_address: ${server_address} + refuse_to_fetch_third_party_urls: false + fhir_version: ${fhir_version} + global: + name: Global Tester + server_address: "http://hapi.fhir.org/baseR4" + refuse_to_fetch_third_party_urls: false + fhir_version: ${fhir_version} + # validation: + # requests_enabled: true + # responses_enabled: true binary_storage_enabled: true bulk_export_enabled: true subscription: - resthook_enabled: ${subscription.resthook.enabled} - websocket_enabled: ${subscription.websocket.enabled} - email: - from: some@test.com - host: google.com - port: - username: - password: -# auth: -# startTlsEnable: -# startTlsRequired: -# quitWait: -# lastn_enabled: true + resthook_enabled: ${subscription.resthook.enabled} + websocket_enabled: ${subscription.websocket.enabled} +# email: +# from: some@test.com +# host: google.com +# port: +# username: +# password: +# auth: +# startTlsEnable: +# startTlsRequired: +# quitWait: +# lastn_enabled: true +# 
store_resource_in_lucene_index_enabled: true ### This is configuration for normalized quantity serach level default is 0 ### 0: NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED - default ### 1: NORMALIZED_QUANTITY_STORAGE_SUPPORTED diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml index c4c393d6dfe..2ba756d54c3 100644 --- a/src/main/resources/application.yaml +++ b/src/main/resources/application.yaml @@ -1,4 +1,19 @@ +#Adds the option to go to eg. http://localhost:8080/actuator/health for seeing the running configuration +#see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints +management: + endpoints: + web: + exposure: + include: "health,prometheus" spring: + main: + allow-circular-references: true + #allow-bean-definition-overriding: true + batch.job.enabled: false + flyway: + enabled: false + check-location: false + baselineOnMigrate: true datasource: url: 'jdbc:h2:file:./target/database/h2' #url: jdbc:h2:mem:test_mem @@ -14,81 +29,95 @@ spring: properties: hibernate.format_sql: false hibernate.show_sql: false -# hibernate.dialect: org.hibernate.dialect.h2dialect -# hibernate.hbm2ddl.auto: update -# hibernate.jdbc.batch_size: 20 -# hibernate.cache.use_query_cache: false -# hibernate.cache.use_second_level_cache: false -# hibernate.cache.use_structured_entries: false -# hibernate.cache.use_minimal_puts: false -### These settings will enable fulltext search with lucene -# hibernate.search.enabled: true + #Hibernate dialect is automatically detected except Postgres and H2. + #If using H2, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect + #If using postgres, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect + + hibernate.dialect: ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect + # hibernate.hbm2ddl.auto: update + # hibernate.jdbc.batch_size: 20 + # hibernate.cache.use_query_cache: false + # hibernate.cache.use_second_level_cache: false + # hibernate.cache.use_structured_entries: false + # hibernate.cache.use_minimal_puts: false + ### These settings will enable fulltext search with lucene or elastic + hibernate.search.enabled: false + ### lucene parameters # hibernate.search.backend.type: lucene -# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer +# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiLuceneAnalysisConfigurer # hibernate.search.backend.directory.type: local-filesystem # hibernate.search.backend.directory.root: target/lucenefiles # hibernate.search.backend.lucene_version: lucene_current - batch: - job: - enabled: false - main: -# TODO 5.6.0 -> Prevent duplicate bean definitions in the Spring batch config in HAPI: see: - allow-bean-definition-overriding: true + ### elastic parameters ===> see also elasticsearch section below <=== +# hibernate.search.backend.type: elasticsearch +# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiElasticAnalysisConfigurer + hapi: fhir: + ### This enables the swagger-ui at /fhir/swagger-ui/index.html as well as the /fhir/api-docs (see https://hapifhir.io/hapi-fhir/docs/server_plain/openapi.html) + openapi_enabled: true ### This is the FHIR version. 
Choose between, DSTU2, DSTU3, R4 or R5 fhir_version: R4 -### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers -### to determine the FHIR server address -# use_apache_address_strategy: false -### forces the use of the https:// protocol for the returned server address. -### alternatively, it may be set using the X-Forwarded-Proto header. -# use_apache_address_strategy_https: false -### enable to set the Server URL -# server_address: http://hapi.fhir.org/baseR4 -# defer_indexing_for_codesystems_of_size: 101 -# install_transitive_ig_dependencies: true -# implementationguides: -### example from registry (packages.fhir.org) -# swiss: -# name: swiss.mednet.fhir -# version: 0.8.0 -# example not from registry -# ips_1_0_0: -# url: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz -# name: hl7.fhir.uv.ips -# version: 1.0.0 -# supported_resource_types: -# - Patient -# - Observation -# allow_cascading_deletes: true -# allow_contains_searches: true -# allow_external_references: true -# allow_multiple_delete: true -# allow_override_default_search_params: true -# auto_create_placeholder_reference_targets: false -# cql_enabled: true -# default_encoding: JSON -# default_pretty_print: true -# default_page_size: 20 -# delete_expunge_enabled: true -# enable_repository_validating_interceptor: false -# enable_index_missing_fields: false -# enable_index_contained_resource: false + ### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers + ### to determine the FHIR server address + # use_apache_address_strategy: false + ### forces the use of the https:// protocol for the returned server address. + ### alternatively, it may be set using the X-Forwarded-Proto header. + # use_apache_address_strategy_https: false + ### enable to set the Server URL + # server_address: http://hapi.fhir.org/baseR4 + # defer_indexing_for_codesystems_of_size: 101 + # install_transitive_ig_dependencies: true + # implementationguides: + ### example from registry (packages.fhir.org) + # swiss: + # name: swiss.mednet.fhir + # version: 0.8.0 + # example not from registry + # ips_1_0_0: + # url: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz + # name: hl7.fhir.uv.ips + # version: 1.0.0 + # supported_resource_types: + # - Patient + # - Observation + # allow_cascading_deletes: true + # allow_contains_searches: true + # allow_external_references: true + # allow_multiple_delete: true + # allow_override_default_search_params: true + # auto_create_placeholder_reference_targets: false + # cql_enabled: true + # default_encoding: JSON + # default_pretty_print: true + # default_page_size: 20 + # delete_expunge_enabled: true + # enable_repository_validating_interceptor: false + # enable_index_missing_fields: false + # enable_index_of_type: true + # enable_index_contained_resource: false + ### !!Extended Lucene/Elasticsearch Indexing is still a experimental feature, expect some features (e.g. _total=accurate) to not work as expected!! + ### more information here: https://hapifhir.io/hapi-fhir/docs/server_jpa/elastic.html + advanced_lucene_indexing: false + # enforce_referential_integrity_on_delete: false + # This is an experimental feature, and does not fully support _total and other FHIR features. 
# enforce_referential_integrity_on_delete: false -# enforce_referential_integrity_on_write: false -# etag_support_enabled: true -# expunge_enabled: true -# daoconfig_client_id_strategy: null -# client_id_strategy: ALPHANUMERIC -# fhirpath_interceptor_enabled: false -# filter_search_enabled: true -# graphql_enabled: true -# narrative_enabled: true -# mdm_enabled: true -# partitioning: -# allow_references_across_partitions: false -# partitioning_include_in_search_hashes: false + # enforce_referential_integrity_on_write: false + # etag_support_enabled: true + # expunge_enabled: true + # daoconfig_client_id_strategy: null + # client_id_strategy: ALPHANUMERIC + # fhirpath_interceptor_enabled: false + # filter_search_enabled: true + # graphql_enabled: true + # narrative_enabled: true + # mdm_enabled: true +# local_base_urls: +# - https://hapi.fhir.org/baseR4 + mdm_enabled: false + # partitioning: + # allow_references_across_partitions: false + # partitioning_include_in_search_hashes: false cors: allow_Credentials: true # These are allowed_origin patterns, see: https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/cors/CorsConfiguration.html#setAllowedOriginPatterns-java.util.List- @@ -100,30 +129,34 @@ hapi: search-coord-max-pool-size: 100 search-coord-queue-capacity: 200 + # Threadpool size for BATCH'ed GETs in a bundle. +# bundle_batch_pool_size: 10 +# bundle_batch_pool_max_size: 50 + # logger: -# error_format: 'ERROR - ${requestVerb} ${requestUrl}' -# format: >- -# Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] -# Operation[${operationType} ${operationName} ${idOrResourceName}] -# UA[${requestHeader.user-agent}] Params[${requestParameters}] -# ResponseEncoding[${responseEncodingNoDefault}] -# log_exceptions: true -# name: fhirtest.access -# max_binary_size: 104857600 -# max_page_size: 200 -# retain_cached_searches_mins: 60 -# reuse_cached_search_results_millis: 60000 + # error_format: 'ERROR - ${requestVerb} ${requestUrl}' + # format: >- + # Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] + # Operation[${operationType} ${operationName} ${idOrResourceName}] + # UA[${requestHeader.user-agent}] Params[${requestParameters}] + # ResponseEncoding[${responseEncodingNoDefault}] + # log_exceptions: true + # name: fhirtest.access + # max_binary_size: 104857600 + # max_page_size: 200 + # retain_cached_searches_mins: 60 + # reuse_cached_search_results_millis: 60000 tester: - home: - name: Local Tester - server_address: 'http://localhost:8080/fhir' - refuse_to_fetch_third_party_urls: false - fhir_version: R4 - global: - name: Global Tester - server_address: "http://hapi.fhir.org/baseR4" - refuse_to_fetch_third_party_urls: false - fhir_version: R4 + home: + name: Local Tester + server_address: 'http://localhost:8080/fhir' + refuse_to_fetch_third_party_urls: false + fhir_version: R4 + global: + name: Global Tester + server_address: "http://hapi.fhir.org/baseR4" + refuse_to_fetch_third_party_urls: false + fhir_version: R4 # validation: # requests_enabled: true # responses_enabled: true @@ -143,6 +176,7 @@ hapi: # startTlsRequired: # quitWait: # lastn_enabled: true +# store_resource_in_lucene_index_enabled: true ### This is configuration for normalized quantity serach level default is 0 ### 0: NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED - default ### 1: NORMALIZED_QUANTITY_STORAGE_SUPPORTED diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/Demo.java b/src/test/java/ca/uhn/fhir/jpa/starter/Demo.java index 5dadf14f8da..d46094360ee 100644 --- 
a/src/test/java/ca/uhn/fhir/jpa/starter/Demo.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/Demo.java @@ -15,6 +15,6 @@ public static void main(String[] args) { System.setProperty("spring.batch.job.enabled", "false"); SpringApplication.run(Demo.class, args); - //Server is now accessible at eg. http://localhost:8080/metadata + //Server is now accessible at eg. http://localhost:8080/fhir/metadata } } diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/ElasticsearchLastNR4IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/ElasticsearchLastNR4IT.java index 24dfada96cb..6ad49c61cf5 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/ElasticsearchLastNR4IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/ElasticsearchLastNR4IT.java @@ -42,13 +42,19 @@ "spring.datasource.url=jdbc:h2:mem:dbr4", "hapi.fhir.fhir_version=r4", "hapi.fhir.lastn_enabled=true", + "hapi.fhir.store_resource_in_lucene_index_enabled=true", + "hapi.fhir.advanced_lucene_indexing=true", "elasticsearch.enabled=true", // Because the port is set randomly, we will set the rest_url using the Initializer. // "elasticsearch.rest_url='http://localhost:9200'", "elasticsearch.username=SomeUsername", "elasticsearch.password=SomePassword", + "elasticsearch.debug.refresh_after_write=true", "elasticsearch.protocol=http", - "spring.main.allow-bean-definition-overriding=true" + "spring.main.allow-bean-definition-overriding=true", + "spring.jpa.properties.hibernate.search.enabled=true", + "spring.jpa.properties.hibernate.search.backend.type=elasticsearch", + "spring.jpa.properties.hibernate.search.backend.analysis.configurer=ca.uhn.fhir.jpa.search.elastic.HapiElasticsearchAnalysisConfigurer" }) @ContextConfiguration(initializers = ElasticsearchLastNR4IT.Initializer.class) public class ElasticsearchLastNR4IT { @@ -56,10 +62,9 @@ public class ElasticsearchLastNR4IT { private IGenericClient ourClient; private FhirContext ourCtx; - private static final String ELASTIC_VERSION = "7.10.2"; - private static final String ELASTIC_IMAGE = "docker.elastic.co/elasticsearch/elasticsearch:" + ELASTIC_VERSION; - - private static ElasticsearchContainer embeddedElastic; + private static final String ELASTIC_VERSION = "7.16.3"; + private static final String ELASTIC_IMAGE = "docker.elastic.co/elasticsearch/elasticsearch:" + ELASTIC_VERSION; + private static ElasticsearchContainer embeddedElastic; @Autowired private ElasticsearchSvcImpl myElasticsearchSvc; @@ -79,7 +84,8 @@ public void stop() { private int port; @Test - void testLastN() throws IOException { + void testLastN() throws IOException, InterruptedException { + Thread.sleep(2000); Patient pt = new Patient(); pt.addName().setFamily("Lastn").addGiven("Arthur"); @@ -89,8 +95,10 @@ void testLastN() throws IOException { obs.getSubject().setReferenceElement(id); String observationCode = "testobservationcode"; String codeSystem = "http://testobservationcodesystem"; + obs.getCode().addCoding().setCode(observationCode).setSystem(codeSystem); obs.setValue(new StringType(observationCode)); + Date effectiveDtm = new GregorianCalendar().getTime(); obs.setEffective(new DateTimeType(effectiveDtm)); obs.getCategoryFirstRep().addCoding().setCode("testcategorycode").setSystem("http://testcategorycodesystem"); diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu2IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu2IT.java index 0b408deb2c7..964fae24a6f 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu2IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu2IT.java @@ 
-21,7 +21,6 @@ "spring.batch.job.enabled=false", "hapi.fhir.fhir_version=dstu2", "spring.datasource.url=jdbc:h2:mem:dbr2", - "spring.main.allow-bean-definition-overriding=true" }) public class ExampleServerDstu2IT { diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu3IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu3IT.java index 91cb4baeb30..b4ca6c3cec3 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu3IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerDstu3IT.java @@ -46,7 +46,6 @@ "hapi.fhir.subscription.websocket_enabled=true", "hapi.fhir.allow_external_references=true", "hapi.fhir.allow_placeholder_references=true", - "spring.main.allow-bean-definition-overriding=true" }) diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR4IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR4IT.java index 6d995238411..7b0548747a7 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR4IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR4IT.java @@ -1,9 +1,12 @@ package ca.uhn.fhir.jpa.starter; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.CacheControlDirective; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.MethodOutcome; +import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; @@ -86,6 +89,57 @@ private Patient getGoldenResourcePatient() { } } + @Test + public void testBatchPutWithIdenticalTags() { + String batchPuts = "{\n" + + "\t\"resourceType\": \"Bundle\",\n" + + "\t\"id\": \"patients\",\n" + + "\t\"type\": \"batch\",\n" + + "\t\"entry\": [\n" + + "\t\t{\n" + + "\t\t\t\"request\": {\n" + + "\t\t\t\t\"method\": \"PUT\",\n" + + "\t\t\t\t\"url\": \"Patient/pat-1\"\n" + + "\t\t\t},\n" + + "\t\t\t\"resource\": {\n" + + "\t\t\t\t\"resourceType\": \"Patient\",\n" + + "\t\t\t\t\"id\": \"pat-1\",\n" + + "\t\t\t\t\"meta\": {\n" + + "\t\t\t\t\t\"tag\": [\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"system\": \"http://mysystem.org\",\n" + + "\t\t\t\t\t\t\t\"code\": \"value2\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t}\n" + + "\t\t\t},\n" + + "\t\t\t\"fullUrl\": \"/Patient/pat-1\"\n" + + "\t\t},\n" + + "\t\t{\n" + + "\t\t\t\"request\": {\n" + + "\t\t\t\t\"method\": \"PUT\",\n" + + "\t\t\t\t\"url\": \"Patient/pat-2\"\n" + + "\t\t\t},\n" + + "\t\t\t\"resource\": {\n" + + "\t\t\t\t\"resourceType\": \"Patient\",\n" + + "\t\t\t\t\"id\": \"pat-2\",\n" + + "\t\t\t\t\"meta\": {\n" + + "\t\t\t\t\t\"tag\": [\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"system\": \"http://mysystem.org\",\n" + + "\t\t\t\t\t\t\t\"code\": \"value2\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t}\n" + + "\t\t\t},\n" + + "\t\t\t\"fullUrl\": \"/Patient/pat-2\"\n" + + "\t\t}\n" + + "\t]\n" + + "}"; + Bundle bundle = FhirContext.forR4().newJsonParser().parseResource(Bundle.class, batchPuts); + ourClient.transaction().withBundle(bundle).execute(); + } + @Test @Order(1) void testWebsocketSubscription() throws Exception { diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR5IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR5IT.java index 6a7fc52df23..055d1ab25d7 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR5IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/ExampleServerR5IT.java @@ -39,7 +39,6 @@ 
"hapi.fhir.fhir_version=r5", "hapi.fhir.subscription.websocket_enabled=true", "hapi.fhir.subscription.websocket_enabled=true", - "spring.main.allow-bean-definition-overriding=true" }) public class ExampleServerR5IT { diff --git a/src/test/java/ca/uhn/fhir/jpa/starter/MultitenantServerR4IT.java b/src/test/java/ca/uhn/fhir/jpa/starter/MultitenantServerR4IT.java index d8ae7cfa1a1..29af5e7482a 100644 --- a/src/test/java/ca/uhn/fhir/jpa/starter/MultitenantServerR4IT.java +++ b/src/test/java/ca/uhn/fhir/jpa/starter/MultitenantServerR4IT.java @@ -25,7 +25,6 @@ "hapi.fhir.fhir_version=r4", "hapi.fhir.subscription.websocket_enabled=true", "hapi.fhir.partitioning.partitioning_include_in_search_hashes=false", - "spring.main.allow-bean-definition-overriding=true" }) public class MultitenantServerR4IT { diff --git a/src/test/resources/application-integrationtest.yaml b/src/test/resources/application-integrationtest.yaml index f3118911c0d..3ab724038f8 100644 --- a/src/test/resources/application-integrationtest.yaml +++ b/src/test/resources/application-integrationtest.yaml @@ -35,6 +35,8 @@ hapi: # fhirpath_interceptor_enabled: false # filter_search_enabled: true # graphql_enabled: true +# local_base_urls: +# - http://hapi.fhir.org/baseR4 #partitioning: # cross_partition_reference_mode: true # multitenancy_enabled: true diff --git a/src/test/smoketest/SMOKE_TEST.md b/src/test/smoketest/SMOKE_TEST.md new file mode 100644 index 00000000000..968d7fb2fd8 --- /dev/null +++ b/src/test/smoketest/SMOKE_TEST.md @@ -0,0 +1,53 @@ +# JPA Server Starter Smoke Tests + +--- + +When updating the supporting HAPI-FHIR version, or making changes to the JPA server starter code itself, it is recommended to run basic smoke tests to ensure the basic functionality of the server is maintained. The goal of these tests is +to provide users a quick way to run groups of tests that correspond to various sections within the +[HAPI-FHIR documentation][Link-HAPI-FHIR-docs]. + +### Requirements +Tests are written in IntelliJ [HTTP Client Request files][Link-HTTP-Client-Req-Intro]. Ultimate edition of IntelliJ +is required to run these tests. + +For more details on integrated tooling and request syntax, there is [documentation available][Link-HTTP-Client-Req-Exploring] +on the jetbrains website, in addition to the [API reference][Link-HTTP-Client-Req-API]. + +### Formatting +Each test file corresponds to a given section within the hapifhir documentation as close as possible. For +example, there is a `plain_server.rest` file, which attemps to smoke test all basic functionality outlined in the section +[within the docs][Link-HAPI-FHIR-docs-plain-server]. + +Individual tests are formatted as follows: +```javascript +### Test Title Here +# +REST-OPERATION ENDPOINT-URL +// Verification Steps +``` + +To run these tests against a specific server, configure the server details within the `http-client.env.json` file. By default, we provide the following: +```json +{ + "default": { + "host": "localhost:8080", + "username": "username", + "password": "password" + } +} +``` + +### Running the Tests +Within IntelliJ, right click the file you wish to run tests in and select the `Run All` option from the menu. + +**Important:** Tests may not work individually when run. Often times, tests need to be run sequentially, as they depend +on resources/references from previous tests to complete. _(An example of this would be adding a Patient, saving the id, +then using that saved id to test if we can successfully PATCH that Patient resource. 
Without that saved id from the +previous test creating that patient, the PATCH test will fail.)_ + + +[Link-HAPI-FHIR-docs]: https://hapifhir.io/hapi-fhir/docs/ +[Link-HAPI-FHIR-docs-plain-server]: https://hapifhir.io/hapi-fhir/docs/server_plain/server_types.html +[Link-HTTP-Client-Req-Intro]: https://www.jetbrains.com/help/idea/http-client-in-product-code-editor.html +[Link-HTTP-Client-Req-Exploring]: https://www.jetbrains.com/help/idea/exploring-http-syntax.html +[Link-HTTP-Client-Req-API]: https://www.jetbrains.com/help/idea/http-response-handling-api-reference.html \ No newline at end of file diff --git a/src/test/smoketest/http-client.env.json b/src/test/smoketest/http-client.env.json new file mode 100644 index 00000000000..85b8ef4a45d --- /dev/null +++ b/src/test/smoketest/http-client.env.json @@ -0,0 +1,7 @@ +{ + "default": { + "host": "localhost:8080", + "username": "username", + "password": "password" + } +} \ No newline at end of file diff --git a/src/test/smoketest/plain_server.rest b/src/test/smoketest/plain_server.rest new file mode 100644 index 00000000000..c5856ffa8f9 --- /dev/null +++ b/src/test/smoketest/plain_server.rest @@ -0,0 +1,223 @@ +### Create Single Patient +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#type_create +POST http://{{host}}/fhir/Patient +Content-Type: application/json + +< smoketestfiles/patient_create.json + +> {% + client.test("Patient created successfully", function() { + client.assert(response.status === 201, "Response status is not 201"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Patient", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Patient", "Expected 'Patient' but received '" + resourceType + "'"); + }); + client.global.set("single_patient_id", response.body.id); + client.global.set("single_patient_family_name", response.body.name[0].family); +%} + +### Search Single Patient +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#type_search +GET http://{{host}}/fhir/Patient?name={{single_patient_family_name}}&_id={{single_patient_id}} + +> {% + client.test("Patient search successful", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Bundle", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Bundle", "Expected 'Bundle' but received '" + resourceType + "'"); + }); + client.test("Total patients found is 1", function() { + const totalFound = response.body.total; + client.assert(totalFound === 1, "Expected '1' match but found '" + totalFound + "'"); + }); + %} + +### Delete Patient +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#instance_delete +DELETE http://{{host}}/fhir/Patient/{{single_patient_id}} + +> {% + client.test("Patient deleted successfully", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + 
client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is OperationOutcome", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "OperationOutcome", "Expected 'OperationOutcome' but received '" + resourceType + "'"); + }); +%} + +### Batch Patient Create +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#system_transaction +POST http://{{host}}/fhir/ +Content-Type: application/json + +< smoketestfiles/patient_batch_create.json + +> {% + client.test("Bundle transaction completed successfully", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Bundle", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Bundle", "Expected 'Bundle' but received '" + resourceType + "'"); + }); + client.test("All patient additions successful", function() { + for (var index = 0; index < response.body.entry.length; index++) { + client.assert(response.body.entry[index].response.status === "201 Created", "Expected '201 Created' for patient index " + index + " but received '" + response.body.entry[index].response.status + "'"); + } + }); + const batch_patient_location = response.body.entry[0].response.location; + const indexOfHistory = batch_patient_location.lastIndexOf("/_history"); + trimmed_location = batch_patient_location.substring(0, indexOfHistory); + trimmed_location = trimmed_location.replace("Patient/", "") + client.global.set("batch_patient_id", trimmed_location); +%} + +### Update - Instance +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#instance_update +PUT http://{{host}}/fhir/Patient/{{batch_patient_id}} +Content-Type: application/json + +< smoketestfiles/patient_update.json + +> {% + client.test("Bundle transaction completed successfully", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Patient", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Patient", "Expected 'Patient' but received '" + resourceType + "'"); + }); + client.test("Test last name updated", function() { + const familyName = response.body.name[0].family; + client.assert(familyName === "Iantoryes", "Expected updated family name 'Iantoryes' but received '" + familyName + "'"); + }); + client.test("Test version number updated", function() { + const versionId = response.body.meta.versionId; + client.assert(versionId === "2", "Expected updated versionId name '2' but received '" + versionId + "'"); + }); +%} + +### Patch - Instance +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#instance-level-patch +PATCH http://{{host}}/fhir/Patient/{{batch_patient_id}} +Content-Type: application/json-patch+json + +< smoketestfiles/patient_patch.json + +> {% + client.test("Patch operation completed successfully", function() { + 
client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Patient", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Patient", "Expected 'Patient' but received '" + resourceType + "'"); + }); + client.test("Test active field patched", function() { + const active = response.body.active; + client.assert(active === false, "Expected updated active 'false' but received '" + active + "'"); + }); +%} + +### History - Server/Type/Instance +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#history +GET http://{{host}}/fhir/Patient/{{batch_patient_id}}/_history + +> {% + client.test("History completed successfully", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Bundle", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Bundle", "Expected 'Bundle' but received '" + resourceType + "'"); + }); + client.test("Test receive history type", function() { + const type = response.body.type; + client.assert(type === "history", "Expected type 'history' but received '" + type + "'"); + }); +%} + +### Capability Statement +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations.html#system_capabilities +GET http://{{host}}/fhir/metadata + +> {% + client.test("CapabilityStatement fetched successfully", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is CapabilityStatement", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "CapabilityStatement", "Expected 'CapabilityStatement' but received '" + resourceType + "'"); + }); +%} + +### Extended Operations - everything +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations_operations.html +GET http://{{host}}/fhir/Patient/{{batch_patient_id}}/$everything + +> {% + client.test("$everything operation successful", function() { + client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is Bundle", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "Bundle", "Expected 'Bundle' but received '" + resourceType + "'"); + }); +%} + +### Extended Operations - validate +# https://hapifhir.io/hapi-fhir/docs/server_plain/rest_operations_operations.html +POST http://{{host}}/fhir/Patient/{{batch_patient_id}}/$validate + +> {% + client.test("$validate operation successful", function() { + 
client.assert(response.status === 200, "Response status is not 200"); + }); + client.test("Response content-type is json", function() { + const type = response.contentType.mimeType; + client.assert(type === "application/fhir+json", "Expected 'application/fhir+json' but received '" + type + "'"); + }); + client.test("Response resourceType is OperationOutcome", function() { + const resourceType = response.body.resourceType; + client.assert(resourceType === "OperationOutcome", "Expected 'OperationOutcome' but received '" + resourceType + "'"); + }); +%} \ No newline at end of file diff --git a/src/test/smoketest/smoketestfiles/patient_batch_create.json b/src/test/smoketest/smoketestfiles/patient_batch_create.json new file mode 100644 index 00000000000..c476f72f931 --- /dev/null +++ b/src/test/smoketest/smoketestfiles/patient_batch_create.json @@ -0,0 +1,85 @@ +{ + "resourceType": "Bundle", + "id": "bundle-transaction", + "meta": { + "lastUpdated": "2014-08-18T01:43:30Z" + }, + "type": "transaction", + "entry": [ + { + "resource": { + "resourceType": "Patient", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Some narrative</div>
" + }, + "active": true, + "name": [ + { + "use": "official", + "family": "Iantorno", + "given": [ + "Mark" + ] + } + ], + "gender": "male", + "birthDate": "1983-06-23" + }, + "request": { + "method": "POST", + "url": "Patient" + } + }, + { + "resource": { + "resourceType": "Patient", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Some narrative</div>
" + }, + "active": true, + "name": [ + { + "use": "official", + "family": "Iantorno", + "given": [ + "Alexander" + ] + } + ], + "gender": "male", + "birthDate": "1993-08-16" + }, + "request": { + "method": "POST", + "url": "Patient" + } + }, + { + "resource": { + "resourceType": "Patient", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Some narrative</div>
" + }, + "active": true, + "name": [ + { + "use": "official", + "family": "Cash", + "given": [ + "Johnny" + ] + } + ], + "gender": "male", + "birthDate": "1932-02-26" + }, + "request": { + "method": "POST", + "url": "Patient" + } + } + ] +} \ No newline at end of file diff --git a/src/test/smoketest/smoketestfiles/patient_create.json b/src/test/smoketest/smoketestfiles/patient_create.json new file mode 100644 index 00000000000..73f58999de6 --- /dev/null +++ b/src/test/smoketest/smoketestfiles/patient_create.json @@ -0,0 +1,162 @@ +{ + "resourceType": "Patient", + "id": "example", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">\n\t\t\t<table>\n\t\t\t\t<tbody>\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td>Name</td>\n\t\t\t\t\t\t<td>Peter James <b>Chalmers</b> (&quot;Jim&quot;)</td>\n\t\t\t\t\t</tr>\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td>Address</td>\n\t\t\t\t\t\t<td>534 Erewhon, Pleasantville, Vic, 3999</td>\n\t\t\t\t\t</tr>\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td>Contacts</td>\n\t\t\t\t\t\t<td>Home: unknown. Work: (03) 5555 6473</td>\n\t\t\t\t\t</tr>\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td>Id</td>\n\t\t\t\t\t\t<td>MRN: 12345 (Acme Healthcare)</td>\n\t\t\t\t\t</tr>\n\t\t\t\t</tbody>\n\t\t\t</table>\n\t\t</div>
" + }, + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "MR" + } + ] + }, + "system": "urn:oid:1.2.36.146.595.217.0.1", + "value": "12345", + "period": { + "start": "2001-05-06" + }, + "assigner": { + "display": "Acme Healthcare" + } + } + ], + "active": true, + "name": [ + { + "use": "official", + "family": "Chalmers", + "given": [ + "Peter", + "James" + ] + }, + { + "use": "usual", + "given": [ + "Jim" + ] + }, + { + "use": "maiden", + "family": "Windsor", + "given": [ + "Peter", + "James" + ], + "period": { + "end": "2002" + } + } + ], + "telecom": [ + { + "use": "home" + }, + { + "system": "phone", + "value": "(03) 5555 6473", + "use": "work", + "rank": 1 + }, + { + "system": "phone", + "value": "(03) 3410 5613", + "use": "mobile", + "rank": 2 + }, + { + "system": "phone", + "value": "(03) 5555 8834", + "use": "old", + "period": { + "end": "2014" + } + } + ], + "gender": "male", + "birthDate": "1974-12-25", + "_birthDate": { + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/patient-birthTime", + "valueDateTime": "1974-12-25T14:35:45-05:00" + } + ] + }, + "deceasedBoolean": false, + "address": [ + { + "use": "home", + "type": "both", + "text": "534 Erewhon St PeasantVille, Rainbow, Vic 3999", + "line": [ + "534 Erewhon St" + ], + "city": "PleasantVille", + "district": "Rainbow", + "state": "Vic", + "postalCode": "3999", + "period": { + "start": "1974-12-25" + } + } + ], + "contact": [ + { + "relationship": [ + { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0131", + "code": "N" + } + ] + } + ], + "name": { + "family": "du Marché", + "_family": { + "extension": [ + { + "url": "http://hl7.org/fhir/StructureDefinition/humanname-own-prefix", + "valueString": "VV" + } + ] + }, + "given": [ + "Bénédicte" + ] + }, + "telecom": [ + { + "system": "phone", + "value": "+33 (237) 998327" + } + ], + "address": { + "use": "home", + "type": "both", + "line": [ + "534 Erewhon St" + ], + "city": "PleasantVille", + "district": "Rainbow", + "state": "Vic", + "postalCode": "3999", + "period": { + "start": "1974-12-25" + } + }, + "gender": "female", + "period": { + "start": "2012" + } + } + ] +} \ No newline at end of file diff --git a/src/test/smoketest/smoketestfiles/patient_patch.json b/src/test/smoketest/smoketestfiles/patient_patch.json new file mode 100644 index 00000000000..b1cfaaa25d6 --- /dev/null +++ b/src/test/smoketest/smoketestfiles/patient_patch.json @@ -0,0 +1,7 @@ +[ + { + "op": "add", + "path": "/active", + "value": false + } +] \ No newline at end of file diff --git a/src/test/smoketest/smoketestfiles/patient_process_message.json b/src/test/smoketest/smoketestfiles/patient_process_message.json new file mode 100644 index 00000000000..033f3cf4a86 --- /dev/null +++ b/src/test/smoketest/smoketestfiles/patient_process_message.json @@ -0,0 +1,63 @@ +{ + "resourceType": "MessageHeader", + "id": "{{batch_patient_id}}", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">\n\t\t\t<p>Update Person resource for Peter James CHALMERS (Jim), MRN: 12345 (Acme Healthcare)</p>\n\t\t</div>
" + }, + "eventCoding": { + "system": "http://example.org/fhir/message-events", + "code": "admin-notify" + }, + "destination": [ + { + "name": "Acme Message Gateway", + "target": { + "reference": "Device/example" + }, + "endpoint": "llp:10.11.12.14:5432", + "receiver": { + "reference": "http://acme.com/ehr/fhir/Practitioner/2323-33-4" + } + } + ], + "sender": { + "reference": "Organization/1" + }, + "enterer": { + "reference": "Practitioner/example" + }, + "author": { + "reference": "Practitioner/example" + }, + "source": { + "name": "Acme Central Patient Registry", + "software": "FooBar Patient Manager", + "version": "3.1.45.AABB", + "contact": { + "system": "phone", + "value": "+1 (555) 123 4567" + }, + "endpoint": "llp:10.11.12.13:5432" + }, + "reason": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/message-reasons-encounter", + "code": "admit" + } + ] + }, + "response": { + "identifier": { + "value": "5015fe84-8e76-4526-89d8-44b322e8d4fb" + }, + "code": "ok" + }, + "focus": [ + { + "reference": "Patient/example" + } + ], + "definition": "http:////acme.com/ehr/fhir/messagedefinition/patientrequest" +} \ No newline at end of file diff --git a/src/test/smoketest/smoketestfiles/patient_update.json b/src/test/smoketest/smoketestfiles/patient_update.json new file mode 100644 index 00000000000..b182b2fbf52 --- /dev/null +++ b/src/test/smoketest/smoketestfiles/patient_update.json @@ -0,0 +1,20 @@ +{ + "resourceType": "Patient", + "id": "{{batch_patient_id}}", + "text": { + "status": "generated", + "div": "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Some narrative</div>
" + }, + "active": true, + "name": [ + { + "use": "official", + "family": "Iantoryes", + "given": [ + "Mark" + ] + } + ], + "gender": "male", + "birthDate": "1983-06-23" +}