Skip to content

Commit

Permalink
#96 - usage of bigquery restricted to glific module
Browse files Browse the repository at this point in the history
  • Loading branch information
petmongrels committed Oct 27, 2023
1 parent 19a469d commit 2295995
Show file tree
Hide file tree
Showing 4 changed files with 71 additions and 27 deletions.
63 changes: 63 additions & 0 deletions glific/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
// Build script for the glific module of avni-integration-service.
// This module is consumed as a library by other modules (e.g. lahi), so it
// produces a plain jar rather than an executable Spring Boot fat jar.
plugins {
id 'org.springframework.boot' version '2.5.6'
id 'io.spring.dependency-management' version '1.0.10.RELEASE'
id 'java'
}

group 'org.avni_integration_service'
version '0.0.2-SNAPSHOT'

// Library module: disable the Boot repackaged jar and emit a standard jar instead.
bootJar {
enabled = false
}

jar {
enabled = true
}

// The codebase uses Java preview features; enable them consistently for
// compilation, tests, and any JavaExec tasks.
tasks.withType(JavaCompile).all {
options.compilerArgs += ['--enable-preview']
}

tasks.withType(Test).all {
jvmArgs += '--enable-preview'
}

tasks.withType(JavaExec) {
jvmArgs += '--enable-preview'
}

// Versions consumed by the dependencyManagement BOM imports below.
ext {
set('springCloudGcpVersion', "2.0.9")
set('springCloudVersion', "2020.0.3")
}

repositories {
mavenCentral()
}

dependencies {
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.6.0'
testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine'

// BigQuery access is deliberately confined to this module (see commit #96);
// downstream modules depend on glific instead of declaring these starters.
// https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-gcp-starter
implementation 'org.springframework.cloud:spring-cloud-gcp-starter:1.2.6.RELEASE'
// https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-gcp-starter-bigquery
implementation 'org.springframework.cloud:spring-cloud-gcp-starter-bigquery:1.2.6.RELEASE'
// NOTE(review): the two starters above are pinned to 1.2.6.RELEASE under the legacy
// org.springframework.cloud groupId, while the BOM imported below manages the 2.x line
// under com.google.cloud (springCloudGcpVersion = 2.0.9). The explicit versions bypass
// the BOM entirely — confirm this mix is intentional, otherwise drop the explicit
// versions and switch to the com.google.cloud starters so the BOM governs them.

// NOTE(review): log4j 1.2.x is end-of-life with known CVEs (e.g. CVE-2019-17571);
// consider migrating to reload4j or log4j2 via slf4j.
implementation "log4j:log4j:1.2.17"
}

// BOM imports; effective only for dependencies declared without an explicit version.
dependencyManagement {
imports {
mavenBom "com.google.cloud:spring-cloud-gcp-dependencies:${springCloudGcpVersion}"
mavenBom "org.springframework.cloud:spring-cloud-dependencies:${springCloudVersion}"
}
}

test {
useJUnitPlatform()
}

// Module targets Java 17 (preview features enabled above).
sourceCompatibility = JavaVersion.VERSION_17
targetCompatibility = JavaVersion.VERSION_17
Original file line number Diff line number Diff line change
Expand Up @@ -19,16 +19,17 @@ public BigQueryClient(BigQueryConnector bigQueryConnector) {
this.bigQueryConnector = bigQueryConnector;
}

public TableResult queryWithPagination(String query, String date, int limit) {
public List<Map<String, Object>> queryWithPagination(String query, String date, int limit, List<String> fields) {
QueryJobConfiguration queryConfig =
QueryJobConfiguration.newBuilder(query)
.addNamedParameter("updated_at", QueryParameterValue.string(date))
.addNamedParameter("limit_count", QueryParameterValue.int64(limit))
.build();
return queryCall(queryConfig);
TableResult tableResult = queryCall(queryConfig);
return this.filterData(tableResult, fields);
}

public TableResult queryCall(QueryJobConfiguration queryJobConfiguration) {
private TableResult queryCall(QueryJobConfiguration queryJobConfiguration) {
try {
JobId jobId = JobId.of(UUID.randomUUID().toString());
Job queryJob = bigQueryConnector.getBigQuery().create(JobInfo.newBuilder(queryJobConfiguration).setJobId(jobId).build());
Expand All @@ -50,7 +51,7 @@ public TableResult queryCall(QueryJobConfiguration queryJobConfiguration) {
}
}

public List<Map<String, Object>> filterData(TableResult response, List<String> resultFields) {
private List<Map<String, Object>> filterData(TableResult response, List<String> resultFields) {
Schema schema = response.getSchema();
List<Map<String, Object>> list1 = new LinkedList<>();
for (FieldValueList row : response.iterateAll()) {
Expand All @@ -69,14 +70,14 @@ public List<Map<String, Object>> filterData(TableResult response, List<String> r
return list1;
}

public void getResultData(Map<String, Object> map, String result, List<String> resultFields) {
private void getResultData(Map<String, Object> map, String result, List<String> resultFields) {
JsonObject jsonObject = new JsonParser().parse(result).getAsJsonObject();
resultFields.forEach(field -> {
map.put(field, getDataFromJson(jsonObject, field));
});
}

public String getDataFromJson(JsonObject jsonObject, String field) {
private String getDataFromJson(JsonObject jsonObject, String field) {
return (jsonObject.has(field)) ? jsonObject.getAsJsonObject(field).get("input").getAsString() : null;
}
}
18 changes: 0 additions & 18 deletions lahi/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,6 @@ repositories {
mavenCentral()
}

ext {
set('springCloudGcpVersion', "2.0.9")
set('springCloudVersion', "2020.0.3")
}

dependencies {
implementation project(':integration-data')
implementation project(':glific')
Expand All @@ -50,19 +45,6 @@ dependencies {
testImplementation 'org.springframework.boot:spring-boot-starter-test'
implementation 'org.springframework.boot:spring-boot-starter-data-jpa'
implementation 'com.bugsnag:bugsnag-spring:3.6.3'

// https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-gcp-starter
implementation 'org.springframework.cloud:spring-cloud-gcp-starter:1.2.6.RELEASE'
// https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-gcp-starter-bigquery
implementation 'org.springframework.cloud:spring-cloud-gcp-starter-bigquery:1.2.6.RELEASE'

}

dependencyManagement {
imports {
mavenBom "com.google.cloud:spring-cloud-gcp-dependencies:${springCloudGcpVersion}"
mavenBom "org.springframework.cloud:spring-cloud-dependencies:${springCloudVersion}"
}
}

test {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package org.avni_integration_service.lahi.service;

import com.google.cloud.bigquery.TableResult;
import org.apache.log4j.Logger;
import org.avni_integration_service.avni.domain.Subject;
import org.avni_integration_service.glific.bigQuery.BigQueryClient;
Expand Down Expand Up @@ -62,8 +61,7 @@ public StudentService(StudentMappingService studentMappingService,

public void extractDataFromBigdata() {
String fetchtime = getIntegratingEntityStatus().getReadUptoDateTime().toString();
TableResult response = bigQueryClient.queryWithPagination(BULK_FETCH_QUERY, fetchtime, LIMIT);
List<Map<String, Object>> filterData = bigQueryClient.filterData(response, ResultFieldList);
List<Map<String, Object>> filterData = bigQueryClient.queryWithPagination(BULK_FETCH_QUERY, fetchtime, LIMIT, ResultFieldList);
logger.info(String.format("%s Data get after fetching from glific", filterData.size()));
logger.info("Splitting the record and doing next step !!!");
filterData.forEach(this::processing);
Expand Down

0 comments on commit 2295995

Please sign in to comment.