Skip to content

Commit

Permalink
issue #2169 - migrate project code from ValueSet enum to Value enum
Browse files Browse the repository at this point in the history
Signed-off-by: Lee Surprenant <lmsurpre@us.ibm.com>
  • Loading branch information
lmsurpre committed Apr 27, 2021
1 parent a3e7837 commit 95d0533
Show file tree
Hide file tree
Showing 45 changed files with 333 additions and 334 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ public static void main(String[] args) throws Exception {
}

List<ResourceType> base = searchParameter.getBase();
if (base.size() != 1 || base.get(0).getValueAsEnumConstant() == ResourceType.ValueSet.RESOURCE) {
if (base.size() != 1 || base.get(0).getValueAsEnum() == ResourceType.Value.RESOURCE) {
continue; // too complicated to handle this case right now
}

Expand Down Expand Up @@ -92,7 +92,7 @@ private static String getImplicitSystem(SearchParameter searchParameter) {
def.getBinding() != null &&
def.getType().size() == 1 &&
FHIRDefinedType.CODE.getValue().equals(def.getType().get(0).getCode().getValue()) &&
BindingStrength.ValueSet.REQUIRED == def.getBinding().getStrength().getValueAsEnumConstant()) {
BindingStrength.Value.REQUIRED == def.getBinding().getStrength().getValueAsEnum()) {
Canonical valueSetRef = def.getBinding().getValueSet();
ValueSet valueSet = FHIRRegistry.getInstance().getResource(valueSetRef.getValue(), ValueSet.class);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ public static void main(String[] args) throws Exception {
}

List<ResourceType> base = searchParameter.getBase();
if (base.size() != 1 || base.get(0).getValueAsEnumConstant() == ResourceType.ValueSet.RESOURCE) {
if (base.size() != 1 || base.get(0).getValueAsEnum() == ResourceType.Value.RESOURCE) {
continue; // too complicated to handle this case right now
}

Expand Down Expand Up @@ -92,7 +92,7 @@ private static String getImplicitSystem(SearchParameter searchParameter) {
def.getBinding() != null &&
def.getType().size() == 1 &&
FHIRDefinedType.CODE.getValue().equals(def.getType().get(0).getCode().getValue()) &&
BindingStrength.ValueSet.REQUIRED == def.getBinding().getStrength().getValueAsEnumConstant()) {
BindingStrength.Value.REQUIRED == def.getBinding().getStrength().getValueAsEnum()) {
Canonical valueSetRef = def.getBinding().getValueSet();
ValueSet valueSet = FHIRRegistry.getInstance().getResource(valueSetRef.getValue(), ValueSet.class);

Expand Down
6 changes: 3 additions & 3 deletions fhir-bucket/src/main/java/com/ibm/fhir/bucket/app/Main.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* (C) Copyright IBM Corp. 2020
* (C) Copyright IBM Corp. 2020, 2021
*
* SPDX-License-Identifier: Apache-2.0
*/
Expand Down Expand Up @@ -647,8 +647,8 @@ public void bootstrapDb() {
// populate the RESOURCE_TYPES table
try (ITransaction tx = transactionProvider.getTransaction()) {
try {
Set<String> resourceTypes = Arrays.stream(FHIRResourceType.ValueSet.values())
.map(FHIRResourceType.ValueSet::value)
Set<String> resourceTypes = Arrays.stream(FHIRResourceType.Value.values())
.map(FHIRResourceType.Value::value)
.collect(Collectors.toSet());

if (adapter.getTranslator().getType() == DbType.POSTGRESQL) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* (C) Copyright IBM Corp. 2020
* (C) Copyright IBM Corp. 2020, 2021
*
* SPDX-License-Identifier: Apache-2.0
*/
Expand Down Expand Up @@ -72,43 +72,43 @@
@Test(singleThreaded = true)
public class FhirBucketSchemaTest {
private static final Logger logger = Logger.getLogger(FhirBucketSchemaTest.class.getName());

private static final String DB_NAME = "target/derby/bucketDB";

// put everything into the one Derby schema
private static final String ADMIN_SCHEMA_NAME = "APP";
private static final String DATA_SCHEMA_NAME = "APP";

// The database we set up
private DerbyMaster db;

// Connection pool used to work alongside the transaction provider
private PoolConnectionProvider connectionPool;

// Simple transaction service for use outside of JEE
private ITransactionProvider transactionProvider;

// The UUID id we use for the loader id of this test
private final UUID uuid = UUID.randomUUID();

// The id allocated when we register this loader instance
private long loaderInstanceId;


@BeforeClass
public void prepare() throws Exception {
DerbyMaster.dropDatabase(DB_NAME);
db = new DerbyMaster(DB_NAME);

this.connectionPool = new PoolConnectionProvider(new DerbyConnectionProvider(db, null), 10);
this.transactionProvider = new SimpleTransactionProvider(connectionPool);

// Lambdas are quite tasty for this sort of thing
db.runWithAdapter(adapter -> CreateVersionHistory.createTableIfNeeded(ADMIN_SCHEMA_NAME, adapter));

// apply the model we've defined to the new Derby database
VersionHistoryService vhs = createVersionHistoryService();

// Create the schema in a managed transaction
FhirBucketSchema schema = new FhirBucketSchema(DATA_SCHEMA_NAME);
PhysicalDataModel pdm = new PhysicalDataModel();
Expand All @@ -124,7 +124,7 @@ public void prepare() throws Exception {
throw t;
}
}

}

/**
Expand All @@ -133,7 +133,7 @@ public void prepare() throws Exception {
@Test
public void basicBucketSchemaTests() {
assertNotNull(db);

DerbyAdapter adapter = new DerbyAdapter(connectionPool);
try (ITransaction tx = transactionProvider.getTransaction()) {
try {
Expand All @@ -150,7 +150,7 @@ public void basicBucketSchemaTests() {
Long id2 = adapter.runStatement(c2);
assertNotNull(id2);
assertEquals(id2, bucketId);

AddBucketPath c3 = new AddBucketPath("bucket1", "/path/to/dir1/");
Long id3 = adapter.runStatement(c3);
assertNotNull(id3);
Expand All @@ -160,7 +160,7 @@ public void basicBucketSchemaTests() {
AddResourceBundle c4 = new AddResourceBundle(bucketId, "patient1.json", 1024, FileType.JSON, "1234abcd", new Date());
ResourceBundleData id4 = adapter.runStatement(c4);
assertNotNull(id4);

// Try adding the same record again (should be ignored because we didn't change it)
ResourceBundleData id5 = adapter.runStatement(c4);
assertNotNull(id5);
Expand All @@ -173,21 +173,21 @@ public void basicBucketSchemaTests() {
assertNotEquals(id6.getResourceBundleId(), id5.getResourceBundleId());

// Populate the resource types table
Set<String> resourceTypes = Arrays.stream(FHIRResourceType.ValueSet.values())
.map(FHIRResourceType.ValueSet::value)
Set<String> resourceTypes = Arrays.stream(FHIRResourceType.Value.values())
.map(FHIRResourceType.Value::value)
.collect(Collectors.toSet());
MergeResourceTypes c6 = new MergeResourceTypes(resourceTypes);
adapter.runStatement(c6);


} catch (Throwable t) {
// mark the transaction for rollback
tx.setRollbackOnly();
throw t;
}
}
}

@Test(dependsOnMethods = { "basicBucketSchemaTests" })
public void readResourceTypesTest() {
DerbyAdapter adapter = new DerbyAdapter(connectionPool);
Expand All @@ -197,24 +197,24 @@ public void readResourceTypesTest() {
List<ResourceTypeRec> resourceTypes = adapter.runStatement(c1);

// Check against our reference set of resources
Set<String> reference = Arrays.stream(FHIRResourceType.ValueSet.values())
.map(FHIRResourceType.ValueSet::value)
Set<String> reference = Arrays.stream(FHIRResourceType.Value.values())
.map(FHIRResourceType.Value::value)
.collect(Collectors.toSet());

assertTrue(reference.size() > 0);
assertEquals(resourceTypes.size(), reference.size());
assertEquals(resourceTypes.size(), reference.size());
for (ResourceTypeRec rec: resourceTypes) {
assertTrue(reference.contains(rec.getResourceType()));
}

} catch (Throwable t) {
// mark the transaction for rollback
tx.setRollbackOnly();
throw t;
}
}
}

@Test(dependsOnMethods = { "readResourceTypesTest" })
public void resourceBundlesTest() {
DerbyAdapter adapter = new DerbyAdapter(connectionPool);
Expand All @@ -235,7 +235,7 @@ public void resourceBundlesTest() {
}
}
}

@Test(dependsOnMethods = { "resourceBundlesTest" })
public void allocateJobsTest() {
DerbyAdapter adapter = new DerbyAdapter(connectionPool);
Expand All @@ -245,7 +245,7 @@ public void allocateJobsTest() {
List<BucketLoaderJob> jobList = new ArrayList<>();
AllocateJobs c2 = new AllocateJobs(DATA_SCHEMA_NAME, jobList, FileType.JSON, loaderInstanceId, 2, bucketPaths);
adapter.runStatement(c2);

// check we got the jobs we expected
assertEquals(jobList.size(), 2);
assertEquals(jobList.get(0).getObjectKey(), "/path/to/dir1/patient1.json");
Expand All @@ -269,7 +269,7 @@ public void allocateJobsTest() {
AllocateJobs c5 = new AllocateJobs(DATA_SCHEMA_NAME, jobList, FileType.JSON, loaderInstanceId, 3, bucketPaths);
adapter.runStatement(c5);
assertEquals(jobList.size(), 3);

MarkBundleDone c6 = new MarkBundleDone(jobList.get(0).getResourceBundleLoadId(), 0, 1);
adapter.runStatement(c6);

Expand All @@ -281,7 +281,7 @@ public void allocateJobsTest() {
jobList.clear();
adapter.runStatement(c5);
assertEquals(jobList.size(), 3);

// With a job, we have a resource_bundle_loads record, so we can create some resources
Map<String, Integer> resourceTypeMap = new HashMap<>();
List<ResourceTypeRec> resourceTypes = adapter.runStatement(new ResourceTypesReader());
Expand All @@ -305,22 +305,22 @@ public void allocateJobsTest() {
Integer lastLine = adapter.runStatement(c9);
assertNotNull(lastLine);
assertEquals(lastLine.intValue(), lineNumber);


// Add some resource bundle errors
// int lineNumber, String errorText, Integer responseTimeMs, Integer httpStatusCode, String httpStatusText
List<ResourceBundleError> errors = new ArrayList<>();
errors.add(new ResourceBundleError(0, "error1", null, null, null));
errors.add(new ResourceBundleError(1, "error2", 60000, 400, "timeout"));
AddResourceBundleErrors c10 = new AddResourceBundleErrors(job.getResourceBundleLoadId(), errors, 10);
adapter.runStatement(c10);

// Fetch some ResourceRefs for a line we know we have loaded
GetResourceRefsForBundleLine c11 = new GetResourceRefsForBundleLine(job.getResourceBundleId(), job.getVersion(), lineNumber);
List<ResourceRef> refs = adapter.runStatement(c11);
assertNotNull(refs);
assertEquals(refs.size(), 1);

// And an empty list
GetResourceRefsForBundleLine c12 = new GetResourceRefsForBundleLine(job.getResourceBundleId(), job.getVersion(), lineNumber+1);
refs = adapter.runStatement(c12);
Expand All @@ -347,18 +347,18 @@ public void allocateJobsTest() {
}
}
}

@AfterClass
public void tearDown() throws Exception {
    // Dispose of the embedded Derby instance created in prepare(),
    // guarding against the case where setup failed before db was assigned.
    if (null != db) {
        db.close();
    }
}

/**
* Create the version history table and a simple service which is used to
* access information from it.
*
*
* @throws SQLException
*/
protected VersionHistoryService createVersionHistoryService() throws SQLException {
Expand All @@ -377,7 +377,7 @@ protected VersionHistoryService createVersionHistoryService() throws SQLExceptio
throw x;
}
}

// Current version history for the data schema.
VersionHistoryService vhs = new VersionHistoryService(ADMIN_SCHEMA_NAME, DATA_SCHEMA_NAME);
vhs.setTransactionProvider(transactionProvider);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -533,13 +533,13 @@ protected void handleCode(Code.Builder code) throws IllegalAccessException, Ille
// cpb-15: If kind = capability, implementation must be absent, software must be present
if (code instanceof CapabilityStatementKind.Builder) {
// use 'instance' to avoid the other special cases
enumConstant = CapabilityStatementKind.ValueSet.INSTANCE;
enumConstant = CapabilityStatementKind.Value.INSTANCE;
}
// trd-3: A named event requires a name, a periodic event requires timing, and a data event requires data
if (code instanceof TriggerType.Builder) {
if (enumConstant == TriggerType.ValueSet.PERIODIC) {
if (enumConstant == TriggerType.Value.PERIODIC) {
// trd-1 has prevented us from including a timing element, but we're good with any other type
enumConstant = TriggerType.ValueSet.DATA_MODIFIED;
enumConstant = TriggerType.Value.DATA_MODIFIED;
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
/*
* (C) Copyright IBM Corp. 2019, 2021
*
* SPDX-License-Identifier: Apache-2.0
*/

package com.ibm.fhir.examples;

import static com.ibm.fhir.model.type.String.string;
Expand Down Expand Up @@ -27,21 +33,21 @@

public class LargeValueSetCreator {
private static final Uri TEST_URI = Uri.of("http://ibm.com/fhir/valueset/test");

private final PodamFactory podam;

/**
 * Constructs the creator with a PODAM factory used to manufacture
 * random values for the generated ValueSet expansion entries.
 *
 * @throws IOException declared by the signature; nothing in this
 *         constructor visibly performs I/O — presumably kept for
 *         subclass/API compatibility (NOTE(review): confirm)
 */
public LargeValueSetCreator() throws IOException {
super();
podam = new PodamFactoryImpl();
}

public ValueSet createValueSet() throws Exception {
final ValueSet.Builder vsBuilder = ValueSet.builder().status(PublicationStatus.DRAFT);
final ValueSet.Expansion.Builder expansionBuilder = ValueSet.Expansion.builder().timestamp(DateTime.now());
final ValueSet.Expansion.Contains.Builder template = ValueSet.Expansion.Contains.builder()
.system(TEST_URI)
.version(string("1"));

for (int i=0; i < 100_000; i++) {
expansionBuilder.contains(template
.code(Code.of(podam.manufacturePojo(String.class)))
Expand Down Expand Up @@ -69,24 +75,24 @@ public Set<String> convertToHashSet(ValueSet vs) throws Exception {
public static void main(String[] args) throws Exception {
LargeValueSetCreator creator = new LargeValueSetCreator();
ValueSet vs = creator.createValueSet();

Path json = Paths.get("ValueSet-large.json");
try (BufferedWriter writer = Files.newBufferedWriter(json, StandardCharsets.UTF_8)) {
FHIRGenerator.generator(Format.JSON).generate(vs, writer);
}

Path xml = Paths.get("ValueSet-large.xml");
try (BufferedWriter writer = Files.newBufferedWriter(xml, StandardCharsets.UTF_8)) {
FHIRGenerator.generator(Format.XML).generate(vs, writer);
}

Path txt = Paths.get("ValueSet-large.txt");
try (BufferedWriter writer = Files.newBufferedWriter(txt, StandardCharsets.UTF_8)) {
for (Contains concept : vs.getExpansion().getContains()) {
writer.write(concept.getSystem().getValue() + "|" + concept.getCode().getValue() + "\n");
}
}

Set<String> set = creator.convertToHashSet(vs);
Path serializedHashSet = Paths.get("ValueSet-large-HashSet.ser");
try (FileOutputStream os = new FileOutputStream(serializedHashSet.toFile())) {
Expand Down
Loading

0 comments on commit 95d0533

Please sign in to comment.