diff --git a/.gitignore b/.gitignore
index 498334d5..73adb14b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,4 @@ build/
 .idea/
 *.log
 *.iml
+.run/
diff --git a/.run/Commander Acceptance Tests.run.xml b/.run/Commander Acceptance Tests.run.xml
deleted file mode 100644
index a927ebeb..00000000
--- a/.run/Commander Acceptance Tests.run.xml
+++ /dev/null
@@ -1,20 +0,0 @@
- - - -
\ No newline at end of file
diff --git a/.run/Feature_ DataDictionary 1.7 - Local Metadata.run.xml b/.run/Feature_ DataDictionary 1.7 - Local Metadata.run.xml
deleted file mode 100644
index f4e0e2c3..00000000
--- a/.run/Feature_ DataDictionary 1.7 - Local Metadata.run.xml
+++ /dev/null
@@ -1,21 +0,0 @@
- - - -
\ No newline at end of file
diff --git a/doc/Docker.md b/doc/Docker.md
index 70c10bf5..e0f34698 100644
--- a/doc/Docker.md
+++ b/doc/Docker.md
@@ -1,11 +1,39 @@
 # RESO Commander and Docker
-Both the command-line and automated testing tools can be run in a Docker container.
+The RESO automated testing tools and the Commander utilities can both be run in Docker containers.
+The containers are slightly different in each case.
 
-A [Dockerfile](./Dockerfile) has been provided to dockerize the application.
-This can be used for CI/CD environments such as Jenkins or TravisCI. The following command will build an image for you:
+### RESO Automated Testing Tools
+A [GradleDockerfile](../GradleDockerfile) has been provided to prepare a Gradle
+environment for the Commander. The container builds itself from the main branch of the source code, so you don't need
+the entire repo checked out locally, just the file.
+This can also be used in CI/CD environments such as Jenkins or TravisCI.
+
+Run the RESO Certification tests in a Docker container locally by issuing one of the following commands.
+Docker must be running on your local machine.
+
+One way to do this is to build the container first and then run it:
+
+```docker build --file GradleDockerfile -t web-api-commander-gradle .```
+
+Once the container is built, you can run the Gradle commands as usual:
+```docker run -it web-api-commander-gradle testWebApiCore_2_0_0 -DpathToRESOScript=/home/gradle/project/resoscripts/your.resoscript -DshowResponses=true```
+
+You can also build the container on the fly:
+
+```docker run --rm -it -v "$PWD":/home/gradle/project -v /path/to/your/resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project $(docker build -f GradleDockerfile -q .) testWebApiCore_2_0_0 -DpathToRESOScript=/home/gradle/project/resoscripts/your.resoscript -DshowResponses=true```
+
+Note that this will create a directory in your home directory for the project. Build artifacts and the log will be placed in that directory,
+which is also where you will end up after runtime.
+
+You may need to adjust the path separators if you are using Windows.
+
+
+### Commander Utilities
+A [Dockerfile](../Dockerfile) has also been provided to Dockerize the application for the Commander utilities.
+
+To run the Commander utilities, use the following commands:
 
-### Commander Features Other Than Automated Web API Testing
 
 ```
 $ docker build -t web-api-commander .
 ```
@@ -21,28 +49,3 @@ If you have input files you may need to mount your filesystem into the docker co
 ```
 $ docker run -it -v $PWD:/app web-api-commander --validateMetadata --inputFile 
 ```
-
-### Automated Web API Testing
-
-You may also run the tests in a Docker container locally by issuing one of the following commands.
-Docker must be running on your local machine.
- -#### MacOS or Linux All-In-One Commands -``` -cd ~; \ -rm -rf commander-tmp/; \ -mkdir commander-tmp; \ -cd commander-tmp; \ -git clone https://github.com/RESOStandards/web-api-commander.git; \ -cd web-api-commander; \ -docker run --rm -u gradle -v "$PWD":/home/gradle/project -v /path/to/your/resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project gradle gradle testWebAPIServer_2_0_0_Core -DpathToRESOScript=/home/gradle/project/resoscripts/your.web-api-server.core.2.0.0.resoscript -DshowResponses=true -``` - -Note that this will create a directory in your home directory for the project, and build artifacts and the log will be placed in that directory, -which is also where you will end up after runtime. - - -#### Windows All-In-One WIP -``` -cd C:\;mkdir commander-tmp;cd commander-tmp;git clone https://github.com/RESOStandards/web-api-commander.git;cd web-api-commander; docker run --rm -u gradle -v C:\current\path\web-api-commander:/home/gradle/project -v C:\path\to\your\resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project gradle gradle testWebAPIServer_2_0_0_Core -DpathToRESOScript=/home/gradle/project/resoscripts/your.web-api-server.core.2.0.0.resoscript -DshowResponses=true -``` diff --git a/sample-web-api-server.core.1.0.2.resoscript b/sample-web-api-server.core.2.0.0.resoscript similarity index 97% rename from sample-web-api-server.core.1.0.2.resoscript rename to sample-web-api-server.core.2.0.0.resoscript index 8782df4b..cf266846 100644 --- a/sample-web-api-server.core.1.0.2.resoscript +++ b/sample-web-api-server.core.2.0.0.resoscript @@ -1,7 +1,7 @@ ") - .append("") - .append("") + .append(standardField.getLookupName()).append("\" -->") + .append("") + .append("") .append(""); } return content.toString(); @@ -269,11 +269,11 @@ private String buildSingleEnumTypeMarkup(ReferenceStandardField standardField) { private String buildMultipleEnumTypeMarkup(ReferenceStandardField standardField) { StringBuilder content = new StringBuilder(); - if (getEnumerations().get(standardField.getLookupStandardName()) != null) { - content.append(""); + if (getEnumerations().get(standardField.getLookupName()) != null) { + content.append(""); //iterate through each of the lookup values and generate their edm:EnumType content - getEnumerations().get(standardField.getLookupStandardName()).forEach(lookup -> { + getEnumerations().get(standardField.getLookupName()).forEach(lookup -> { content .append("") .append(EDMXTemplates.buildDisplayNameAnnotation(lookup.getLookupDisplayName())) @@ -285,8 +285,8 @@ private String buildMultipleEnumTypeMarkup(ReferenceStandardField standardField) content.append(""); } else { content - .append("") - .append("") + .append("") + .append("") .append(EDMXTemplates.buildDDWikiUrlAnnotation(standardField.getWikiPageUrl())) .append(EDMXTemplates.buildDescriptionAnnotation(standardField.getDefinition())) .append("") @@ -394,7 +394,7 @@ public static String buildEnumTypeMultiMember(ReferenceStandardField field) { if (!field.getLookup().toLowerCase().contains("lookups")) return EMPTY_STRING; return "" + "" + + "\" Type=\"Collection(" + RESO_NAMESPACE + ".enums." 
+ field.getLookupName() + ")\">" + buildDisplayNameAnnotation(field.getDisplayName()) + buildDDWikiUrlAnnotation(field.getWikiPageUrl()) + buildDescriptionAnnotation(field.getDefinition()) diff --git a/src/main/java/org/reso/certification/containers/WebAPITestContainer.java b/src/main/java/org/reso/certification/containers/WebAPITestContainer.java index e7a24d0a..c05a54ca 100644 --- a/src/main/java/org/reso/certification/containers/WebAPITestContainer.java +++ b/src/main/java/org/reso/certification/containers/WebAPITestContainer.java @@ -7,6 +7,7 @@ import com.networknt.schema.JsonSchemaFactory; import com.networknt.schema.SpecVersion; import com.networknt.schema.ValidationMessage; +import io.cucumber.java.bs.A; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -22,6 +23,7 @@ import org.apache.olingo.commons.api.edm.Edm; import org.apache.olingo.commons.api.edm.provider.CsdlProperty; import org.apache.olingo.commons.api.format.ContentType; +import org.reso.certification.codegen.DDCacheProcessor; import org.reso.commander.Commander; import org.reso.commander.common.DataDictionaryMetadata; import org.reso.commander.common.TestUtils; @@ -82,6 +84,7 @@ public final class WebAPITestContainer implements TestContainer { private final AtomicBoolean isDataSystemValid = new AtomicBoolean(false); private final AtomicReference> schemaValidationErrors = new AtomicReference<>(); private final AtomicBoolean isUsingMetadataFile = new AtomicBoolean(false); + private final AtomicBoolean useEdmEnabledClient = new AtomicBoolean(true); // request instance variables - these get resetMarkupBuffer with every request //TODO: refactor underlying response properties to use a ODataTransportWrapper (or any TransportWrapper) @@ -101,6 +104,8 @@ public final class WebAPITestContainer implements TestContainer { private final AtomicReference> clientEntitySetRequest = new AtomicReference<>(); private final AtomicReference> clientEntitySetResponse = new AtomicReference<>(); private final AtomicReference clientEntitySet = new AtomicReference<>(); + private final AtomicReference ddCacheProcessor = new AtomicReference<>(); + private static final String WEB_API_CORE_REFERENCE_REQUESTS = "reference-web-api-core-requests.xml"; //singleton variables @@ -111,9 +116,11 @@ public final class WebAPITestContainer implements TestContainer { */ public void initialize() { if (getIsInitialized()) return; - Commander.Builder builder = new Commander.Builder().useEdmEnabledClient(true); - if (!isUsingMetadataFile.get()) { + LOG.info("Using Edm Enabled Client: " + useEdmEnabledClient.get()); + Commander.Builder builder = new Commander.Builder().useEdmEnabledClient(useEdmEnabledClient.get()); + + if (getSettings() != null) { //overwrite any requests loaded with the reference queries //TODO: make the reference requests something that can be passed in during initialization getSettings().setRequests(loadFromRESOScript(new File(Objects.requireNonNull( @@ -208,19 +215,31 @@ public Map getFieldMap(String entityTypeName) { */ private void buildFieldMap() { try { - if (fieldMap.get() == null) fieldMap.set(new LinkedHashMap<>()); + if (fieldMap.get() == null) { + fieldMap.set(new LinkedHashMap<>()); + } LOG.debug("Building Field Map..."); - assertNotNull(getDefaultErrorMessage("no XML Metadata found in the container!"), fetchXMLMetadata()); - assertNotNull(getDefaultErrorMessage("no Entity Data Model (edm) found in the container!"), getEdm()); + //if settings exist + if 
(getXMLMetadata() == null) { + if (getSettings() != null) { + LOG.info("No XML Metadata found in the container but settings exist. Trying to fetch it from the server..."); + assertNotNull(getDefaultErrorMessage("No XML Metadata was fetched from the server!"), fetchXMLMetadata()); + assertNotNull(getDefaultErrorMessage("No Entity Data Model (edm) found in the container!"), getEdm()); + LOG.info("Metadata fetched!"); + } else { + LOG.debug("Metadata does not exist in the container!"); + return; + } + } //build a map of all of the discovered fields on the server for the given resource by field name //TODO: add multiple Data Dictionary version support DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCES.forEach(resourceName -> { List csdlProperties = null; try { - csdlProperties = TestUtils.findEntityTypesForEntityTypeName(getEdm(), fetchXMLMetadata(), resourceName); + csdlProperties = TestUtils.findEntityTypesForEntityTypeName(getEdm(), getXMLMetadata(), resourceName); } catch (Exception e) { LOG.error(e); } @@ -341,7 +360,7 @@ public String getExpandField() { */ public Edm getEdm() { if (edm.get() == null) { - assertNotNull(getDefaultErrorMessage("no XML response data found, cannot return Edm!"), xmlResponseData.get()); + assertNotNull(getDefaultErrorMessage("No XML response data found, cannot return Edm!"), xmlResponseData.get()); edm.set(Commander.deserializeEdm(xmlResponseData.get(), getCommander().getClient())); } return edm.get(); @@ -363,7 +382,7 @@ public void setEdm(Edm edm) { * @implNote the data in this item are cached in the test container once fetched */ public XMLMetadata fetchXMLMetadata() throws Exception { - if (xmlMetadata.get() == null) { + if (getSettings() != null && xmlMetadata.get() == null) { try { Request request = getSettings().getRequest(Request.WELL_KNOWN.METADATA_ENDPOINT); setRequest(request); @@ -414,6 +433,7 @@ public void setXMLMetadata(XMLMetadata xmlMetadata) { * @return the local Commander instance */ public Commander getCommander() { + if (commander.get() == null) initialize(); return commander.get(); } @@ -874,6 +894,13 @@ public void setIsInitialized(boolean value) { isInitialized.set(value); } + public DDCacheProcessor getDDCacheProcessor() { + if (ddCacheProcessor.get() == null) { + ddCacheProcessor.set(TestUtils.buildDataDictionaryCache()); + } + return ddCacheProcessor.get(); + } + public static final class ODATA_QUERY_PARAMS { private static final String format = DOLLAR_SIGN + "%s"; diff --git a/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/additional-tests/lookup-resource-tests.feature b/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/additional-tests/lookup-resource-tests.feature new file mode 100644 index 00000000..cac48656 --- /dev/null +++ b/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/additional-tests/lookup-resource-tests.feature @@ -0,0 +1,43 @@ +# This feature implements the change proposal outlined in +# section 2.2 of the RESO Data Dictionary 1.7 specification. 
+#
+# The tests for the Lookup resource model are in ../resources/lookup.feature
+#
+# See: https://github.com/RESOStandards/reso-transport-specifications/blob/cd8bbd2038955e5380598d509fa2245bc98cbfdd/DATA-DICTIONARY.md#lookup-resource
+Feature: Lookup Acceptance Tests (RCP-032)
+
+  Background:
+    When a RESOScript file is provided
+    Then Client Settings and Parameters can be read from the RESOScript
+    And a test container was successfully created from the given RESOScript file
+    And the test container uses an Authorization Code or Client Credentials for authentication
+    And valid metadata were retrieved from the server
+    When the "Lookup" Resource exists in the metadata
+    Then valid data is replicated from the "Lookup" Resource
+
+  @dd-1.7 @rcp-032 @lookup-resource
+  Scenario: Ensure That Required Lookup Resource Fields Are Present in Server Metadata
+    Given that metadata have been retrieved from the server and validated
+    When the "Lookup" Resource exists in the metadata
+    Then "Lookup" Resource data and metadata MUST contain the following fields
+      | LookupKey |
+      | LookupName |
+      | LookupValue |
+      | ModificationTimestamp |
+
+
+  #
+  #
+  #
+  #
+  #
+  #
+  #
+  #
+  #
+  @dd-1.7 @rcp-032 @lookup-resource
+  Scenario: Check Required Annotations and LookupName Data
+    Given that metadata have been retrieved from the server and validated
+    When the "Lookup" Resource exists in the metadata
+    Then RESO Lookups using String or String Collection data types MUST have the annotation "RESO.OData.Metadata.LookupName"
+    And fields with the annotation term "RESO.OData.Metadata.LookupName" MUST have a LookupName in the Lookup Resource
diff --git a/src/main/java/org/reso/certification/features/payloads/data-availability.feature b/src/main/java/org/reso/certification/features/payloads/data-availability.feature
index ff5a6954..a0b8f674 100644
--- a/src/main/java/org/reso/certification/features/payloads/data-availability.feature
+++ b/src/main/java/org/reso/certification/features/payloads/data-availability.feature
@@ -22,14 +22,14 @@ Feature: Payloads Sampling (Web API)
 
   @standard-resource-sampling @dd-1.7 @payloads-sampling
   Scenario: Standard Resource Sampling
-    Given that valid metadata have been requested from the server
+    Given that metadata have been retrieved from the server and validated
     And the metadata contains RESO Standard Resources
     And "payload-samples" has been created in the build directory
     Then up to 100000 records are sampled from each resource with payload samples stored in "payload-samples"
 
   @local-resource-sampling @dd-1.7 @payloads-sampling
   Scenario: Non Standard Resource Sampling - Request Data from Each Server Resource
-    Given that valid metadata have been requested from the server
+    Given that metadata have been retrieved from the server and validated
     And the metadata contains local resources
     Then up to 100000 records are sampled from each local resource
 
diff --git a/src/main/java/org/reso/certification/stepdefs/DataAvailability.java b/src/main/java/org/reso/certification/stepdefs/DataAvailability.java
index f780be41..dc3350ed 100644
--- a/src/main/java/org/reso/certification/stepdefs/DataAvailability.java
+++ b/src/main/java/org/reso/certification/stepdefs/DataAvailability.java
@@ -22,7 +22,6 @@ import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind;
 import org.apache.olingo.commons.api.format.ContentType;
 import org.reso.certification.codegen.DDCacheProcessor;
-import org.reso.certification.codegen.DataDictionaryCodeGenerator;
 import
org.reso.certification.containers.WebAPITestContainer; import org.reso.commander.common.DataDictionaryMetadata; import org.reso.commander.common.Utils; @@ -63,34 +62,44 @@ public class DataAvailability { private static final String BUILD_DIRECTORY_PATH = "build"; private static final String CERTIFICATION_PATH = BUILD_DIRECTORY_PATH + File.separator + "certification"; - private static final String DATA_AVAILABILITY_REPORT_PATH = BUILD_DIRECTORY_PATH + File.separator + "certification" + File.separator + "results"; + public static final String CERTIFICATION_RESULTS_PATH = BUILD_DIRECTORY_PATH + File.separator + "certification" + File.separator + "results"; private static final String SAMPLES_DIRECTORY_TEMPLATE = BUILD_DIRECTORY_PATH + File.separator + "%s"; - private static final String PATH_TO_RESOSCRIPT_KEY = "pathToRESOScript"; - private static final String USE_STRICT_MODE_ARG = "strict"; - private static final String A_B_TESTING_MODE_ARG = "abTesting"; - //strict mode is enabled by default - private final boolean strictMode = - System.getProperty(USE_STRICT_MODE_ARG) == null || Boolean.parseBoolean(System.getProperty(USE_STRICT_MODE_ARG)); + private static final String PATH_TO_RESOSCRIPT_ARG = "pathToRESOScript"; - //abTesting mode is disabled by default - private final boolean abTestingMode = - System.getProperty(A_B_TESTING_MODE_ARG) != null && Boolean.parseBoolean(System.getProperty(A_B_TESTING_MODE_ARG)); - - //TODO: read from params - final String ORIGINATING_SYSTEM_FIELD = "OriginatingSystemName"; - final String ORIGINATING_SYSTEM_FIELD_VALUE = EMPTY_STRING; + // strict mode is enabled by default + private static final String USE_STRICT_MODE_ARG = "strict"; + private final boolean STRICT_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(USE_STRICT_MODE_ARG, "true")); - final boolean USE_ORIGINATING_SYSTEM_QUERY = ORIGINATING_SYSTEM_FIELD.length() > 0 && ORIGINATING_SYSTEM_FIELD_VALUE.length() > 0; - final String ORIGINATING_SYSTEM_QUERY = ORIGINATING_SYSTEM_FIELD + " eq '" + ORIGINATING_SYSTEM_FIELD_VALUE + "'"; - final String REQUEST_URI_TEMPLATE = "?$filter=" - + (USE_ORIGINATING_SYSTEM_QUERY ? 
ORIGINATING_SYSTEM_QUERY + " and " : EMPTY_STRING)
+  // abTesting mode is disabled by default
+  private static final String A_B_TESTING_MODE_ARG = "abTesting";
+  private static final boolean AB_TESTING_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(A_B_TESTING_MODE_ARG, "false"));
+
+  // OriginatingSystemName query
+  private static final String ORIGINATING_SYSTEM_NAME_FIELD_ARG = "OriginatingSystemName";
+  private static final String ORIGINATING_SYSTEM_NAME_FIELD_VALUE = System.getProperty(ORIGINATING_SYSTEM_NAME_FIELD_ARG, EMPTY_STRING);
+  private static final String ORIGINATING_SYSTEM_NAME_QUERY = ORIGINATING_SYSTEM_NAME_FIELD_ARG + " eq '" + ORIGINATING_SYSTEM_NAME_FIELD_VALUE + "'";
+  private static final boolean USE_ORIGINATING_SYSTEM_NAME_QUERY = ORIGINATING_SYSTEM_NAME_FIELD_VALUE.length() > 0;
+
+  // OriginatingSystemID query
+  private static final String ORIGINATING_SYSTEM_ID_FIELD_ARG = "OriginatingSystemID";
+  private static final String ORIGINATING_SYSTEM_ID_FIELD_VALUE = System.getProperty(ORIGINATING_SYSTEM_ID_FIELD_ARG, EMPTY_STRING);
+  private static final String ORIGINATING_SYSTEM_ID_QUERY = ORIGINATING_SYSTEM_ID_FIELD_ARG + " eq '" + ORIGINATING_SYSTEM_ID_FIELD_VALUE + "'";
+  private static final boolean USE_ORIGINATING_ID_NAME_QUERY = ORIGINATING_SYSTEM_ID_FIELD_VALUE.length() > 0;
+
+  // Query Templates - prefer OriginatingSystemID if both are passed
+  private static final String SAMPLING_REQUEST_URI_TEMPLATE = "?$filter="
+      + (USE_ORIGINATING_ID_NAME_QUERY ? ORIGINATING_SYSTEM_ID_QUERY + " and "
+      : (USE_ORIGINATING_SYSTEM_NAME_QUERY ? ORIGINATING_SYSTEM_NAME_QUERY + " and " : EMPTY_STRING))
       + "%s" + " lt %s&$orderby=%s desc&$top=" + TOP_COUNT;
-  final String COUNT_REQUEST_URI_TEMPLATE = "?" + (USE_ORIGINATING_SYSTEM_QUERY ? "$filter=" + ORIGINATING_SYSTEM_QUERY + "&": EMPTY_STRING) + "$count=true";
+  private static final String COUNT_REQUEST_URI_TEMPLATE = "?"
+      + (USE_ORIGINATING_ID_NAME_QUERY ? "$filter=" + ORIGINATING_SYSTEM_ID_QUERY + "&"
+      : (USE_ORIGINATING_SYSTEM_NAME_QUERY ? 
"$filter=" + ORIGINATING_SYSTEM_NAME_QUERY + "&" : EMPTY_STRING)) + + "$count=true"; - //TODO: get this from the parameters - private final static boolean DEBUG = false; + private static final String DEBUG_ARG = "debug"; + private static final boolean DEBUG_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(DEBUG_ARG, "false")); private static Scenario scenario; @@ -105,36 +114,30 @@ public class DataAvailability { private final static AtomicReference>> resourcePayloadSampleMap = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>())); - private final static AtomicReference>> resourceFieldMap = - new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>())); - private final static AtomicReference> resourceCounts = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>())); - //resourceName, fieldName, lookupName, lookupValue, tally private final static AtomicReference> resourceFieldLookupTallies = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>())); - private final static AtomicReference processor = new AtomicReference<>(); - @Inject public DataAvailability(WebAPITestContainer c) { if (container.get() == null) { container.set(c); - LOG.info("Using strict mode: " + strictMode); + LOG.info("Using strict mode: " + STRICT_MODE_ENABLED); } } @Before public void beforeStep(Scenario scenario) { - final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_KEY, null); + final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_ARG, null); if (pathToRESOScript == null) return; DataAvailability.scenario = scenario; if (!container.get().getIsInitialized()) { - container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_KEY)))); + container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG)))); container.get().initialize(); } } @@ -153,7 +156,7 @@ public void createDataAvailabilityReport(Map> resour GsonBuilder gsonBuilder = new GsonBuilder().setPrettyPrinting(); gsonBuilder.registerTypeAdapter(PayloadSampleReport.class, payloadSampleReport); - Utils.createFile(DATA_AVAILABILITY_REPORT_PATH, reportName, gsonBuilder.create().toJson(payloadSampleReport)); + Utils.createFile(CERTIFICATION_RESULTS_PATH, reportName, gsonBuilder.create().toJson(payloadSampleReport)); } @@ -182,7 +185,7 @@ private String buildODataTimestampRequestUriString(String resourceName, String t .newURIBuilder(container.get().getServiceRoot()) .appendEntitySetSegment(resourceName).build().toString(); - requestUri += String.format(REQUEST_URI_TEMPLATE, timestampField, + requestUri += String.format(SAMPLING_REQUEST_URI_TEMPLATE, timestampField, lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField); return requestUri; @@ -254,8 +257,8 @@ List fetchAndProcessRecords(String resourceName, int targetRecord final AtomicReference timestampField = new AtomicReference<>(); final AtomicBoolean hasRecords = new AtomicBoolean(true); final AtomicReference payloadSample = new AtomicReference<>(); - final AtomicReference> payloadSamples = - new AtomicReference<>(Collections.synchronizedList(new LinkedList<>())); + final AtomicReference> payloadSamples = + new AtomicReference<>(Collections.synchronizedSet(new LinkedHashSet<>())); boolean hasStandardTimestampField = false; String requestUri; @@ -279,7 +282,12 @@ List fetchAndProcessRecords(String resourceName, int targetRecord if 
(entityType.get().getProperty(propertyName).getType().getFullQualifiedName().getFullQualifiedNameAsString() .contentEquals(EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName().getFullQualifiedNameAsString())) { scenario.log("Found Edm.DateTimeOffset field " + propertyName + " in the " + resourceName + " resource!\n"); - timestampCandidateFields.add(propertyName); + + if (propertyName.toLowerCase(Locale.ROOT).contains("modificationtimestamp")) { + timestampCandidateFields.add(0, propertyName); + } else { + timestampCandidateFields.add (propertyName); + } } } catch (Exception ex) { LOG.error(ex); @@ -295,14 +303,18 @@ List fetchAndProcessRecords(String resourceName, int targetRecord scenario.log("Keys found: " + keyFields.stream().map(EdmKeyPropertyRef::getName).collect(Collectors.joining(", "))); //loop and fetch records as long as items are available and we haven't reached our target count yet + //TODO: switch to OData Fetch API while (hasRecords.get() && recordsProcessed < targetRecordCount) { + if (hasStandardTimestampField) { timestampField.set(MODIFICATION_TIMESTAMP_FIELD); } else if (timestampCandidateFields.size() > 0 && lastTimestampCandidateIndex < timestampCandidateFields.size()) { timestampField.set(timestampCandidateFields.get(lastTimestampCandidateIndex++)); } else { - scenario.log(getDefaultErrorMessage("Could not find a suitable timestamp field in the " - + resourceName + " resource to sample with...")); + if (recordsProcessed == 0) { + scenario.log(getDefaultErrorMessage("Could not find a suitable timestamp field in the " + + resourceName + " resource to sample with...")); + } //skip this resource since no suitable fields were found break; @@ -319,7 +331,7 @@ List fetchAndProcessRecords(String resourceName, int targetRecord // retries. sometimes requests can time out and fail and we don't want to stop sampling // immediately, but retry a couple of times before we bail - if (recordsProcessed == 0 && transportWrapper.get().getResponseData() == null) { + if (recordsProcessed == 0 || transportWrapper.get().getResponseData() == null) { //only count retries if we're constantly making requests and not getting anything numTimestampRetries += 1; } else { @@ -383,7 +395,7 @@ List fetchAndProcessRecords(String resourceName, int targetRecord || property.isGeospatial() && property.asGeospatial() != null) ? 
property.getValue().toString() : null; - if (DEBUG) { + if (DEBUG_MODE_ENABLED) { if (property.isCollection() && property.asCollection().size() > 0) { LOG.info("Found Collection for field: " + property.getName() + ", value: " + property.asCollection()); } @@ -401,12 +413,12 @@ List fetchAndProcessRecords(String resourceName, int targetRecord } } - //if the field is a lookup field, collect the frequency of each unique set of enumerations for the field - if (property.isEnum() || (processor.get().getStandardFieldCache().containsKey(resourceName) - && processor.get().getStandardFieldCache().get(resourceName).containsKey(property.getName()))) { - ReferenceStandardField standardField = processor.get().getStandardFieldCache().get(resourceName).get(property.getName()); - //if the field is declared as an OData Edm.EnumType or String List, Single or Multii in the DD, then collect its value + if (property.isEnum() || (container.get().getDDCacheProcessor().getStandardFieldCache().containsKey(resourceName) + && container.get().getDDCacheProcessor().getStandardFieldCache().get(resourceName).containsKey(property.getName()))) { + ReferenceStandardField standardField = container.get().getDDCacheProcessor().getStandardFieldCache().get(resourceName).get(property.getName()); + + //if the field is declared as an OData Edm.EnumType or String List, Single or Multi in the DD, then collect its value if (property.isEnum() || (standardField.getSimpleDataType().contentEquals(STRING_LIST_SINGLE) || standardField.getSimpleDataType().contentEquals(STRING_LIST_MULTI))) { @@ -443,7 +455,7 @@ List fetchAndProcessRecords(String resourceName, int targetRecord } //turn off hashing when DEBUG is true - if (!DEBUG && value != null) { + if (!DEBUG_MODE_ENABLED && value != null) { if (!(property.getName().contentEquals(timestampField.get()) || property.getName().equals(POSTAL_CODE_FIELD) || keyFields.stream().reduce(true, (acc, f) -> acc && f.getName().contentEquals(property.getName()), Boolean::logicalAnd))) { @@ -469,7 +481,7 @@ List fetchAndProcessRecords(String resourceName, int targetRecord payloadSample.get().setResponseTimeMillis(transportWrapper.get().getElapsedTimeMillis()); - if (abTestingMode && encodedResultsDirectoryName != null) { + if (AB_TESTING_MODE_ENABLED && encodedResultsDirectoryName != null) { //serialize results once resource processing has finished Utils.createFile(String.format(SAMPLES_DIRECTORY_TEMPLATE, encodedResultsDirectoryName), resourceName + "-" + Utils.getTimestamp() + ".json", @@ -478,48 +490,35 @@ List fetchAndProcessRecords(String resourceName, int targetRecord payloadSamples.get().add(payloadSample.get()); } else { - scenario.log("All available records fetched! Total: " + recordsProcessed); + scenario.log("All available records fetched! 
Unique Records Processed: " + recordsProcessed); hasRecords.set(false); } } catch (Exception ex) { scenario.log("Error in fetchAndProcessRecords: " + getDefaultErrorMessage(ex.toString())); scenario.log("Skipping sample..."); - lastFetchedDate.set(lastFetchedDate.get().minus(1, ChronoUnit.WEEKS)); + + //try adding some time to get unstuck, if possible + lastFetchedDate.set(lastFetchedDate.get().plus(1, ChronoUnit.DAYS)); } } } - return payloadSamples.get(); - } - - /** - * fetches and processes records in cases where only sampling is required and encoding is not necessary - * - * @param resourceName the resource name to sample from - * @param targetRecordCount the target record count for the resource (will stop if the end of the records is reached) - * @return a list of PayloadSample items - */ - List fetchAndProcessRecords(String resourceName, int targetRecordCount) { - return fetchAndProcessRecords(resourceName, targetRecordCount, null); + return payloadSamples.get().parallelStream().collect(Collectors.toList()); } /*==================================== TESTS START HERE ====================================*/ - - @Given("that valid metadata have been requested from the server") - public void thatValidMetadataHaveBeenRequestedFromTheServer() { + @Given("that metadata have been retrieved from the server and validated") + public void thatValidMetadataHaveBeenRetrievedFromTheServerAndValidated() { try { - if (container.get().hasValidMetadata()) { - if (processor.get() == null || processor.get().getStandardFieldCache().size() == 0) { - LOG.info("Creating standard field cache..."); - processor.set(new DDCacheProcessor()); - DataDictionaryCodeGenerator generator = new DataDictionaryCodeGenerator(processor.get()); - generator.processWorksheets(); - LOG.info("Standard field cache created!"); - } - } else { + if (!container.get().hasValidMetadata()) { failAndExitWithErrorMessage("Valid metadata was not retrieved from the server. 
Exiting!", scenario); } + + if (container.get().getDDCacheProcessor() == null) { + failAndExitWithErrorMessage("Could not initialize standard field cache!", scenario); + } + } catch (Exception ex) { failAndExitWithErrorMessage(ex.toString(), scenario); } @@ -527,11 +526,9 @@ public void thatValidMetadataHaveBeenRequestedFromTheServer() { @And("the metadata contains RESO Standard Resources") public void theMetadataContainsRESOStandardResources() { - Set resources = container.get().getEdm().getSchemas().stream().map(schema -> - schema.getEntityTypes().stream().map(EdmNamed::getName) - .collect(Collectors.toSet())) - .flatMap(Collection::stream) - .collect(Collectors.toSet()); + Set resources = container.get().getEdm().getSchemas().stream().flatMap(schema -> + schema.getEntityTypes().stream().map(EdmNamed::getName)) + .collect(Collectors.toSet()); standardResources.set(resources.stream() .filter(DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCES::contains).collect(Collectors.toSet())); @@ -630,7 +627,7 @@ public void aDataAvailabilityReportIsCreatedIn(String reportFileName) { createDataAvailabilityReport(resourcePayloadSampleMap.get(), reportFileName, resourceCounts.get(), resourceFieldLookupTallies.get()); } catch (Exception ex) { final String errorMsg = "Data Availability Report could not be created.\n" + ex; - if (strictMode) { + if (STRICT_MODE_ENABLED) { failAndExitWithErrorMessage(errorMsg, scenario); } else { LOG.error(errorMsg); diff --git a/src/main/java/org/reso/certification/stepdefs/DataDictionary.java b/src/main/java/org/reso/certification/stepdefs/DataDictionary.java index f614b09b..59f2dc29 100644 --- a/src/main/java/org/reso/certification/stepdefs/DataDictionary.java +++ b/src/main/java/org/reso/certification/stepdefs/DataDictionary.java @@ -18,6 +18,7 @@ import org.apache.olingo.commons.api.edm.EdmNamed; import org.apache.olingo.commons.api.edm.FullQualifiedName; import org.apache.olingo.commons.api.edm.provider.CsdlEnumMember; +import org.junit.AfterClass; import org.reso.certification.containers.WebAPITestContainer; import org.reso.commander.Commander; import org.reso.commander.common.TestUtils; @@ -114,13 +115,12 @@ public void aMetadataTestContainerWasSuccessfullyCreatedFromTheGivenMetadataFile public void aRESOScriptFileIsProvided() { if (isUsingRESOScript) { if (container.getPathToRESOScript() == null) { - container.setPathToRESOScript(System.getProperty("pathToRESOScript")); + container.setPathToRESOScript(System.getProperty(PATH_TO_RESOSCRIPT_ARG)); } if (container.getPathToRESOScript() == null) { failAndExitWithErrorMessage("pathToRESOScript must be present in command arguments, see README.", scenario); } - LOG.debug("Using RESOScript: " + container.getPathToRESOScript()); } } @@ -128,7 +128,7 @@ public void aRESOScriptFileIsProvided() { public void clientSettingsAndParametersCanBeReadFromTheRESOScript() { if (isUsingRESOScript) { if (container.getSettings() == null) { - container.setSettings(Settings.loadFromRESOScript(new File(System.getProperty("pathToRESOScript")))); + container.setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG)))); if (container.getPathToRESOScript() == null) { failAndExitWithErrorMessage("Settings could not be loaded!", scenario); } @@ -206,9 +206,9 @@ public void validMetadataWereRetrievedFromTheServer() { } //metadata validation tests - TestUtils.assertValidXMLMetadata(container); - TestUtils.assertXmlMetadataContainsEdm(container); - TestUtils.assertXMLMetadataHasValidServiceDocument(container); + 
TestUtils.assertValidXMLMetadata(container, scenario); + TestUtils.assertXmlMetadataContainsEdm(container, scenario); + TestUtils.assertXMLMetadataHasValidServiceDocument(container, scenario); //build field map and ensure it's not null assertNotNull(container.getFieldMap()); diff --git a/src/main/java/org/reso/certification/stepdefs/LookupResource.java b/src/main/java/org/reso/certification/stepdefs/LookupResource.java new file mode 100644 index 00000000..595f0e6e --- /dev/null +++ b/src/main/java/org/reso/certification/stepdefs/LookupResource.java @@ -0,0 +1,202 @@ +package org.reso.certification.stepdefs; + +import com.google.gson.JsonObject; +import com.google.inject.Inject; +import io.cucumber.java.Before; +import io.cucumber.java.Scenario; +import io.cucumber.java.en.And; +import io.cucumber.java.en.Then; +import io.cucumber.java.en.When; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.client.api.domain.ClientEntity; +import org.apache.olingo.commons.api.edm.*; +import org.reso.certification.containers.WebAPITestContainer; +import org.reso.commander.common.ODataFetchApi; +import org.reso.commander.common.ODataUtils; +import org.reso.commander.common.Utils; +import org.reso.models.ReferenceStandardField; +import org.reso.models.Settings; + +import java.io.File; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeTrue; +import static org.reso.certification.stepdefs.DataAvailability.CERTIFICATION_RESULTS_PATH; +import static org.reso.commander.common.ODataUtils.*; +import static org.reso.commander.common.TestUtils.failAndExitWithErrorMessage; +import static org.reso.commander.common.Utils.wrapColumns; + +public class LookupResource { + private static final Logger LOG = LogManager.getLogger(LookupResource.class); + private static Scenario scenario; + private final static AtomicReference container = new AtomicReference<>(); + private static final String PATH_TO_RESOSCRIPT_ARG = "pathToRESOScript"; + private static final AtomicReference>> lookupResourceCache = new AtomicReference<>(new LinkedHashMap<>()); + private static final String LOOKUP_RESOURCE_LOOKUP_METADATA_FILE_NAME = "lookup-resource-lookup-metadata.json"; + private static final String LOOKUP_RESOURCE_FIELD_METADATA_FILE_NAME = "lookup-resource-field-metadata.json"; + + private static final String LOOKUP_NAME_FIELD = "LookupName"; + + @Inject + public LookupResource(WebAPITestContainer c) { + if (container.get() == null) { + container.set(c); + } + } + + @Before + public void beforeStep(Scenario scenario) { + final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_ARG, null); + + LookupResource.scenario = scenario; + + if (pathToRESOScript != null && !container.get().getIsInitialized()) { + container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG)))); + container.get().initialize(); + } + } + + @Then("valid data is replicated from the {string} Resource") + public void validDataIsReplicatedFromTheResource(String resourceName) { + if (lookupResourceCache.get() == null) { + failAndExitWithErrorMessage("Could not replicate data from resource: " + resourceName, scenario); + } + + if (!lookupResourceCache.get().containsKey(resourceName)) { + lookupResourceCache.get().put(resourceName, new ArrayList<>()); + try { + final List results = 
ODataFetchApi.replicateDataFromResource(container.get(), resourceName, + ODataFetchApi.WebApiReplicationStrategy.TopAndSkip); + + if (results.size() == 0) { + failAndExitWithErrorMessage("Could not replicate data from the " + resourceName + " resource!", scenario); + } + + lookupResourceCache.get().get(resourceName).addAll(results); + + final JsonObject metadata = new JsonObject(); + metadata.add("lookups", ODataUtils.serializeLookupMetadata(container.get().getCommander().getClient(), results)); + Utils.createFile(CERTIFICATION_RESULTS_PATH, LOOKUP_RESOURCE_LOOKUP_METADATA_FILE_NAME, metadata.toString()); + + } catch (Exception exception) { + failAndExitWithErrorMessage("Unable to retrieve data from the Lookup Resource! " + exception.getMessage(), scenario); + } + } else { + LOG.debug("Using cached data from: " + resourceName); + } + } + + @Then("{string} Resource data and metadata MUST contain the following fields") + public void resourceDataAndMetadataMUSTContainTheFollowingFields(String resourceName, List fields) { + if (lookupResourceCache.get() == null || lookupResourceCache.get().get(resourceName) == null) { + failAndExitWithErrorMessage("Entity Cache could not be created for the " + resourceName + " resource!", scenario); + } + + final String mandatoryFields = "'" + String.join(", ", fields) + "'"; + + //check metadata + scenario.log("Ensuring mandatory fields " + mandatoryFields + " are present in server metadata"); + assertTrue("The fields " + mandatoryFields + " MUST be present in the server metadata for the " + resourceName + " Resource!", + container.get().getFieldMap(resourceName) != null && container.get().getFieldMap(resourceName).keySet().containsAll(fields)); + + //check resource data cache + scenario.log("Ensuring mandatory fields " + mandatoryFields + " are present in " + resourceName + " Resource data"); + lookupResourceCache.get().get(resourceName).forEach(clientEntity -> fields.forEach(fieldName -> { + if (clientEntity.getProperty(fieldName) == null || clientEntity.getProperty(fieldName).getValue() == null) { + failAndExitWithErrorMessage("Missing required field in the " + resourceName + " Resource!", scenario); + } + })); + scenario.log("All mandatory fields present!"); + } + + @When("the {string} Resource exists in the metadata") + public void theResourceExistsInTheMetadata(String resourceName) { + boolean hasResource = container.get().getFieldMap(resourceName) != null; + assumeTrue("The " + resourceName + " Resource was not found in the default entity container in the metadata!", hasResource); + scenario.log("Found " + resourceName + " Resource!"); + } + + /* + + + + + + + + + + */ + @Then("RESO Lookups using String or String Collection data types MUST have the annotation {string}") + public void resoLookupsUsingStringOrStringCollectionDataTypesMUSTHaveTheAnnotation(String annotationTerm) { + if (container.get().getDDCacheProcessor() == null || container.get().getDDCacheProcessor().getStandardFieldCache() == null) { + failAndExitWithErrorMessage("Could not access standard field cache. 
Check to make sure metadata requests have succeeded.", scenario); + } + + final Map> standardLookupFieldCache = + container.get().getDDCacheProcessor().getStandardFieldCache(); + + final Set lookupFields = + standardLookupFieldCache.keySet().stream().flatMap(resourceName -> + standardLookupFieldCache.get(resourceName).values().stream() + .filter(referenceStandardField -> referenceStandardField.getLookupName() != null)).collect(Collectors.toSet()); + + lookupFields.forEach(referenceStandardField -> { + LOG.debug("Standard Field: { " + + "resourceName: \"" + referenceStandardField.getParentResourceName() + "\"" + + ", standardName: \"" + referenceStandardField.getStandardName() + "\"" + + ", lookupName: \"" + referenceStandardField.getLookupName() + "\" }"); + + EdmElement foundElement = getEdmElement(container.get().getEdm(), referenceStandardField.getParentResourceName(), referenceStandardField.getStandardName()); + final boolean isStringDataType = foundElement != null && + foundElement.getType().getFullQualifiedName().toString().contentEquals(EdmPrimitiveTypeKind.String.getFullQualifiedName().toString()); + + if (foundElement != null && isStringDataType) { + if (!hasAnnotationTerm(foundElement, annotationTerm)) { + final String message = "Could not find required annotation with term \"" + annotationTerm + "\" for field: " + + referenceStandardField.getStandardName(); + LOG.info("WARN: " + message); + failAndExitWithErrorMessage(message, scenario); + } + } + }); + } + + @And("fields with the annotation term {string} MUST have a LookupName in the Lookup Resource") + public void fieldsWithTheAnnotationTermMUSTHaveALookupNameInTheLookupResource(String annotationTerm) { + //every item annotated with the annotation should have a corresponding element in the Lookup set + final Map> filteredResourceFieldMap = + ODataUtils.getEdmElementsWithAnnotation(container.get().getEdm(), annotationTerm); + + final Set lookupNamesFromLookupData = lookupResourceCache.get().values().parallelStream() + .flatMap(Collection::parallelStream) + .map(clientEntity -> clientEntity.getProperty(LOOKUP_NAME_FIELD).getValue().toString()) + .collect(Collectors.toSet()); + + final Set annotatedLookupNames = filteredResourceFieldMap.values().parallelStream() + .flatMap(Collection::parallelStream) + .map(edmElement -> getAnnotationValue(edmElement, annotationTerm)) + .collect(Collectors.toSet()); + + final Set missingLookupNames = Utils.getDifference(annotatedLookupNames, lookupNamesFromLookupData); + + if (missingLookupNames.size() > 0) { + failAndExitWithErrorMessage("LookupName elements missing from LookupMetadata: " + + wrapColumns(String.join(", ", missingLookupNames)), scenario); + } else { + if (filteredResourceFieldMap.size() > 0) { + scenario.log("Found all annotated LookupName elements in the Lookup data. 
Unique count: " + annotatedLookupNames.size()); + scenario.log("LookupNames: " + wrapColumns(String.join(", ", annotatedLookupNames))); + + Utils.createFile(CERTIFICATION_RESULTS_PATH, LOOKUP_RESOURCE_FIELD_METADATA_FILE_NAME, + ODataUtils.serializeFieldMetadataForLookupFields(filteredResourceFieldMap).toString()); + } else { + scenario.log("No annotated lookup names found in the OData XML Metadata."); + } + } + } +} diff --git a/src/main/java/org/reso/commander/Commander.java b/src/main/java/org/reso/commander/Commander.java index d442adcc..15d9a417 100644 --- a/src/main/java/org/reso/commander/Commander.java +++ b/src/main/java/org/reso/commander/Commander.java @@ -19,7 +19,7 @@ import org.reso.auth.OAuth2HttpClientFactory; import org.reso.auth.TokenHttpClientFactory; import org.reso.commander.common.TestUtils; -import org.reso.models.MetadataReport; +import org.reso.commander.jsonSerializers.MetadataReport; import org.reso.models.ODataTransportWrapper; import org.reso.models.Request; import org.xml.sax.*; diff --git a/src/main/java/org/reso/commander/common/ODataFetchApi.java b/src/main/java/org/reso/commander/common/ODataFetchApi.java new file mode 100644 index 00000000..16c26214 --- /dev/null +++ b/src/main/java/org/reso/commander/common/ODataFetchApi.java @@ -0,0 +1,328 @@ +package org.reso.commander.common; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.client.api.ODataClient; +import org.apache.olingo.client.api.communication.request.retrieve.ODataEntitySetRequest; +import org.apache.olingo.client.api.communication.response.ODataRetrieveResponse; +import org.apache.olingo.client.api.domain.ClientEntity; +import org.apache.olingo.client.api.domain.ClientEntitySet; +import org.apache.olingo.client.api.http.HttpClientException; +import org.apache.olingo.commons.api.format.ContentType; +import org.apache.olingo.commons.api.http.HttpStatusCode; +import org.reso.certification.containers.WebAPITestContainer; + +import java.net.URI; +import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + +public class ODataFetchApi { + + private static final Logger LOG = LogManager.getLogger(ODataFetchApi.class); + + final static int DEFAULT_PAGE_SIZE = 1000; + private final static String FILTER_DESCENDING_TEMPLATE = "?$filter=%s" + " lt %s&$orderby=%s desc"; + private final static String FILTER_ASCENDING_INIT_TEMPLATE = "?$orderby=%s asc"; + private final static String FILTER_ASCENDING_TEMPLATE = "?$filter=%s" + " gt %s&$orderby=%s asc"; + private final static String TOP_QUERY_PARAMETER = "&$top=" + DEFAULT_PAGE_SIZE; + final static String DEFAULT_TIMESTAMP_FIELD = "ModificationTimestamp"; + + + /*** + * Gets the total count for the given resource. + * @param container the test container. + * @param resourceName the name of the resource to get the count for. + * @return the total available number of available records. 
+ */ + public static Integer getResourceCount(WebAPITestContainer container, String resourceName) { + final ODataClient client = container.getCommander().getClient(); + client.getConfiguration().setDefaultPubFormat(ContentType.APPLICATION_JSON); + + ODataEntitySetRequest request = client.getRetrieveRequestFactory() + .getEntitySetRequest(client.newURIBuilder(container.getServiceRoot()) + .appendEntitySetSegment(resourceName).count(true).top(1).build()); + + final ODataRetrieveResponse response = request.execute(); + Integer count = 0; + + if (response != null && response.getStatusCode() == HttpStatusCode.OK.getStatusCode() && response.getBody() != null) { + count = response.getBody().getCount(); + LOG.info("Total Count is: " + count); + } else { + LOG.debug("Couldn't get count! Returning 0."); + } + + return count; + } + + /** + * Contains the replication strategies available for the fetch client replication methods. + */ + public enum WebApiReplicationStrategy { + ModificationTimestampDescending, + ModificationTimestampAscending, + TopAndSkip + } + + /** + * Replicates data using the given WebApiReplicationStrategy + * + * @param container the test container + * @param resourceName the name of the resource to replicate from + * @param strategy the replication strategy + * @return a list of ClientEntity items that were replicated. + * @throws Exception exceptions are thrown with messages so that the caller can respond and exit or continue, + * as needed. Clients can use the included message for the reason for the error. + */ + public static List replicateDataFromResource(WebAPITestContainer container, String resourceName, WebApiReplicationStrategy strategy) + throws Exception { + LOG.info("Checking metadata for resource: " + resourceName); + if (container.getXMLMetadata().getSchemas().parallelStream() + .anyMatch(item -> item.getEntityType(resourceName) != null)) { + + LOG.info("Replicating data from " + resourceName + " using strategy: " + strategy.toString()); + if (strategy == WebApiReplicationStrategy.TopAndSkip) + return replicateUsingTopAndSkip(container, resourceName); + + if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending) + return replicateUsingModificationTimestampField(container, resourceName, WebApiReplicationStrategy.ModificationTimestampDescending); + + if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending) + return replicateUsingModificationTimestampField(container, resourceName, WebApiReplicationStrategy.ModificationTimestampAscending); + + } else { + throw new Exception(resourceName + " resource was not found in metadata!"); + } + return new ArrayList<>(); + } + + + /** + * Implementation of an OData client using a TopAndSkip replication strategy. + * + * @param container the test container. + * @param resourceName the name of the resource to replicate from. + * @return a list of ClientEntity items that were replicated. + * @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations + * should bubble any relevant errors up. 
+ */ + private static List replicateUsingTopAndSkip(WebAPITestContainer container, String resourceName) throws Exception { + final ODataClient client = container.getCommander().getClient(); + final String serviceRoot = container.getServiceRoot(); + + int pageSize = DEFAULT_PAGE_SIZE; + final Integer resourceCount = ODataFetchApi.getResourceCount(container, resourceName); + + final ArrayList entities = new ArrayList<>(); + try { + for (int skipAmount = 0; pageSize > 0 && entities.size() <= resourceCount; skipAmount += pageSize) { + final URI requestUri = client.newURIBuilder(serviceRoot).appendEntitySetSegment(resourceName).top(pageSize).skip(skipAmount).build(); + final ODataRetrieveResponse response = client.getRetrieveRequestFactory().getEntitySetRequest(requestUri).execute(); + + LOG.info("Fetching " + resourceName + " Resource data from URL: " + requestUri.toString()); + + if (response != null && response.getStatusCode() == HttpStatusCode.OK.getStatusCode() && response.getBody() != null) { + pageSize = response.getBody().getEntities().size(); + if (pageSize > 0) { + entities.addAll(response.getBody().getEntities()); + } + } + } + } catch (HttpClientException httpClientException) { + final String message = "Could not retrieve data from the " + resourceName + " resource!" + httpClientException.getMessage(); + LOG.error(message); + LOG.error("Cause " + httpClientException.getCause().getMessage()); + + throw new Exception(message); + } + LOG.info("Total records fetched: " + entities.size()); + return entities; + } + + /** + * Default ModificationTimestamp replication client. + * + * @param container the test container. + * @param resourceName the name of the resource to replicate from. + * @param strategy the replication strategy, either desc or asc. + * @return a list of ClientEntity items that were replicated. + * @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations + * should bubble any relevant errors up. + */ + private static List replicateUsingModificationTimestampField(WebAPITestContainer container, String resourceName, WebApiReplicationStrategy strategy) throws Exception { + return replicateUsingTimestampField(container, resourceName, DEFAULT_TIMESTAMP_FIELD, strategy); + } + + + /** + * General timestamp replication client. + * + * @param container the test container. + * @param resourceName the name of the resource to replicate from. + * @param timestampField the name of the timestamp field to use for comparisons. + * @param strategy the replication strantegy, either asc or desc. + * @return a list of ClientEntity items that were replicated. + * @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations + * should bubble any relevant errors up. 
+ */ + private static List replicateUsingTimestampField(WebAPITestContainer container, String resourceName, String timestampField, WebApiReplicationStrategy strategy) throws Exception { + final ODataClient client = container.getCommander().getClient(); + final AtomicReference lastOffsetDateTime = new AtomicReference<>(OffsetDateTime.now()); + final int MAX_RETRIES = 3; + final int RETRY_SKIP_MS = 1; + int numRetries = 0; + + final Integer resourceCount = ODataFetchApi.getResourceCount(container, resourceName); + final Set entities = new HashSet<>(); + boolean isInitialRequest = true; + try { + do { + URI requestUri; + if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending) { + requestUri = TestUtils.prepareUri(buildTimestampDescendingFilterRequestUri(container, resourceName, timestampField, lastOffsetDateTime.get())); + } else if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending) { + if (isInitialRequest) { + requestUri = TestUtils.prepareUri(buildTimestampAscendingInitFilterRequestUri(container, resourceName, timestampField)); + } else { + requestUri = TestUtils.prepareUri(buildTimestampAscendingFilterRequestUri(container, resourceName, timestampField, lastOffsetDateTime.get())); + } + } else { + throw new Exception("Unsupported WebApiReplicationStrategy: " + strategy); + } + + LOG.info("Fetching " + resourceName + " Resource data from URL: " + requestUri.toString()); + final ODataRetrieveResponse response = client.getRetrieveRequestFactory().getEntitySetRequest(requestUri).execute(); + final List currentPage = response.getBody().getEntities(); + + if (currentPage.size() == 0) { + LOG.error("Page contained no records, exiting! Request URI: " + requestUri.toString()); + break; + } else { + for (ClientEntity clientEntity : currentPage) { + try { + if (entities.contains(clientEntity)) { + LOG.error("Duplicate page detected!"); + LOG.error("Last Timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT)); + numRetries++; + + if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending) { + LOG.error("\t--> Subtracting " + RETRY_SKIP_MS + "ms from last timestamp..."); + lastOffsetDateTime.set(lastOffsetDateTime.get().minus(RETRY_SKIP_MS, ChronoUnit.MILLIS)); + } else { + LOG.error("\t--> Adding " + RETRY_SKIP_MS + "ms to last timestamp..."); + lastOffsetDateTime.set(lastOffsetDateTime.get().plus(RETRY_SKIP_MS, ChronoUnit.MILLIS)); + } + break; + } else { + entities.add(clientEntity); + OffsetDateTime currentOffsetDateTime = OffsetDateTime.parse(clientEntity.getProperty(timestampField).getValue().toString()); + if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending && currentOffsetDateTime.isBefore(lastOffsetDateTime.get())) { + LOG.debug("Current " + timestampField + " field timestamp is: " + currentOffsetDateTime.format(DateTimeFormatter.ISO_INSTANT)); + LOG.debug("Found earlier timestamp! Last timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT) + "\n"); + lastOffsetDateTime.set(currentOffsetDateTime); + } else if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending) { + if (!isInitialRequest && currentOffsetDateTime.isAfter(lastOffsetDateTime.get())) { + LOG.debug("Current " + timestampField + " field timestamp is: " + currentOffsetDateTime.format(DateTimeFormatter.ISO_INSTANT)); + LOG.debug("Found later timestamp! 
Last timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT) + "\n"); + } + lastOffsetDateTime.set(currentOffsetDateTime); + } + } + } catch (DateTimeParseException exception) { + LOG.error(exception); + throw new Exception("Could not convert " + timestampField + " to timestamp value!"); + } + } + } + isInitialRequest = false; + } while (entities.size() <= resourceCount && numRetries < MAX_RETRIES); + + if (numRetries >= MAX_RETRIES) { + LOG.warn("Exceeded maximum number of retries (" + MAX_RETRIES + ")! "); + } + + if (entities.size() != resourceCount) { + throw new Exception("Could not fetch all records!\n\tTotal Count: " + resourceCount + ". Records fetched: " + entities.size()); + } + + LOG.info("Records fetched: " + entities.size()); + + } catch (HttpClientException httpClientException) { + final String message = "Could not retrieve data from the " + resourceName + " resource!" + httpClientException.getMessage(); + LOG.error(message); + LOG.error("Cause " + httpClientException.getCause().getMessage()); + + throw new Exception(message); + } + return new ArrayList<>(entities); + } + + + /** + * Builds a request URI string, taking into account whether the sampling is being done with an optional + * filter, for instance in the shared systems case + * + * @param resourceName the resource name to query + * @param timestampField the timestamp field for the resource + * @param lastFetchedDate the last fetched date for filtering + * @return a string OData query used for sampling + */ + public static String buildTimestampDescendingFilterRequestUri(WebAPITestContainer container, String resourceName, + String timestampField, OffsetDateTime lastFetchedDate) { + String requestUri = container.getCommander().getClient() + .newURIBuilder(container.getServiceRoot()) + .appendEntitySetSegment(resourceName).build().toString(); + + requestUri += String.format(FILTER_DESCENDING_TEMPLATE + TOP_QUERY_PARAMETER, timestampField, + lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField); + + return requestUri; + } + + /** + * Builds a request URI string, taking into account whether the sampling is being done with an optional + * filter, for instance in the shared systems case + * + * @param resourceName the resource name to query + * @param timestampField the timestamp field for the resource + * @param lastFetchedDate the last fetched date for filtering + * @return a string OData query used for sampling + */ + public static String buildTimestampAscendingFilterRequestUri(WebAPITestContainer container, String resourceName, + String timestampField, OffsetDateTime lastFetchedDate) { + String requestUri = container.getCommander().getClient() + .newURIBuilder(container.getServiceRoot()) + .appendEntitySetSegment(resourceName).build().toString(); + + requestUri += String.format(FILTER_ASCENDING_TEMPLATE + TOP_QUERY_PARAMETER, timestampField, + lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField); + + return requestUri; + } + + /** + * Builds a request URI string, taking into account whether the sampling is being done with an optional + * filter, for instance in the shared systems case + * + * @param resourceName the resource name to query + * @param timestampField the timestamp field for the resource + * @return a string OData query used for sampling + */ + public static String buildTimestampAscendingInitFilterRequestUri(WebAPITestContainer container, String resourceName, + String timestampField) { + String requestUri = container.getCommander().getClient() + 
.newURIBuilder(container.getServiceRoot()) + .appendEntitySetSegment(resourceName).build().toString(); + + requestUri += String.format(FILTER_ASCENDING_INIT_TEMPLATE + TOP_QUERY_PARAMETER, timestampField); + + return requestUri; + } +} diff --git a/src/main/java/org/reso/commander/common/ODataUtils.java b/src/main/java/org/reso/commander/common/ODataUtils.java new file mode 100644 index 00000000..6013c440 --- /dev/null +++ b/src/main/java/org/reso/commander/common/ODataUtils.java @@ -0,0 +1,193 @@ +package org.reso.commander.common; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.client.api.ODataClient; +import org.apache.olingo.client.api.domain.ClientEntity; +import org.apache.olingo.client.api.serialization.ODataSerializerException; +import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation; +import org.apache.olingo.client.core.serialization.JsonSerializer; +import org.apache.olingo.commons.api.edm.*; +import org.apache.olingo.commons.api.format.ContentType; +import org.apache.olingo.commons.core.edm.EdmAnnotationImpl; +import org.apache.olingo.commons.core.edm.EdmPropertyImpl; + +import java.io.StringWriter; +import java.lang.reflect.Field; +import java.util.*; +import java.util.stream.Collectors; + +public class ODataUtils { + + private static final Logger LOG = LogManager.getLogger(ODataUtils.class); + + public static EdmElement getEdmElement(Edm edm, String resourceName, String fieldName) { + final Optional entitySet = Optional.ofNullable(edm.getEntityContainer().getEntitySet(resourceName)); + + if (entitySet.isPresent()) { + final EdmElement fieldEdm = entitySet.get().getEntityTypeWithAnnotations().getProperty(fieldName); + if (fieldEdm != null && fieldEdm.getType().getFullQualifiedName().toString().contentEquals(EdmPrimitiveTypeKind.String.getFullQualifiedName().toString())) { + LOG.debug("\nFound field with resource: " + resourceName + " and standard name: " + fieldName); + LOG.debug("\t\t Data type is: " + fieldEdm.getType().getFullQualifiedName().toString() + (fieldEdm.isCollection() ? ", Collection: true" : "")); + return fieldEdm; + } + } + return null; + } + + /** + * Determines whether the element has the given term. + * + * @param element the Edm element to check. + * @param annotationTerm the term to search for. + * @return true if the Edm element contains the annotationTerm, false otherwise. + */ + public static boolean hasAnnotationTerm(EdmElement element, String annotationTerm) { + return Optional.ofNullable(getAnnotationValue(element, annotationTerm)).isPresent(); + } + + /** + * Gets the annotation value for the given annotation term. + * + * @param element the Edm element to check. + * @param annotationTerm the term to search for. + * @return a string value, if present, otherwise null. 
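A brief usage sketch for the ODataUtils helpers above (getEdmElement, hasAnnotationTerm, getAnnotationValue). The resource and field names are illustrative, and the annotation term is the one used later in this diff.

```java
import org.apache.olingo.commons.api.edm.Edm;
import org.apache.olingo.commons.api.edm.EdmElement;
import org.reso.commander.common.ODataUtils;

public class LookupAnnotationSketch {
  // Term matches the LOOKUP_ANNOTATION_TERM constant used further down in ODataUtils.
  private static final String LOOKUP_ANNOTATION_TERM = "RESO.OData.Metadata.LookupName";

  /** Prints the annotated lookup name of a field, if the field exists and carries the annotation. */
  public static void printLookupName(Edm edm, String resourceName, String fieldName) {
    // getEdmElement only returns Edm.String fields; anything else comes back null.
    final EdmElement field = ODataUtils.getEdmElement(edm, resourceName, fieldName);

    if (field != null && ODataUtils.hasAnnotationTerm(field, LOOKUP_ANNOTATION_TERM)) {
      System.out.println(resourceName + "." + fieldName + " -> "
          + ODataUtils.getAnnotationValue(field, LOOKUP_ANNOTATION_TERM));
    }
  }
}
```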
+ */ + public static String getAnnotationValue(EdmElement element, String annotationTerm) { + if (element == null || annotationTerm == null) return null; + + final Optional foundAnnotation = Optional.of((EdmPropertyImpl) element).get().getAnnotations().stream() + .filter(edmAnnotation -> { + final SneakyAnnotationReader annotationReader = new SneakyAnnotationReader(edmAnnotation); + return annotationReader.getTerm() != null && annotationReader.getTerm().contentEquals(annotationTerm); + }).findFirst(); + + if (foundAnnotation.isPresent()) { + final Optional value = Optional.ofNullable(foundAnnotation.get().getExpression().asConstant().getValueAsString()); + + if (value.isPresent()) { + LOG.debug("Found \"" + annotationTerm + "\" annotation! Value is: " + value); + return value.get(); + } + } + return null; + } + + /** + * Serializes a list of OData ClientEntity items in a JSON Array with those properties. + * + * @param results list of OData ClientEntity results + * @param client OData client to use as serializer + * @return a JsonArray of results + */ + public static JsonArray serializeLookupMetadata(ODataClient client, List results) { + final JsonArray lookups = new JsonArray(); + + try { + final Gson gson = new Gson(); + final JsonSerializer jsonSerializer = new JsonSerializer(false, ContentType.APPLICATION_JSON); + results.forEach(clientEntity -> { + try { + StringWriter writer = new StringWriter(); + jsonSerializer.write(writer, client.getBinder().getEntity(clientEntity)); + Optional element = Optional.ofNullable(gson.fromJson(writer.toString(), JsonElement.class)); + element.ifPresent(lookups::add); + } catch (ODataSerializerException e) { + LOG.error("ERROR: could not deserialize. Exception: " + e); + } + }); + } catch (Exception exception) { + LOG.error(exception); + } + + return lookups; + } + + public static JsonObject serializeFieldMetadataForLookupFields(Map> resourceFieldMap) { + //TODO: migrate to test file + final String LOOKUP_ANNOTATION_TERM = "RESO.OData.Metadata.LookupName"; + + final String + DESCRIPTION_KEY = "description", DESCRIPTION = "Lookup Resource Annotated Fields Metadata", + VERSION_KEY = "version", VERSION = "1.7", + GENERATED_ON_KEY = "generatedOn", + FIELDS_KEY = "fields"; + + JsonObject metadataReport = new JsonObject(); + metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION); + metadataReport.addProperty(VERSION_KEY, VERSION); + metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp()); + + JsonArray fieldsArray = new JsonArray(); + resourceFieldMap.forEach((resourceName, fieldElements) -> fieldElements.forEach( + fieldElement -> { + JsonObject fieldObject = new JsonObject(); + fieldObject.addProperty("resourceName", resourceName); + fieldObject.addProperty("fieldName:", fieldElement.getName()); + fieldObject.addProperty("type", getAnnotationValue(fieldElement, LOOKUP_ANNOTATION_TERM)); + fieldsArray.add(fieldObject); + } + )); + + metadataReport.add(FIELDS_KEY, fieldsArray); + return metadataReport; + } + + /** + * Returns a Map of EntityDataModel (Edm) elements and annotation value with the given annotation term. 
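The report helper above is designed to pair with getEdmElementsWithAnnotation, defined next. A sketch of that pairing follows; the generic parameters (Map&lt;String, Set&lt;EdmElement&gt;&gt;) are inferred, since they were dropped in this rendering of the diff.

```java
import java.util.Map;
import java.util.Set;

import com.google.gson.JsonObject;
import org.apache.olingo.commons.api.edm.Edm;
import org.apache.olingo.commons.api.edm.EdmElement;
import org.reso.commander.common.ODataUtils;

public class LookupFieldReportSketch {
  /** Builds the "Lookup Resource Annotated Fields Metadata" report for all annotated fields in the Edm. */
  public static JsonObject buildReport(Edm edm) {
    // Keys are resource (entity type) names; values are the fields annotated with the lookup term.
    final Map<String, Set<EdmElement>> annotatedFields =
        ODataUtils.getEdmElementsWithAnnotation(edm, "RESO.OData.Metadata.LookupName");

    return ODataUtils.serializeFieldMetadataForLookupFields(annotatedFields);
  }
}
```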
+ */ + public static Map> getEdmElementsWithAnnotation(Edm edm, String annotationTerm) { + return edm.getSchemas().parallelStream() + .filter(edmSchema -> edmSchema != null && edmSchema.getEntityContainer() != null) + .flatMap(edmSchema -> edmSchema.getEntityContainer().getEntitySets().parallelStream()) + .collect(Collectors.toMap(edmEntitySet -> edmEntitySet.getEntityTypeWithAnnotations().getName(), + edmEntitySet -> edmEntitySet.getEntityTypeWithAnnotations().getPropertyNames().parallelStream() + .map(propertyName -> edmEntitySet.getEntityTypeWithAnnotations().getProperty(propertyName)) + .filter(edmElement -> getAnnotationValue(edmElement, annotationTerm) != null) + .collect(Collectors.toSet()))); + } + + /** + * Class to read OData internal annotation variables. + */ + public static class SneakyAnnotationReader { + Class object; + Field field; + EdmAnnotationImpl edmAnnotationImpl; + ClientCsdlAnnotation clientCsdlAnnotation; + + /** + * Allows the consumer to read internal annotations. + * + * @param edmAnnotation the annotation to read from + */ + public SneakyAnnotationReader(EdmAnnotation edmAnnotation) { + try { + edmAnnotationImpl = ((EdmAnnotationImpl) edmAnnotation); + + // create an object of the class named Class + object = edmAnnotationImpl.getClass(); + + // access the private variable + field = object.getDeclaredField("annotation"); + // make private field accessible + field.setAccessible(true); + + clientCsdlAnnotation = (ClientCsdlAnnotation) field.get(edmAnnotationImpl); + + } catch (Exception ex) { + LOG.error(ex); + ex.printStackTrace(); + } + } + + public String getTerm() { + return clientCsdlAnnotation.getTerm(); + } + } + +} diff --git a/src/main/java/org/reso/commander/common/TestUtils.java b/src/main/java/org/reso/commander/common/TestUtils.java index ae8a74b2..d5cedb2d 100644 --- a/src/main/java/org/reso/commander/common/TestUtils.java +++ b/src/main/java/org/reso/commander/common/TestUtils.java @@ -4,6 +4,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.cucumber.java.Scenario; import org.apache.http.Header; +import org.apache.http.NameValuePair; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.olingo.client.api.communication.ODataClientErrorException; @@ -18,6 +19,8 @@ import org.apache.olingo.commons.core.edm.primitivetype.EdmDate; import org.apache.olingo.commons.core.edm.primitivetype.EdmDateTimeOffset; import org.apache.olingo.commons.core.edm.primitivetype.EdmTimeOfDay; +import org.reso.certification.codegen.DDCacheProcessor; +import org.reso.certification.codegen.DataDictionaryCodeGenerator; import org.reso.certification.containers.WebAPITestContainer; import org.reso.commander.Commander; import org.reso.models.Settings; @@ -564,14 +567,9 @@ public static boolean isValidJson(String jsonString) { * @return the value of the header with key, or null */ public static String getHeaderData(String key, Collection
headers) { - String data = null; - - for (Header header : headers) { - if (header.getName().toLowerCase().contains(key.toLowerCase())) { - data = header.getValue(); - } - } - return data; + return headers.stream() + .filter(header -> header.getName().toLowerCase().contains(key.toLowerCase())) + .findFirst().map(NameValuePair::getValue).orElse(null); } /** @@ -583,14 +581,7 @@ public static String getHeaderData(String key, Collection
headers) { */ public static String getHeaderData(String key, ODataResponse oDataResponse) { if (key == null || oDataResponse.getHeader(key) == null) return null; - ArrayList result = new ArrayList<>(oDataResponse.getHeader(key)); - - if (result.size() > 0) { - return result.get(0); - } else { - return null; - } - + return oDataResponse.getHeader(key).stream().reduce(String::concat).orElse(null); } /** @@ -768,42 +759,39 @@ public static void assertDateTimeOffset(String parameterFieldName, String op, Ti /** * Asserts that metadata in the given container are valid. Fetches metadata if not present in the container. + * * @param container a test container with a valid config that metadata can be fetched into */ - public static void assertValidXMLMetadata(WebAPITestContainer container) { + public static void assertValidXMLMetadata(WebAPITestContainer container, Scenario scenario) { try { if (!container.getHaveMetadataBeenRequested()) { //will lazy-load metadata from the server if not yet requested container.fetchXMLMetadata(); } container.validateMetadata(); - assertTrue("XML Metadata at the given service root is not valid! " + container.getServiceRoot(), - container.getIsValidXMLMetadata()); + if (!container.getIsValidXMLMetadata()) { + failAndExitWithErrorMessage("Invalid XML Metadata! Service root: " + container.getServiceRoot(), scenario); + } } catch (Exception ex) { - fail(getDefaultErrorMessage(ex)); + failAndExitWithErrorMessage(getDefaultErrorMessage(ex), scenario); } } /** * Asserts that the given container has XML Metadata that contains an Entity Data Model (Edm) + * * @param container the container with XML metadata to validate */ - public static void assertXmlMetadataContainsEdm(WebAPITestContainer container) { + public static void assertXmlMetadataContainsEdm(WebAPITestContainer container, Scenario scenario) { container.setEdm(Commander.deserializeEdm(container.getXMLResponseData(), container.getCommander().getClient())); - assertNotNull(getDefaultErrorMessage("Edm de-serialized to an empty object!"), container.getEdm()); - } - - /** - * Asserts that the Edm in the given container are valid - * @param container the container with the XML Metadata to check - */ - public static void assertValidEdm(WebAPITestContainer container) { - assertTrue("Edm Metadata at the given service root is not valid! 
" + container.getServiceRoot(), - container.getIsValidEdm()); + if (container.getEdm() == null) { + failAndExitWithErrorMessage(getDefaultErrorMessage("Edm de-serialized to an empty object!"), scenario); + } } /** * Asserts that XML Metadata are retrieved from the server + * * @param container the container to retrieve metadata with */ public static void assertXMLMetadataAreRequestedFromTheServer(WebAPITestContainer container, Scenario scenario) { @@ -833,35 +821,28 @@ public static void assertXMLMetadataAreRequestedFromTheServer(WebAPITestContaine } } - /** - * Asserts that the XML Response in the given container is valid XML - * @param container the container with the XML response to validate - */ - public static void assertXMLResponseIsValidXML(WebAPITestContainer container) { - assertNotNull(getDefaultErrorMessage("no XML Response data were found!"), container.getXMLResponseData()); - container.validateXMLMetadataXML(); - assertTrue(getDefaultErrorMessage("invalid XML response!"), container.getIsValidXMLMetadataXML()); - } - /** * Asserts that the XML metadata in the given container has a valid service document + * * @param container the container with XML Metadata to validate */ - public static void assertXMLMetadataHasValidServiceDocument(WebAPITestContainer container) { + public static void assertXMLMetadataHasValidServiceDocument(WebAPITestContainer container, Scenario scenario) { try { - assertNotNull("ERROR: could not find default entity container for given service root: " + - container.getServiceRoot(), container.getEdm().getEntityContainer()); + if (container == null || container.getEdm() == null || container.getEdm().getEntityContainer() == null) { + failAndExitWithErrorMessage("Could not find default entity container for given service root: " + container.getServiceRoot(), scenario); + } LOG.info("Found Default Entity Container: '" + container.getEdm().getEntityContainer().getNamespace() + "'"); } catch (ODataClientErrorException cex) { container.setResponseCode(cex.getStatusLine().getStatusCode()); - fail(cex.toString()); + failAndExitWithErrorMessage(cex.toString(), scenario); } catch (Exception ex) { - fail(getDefaultErrorMessage(ex)); + failAndExitWithErrorMessage(getDefaultErrorMessage(ex), scenario); } } /** * Asserts that valid Metadata have been retrieved. Fetches metadata if not present. + * * @param container a test container to validate */ public static void assertValidMetadataHaveBeenRetrieved(WebAPITestContainer container) { @@ -880,6 +861,7 @@ public static void assertValidMetadataHaveBeenRetrieved(WebAPITestContainer cont /** * Validates that the given response data have a valid OData count + * * @param responseData the data to check for a count against * @return true if the there is a count present and it's greater than or equal to the number of results */ @@ -902,21 +884,21 @@ public static boolean validateODataCount(String responseData) { * Contains the list of supported operators for use in query expressions. 
*/ public static class Operators { - public static final String - AND = "and", - OR = "or", - NE = "ne", - EQ = "eq", - GREATER_THAN = "gt", - GREATER_THAN_OR_EQUAL = "ge", - HAS = "has", - LESS_THAN = "lt", - LESS_THAN_OR_EQUAL = "le", - CONTAINS = "contains", - ENDS_WITH = "endswith", - STARTS_WITH = "startswith", - TO_LOWER = "tolower", - TO_UPPER = "toupper"; + public static final String + AND = "and", + OR = "or", + NE = "ne", + EQ = "eq", + GREATER_THAN = "gt", + GREATER_THAN_OR_EQUAL = "ge", + HAS = "has", + LESS_THAN = "lt", + LESS_THAN_OR_EQUAL = "le", + CONTAINS = "contains", + ENDS_WITH = "endswith", + STARTS_WITH = "startswith", + TO_LOWER = "tolower", + TO_UPPER = "toupper"; } public static final class DateParts { @@ -965,5 +947,19 @@ public static void failAndExitWithErrorMessage(String msg, Scenario scenario) { } System.exit(NOT_OK); } + + /** + * Builds a Data Dictionary Cache + * + * @return a DDProcessor Cache object + */ + public static DDCacheProcessor buildDataDictionaryCache() { + LOG.info("Creating standard field cache..."); + final DDCacheProcessor cache = new DDCacheProcessor(); + DataDictionaryCodeGenerator generator = new DataDictionaryCodeGenerator(cache); + generator.processWorksheets(); + LOG.info("Standard field cache created!"); + return cache; + } } diff --git a/src/main/java/org/reso/commander/common/Utils.java b/src/main/java/org/reso/commander/common/Utils.java index f6224255..199544f1 100644 --- a/src/main/java/org/reso/commander/common/Utils.java +++ b/src/main/java/org/reso/commander/common/Utils.java @@ -1,18 +1,34 @@ package org.reso.commander.common; +import com.google.common.base.Functions; +import com.google.gson.*; +import io.cucumber.gherkin.internal.com.eclipsesource.json.Json; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.olingo.client.api.ODataClient; +import org.apache.olingo.client.api.domain.ClientEntity; +import org.apache.olingo.client.api.serialization.ODataSerializerException; +import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation; +import org.apache.olingo.client.core.serialization.JsonSerializer; +import org.apache.olingo.commons.api.edm.Edm; +import org.apache.olingo.commons.api.edm.EdmAnnotation; +import org.apache.olingo.commons.api.edm.EdmElement; +import org.apache.olingo.commons.api.format.ContentType; +import org.apache.olingo.commons.core.edm.EdmAnnotationImpl; import java.io.File; import java.io.FileWriter; +import java.io.StringWriter; +import java.lang.reflect.Field; import java.nio.charset.StandardCharsets; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.time.OffsetDateTime; import java.time.format.DateTimeFormatter; -import java.util.Arrays; -import java.util.Date; -import java.util.Objects; +import java.util.*; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; public class Utils { private static final Logger LOG = LogManager.getLogger(Utils.class); @@ -30,6 +46,7 @@ public static String getTimestamp(Date date) { /** * Gets the current timestamp + * * @return the current timestamp returned as a string */ public static String getTimestamp() { @@ -38,9 +55,10 @@ public static String getTimestamp() { /** * Creates a file in the given directory with the given content + * * @param directoryName the directory name to create the file in - * @param fileName the name of the file to create - * @param content the content to write to the file + * @param fileName the name of 
the file to create + * @param content the content to write to the file */ public static File createFile(String directoryName, String fileName, String content) { if (directoryName == null || fileName == null) return null; @@ -66,6 +84,7 @@ public static File createFile(String directoryName, String fileName, String cont /** * Creates a file in the given directory with the given content + * * @param content the content to write to the file */ public static File createFile(String outputPath, String content) { @@ -88,9 +107,10 @@ public static File createFile(String outputPath, String content) { /** * Removes a directory at the given pathToDirectory. - * + *

* If current user has write access then directory creation will result in True being returned. * Otherwise will return false if the directory couldn't be created for some reason. + * * @param pathToDirectory * @return */ @@ -143,4 +163,17 @@ public static String getIsoTimestamp(OffsetDateTime fromDate) { return OffsetDateTime.from(fromDate.toInstant()).format(DateTimeFormatter.ISO_INSTANT); } + /** + * Gets the difference of two generic sets. + * @param a the minuend set + * @param b the subtrahend set + * @param the type of set + * @return Set of type T that contains A \ B + */ + public static Set getDifference(Set a, Set b) { + return a.parallelStream() + .filter(item -> !b.contains(item)) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + } } diff --git a/src/main/java/org/reso/commander/jsonSerializers/FieldJson.java b/src/main/java/org/reso/commander/jsonSerializers/FieldJson.java new file mode 100644 index 00000000..4d92f6bf --- /dev/null +++ b/src/main/java/org/reso/commander/jsonSerializers/FieldJson.java @@ -0,0 +1,177 @@ +package org.reso.commander.jsonSerializers; + +import com.google.gson.*; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.commons.api.edm.EdmAnnotation; +import org.apache.olingo.commons.api.edm.EdmElement; +import org.apache.olingo.commons.api.edm.EdmProperty; +import org.reso.commander.common.ODataUtils; + +import java.lang.reflect.Type; +import java.util.List; +import java.util.Optional; + +import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage; + +/** + * FieldJson uses a JSON payload with the following structure: + *

+ * { + * "resourceName": "Property", + * "fieldName": "AboveGradeFinishedArea", + * "type": "Edm.Decimal" + * } + */ +public final class FieldJson implements JsonSerializer { + private static final Logger LOG = LogManager.getLogger(FieldJson.class); + + static final String + RESOURCE_NAME_KEY = "resourceName", + FIELD_NAME_KEY = "fieldName", + NULLABLE_KEY = "nullable", + MAX_LENGTH_KEY = "maxLength", + PRECISION_KEY = "precision", + SCALE_KEY = "scale", + IS_COLLECTION_KEY = "isCollection", + DEFAULT_VALUE_KEY = "defaultValue", + UNICODE_KEY = "unicode", + TYPE_KEY = "type", + TERM_KEY = "term", + VALUE_KEY = "value", + ANNOTATIONS_KEY = "annotations", + FIELDS_KEY = "fields"; + + String resourceName; + EdmElement edmElement; + + /** + * Constructor which takes an edmElement and reads the type from it, then + * uses it as the resource name. + * @param edmElement edmElement to create FieldJson for + */ + public FieldJson(EdmElement edmElement) { + Optional element = Optional.ofNullable(edmElement); + assert element.isPresent() : "EdmElement cannot be null!"; + this.edmElement = edmElement; + + Optional resourceName = Optional.ofNullable(edmElement.getType().getName()); + assert resourceName.isPresent() : "Could not read name from edmElement type!"; + this.resourceName = resourceName.get(); + } + + /** + * Constructor which takes an edmElement and reads the type from it, then + * uses it as the resource name. + * @param resourceName the resourceName the element belongs to + * @param edmElement edmElement to create FieldJson for + */ + public FieldJson(String resourceName, EdmElement edmElement) { + this.resourceName = resourceName; + this.edmElement = edmElement; + } + + /** + * Metadata Pretty Printer + * @param metadataReport the metadata report + * @return a human-friendly string version of the metadata report + */ + public static String buildReportString(JsonElement metadataReport) { + StringBuilder reportBuilder = new StringBuilder(); + metadataReport.getAsJsonObject().get(FIELDS_KEY).getAsJsonArray().forEach(field -> { + reportBuilder.append("\nResource: "); + reportBuilder.append(field.getAsJsonObject().get(RESOURCE_NAME_KEY)); + reportBuilder.append("\nField: "); + reportBuilder.append(field.getAsJsonObject().get(FIELD_NAME_KEY)); + reportBuilder.append("\nType: "); + reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY)); + + if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) { + JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray(); + if (annotations != null && annotations.size() > 0) { + reportBuilder.append("\n"); + reportBuilder.append("Annotations:"); + annotations.forEach(annotation -> { + if (annotation.getAsJsonObject().get(TERM_KEY) != null) { + reportBuilder.append("\n\tTerm: "); + reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY)); + } + + if (annotation.getAsJsonObject().get(VALUE_KEY) != null) { + reportBuilder.append("\n\tValue: "); + reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY)); + } + }); + } + } + reportBuilder.append("\n"); + }); + return reportBuilder.toString(); + } + + @Override + public JsonElement serialize(FieldJson src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject field = new JsonObject(); + + + field.addProperty(RESOURCE_NAME_KEY, src.resourceName); + field.addProperty(FIELD_NAME_KEY, src.edmElement.getName()); + + String typeName = null; + try { + typeName = src.edmElement.getType().getFullQualifiedName().getFullQualifiedNameAsString(); + 
field.addProperty(TYPE_KEY, typeName); + } catch (Exception ex) { + LOG.error(getDefaultErrorMessage("Field Name:", src.edmElement.getName(), ex.toString())); + field.addProperty(TYPE_KEY, "UNDEFINED"); + } + + field.addProperty(NULLABLE_KEY, ((EdmProperty) src.edmElement).isNullable()); + field.addProperty(MAX_LENGTH_KEY, ((EdmProperty) src.edmElement).getMaxLength()); + field.addProperty(SCALE_KEY, ((EdmProperty) src.edmElement).getScale()); + field.addProperty(PRECISION_KEY, ((EdmProperty) src.edmElement).getPrecision()); + field.addProperty(DEFAULT_VALUE_KEY, ((EdmProperty) src.edmElement).getDefaultValue()); + field.addProperty(IS_COLLECTION_KEY, src.edmElement.isCollection()); + field.addProperty(UNICODE_KEY, ((EdmProperty) src.edmElement).isUnicode()); + + //TODO: report issue to Apache + // Can only get the annotation term using ((ClientCsdlAnnotation) ((EdmAnnotationImpl)edmAnnotation).annotatable).term + // which a private member and cannot be accessed + List annotations = ((EdmProperty) src.edmElement).getAnnotations(); + if (annotations != null && annotations.size() > 0) { + JsonArray annotationsJsonArray = new JsonArray(); + annotations.forEach(edmAnnotation -> { + if (edmAnnotation.getExpression() != null) { + if (edmAnnotation.getExpression().isConstant()) { + JsonObject annotation = new JsonObject(); + if (edmAnnotation.getTerm() != null) { + annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString()); + } else { + ODataUtils.SneakyAnnotationReader sneakyAnnotationReader = new ODataUtils.SneakyAnnotationReader(edmAnnotation); + annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm()); + } + annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString()); + annotationsJsonArray.add(annotation); + } else if (edmAnnotation.getExpression().isDynamic()) { + if (edmAnnotation.getExpression().asDynamic().isCollection()) { + edmAnnotation.getExpression().asDynamic().asCollection().getItems().forEach(edmExpression -> { + //OData Allowed Values come across as Records, in which case their key is "Value" + if (edmExpression.asDynamic().isRecord()) { + JsonObject annotation = new JsonObject(); + edmExpression.asDynamic().asRecord().getPropertyValues().forEach(edmPropertyValue -> { + annotation.addProperty(TERM_KEY, edmPropertyValue.getProperty()); + annotation.addProperty(VALUE_KEY, edmPropertyValue.getValue().asConstant().getValueAsString()); + annotationsJsonArray.add(annotation); + }); + } + }); + } + } + } + }); + if (annotationsJsonArray.size() > 0) field.add(ANNOTATIONS_KEY, annotationsJsonArray); + } + return field; + } +} + diff --git a/src/main/java/org/reso/commander/jsonSerializers/LookupJson.java b/src/main/java/org/reso/commander/jsonSerializers/LookupJson.java new file mode 100644 index 00000000..8a85b556 --- /dev/null +++ b/src/main/java/org/reso/commander/jsonSerializers/LookupJson.java @@ -0,0 +1,101 @@ +package org.reso.commander.jsonSerializers; + +import com.google.gson.*; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.commons.api.edm.EdmEnumType; +import org.reso.commander.common.ODataUtils; + +import java.lang.reflect.Type; + +/** + * LookupJson uses a JSON payload with the following structure: + *

+ * { + * "lookupName": "org.reso.metadata.enums.CommunityFeatures", + * "lookupValue": "Stables", + * "type": "Edm.Int32" + * } + */ +public final class LookupJson implements JsonSerializer { + private static final Logger LOG = LogManager.getLogger(LookupJson.class); + + public static final String + LOOKUP_NAME_KEY = "lookupName", LOOKUP_VALUE_KEY = "lookupValue", + TYPE_KEY = "type", VALUE_KEY = "value", ANNOTATIONS_KEY = "annotations", + LOOKUPS_KEY = "lookups", TERM_KEY = "term"; + + EdmEnumType edmEnumType; + String memberName; + + public LookupJson(String memberName, EdmEnumType edmEnumType) { + this.edmEnumType = edmEnumType; + this.memberName = memberName; + } + + /** + * Metadata Pretty Printer + * @param metadataReport the metadata report + * @return a human-friendly string version of the metadata report + */ + public static String buildReportString(JsonElement metadataReport) { + StringBuilder reportBuilder = new StringBuilder(); + metadataReport.getAsJsonObject().get(LOOKUPS_KEY).getAsJsonArray().forEach(field -> { + reportBuilder.append("\nLookup Name: "); + reportBuilder.append(field.getAsJsonObject().get(LOOKUP_NAME_KEY)); + reportBuilder.append("\nLookup Value: "); + reportBuilder.append(field.getAsJsonObject().get(LOOKUP_VALUE_KEY)); + reportBuilder.append("\nType: "); + reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY)); + + if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) { + JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray(); + if (annotations != null && annotations.size() > 0) { + reportBuilder.append("\n"); + reportBuilder.append("Annotations:"); + annotations.forEach(annotation -> { + if (annotation.getAsJsonObject().get(TERM_KEY) != null) { + reportBuilder.append("\n\tTerm: "); + reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY)); + } + + if (annotation.getAsJsonObject().get(VALUE_KEY) != null) { + reportBuilder.append("\n\tValue: "); + reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY)); + } + }); + } + } + reportBuilder.append("\n"); + }); + return reportBuilder.toString(); + } + + @Override + public JsonElement serialize(LookupJson src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject membersJsonObject = new JsonObject(); + membersJsonObject.addProperty(LOOKUP_NAME_KEY, src.edmEnumType.getFullQualifiedName().toString()); + membersJsonObject.addProperty(LOOKUP_VALUE_KEY, src.memberName); + membersJsonObject.addProperty(TYPE_KEY, src.edmEnumType.getUnderlyingType().getFullQualifiedName().getFullQualifiedNameAsString()); + + if (src.edmEnumType.getMember(memberName).getAnnotations().size() > 0) { + JsonArray annotations = new JsonArray(); + src.edmEnumType.getMember(memberName).getAnnotations().forEach(edmAnnotation -> { + JsonObject annotation = new JsonObject(); + if (edmAnnotation.getTerm() != null) { + annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString()); + } else { + ODataUtils.SneakyAnnotationReader sneakyAnnotationReader = new ODataUtils.SneakyAnnotationReader(edmAnnotation); + annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm()); + } + + if (edmAnnotation.getExpression() != null) { + annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString()); + } + annotations.add(annotation); + }); + membersJsonObject.add(ANNOTATIONS_KEY, annotations); + } + return membersJsonObject; + } +} diff --git 
a/src/main/java/org/reso/commander/jsonSerializers/MetadataReport.java b/src/main/java/org/reso/commander/jsonSerializers/MetadataReport.java new file mode 100644 index 00000000..45466442 --- /dev/null +++ b/src/main/java/org/reso/commander/jsonSerializers/MetadataReport.java @@ -0,0 +1,83 @@ +package org.reso.commander.jsonSerializers; + +import com.google.gson.*; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.olingo.commons.api.edm.*; +import org.reso.commander.common.Utils; + +import java.lang.reflect.Type; +import java.util.Date; + +import static org.reso.commander.Commander.REPORT_DIVIDER; +import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage; + +public class MetadataReport implements JsonSerializer { + private static final Logger LOG = LogManager.getLogger(MetadataReport.class); + + private Edm metadata; + + private MetadataReport() { + //private default constructor + } + + public MetadataReport(Edm metadata) { + this.metadata = metadata; + } + + @Override + public String toString() { + StringBuilder reportBuilder = new StringBuilder(); + + reportBuilder + .append("\n\n" + REPORT_DIVIDER) + .append("\nRESO Metadata Report") + .append("\n").append(new Date()) + .append("\n" + REPORT_DIVIDER); + + JsonElement metadataReport = serialize(this, MetadataReport.class, null); + reportBuilder.append(FieldJson.buildReportString(metadataReport)); + reportBuilder.append(LookupJson.buildReportString(metadataReport)); + + return reportBuilder.toString(); + } + + @Override + public JsonElement serialize(MetadataReport src, Type typeOfSrc, JsonSerializationContext context) { + final String + DESCRIPTION_KEY = "description", DESCRIPTION = "RESO Data Dictionary Metadata Report", + VERSION_KEY = "version", VERSION = "1.7", + GENERATED_ON_KEY = "generatedOn", + FIELDS_KEY = "fields", + LOOKUPS_KEY = "lookups"; + + JsonArray fields = new JsonArray(); + JsonArray lookups = new JsonArray(); + + src.metadata.getSchemas().forEach(edmSchema -> { + //serialize entities (resources) and members (fields) + edmSchema.getEntityTypes().forEach(edmEntityType -> { + edmEntityType.getPropertyNames().forEach(propertyName -> { + FieldJson fieldJson = new FieldJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName)); + fields.add(fieldJson.serialize(fieldJson, FieldJson.class, null)); + }); + }); + + //serialize enum types + edmSchema.getEnumTypes().forEach(edmEnumType -> { + edmEnumType.getMemberNames().forEach(memberName -> { + LookupJson lookupJson = new LookupJson(memberName, edmEnumType); + lookups.add(lookupJson.serialize(lookupJson, LookupJson.class, null)); + }); + }); + }); + + JsonObject metadataReport = new JsonObject(); + metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION); + metadataReport.addProperty(VERSION_KEY, VERSION); + metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp()); + metadataReport.add(FIELDS_KEY, fields); + metadataReport.add(LOOKUPS_KEY, lookups); + return metadataReport; + } +} diff --git a/src/main/java/org/reso/models/LookupResourceItem.java b/src/main/java/org/reso/models/LookupResourceItem.java new file mode 100644 index 00000000..1e8ed859 --- /dev/null +++ b/src/main/java/org/reso/models/LookupResourceItem.java @@ -0,0 +1,46 @@ +package org.reso.models; + +import java.util.Date; + +public class LookupResourceItem { + private final String lookupKey; + private final String lookupName; + private final String lookupValue; + private final String lookupStandardName; + private final 
String legacyODataValue; + private final Date modificationTimestamp; + + public LookupResourceItem(String lookupKey, String lookupName, String lookupValue, + String lookupStandardName, String legacyODataValue, Date modificationTimestamp) { + this.lookupKey = lookupKey; + this.lookupName = lookupName; + this.lookupValue = lookupValue; + this.lookupStandardName = lookupStandardName; + this.legacyODataValue = legacyODataValue; + this.modificationTimestamp = modificationTimestamp; + } + + public String getLookupKey() { + return lookupKey; + } + + public String getLookupName() { + return lookupName; + } + + public String getLookupValue() { + return lookupValue; + } + + public String getLookupStandardName() { + return lookupStandardName; + } + + public String getLegacyODataValue() { + return legacyODataValue; + } + + public Date getModificationTimestamp() { + return modificationTimestamp; + } +} \ No newline at end of file diff --git a/src/main/java/org/reso/models/MetadataReport.java b/src/main/java/org/reso/models/MetadataReport.java deleted file mode 100644 index 2b2d8c05..00000000 --- a/src/main/java/org/reso/models/MetadataReport.java +++ /dev/null @@ -1,336 +0,0 @@ -package org.reso.models; - -import com.google.gson.*; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation; -import org.apache.olingo.commons.api.edm.*; -import org.apache.olingo.commons.core.edm.EdmAnnotationImpl; -import org.reso.commander.common.Utils; - -import java.lang.reflect.Field; -import java.lang.reflect.Type; -import java.util.Date; -import java.util.List; - -import static org.reso.commander.Commander.REPORT_DIVIDER; -import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage; - -public class MetadataReport implements JsonSerializer { - private static final Logger LOG = LogManager.getLogger(MetadataReport.class); - - private Edm metadata; - - private MetadataReport() { - //private default constructor - } - - public MetadataReport(Edm metadata) { - this.metadata = metadata; - } - - @Override - public String toString() { - StringBuilder reportBuilder = new StringBuilder(); - - reportBuilder - .append("\n\n" + REPORT_DIVIDER) - .append("\nRESO Metadata Report") - .append("\n").append(new Date()) - .append("\n" + REPORT_DIVIDER); - - JsonElement metadataReport = serialize(this, MetadataReport.class, null); - reportBuilder.append(FieldJson.buildReportString(metadataReport)); - reportBuilder.append(LookupJson.buildReportString(metadataReport)); - - return reportBuilder.toString(); - } - - /** - * FieldJson uses a JSON payload with the following structure: - *

- * { - * "resourceName": "Property", - * "fieldName": "AboveGradeFinishedArea", - * "type": "Edm.Decimal" - * } - */ - private static final class FieldJson implements JsonSerializer { - static final String - RESOURCE_NAME_KEY = "resourceName", - FIELD_NAME_KEY = "fieldName", - NULLABLE_KEY = "nullable", - MAX_LENGTH_KEY = "maxLength", - PRECISION_KEY = "precision", - SCALE_KEY = "scale", - IS_COLLECTION_KEY = "isCollection", - DEFAULT_VALUE_KEY = "defaultValue", - UNICODE_KEY = "unicode", - TYPE_KEY = "type", - TERM_KEY = "term", - VALUE_KEY = "value", - ANNOTATIONS_KEY = "annotations", - FIELDS_KEY = "fields"; - - String resourceName; - EdmElement edmElement; - - public FieldJson(String resourceName, EdmElement edmElement) { - this.resourceName = resourceName; - this.edmElement = edmElement; - } - - public static String buildReportString(JsonElement metadataReport) { - StringBuilder reportBuilder = new StringBuilder(); - metadataReport.getAsJsonObject().get(FIELDS_KEY).getAsJsonArray().forEach(field -> { - reportBuilder.append("\nResource: "); - reportBuilder.append(field.getAsJsonObject().get(RESOURCE_NAME_KEY)); - reportBuilder.append("\nField: "); - reportBuilder.append(field.getAsJsonObject().get(FIELD_NAME_KEY)); - reportBuilder.append("\nType: "); - reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY)); - - if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) { - JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray(); - if (annotations != null && annotations.size() > 0) { - reportBuilder.append("\n"); - reportBuilder.append("Annotations:"); - annotations.forEach(annotation -> { - if (annotation.getAsJsonObject().get(TERM_KEY) != null) { - reportBuilder.append("\n\tTerm: "); - reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY)); - } - - if (annotation.getAsJsonObject().get(VALUE_KEY) != null) { - reportBuilder.append("\n\tValue: "); - reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY)); - } - }); - } - } - reportBuilder.append("\n"); - }); - return reportBuilder.toString(); - } - - @Override - public JsonElement serialize(FieldJson src, Type typeOfSrc, JsonSerializationContext context) { - JsonObject field = new JsonObject(); - - - field.addProperty(RESOURCE_NAME_KEY, src.resourceName); - field.addProperty(FIELD_NAME_KEY, src.edmElement.getName()); - - String typeName = null; - try { - typeName = src.edmElement.getType().getFullQualifiedName().getFullQualifiedNameAsString(); - field.addProperty(TYPE_KEY, typeName); - } catch (Exception ex) { - LOG.error(getDefaultErrorMessage("Field Name:", src.edmElement.getName(), ex.toString())); - field.addProperty(TYPE_KEY, "UNDEFINED"); - } - - field.addProperty(NULLABLE_KEY, ((EdmProperty) src.edmElement).isNullable()); - field.addProperty(MAX_LENGTH_KEY, ((EdmProperty) src.edmElement).getMaxLength()); - field.addProperty(SCALE_KEY, ((EdmProperty) src.edmElement).getScale()); - field.addProperty(PRECISION_KEY, ((EdmProperty) src.edmElement).getPrecision()); - field.addProperty(DEFAULT_VALUE_KEY, ((EdmProperty) src.edmElement).getDefaultValue()); - field.addProperty(IS_COLLECTION_KEY, src.edmElement.isCollection()); - field.addProperty(UNICODE_KEY, ((EdmProperty) src.edmElement).isUnicode()); - - //TODO: report issue to Apache - // Can only get the annotation term using ((ClientCsdlAnnotation) ((EdmAnnotationImpl)edmAnnotation).annotatable).term - // which a private member and cannot be accessed - List annotations = ((EdmProperty) src.edmElement).getAnnotations(); - if 
(annotations != null && annotations.size() > 0) { - JsonArray annotationsJsonArray = new JsonArray(); - annotations.forEach(edmAnnotation -> { - if (edmAnnotation.getExpression() != null) { - if (edmAnnotation.getExpression().isConstant()) { - JsonObject annotation = new JsonObject(); - if (edmAnnotation.getTerm() != null) { - annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString()); - } else { - SneakyAnnotationReader sneakyAnnotationReader = new SneakyAnnotationReader(edmAnnotation); - annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm()); - } - annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString()); - annotationsJsonArray.add(annotation); - } else if (edmAnnotation.getExpression().isDynamic()) { - if (edmAnnotation.getExpression().asDynamic().isCollection()) { - edmAnnotation.getExpression().asDynamic().asCollection().getItems().forEach(edmExpression -> { - //OData Allowed Values come across as Records, in which case their key is "Value" - if (edmExpression.asDynamic().isRecord()) { - JsonObject annotation = new JsonObject(); - edmExpression.asDynamic().asRecord().getPropertyValues().forEach(edmPropertyValue -> { - annotation.addProperty(TERM_KEY, edmPropertyValue.getProperty()); - annotation.addProperty(VALUE_KEY, edmPropertyValue.getValue().asConstant().getValueAsString()); - annotationsJsonArray.add(annotation); - }); - } - }); - } - } - } - }); - if (annotationsJsonArray.size() > 0) field.add(ANNOTATIONS_KEY, annotationsJsonArray); - } - return field; - } - } - - static class SneakyAnnotationReader { - Class object; - Field field; - EdmAnnotationImpl edmAnnotationImpl; - ClientCsdlAnnotation clientCsdlAnnotation; - - public SneakyAnnotationReader(EdmAnnotation edmAnnotation) { - try { - edmAnnotationImpl = ((EdmAnnotationImpl) edmAnnotation); - - // create an object of the class named Class - object = edmAnnotationImpl.getClass(); - - // access the private variable - field = object.getDeclaredField("annotation"); - // make private field accessible - field.setAccessible(true); - - clientCsdlAnnotation = (ClientCsdlAnnotation) field.get(edmAnnotationImpl); - - } catch (Exception ex) { - LOG.error(ex); - ex.printStackTrace(); - } - } - - public String getTerm() { - return clientCsdlAnnotation.getTerm(); - } - } - - /** - * LookupJson uses a JSON payload with the following structure: - *

- * { - * "lookupName": "org.reso.metadata.enums.CommunityFeatures", - * "lookupValue": "Stables", - * "type": "Edm.Int32" - * } - */ - private static final class LookupJson implements JsonSerializer { - static final String - LOOKUP_NAME_KEY = "lookupName", LOOKUP_VALUE_KEY = "lookupValue", - TYPE_KEY = "type", VALUE_KEY = "value", ANNOTATIONS_KEY = "annotations", - LOOKUPS_KEY = "lookups"; - - EdmEnumType edmEnumType; - String memberName; - - public LookupJson(String memberName, EdmEnumType edmEnumType) { - this.edmEnumType = edmEnumType; - this.memberName = memberName; - } - - public static String buildReportString(JsonElement metadataReport) { - StringBuilder reportBuilder = new StringBuilder(); - metadataReport.getAsJsonObject().get(LOOKUPS_KEY).getAsJsonArray().forEach(field -> { - reportBuilder.append("\nLookup Name: "); - reportBuilder.append(field.getAsJsonObject().get(LOOKUP_NAME_KEY)); - reportBuilder.append("\nLookup Value: "); - reportBuilder.append(field.getAsJsonObject().get(LOOKUP_VALUE_KEY)); - reportBuilder.append("\nType: "); - reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY)); - - if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) { - JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray(); - if (annotations != null && annotations.size() > 0) { - reportBuilder.append("\n"); - reportBuilder.append("Annotations:"); - annotations.forEach(annotation -> { - if (annotation.getAsJsonObject().get(FieldJson.TERM_KEY) != null) { - reportBuilder.append("\n\tTerm: "); - reportBuilder.append(annotation.getAsJsonObject().get(FieldJson.TERM_KEY)); - } - - if (annotation.getAsJsonObject().get(VALUE_KEY) != null) { - reportBuilder.append("\n\tValue: "); - reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY)); - } - }); - } - } - reportBuilder.append("\n"); - }); - return reportBuilder.toString(); - } - - @Override - public JsonElement serialize(LookupJson src, Type typeOfSrc, JsonSerializationContext context) { - JsonObject membersJsonObject = new JsonObject(); - membersJsonObject.addProperty(LOOKUP_NAME_KEY, src.edmEnumType.getFullQualifiedName().toString()); - membersJsonObject.addProperty(LOOKUP_VALUE_KEY, src.memberName); - membersJsonObject.addProperty(TYPE_KEY, src.edmEnumType.getUnderlyingType().getFullQualifiedName().getFullQualifiedNameAsString()); - - if (src.edmEnumType.getMember(memberName).getAnnotations().size() > 0) { - JsonArray annotations = new JsonArray(); - src.edmEnumType.getMember(memberName).getAnnotations().forEach(edmAnnotation -> { - JsonObject annotation = new JsonObject(); - if (edmAnnotation.getTerm() != null) { - annotation.addProperty(FieldJson.TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString()); - } else { - SneakyAnnotationReader sneakyAnnotationReader = new SneakyAnnotationReader(edmAnnotation); - annotation.addProperty(FieldJson.TERM_KEY, sneakyAnnotationReader.getTerm()); - } - - if (edmAnnotation.getExpression() != null) { - annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString()); - } - annotations.add(annotation); - }); - membersJsonObject.add(ANNOTATIONS_KEY, annotations); - } - return membersJsonObject; - } - } - - @Override - public JsonElement serialize(MetadataReport src, Type typeOfSrc, JsonSerializationContext context) { - final String - DESCRIPTION_KEY = "description", DESCRIPTION = "RESO Data Dictionary Metadata Report", - VERSION_KEY = "version", VERSION = "1.7", - GENERATED_ON_KEY = "generatedOn", - 
FIELDS_KEY = "fields", - LOOKUPS_KEY = "lookups"; - - JsonArray fields = new JsonArray(); - JsonArray lookups = new JsonArray(); - - src.metadata.getSchemas().forEach(edmSchema -> { - //serialize entities (resources) and members (fields) - edmSchema.getEntityTypes().forEach(edmEntityType -> { - edmEntityType.getPropertyNames().forEach(propertyName -> { - FieldJson fieldJson = new FieldJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName)); - fields.add(fieldJson.serialize(fieldJson, FieldJson.class, null)); - }); - }); - - //serialize enum types - edmSchema.getEnumTypes().forEach(edmEnumType -> { - edmEnumType.getMemberNames().forEach(memberName -> { - LookupJson lookupJson = new LookupJson(memberName, edmEnumType); - lookups.add(lookupJson.serialize(lookupJson, LookupJson.class, null)); - }); - }); - }); - - JsonObject metadataReport = new JsonObject(); - metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION); - metadataReport.addProperty(VERSION_KEY, VERSION); - metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp()); - metadataReport.add(FIELDS_KEY, fields); - metadataReport.add(LOOKUPS_KEY, lookups); - return metadataReport; - } -} diff --git a/src/main/java/org/reso/models/ReferenceStandardField.java b/src/main/java/org/reso/models/ReferenceStandardField.java index 7992ccf1..f37dfb35 100644 --- a/src/main/java/org/reso/models/ReferenceStandardField.java +++ b/src/main/java/org/reso/models/ReferenceStandardField.java @@ -102,8 +102,14 @@ public String getLookup() { return lookup; } - public String getLookupStandardName() { - return getLookup().replace("Lookups", "").trim(); + public String getLookupName() { + String lookupName = getLookup() + .replace("", "") + .replace("Lookups", "").trim(); + + if (lookupName.length() == 0) return null; + + return lookupName; } public String getCollection() { diff --git a/src/test/java/org/reso/commander/test/features/test-web-api-core-test-container.feature b/src/test/java/org/reso/commander/test/features/test-web-api-core-test-container.feature index 17ba9610..f30ede7a 100644 --- a/src/test/java/org/reso/commander/test/features/test-web-api-core-test-container.feature +++ b/src/test/java/org/reso/commander/test/features/test-web-api-core-test-container.feature @@ -1,7 +1,7 @@ Feature: Web API Container Tests Background: - Given a Web API test container was created using the RESOScript "mock.web-api-server.core.1.0.2.resoscript" + Given a Web API test container was created using the RESOScript "mock.web-api-server.core.2.0.0.resoscript" And a Commander instance exists within the test container #################################### diff --git a/src/test/resources/mock.web-api-server.core.1.0.2.resoscript b/src/test/resources/mock.web-api-server.core.2.0.0.resoscript similarity index 97% rename from src/test/resources/mock.web-api-server.core.1.0.2.resoscript rename to src/test/resources/mock.web-api-server.core.2.0.0.resoscript index a7d469cd..3107e17d 100644 --- a/src/test/resources/mock.web-api-server.core.1.0.2.resoscript +++ b/src/test/resources/mock.web-api-server.core.2.0.0.resoscript @@ -1,7 +1,7 @@
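Finally, a note on the ReferenceStandardField change earlier in this diff: getLookupStandardName() became getLookupName(), which strips the "Lookups" suffix from the Data Dictionary lookup column and returns null instead of an empty string when no lookup is defined. A standalone sketch of that transformation, with illustrative input values:

```java
public class LookupNameSketch {
  /** Mirrors the getLookupName() transformation added in ReferenceStandardField (sketch only). */
  static String toLookupName(String lookup) {
    final String lookupName = lookup.replace("Lookups", "").trim();
    return lookupName.length() == 0 ? null : lookupName;
  }

  public static void main(String[] args) {
    System.out.println(toLookupName("PropertyType Lookups")); // "PropertyType"
    System.out.println(toLookupName("Lookups"));              // null
  }
}
```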