Compare commits
16 Commits
main
...
issue-76-c
Author | SHA1 | Date |
---|---|---|
Joshua Darnell | c8d207c5a3 | |
Joshua Darnell | 313c57e9f7 | |
Joshua Darnell | 20dc65f77c | |
Joshua Darnell | 85924aafa5 | |
Joshua Darnell | 0dc848243b | |
Joshua Darnell | 8c0ae882f2 | |
Joshua Darnell | 790911f65f | |
Joshua Darnell | 02b3855c20 | |
Joshua Darnell | 03eff74438 | |
Joshua Darnell | d6d0799fb5 | |
Joshua Darnell | b6ed5308ea | |
Joshua Darnell | 554af35f17 | |
Joshua Darnell | 0ac742e7d1 | |
Joshua Darnell | dc93fbb9b6 | |
Joshua Darnell | 094fc33d19 | |
Joshua Darnell | cd9791f33a |
|
@ -1,42 +0,0 @@
|
|||
name: Commander Docker with Gradle
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ['release']
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-and-push-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}{% raw %}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
image: 'GradleDockerfile'
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
|
@ -6,5 +6,3 @@ build/
|
|||
.idea/
|
||||
*.log
|
||||
*.iml
|
||||
.run/
|
||||
.DS_Store
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
[submodule "odata-openapi"]
|
||||
path = odata-openapi
|
||||
url = https://github.com/oasis-tcs/odata-openapi.git
|
|
@ -0,0 +1,20 @@
|
|||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Commander Acceptance Tests" type="CucumberJavaRunConfigurationType" factoryName="Cucumber java">
|
||||
<option name="CUCUMBER_CORE_VERSION" value="4.5" />
|
||||
<option name="FILE_PATH" value="$PROJECT_DIR$/src/test/java/org/reso/commander/test/features" />
|
||||
<option name="GLUE" value="org.reso.commander.test.stepdefs" />
|
||||
<option name="MAIN_CLASS_NAME" value="io.cucumber.core.cli.Main" />
|
||||
<module name="web-api-commander.test" />
|
||||
<option name="PROGRAM_PARAMETERS" value=" --plugin org.jetbrains.plugins.cucumber.java.run.CucumberJvm4SMFormatter --strict" />
|
||||
<option name="SUGGESTED_NAME" value="Feature: web-api-server-1.0.2" />
|
||||
<extension name="coverage">
|
||||
<pattern>
|
||||
<option name="PATTERN" value="io.cucumber.core.cli.*" />
|
||||
<option name="ENABLED" value="true" />
|
||||
</pattern>
|
||||
</extension>
|
||||
<method v="2">
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
</component>
|
|
@ -0,0 +1,21 @@
|
|||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Feature: DataDictionary 1.7 - Local Metadata" type="CucumberJavaRunConfigurationType" factoryName="Cucumber java">
|
||||
<option name="CUCUMBER_CORE_VERSION" value="4.5" />
|
||||
<option name="FILE_PATH" value="$PROJECT_DIR$/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/" />
|
||||
<option name="GLUE" value="org.reso.certification.stepdefs" />
|
||||
<option name="MAIN_CLASS_NAME" value="io.cucumber.core.cli.Main" />
|
||||
<module name="web-api-commander.main" />
|
||||
<option name="PROGRAM_PARAMETERS" value=" --plugin org.jetbrains.plugins.cucumber.java.run.CucumberJvm4SMFormatter --strict" />
|
||||
<option name="SUGGESTED_NAME" value="Feature: web-api-server-1.0.2" />
|
||||
<option name="VM_PARAMETERS" value="-DpathToMetadata=$PROJECT_DIR$/src/main/resources/RESODataDictionary-1.7.xml -Dcucumber.filter.tags=""" />
|
||||
<extension name="coverage">
|
||||
<pattern>
|
||||
<option name="PATTERN" value="io.cucumber.core.cli.*" />
|
||||
<option name="ENABLED" value="true" />
|
||||
</pattern>
|
||||
</extension>
|
||||
<method v="2">
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
</component>
|
|
@ -1,15 +0,0 @@
|
|||
FROM alpine:3.17.2
|
||||
|
||||
RUN apk add --update bash ca-certificates openjdk8 openjdk8-jre-base nss git gradle && \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
RUN git clone --single-branch --branch main https://github.com/RESOStandards/web-api-commander.git
|
||||
|
||||
WORKDIR web-api-commander
|
||||
|
||||
ADD runCommander.sh /usr/local/bin/runCommander.sh
|
||||
|
||||
RUN ["chmod", "+x", "/usr/local/bin/runCommander.sh"]
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/runCommander.sh"]
|
||||
CMD ["certificationCommand", "certificationRequestId"]
|
|
@ -1,4 +1,4 @@
|
|||
FROM gradle:8.0.2-jdk8 AS builder
|
||||
FROM gradle:6.2.1-jdk8 AS builder
|
||||
|
||||
WORKDIR /home/gradle/project
|
||||
|
||||
|
@ -10,7 +10,7 @@ RUN ls
|
|||
|
||||
FROM alpine:latest
|
||||
|
||||
RUN apk add --update bash ca-certificates openjdk8-jre-base nss git && \
|
||||
RUN apk add --update bash ca-certificates openjdk8-jre-base nss && \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
COPY --from=builder /home/gradle/project/build/libs/web-api-commander.jar ./
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
FROM alpine:3.17.2
|
||||
|
||||
RUN apk add --update bash ca-certificates openjdk8-jre-base nss git gradle && \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
RUN git clone --single-branch --branch main https://github.com/RESOStandards/web-api-commander.git
|
||||
|
||||
WORKDIR web-api-commander
|
||||
|
||||
RUN gradle jar
|
||||
|
||||
ENTRYPOINT ["gradle"]
|
||||
CMD ["tasks"]
|
22
LICENSE
22
LICENSE
|
@ -1,5 +1,21 @@
|
|||
By downloading this resource, you agree to the RESO EULA.
|
||||
MIT License
|
||||
|
||||
https://www.reso.org/eula/
|
||||
Copyright (c) 2019 Joshua Darnell
|
||||
|
||||
Copyright (c) 2019 RESO (dev@reso.org)
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
|
701
README.md
701
README.md
|
@ -1,37 +1,688 @@
|
|||
|
||||
# RESO Commander
|
||||
# RESO Web API Commander
|
||||
|
||||
[![CodeFactor](https://www.codefactor.io/repository/github/resostandards/web-api-commander/badge)](https://www.codefactor.io/repository/github/resostandards/web-api-commander) ![Java CI with Gradle](https://github.com/RESOStandards/web-api-commander/workflows/Java%20CI%20with%20Gradle/badge.svg?branch=master)
|
||||
|
||||
The RESO Commander is an OData client Java library and command-line client,
|
||||
as well as an automated RESO Certification testing tool.
|
||||
The RESO Web API Commander is an OData client library and command-line client, as well as an automated RESO Certification testing tool.
|
||||
|
||||
# Getting Started
|
||||
To begin using the RESO Commander, choose one of the following topics:
|
||||
## Getting Started
|
||||
To begin using the Commander, choose one of the following topics:
|
||||
* [Command-line Web API Tools](#command-line-web-api-tools)
|
||||
* [RESO Certification](#reso-certification)
|
||||
* [Commander as a Web API client library](#using-the-commander-as-a-web-api-client-library)
|
||||
|
||||
## [RESO Certification](/doc/Certification.md)
|
||||
One of the Commander's primary uses is as an automated testing tool. This section
|
||||
explains how to run the following tests:
|
||||
|
||||
* Data Dictionary 1.7
|
||||
* Data Dictionary Availability Report
|
||||
* Web API Core 2.0.0
|
||||
# Command-line Web API Tools
|
||||
**Introduction**
|
||||
* [Java Requirements](#java-requirements)
|
||||
* [Display Help](#display-help)
|
||||
* [Authentication](#authentication)
|
||||
|
||||
## [Command-Line OData Web API Tools](/doc/CLI.md)
|
||||
The RESO Commander contains command line tools for working with OData Web APIs.
|
||||
This section covers how to validate metadata, make requests, and generate sample requests for testing.
|
||||
**Client Commands**
|
||||
* [Getting Metadata](#getting-metadata)
|
||||
* [Validating Metadata stored in an EDMX file](#validating-metadata-stored-in-an-edmx-file)
|
||||
* [Saving Results from a Given `uri`](#saving-results-from-a-given-uri)
|
||||
* [Displaying Queries for RESOScript Files](#displaying-queries-for-resoscript-files)
|
||||
* [Running RESOScript Files](#running-resoscript-files)
|
||||
|
||||
## [Web API Client Library](/doc/ODataClient.md)
|
||||
The RESO Commander offers a convenient wrapper around the Apache Olingo Java client for OData,
|
||||
with added support for OAuth2 Bearer Tokens and Client Credentials.
|
||||
|
||||
The client library can be used to fetch and validate metadata, fetch data, and perform other
|
||||
OData client tasks.
|
||||
**Additional Commands**
|
||||
* [Generating RESO Data Dictionary Acceptance Tests](#generating-reso-data-dictionary-acceptance-tests)
|
||||
* [Generating RESO Data Dictionary Reference Metadata](#generating-reso-data-dictionary-reference-metadata)
|
||||
* [Converting metadata to Open API 3 format](#converting-metadata-to-open-api-3-format)
|
||||
|
||||
## [Docker](/doc/Docker.md)
|
||||
For those who prefer Docker, both the Command Line and Automated Testing Tools can be
|
||||
run in a Docker container.
|
||||
## Java Requirements
|
||||
Your operating system probably already has a Java Runtime Environment (JRE) installed. This is all you need to run the Commander as a Web API Client.
|
||||
|
||||
## [Codegen](/doc/Codegen.md)
|
||||
The RESO Commander may also be used to generate reference XML Metadata, DDL, database seeds,
|
||||
automated acceptance tests, reference server models, and to convert XML Metadata to Open API 3 format.
|
||||
To check your version of Java, type the following in a command line environment:
|
||||
```
|
||||
$ java -version
|
||||
```
|
||||
If you have the Java SE Runtime Environment installed, the output will look similar to the following:
|
||||
```
|
||||
$ java -version
|
||||
Java version "1.8.x" (or a higher version)
|
||||
Java<TM> SE Runtime Environment ...
|
||||
```
|
||||
If you don't see something like this, you need to install the [Java SE](https://www.oracle.com/java/technologies/javase-jre8-downloads.html) runtime.
|
||||
|
||||
Once the Java SE Runtime is installed, you may [download the Commander JAR file](build/libs/web-api-commander.jar)
|
||||
|
||||
## Display Help
|
||||
|
||||
Assuming [you've downloaded `web-api-commander.jar`](build/libs/web-api-commander.jar) at this point, help is available from the command line by passing `--help` or just passing no arguments, as follows:
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar
|
||||
```
|
||||
|
||||
Doing so displays the following information:
|
||||
```
|
||||
usage: java -jar web-api-commander
|
||||
--bearerToken <b> Bearer token to be used with the
|
||||
request.
|
||||
--clientId <d> Client Id to be used with the request.
|
||||
--clientSecret <s>
|
||||
--contentType <t> Results format: JSON (default),
|
||||
JSON_NO_METADATA, JSON_FULL_METADATA,
|
||||
XML.
|
||||
--entityName <n> The name of the entity to fetch, e.g.
|
||||
Property.
|
||||
--generateDDAcceptanceTests Generates acceptance tests in the
|
||||
current directory.
|
||||
--generateMetadataReport Generates metadata report from given
|
||||
<inputFile>.
|
||||
--generateQueries Resolves queries in a given RESOScript
|
||||
<inputFile> and displays them in
|
||||
standard out.
|
||||
--generateReferenceDDL Generates reference DDL to create a
|
||||
RESO-compliant SQL database. Pass
|
||||
--useKeyNumeric to generate the DB
|
||||
using numeric keys.
|
||||
--generateReferenceEDMX Generates reference metadata in EDMX
|
||||
format.
|
||||
--generateResourceInfoModels Generates Java Models for the Web API
|
||||
Reference Server in the current
|
||||
directory.
|
||||
--getMetadata Fetches metadata from <serviceRoot>
|
||||
using <bearerToken> and saves results
|
||||
in <outputFile>.
|
||||
--help print help
|
||||
--inputFile <i> Path to input file.
|
||||
--outputFile <o> Path to output file.
|
||||
--runRESOScript Runs commands in RESOScript file given
|
||||
as <inputFile>.
|
||||
--saveGetRequest Performs GET from <requestURI> using
|
||||
the given <bearerToken> and saves
|
||||
output to <outputFile>.
|
||||
--serviceRoot <s> Service root URL on the host.
|
||||
--uri <u> URI for raw request. Use 'single
|
||||
quotes' to enclose.
|
||||
--useEdmEnabledClient present if an EdmEnabledClient should
|
||||
be used.
|
||||
--useKeyNumeric present if numeric keys are to be used
|
||||
for database DDL generation.
|
||||
--validateMetadata Validates previously-fetched metadata
|
||||
in the <inputFile> path.
|
||||
|
||||
```
|
||||
When using commands, if required arguments aren't provided, relevant feedback will be displayed in the terminal.
|
||||
|
||||
## Authentication
|
||||
The RESO Commander only supports passing OAuth2 "Bearer" tokens from the command line at this time. For those using OAuth2 Client Credentials, please see the section on _[Running RESOScript files](#running-resoscript-files)_.
|
||||
|
||||
|
||||
## Getting Metadata
|
||||
To get metadata from a given server, use the `--getMetadata` argument with the following
|
||||
options:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --getMetadata --serviceRoot https://api.server.com/serviceRoot --outputFile metadata.xml --bearerToken abc123
|
||||
```
|
||||
|
||||
where `serviceRoot` is the path to the _root_ of the OData WebAPI server.
|
||||
|
||||
Assuming everything goes well, metadata will be retrieved from the host
|
||||
and written to the provided `--outputFile`, and the following output will be displayed:
|
||||
```
|
||||
Requesting metadata from: https://api.server.com/serviceRoot/$metadata
|
||||
Metadata request succeeded.
|
||||
```
|
||||
|
||||
## Validating Metadata stored in an EDMX file
|
||||
Sometimes it's useful to validate a local OData XML Metadata (EDMX) file.
|
||||
|
||||
Since parsing EDMX is an incremental process, validation terminates _each time_ invalid items are encountered. Therefore, the workflow for correcting an EDMX document that contains errors would be to run the
|
||||
Commander repeatedly, fixing errors that are encountered along the way.
|
||||
|
||||
To validate metadata that's already been downloaded, call Commander with the following options,
|
||||
adjusting the `path/to/web-api-commander.jar` and `--inputFile` path for your environment accordingly:
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --validateMetadata --inputFile '/src/main/resources/RESODataDictionary-1.7.xml'
|
||||
```
|
||||
XML or OData validation errors will be displayed if any issues were found. If successful, the following message
|
||||
should appear:
|
||||
```
|
||||
Checking Metadata for validity...
|
||||
Valid Metadata!
|
||||
```
|
||||
|
||||
## Saving Results from a Given `uri`
|
||||
The `--saveGetRequest` action makes a request to a `--uri` using a given `--bearerToken`, and saves the response to the given `--outputFile`.
|
||||
|
||||
For example:
|
||||
```
|
||||
$ java -jar build/libs/web-api-commander.jar --saveGetRequest --uri 'https://api.server.com/OData/Property?$filter=ListPrice gt 100000&$top=100' --bearerToken abc123 --outputFile response.json
|
||||
```
|
||||
If the response is successful, it will be written to the specified file and the following will be displayed on the console:
|
||||
```
|
||||
JSON Data fetched from: https://api.server.com/OData/Property?$filter=ListPrice gt 100000&top=100"
|
||||
with response code: 200
|
||||
JSON Response saved to: response.json
|
||||
```
|
||||
Otherwise, errors will be displayed showing what went wrong during the request.
|
||||
|
||||
|
||||
## Displaying Queries for RESOScript Files
|
||||
A RESOScript file usually contains a server's service root and one or more Requests that can either
|
||||
be used in batch-format or can be used during testing.
|
||||
|
||||
To resolve all parameters and display the queries to be run with your RESOScript, use the following command:
|
||||
|
||||
```
|
||||
$ java -jar web-api-commander.jar --generateQueries --inputFile /path/to/your.resoscript
|
||||
```
|
||||
|
||||
This should display something similar to the following:
|
||||
|
||||
```
|
||||
==============================================================
|
||||
Web API Commander Starting... Press <ctrl+c> at any time to exit.
|
||||
==============================================================
|
||||
Displaying 44 Request(s)
|
||||
RESOScript: src/test/resources/mock.web-api-server.core.1.0.2.resoscript
|
||||
==============================================================
|
||||
|
||||
|
||||
===========================
|
||||
Request: #1
|
||||
===========================
|
||||
Request Id: metadata-validation
|
||||
Resolved URL: https://api.reso.org/OData/$metadata
|
||||
|
||||
|
||||
===========================
|
||||
Request: #2
|
||||
===========================
|
||||
Request Id: fetch-by-key
|
||||
Resolved URL: https://api.reso.org/OData/Property('12345')?$select=ListingKey
|
||||
|
||||
...
|
||||
```
|
||||
|
||||
## Running RESOScript Files
|
||||
The Web API Commander is able to run files written using RESO's XML-based scripting format, also known as a RESOScript.
|
||||
|
||||
In order to run an RESOScript file, use a command similar to the following:
|
||||
|
||||
```
|
||||
$ java -jar out/web-api-commander.jar --runRESOScript --inputFile /path/to/your/inputFile
|
||||
```
|
||||
|
||||
A results directory will be created from the RESOScript name and timestamp when it was run, and output will be shown as the requests are made.
|
||||
|
||||
Results will be saved to the filenames specified in the given RESOScript, and error files will be created when there are exceptions, with an ".ERROR" extension appended to them.
|
||||
|
||||
**RESOScript File Format**
|
||||
For examples of files using the RESOScript format, see:
|
||||
* [Data Dictionary 1.7 RESOScript Template](sample-data-dictionary.1.7.0.resoscript)
|
||||
* [Web API Core 1.0.2 RESOScript Template](sample-web-api-server.core.1.0.2.resoscript)
|
||||
|
||||
|
||||
## Generating RESO Data Dictionary Acceptance Tests
|
||||
The RESO Commander can be used to generate Data Dictionary acceptance tests from the currently approved [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The Commander project's copy of the sheet needs to be updated with a copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) prior to generating reference metadata.
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateDDAcceptanceTests
|
||||
```
|
||||
New Cucumber BDD acceptance tests will be generated and placed in a timestamped directory relative to your current path.
|
||||
|
||||
To update the current tests, copy the newly generated ones into the [Data Dictionary BDD `.features` directory](src/main/java/org/reso/certification/features/data-dictionary/v1-7-0), run the `./gradlew build` task, and if everything works as expected, commit the newly generated tests.
|
||||
|
||||
## Generating RESO Web API Reference Server Data Models
|
||||
The RESO Commander can be used to generate data models for the Web API Reference server from the currently approved [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The Commander project's copy of the sheet needs to be updated with a copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) prior to generating reference metadata.
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateResourceInfoModels
|
||||
```
|
||||
New ResourceInfo Models for the Web API Reference Server will be generated and placed in a timestamped directory relative to your current path.
|
||||
|
||||
|
||||
## Generating RESO Data Dictionary Reference Metadata
|
||||
In addition to generating DD acceptance tests, the RESO Commander can generate reference metadata based on the current reference [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceEDMX --inputFile=src/main/resources/RESODataDictionary-1.7.xlsx
|
||||
```
|
||||
In order to update the Commander's version of the reference metadata, update the local copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) _prior to_ generating metadata, replace [the local copy](src/main/resources/RESODataDictionary-1.7.xml), and try running automated acceptance tests with `./gradlew build`.
|
||||
|
||||
## Generating RESO Data Dictionary 1.7 Reference DDL
|
||||
|
||||
There is a command that can generate reference DDL for creating SQL databases using either key or key numeric values.
|
||||
|
||||
### String Keys
|
||||
|
||||
Issuing the following will print DDL in the console using String keys as the primary key:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceDDL
|
||||
```
|
||||
|
||||
This means that linked lookups will also use string keys since they'll be linked by a related table that uses string keys.
|
||||
|
||||
There is a variable string key size in the DDLProcessor (currently 64 characters in length).
|
||||
|
||||
Numeric keys are still present in this case, they're just not the primary key.
|
||||
|
||||
|
||||
### Numeric Keys
|
||||
|
||||
Issuing the following will print DDL in the console using Numeric keys as the primary key:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceDDL --useKeyNumeric
|
||||
```
|
||||
|
||||
In this case, `BIGINT` values will be used for all related lookup values.
|
||||
|
||||
### DDL TODO
|
||||
|
||||
The following items need to be added to the DDL generator still:
|
||||
- [ ] Foreign Key relationships.
|
||||
- [ ] Creation of Lookup resource.
|
||||
|
||||
|
||||
## Converting metadata to Open API 3 format
|
||||
See documentation regarding running the [nodejs-based tools in odata-openapi/lib/README.md](odata-openapi/lib/README.md).
|
||||
|
||||
---
|
||||
|
||||
# RESO Certification
|
||||
* [Java and the JDK](#java-and-the-jdk)
|
||||
* [Cloning Commander Repository](#cloning-commander-repository)
|
||||
* [Cucumber Feature Specifications](#cucumber-feature-specifications)
|
||||
* [Testing Environment](#testing-environment)
|
||||
* [Gradle Wrapper](#gradle-wrapper)
|
||||
* [Automated RESO Web API Core Testing](#automated-reso-web-api-core-testing)
|
||||
* [Automated RESO Data Dictionary Testing](#automated-reso-data-dictionary-testing)
|
||||
|
||||
|
||||
## Java and the JDK
|
||||
To run the Commander as an _automated testing tool_, the Java JDK must be installed. The Commander has been tested with JDK 1.8 and 10 at this point. Those using JDK 11+, please [report issues](https://github.com/RESOStandards/web-api-commander/issues) if they arise.
|
||||
|
||||
To see whether you have the JDK installed, type the following using your local command line environment:
|
||||
```
|
||||
$ java -version
|
||||
```
|
||||
If you have a Java JDK installed, your output will look something like:
|
||||
```
|
||||
$ java -version
|
||||
openjdk version "1.8.0_275"
|
||||
OpenJDK Runtime Environment (build 1.8.0_275-8u275-b01-0ubuntu1~20.10-b01)
|
||||
OpenJDK 64-Bit Server VM (build 25.275-b01, mixed mode)
|
||||
```
|
||||
If you don't see something like this, you need to install the JDK:
|
||||
* [Open JDK is recommended](https://openjdk.java.net/install/index.html).
|
||||
* [Oracle's SE Development kit may also be used](https://www.oracle.com/java/technologies/javase/javase-jdk8-downloads.html), but there may be additional licensing terms to accept.
|
||||
|
||||
## Cloning Commander Repository
|
||||
The Commander may be run in automated testing mode using a terminal. Automated testing assumes that you have a Java 1.8+ JDK installed, as mentioned elsewhere in this [`README`](#java-and-the-jdk).
|
||||
|
||||
First, change into the directory you want to work in and clone the Commander repository.
|
||||
|
||||
You will need to have Git installed. Chances are you already do, to check, open a command line and type `git` and if it's present, it will print some info about the app. If not, [there are installation instructions here](https://git-scm.com/downloads).
|
||||
|
||||
##### MacOS or Linux
|
||||
```
|
||||
$ git clone https://github.com/RESOStandards/web-api-commander.git
|
||||
```
|
||||
|
||||
##### Windows
|
||||
```
|
||||
C:\> git clone https://github.com/RESOStandards/web-api-commander.git
|
||||
```
|
||||
|
||||
This will clone the repository into a directory called web-api-commander relative to whatever directory you're currently in, which also means you'll have a fresh copy of the latest code to execute.
|
||||
|
||||
To refresh the code after you have downloaded it, issue the command `$ git pull` in the root of the directory that was just created.
|
||||
|
||||
## Cucumber Feature Specifications
|
||||
|
||||
[Cucumber](https://cucumber.io) is being used to describe acceptance criteria in a higher-level DSL rather than encapsulating all of the test logic code. Cucumber's DSL is called [Gherkin](https://cucumber.io/docs/gherkin/) and essentially allows backing test code to be organized in a logical manner that makes sense to analysts as well as programmers.
|
||||
|
||||
## Testing Environment
|
||||
|
||||
Under the hood, [Gradle](https://gradle.org/) is being used for automation. It works across multiple platforms and is friendly with both Docker and Cucumber so that tests may be automated on CI/CD platforms such as Jenkins, Circle CI, Travis, or similar, and emit standard system codes during regression testing.
|
||||
|
||||
## Gradle Wrapper
|
||||
The [Gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) provides a convenient way to automatically download Gradle when running tests.
|
||||
|
||||
After you have cloned the repository, as shown in [a previous step](#cloning-commander-repository), change into the directory containing the source code from GitHub. Convenience methods have been provided for the various certification tasks.
|
||||
|
||||
## Gradle Tasks
|
||||
Once the Gradle Wrapper is set up, you should be able to run the `./gradlew tasks` command in from the root of the Commander source directory in a terminal window and see the list of available tasks.
|
||||
```
|
||||
$ ./gradlew tasks
|
||||
|
||||
> Task :tasks
|
||||
|
||||
------------------------------------------------------------
|
||||
Tasks runnable from root project
|
||||
------------------------------------------------------------
|
||||
...
|
||||
```
|
||||
There are both _built-in tasks_ and _RESO tasks_.
|
||||
The following section is what's of interest here:
|
||||
```
|
||||
RESO Certification tasks
|
||||
------------------------
|
||||
generateCertificationReport_DD_1_7 - Runs Data Dictionary 1.7 tests and creates a certification report
|
||||
RESOScript Example:
|
||||
./gradlew generateCertificationReport_DD_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -Dminimal=true -Dstrict=true
|
||||
Metadata File Example:
|
||||
./gradlew generateCertificationReport_DD_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml -Dminimal=true -Dstrict=true
|
||||
To enable strict mode, pass -Dstrict=true. All applicants MUST pass strict mode tests to be certified.
|
||||
|
||||
testDataDictionary_1_7 - Runs Data Dictionary 1.7 Automated Acceptance Tests and generates a "raw" report.
|
||||
RESOScript Example:
|
||||
./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true -Dcucumber.filter.tags=""
|
||||
Metadata File Example:
|
||||
./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml -Dcucumber.filter.tags=""
|
||||
To enable strict mode, pass -Dstrict=true. All applicants MUST pass strict mode tests to be certified.
|
||||
|
||||
testDataDictionaryReferenceMetadata_1_7 - Runs Data Dictionary tests against reference metadata
|
||||
|
||||
testWebApiServer_1_0_2_Core - Runs Web API Core 1.0.2 Automated Acceptance Tests.
|
||||
Example:
|
||||
$ ./gradlew testWebApiServer_1_0_2_Core -DpathToRESOScript=/path/to/web-api-core-1.0.2.resoscript -DshowResponses=true
|
||||
|
||||
Note: by default the Web API tests assume Collection(Edm.EnumType).
|
||||
Pass -DuseCollections=false if using OData IsFlags.
|
||||
```
|
||||
|
||||
## Automated RESO Web API Core Testing
|
||||
Automated Web API Core automated testing tools are currently in development. See [Issue 34](https://github.com/RESOStandards/web-api-commander/issues/34) for progress.
|
||||
|
||||
To use the automated RESO testing tools, you must have a [JDK installed](#java-and-the-jdk).
|
||||
|
||||
### Web API Core RESOScript Template
|
||||
To use the Commander for automated Web API Core testing, you need a RESOScript.
|
||||
|
||||
For Web API 1.0.2 Server Core Certification, use [this resoscript](sample-web-api-server.core.1.0.2.resoscript) as a template.
|
||||
|
||||
For more information regarding Parameters and Client Settings, see the [Web API Walkthrough](https://github.com/RESOStandards/web-api-commander/wiki/Walkthrough:-Automated-Web-API-Certification-Using-the-RESO-Commander#configuring-the-resoscript-file) (in-progress).
|
||||
|
||||
### Web API Cucumber Acceptance Tests
|
||||
The Cucumber BDD acceptance tests for Web API 1.0.2 Core certification are [here](https://github.com/RESOStandards/web-api-commander/blob/issue-37-data-dictionary-testing/src/main/java/org/reso/certification/features/web-api/web-api-server.core.1.0.2.feature). If you have any questions, please [send us an email](mailto:dev@reso.org).
|
||||
|
||||
### Gradle Tasks for Web API 1.0.2 Server Certification
|
||||
While you may use tags to filter tests as you choose, explained in the next section, it's convenient
|
||||
to be able to run a predefined set of tests Web API Core Certification.
|
||||
|
||||
These tasks will also produce reports in the local `build` directory, named according to which test you ran.
|
||||
|
||||
#### Core Certification
|
||||
|
||||
This will run the Core tests against the Web API 1.0.2 Server provided as `WebAPIURI` in your `web-api-server.core.1.0.2.resoscript` file.
|
||||
|
||||
**Note**: by default, the Commander uses `Collection(Edm.EnumType)` for multiple enumerations testing.
|
||||
Pass `-DuseCollections=false` if you are using `IsFlags="true"` instead.
|
||||
|
||||
##### MacOS or Linux
|
||||
```
|
||||
$ ./gradlew testWebApiServer_1_0_2_Core -DpathToRESOScript=/path/to/your.web-api-server.core.1.0.2.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
##### Windows
|
||||
```
|
||||
C:\path\to\web-api-commander> gradlew testWebApiServer_1_0_2_Core -DpathToRESOScript=C:\path\to\your.web-api-server.core.1.0.2.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
*Note: the first time you run these tasks, they will take some time as the environment must be configured and code is being compiled from the contents of the source directory downloaded in the previous step.
|
||||
|
||||
### Web API Program Output
|
||||
|
||||
A sample of the runtime terminal output follows:
|
||||
|
||||
```gherkin
|
||||
> Task :testWebApiServer_1_0_2_Core
|
||||
|
||||
@metadata-request @2.4.1
|
||||
Scenario: REQ-WA103-END3 - Request and Validate Server Metadata
|
||||
|
||||
Using RESOScript: ./web-api-server.core.1.0.2.resoscript
|
||||
Given a RESOScript file was provided
|
||||
|
||||
RESOScript loaded successfully!
|
||||
And Client Settings and Parameters were read from the file
|
||||
|
||||
Bearer token loaded... first 4 characters: test
|
||||
Service root is: https://api.yourserver.com/OData
|
||||
And a test container was successfully created from the given RESOScript
|
||||
|
||||
Authentication Type: authorization_code
|
||||
And the test container uses an authorization_code or client_credentials for authentication
|
||||
|
||||
Requesting XML Metadata from service root at: https://api.yourserver.com/OData
|
||||
When XML Metadata are requested from the service root in "ClientSettings_WebAPIURI"
|
||||
|
||||
Asserted Response Code: 200, Server Response Code: 200
|
||||
Then the server responds with a status code of 200
|
||||
|
||||
Reported OData-Version header value: '4.0'
|
||||
And the server has an OData-Version header value of "4.0" or "4.01"
|
||||
|
||||
Validating XML Metadata response to ensure it's valid XML and matches OASIS OData XSDs...
|
||||
See: https://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/schemas/
|
||||
XMLMetadata string is valid XML!
|
||||
And the XML Metadata response is valid XML
|
||||
|
||||
Validating XML Metadata response to ensure it's valid XML and matches OASIS OData XSDs...
|
||||
See: https://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/schemas/
|
||||
XMLMetadata string is valid XML!
|
||||
XML Metadata is valid!
|
||||
Edm Metadata is valid!
|
||||
And the XML Metadata returned by the server are valid
|
||||
|
||||
And the XML Metadata returned by the server contains Edm metadata
|
||||
|
||||
And the Edm metadata returned by the server are valid
|
||||
|
||||
Found Default Entity Container: 'Default'
|
||||
And the metadata contains a valid service document
|
||||
|
||||
Resource Name: Property
|
||||
Allowed Resources: Property, Member, Office, Contacts, ContactListings, HistoryTransactional, InternetTracking, Media, OpenHouse, OUID, Prospecting, Queue, Rules, SavedSearch, Showing, Teams
|
||||
And the given "Parameter_EndpointResource" resource exists within "Parameter_DD17_WellKnownResourceList"
|
||||
|
||||
Found EntityContainer for the given resource: 'Property'
|
||||
And the metadata contains the "Parameter_EndpointResource" resource
|
||||
|
||||
Searching the default entity container for one of the following Standard Resources: Property, Member, Office, Media
|
||||
Standard Resource Names requirement met!
|
||||
And the metadata contains at least one resource from "Parameter_WebAPI102_RequiredResourceList"
|
||||
|
||||
|
||||
1 Scenarios (1 passed)
|
||||
15 Steps (15 passed)
|
||||
0m4.093s
|
||||
```
|
||||
|
||||
Detailed information will be added to a local `commander.log` file at runtime.
|
||||
|
||||
---
|
||||
## Automated RESO Data Dictionary Testing
|
||||
The Commander provides automated Data Dictionary 1.7 acceptance testing for RESO Certification. The DD 1.7 testing specification is available [here](https://docs.google.com/document/d/15DFf9kDX_mlGCJVOch2fztl8W5h-yd18N0_03Sb4HwM/edit?usp=sharing).
|
||||
|
||||
* [Data Dictionary RESOScript Template](#data-dictionary-resoscript-template)
|
||||
* [Data Dictionary Acceptance Tests](#data-dictionary-acceptance-tests)
|
||||
* [Gradle Tasks for Data Dictionary Certification](#gradle-tasks-for-data-dictionary-certification)
|
||||
* [Test Data Dictionary](#test-data-dictionary)
|
||||
* [Generate Data Dictionary Certification Report](#generate-data-dictionary-certification-report)
|
||||
|
||||
To use the RESO Commander for Data Dictionary testing, you must have the JDK installed and a local copy of the Commander repository. See [RESO Certification](#reso-certification) before proceeding.
|
||||
|
||||
### Data Dictionary RESOScript Template
|
||||
To use the Commander for automated Data Dictionary testing, you need a RESOScript.
|
||||
|
||||
For Data Dictionary 1.7 Certification, use [this resoscript](sample-data-dictionary.1.7.0.resoscript) as a template.
|
||||
|
||||
### Data Dictionary Acceptance Tests
|
||||
RESO Data Dictionary Certification is driven off of the official Data Dictionary spreadsheet for each version of the dictionary, [currently DD 1.7](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing).
|
||||
|
||||
Cucumber BDD acceptance tests are [automatically generated](#generating-reso-data-dictionary-acceptance-tests) from the [local copy of the approved spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The generated Data Dictionary 1.7 Cucumber BDD tests are [located in this directory](https://github.com/RESOStandards/web-api-commander/tree/issue-37-data-dictionary-testing/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0). See the [property.feature file](src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/property.feature), for example, for the RESO Property Resource acceptance tests.
|
||||
|
||||
If you have any questions, please [send us an email](mailto:dev@reso.org).
|
||||
|
||||
### Gradle Tasks for Data Dictionary Certification
|
||||
There are predefined tasks for automated RESO Data Dictionary Certification using the Commander. These can be displayed using [Gradle Tasks](#gradle-tasks) as well.
|
||||
|
||||
* [Test Data Dictionary 1.7](#test-data-dictionary)
|
||||
* [Generate Data Dictionary 1.7 Certification Report](#generate-data-dictionary-certification-report)
|
||||
|
||||
_Note: the first time you run these tasks, they will take some time as the environment must be configured and code is being compiled from the contents of the source directory downloaded in the previous step._
|
||||
|
||||
#### Test Data Dictionary
|
||||
This task tests for Data Dictionary compliance and generates a raw report in a timestamped local directory.
|
||||
|
||||
There are two ways to run automated testing to check for RESO compliant Web API metadata:
|
||||
* using a local metadata file
|
||||
* using a RESOScript file to fetch metadata from a given server
|
||||
|
||||
RESOScript files and the use of strict mode are required for RESO Certification. In both cases, metadata are validated and then processed for RESO compliance.
|
||||
|
||||
##### Data Dictionary Testing using Local Metadata
|
||||
The Commander allows for a local metadata file to be specified. Not only is this used for internal acceptance testing, but is useful for developers to troubleshoot metadata locally while working on compliance.
|
||||
|
||||
The Gradle task to validate local metadata can be run using the following command:
|
||||
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml
|
||||
```
|
||||
You may also pass a `-Dstrict=true` flag to see whether the given metadata file would pass Certification.
|
||||
|
||||
A raw report will be generated in a timestamped directory, and a `commander.log` will be generated during runtime.
|
||||
|
||||
##### Data Dictionary Testing using a Data Dictionary RESOScript
|
||||
During Certification, metadata are retrieved directly from an applicant's Web API server using either OAuth2 Bearer Tokens or Client Credentials. Either authentication option is currently available for RESO Certification, depending on configuration, and the applicant will provide working RESOScripts when they apply for certification.
|
||||
|
||||
An example Data Dictionary RESOScript template can be found [here](sample-data-dictionary.1.7.0.resoscript).
|
||||
|
||||
Once a RESOScript file has been created, it may be used with the following command:
|
||||
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true
|
||||
```
|
||||
You may also pass a `-Dstrict=true` flag to see whether the given metadata file would pass Certification.
|
||||
|
||||
A raw report will be generated in a timestamped directory, and a `commander.log` will be generated during runtime.
|
||||
|
||||
#### Generate Data Dictionary Certification Report
|
||||
This task tests for Data Dictionary compliance and generates both a raw report and a RESO Certification report in a timestamped directory.
|
||||
|
||||
Similar to the [Test Data Dictionary 1.7](#test-data-dictionary) task, the report generator can be run for both local metadata or used with a RESOScript.
|
||||
|
||||
For the purposes of Certification, a Certification Report MUST be generated using a RESOScript using strict mode. But it's useful to be able to produce certification reports with any local files as well.
|
||||
|
||||
##### Certification Reports using Local Metadata
|
||||
A RESO Certification report can be generated for local metadata by using the following command:
|
||||
```
|
||||
$ ./gradlew generateCertificationReport_DD_1_7 -DpathToMetadata=src/main/resources/RESODataDictionary-1.7.xml -Dminimal=true -Dstrict=true --continue
|
||||
```
|
||||
Note the use of the `--continue` argument.
|
||||
|
||||
You may remove the `-Dstrict=true` flag, but it will be required for RESO Certification.
|
||||
|
||||
A "pretty" Certification report will be generated in a timestamped directory in addition to the normal raw report.
|
||||
|
||||
##### Certification Reports using a Data Dictionary RESOScript
|
||||
A RESO Certification report can be generated using a RESOScript by using the following command:
|
||||
```
|
||||
$ ./gradlew generateCertificationReport_DD_1_7 -DpathToRESOScript=/path/to/dd1.7.resoscript -Dminimal=true -Dstrict=true --continue
|
||||
```
|
||||
You may remove the `-Dstrict=true` flag, but it will be required for RESO Certification.
|
||||
|
||||
A "pretty" Certification report will be generated in a timestamped directory in addition to the normal raw report.
|
||||
|
||||
### Data Dictionary Testing Output
|
||||
To see examples of Data Dictionary testing output, you may use the `./gradlew testDataDictionaryReferenceMetadata_1_7` command to run the Data Dictionary acceptance tests on the RESO reference metadata.
|
||||
|
||||
There is additional documentation about how Data Dictionary testing works, including sample output, in the [RESO Data Dictionary 1.7 Testing Specification](https://docs.google.com/document/d/15DFf9kDX_mlGCJVOch2fztl8W5h-yd18N0_03Sb4HwM/edit#heading=h.rib4osorsdcx).
|
||||
|
||||
## Advanced feature: Tag Filtering
|
||||
You may filter by tags in any of the Web API or Data Dictionary tests. These are the items in the Cucumber .feature files prefixed by an `@` symbol. Expressions may also be used with tags. This README doesn't cover how to use tags, but the Commander supports them. For more information, see the [Cucumber Documentation](https://cucumber.io/docs/cucumber/api/#tags).
|
||||
|
||||
#### Examples
|
||||
|
||||
**Run Web API Core Metadata Tests Only**
|
||||
```
|
||||
$ gradle testWebAPIServerCore_1_0_2 -DpathToRESOScript=/path/to/your.web-api-server.core.1.0.2.resoscript -Dcucumber.filter.tags="@metadata"
|
||||
```
|
||||
|
||||
**Run Data Dictionary Tests on IDX Fields Only**
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/your/dd1.7.resoscript -DshowResponses=true -Dcucumber.filter.tags="@IDX"
|
||||
```
|
||||
|
||||
## Docker
|
||||
|
||||
A [Dockerfile](./Dockerfile) has been provided to dockerize the application.
|
||||
This can be used for CI/CD environments such as Jenkins or TravisCI. The following command will build an image for you:
|
||||
|
||||
|
||||
### Commander Features Other Than Automated Web API Testing
|
||||
```
|
||||
$ docker build -t web-api-commander .
|
||||
```
|
||||
|
||||
The usage for the docker container is the same for `web-api-commander.jar` presented above.
|
||||
|
||||
```
|
||||
$ docker run -it web-api-commander --help
|
||||
```
|
||||
|
||||
If you have input files you may need to mount your filesystem into the docker container
|
||||
|
||||
```
|
||||
$ docker run -it -v $PWD:/app web-api-commander --validateMetadata --inputFile <pathInContainer>
|
||||
```
|
||||
|
||||
### Automated Web API Testing
|
||||
|
||||
You may also run the tests in a Docker container locally by issuing one of the following commands.
|
||||
Docker must be running on your local machine.
|
||||
|
||||
#### MacOS or Linux All-In-One Commands
|
||||
```
|
||||
cd ~; \
|
||||
rm -rf commander-tmp/; \
|
||||
mkdir commander-tmp; \
|
||||
cd commander-tmp; \
|
||||
git clone https://github.com/RESOStandards/web-api-commander.git; \
|
||||
cd web-api-commander; \
|
||||
docker run --rm -u gradle -v "$PWD":/home/gradle/project -v /path/to/your/resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project gradle gradle testWebAPIServer_1_0_2_Core -DpathToRESOScript=/home/gradle/project/resoscripts/your.web-api-server.core.1.0.2.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
Note that this will create a directory in your home directory for the project, and build artifacts and the log will be placed in that directory,
|
||||
which is also where you will end up after runtime.
|
||||
|
||||
|
||||
#### Windows All-In-One WIP
|
||||
```
|
||||
cd C:\;mkdir commander-tmp;cd commander-tmp;git clone https://github.com/RESOStandards/web-api-commander.git;cd web-api-commander; docker run --rm -u gradle -v C:\current\path\web-api-commander:/home/gradle/project -v C:\path\to\your\resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project gradle gradle testWebAPIServer_1_0_2_Core -DpathToRESOScript=/home/gradle/project/resoscripts/your.web-api-server.core.1.0.2.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
---
|
||||
## Using the Commander as a Web API Client Library
|
||||
Java or Scala developers may also use the Commander as a client library, which uses the Apache Olingo library under the hood but adds things like OAuth2 support and data retrieval, validation, and serialization methods. To do so, include the [standalone Web API Commander Jar](build/libs/web-api-commander.jar) in your projects. Feel free to open issues or feature requests in the [Commander GitHub project](https://github.com/RESOStandards/web-api-commander/issues).
|
||||
|
||||
---
|
||||
## Logging
|
||||
|
||||
In the current version of the Commander, two logs are produced. One is outputted in the terminal at `INFO` level during runtime through `stdout`. A detailed log called `commander.log` will be outputted at runtime and will contain details down to the wire requests.
|
||||
|
||||
Log4j 2 is being used under the hood for logging, and a configuration file may be found [here](https://github.com/RESOStandards/web-api-commander/blob/master/src/main/resources/log4j2.xml). Multiple outputs may be used, including posting to collectors or sending emails. [See Log4j 2 docs for more info](https://logging.apache.org/log4j/2.x/manual/index.html).
|
||||
|
||||
Gradle may be debugged as well, and additional gradle commands such as turning on full gradle step logging are discussed in [Gradle Commands](#gradle-commands).
|
||||
|
||||
---
|
||||
|
||||
## Support
|
||||
Please contact [Josh](mailto:josh@reso.org) with any questions, bug reports, or feature requests. Contributions to code or documentation are welcome.
|
||||
|
||||
You may also [open a ticket](https://github.com/RESOStandards/web-api-commander/issues).
|
|
@ -1,2 +1 @@
|
|||
theme: jekyll-theme-slate
|
||||
title: RESO Commander
|
||||
theme: jekyll-theme-midnight
|
447
build.gradle
447
build.gradle
|
@ -1,6 +1,9 @@
|
|||
import java.text.DateFormat
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
plugins {
|
||||
// Apply the java plugin to add support for Java
|
||||
id 'java-library'
|
||||
id 'java'
|
||||
|
||||
// Apply the application plugin to add support for building an application
|
||||
id 'application'
|
||||
|
@ -13,125 +16,108 @@ targetCompatibility = 1.8
|
|||
compileJava.options.encoding = 'UTF-8'
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
// Use jcenter for resolving your dependencies.
|
||||
// You can declare any Maven/Ivy/file repository here.
|
||||
jcenter()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation 'com.google.guava:guava:31.1-jre'
|
||||
implementation 'com.google.inject:guice:5.1.0'
|
||||
implementation 'commons-cli:commons-cli:1.5.0'
|
||||
compile 'com.google.guava:guava:30.1.1-jre'
|
||||
compile 'com.google.inject:guice:5.0.1'
|
||||
compile 'commons-cli:commons-cli:1.4'
|
||||
|
||||
implementation 'org.apache.logging.log4j:log4j-api:2.20.0'
|
||||
implementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.20.0'
|
||||
implementation 'org.apache.logging.log4j:log4j-1.2-api:2.20.0'
|
||||
implementation 'org.apache.logging.log4j:log4j-core:2.20.0'
|
||||
compile 'org.apache.logging.log4j:log4j-api:2.13.0'
|
||||
compile 'org.apache.logging.log4j:log4j-slf4j-impl:2.13.0'
|
||||
compile 'org.apache.logging.log4j:log4j-1.2-api:2.13.0'
|
||||
compile 'org.apache.logging.log4j:log4j-core:2.13.0'
|
||||
|
||||
implementation 'org.apache.olingo:odata-client-api:4.9.0'
|
||||
implementation 'org.apache.olingo:odata-commons-core:4.9.0'
|
||||
implementation 'org.apache.olingo:odata-client-core:4.9.0'
|
||||
compile 'org.apache.olingo:odata-client-api:4.8.0'
|
||||
compile 'org.apache.olingo:odata-commons-core:4.8.0'
|
||||
compile 'org.apache.olingo:odata-client-core:4.8.0'
|
||||
|
||||
implementation 'org.apache.poi:poi:5.2.2'
|
||||
implementation 'org.apache.poi:poi-ooxml:5.2.2'
|
||||
compile 'org.apache.poi:poi:5.0.0'
|
||||
compile 'org.apache.poi:poi-ooxml:5.0.0'
|
||||
|
||||
implementation 'io.rest-assured:rest-assured:5.1.1'
|
||||
implementation 'io.rest-assured:json-path:5.1.1'
|
||||
implementation 'io.rest-assured:json-schema-validator:5.1.1'
|
||||
compile 'io.rest-assured:rest-assured:4.3.3'
|
||||
compile 'io.rest-assured:json-path:4.3.3'
|
||||
compile 'io.rest-assured:json-schema-validator:4.3.3'
|
||||
|
||||
implementation 'io.cucumber:cucumber-java8:7.1.0'
|
||||
implementation 'io.cucumber:cucumber-java:7.1.0'
|
||||
implementation 'io.cucumber:cucumber-junit:7.1.0'
|
||||
implementation 'io.cucumber:cucumber-guice:7.1.0'
|
||||
implementation 'io.cucumber:cucumber-core:7.1.0'
|
||||
compile 'io.cucumber:cucumber-java8:6.10.2'
|
||||
compile 'io.cucumber:cucumber-java:6.10.2'
|
||||
compile 'io.cucumber:cucumber-junit:6.10.2'
|
||||
compile 'io.cucumber:cucumber-guice:6.10.2'
|
||||
compile 'io.cucumber:cucumber-core:6.10.2'
|
||||
|
||||
compile 'com.github.javafaker:javafaker:1.0.2'
|
||||
|
||||
compile 'net.masterthought:cucumber-reporting:5.5.2'
|
||||
|
||||
//TODO: choose one schema validator between this and rest-assured
|
||||
implementation 'com.networknt:json-schema-validator:1.0.70'
|
||||
implementation 'com.google.code.gson:gson:2.9.0'
|
||||
implementation 'org.apache.commons:commons-text:1.10.0'
|
||||
compile 'com.networknt:json-schema-validator:1.0.51'
|
||||
compile 'com.google.code.gson:gson:2.8.6'
|
||||
compile 'org.apache.commons:commons-text:1.9'
|
||||
|
||||
}
|
||||
|
||||
configurations {
|
||||
cucumberRuntime {
|
||||
extendsFrom implementation
|
||||
extendsFrom compile
|
||||
}
|
||||
}
|
||||
|
||||
jar {
|
||||
duplicatesStrategy = DuplicatesStrategy.INCLUDE
|
||||
|
||||
manifest {
|
||||
attributes "Main-Class": mainClassName
|
||||
attributes "Multi-Release": true
|
||||
}
|
||||
|
||||
from {
|
||||
configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
|
||||
configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
|
||||
}
|
||||
|
||||
exclude 'META-INF/*.RSA'
|
||||
exclude 'META-INF/*.SF'
|
||||
exclude 'META-INF/*.DSA'
|
||||
}
|
||||
|
||||
|
||||
|
||||
// don't suppress warnings or deprecation notices
|
||||
tasks.withType(JavaCompile).configureEach {
|
||||
tasks.withType(JavaCompile) {
|
||||
options.compilerArgs << '-Xlint:unchecked'
|
||||
options.deprecation = true
|
||||
}
|
||||
|
||||
String pathToMetadata = null,
|
||||
pathToRESOScript = null,
|
||||
certFileName = null
|
||||
certFileName = null,
|
||||
certOutputDir = null,
|
||||
certJsonPath = null
|
||||
|
||||
final String certOutputDir = 'build' + File.separator + 'certification',
|
||||
cucumberJsonPath = certOutputDir + File.separator + 'cucumberJson',
|
||||
certReportsDir = certOutputDir + File.separator + 'reports',
|
||||
certResultsDir = certOutputDir + File.separator + 'results'
|
||||
|
||||
tasks.register('prepareCertificationEnvironment') {
|
||||
doLast {
|
||||
def folder = new File(certOutputDir)
|
||||
if (folder.exists()) {
|
||||
delete certOutputDir
|
||||
}
|
||||
|
||||
mkdir certOutputDir
|
||||
mkdir cucumberJsonPath
|
||||
mkdir certReportsDir
|
||||
mkdir certResultsDir
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('testWebApiCore_2_0_0') {
|
||||
// task for Web API Server 1.0.2 Core Testing
|
||||
task testWebApiServer_1_0_2_Core() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Web API Core 2.0.0 Acceptance Tests' +
|
||||
'\nExample: ' +
|
||||
'\n ./gradlew testWebApiCore_2_0_0 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript -DshowResponses=true' +
|
||||
'\n\nNote: by default the Web API tests assume Collection(Edm.EnumType).' +
|
||||
'\nPass -DuseStringEnums=true if using string enumerations and the Lookup Resource.' +
|
||||
'\nPass -DuseCollections=false if using OData IsFlags.' +
|
||||
'\n\n[Report location: ' + certReportsDir + ']' +
|
||||
'\n\n'
|
||||
description = 'Runs Web API Core 1.0.2 Automated Acceptance Tests.' +
|
||||
'\n Example: ' +
|
||||
'\n $ ./gradlew testWebApiServer_1_0_2_Core -DpathToRESOScript=/path/to/web-api-core-1.0.2.resoscript -DshowResponses=true\n' +
|
||||
'\n Note: by default the Web API tests assume Collection(Edm.EnumType).' +
|
||||
'\n Pass -DuseCollections=false if using OData IsFlags.\n'
|
||||
|
||||
String reportName = 'web-api-server.core.2.0.0'
|
||||
|
||||
dependsOn jar, prepareCertificationEnvironment
|
||||
dependsOn jar
|
||||
doLast {
|
||||
javaexec {
|
||||
mainClass = "io.cucumber.core.cli.Main"
|
||||
main = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--plugin',
|
||||
'json:' + cucumberJsonPath + '/' + reportName + '.json',
|
||||
'json:build/web-api-server.core.1.0.2.json',
|
||||
'--plugin',
|
||||
'html:' + certReportsDir + '/' + reportName + '.html',
|
||||
'html:build/web-api-server.core.1.0.2.html',
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#WebAPIServerCore',
|
||||
'org.reso.certification.stepdefs#WebAPIServer_1_0_2',
|
||||
'src/main/java/org/reso/certification/features/web-api',
|
||||
'--tags',
|
||||
'@core-endorsement'
|
||||
|
@ -140,23 +126,168 @@ tasks.register('testWebApiCore_2_0_0') {
|
|||
}
|
||||
}
|
||||
|
||||
tasks.register('testDataDictionary_1_7') {
|
||||
// task for Data Dictionary 1.7
|
||||
task testDataDictionary_1_7() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Data Dictionary 1.7 Acceptance Tests' +
|
||||
'\nRESOScript Example:' +
|
||||
'\n ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true -Dstrict=true' +
|
||||
'\n\nMetadata File Example:' +
|
||||
'\n ./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml' +
|
||||
'\n\nTo disable strict mode, remove the -Dstrict=true parameter. All applicants MUST pass strict mode tests to be certified!' +
|
||||
'\n\n[Report location: ' + certReportsDir + ']' +
|
||||
'\n\n'
|
||||
description = 'Runs Data Dictionary 1.7 Automated Acceptance Tests and generates a "raw" report.' +
|
||||
'\n RESOScript Example:' +
|
||||
'\n ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true -Dcucumber.filter.tags=""' +
|
||||
'\n Metadata File Example:' +
|
||||
'\n ./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml -Dcucumber.filter.tags=""' +
|
||||
'\n To enable strict mode, pass -Dstrict=true. All applicants MUST pass strict mode tests to be certified.\n'
|
||||
|
||||
String reportName = 'data-dictionary-1.7'
|
||||
|
||||
dependsOn jar, prepareCertificationEnvironment
|
||||
dependsOn jar
|
||||
doLast {
|
||||
javaexec {
|
||||
mainClass = 'io.cucumber.core.cli.Main'
|
||||
main = 'io.cucumber.core.cli.Main'
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
pathToMetadata = System.getProperty('pathToMetadata', null)
|
||||
pathToRESOScript = System.getProperty('pathToRESOScript', null)
|
||||
|
||||
if (pathToMetadata != null) {
|
||||
certFileName = pathToMetadata
|
||||
.substring(Math.max(pathToMetadata.lastIndexOf(File.separator)+1, 0), pathToMetadata.lastIndexOf('.'))
|
||||
} else if (pathToRESOScript != null) {
|
||||
certFileName = pathToRESOScript
|
||||
.substring(Math.max(pathToRESOScript.lastIndexOf(File.separator)+1, 0), pathToRESOScript.lastIndexOf('.'))
|
||||
}
|
||||
|
||||
DateFormat df = new SimpleDateFormat("yyyyMMddHHMMssS")
|
||||
certOutputDir = 'DD1.7-results' + (certFileName != null ? '-' + certFileName : '') + '-' + df.format(new Date())
|
||||
certJsonPath = certOutputDir + '/data-dictionary-1.7.json'
|
||||
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--plugin',
|
||||
'json:' + certJsonPath,
|
||||
'--plugin',
|
||||
'html:' + certOutputDir + '/data-dictionary-1.7.html',
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#DataDictionary',
|
||||
'src/main/java/org/reso/certification/features/data-dictionary/v1-7-0',
|
||||
'--tags',
|
||||
(systemProperties.get('cucumber.filter.tags', '').toString().trim().length() > 0
|
||||
? systemProperties.get('cucumber.filter.tags') : '')
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task testIdxPayload_1_7() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Runs IDX Payload 1.7 Automated Acceptance Tests.' +
|
||||
'\n Example: ' +
|
||||
'\n $ ./gradlew testIdxPayload_1_7 -DpathToRESOScript=/path/to/web-api-core-1.0.2.resoscript -DshowResponses=true\n'
|
||||
|
||||
dependsOn jar
|
||||
doLast {
|
||||
javaexec {
|
||||
main = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--plugin',
|
||||
'json:build/idx-payload.dd-1.7.json',
|
||||
'--plugin',
|
||||
'html:build/idx-payload.dd-1.7.html',
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#IDXPayload',
|
||||
'src/main/java/org/reso/certification/features/payloads/idx-payload.feature'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task generateCertificationReport_DD_1_7() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Runs Data Dictionary 1.7 tests and creates a certification report' +
|
||||
'\n RESOScript Example:' +
|
||||
'\n ./gradlew generateCertificationReport_DD_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -Dminimal=true -Dstrict=true' +
|
||||
'\n Metadata File Example:' +
|
||||
'\n ./gradlew generateCertificationReport_DD_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml -Dminimal=true -Dstrict=true' +
|
||||
'\n To enable strict mode, pass -Dstrict=true. All applicants MUST pass strict mode tests to be certified.\n'
|
||||
|
||||
dependsOn testDataDictionary_1_7
|
||||
doLast {
|
||||
javaexec {
|
||||
System.setProperty('pathToJsonResults', certJsonPath)
|
||||
System.setProperty('reportDescription', 'RESO Data Dictionary 1.7 Certification Report')
|
||||
systemProperties = System.getProperties()
|
||||
classpath = sourceSets.main.runtimeClasspath
|
||||
main = 'org.reso.certification.reporting.CertificationReportGenerator'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task testDataDictionaryReferenceMetadata_1_7() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Runs Data Dictionary tests against reference metadata\n'
|
||||
|
||||
doLast {
|
||||
javaexec {
|
||||
main = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
System.setProperty('pathToMetadata', 'src/main/resources/RESODataDictionary-1.7.xml')
|
||||
systemProperties = System.getProperties()
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#DataDictionary',
|
||||
'src/main/java/org/reso/certification/features/data-dictionary/v1-7-0'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//used for internal Commander Web API acceptance testing
|
||||
test {
|
||||
dependsOn assemble, compileTestJava, testDataDictionaryReferenceMetadata_1_7
|
||||
doLast {
|
||||
javaexec {
|
||||
main = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
args = ['--strict',
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--glue',
|
||||
'org.reso.commander.test.stepdefs',
|
||||
'src/test/java/org/reso/commander/test/features'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// task for Data Dictionary 1.7 in Jenkins
|
||||
// TODO: consolidate this with the DD tests it was copied from after further testing.
|
||||
task _jenkins_testDataDictionary_1_7() {
|
||||
group = 'RESO Certification - Jenkins'
|
||||
description = 'Runs Data Dictionary 1.7 Automated Acceptance Tests in a way that plays well with various Jenkins Plugins' +
|
||||
"\n Commands used in this case are the same as self-testing locally, they're just passed to a Jenkins-specific config." +
|
||||
"\n NOTE: Most people should never have to use this option unless they're using Jenkins for builds." +
|
||||
'\n RESOScript Example:' +
|
||||
'\n ./gradlew _jenkins_testDataDictionary_1_7() -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true -Dcucumber.filter.tags=""' +
|
||||
'\n Metadata File Example:' +
|
||||
'\n ./gradlew _jenkins_testDataDictionary_1_7() -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml -Dcucumber.filter.tags=""' +
|
||||
'\n To enable strict mode, pass -Dstrict=true. All applicants MUST pass strict mode tests to be certified.\n'
|
||||
|
||||
dependsOn jar
|
||||
|
||||
doLast {
|
||||
certJsonPath = 'build/data-dictionary-1.7.jenkins.json'
|
||||
|
||||
javaexec {
|
||||
|
||||
main = 'io.cucumber.core.cli.Main'
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
|
@ -171,145 +302,65 @@ tasks.register('testDataDictionary_1_7') {
|
|||
.substring(Math.max(pathToRESOScript.lastIndexOf(File.separator) + 1, 0), pathToRESOScript.lastIndexOf('.'))
|
||||
}
|
||||
|
||||
def argsArray = []
|
||||
|
||||
def prettyPrint = System.getProperty('pretty', null)
|
||||
|
||||
if (prettyPrint) {
|
||||
argsArray.add('--plugin')
|
||||
argsArray.add('pretty')
|
||||
}
|
||||
|
||||
argsArray.addAll(
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'json:' + cucumberJsonPath + '/' + reportName + '.json',
|
||||
'--plugin',
|
||||
'html:' + certReportsDir + '/' + reportName + '.html',
|
||||
'json:' + certJsonPath,
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#DataDictionary',
|
||||
'src/main/java/org/reso/certification/features/data-dictionary/v1-7-0'
|
||||
)
|
||||
'src/main/java/org/reso/certification/features/data-dictionary/v1-7-0',
|
||||
'--tags',
|
||||
(systemProperties.get('cucumber.filter.tags', '').toString().trim().length() > 0
|
||||
? systemProperties.get('cucumber.filter.tags') : '')
|
||||
]
|
||||
}
|
||||
|
||||
if (systemProperties.hasProperty('cucumber.filter.tags')) {
|
||||
argsArray.add('--tags')
|
||||
argsArray.add(systemProperties.get('cucumber.filter.tags').toString())
|
||||
}
|
||||
|
||||
args = argsArray
|
||||
javaexec {
|
||||
System.setProperty('pathToJsonResults', certJsonPath)
|
||||
systemProperties = System.getProperties()
|
||||
classpath = sourceSets.main.runtimeClasspath
|
||||
main = 'org.reso.certification.reporting.CertificationReportGenerator'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('testDataAvailability_1_7') {
|
||||
// task for Web API Server 1.0.2 Core Testing specific to Jenkins
|
||||
task _jenkins_testWebApiServer_1_0_2_Core() {
|
||||
group = 'RESO Certification'
|
||||
description = 'Data Dictionary 1.7 Data Availability Tests' +
|
||||
'\nExample:' +
|
||||
'\n ./gradlew testDataAvailability_1_7 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript' +
|
||||
'\n\n[Report location: ' + certReportsDir + ']' +
|
||||
'\n\n'
|
||||
|
||||
String reportName = 'data-availability.dd-1.7'
|
||||
description = 'Runs Web API Server 1.0.2 Core Automated Acceptance Tests in a way that plays well with various Jenkins Plugins' +
|
||||
"\n Commands used in this case are the same as self-testing locally, they're just passed to a Jenkins-specific config." +
|
||||
"\n NOTE: Most people should never have to use this option unless they're using Jenkins for builds." +
|
||||
'\n RESOScript Example:' +
|
||||
'\n $ ./gradlew _jenkins_testWebApiServer_1_0_2_Core -DpathToRESOScript=/path/to/web-api-core-1.0.2.resoscript -DshowResponses=true\n' +
|
||||
'\n Note: by default the Web API tests assume Collection(Edm.EnumType).' +
|
||||
'\n Pass -DuseCollections=false if using OData IsFlags=true.\n'
|
||||
|
||||
dependsOn jar
|
||||
|
||||
doLast {
|
||||
certJsonPath = 'build/web-api-server.core.1.0.2.jenkins.json'
|
||||
javaexec {
|
||||
mainClass = "io.cucumber.core.cli.Main"
|
||||
main = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
def argsArray = []
|
||||
|
||||
def prettyPrint = System.getProperty('pretty', null)
|
||||
|
||||
if (prettyPrint) {
|
||||
argsArray.add('--plugin')
|
||||
argsArray.add('pretty')
|
||||
}
|
||||
|
||||
argsArray.addAll(
|
||||
args = [
|
||||
'--strict',
|
||||
'--plugin',
|
||||
'json:' + cucumberJsonPath + '/' + reportName + '.json',
|
||||
'--plugin',
|
||||
'html:' + certReportsDir + '/' + reportName + '.html',
|
||||
'json:' + certJsonPath,
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#DataAvailability',
|
||||
'src/main/java/org/reso/certification/features/payloads/data-availability.feature'
|
||||
)
|
||||
|
||||
args = argsArray
|
||||
|
||||
'org.reso.certification.stepdefs#WebAPIServer_1_0_2',
|
||||
'src/main/java/org/reso/certification/features/web-api',
|
||||
'--tags',
|
||||
'@core-endorsement'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('testIdxPayload_1_7') {
|
||||
group = 'RESO Certification'
|
||||
description = 'Data Dictionary 1.7 Payloads Sampling Tests' +
|
||||
'\nExample:' +
|
||||
'\n ./gradlew testIdxPayload_1_7 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript' +
|
||||
'\n\n[Report location: ' + certReportsDir + ']' +
|
||||
'\n\n'
|
||||
|
||||
String reportName = 'idx-payload.dd-1.7'
|
||||
|
||||
dependsOn jar
|
||||
doLast {
|
||||
javaexec {
|
||||
mainClass = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
System.setProperty('pathToJsonResults', certJsonPath)
|
||||
systemProperties = System.getProperties()
|
||||
|
||||
args = [
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--plugin',
|
||||
'json:' + cucumberJsonPath + '/' + reportName + '.json',
|
||||
'--plugin',
|
||||
'html:' + certReportsDir + '/' + reportName + '.html',
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#IdxPayload',
|
||||
'src/main/java/org/reso/certification/features/payloads/idx-payload.feature'
|
||||
]
|
||||
classpath = sourceSets.main.runtimeClasspath
|
||||
main = 'org.reso.certification.reporting.CertificationReportGenerator'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('testDataDictionaryReferenceMetadata_1_7') {
|
||||
description = 'Runs Data Dictionary tests against reference metadata'
|
||||
dependsOn jar
|
||||
doLast {
|
||||
javaexec {
|
||||
mainClass = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
System.setProperty('pathToMetadata', 'src/main/resources/RESODataDictionary-1.7.xml')
|
||||
systemProperties = System.getProperties()
|
||||
args = [
|
||||
'--glue',
|
||||
'org.reso.certification.stepdefs#DataDictionary',
|
||||
'src/main/java/org/reso/certification/features/data-dictionary/v1-7-0'
|
||||
]
|
||||
|
||||
if (System.hasProperty("pretty")) {
|
||||
args.add("--plugin", "pretty")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//used for internal Commander Web API acceptance testing
|
||||
test {
|
||||
dependsOn assemble, compileTestJava, testDataDictionaryReferenceMetadata_1_7
|
||||
doLast {
|
||||
javaexec {
|
||||
mainClass = "io.cucumber.core.cli.Main"
|
||||
classpath = configurations.cucumberRuntime + sourceSets.main.output + sourceSets.test.output
|
||||
args = [
|
||||
'--plugin',
|
||||
'pretty',
|
||||
'--glue',
|
||||
'org.reso.commander.test.stepdefs',
|
||||
'src/test/java/org/reso/commander/test/features'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
200
doc/CLI.md
200
doc/CLI.md
|
@ -1,200 +0,0 @@
|
|||
# Command-Line Tools
|
||||
The RESO Commander offers the following command line utilities:
|
||||
* Getting Metadata
|
||||
* Validating XML Metadata
|
||||
* Requesting and Saving Results
|
||||
* Running RESOScript Files
|
||||
* Displaying RESOScript Testing Queries
|
||||
|
||||
In order to run the RESO Commander locally, you must have the Java Runtime Environment (JRE) version
|
||||
8, 10, or 12 installed.
|
||||
|
||||
You may also use [Docker](/doc/Docker.md) if you prefer.
|
||||
|
||||
## Java Requirements
|
||||
Your operating system probably already has a Java Runtime Environment (JRE) installed. This is all you need to run the Commander as a Web API Client.
|
||||
|
||||
To check your version of Java, type the following in a command line environment:
|
||||
```
|
||||
$ java -version
|
||||
```
|
||||
If you have the Java SE Runtime Environment installed, the output will look similar to the following:
|
||||
```
|
||||
$ java -version
|
||||
Java version "1.8.x" (or a higher version)
|
||||
Java<TM> SE Runtime Environment ...
|
||||
```
|
||||
If you don't see something like this, you need to install the [Java SE](https://www.oracle.com/java/technologies/javase-jre8-downloads.html) runtime.
|
||||
|
||||
Once the Java SE Runtime is installed, you may [download the Commander JAR file](build/libs/web-api-commander.jar)
|
||||
|
||||
|
||||
## Display Help
|
||||
|
||||
After downloading the [latest `web-api-commander.jar` file from GitHub](build/libs/web-api-commander.jar), help is available from the command line by passing `--help` or just passing no arguments, as follows:
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar
|
||||
```
|
||||
|
||||
Doing so displays the following information:
|
||||
```
|
||||
usage: java -jar web-api-commander
|
||||
--bearerToken <b> Bearer token to be used with the
|
||||
request.
|
||||
--clientId <d> Client Id to be used with the request.
|
||||
--clientSecret <s>
|
||||
--contentType <t> Results format: JSON (default),
|
||||
JSON_NO_METADATA, JSON_FULL_METADATA,
|
||||
XML.
|
||||
--entityName <n> The name of the entity to fetch, e.g.
|
||||
Property.
|
||||
--generateDDAcceptanceTests Generates acceptance tests in the
|
||||
current directory.
|
||||
--generateMetadataReport Generates metadata report from given
|
||||
<inputFile>.
|
||||
--generateQueries Resolves queries in a given RESOScript
|
||||
<inputFile> and displays them in
|
||||
standard out.
|
||||
--generateReferenceDDL Generates reference DDL to create a
|
||||
RESO-compliant SQL database. Pass
|
||||
--useKeyNumeric to generate the DB
|
||||
using numeric keys.
|
||||
--generateReferenceEDMX Generates reference metadata in EDMX
|
||||
format.
|
||||
--generateResourceInfoModels Generates Java Models for the Web API
|
||||
Reference Server in the current
|
||||
directory.
|
||||
--getMetadata Fetches metadata from <serviceRoot>
|
||||
using <bearerToken> and saves results
|
||||
in <outputFile>.
|
||||
--help print help
|
||||
--inputFile <i> Path to input file.
|
||||
--outputFile <o> Path to output file.
|
||||
--runRESOScript Runs commands in RESOScript file given
|
||||
as <inputFile>.
|
||||
--saveGetRequest Performs GET from <requestURI> using
|
||||
the given <bearerToken> and saves
|
||||
output to <outputFile>.
|
||||
--serviceRoot <s> Service root URL on the host.
|
||||
--uri <u> URI for raw request. Use 'single
|
||||
quotes' to enclose.
|
||||
--useEdmEnabledClient present if an EdmEnabledClient should
|
||||
be used.
|
||||
--useKeyNumeric present if numeric keys are to be used
|
||||
for database DDL generation.
|
||||
--validateMetadata Validates previously-fetched metadata
|
||||
in the <inputFile> path.
|
||||
|
||||
```
|
||||
When using commands, if required arguments aren't provided, relevant feedback will be displayed in the terminal.
|
||||
|
||||
## Authentication
|
||||
The RESO Commander only supports passing OAuth2 "Bearer" tokens from the command line at this time. For those using OAuth2 Client Credentials, please see the section on _[Running RESOScript files](#running-resoscript-files)_.
|
||||
|
||||
|
||||
## Getting Metadata
|
||||
To get metadata from a given server, use the `--getMetadata` argument with the following
|
||||
options:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --getMetadata --serviceRoot https://api.server.com/serviceRoot --outputFile metadata.xml --bearerToken abc123
|
||||
```
|
||||
|
||||
where `serviceRoot` is the path to the _root_ of the OData WebAPI server.
|
||||
|
||||
Assuming everything goes well, metadata will be retrieved from the host
|
||||
and written to the provided `--outputFile`, and the following output will be displayed:
|
||||
```
|
||||
Requesting metadata from: https://api.server.com/serviceRoot/$metadata
|
||||
Metadata request succeeded.
|
||||
```
|
||||
|
||||
## Validating Metadata stored in an EDMX file
|
||||
Sometimes it's useful to validate a local OData XML Metadata (EDMX) file.
|
||||
|
||||
Since parsing EDMX is an incremental process, validation terminates _each time_ invalid items are encountered. Therefore, the workflow for correcting an EDMX document that contains errors would be to run the
|
||||
Commander repeatedly, fixing errors that are encountered along the way.
|
||||
|
||||
To validate metadata that's already been downloaded, call Commander with the following options,
|
||||
adjusting the `path/to/web-api-commander.jar` and `--inputFile` path for your environment accordingly:
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --validateMetadata --inputFile '/src/main/resources/RESODataDictionary-1.7.xml'
|
||||
```
|
||||
XML or OData validation errors will be displayed if any issues were found. If successful, the following message
|
||||
should appear:
|
||||
```
|
||||
Checking Metadata for validity...
|
||||
Valid Metadata!
|
||||
```
|
||||
|
||||
## Saving Results from a Given `uri`
|
||||
The `--saveGetRequest` action makes a request to a `--uri` using a given `--bearerToken`, and saves the response to the given `--outputFile`.
|
||||
|
||||
For example:
|
||||
```
|
||||
$ java -jar build/libs/web-api-commander.jar --saveGetRequest --uri 'https://api.server.com/OData/Property?$filter=ListPrice gt 100000&$top=100' --bearerToken abc123 --outputFile response.json
|
||||
```
|
||||
If the response is successful, it will be written to the specified file and the following will be displayed on the console:
|
||||
```
|
||||
JSON Data fetched from: https://api.server.com/OData/Property?$filter=ListPrice gt 100000&top=100"
|
||||
with response code: 200
|
||||
JSON Response saved to: response.json
|
||||
```
|
||||
Otherwise, errors will be displayed showing what went wrong during the request.
|
||||
|
||||
|
||||
## Displaying Queries for RESOScript Files
|
||||
A RESOScript file usually contains a server's service root and one or more Requests that can either
|
||||
be used in batch-format or can be used during testing.
|
||||
|
||||
To resolve all parameters and display the queries to be run with your RESOScript, use the following command:
|
||||
|
||||
```
|
||||
$ java -jar web-api-commander.jar --generateQueries --inputFile /path/to/your.resoscript
|
||||
```
|
||||
|
||||
This should display something similar to the following:
|
||||
|
||||
```
|
||||
==============================================================
|
||||
Web API Commander Starting... Press <ctrl+c> at any time to exit.
|
||||
==============================================================
|
||||
Displaying 44 Request(s)
|
||||
RESOScript: src/test/resources/mock.web-api-server.core.2.0.0.resoscript
|
||||
==============================================================
|
||||
|
||||
|
||||
===========================
|
||||
Request: #1
|
||||
===========================
|
||||
Request Id: metadata-validation
|
||||
Resolved URL: https://api.reso.org/OData/$metadata
|
||||
|
||||
|
||||
===========================
|
||||
Request: #2
|
||||
===========================
|
||||
Request Id: fetch-by-key
|
||||
Resolved URL: https://api.reso.org/OData/Property('12345')?$select=ListingKey
|
||||
|
||||
...
|
||||
```
|
||||
|
||||
## Running RESOScript Files
|
||||
The Web API Commander is able to run files written using RESO's XML-based scripting format, also known as a RESOScript.
|
||||
|
||||
In order to run an RESOScript file, use a command similar to the following:
|
||||
|
||||
```
|
||||
$ java -jar out/web-api-commander.jar --runRESOScript --inputFile /path/to/your/inputFile
|
||||
```
|
||||
|
||||
A results directory will be created from the RESOScript name and timestamp when it was run, and output will be shown as the requests are made.
|
||||
|
||||
Results will be saved to the filenames specified in the given RESOScript, and error files will be created when there are exceptions, with an ".ERROR" extension appended to them.
|
||||
|
||||
**RESOScript File Format**
|
||||
For examples of files using the RESOScript format, see:
|
||||
* [Data Dictionary 1.7 RESOScript Template](sample-data-dictionary.1.7.0.resoscript)
|
||||
* [Web API Core 2.0.0 RESOScript Template](sample-web-api-server.core.2.0.0.resoscript)
|
||||
|
|
@ -1,325 +0,0 @@
|
|||
# RESO Certification
|
||||
The RESO Commander is the basis for automated Data Dictionary, Payloads, and Web API Certification.
|
||||
|
||||
* [Java and the JDK](#java-and-the-jdk)
|
||||
* [Cloning Commander Repository](#cloning-commander-repository)
|
||||
* [Cucumber Feature Specifications](#cucumber-feature-specifications)
|
||||
* [Testing Environment](#testing-environment)
|
||||
* [Gradle Wrapper](#gradle-wrapper)
|
||||
* [Automated RESO Web API Core Testing](#automated-reso-web-api-core-testing)
|
||||
* [Automated RESO Data Dictionary Testing](#automated-reso-data-dictionary-testing)
|
||||
|
||||
|
||||
## Java and the JDK
|
||||
To run the Commander as an _automated testing tool_, a Java 64-bit JDK must be installed.
|
||||
|
||||
The Commander has been tested with JDK 1.8 and 10 at this point.
|
||||
|
||||
Those using JDK 11+, please [report issues](https://github.com/RESOStandards/web-api-commander/issues) if they arise.
|
||||
|
||||
To see whether you have the JDK installed, type the following using your local command line environment:
|
||||
```
|
||||
$ java -version
|
||||
```
|
||||
If you have a Java JDK installed, your output will look something like:
|
||||
```
|
||||
$ java -version
|
||||
openjdk version "1.8.0_275"
|
||||
OpenJDK Runtime Environment (build 1.8.0_275-8u275-b01-0ubuntu1~20.10-b01)
|
||||
OpenJDK 64-Bit Server VM (build 25.275-b01, mixed mode)
|
||||
```
|
||||
If you don't see something like this, you need to install the JDK:
|
||||
* [OpenJDK 8, 10, or 11 are recommended](https://openjdk.java.net/install/index.html).
|
||||
* [Oracle's SE Development kit may also be used](https://www.oracle.com/java/technologies/javase/javase-jdk8-downloads.html), but there may be additional licensing terms to accept.
|
||||
|
||||
**Note**: there are known issues with Java/JDK 14+ and Groovy. The recommended JDK versions are 8 (1.8), 10, or 11. OpenJDK is preferred. If you're using MacOS you can install the JDK [using Homebrew](https://formulae.brew.sh/formula/openjdk@11). There are instructions for Windows and Linux [here](https://jdk.java.net/java-se-ri/11).
|
||||
|
||||
|
||||
## Cloning Commander Repository
|
||||
The Commander may be run in automated testing mode using a terminal. Automated testing assumes that you have a Java 1.8+ JDK installed, as mentioned elsewhere in this [`README`](#java-and-the-jdk).
|
||||
|
||||
First, change into the directory you want to work in and clone the Commander repository.
|
||||
|
||||
You will need to have Git installed. Chances are you already do, to check, open a command line and type `git` and if it's present, it will print some info about the app. If not, [there are installation instructions here](https://git-scm.com/downloads).
|
||||
|
||||
##### MacOS or Linux
|
||||
```
|
||||
$ git clone https://github.com/RESOStandards/web-api-commander.git
|
||||
```
|
||||
|
||||
##### Windows
|
||||
```
|
||||
C:\> git clone https://github.com/RESOStandards/web-api-commander.git
|
||||
```
|
||||
|
||||
This will clone the repository into a directory called web-api-commander relative to whatever directory you're currently in, which also means you'll have a fresh copy of the latest code to execute.
|
||||
|
||||
To refresh the code after you have downloaded it, issue the command `$ git pull` in the root of the directory that was just created.
|
||||
|
||||
## Cucumber Feature Specifications
|
||||
|
||||
[Cucumber](https://cucumber.io) is being used to describe acceptance criteria in a higher-level DSL rather than encapsulating all of the test logic code. Cucumber's DSL is called [Gherkin](https://cucumber.io/docs/gherkin/) and essentially allows backing test code to be organized in a logical manner that makes sense to analysts as well as programmers.
|
||||
|
||||
## Testing Environment
|
||||
|
||||
Under the hood, [Gradle](https://gradle.org/) is being used for automation. It works across multiple platforms and is friendly with both Docker and Cucumber so that tests may be automated on CI/CD platforms such as Jenkins, Circle CI, Travis, or similar, and emit standard system codes during regression testing.
|
||||
|
||||
## Gradle Wrapper
|
||||
The [Gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) provides a convenient way to automatically download Gradle when running tests.
|
||||
|
||||
After you have cloned the repository, as shown in [a previous step](#cloning-commander-repository), change into the directory containing the source code from GitHub. Convenience methods have been provided for the various certification tasks.
|
||||
|
||||
## Gradle Tasks
|
||||
Once the Gradle Wrapper is set up, you should be able to run the `./gradlew tasks` command in from the root of the Commander source directory in a terminal window and see the list of available tasks.
|
||||
```
|
||||
$ ./gradlew tasks
|
||||
|
||||
> Task :tasks
|
||||
|
||||
------------------------------------------------------------
|
||||
Tasks runnable from root project
|
||||
------------------------------------------------------------
|
||||
...
|
||||
```
|
||||
There are both _built-in tasks_ and _RESO tasks_.
|
||||
|
||||
|
||||
The following section is what's of interest here:
|
||||
```
|
||||
RESO Certification tasks
|
||||
------------------------
|
||||
testDataAvailability_1_7 - Data Dictionary 1.7 Data Availability Tests
|
||||
Example:
|
||||
./gradlew testDataAvailability_1_7 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript
|
||||
|
||||
[Report location: build/certification/reports]
|
||||
|
||||
|
||||
testDataDictionary_1_7 - Data Dictionary 1.7 Acceptance Tests
|
||||
RESOScript Example:
|
||||
./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true -Dstrict=true
|
||||
|
||||
Metadata File Example:
|
||||
./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml
|
||||
|
||||
To disable strict mode, remove the -Dstrict=true parameter. All applicants MUST pass strict mode tests to be certified!
|
||||
|
||||
[Report location: build/certification/reports]
|
||||
|
||||
|
||||
testIdxPayload_1_7 - Data Dictionary 1.7 Payloads Sampling Tests
|
||||
Example:
|
||||
./gradlew testIdxPayload_1_7 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript
|
||||
|
||||
[Report location: build/certification/reports]
|
||||
|
||||
|
||||
testWebApiCore_2_0_0 - Web API Core 2.0.0 Acceptance Tests
|
||||
Example:
|
||||
./gradlew testWebApiCore_2_0_0 -DpathToRESOScript=/path/to/web-api-core-2.0.0.resoscript -DshowResponses=true
|
||||
|
||||
Note: by default the Web API tests assume Collection(Edm.EnumType).
|
||||
Pass -DuseStringEnums=true if using string enumerations and the Lookup Resource.
|
||||
Pass -DuseCollections=false if using OData IsFlags.
|
||||
|
||||
[Report location: build/certification/reports]
|
||||
|
||||
```
|
||||
|
||||
## Automated RESO Web API Core Testing
|
||||
To use the automated RESO testing tools, you must have a [JDK installed](#java-and-the-jdk).
|
||||
|
||||
### Web API Core RESOScript Template
|
||||
To use the Commander for automated Web API Core testing, you need a RESOScript.
|
||||
|
||||
For Web API 2.0.0 Server Core Certification, use [this resoscript](https://github.com/RESOStandards/web-api-commander/blob/main/sample-web-api-server.core.2.0.0.resoscript) as a template.
|
||||
|
||||
For more information regarding Parameters and Client Settings, see the [Web API Walkthrough](https://github.com/RESOStandards/web-api-commander/wiki/Configuring-the-RESO-Commander-for-Automated-Web-API-Core-Testing).
|
||||
|
||||
### Web API Cucumber Acceptance Tests
|
||||
The Cucumber BDD acceptance tests for Web API 2.0.0 Core certification are [here](https://github.com/RESOStandards/web-api-commander/blob/main/src/main/java/org/reso/certification/features/web-api/web-api-server.core.feature). If you have any questions, please [send us an email](mailto:dev@reso.org).
|
||||
|
||||
### Gradle Tasks for Web API 2.0.0 Server Certification
|
||||
While you may use tags to filter tests as you choose, explained in the next section, it's convenient
|
||||
to be able to run a predefined set of tests Web API Core Certification.
|
||||
|
||||
These tasks will also produce reports in the local `/build/certification` directory, relative to the project root, named according to which test you ran.
|
||||
|
||||
#### Core Certification
|
||||
|
||||
This will run the Core tests against the Web API 2.0.0 Server provided as `WebAPIURI` in your `web-api-server.core.2.0.0.resoscript` file.
|
||||
|
||||
**Note**: by default, the Commander assumes `Edm.EnumType` for single- and `Collection(Edm.EnumType)` for multiple-enumeration testing.
|
||||
Pass `-DuseStringEnums=true` if you are using string enumerations.
|
||||
Pass `-DuseCollections=false` if you are using `IsFlags="true"` instead.
|
||||
|
||||
##### MacOS or Linux
|
||||
```
|
||||
$ ./gradlew testWebApiCore_2_0_0 -DpathToRESOScript=/path/to/your.web-api-server.core.2.0.0.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
##### Windows
|
||||
```
|
||||
C:\path\to\web-api-commander> gradlew testWebApiCore_2_0_0 -DpathToRESOScript=C:\path\to\your.web-api-server.core.2.0.0.resoscript -DshowResponses=true
|
||||
```
|
||||
|
||||
*Note: the first time you run these tasks, they will take some time as the environment must be configured and code is being compiled from the contents of the source directory downloaded in the previous step.
|
||||
|
||||
### Web API Program Output
|
||||
|
||||
A sample of the runtime terminal output follows:
|
||||
|
||||
```gherkin
|
||||
> Task :testWebApiCore_2_0_0
|
||||
|
||||
@metadata-request @2.4.1
|
||||
Scenario: REQ-WA103-END3 - Request and Validate Server Metadata
|
||||
|
||||
Using RESOScript: ./web-api-server.core.2.0.0.resoscript
|
||||
Given a RESOScript file was provided
|
||||
|
||||
RESOScript loaded successfully!
|
||||
And Client Settings and Parameters were read from the file
|
||||
|
||||
Bearer token loaded... first 4 characters: test
|
||||
Service root is: https://api.yourserver.com/OData
|
||||
And a test container was successfully created from the given RESOScript
|
||||
|
||||
Authentication Type: authorization_code
|
||||
And the test container uses an authorization_code or client_credentials for authentication
|
||||
|
||||
Requesting XML Metadata from service root at: https://api.yourserver.com/OData
|
||||
When XML Metadata are requested from the service root in "ClientSettings_WebAPIURI"
|
||||
|
||||
Asserted Response Code: 200, Server Response Code: 200
|
||||
Then the server responds with a status code of 200
|
||||
|
||||
Reported OData-Version header value: '4.0'
|
||||
And the server has an OData-Version header value of "4.0" or "4.01"
|
||||
|
||||
Validating XML Metadata response to ensure it's valid XML and matches OASIS OData XSDs...
|
||||
See: https://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/schemas/
|
||||
XMLMetadata string is valid XML!
|
||||
And the XML Metadata response is valid XML
|
||||
|
||||
Validating XML Metadata response to ensure it's valid XML and matches OASIS OData XSDs...
|
||||
See: https://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/schemas/
|
||||
XMLMetadata string is valid XML!
|
||||
XML Metadata is valid!
|
||||
Edm Metadata is valid!
|
||||
And the XML Metadata returned by the server are valid
|
||||
|
||||
And the XML Metadata returned by the server contains Edm metadata
|
||||
|
||||
And the Edm metadata returned by the server are valid
|
||||
|
||||
Found Default Entity Container: 'Default'
|
||||
And the metadata contains a valid service document
|
||||
|
||||
Resource Name: Property
|
||||
Allowed Resources: Property, Member, Office, Contacts, ContactListings, HistoryTransactional, InternetTracking, Media, OpenHouse, OUID, Prospecting, Queue, Rules, SavedSearch, Showing, Teams
|
||||
And the given "Parameter_EndpointResource" resource exists within "Parameter_DD17_WellKnownResourceList"
|
||||
|
||||
Found EntityContainer for the given resource: 'Property'
|
||||
And the metadata contains the "Parameter_EndpointResource" resource
|
||||
|
||||
Searching the default entity container for one of the following Standard Resources: Property, Member, Office, Media
|
||||
Standard Resource Names requirement met!
|
||||
And the metadata contains at least one resource from "Parameter_WebAPI102_RequiredResourceList"
|
||||
|
||||
|
||||
1 Scenarios (1 passed)
|
||||
15 Steps (15 passed)
|
||||
0m4.093s
|
||||
```
|
||||
|
||||
Detailed information will be added to a local `commander.log` file at runtime.
|
||||
|
||||
---
|
||||
## Automated RESO Data Dictionary Testing
|
||||
The Commander provides automated Data Dictionary 1.7 acceptance testing for RESO Certification. The DD 1.7 testing specification is available [here](https://docs.google.com/document/d/15DFf9kDX_mlGCJVOch2fztl8W5h-yd18N0_03Sb4HwM/edit?usp=sharing).
|
||||
|
||||
* [Data Dictionary RESOScript Template](#data-dictionary-resoscript-template)
|
||||
* [Data Dictionary Acceptance Tests](#data-dictionary-acceptance-tests)
|
||||
* [Gradle Tasks for Data Dictionary Certification](#gradle-tasks-for-data-dictionary-certification)
|
||||
* [Test Data Dictionary](#test-data-dictionary)
|
||||
* [Generate Data Dictionary Certification Report](#generate-data-dictionary-certification-report)
|
||||
|
||||
To use the RESO Commander for Data Dictionary testing, you must have the JDK installed and a local copy of the Commander repository. See [RESO Certification](#reso-certification) before proceeding.
|
||||
|
||||
### Data Dictionary RESOScript Template
|
||||
To use the Commander for automated Data Dictionary testing, you need a RESOScript.
|
||||
|
||||
For Data Dictionary 1.7 Certification, use [this resoscript](https://github.com/RESOStandards/web-api-commander/blob/main/sample-data-dictionary.1.7.0.resoscript) as a template.
|
||||
|
||||
### Data Dictionary Acceptance Tests
|
||||
RESO Data Dictionary Certification is driven off of the official Data Dictionary spreadsheet for each version of the dictionary, [currently DD 1.7](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing).
|
||||
|
||||
Cucumber BDD acceptance tests are [automatically generated](#generating-reso-data-dictionary-acceptance-tests) from the [local copy of the approved spreadsheet](https://github.com/RESOStandards/web-api-commander/blob/main/src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The generated Data Dictionary 1.7 Cucumber BDD tests are [located in this directory](https://github.com/RESOStandards/web-api-commander/tree/main/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0). See the [property.feature file](https://github.com/RESOStandards/web-api-commander/blob/main/src/main/java/org/reso/certification/features/data-dictionary/v1-7-0/property.feature), for example, for the RESO Property Resource acceptance tests.
|
||||
|
||||
If you have any questions, please [send us an email](mailto:dev@reso.org).
|
||||
|
||||
### Gradle Tasks for Data Dictionary Certification
|
||||
There are predefined tasks for automated RESO Data Dictionary Certification using the Commander. These can be displayed using [Gradle Tasks](#gradle-tasks) as well.
|
||||
|
||||
* [Test Data Dictionary 1.7](#test-data-dictionary)
|
||||
* [Generate Data Dictionary 1.7 Certification Report](#generate-data-dictionary-certification-report)
|
||||
|
||||
_Note: the first time you run these tasks, they will take some time as the environment must be configured and code is being compiled from the contents of the source directory downloaded in the previous step._
|
||||
|
||||
#### Test Data Dictionary
|
||||
This task tests for Data Dictionary compliance and generates a raw report in a timestamped local directory.
|
||||
|
||||
There are two ways to run automated testing to check for RESO compliant Web API metadata:
|
||||
* using a local metadata file
|
||||
* using a RESOScript file to fetch metadata from a given server
|
||||
|
||||
While RESOScript files and the use of strict mode are required for RESO Certification. In both cases, metadata are validated and then processed for RESO compliance.
|
||||
|
||||
##### Data Dictionary Testing using Local Metadata
|
||||
The Commander allows for a local metadata file to be specified. Not only is this used for internal acceptance testing, but is useful for developers to troubleshoot metadata locally while working on compliance.
|
||||
|
||||
The Gradle task to validate local metadata can be run using the following command:
|
||||
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToMetadata=/path/to/RESODataDictionary-1.7.xml
|
||||
```
|
||||
You may also pass a `-Dstrict=true` flag to see whether the given metadata file would pass Certification.
|
||||
|
||||
A raw report will be generated in a timestamped directory, and a `commander.log` will be generated during runtime.
|
||||
|
||||
##### Data Dictionary Testing using a Data Dictionary RESOScript
|
||||
During Certification, metadata are retrieved directly from an applicant's Web API server using either OAuth2 Bearer Tokens or Client Credentials. Either authentication option is currently available for RESO Certification, depending on configuration, and the applicant will provide working RESOScripts when they apply for certification.
|
||||
|
||||
An example Data Dictionary RESOScript template can be found [here](https://github.com/RESOStandards/web-api-commander/blob/main/sample-data-dictionary.1.7.0.resoscript).
|
||||
|
||||
Once a RESOScript file has been created, it may be used with the following command:
|
||||
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/dd17.resoscript -DshowResponses=true
|
||||
```
|
||||
You may also pass a `-Dstrict=true` flag to see whether the given metadata file would pass Certification.
|
||||
|
||||
A raw report will be generated in a timestamped directory, and a `commander.log` will be generated during runtime.
|
||||
|
||||
### Data Dictionary Testing Output
|
||||
To see examples of Data Dictionary testing output, you may use the `./gradlew testDataDictionaryReferenceMetadata_1_7` command to run the Data Dictionary acceptance tests on the RESO reference metadata.
|
||||
|
||||
There is additional documentation about how Data Dictionary testing works, including sample output, in the [RESO Data Dictionary 1.7 Specification](https://github.com/RESOStandards/transport/blob/main/data-dictionary.md).
|
||||
|
||||
## Advanced feature: Tag Filtering
|
||||
You may filter by tags in any of the Web API or Data Dictionary tests. These are the items in the Cucumber .feature files prefixed by an `@` symbol. Expressions may also be used with tags. This README doen't cover how to use tags, but the Commander supports them. For more information, see the [Cucumber Documentation](https://cucumber.io/docs/cucumber/api/#tags).
|
||||
|
||||
#### Examples
|
||||
|
||||
**Run Web API Core Metadata Tests Only**
|
||||
```
|
||||
$ gradle testWebApiCore_2_0_0 -DpathToRESOScript=/path/to/your.web-api-server.core.2.0.0.resoscript -Dcucumber.filter.tags="@metadata"
|
||||
```
|
||||
|
||||
**Run Data Dictionary Tests on IDX Fields Only**
|
||||
```
|
||||
$ ./gradlew testDataDictionary_1_7 -DpathToRESOScript=/path/to/your/dd1.7.resoscript -DshowResponses=true -Dcucumber.filter.tags="@IDX"
|
||||
```
|
|
@ -1,86 +0,0 @@
|
|||
# Codegen
|
||||
The RESO Commander CLI contains code generation for the following items:
|
||||
* [Generating RESO Data Dictionary Acceptance Tests](#generating-reso-data-dictionary-acceptance-tests)
|
||||
* [Generating RESO Web API Reference Server Data Models](#generating-reso-web-api-reference-server-data-models)
|
||||
* [Generating RESO Data Dictionary Reference Metadata](#generating-reso-data-dictionary-reference-metadata)
|
||||
* [Generating RESO Data Dictionary 1.7 Reference DDL](#generating-reso-data-dictionary-17-reference-ddl)
|
||||
* [Converting OData XML Metadata to Open API 3 Format](#converting-odata-xml-metadata-to-open-api-3-format)
|
||||
|
||||
## Generating RESO Data Dictionary Acceptance Tests
|
||||
The RESO Commander can be used to generate Data Dictionary acceptance tests from the currently approved [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The Commander project's copy of the sheet needs to be updated with a copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) prior to generating reference metadata.
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateDDAcceptanceTests
|
||||
```
|
||||
New Cucumber BDD acceptance tests will be generated and placed in a timestamped directory relative to your current path.
|
||||
|
||||
To update the current tests, copy the newly generated ones into the [Data Dictionary BDD `.features` directory](src/main/java/org/reso/certification/features/data-dictionary/v1-7-0), run the `./gradlew build` task, and if everything works as expected, commit the newly generated tests.
|
||||
|
||||
## Generating RESO Web API Reference Server Data Models
|
||||
The RESO Commander can be used to generate data models for the Web API Reference server from the currently approved [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
The Commander project's copy of the sheet needs to be updated with a copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) prior to generating reference metadata.
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateResourceInfoModels
|
||||
```
|
||||
New ResourceInfo Models for the Web API Reference Server will be generated and placed in a timestamped directory relative to your current path.
|
||||
|
||||
|
||||
## Generating RESO Data Dictionary Reference Metadata
|
||||
In addition to generating DD acceptance tests, the RESO Commander can generate reference metadata based on the current reference [Data Dictionary Spreadsheet](src/main/resources/RESODataDictionary-1.7.xlsx).
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceEDMX --inputFile=src/main/resources/RESODataDictionary-1.7.xlsx
|
||||
```
|
||||
In order to update the Commander's version of the reference metadata, update the local copy of the [DD Google Sheet](https://docs.google.com/spreadsheets/d/1SZ0b6T4_lz6ti6qB2Je7NSz_9iNOaV_v9dbfhPwWgXA/edit?usp=sharing) _prior to_ generating metadata, replace [the local copy](src/main/resources/RESODataDictionary-1.7.xml), and try running automated acceptance tests with `./gradlew build`.
|
||||
|
||||
## Generating RESO Data Dictionary 1.7 Reference DDL
|
||||
|
||||
There is a command that can generate reference DDL for creating SQL databases using either key or key numeric values.
|
||||
|
||||
### String Keys
|
||||
|
||||
Issuing the following will print DDL in the console using String keys as the primary key:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceDDL
|
||||
```
|
||||
|
||||
This means that linked lookups will also use string keys since they'll be linked by a related table that uses string keys.
|
||||
|
||||
There is a variable string key size in the DDLProcessor (currently 64 characters in length).
|
||||
|
||||
Numeric keys are still present in this case, they're just not the primary key.
|
||||
|
||||
|
||||
### Numeric Keys
|
||||
|
||||
Issuing the following will print DDL in the console using Numeric keys as the primary key:
|
||||
|
||||
```
|
||||
$ java -jar path/to/web-api-commander.jar --generateReferenceDDL --useKeyNumeric
|
||||
```
|
||||
|
||||
In this case, `BIGINT` values will be used for all related lookup values.
|
||||
|
||||
### DDL TODO
|
||||
|
||||
The following items need to be added to the DDL generator still:
|
||||
- [ ] Foreign Key relationships.
|
||||
- [x] Creation of Lookup resource.
|
||||
|
||||
|
||||
## Converting OData XML Metadata to Open API 3 Format
|
||||
In order to generate an Open API 3 Spec from the reference metadata, run the following command from
|
||||
the root of the odata-openapi3 directory:
|
||||
```
|
||||
$ odata-openapi3 --host 'api.reso.org' --scheme 'https' --basePath '' ../src/main/resources/RESODataDictionary-1.7.xml
|
||||
```
|
||||
You will need to issue an `npm install` command from the odata-openapi3 directory in order for the packages to be available.
|
||||
|
||||
See documentation regarding running the nodejs-based tools [in the odata-openapi README.md](../odata-openapi/README.md).
|
||||
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
# RESO Commander and Docker
|
||||
RESO automated testing tools and Commander utilities can both be run in a Docker containers.
|
||||
The containers are slightly different in each case.
|
||||
|
||||
### RESO Automated Testing Tools
|
||||
A [GradleDockerfile](../GradleDockerfile) has been provided in order to prepare a Gradle
|
||||
environment for the Commander. The container builds itself from the main branch of the source code, so you don't need
|
||||
the entire repo checked out locally, just the file.
|
||||
|
||||
This can also be used in CI/CD environments such as Jenkins or TravisCI.
|
||||
|
||||
Run the RESO Certification tests in a Docker container locally by issuing one of the following commands.
|
||||
Docker must be running on your local machine.
|
||||
|
||||
One way to do this is to build the container first and then run it:
|
||||
|
||||
```docker build --file GradleDockerfile -t web-api-commander-gradle .```
|
||||
|
||||
Once the container is built, you can use the Gradle commands normally with:
|
||||
```docker run -it web-api-commander-gradle testWebApiCore_2_0_0 -DpathToRESOScript=/home/gradle/project/resoscripts/your.resoscript -DshowResponses=true```
|
||||
|
||||
You can also build the container on the fly:
|
||||
|
||||
```docker run --rm -it -v "$PWD":/home/gradle/project -v /path/to/your/resoscripts:/home/gradle/project/resoscripts -w /home/gradle/project -it $(docker build -f GradleDockerfile -q .) testWebApiCore_2_0_0 -DpathToRESOScript=/home/gradle/project/resoscripts/your.resoscript -DshowResponses=true```
|
||||
|
||||
Note that this will create a directory in your home directory for the project, and build artifacts and the log will be placed in that directory,
|
||||
which is also where you will end up after runtime.
|
||||
|
||||
You may need to adjust the path separators if using Windows.
|
||||
|
||||
|
||||
### Commander Utilities
|
||||
A [Dockerfile](../Dockerfile) has also been provided to Dockerize the application for Commander utilities.
|
||||
|
||||
To run the Commander utilities, use the following commands:
|
||||
|
||||
```
|
||||
$ docker build -t web-api-commander .
|
||||
```
|
||||
|
||||
The usage for the docker container is the same for `web-api-commander.jar` presented above.
|
||||
|
||||
```
|
||||
$ docker run -it web-api-commander --help
|
||||
```
|
||||
|
||||
If you have input files you may need to mount your filesystem into the docker container
|
||||
|
||||
```
|
||||
$ docker run -it -v $PWD:/app web-api-commander --validateMetadata --inputFile <pathInContainer>
|
||||
```
|
|
@ -1,8 +0,0 @@
|
|||
# Using the Commander as a Web API Client Library
|
||||
Java or Scala developers may also use the Commander as a client library,
|
||||
which uses [Apache Olingo](#https://olingo.apache.org/doc/odata4/index.html) under the hood but adds things like OAuth2 support and data retrieval,
|
||||
validation, and serialization methods.
|
||||
|
||||
To use the library, include the standalone Web API Commander Jar in your projects.
|
||||
|
||||
Feel free to open issues or feature requests in the Commander GitHub project.
|
|
@ -1,2 +0,0 @@
|
|||
org.gradle.jvmargs=-Xmx28g
|
||||
org.gradle.warning.mode=all
|
Binary file not shown.
|
@ -1,5 +1,6 @@
|
|||
#Mon Mar 23 12:15:57 PDT 2020
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-all.zip
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
|
|
|
@ -1,129 +1,78 @@
|
|||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
#!/usr/bin/env sh
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
}
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
NONSTOP* )
|
||||
nonstop=true
|
||||
;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
@ -132,7 +81,7 @@ Please set the JAVA_HOME variable in your environment to match the
|
|||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
|
@ -140,95 +89,84 @@ location of your Java installation."
|
|||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command;
|
||||
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||
# shell script including quotes and variable substitutions, so put them in
|
||||
# double quotes to make sure that they get re-expanded; and
|
||||
# * put everything else in single quotes, so that it's not re-expanded.
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=$((i+1))
|
||||
done
|
||||
case $i in
|
||||
(0) set -- ;;
|
||||
(1) set -- "$args0" ;;
|
||||
(2) set -- "$args0" "$args1" ;;
|
||||
(3) set -- "$args0" "$args1" "$args2" ;;
|
||||
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Escape application args
|
||||
save () {
|
||||
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
|
||||
echo " "
|
||||
}
|
||||
APP_ARGS=$(save "$@")
|
||||
|
||||
# Collect all arguments for the java command, following the shell quoting and substitution rules
|
||||
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
|
||||
|
||||
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
|
||||
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
|
||||
cd "$(dirname "$0")"
|
||||
fi
|
||||
|
||||
exec "$JAVACMD" "$@"
|
||||
|
|
|
@ -1,19 +1,3 @@
|
|||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
|
@ -29,18 +13,15 @@ if "%DIRNAME%" == "" set DIRNAME=.
|
|||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
@ -54,7 +35,7 @@ goto fail
|
|||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
|
@ -64,14 +45,28 @@ echo location of your Java installation.
|
|||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windows variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
Subproject commit e2b333a79a6eea22afc962a0650acfb51ea9d213
|
|
@ -1,71 +0,0 @@
|
|||
#!/bin/bash
|
||||
echo "Job: RESO Data Dictionary Testing"
|
||||
echo "Started at: " date
|
||||
echo "Args: $*"
|
||||
env
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "ERROR: 'certificationRequestId' parameter missing from args!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$2" ]; then
|
||||
echo "ERROR: 'certificationCommand' parameter missing from args!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
certificationCommand="$1"
|
||||
certificationRequestId="$2"
|
||||
certificationPath="/certification/$certificationRequestId"
|
||||
|
||||
echo "Running Commander..."
|
||||
echo "certificationCommand: $certificationCommand"
|
||||
echo "certificationRequestId: $certificationRequestId"
|
||||
echo "certificationPath: $certificationPath"
|
||||
|
||||
echo "Checking for config file: $certificationPath/config.xml"
|
||||
ls "$certificationPath/config.xml"
|
||||
|
||||
status=$?
|
||||
if [ $status -eq 1 ]; then
|
||||
echo "ERROR: Could not find config file!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Changing to Commander Directory"
|
||||
cd "/web-api-commander" || exit 1
|
||||
|
||||
echo "Running Tests! Command: gradle $certificationCommand -DpathToRESOScript=$certificationPath/config.xml"
|
||||
gradle "$certificationCommand" "-DpathToRESOScript=$certificationPath/config.xml" > "$certificationPath/$certificationCommand.log"
|
||||
|
||||
status=$?
|
||||
if [ $status -eq 1 ]; then
|
||||
echo "ERROR: Command '$certificationCommand' failed for certificationRequestId: $certificationRequestId"
|
||||
exit 1
|
||||
else
|
||||
echo "SUCCESS: Command '$certificationCommand' succeeded for certificationRequestId: $certificationRequestId"
|
||||
fi
|
||||
|
||||
echo "Copying files: cp -R commander.log build/certification $certificationPath"
|
||||
cp -R "commander.log" "build/certification" "$certificationPath"
|
||||
|
||||
status=$?
|
||||
if [ $status -eq 1 ]; then
|
||||
echo "ERROR: Could not copy files to '$certificationPath'!"
|
||||
exit 1
|
||||
else
|
||||
echo "Files copied!"
|
||||
fi
|
||||
|
||||
ls -alh "$certificationPath"
|
||||
status=$?
|
||||
if [ $status -eq 1 ]; then
|
||||
echo "ERROR: Could not list files in '$certificationPath'!"
|
||||
exit 1
|
||||
else
|
||||
echo "Files copied!"
|
||||
fi
|
||||
|
||||
echo "Testing complete!"
|
||||
|
||||
|
|
@ -83,6 +83,17 @@
|
|||
|
||||
</ClientSettings>
|
||||
|
||||
|
||||
<!--
|
||||
############################################################
|
||||
Parameters Section - add your testing variables here
|
||||
############################################################-->
|
||||
|
||||
<Parameters>
|
||||
<!-- OPTIONAL: Useful for testing the OData Format Parameter - Value="?$format=application/xml" -->
|
||||
<Parameter Name="OptionalMetadataFormatParameter" Value="" />
|
||||
</Parameters>
|
||||
|
||||
<!--
|
||||
############################################################
|
||||
Requests Section - Queries used during testing,
|
||||
|
@ -92,8 +103,8 @@
|
|||
|
||||
<Request
|
||||
RequestId="metadata-request"
|
||||
OutputFile="metadata-request.xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata?$format=application/xml"
|
||||
OutputFile="metadata-metadata-request.xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata*Parameter_OptionalMetadataFormatParameter*"
|
||||
/>
|
||||
|
||||
</Requests>
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<?xml version="1.0" encoding="utf-8" ?>
|
||||
|
||||
<!--
|
||||
>>> To be used as a template for Web API Server Core 2.0.0 Certification <<<
|
||||
>>> To be used as a template for Web API Server Core 1.0.2 Certification <<<
|
||||
|
||||
NOTES:
|
||||
* Anything marked REQUIRED should be filled in.
|
||||
|
@ -202,6 +202,10 @@
|
|||
Value="*Parameter_MultipleValueLookupNamespace*'*Parameter_MultipleLookupValue1*'"/>
|
||||
<Parameter Name="MultipleValueLookupValue2"
|
||||
Value="*Parameter_MultipleValueLookupNamespace*'*Parameter_MultipleLookupValue2*'"/>
|
||||
|
||||
<!-- OPTIONAL: Useful for testing the OData Format Parameter - Value="?$format=application/xml" -->
|
||||
<Parameter Name="OptionalMetadataFormatParameter" Value="?$format=application/xml"/>
|
||||
|
||||
</Parameters>
|
||||
|
||||
<!--
|
||||
|
@ -214,7 +218,7 @@
|
|||
<Request
|
||||
RequestId="metadata-request"
|
||||
OutputFile="metadata-request.xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata?$format=application/xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata*Parameter_OptionalMetadataFormatParameter*"
|
||||
/>
|
||||
|
||||
<Request
|
|
@ -132,8 +132,7 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
String template = EMPTY_STRING;
|
||||
|
||||
if (field.getSynonyms().size() > 0) {
|
||||
template += " Given that the following synonyms for \"" + field.getStandardName()
|
||||
+ "\" DO NOT exist in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
template += " And the following synonyms for \"" + field.getStandardName() + "\" MUST NOT exist in the metadata\n" +
|
||||
field.getSynonyms().stream()
|
||||
.map(synonym -> padLeft("| " + synonym + " |\n", EXAMPLES_PADDING_AMOUNT)).collect(Collectors.joining());
|
||||
}
|
||||
|
@ -145,9 +144,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
|
||||
return "\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Boolean\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Boolean\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
|
||||
public static String buildDateTest(ReferenceStandardField field) {
|
||||
|
@ -155,9 +154,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
|
||||
return "\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Date\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Date\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -178,9 +177,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
String template =
|
||||
"\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Decimal\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Decimal\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
|
||||
//TODO Length is actually scale for Decimal fields by the DD! :/
|
||||
if (field.getSuggestedMaxLength() != null)
|
||||
|
@ -202,9 +201,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
|
||||
return "\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Integer\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Integer\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
|
||||
private static String buildStandardEnumerationMarkup(String lookupName) {
|
||||
|
@ -230,9 +229,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
return
|
||||
"\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Multiple Enumeration\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Multiple Enumeration\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
|
||||
public static String buildStringListSingleTest(ReferenceStandardField field) {
|
||||
|
@ -241,9 +240,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
return
|
||||
"\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Single Enumeration\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Single Enumeration\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
|
||||
public static String buildStringTest(ReferenceStandardField field) {
|
||||
|
@ -251,9 +250,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
String template =
|
||||
"\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"String\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"String\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
|
||||
if (field.getSuggestedMaxLength() != null)
|
||||
template +=
|
||||
|
@ -267,9 +266,9 @@ public class BDDProcessor extends WorksheetProcessor {
|
|||
|
||||
return "\n " + buildTags(field).stream().map(tag -> "@" + tag).collect(Collectors.joining(SINGLE_SPACE)) + "\n" +
|
||||
" Scenario: " + field.getStandardName() + "\n" +
|
||||
generateSynonymsMarkup(field) +
|
||||
" When \"" + field.getStandardName() + "\" exists in the \"" + field.getParentResourceName() + "\" metadata\n" +
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Timestamp\" data type\n";
|
||||
" Then \"" + field.getStandardName() + "\" MUST be \"Timestamp\" data type\n" +
|
||||
generateSynonymsMarkup(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,21 +1,39 @@
|
|||
package org.reso.certification.codegen;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.reso.models.ReferenceStandardField;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class DDCacheProcessor extends WorksheetProcessor {
|
||||
final AtomicReference<Map<String, Map<String, ReferenceStandardField>>> standardFieldCache =
|
||||
new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
private static final Logger LOG = LogManager.getLogger(DDCacheProcessor.class);
|
||||
Map<String, List<ReferenceStandardField>> fieldCache = new LinkedHashMap<>();
|
||||
|
||||
private void addToFieldCache(ReferenceStandardField field) {
|
||||
standardFieldCache.get().putIfAbsent(field.getParentResourceName(), new LinkedHashMap<>());
|
||||
standardFieldCache.get().get(field.getParentResourceName()).put(field.getStandardName(), field);
|
||||
fieldCache.putIfAbsent(field.getParentResourceName(), new LinkedList<>());
|
||||
fieldCache.get(field.getParentResourceName()).add(field);
|
||||
}
|
||||
|
||||
public Map<String, Map<String, ReferenceStandardField>> getStandardFieldCache() {
|
||||
return standardFieldCache.get();
|
||||
public Map<String, List<ReferenceStandardField>> getFieldCache() {
|
||||
return fieldCache;
|
||||
}
|
||||
|
||||
public static Map<String, List<ReferenceStandardField>> buildCache() {
|
||||
LOG.info("Creating standard field cache...");
|
||||
DDCacheProcessor cacheProcessor = new DDCacheProcessor();
|
||||
DataDictionaryCodeGenerator generator = new DataDictionaryCodeGenerator(cacheProcessor);
|
||||
generator.processWorksheets();
|
||||
LOG.info("Standard field cache created!");
|
||||
return cacheProcessor.getFieldCache();
|
||||
}
|
||||
|
||||
public static DataDictionaryCodeGenerator getGeneratorInstance() {
|
||||
DDCacheProcessor cacheProcessor = new DDCacheProcessor();
|
||||
return new DataDictionaryCodeGenerator(cacheProcessor);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -168,8 +168,6 @@ public class DDLProcessor extends WorksheetProcessor {
|
|||
return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, resourceName).replace("o_u_i_d", "ouid");
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static String buildCreateLookupStatement(boolean useKeyNumeric) {
|
||||
return
|
||||
"\n\n/**\n" +
|
||||
|
@ -205,7 +203,7 @@ public class DDLProcessor extends WorksheetProcessor {
|
|||
standardFieldMap.forEach((standardName, referenceStandardField) -> {
|
||||
String inserts = buildLookupValueInserts(referenceStandardField);
|
||||
if (inserts.length() > 0) {
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupName(),
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupStandardName(),
|
||||
(markupMap.keySet().size() > 0 ? ", " : EMPTY_STRING) + PADDING + inserts);
|
||||
}
|
||||
});
|
||||
|
@ -219,16 +217,16 @@ public class DDLProcessor extends WorksheetProcessor {
|
|||
private String buildLookupValueInserts(ReferenceStandardField standardField) {
|
||||
StringBuilder content = new StringBuilder();
|
||||
|
||||
if (getEnumerations().get(standardField.getLookupName()) != null) {
|
||||
if (getEnumerations().get(standardField.getLookupStandardName()) != null) {
|
||||
AtomicReference<String> fieldHash = new AtomicReference<>();
|
||||
|
||||
//iterate through each of the lookup values and generate their edm:EnumType content
|
||||
getEnumerations().get(standardField.getLookupName()).forEach(lookup -> {
|
||||
getEnumerations().get(standardField.getLookupStandardName()).forEach(lookup -> {
|
||||
|
||||
// key is the sha256 of the following values
|
||||
fieldHash.set(sha256()
|
||||
.hashString(
|
||||
standardField.getLookupName()
|
||||
standardField.getLookupStandardName()
|
||||
+ lookup.getLookupDisplayName()
|
||||
+ lookup.getLookupValue(), StandardCharsets.UTF_8)
|
||||
.toString());
|
||||
|
@ -237,7 +235,7 @@ public class DDLProcessor extends WorksheetProcessor {
|
|||
.append(content.length() > 0 ? ", " : EMPTY_STRING).append("\n")
|
||||
.append(PADDING).append("(")
|
||||
.append("\"").append(fieldHash.get()).append("\"")
|
||||
.append(", ").append("\"").append(standardField.getLookupName()).append("\"")
|
||||
.append(", ").append("\"").append(standardField.getLookupStandardName()).append("\"")
|
||||
.append(", ").append("\"").append(lookup.getLookupDisplayName()).append("\"")
|
||||
.append(", ").append("\"").append(lookup.getLookupDisplayName()).append("\"")
|
||||
.append(", ").append("\"").append(lookup.getLookupValue()).append("\"")
|
||||
|
|
|
@ -9,9 +9,9 @@ import org.reso.commander.common.DataDictionaryMetadata;
|
|||
import static org.reso.certification.codegen.WorksheetProcessor.REFERENCE_WORKSHEET;
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.buildWellKnownStandardFieldHeaderMap;
|
||||
|
||||
public class DataDictionaryCodeGenerator {
|
||||
public final class DataDictionaryCodeGenerator {
|
||||
private static final Logger LOG = LogManager.getLogger(DataDictionaryCodeGenerator.class);
|
||||
WorksheetProcessor processor = null;
|
||||
private WorksheetProcessor processor = null;
|
||||
Workbook workbook = null;
|
||||
|
||||
private DataDictionaryCodeGenerator() {
|
||||
|
@ -31,25 +31,27 @@ public class DataDictionaryCodeGenerator {
|
|||
|
||||
/**
|
||||
* Generates Data Dictionary references for local workbook instance using the configured WorksheetProcessor
|
||||
*
|
||||
* TODO: convert to .parallelStream()
|
||||
*/
|
||||
public void processWorksheets() {
|
||||
Sheet currentWorksheet, standardResourcesWorksheet;
|
||||
Sheet currentWorksheet, standardRelationshipsWorksheet;
|
||||
int sheetIndex, rowIndex;
|
||||
final int ROW_HEADER_INDEX = 0, FIRST_ROW_INDEX = 1;
|
||||
final String STANDARD_RELATIONSHIPS_WORKSHEET = "Standard Relationships";
|
||||
|
||||
try {
|
||||
standardResourcesWorksheet = workbook.getSheet(STANDARD_RELATIONSHIPS_WORKSHEET);
|
||||
assert standardResourcesWorksheet != null;
|
||||
standardRelationshipsWorksheet = workbook.getSheet(STANDARD_RELATIONSHIPS_WORKSHEET);
|
||||
assert standardRelationshipsWorksheet != null : "Standard Relationships worksheet MUST be present!";
|
||||
|
||||
processor.buildStandardRelationships(standardResourcesWorksheet);
|
||||
processor.buildStandardRelationships(standardRelationshipsWorksheet);
|
||||
|
||||
//workbook consists of many sheets, process only the ones that have the name of a well-known resource
|
||||
//TODO: change to stream processing logic
|
||||
for (sheetIndex = ROW_HEADER_INDEX; sheetIndex < workbook.getNumberOfSheets(); sheetIndex++) {
|
||||
assert workbook != null && sheetIndex >= 0 && sheetIndex < workbook.getNumberOfSheets()
|
||||
: "Worksheet at index + " + sheetIndex + " does not exist!";
|
||||
currentWorksheet = workbook.getSheetAt(sheetIndex);
|
||||
|
||||
//TODO: make DD version dynamic
|
||||
if (DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCES.contains(currentWorksheet.getSheetName()) && currentWorksheet.getPhysicalNumberOfRows() > 1) {
|
||||
processor.beforeResourceSheetProcessed(currentWorksheet);
|
||||
|
||||
|
@ -71,4 +73,8 @@ public class DataDictionaryCodeGenerator {
|
|||
LOG.info(ex);
|
||||
}
|
||||
}
|
||||
|
||||
public WorksheetProcessor getProcessor() {
|
||||
return processor;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,281 @@
|
|||
package org.reso.certification.codegen;
|
||||
|
||||
import com.github.javafaker.Faker;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.reso.commander.common.Utils;
|
||||
import org.reso.models.DataGenerator;
|
||||
import org.reso.models.ReferenceStandardField;
|
||||
import org.reso.models.ReferenceStandardLookup;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_DATA_TYPES.*;
|
||||
|
||||
/**
|
||||
* From: https://mariadb.com/kb/en/how-to-quickly-insert-data-into-mariadb/
|
||||
*
|
||||
* ALTER TABLE table_name DISABLE KEYS;
|
||||
* BEGIN;
|
||||
* ... inserting data with INSERT or LOAD DATA ....
|
||||
* COMMIT;
|
||||
* ALTER TABLE table_name ENABLE KEYS;
|
||||
*
|
||||
* SET @@session.unique_checks = 0;
|
||||
* SET @@session.foreign_key_checks = 0;
|
||||
*
|
||||
* SET @@global.innodb_autoinc_lock_mode = 2;
|
||||
*
|
||||
* Then use this to import the data:
|
||||
*
|
||||
* mysqlimport --use-threads=<numThreads> database text-file-name [text-file-name...]
|
||||
*/
|
||||
public class DataDictionarySeedDataSqlGenerator {
|
||||
private static final Logger LOG = LogManager.getLogger(DataDictionarySeedDataSqlGenerator.class);
|
||||
final private DDCacheProcessor processor;
|
||||
|
||||
/**
|
||||
* Cache of fields and their data generators by resource
|
||||
*/
|
||||
private final static AtomicReference<Map<String, Map<String, DataGenerator.FieldDataGenerator>>> dataGeneratorResourceFieldMap
|
||||
= new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
/**
|
||||
* Cache of standard fields from the current Data Dictionary worksheet
|
||||
*/
|
||||
private final static AtomicReference<Map<String, List<ReferenceStandardField>>> referenceStandardFieldCache
|
||||
= new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
/**
|
||||
* Cache of keys by resource name
|
||||
*/
|
||||
private final static AtomicReference<Map<String, String>> keyCache
|
||||
= new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
|
||||
/**
|
||||
* TODO: add a standard relationships cache so keys can be sampled from the keyCache for related records
|
||||
*/
|
||||
|
||||
public DataDictionarySeedDataSqlGenerator() {
|
||||
LOG.info("Welcome to the RESO Data Dictionary Database Seed Generator!");
|
||||
LOG.info("Creating standard field cache...");
|
||||
DDCacheProcessor processor = new DDCacheProcessor();
|
||||
DataDictionaryCodeGenerator generator = new DataDictionaryCodeGenerator(processor);
|
||||
generator.processWorksheets();
|
||||
LOG.info("Standard field cache created!");
|
||||
|
||||
this.processor = processor;
|
||||
|
||||
//build a cache of the Dictionary standard fields
|
||||
referenceStandardFieldCache.set(processor.getFieldCache());
|
||||
|
||||
//build a cache of Data Dictionary generators
|
||||
DataGenerator dataGenerator = DataGenerator.deserialize();
|
||||
dataGenerator.getResourceInfo().forEach(resourceInfo -> {
|
||||
dataGeneratorResourceFieldMap.get().putIfAbsent(resourceInfo.getResourceName(), new LinkedHashMap<>());
|
||||
dataGenerator.getFields().forEach(fieldDataGenerator ->
|
||||
dataGeneratorResourceFieldMap.get().get(resourceInfo.getResourceName()).put(fieldDataGenerator.getFieldName(), fieldDataGenerator));
|
||||
});
|
||||
|
||||
//extract counts for each resource
|
||||
final Map<String, Integer> resourceCounts = dataGenerator.getResourceInfo().stream()
|
||||
.collect(Collectors.toMap(DataGenerator.ResourceInfo::getResourceName, DataGenerator.ResourceInfo::getRecordCount));
|
||||
|
||||
//iterate over each resource in the Data Dictionary and generate n items from it, where n is the recordCount
|
||||
//in the resourceInfo section of the data generator reference file
|
||||
referenceStandardFieldCache.get().keySet().forEach(resourceName -> {
|
||||
LOG.info("Processing " + resourceName + " resource...");
|
||||
LOG.info(generateRowInsertStatements(resourceName, referenceStandardFieldCache.get().get(resourceName), resourceCounts.get(resourceName)));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* INSERT INTO tbl_name (a,b,c)
|
||||
* VALUES(1,2,3), (4,5,6), (7,8,9);
|
||||
*
|
||||
* TODO: this function needs to have the lookups split out and handled in their own insert statement generator
|
||||
*
|
||||
* @param resourceName
|
||||
* @param referenceStandardFields
|
||||
* @param numStatements
|
||||
* @return
|
||||
*/
|
||||
final String generateRowInsertStatements(String resourceName, List<ReferenceStandardField> referenceStandardFields, Integer numStatements) {
|
||||
final String tableName = DDLProcessor.buildDbTableName(resourceName);
|
||||
StringBuilder stringBuilder = new StringBuilder();
|
||||
stringBuilder.append("ALTER TABLE ").append(tableName).append(" DISABLE KEYS;\n");
|
||||
stringBuilder.append("BEGIN;\n");
|
||||
stringBuilder.append("INSERT INTO ").append(tableName);
|
||||
stringBuilder.append(" (");
|
||||
stringBuilder.append(referenceStandardFields.stream().map(ReferenceStandardField::getStandardName)
|
||||
.collect(Collectors.joining(", ")));
|
||||
stringBuilder.append(") VALUES");
|
||||
|
||||
for (int statementCount = 0; statementCount < numStatements; statementCount++) {
|
||||
stringBuilder.append("\n\t(");
|
||||
stringBuilder.append(referenceStandardFields.stream().map(this::generateValues).collect(Collectors.joining(", ")));
|
||||
stringBuilder.append(")");
|
||||
|
||||
//add commas between values only if we're not at the last item
|
||||
if (statementCount < numStatements - 1) stringBuilder.append(", ");
|
||||
}
|
||||
|
||||
stringBuilder.append(";\n");
|
||||
stringBuilder.append("COMMIT;\n");
|
||||
stringBuilder.append("ALTER TABLE " + tableName + " ENABLE KEYS;\n\n");
|
||||
|
||||
return stringBuilder.toString();
|
||||
}
|
||||
|
||||
final String generateValues(ReferenceStandardField referenceStandardField) {
|
||||
//now that row has been processed, extract field type and assemble the template
|
||||
switch (referenceStandardField.getSimpleDataType()) {
|
||||
case NUMBER:
|
||||
return generateNumber(referenceStandardField);
|
||||
case STRING_LIST_SINGLE:
|
||||
return generateStringListSingle(referenceStandardField);
|
||||
case STRING:
|
||||
return generateString(referenceStandardField);
|
||||
case BOOLEAN:
|
||||
return generateBoolean(referenceStandardField);
|
||||
case STRING_LIST_MULTI:
|
||||
return generateStringListMulti(referenceStandardField).toString();
|
||||
case DATE:
|
||||
return generateDate(referenceStandardField);
|
||||
case TIMESTAMP:
|
||||
return generateTimestamp(referenceStandardField);
|
||||
default:
|
||||
if (referenceStandardField.getSimpleDataType() != null)
|
||||
LOG.debug("Data type: " + referenceStandardField.getSimpleDataType() + " is not supported!");
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
String generateNumber(ReferenceStandardField referenceStandardField) {
|
||||
return referenceStandardField.getSuggestedMaxPrecision() != null
|
||||
? generateDecimal(referenceStandardField) : generateInteger(referenceStandardField);
|
||||
}
|
||||
|
||||
String generateInteger(ReferenceStandardField referenceStandardField) {
|
||||
final int MAX_INTEGER_POWER = 5;
|
||||
int maxPower = Math.min(referenceStandardField.getSuggestedMaxLength(), MAX_INTEGER_POWER);
|
||||
return String.valueOf(Faker.instance().number().numberBetween(0, (int)Math.pow(10, maxPower)));
|
||||
}
|
||||
|
||||
String generateDecimal(ReferenceStandardField referenceStandardField) {
|
||||
final int MAX_INTEGER_POWER = 6;
|
||||
int maxPower = Math.min(referenceStandardField.getSuggestedMaxLength(), MAX_INTEGER_POWER);
|
||||
return String.valueOf(Faker.instance().number()
|
||||
.randomDouble(referenceStandardField.getSuggestedMaxPrecision(), 0, (int)Math.pow(10, maxPower)));
|
||||
}
|
||||
|
||||
String generateBoolean(ReferenceStandardField referenceStandardField) {
|
||||
return String.valueOf(ThreadLocalRandom.current().nextBoolean()).toUpperCase();
|
||||
}
|
||||
|
||||
String generateStringListSingle(ReferenceStandardField referenceStandardField) {
|
||||
List<String> possibleChoices;
|
||||
List<String> customExamples = dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()) != null
|
||||
? dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()).getCustomExamples() : null;
|
||||
|
||||
if (processor.getEnumerations().containsKey(referenceStandardField.getLookupStandardName())) {
|
||||
possibleChoices = processor.getEnumerations().get(referenceStandardField.getLookupStandardName()).stream()
|
||||
.map(ReferenceStandardLookup::getLookupValue).collect(Collectors.toList());
|
||||
} else if (customExamples != null && customExamples.size() > 0) {
|
||||
possibleChoices = customExamples;
|
||||
} else {
|
||||
possibleChoices = new ArrayList<>();
|
||||
possibleChoices.add(Faker.instance().chuckNorris().fact());
|
||||
}
|
||||
|
||||
Collections.shuffle(possibleChoices);
|
||||
return wrapInQuotes(possibleChoices.get(0));
|
||||
}
|
||||
|
||||
List<String> generateStringListMulti(ReferenceStandardField referenceStandardField) {
|
||||
List<String> possibleChoices;
|
||||
List<String> customExamples = dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()) != null
|
||||
? dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()).getCustomExamples() : null;
|
||||
int numElements, randomSize = 0;
|
||||
Set<String> enumNames = new LinkedHashSet<>();
|
||||
|
||||
if (processor.getEnumerations().containsKey(referenceStandardField.getLookupStandardName())) {
|
||||
numElements = processor.getEnumerations().get(referenceStandardField.getLookupStandardName()).size();
|
||||
randomSize = ThreadLocalRandom.current().nextInt(0, numElements);
|
||||
possibleChoices = processor.getEnumerations().get(referenceStandardField.getLookupStandardName()).stream()
|
||||
.map(ReferenceStandardLookup::getLookupValue).collect(Collectors.toList());
|
||||
} else if (customExamples != null && customExamples.size() > 0) {
|
||||
randomSize = ThreadLocalRandom.current().nextInt(customExamples.size());
|
||||
possibleChoices = customExamples;
|
||||
} else {
|
||||
possibleChoices = new ArrayList<>();
|
||||
possibleChoices.add(Faker.instance().buffy().quotes());
|
||||
}
|
||||
|
||||
new LinkedHashSet<>(randomSize);
|
||||
|
||||
for(int numEnums = 0; numEnums < randomSize; numEnums++) {
|
||||
Collections.shuffle(possibleChoices);
|
||||
if (possibleChoices.size() > 0) {
|
||||
enumNames.add(wrapInQuotes(possibleChoices.get(0)));
|
||||
possibleChoices.remove(0);
|
||||
}
|
||||
}
|
||||
return new ArrayList<>(enumNames);
|
||||
}
|
||||
|
||||
static String wrapInQuotes(String item) {
|
||||
return "\"" + item + "\"";
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: determine whether we need to be able to go both ways on dates on demand.
|
||||
* For example, it might make sense to have open house dates in the future.
|
||||
* This method currently only generates past dates.
|
||||
* @param referenceStandardField
|
||||
* @return
|
||||
*/
|
||||
String generateDate(ReferenceStandardField referenceStandardField) {
|
||||
long numDays = ThreadLocalRandom.current().nextInt(5 * 365); //max 5 years back
|
||||
return wrapInQuotes(Utils.getIsoDate(OffsetDateTime.now().minus(numDays, ChronoUnit.DAYS)));
|
||||
}
|
||||
|
||||
/**
|
||||
* The only time a string will be generated will be when there is a custom example
|
||||
* @param referenceStandardField
|
||||
* @return
|
||||
*/
|
||||
String generateString(ReferenceStandardField referenceStandardField) {
|
||||
List<String> customExamples = dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()) != null
|
||||
? dataGeneratorResourceFieldMap.get().get(referenceStandardField.getParentResourceName()).get(referenceStandardField.getStandardName()).getCustomExamples() : null;
|
||||
|
||||
String value;
|
||||
|
||||
if (customExamples != null && customExamples.size() > 0) {
|
||||
value = customExamples.get(ThreadLocalRandom.current().nextInt(customExamples.size()));
|
||||
} else {
|
||||
value = Faker.instance().buffy().quotes();
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
value = wrapInQuotes(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
String generateTimestamp(ReferenceStandardField referenceStandardField) {
|
||||
long numDays = ThreadLocalRandom.current().nextInt(5 * 365); //max 5 years back
|
||||
return wrapInQuotes(Utils.getIsoTimestamp(OffsetDateTime.now().minus(numDays, ChronoUnit.DAYS)));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
@ -198,11 +198,11 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
standardFieldsMap.forEach((resourceName, standardFieldMap) -> {
|
||||
standardFieldMap.forEach((standardName, referenceStandardField) -> {
|
||||
if (referenceStandardField.isSingleEnumeration()) {
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupName(), buildSingleEnumTypeMarkup(referenceStandardField));
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupStandardName(), buildSingleEnumTypeMarkup(referenceStandardField));
|
||||
}
|
||||
|
||||
if (referenceStandardField.isMultipleEnumeration()) {
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupName(), buildMultipleEnumTypeMarkup(referenceStandardField));
|
||||
markupMap.putIfAbsent(referenceStandardField.getLookupStandardName(), buildMultipleEnumTypeMarkup(referenceStandardField));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
@ -241,11 +241,11 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
private String buildSingleEnumTypeMarkup(ReferenceStandardField standardField) {
|
||||
StringBuilder content = new StringBuilder();
|
||||
|
||||
if (getEnumerations().get(standardField.getLookupName()) != null) {
|
||||
content.append("<EnumType Name=\"").append(standardField.getLookupName()).append("\">");
|
||||
if (getEnumerations().get(standardField.getLookupStandardName()) != null) {
|
||||
content.append("<EnumType Name=\"").append(standardField.getLookupStandardName()).append("\">");
|
||||
|
||||
//iterate through each of the lookup values and generate their edm:EnumType content
|
||||
getEnumerations().get(standardField.getLookupName()).forEach(lookup -> {
|
||||
getEnumerations().get(standardField.getLookupStandardName()).forEach(lookup -> {
|
||||
content
|
||||
.append("<Member Name=\"").append(lookup.getLookupValue()).append("\">")
|
||||
.append(EDMXTemplates.buildDisplayNameAnnotation(lookup.getLookupDisplayName()))
|
||||
|
@ -258,9 +258,9 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
} else {
|
||||
content
|
||||
.append("<!-- TODO: implement if you are using the single-valued enumeration \"")
|
||||
.append(standardField.getLookupName()).append("\" -->")
|
||||
.append("<EnumType Name=\"").append(standardField.getLookupName()).append("\">")
|
||||
.append("<Member Name=\"Sample").append(standardField.getLookupName()).append("EnumValue").append("\"/>")
|
||||
.append(standardField.getLookupStandardName()).append("\" -->")
|
||||
.append("<EnumType Name=\"").append(standardField.getLookupStandardName()).append("\">")
|
||||
.append("<Member Name=\"Sample").append(standardField.getLookupStandardName()).append("EnumValue").append("\"/>")
|
||||
.append("</EnumType>");
|
||||
}
|
||||
return content.toString();
|
||||
|
@ -269,11 +269,11 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
private String buildMultipleEnumTypeMarkup(ReferenceStandardField standardField) {
|
||||
StringBuilder content = new StringBuilder();
|
||||
|
||||
if (getEnumerations().get(standardField.getLookupName()) != null) {
|
||||
content.append("<EnumType Name=\"").append(standardField.getLookupName()).append("\">");
|
||||
if (getEnumerations().get(standardField.getLookupStandardName()) != null) {
|
||||
content.append("<EnumType Name=\"").append(standardField.getLookupStandardName()).append("\">");
|
||||
|
||||
//iterate through each of the lookup values and generate their edm:EnumType content
|
||||
getEnumerations().get(standardField.getLookupName()).forEach(lookup -> {
|
||||
getEnumerations().get(standardField.getLookupStandardName()).forEach(lookup -> {
|
||||
content
|
||||
.append("<Member Name=\"").append(lookup.getLookupValue()).append("\">")
|
||||
.append(EDMXTemplates.buildDisplayNameAnnotation(lookup.getLookupDisplayName()))
|
||||
|
@ -285,8 +285,8 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
content.append("</EnumType>");
|
||||
} else {
|
||||
content
|
||||
.append("<!-- TODO: implement if you are using the multi-valued enumeration \"").append(standardField.getLookupName()).append("\" -->")
|
||||
.append("<EnumType Name=\"").append(standardField.getLookupName()).append("\">")
|
||||
.append("<!-- TODO: implement if you are using the multi-valued enumeration \"").append(standardField.getLookupStandardName()).append("\" -->")
|
||||
.append("<EnumType Name=\"").append(standardField.getLookupStandardName()).append("\">")
|
||||
.append(EDMXTemplates.buildDDWikiUrlAnnotation(standardField.getWikiPageUrl()))
|
||||
.append(EDMXTemplates.buildDescriptionAnnotation(standardField.getDefinition()))
|
||||
.append("<Member Name=\"Sample").append(standardField.getStandardName()).append("EnumValue").append("\"/>")
|
||||
|
@ -394,7 +394,7 @@ public class EDMXProcessor extends WorksheetProcessor {
|
|||
if (!field.getLookup().toLowerCase().contains("lookups")) return EMPTY_STRING;
|
||||
return ""
|
||||
+ "<Property Name=\"" + field.getStandardName()
|
||||
+ "\" Type=\"Collection(" + RESO_NAMESPACE + ".enums." + field.getLookupName() + ")\">"
|
||||
+ "\" Type=\"Collection(" + RESO_NAMESPACE + ".enums." + field.getLookupStandardName() + ")\">"
|
||||
+ buildDisplayNameAnnotation(field.getDisplayName())
|
||||
+ buildDDWikiUrlAnnotation(field.getWikiPageUrl())
|
||||
+ buildDescriptionAnnotation(field.getDefinition())
|
||||
|
|
|
@ -20,7 +20,6 @@ import static org.junit.Assert.assertTrue;
|
|||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_DATA_TYPES.*;
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_FIELD_HEADERS.COLLECTION;
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_FIELD_HEADERS.STANDARD_NAME;
|
||||
import static org.reso.commander.common.DataDictionaryMetadata.v1_7.LOOKUP_FIELDS_AND_VALUES;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
public abstract class WorksheetProcessor {
|
||||
|
@ -28,7 +27,7 @@ public abstract class WorksheetProcessor {
|
|||
public static final String REFERENCE_WORKSHEET = "RESODataDictionary-1.7.xlsx";
|
||||
|
||||
static final Map<String, String> resourceTemplates = new LinkedHashMap<>();
|
||||
static final Map<String, Set<ReferenceStandardLookup>> standardEnumerationsMap = new LinkedHashMap<>();
|
||||
static final Map<String, List<ReferenceStandardLookup>> standardEnumerationsMap = new LinkedHashMap<>();
|
||||
static final Map<String, Map<String, ReferenceStandardField>> standardFieldsMap = new LinkedHashMap<>(new LinkedHashMap<>());
|
||||
private static final Logger LOG = LogManager.getLogger(WorksheetProcessor.class);
|
||||
String referenceDocument = null;
|
||||
|
@ -302,8 +301,7 @@ public abstract class WorksheetProcessor {
|
|||
}
|
||||
|
||||
String getDirectoryName() {
|
||||
return startTimestamp + "-" + getReferenceResource()
|
||||
.toLowerCase().substring(0, getReferenceResource().lastIndexOf("."));
|
||||
return startTimestamp + "-" + REFERENCE_WORKSHEET.toLowerCase().substring(0, REFERENCE_WORKSHEET.lastIndexOf("."));
|
||||
}
|
||||
|
||||
public String getReferenceResource() {
|
||||
|
@ -333,7 +331,9 @@ public abstract class WorksheetProcessor {
|
|||
}
|
||||
|
||||
public void buildEnumerationMap() {
|
||||
Sheet sheet = getReferenceWorkbook().getSheet(LOOKUP_FIELDS_AND_VALUES);
|
||||
final String ENUMERATION_TAB_NAME = "Lookup Fields and Values";
|
||||
|
||||
Sheet sheet = getReferenceWorkbook().getSheet(ENUMERATION_TAB_NAME);
|
||||
buildWellKnownStandardEnumerationHeaderMap(sheet);
|
||||
|
||||
AtomicReference<ReferenceStandardLookup> standardEnumeration = new AtomicReference<>();
|
||||
|
@ -343,14 +343,13 @@ public abstract class WorksheetProcessor {
|
|||
standardEnumeration.set(deserializeStandardEnumerationRow(row));
|
||||
|
||||
if (!standardEnumerationsMap.containsKey(standardEnumeration.get().getLookupField())) {
|
||||
standardEnumerationsMap.put(standardEnumeration.get().getLookupField(), new LinkedHashSet<>());
|
||||
standardEnumerationsMap.put(standardEnumeration.get().getLookupField(), new ArrayList<>());
|
||||
}
|
||||
standardEnumerationsMap.get(standardEnumeration.get().getLookupField()).add(standardEnumeration.get());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//TODO: convert to parallel stream
|
||||
public void buildStandardRelationships(Sheet worksheet) {
|
||||
int FIRST_ROW_INDEX = 1;
|
||||
Row currentRow;
|
||||
|
@ -364,7 +363,7 @@ public abstract class WorksheetProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
public Map<String, Set<ReferenceStandardLookup>> getEnumerations() {
|
||||
public Map<String, List<ReferenceStandardLookup>> getEnumerations() {
|
||||
return standardEnumerationsMap;
|
||||
}
|
||||
|
||||
|
|
|
@ -7,7 +7,6 @@ import com.networknt.schema.JsonSchema;
|
|||
import com.networknt.schema.JsonSchemaFactory;
|
||||
import com.networknt.schema.SpecVersion;
|
||||
import com.networknt.schema.ValidationMessage;
|
||||
import io.cucumber.java.bs.A;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
@ -23,25 +22,22 @@ import org.apache.olingo.client.api.uri.QueryOption;
|
|||
import org.apache.olingo.commons.api.edm.Edm;
|
||||
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
|
||||
import org.apache.olingo.commons.api.format.ContentType;
|
||||
import org.reso.certification.codegen.DDCacheProcessor;
|
||||
import org.reso.commander.Commander;
|
||||
import org.reso.commander.common.DataDictionaryMetadata;
|
||||
import org.reso.commander.common.TestUtils;
|
||||
import org.reso.models.*;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.reso.commander.Commander.*;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
import static org.reso.commander.common.TestUtils.*;
|
||||
import static org.reso.models.Request.loadFromRESOScript;
|
||||
import static org.reso.commander.common.TestUtils.HEADER_ODATA_VERSION;
|
||||
import static org.reso.commander.common.TestUtils.JSON_VALUE_PATH;
|
||||
|
||||
/**
|
||||
* Encapsulates Commander Requests and Responses during runtime
|
||||
|
@ -83,7 +79,6 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
private final AtomicBoolean isDataSystemValid = new AtomicBoolean(false);
|
||||
private final AtomicReference<Set<ValidationMessage>> schemaValidationErrors = new AtomicReference<>();
|
||||
private final AtomicBoolean isUsingMetadataFile = new AtomicBoolean(false);
|
||||
private final AtomicBoolean useEdmEnabledClient = new AtomicBoolean(true);
|
||||
|
||||
// request instance variables - these get resetMarkupBuffer with every request
|
||||
//TODO: refactor underlying response properties to use a ODataTransportWrapper (or any TransportWrapper)
|
||||
|
@ -103,9 +98,6 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
private final AtomicReference<ODataEntitySetRequest<ClientEntitySet>> clientEntitySetRequest = new AtomicReference<>();
|
||||
private final AtomicReference<ODataRetrieveResponse<ClientEntitySet>> clientEntitySetResponse = new AtomicReference<>();
|
||||
private final AtomicReference<ClientEntitySet> clientEntitySet = new AtomicReference<>();
|
||||
private final AtomicReference<DDCacheProcessor> ddCacheProcessor = new AtomicReference<>();
|
||||
|
||||
private static final String WEB_API_CORE_REFERENCE_REQUESTS = "reference-web-api-core-requests.xml";
|
||||
|
||||
//singleton variables
|
||||
private static final AtomicReference<Map<String, Map<String, CsdlProperty>>> fieldMap = new AtomicReference<>();
|
||||
|
@ -115,18 +107,9 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
*/
|
||||
public void initialize() {
|
||||
if (getIsInitialized()) return;
|
||||
Commander.Builder builder = new Commander.Builder().useEdmEnabledClient(true);
|
||||
|
||||
LOG.info("Using Edm Enabled Client: " + useEdmEnabledClient.get());
|
||||
Commander.Builder builder = new Commander.Builder().useEdmEnabledClient(useEdmEnabledClient.get());
|
||||
|
||||
if (getSettings() != null) {
|
||||
//overwrite any requests loaded with the reference queries
|
||||
//TODO: make the reference requests something that can be passed in during initialization
|
||||
getSettings().setRequests(loadFromRESOScript(new File(Objects.requireNonNull(
|
||||
getClass().getClassLoader().getResource(WEB_API_CORE_REFERENCE_REQUESTS)).getPath()))
|
||||
.stream().map(request -> Settings.resolveParameters(request, getSettings())).collect(Collectors.toList()));
|
||||
|
||||
|
||||
if (!isUsingMetadataFile.get()) {
|
||||
setServiceRoot(getSettings().getClientSettings().get(ClientSettings.SERVICE_ROOT));
|
||||
|
||||
//TODO: add base64 un-encode when applicable
|
||||
|
@ -142,7 +125,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
setRedirectUri(getSettings().getClientSettings().get(ClientSettings.REDIRECT_URI));
|
||||
setScope(getSettings().getClientSettings().get(ClientSettings.CLIENT_SCOPE));
|
||||
|
||||
LOG.debug("Service root is: " + getServiceRoot());
|
||||
LOG.info("Service root is: " + getServiceRoot());
|
||||
|
||||
builder
|
||||
.clientId(getClientId())
|
||||
|
@ -212,33 +195,21 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
/**
|
||||
* Creates a metadata field map for the given resource name and each set of fields found for that resource, if present
|
||||
*/
|
||||
public void buildFieldMap() {
|
||||
private void buildFieldMap() {
|
||||
try {
|
||||
if (fieldMap.get() == null) {
|
||||
fieldMap.set(new LinkedHashMap<>());
|
||||
}
|
||||
if (fieldMap.get() == null) fieldMap.set(new LinkedHashMap<>());
|
||||
|
||||
LOG.debug("Building Field Map...");
|
||||
|
||||
//if settings exist
|
||||
if (getXMLMetadata() == null) {
|
||||
if (getSettings() != null) {
|
||||
LOG.info("No XML Metadata found in the container but settings exist. Trying to fetch it from the server...");
|
||||
assertNotNull(getDefaultErrorMessage("No XML Metadata was fetched from the server!"), fetchXMLMetadata());
|
||||
assertNotNull(getDefaultErrorMessage("No Entity Data Model (edm) found in the container!"), getEdm());
|
||||
LOG.info("Metadata fetched!");
|
||||
} else {
|
||||
LOG.debug("Metadata does not exist in the container!");
|
||||
return;
|
||||
}
|
||||
}
|
||||
assertNotNull(getDefaultErrorMessage("no XML Metadata found in the container!"), fetchXMLMetadata());
|
||||
assertNotNull(getDefaultErrorMessage("no Entity Data Model (edm) found in the container!"), getEdm());
|
||||
|
||||
//build a map of all of the discovered fields on the server for the given resource by field name
|
||||
//TODO: add multiple Data Dictionary version support
|
||||
DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCES.forEach(resourceName -> {
|
||||
List<CsdlProperty> csdlProperties = null;
|
||||
try {
|
||||
csdlProperties = TestUtils.findEntityTypesForEntityTypeName(getEdm(), getXMLMetadata(), resourceName);
|
||||
csdlProperties = TestUtils.findEntityTypesForEntityTypeName(getEdm(), fetchXMLMetadata(), resourceName);
|
||||
} catch (Exception e) {
|
||||
LOG.error(e);
|
||||
}
|
||||
|
@ -359,7 +330,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
*/
|
||||
public Edm getEdm() {
|
||||
if (edm.get() == null) {
|
||||
assertNotNull(getDefaultErrorMessage("No XML response data found, cannot return Edm!"), xmlResponseData.get());
|
||||
assertNotNull(getDefaultErrorMessage("no XML response data found, cannot return Edm!"), xmlResponseData.get());
|
||||
edm.set(Commander.deserializeEdm(xmlResponseData.get(), getCommander().getClient()));
|
||||
}
|
||||
return edm.get();
|
||||
|
@ -381,7 +352,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
* @implNote the data in this item are cached in the test container once fetched
|
||||
*/
|
||||
public XMLMetadata fetchXMLMetadata() throws Exception {
|
||||
if (getSettings() != null && xmlMetadata.get() == null) {
|
||||
if (xmlMetadata.get() == null) {
|
||||
try {
|
||||
Request request = getSettings().getRequest(Request.WELL_KNOWN.METADATA_ENDPOINT);
|
||||
setRequest(request);
|
||||
|
@ -389,8 +360,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
|
||||
URI pathToMetadata = getCommander().getPathToMetadata(request.getRequestUrl());
|
||||
if (pathToMetadata != null) {
|
||||
LOG.info("Requesting XML Metadata from: " + pathToMetadata);
|
||||
|
||||
LOG.info("Requesting XML Metadata from: " + pathToMetadata.toString());
|
||||
ODataTransportWrapper wrapper = getCommander().executeODataGetRequest(pathToMetadata.toString());
|
||||
setODataRawResponse(wrapper.getODataRawResponse());
|
||||
responseCode.set(wrapper.getHttpResponseCode());
|
||||
|
@ -404,8 +374,8 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
xmlResponseData.set(wrapper.getResponseData());
|
||||
xmlMetadata.set(Commander.deserializeXMLMetadata(xmlResponseData.get(), getCommander().getClient()));
|
||||
} else {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage("Could not create metadata URI from given requestUri:",
|
||||
request.getRequestUrl()), LOG);
|
||||
LOG.error(getDefaultErrorMessage("could not create metadata URI from given requestUri:", request.getRequestUrl()));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
} finally {
|
||||
haveMetadataBeenRequested.set(true);
|
||||
|
@ -433,7 +403,6 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
* @return the local Commander instance
|
||||
*/
|
||||
public Commander getCommander() {
|
||||
if (commander.get() == null) initialize();
|
||||
return commander.get();
|
||||
}
|
||||
|
||||
|
@ -706,23 +675,15 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
}
|
||||
|
||||
private void processODataRequestException(ODataClientErrorException exception) {
|
||||
/*
|
||||
TODO: determine whether these additional lines are needed or whether the bubbled error is sufficient
|
||||
LOG.error("ODataClientErrorException caught. Check tests for asserted conditions...");
|
||||
LOG.error(exception);
|
||||
*/
|
||||
|
||||
LOG.debug("ODataClientErrorException caught. Check tests for asserted conditions...");
|
||||
LOG.debug(exception);
|
||||
setODataClientErrorException(exception);
|
||||
setServerODataHeaderVersion(TestUtils.getHeaderData(HEADER_ODATA_VERSION, Arrays.asList(exception.getHeaderInfo())));
|
||||
setResponseCode(exception.getStatusLine().getStatusCode());
|
||||
}
|
||||
|
||||
private void processODataRequestException(ODataServerErrorException exception) {
|
||||
/*
|
||||
TODO: determine whether these additional lines are needed or whether the bubbled error is sufficient
|
||||
LOG.error("ODataServerErrorException thrown in executeGetRequest. Check tests for asserted conditions...");
|
||||
*/
|
||||
|
||||
LOG.debug("ODataServerErrorException thrown in executeGetRequest. Check tests for asserted conditions...");
|
||||
//TODO: look for better ways to do this in Olingo or open PR
|
||||
if (exception.getMessage().contains(Integer.toString(HttpStatus.SC_NOT_IMPLEMENTED))) {
|
||||
setResponseCode(HttpStatus.SC_NOT_IMPLEMENTED);
|
||||
|
@ -730,6 +691,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
setODataServerErrorException(exception);
|
||||
}
|
||||
|
||||
|
||||
public boolean getIsValidXMLMetadata() {
|
||||
return isValidXMLMetadata.get();
|
||||
}
|
||||
|
@ -776,7 +738,7 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
&& edm.get() != null && getIsValidEdm();
|
||||
}
|
||||
|
||||
public WebAPITestContainer validateMetadata() {
|
||||
public final WebAPITestContainer validateMetadata() {
|
||||
try {
|
||||
if (!haveMetadataBeenRequested.get()) fetchXMLMetadata();
|
||||
assertNotNull(getDefaultErrorMessage("no XML response data found!"), getXMLResponseData());
|
||||
|
@ -901,13 +863,6 @@ public final class WebAPITestContainer implements TestContainer {
|
|||
isInitialized.set(value);
|
||||
}
|
||||
|
||||
public DDCacheProcessor getDDCacheProcessor() {
|
||||
if (ddCacheProcessor.get() == null) {
|
||||
ddCacheProcessor.set(TestUtils.buildDataDictionaryCache());
|
||||
}
|
||||
return ddCacheProcessor.get();
|
||||
}
|
||||
|
||||
public static final class ODATA_QUERY_PARAMS {
|
||||
private static final String format = DOLLAR_SIGN + "%s";
|
||||
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
# This feature implements the change proposal outlined in
|
||||
# section 2.2 of the RESO Data Dictionary 1.7 specification.
|
||||
#
|
||||
# The tests for the Lookup resource model is in ../resources/lookup.feature
|
||||
#
|
||||
# See: https://github.com/RESOStandards/reso-transport-specifications/blob/cd8bbd2038955e5380598d509fa2245bc98cbfdd/DATA-DICTIONARY.md#lookup-resource
|
||||
Feature: Lookup Acceptance Tests (RCP-032)
|
||||
|
||||
Background:
|
||||
When a RESOScript file is provided
|
||||
Then Client Settings and Parameters can be read from the RESOScript
|
||||
And a test container was successfully created from the given RESOScript file
|
||||
And the test container uses an Authorization Code or Client Credentials for authentication
|
||||
And valid metadata were retrieved from the server
|
||||
When the "Lookup" Resource exists in the metadata
|
||||
Then valid data is replicated from the "Lookup" Resource
|
||||
|
||||
@dd-1.7 @rcp-032 @lookup-resource
|
||||
Scenario: Ensure That Required Lookup Resource Fields Are Present in Server Metadata
|
||||
Given that metadata have been retrieved from the server and validated
|
||||
When the "Lookup" Resource exists in the metadata
|
||||
Then "Lookup" Resource data and metadata MUST contain the following fields
|
||||
| LookupKey |
|
||||
| LookupName |
|
||||
| LookupValue |
|
||||
| ModificationTimestamp |
|
||||
|
||||
|
||||
# <!-- OData annotation for String List, Single field -->
|
||||
# <Property Name="OfficeCountyOrParish" Type="Edm.String">
|
||||
# <Annotation Term="RESO.OData.Metadata.LookupName" String="CountyOrParish" />
|
||||
# </Property>
|
||||
#
|
||||
# <!-- OData annotation for String List, Multi field -->
|
||||
# <Property Name="ExteriorFeatures" Type="Collection(Edm.String)">
|
||||
# <Annotation Term="RESO.OData.Metadata.LookupName" String="ExteriorFeatures" />
|
||||
# </Property>
|
||||
@dd-1.7 @rcp-032 @lookup-resource
|
||||
Scenario: Check Required Annotations and LookupName Data
|
||||
Given that metadata have been retrieved from the server and validated
|
||||
When the "Lookup" Resource exists in the metadata
|
||||
Then RESO Lookups using String or String Collection data types MUST have the annotation "RESO.OData.Metadata.LookupName"
|
||||
And fields with the annotation term "RESO.OData.Metadata.LookupName" MUST have a LookupName in the Lookup Resource
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: ContactListingNotes
|
||||
|
||||
Background:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: ContactListings
|
||||
|
||||
Background:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Contacts
|
||||
|
||||
Background:
|
||||
|
@ -66,18 +66,18 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: ContactKey
|
||||
Given that the following synonyms for "ContactKey" DO NOT exist in the "Contacts" metadata
|
||||
| RID |
|
||||
When "ContactKey" exists in the "Contacts" metadata
|
||||
Then "ContactKey" MUST be "String" data type
|
||||
And the following synonyms for "ContactKey" MUST NOT exist in the metadata
|
||||
| RID |
|
||||
And "ContactKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
Scenario: ContactKeyNumeric
|
||||
Given that the following synonyms for "ContactKeyNumeric" DO NOT exist in the "Contacts" metadata
|
||||
| RID |
|
||||
When "ContactKeyNumeric" exists in the "Contacts" metadata
|
||||
Then "ContactKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ContactKeyNumeric" MUST NOT exist in the metadata
|
||||
| RID |
|
||||
|
||||
@Contacts
|
||||
Scenario: ContactLoginId
|
||||
|
@ -93,10 +93,10 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: ContactStatus
|
||||
Given that the following synonyms for "ContactStatus" DO NOT exist in the "Contacts" metadata
|
||||
| ChoiceList |
|
||||
When "ContactStatus" exists in the "Contacts" metadata
|
||||
Then "ContactStatus" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "ContactStatus" MUST NOT exist in the metadata
|
||||
| ChoiceList |
|
||||
|
||||
@Contacts
|
||||
Scenario: ContactType
|
||||
|
@ -159,11 +159,11 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: HomeCarrierRoute
|
||||
Given that the following synonyms for "HomeCarrierRoute" DO NOT exist in the "Contacts" metadata
|
||||
| RR |
|
||||
| CR |
|
||||
When "HomeCarrierRoute" exists in the "Contacts" metadata
|
||||
Then "HomeCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "HomeCarrierRoute" MUST NOT exist in the metadata
|
||||
| RR |
|
||||
| CR |
|
||||
And "HomeCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@Contacts
|
||||
|
@ -179,10 +179,10 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: HomeCountyOrParish
|
||||
Given that the following synonyms for "HomeCountyOrParish" DO NOT exist in the "Contacts" metadata
|
||||
| County |
|
||||
When "HomeCountyOrParish" exists in the "Contacts" metadata
|
||||
Then "HomeCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "HomeCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@Contacts
|
||||
Scenario: HomeFax
|
||||
|
@ -255,11 +255,11 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: NamePrefix
|
||||
Given that the following synonyms for "NamePrefix" DO NOT exist in the "Contacts" metadata
|
||||
| Salutation |
|
||||
| Title |
|
||||
When "NamePrefix" exists in the "Contacts" metadata
|
||||
Then "NamePrefix" MUST be "String" data type
|
||||
And the following synonyms for "NamePrefix" MUST NOT exist in the metadata
|
||||
| Salutation |
|
||||
| Title |
|
||||
And "NamePrefix" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Contacts
|
||||
|
@ -299,10 +299,10 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: OriginatingSystemContactKey
|
||||
Given that the following synonyms for "OriginatingSystemContactKey" DO NOT exist in the "Contacts" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemContactKey" exists in the "Contacts" metadata
|
||||
Then "OriginatingSystemContactKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemContactKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemContactKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
|
@ -313,11 +313,11 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Contacts" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Contacts" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
|
@ -334,11 +334,11 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: OtherCarrierRoute
|
||||
Given that the following synonyms for "OtherCarrierRoute" DO NOT exist in the "Contacts" metadata
|
||||
| RR |
|
||||
| CR |
|
||||
When "OtherCarrierRoute" exists in the "Contacts" metadata
|
||||
Then "OtherCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "OtherCarrierRoute" MUST NOT exist in the metadata
|
||||
| RR |
|
||||
| CR |
|
||||
And "OtherCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@Contacts
|
||||
|
@ -354,10 +354,10 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: OtherCountyOrParish
|
||||
Given that the following synonyms for "OtherCountyOrParish" DO NOT exist in the "Contacts" metadata
|
||||
| County |
|
||||
When "OtherCountyOrParish" exists in the "Contacts" metadata
|
||||
Then "OtherCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "OtherCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@Contacts
|
||||
Scenario: OtherPhoneType
|
||||
|
@ -383,26 +383,26 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: OwnerMemberID
|
||||
Given that the following synonyms for "OwnerMemberID" DO NOT exist in the "Contacts" metadata
|
||||
| OwnerAgentID |
|
||||
When "OwnerMemberID" exists in the "Contacts" metadata
|
||||
Then "OwnerMemberID" MUST be "String" data type
|
||||
And the following synonyms for "OwnerMemberID" MUST NOT exist in the metadata
|
||||
| OwnerAgentID |
|
||||
And "OwnerMemberID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Contacts
|
||||
Scenario: OwnerMemberKey
|
||||
Given that the following synonyms for "OwnerMemberKey" DO NOT exist in the "Contacts" metadata
|
||||
| OwnerAgentKey |
|
||||
When "OwnerMemberKey" exists in the "Contacts" metadata
|
||||
Then "OwnerMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "OwnerMemberKey" MUST NOT exist in the metadata
|
||||
| OwnerAgentKey |
|
||||
And "OwnerMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
Scenario: OwnerMemberKeyNumeric
|
||||
Given that the following synonyms for "OwnerMemberKeyNumeric" DO NOT exist in the "Contacts" metadata
|
||||
| OwnerAgentKeyNumeric |
|
||||
When "OwnerMemberKeyNumeric" exists in the "Contacts" metadata
|
||||
Then "OwnerMemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "OwnerMemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| OwnerAgentKeyNumeric |
|
||||
|
||||
@Contacts
|
||||
Scenario: Pager
|
||||
|
@ -439,27 +439,27 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: SourceSystemContactKey
|
||||
Given that the following synonyms for "SourceSystemContactKey" DO NOT exist in the "Contacts" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemContactKey" exists in the "Contacts" metadata
|
||||
Then "SourceSystemContactKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemContactKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemContactKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Contacts" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Contacts" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Contacts
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Contacts" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Contacts" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Contacts
|
||||
|
@ -500,11 +500,11 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: WorkCarrierRoute
|
||||
Given that the following synonyms for "WorkCarrierRoute" DO NOT exist in the "Contacts" metadata
|
||||
| RR |
|
||||
| CR |
|
||||
When "WorkCarrierRoute" exists in the "Contacts" metadata
|
||||
Then "WorkCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "WorkCarrierRoute" MUST NOT exist in the metadata
|
||||
| RR |
|
||||
| CR |
|
||||
And "WorkCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@Contacts
|
||||
|
@ -520,10 +520,10 @@ Feature: Contacts
|
|||
|
||||
@Contacts
|
||||
Scenario: WorkCountyOrParish
|
||||
Given that the following synonyms for "WorkCountyOrParish" DO NOT exist in the "Contacts" metadata
|
||||
| County |
|
||||
When "WorkCountyOrParish" exists in the "Contacts" metadata
|
||||
Then "WorkCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "WorkCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@Contacts
|
||||
Scenario: WorkPostalCode
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
Feature: Field
|
||||
|
||||
Background:
|
||||
Given a RESOScript or Metadata file are provided
|
||||
When a RESOScript file is provided
|
||||
Then Client Settings and Parameters can be read from the RESOScript
|
||||
And a test container was successfully created from the given RESOScript file
|
||||
And the test container uses an Authorization Code or Client Credentials for authentication
|
||||
And valid metadata were retrieved from the server
|
||||
When a metadata file is provided
|
||||
Then a test container was successfully created from the given metadata file
|
||||
And valid metadata are loaded into the test container
|
||||
|
||||
@Field
|
||||
Scenario: FieldKey
|
||||
When "FieldKey" exists in the "Field" metadata
|
||||
Then "FieldKey" MUST be "String" data type
|
||||
|
||||
@Field
|
||||
Scenario: ResourceName
|
||||
When "ResourceName" exists in the "Field" metadata
|
||||
Then "ResourceName" MUST be "String" data type
|
||||
|
||||
@Field
|
||||
Scenario: FieldName
|
||||
When "FieldName" exists in the "Field" metadata
|
||||
Then "FieldName" MUST be "String" data type
|
||||
|
||||
@Field
|
||||
Scenario: DisplayName
|
||||
When "DisplayName" exists in the "Field" metadata
|
||||
Then "DisplayName" MUST be "String" data type
|
||||
|
||||
@Field
|
||||
Scenario: ModificationTimestamp
|
||||
When "ModificationTimestamp" exists in the "Field" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: HistoryTransactional
|
||||
|
||||
Background:
|
||||
|
@ -19,26 +19,26 @@ Feature: HistoryTransactional
|
|||
|
||||
@HistoryTransactional
|
||||
Scenario: ChangedByMemberID
|
||||
Given that the following synonyms for "ChangedByMemberID" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ChangedByAgentID |
|
||||
When "ChangedByMemberID" exists in the "HistoryTransactional" metadata
|
||||
Then "ChangedByMemberID" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberID" MUST NOT exist in the metadata
|
||||
| ChangedByAgentID |
|
||||
And "ChangedByMemberID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: ChangedByMemberKey
|
||||
Given that the following synonyms for "ChangedByMemberKey" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ChangedByAgentKey |
|
||||
When "ChangedByMemberKey" exists in the "HistoryTransactional" metadata
|
||||
Then "ChangedByMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberKey" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKey |
|
||||
And "ChangedByMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: ChangedByMemberKeyNumeric
|
||||
Given that the following synonyms for "ChangedByMemberKeyNumeric" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
When "ChangedByMemberKeyNumeric" exists in the "HistoryTransactional" metadata
|
||||
Then "ChangedByMemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ChangedByMemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: ClassName
|
||||
|
@ -87,10 +87,10 @@ Feature: HistoryTransactional
|
|||
|
||||
@HistoryTransactional
|
||||
Scenario: OriginatingSystemHistoryKey
|
||||
Given that the following synonyms for "OriginatingSystemHistoryKey" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemHistoryKey" exists in the "HistoryTransactional" metadata
|
||||
Then "OriginatingSystemHistoryKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemHistoryKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemHistoryKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
|
@ -101,11 +101,11 @@ Feature: HistoryTransactional
|
|||
|
||||
@HistoryTransactional
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "HistoryTransactional" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
|
@ -122,55 +122,55 @@ Feature: HistoryTransactional
|
|||
|
||||
@HistoryTransactional
|
||||
Scenario: ResourceRecordID
|
||||
Given that the following synonyms for "ResourceRecordID" DO NOT exist in the "HistoryTransactional" metadata
|
||||
When "ResourceRecordID" exists in the "HistoryTransactional" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordID" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
| AgentID |
|
||||
| OfficeID |
|
||||
| ContactID |
|
||||
When "ResourceRecordID" exists in the "HistoryTransactional" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And "ResourceRecordID" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: ResourceRecordKey
|
||||
Given that the following synonyms for "ResourceRecordKey" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKey" exists in the "HistoryTransactional" metadata
|
||||
Then "ResourceRecordKey" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ResourceRecordKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: ResourceRecordKeyNumeric
|
||||
Given that the following synonyms for "ResourceRecordKeyNumeric" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKeyNumeric" exists in the "HistoryTransactional" metadata
|
||||
Then "ResourceRecordKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ResourceRecordKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: SourceSystemHistoryKey
|
||||
Given that the following synonyms for "SourceSystemHistoryKey" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemHistoryKey" exists in the "HistoryTransactional" metadata
|
||||
Then "SourceSystemHistoryKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemHistoryKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemHistoryKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "HistoryTransactional" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@HistoryTransactional
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "HistoryTransactional" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "HistoryTransactional" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: InternetTracking
|
||||
|
||||
Background:
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
Feature: Lookup
|
||||
|
||||
Background:
|
||||
Given a RESOScript or Metadata file are provided
|
||||
When a RESOScript file is provided
|
||||
Then Client Settings and Parameters can be read from the RESOScript
|
||||
And a test container was successfully created from the given RESOScript file
|
||||
And the test container uses an Authorization Code or Client Credentials for authentication
|
||||
And valid metadata were retrieved from the server
|
||||
When a metadata file is provided
|
||||
Then a test container was successfully created from the given metadata file
|
||||
And valid metadata are loaded into the test container
|
||||
|
||||
@Lookup
|
||||
Scenario: LookupKey
|
||||
When "LookupKey" exists in the "Lookup" metadata
|
||||
Then "LookupKey" MUST be "String" data type
|
||||
|
||||
@Lookup
|
||||
Scenario: LookupName
|
||||
When "LookupName" exists in the "Lookup" metadata
|
||||
Then "LookupName" MUST be "String" data type
|
||||
|
||||
@Lookup
|
||||
Scenario: LookupValue
|
||||
When "LookupValue" exists in the "Lookup" metadata
|
||||
Then "LookupValue" MUST be "String" data type
|
||||
|
||||
@Lookup
|
||||
Scenario: StandardLookupValue
|
||||
When "StandardLookupValue" exists in the "Lookup" metadata
|
||||
Then "StandardLookupValue" MUST be "String" data type
|
||||
|
||||
@Lookup
|
||||
Scenario: LegacyODataValue
|
||||
When "LegacyODataValue" exists in the "Lookup" metadata
|
||||
Then "LegacyODataValue" MUST be "String" data type
|
||||
|
||||
@Lookup
|
||||
Scenario: ModificationTimestamp
|
||||
When "ModificationTimestamp" exists in the "Lookup" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Media
|
||||
|
||||
Background:
|
||||
|
@ -14,26 +14,26 @@ Feature: Media
|
|||
|
||||
@Media
|
||||
Scenario: ChangedByMemberID
|
||||
Given that the following synonyms for "ChangedByMemberID" DO NOT exist in the "Media" metadata
|
||||
| ChangedByAgentID |
|
||||
When "ChangedByMemberID" exists in the "Media" metadata
|
||||
Then "ChangedByMemberID" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberID" MUST NOT exist in the metadata
|
||||
| ChangedByAgentID |
|
||||
And "ChangedByMemberID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Media
|
||||
Scenario: ChangedByMemberKey
|
||||
Given that the following synonyms for "ChangedByMemberKey" DO NOT exist in the "Media" metadata
|
||||
| ChangedByAgentKey |
|
||||
When "ChangedByMemberKey" exists in the "Media" metadata
|
||||
Then "ChangedByMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberKey" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKey |
|
||||
And "ChangedByMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media
|
||||
Scenario: ChangedByMemberKeyNumeric
|
||||
Given that the following synonyms for "ChangedByMemberKeyNumeric" DO NOT exist in the "Media" metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
When "ChangedByMemberKeyNumeric" exists in the "Media" metadata
|
||||
Then "ChangedByMemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ChangedByMemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
|
||||
@Media
|
||||
Scenario: ClassName
|
||||
|
@ -62,10 +62,10 @@ Feature: Media
|
|||
|
||||
@Media @IDX
|
||||
Scenario: LongDescription
|
||||
Given that the following synonyms for "LongDescription" DO NOT exist in the "Media" metadata
|
||||
| FullDescription |
|
||||
When "LongDescription" exists in the "Media" metadata
|
||||
Then "LongDescription" MUST be "String" data type
|
||||
And the following synonyms for "LongDescription" MUST NOT exist in the metadata
|
||||
| FullDescription |
|
||||
And "LongDescription" length SHOULD be equal to the RESO Suggested Max Length of 1024
|
||||
|
||||
@Media @IDX
|
||||
|
@ -81,27 +81,27 @@ Feature: Media
|
|||
|
||||
@Media @IDX
|
||||
Scenario: MediaKey
|
||||
Given that the following synonyms for "MediaKey" DO NOT exist in the "Media" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "MediaKey" exists in the "Media" metadata
|
||||
Then "MediaKey" MUST be "String" data type
|
||||
And the following synonyms for "MediaKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "MediaKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media @IDX
|
||||
Scenario: MediaKeyNumeric
|
||||
Given that the following synonyms for "MediaKeyNumeric" DO NOT exist in the "Media" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "MediaKeyNumeric" exists in the "Media" metadata
|
||||
Then "MediaKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "MediaKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@Media @IDX
|
||||
Scenario: MediaModificationTimestamp
|
||||
Given that the following synonyms for "MediaModificationTimestamp" DO NOT exist in the "Media" metadata
|
||||
| MediaTimestamp |
|
||||
When "MediaModificationTimestamp" exists in the "Media" metadata
|
||||
Then "MediaModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "MediaModificationTimestamp" MUST NOT exist in the metadata
|
||||
| MediaTimestamp |
|
||||
|
||||
@Media
|
||||
Scenario: MediaObjectID
|
||||
|
@ -116,10 +116,10 @@ Feature: Media
|
|||
|
||||
@Media
|
||||
Scenario: MediaType
|
||||
Given that the following synonyms for "MediaType" DO NOT exist in the "Media" metadata
|
||||
| MimeType |
|
||||
When "MediaType" exists in the "Media" metadata
|
||||
Then "MediaType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MediaType" MUST NOT exist in the metadata
|
||||
| MimeType |
|
||||
|
||||
@Media @IDX
|
||||
Scenario: MediaURL
|
||||
|
@ -129,15 +129,15 @@ Feature: Media
|
|||
|
||||
@Media @IDX
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "Media" metadata
|
||||
When "ModificationTimestamp" exists in the "Media" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "Media" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@Media @IDX
|
||||
Scenario: Order
|
||||
|
@ -152,19 +152,19 @@ Feature: Media
|
|||
|
||||
@Media @IDX
|
||||
Scenario: OriginatingSystemMediaKey
|
||||
Given that the following synonyms for "OriginatingSystemMediaKey" DO NOT exist in the "Media" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemMediaKey" exists in the "Media" metadata
|
||||
Then "OriginatingSystemMediaKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemMediaKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemMediaKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Media" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Media" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media
|
||||
|
@ -184,64 +184,64 @@ Feature: Media
|
|||
|
||||
@Media @IDX
|
||||
Scenario: ResourceRecordID
|
||||
Given that the following synonyms for "ResourceRecordID" DO NOT exist in the "Media" metadata
|
||||
When "ResourceRecordID" exists in the "Media" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordID" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
| AgentID |
|
||||
| OfficeID |
|
||||
| ContactID |
|
||||
When "ResourceRecordID" exists in the "Media" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And "ResourceRecordID" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media @IDX
|
||||
Scenario: ResourceRecordKey
|
||||
Given that the following synonyms for "ResourceRecordKey" DO NOT exist in the "Media" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKey" exists in the "Media" metadata
|
||||
Then "ResourceRecordKey" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ResourceRecordKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media @IDX
|
||||
Scenario: ResourceRecordKeyNumeric
|
||||
Given that the following synonyms for "ResourceRecordKeyNumeric" DO NOT exist in the "Media" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKeyNumeric" exists in the "Media" metadata
|
||||
Then "ResourceRecordKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ResourceRecordKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@Media @IDX
|
||||
Scenario: ShortDescription
|
||||
Given that the following synonyms for "ShortDescription" DO NOT exist in the "Media" metadata
|
||||
| Caption |
|
||||
| Name |
|
||||
When "ShortDescription" exists in the "Media" metadata
|
||||
Then "ShortDescription" MUST be "String" data type
|
||||
And the following synonyms for "ShortDescription" MUST NOT exist in the metadata
|
||||
| Caption |
|
||||
| Name |
|
||||
And "ShortDescription" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Media @IDX
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Media" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Media" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Media @IDX
|
||||
Scenario: SourceSystemMediaKey
|
||||
Given that the following synonyms for "SourceSystemMediaKey" DO NOT exist in the "Media" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemMediaKey" exists in the "Media" metadata
|
||||
Then "SourceSystemMediaKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemMediaKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemMediaKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Media
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Media" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Media" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Member
|
||||
|
||||
Background:
|
||||
|
@ -25,400 +25,400 @@ Feature: Member
|
|||
|
||||
@Member
|
||||
Scenario: MemberAOR
|
||||
Given that the following synonyms for "MemberAOR" DO NOT exist in the "Member" metadata
|
||||
| AgentAOR |
|
||||
When "MemberAOR" exists in the "Member" metadata
|
||||
Then "MemberAOR" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberAOR" MUST NOT exist in the metadata
|
||||
| AgentAOR |
|
||||
|
||||
@Member
|
||||
Scenario: MemberAORMlsId
|
||||
Given that the following synonyms for "MemberAORMlsId" DO NOT exist in the "Member" metadata
|
||||
| AgentAORMlsld |
|
||||
When "MemberAORMlsId" exists in the "Member" metadata
|
||||
Then "MemberAORMlsId" MUST be "String" data type
|
||||
And the following synonyms for "MemberAORMlsId" MUST NOT exist in the metadata
|
||||
| AgentAORMlsld |
|
||||
And "MemberAORMlsId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member
|
||||
Scenario: MemberAORkey
|
||||
Given that the following synonyms for "MemberAORkey" DO NOT exist in the "Member" metadata
|
||||
| AgentAORkey |
|
||||
When "MemberAORkey" exists in the "Member" metadata
|
||||
Then "MemberAORkey" MUST be "String" data type
|
||||
And the following synonyms for "MemberAORkey" MUST NOT exist in the metadata
|
||||
| AgentAORkey |
|
||||
And "MemberAORkey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member
|
||||
Scenario: MemberAORkeyNumeric
|
||||
Given that the following synonyms for "MemberAORkeyNumeric" DO NOT exist in the "Member" metadata
|
||||
| AgentAORkeyNumeric |
|
||||
When "MemberAORkeyNumeric" exists in the "Member" metadata
|
||||
Then "MemberAORkeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "MemberAORkeyNumeric" MUST NOT exist in the metadata
|
||||
| AgentAORkeyNumeric |
|
||||
|
||||
@Member
|
||||
Scenario: MemberAddress1
|
||||
Given that the following synonyms for "MemberAddress1" DO NOT exist in the "Member" metadata
|
||||
| AgentAddress1 |
|
||||
When "MemberAddress1" exists in the "Member" metadata
|
||||
Then "MemberAddress1" MUST be "String" data type
|
||||
And the following synonyms for "MemberAddress1" MUST NOT exist in the metadata
|
||||
| AgentAddress1 |
|
||||
And "MemberAddress1" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member
|
||||
Scenario: MemberAddress2
|
||||
Given that the following synonyms for "MemberAddress2" DO NOT exist in the "Member" metadata
|
||||
| AgentAddress2 |
|
||||
When "MemberAddress2" exists in the "Member" metadata
|
||||
Then "MemberAddress2" MUST be "String" data type
|
||||
And the following synonyms for "MemberAddress2" MUST NOT exist in the metadata
|
||||
| AgentAddress2 |
|
||||
And "MemberAddress2" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member
|
||||
Scenario: MemberAssociationComments
|
||||
Given that the following synonyms for "MemberAssociationComments" DO NOT exist in the "Member" metadata
|
||||
| AgentAssociationComments |
|
||||
When "MemberAssociationComments" exists in the "Member" metadata
|
||||
Then "MemberAssociationComments" MUST be "String" data type
|
||||
And the following synonyms for "MemberAssociationComments" MUST NOT exist in the metadata
|
||||
| AgentAssociationComments |
|
||||
And "MemberAssociationComments" length SHOULD be equal to the RESO Suggested Max Length of 500
|
||||
|
||||
@Member
|
||||
Scenario: MemberCarrierRoute
|
||||
Given that the following synonyms for "MemberCarrierRoute" DO NOT exist in the "Member" metadata
|
||||
When "MemberCarrierRoute" exists in the "Member" metadata
|
||||
Then "MemberCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "MemberCarrierRoute" MUST NOT exist in the metadata
|
||||
| AgentCarrierRoute |
|
||||
| RR |
|
||||
| CR |
|
||||
When "MemberCarrierRoute" exists in the "Member" metadata
|
||||
Then "MemberCarrierRoute" MUST be "String" data type
|
||||
And "MemberCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@Member
|
||||
Scenario: MemberCity
|
||||
Given that the following synonyms for "MemberCity" DO NOT exist in the "Member" metadata
|
||||
| AgentCity |
|
||||
When "MemberCity" exists in the "Member" metadata
|
||||
Then "MemberCity" MUST be "String" data type
|
||||
And the following synonyms for "MemberCity" MUST NOT exist in the metadata
|
||||
| AgentCity |
|
||||
And "MemberCity" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member
|
||||
Scenario: MemberCountry
|
||||
Given that the following synonyms for "MemberCountry" DO NOT exist in the "Member" metadata
|
||||
| AgentCountry |
|
||||
When "MemberCountry" exists in the "Member" metadata
|
||||
Then "MemberCountry" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberCountry" MUST NOT exist in the metadata
|
||||
| AgentCountry |
|
||||
|
||||
@Member
|
||||
Scenario: MemberCountyOrParish
|
||||
Given that the following synonyms for "MemberCountyOrParish" DO NOT exist in the "Member" metadata
|
||||
| AgentCountyOrParish |
|
||||
When "MemberCountyOrParish" exists in the "Member" metadata
|
||||
Then "MemberCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberCountyOrParish" MUST NOT exist in the metadata
|
||||
| AgentCountyOrParish |
|
||||
|
||||
@Member
|
||||
Scenario: MemberDesignation
|
||||
Given that the following synonyms for "MemberDesignation" DO NOT exist in the "Member" metadata
|
||||
| AgentDesignation |
|
||||
When "MemberDesignation" exists in the "Member" metadata
|
||||
Then "MemberDesignation" MUST be "Multiple Enumeration" data type
|
||||
And the following synonyms for "MemberDesignation" MUST NOT exist in the metadata
|
||||
| AgentDesignation |
|
||||
|
||||
@Member
|
||||
Scenario: MemberDirectPhone
|
||||
Given that the following synonyms for "MemberDirectPhone" DO NOT exist in the "Member" metadata
|
||||
| AgentDirectPhone |
|
||||
When "MemberDirectPhone" exists in the "Member" metadata
|
||||
Then "MemberDirectPhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberDirectPhone" MUST NOT exist in the metadata
|
||||
| AgentDirectPhone |
|
||||
And "MemberDirectPhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberEmail
|
||||
Given that the following synonyms for "MemberEmail" DO NOT exist in the "Member" metadata
|
||||
| AgentEmail |
|
||||
When "MemberEmail" exists in the "Member" metadata
|
||||
Then "MemberEmail" MUST be "String" data type
|
||||
And the following synonyms for "MemberEmail" MUST NOT exist in the metadata
|
||||
| AgentEmail |
|
||||
And "MemberEmail" length SHOULD be equal to the RESO Suggested Max Length of 80
|
||||
|
||||
@Member
|
||||
Scenario: MemberFax
|
||||
Given that the following synonyms for "MemberFax" DO NOT exist in the "Member" metadata
|
||||
| AgentFax |
|
||||
When "MemberFax" exists in the "Member" metadata
|
||||
Then "MemberFax" MUST be "String" data type
|
||||
And the following synonyms for "MemberFax" MUST NOT exist in the metadata
|
||||
| AgentFax |
|
||||
And "MemberFax" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberFirstName
|
||||
Given that the following synonyms for "MemberFirstName" DO NOT exist in the "Member" metadata
|
||||
| AgentFirstName |
|
||||
When "MemberFirstName" exists in the "Member" metadata
|
||||
Then "MemberFirstName" MUST be "String" data type
|
||||
And the following synonyms for "MemberFirstName" MUST NOT exist in the metadata
|
||||
| AgentFirstName |
|
||||
And "MemberFirstName" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberFullName
|
||||
Given that the following synonyms for "MemberFullName" DO NOT exist in the "Member" metadata
|
||||
| AgentFullName |
|
||||
When "MemberFullName" exists in the "Member" metadata
|
||||
Then "MemberFullName" MUST be "String" data type
|
||||
And the following synonyms for "MemberFullName" MUST NOT exist in the metadata
|
||||
| AgentFullName |
|
||||
And "MemberFullName" length SHOULD be equal to the RESO Suggested Max Length of 150
|
||||
|
||||
@Member
|
||||
Scenario: MemberHomePhone
|
||||
Given that the following synonyms for "MemberHomePhone" DO NOT exist in the "Member" metadata
|
||||
| AgentHomePhone |
|
||||
When "MemberHomePhone" exists in the "Member" metadata
|
||||
Then "MemberHomePhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberHomePhone" MUST NOT exist in the metadata
|
||||
| AgentHomePhone |
|
||||
And "MemberHomePhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberIsAssistantTo
|
||||
Given that the following synonyms for "MemberIsAssistantTo" DO NOT exist in the "Member" metadata
|
||||
| AgentIsAssistantTo |
|
||||
When "MemberIsAssistantTo" exists in the "Member" metadata
|
||||
Then "MemberIsAssistantTo" MUST be "String" data type
|
||||
And the following synonyms for "MemberIsAssistantTo" MUST NOT exist in the metadata
|
||||
| AgentIsAssistantTo |
|
||||
And "MemberIsAssistantTo" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberKey
|
||||
Given that the following synonyms for "MemberKey" DO NOT exist in the "Member" metadata
|
||||
| AgentKey |
|
||||
When "MemberKey" exists in the "Member" metadata
|
||||
Then "MemberKey" MUST be "String" data type
|
||||
And the following synonyms for "MemberKey" MUST NOT exist in the metadata
|
||||
| AgentKey |
|
||||
And "MemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberKeyNumeric
|
||||
Given that the following synonyms for "MemberKeyNumeric" DO NOT exist in the "Member" metadata
|
||||
| AgentKeyNumeric |
|
||||
When "MemberKeyNumeric" exists in the "Member" metadata
|
||||
Then "MemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "MemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| AgentKeyNumeric |
|
||||
|
||||
@Member
|
||||
Scenario: MemberLanguages
|
||||
Given that the following synonyms for "MemberLanguages" DO NOT exist in the "Member" metadata
|
||||
| AgentLanguages |
|
||||
When "MemberLanguages" exists in the "Member" metadata
|
||||
Then "MemberLanguages" MUST be "Multiple Enumeration" data type
|
||||
And the following synonyms for "MemberLanguages" MUST NOT exist in the metadata
|
||||
| AgentLanguages |
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberLastName
|
||||
Given that the following synonyms for "MemberLastName" DO NOT exist in the "Member" metadata
|
||||
| AgentLastName |
|
||||
When "MemberLastName" exists in the "Member" metadata
|
||||
Then "MemberLastName" MUST be "String" data type
|
||||
And the following synonyms for "MemberLastName" MUST NOT exist in the metadata
|
||||
| AgentLastName |
|
||||
And "MemberLastName" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberLoginId
|
||||
Given that the following synonyms for "MemberLoginId" DO NOT exist in the "Member" metadata
|
||||
| AgentLoginId |
|
||||
When "MemberLoginId" exists in the "Member" metadata
|
||||
Then "MemberLoginId" MUST be "String" data type
|
||||
And the following synonyms for "MemberLoginId" MUST NOT exist in the metadata
|
||||
| AgentLoginId |
|
||||
And "MemberLoginId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberMiddleName
|
||||
Given that the following synonyms for "MemberMiddleName" DO NOT exist in the "Member" metadata
|
||||
| AgentMiddleName |
|
||||
When "MemberMiddleName" exists in the "Member" metadata
|
||||
Then "MemberMiddleName" MUST be "String" data type
|
||||
And the following synonyms for "MemberMiddleName" MUST NOT exist in the metadata
|
||||
| AgentMiddleName |
|
||||
And "MemberMiddleName" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member
|
||||
Scenario: MemberMlsAccessYN
|
||||
Given that the following synonyms for "MemberMlsAccessYN" DO NOT exist in the "Member" metadata
|
||||
| AgentMlsAccessYN |
|
||||
When "MemberMlsAccessYN" exists in the "Member" metadata
|
||||
Then "MemberMlsAccessYN" MUST be "Boolean" data type
|
||||
And the following synonyms for "MemberMlsAccessYN" MUST NOT exist in the metadata
|
||||
| AgentMlsAccessYN |
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberMlsId
|
||||
Given that the following synonyms for "MemberMlsId" DO NOT exist in the "Member" metadata
|
||||
| AgentMlsId |
|
||||
When "MemberMlsId" exists in the "Member" metadata
|
||||
Then "MemberMlsId" MUST be "String" data type
|
||||
And the following synonyms for "MemberMlsId" MUST NOT exist in the metadata
|
||||
| AgentMlsId |
|
||||
And "MemberMlsId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member
|
||||
Scenario: MemberMlsSecurityClass
|
||||
Given that the following synonyms for "MemberMlsSecurityClass" DO NOT exist in the "Member" metadata
|
||||
| AgentMlsSecurityClass |
|
||||
When "MemberMlsSecurityClass" exists in the "Member" metadata
|
||||
Then "MemberMlsSecurityClass" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberMlsSecurityClass" MUST NOT exist in the metadata
|
||||
| AgentMlsSecurityClass |
|
||||
|
||||
@Member
|
||||
Scenario: MemberMobilePhone
|
||||
Given that the following synonyms for "MemberMobilePhone" DO NOT exist in the "Member" metadata
|
||||
| AgentMobilePhone |
|
||||
When "MemberMobilePhone" exists in the "Member" metadata
|
||||
Then "MemberMobilePhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberMobilePhone" MUST NOT exist in the metadata
|
||||
| AgentMobilePhone |
|
||||
And "MemberMobilePhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberNamePrefix
|
||||
Given that the following synonyms for "MemberNamePrefix" DO NOT exist in the "Member" metadata
|
||||
When "MemberNamePrefix" exists in the "Member" metadata
|
||||
Then "MemberNamePrefix" MUST be "String" data type
|
||||
And the following synonyms for "MemberNamePrefix" MUST NOT exist in the metadata
|
||||
| AgentNamePrefix |
|
||||
| Salutation |
|
||||
| Title |
|
||||
When "MemberNamePrefix" exists in the "Member" metadata
|
||||
Then "MemberNamePrefix" MUST be "String" data type
|
||||
And "MemberNamePrefix" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberNameSuffix
|
||||
Given that the following synonyms for "MemberNameSuffix" DO NOT exist in the "Member" metadata
|
||||
| AgentNameSuffix |
|
||||
When "MemberNameSuffix" exists in the "Member" metadata
|
||||
Then "MemberNameSuffix" MUST be "String" data type
|
||||
And the following synonyms for "MemberNameSuffix" MUST NOT exist in the metadata
|
||||
| AgentNameSuffix |
|
||||
And "MemberNameSuffix" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member
|
||||
Scenario: MemberNationalAssociationId
|
||||
Given that the following synonyms for "MemberNationalAssociationId" DO NOT exist in the "Member" metadata
|
||||
| AgentNationalAssociationId |
|
||||
When "MemberNationalAssociationId" exists in the "Member" metadata
|
||||
Then "MemberNationalAssociationId" MUST be "String" data type
|
||||
And the following synonyms for "MemberNationalAssociationId" MUST NOT exist in the metadata
|
||||
| AgentNationalAssociationId |
|
||||
And "MemberNationalAssociationId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberNickname
|
||||
Given that the following synonyms for "MemberNickname" DO NOT exist in the "Member" metadata
|
||||
| AgentNickname |
|
||||
When "MemberNickname" exists in the "Member" metadata
|
||||
Then "MemberNickname" MUST be "String" data type
|
||||
And the following synonyms for "MemberNickname" MUST NOT exist in the metadata
|
||||
| AgentNickname |
|
||||
And "MemberNickname" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member
|
||||
Scenario: MemberOfficePhone
|
||||
Given that the following synonyms for "MemberOfficePhone" DO NOT exist in the "Member" metadata
|
||||
| AgentOfficePhone |
|
||||
When "MemberOfficePhone" exists in the "Member" metadata
|
||||
Then "MemberOfficePhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberOfficePhone" MUST NOT exist in the metadata
|
||||
| AgentOfficePhone |
|
||||
And "MemberOfficePhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberOfficePhoneExt
|
||||
Given that the following synonyms for "MemberOfficePhoneExt" DO NOT exist in the "Member" metadata
|
||||
| AgentOfficePhoneExt |
|
||||
When "MemberOfficePhoneExt" exists in the "Member" metadata
|
||||
Then "MemberOfficePhoneExt" MUST be "String" data type
|
||||
And the following synonyms for "MemberOfficePhoneExt" MUST NOT exist in the metadata
|
||||
| AgentOfficePhoneExt |
|
||||
And "MemberOfficePhoneExt" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member
|
||||
Scenario: MemberOtherPhoneType
|
||||
Given that the following synonyms for "MemberOtherPhoneType" DO NOT exist in the "Member" metadata
|
||||
| AgentOtherPhoneType |
|
||||
When "MemberOtherPhoneType" exists in the "Member" metadata
|
||||
Then "MemberOtherPhoneType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberOtherPhoneType" MUST NOT exist in the metadata
|
||||
| AgentOtherPhoneType |
|
||||
|
||||
@Member
|
||||
Scenario: MemberPager
|
||||
Given that the following synonyms for "MemberPager" DO NOT exist in the "Member" metadata
|
||||
| AgentPager |
|
||||
When "MemberPager" exists in the "Member" metadata
|
||||
Then "MemberPager" MUST be "String" data type
|
||||
And the following synonyms for "MemberPager" MUST NOT exist in the metadata
|
||||
| AgentPager |
|
||||
And "MemberPager" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberPassword
|
||||
Given that the following synonyms for "MemberPassword" DO NOT exist in the "Member" metadata
|
||||
| AgentPassword |
|
||||
When "MemberPassword" exists in the "Member" metadata
|
||||
Then "MemberPassword" MUST be "String" data type
|
||||
And the following synonyms for "MemberPassword" MUST NOT exist in the metadata
|
||||
| AgentPassword |
|
||||
And "MemberPassword" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member
|
||||
Scenario: MemberPhoneTTYTDD
|
||||
Given that the following synonyms for "MemberPhoneTTYTDD" DO NOT exist in the "Member" metadata
|
||||
| AgentPhoneTTYTDD |
|
||||
When "MemberPhoneTTYTDD" exists in the "Member" metadata
|
||||
Then "MemberPhoneTTYTDD" MUST be "String" data type
|
||||
And the following synonyms for "MemberPhoneTTYTDD" MUST NOT exist in the metadata
|
||||
| AgentPhoneTTYTDD |
|
||||
And "MemberPhoneTTYTDD" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberPostalCode
|
||||
Given that the following synonyms for "MemberPostalCode" DO NOT exist in the "Member" metadata
|
||||
| AgentPostalCode |
|
||||
When "MemberPostalCode" exists in the "Member" metadata
|
||||
Then "MemberPostalCode" MUST be "String" data type
|
||||
And the following synonyms for "MemberPostalCode" MUST NOT exist in the metadata
|
||||
| AgentPostalCode |
|
||||
And "MemberPostalCode" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member
|
||||
Scenario: MemberPostalCodePlus4
|
||||
Given that the following synonyms for "MemberPostalCodePlus4" DO NOT exist in the "Member" metadata
|
||||
| AgentPostalCodePlus4 |
|
||||
When "MemberPostalCodePlus4" exists in the "Member" metadata
|
||||
Then "MemberPostalCodePlus4" MUST be "String" data type
|
||||
And the following synonyms for "MemberPostalCodePlus4" MUST NOT exist in the metadata
|
||||
| AgentPostalCodePlus4 |
|
||||
And "MemberPostalCodePlus4" length SHOULD be equal to the RESO Suggested Max Length of 4
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberPreferredPhone
|
||||
Given that the following synonyms for "MemberPreferredPhone" DO NOT exist in the "Member" metadata
|
||||
| AgentPreferredPhone |
|
||||
When "MemberPreferredPhone" exists in the "Member" metadata
|
||||
Then "MemberPreferredPhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberPreferredPhone" MUST NOT exist in the metadata
|
||||
| AgentPreferredPhone |
|
||||
And "MemberPreferredPhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberPreferredPhoneExt
|
||||
Given that the following synonyms for "MemberPreferredPhoneExt" DO NOT exist in the "Member" metadata
|
||||
| AgentPreferredPhoneExt |
|
||||
When "MemberPreferredPhoneExt" exists in the "Member" metadata
|
||||
Then "MemberPreferredPhoneExt" MUST be "String" data type
|
||||
And the following synonyms for "MemberPreferredPhoneExt" MUST NOT exist in the metadata
|
||||
| AgentPreferredPhoneExt |
|
||||
And "MemberPreferredPhoneExt" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberStateLicense
|
||||
Given that the following synonyms for "MemberStateLicense" DO NOT exist in the "Member" metadata
|
||||
| AgentStateLicense |
|
||||
When "MemberStateLicense" exists in the "Member" metadata
|
||||
Then "MemberStateLicense" MUST be "String" data type
|
||||
And the following synonyms for "MemberStateLicense" MUST NOT exist in the metadata
|
||||
| AgentStateLicense |
|
||||
And "MemberStateLicense" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberStateLicenseState
|
||||
Given that the following synonyms for "MemberStateLicenseState" DO NOT exist in the "Member" metadata
|
||||
| AgentStateLicenseState |
|
||||
When "MemberStateLicenseState" exists in the "Member" metadata
|
||||
Then "MemberStateLicenseState" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberStateLicenseState" MUST NOT exist in the metadata
|
||||
| AgentStateLicenseState |
|
||||
|
||||
@Member
|
||||
Scenario: MemberStateOrProvince
|
||||
Given that the following synonyms for "MemberStateOrProvince" DO NOT exist in the "Member" metadata
|
||||
| AgentStateOrProvince |
|
||||
When "MemberStateOrProvince" exists in the "Member" metadata
|
||||
Then "MemberStateOrProvince" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberStateOrProvince" MUST NOT exist in the metadata
|
||||
| AgentStateOrProvince |
|
||||
|
||||
@Member @IDX
|
||||
Scenario: MemberStatus
|
||||
Given that the following synonyms for "MemberStatus" DO NOT exist in the "Member" metadata
|
||||
| AgentStatus |
|
||||
When "MemberStatus" exists in the "Member" metadata
|
||||
Then "MemberStatus" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberStatus" MUST NOT exist in the metadata
|
||||
| AgentStatus |
|
||||
|
||||
@Member
|
||||
Scenario: MemberTollFreePhone
|
||||
Given that the following synonyms for "MemberTollFreePhone" DO NOT exist in the "Member" metadata
|
||||
| AgentTollFreePhone |
|
||||
When "MemberTollFreePhone" exists in the "Member" metadata
|
||||
Then "MemberTollFreePhone" MUST be "String" data type
|
||||
And the following synonyms for "MemberTollFreePhone" MUST NOT exist in the metadata
|
||||
| AgentTollFreePhone |
|
||||
And "MemberTollFreePhone" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberType
|
||||
Given that the following synonyms for "MemberType" DO NOT exist in the "Member" metadata
|
||||
| AgentType |
|
||||
When "MemberType" exists in the "Member" metadata
|
||||
Then "MemberType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "MemberType" MUST NOT exist in the metadata
|
||||
| AgentType |
|
||||
|
||||
@Member
|
||||
Scenario: MemberVoiceMail
|
||||
Given that the following synonyms for "MemberVoiceMail" DO NOT exist in the "Member" metadata
|
||||
| AgentVoiceMail |
|
||||
When "MemberVoiceMail" exists in the "Member" metadata
|
||||
Then "MemberVoiceMail" MUST be "String" data type
|
||||
And the following synonyms for "MemberVoiceMail" MUST NOT exist in the metadata
|
||||
| AgentVoiceMail |
|
||||
And "MemberVoiceMail" length SHOULD be equal to the RESO Suggested Max Length of 16
|
||||
|
||||
@Member
|
||||
Scenario: MemberVoiceMailExt
|
||||
Given that the following synonyms for "MemberVoiceMailExt" DO NOT exist in the "Member" metadata
|
||||
| AgentVoiceMailExt |
|
||||
When "MemberVoiceMailExt" exists in the "Member" metadata
|
||||
Then "MemberVoiceMailExt" MUST be "String" data type
|
||||
And the following synonyms for "MemberVoiceMailExt" MUST NOT exist in the metadata
|
||||
| AgentVoiceMailExt |
|
||||
And "MemberVoiceMailExt" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@Member @IDX
|
||||
|
@ -456,28 +456,28 @@ Feature: Member
|
|||
|
||||
@Member @IDX
|
||||
Scenario: OriginatingSystemID
|
||||
Given that the following synonyms for "OriginatingSystemID" DO NOT exist in the "Member" metadata
|
||||
| MLSID |
|
||||
When "OriginatingSystemID" exists in the "Member" metadata
|
||||
Then "OriginatingSystemID" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "OriginatingSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member @IDX
|
||||
Scenario: OriginatingSystemMemberKey
|
||||
Given that the following synonyms for "OriginatingSystemMemberKey" DO NOT exist in the "Member" metadata
|
||||
| OriginatingSystemAgentkey |
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemMemberKey" exists in the "Member" metadata
|
||||
Then "OriginatingSystemMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemMemberKey" MUST NOT exist in the metadata
|
||||
| OriginatingSystemAgentkey |
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Member" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Member" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member
|
||||
|
@ -487,28 +487,28 @@ Feature: Member
|
|||
|
||||
@Member
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Member" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Member" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Member @IDX
|
||||
Scenario: SourceSystemMemberKey
|
||||
Given that the following synonyms for "SourceSystemMemberKey" DO NOT exist in the "Member" metadata
|
||||
| SourceSystemAgentKey |
|
||||
| ProviderKey |
|
||||
When "SourceSystemMemberKey" exists in the "Member" metadata
|
||||
Then "SourceSystemMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemMemberKey" MUST NOT exist in the metadata
|
||||
| SourceSystemAgentKey |
|
||||
| ProviderKey |
|
||||
And "SourceSystemMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Member" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Member" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Member
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Office
|
||||
|
||||
Background:
|
||||
|
@ -121,10 +121,10 @@ Feature: Office
|
|||
|
||||
@Office
|
||||
Scenario: OfficeCountyOrParish
|
||||
Given that the following synonyms for "OfficeCountyOrParish" DO NOT exist in the "Office" metadata
|
||||
| County |
|
||||
When "OfficeCountyOrParish" exists in the "Office" metadata
|
||||
Then "OfficeCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "OfficeCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@Office
|
||||
Scenario: OfficeEmail
|
||||
|
@ -230,27 +230,27 @@ Feature: Office
|
|||
|
||||
@Office @IDX
|
||||
Scenario: OriginatingSystemID
|
||||
Given that the following synonyms for "OriginatingSystemID" DO NOT exist in the "Office" metadata
|
||||
| MLSID |
|
||||
When "OriginatingSystemID" exists in the "Office" metadata
|
||||
Then "OriginatingSystemID" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "OriginatingSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Office @IDX
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Office" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Office" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Office @IDX
|
||||
Scenario: OriginatingSystemOfficeKey
|
||||
Given that the following synonyms for "OriginatingSystemOfficeKey" DO NOT exist in the "Office" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemOfficeKey" exists in the "Office" metadata
|
||||
Then "OriginatingSystemOfficeKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemOfficeKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemOfficeKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Office
|
||||
|
@ -260,35 +260,35 @@ Feature: Office
|
|||
|
||||
@Office @IDX
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Office" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Office" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Office @IDX
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Office" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Office" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Office @IDX
|
||||
Scenario: SourceSystemOfficeKey
|
||||
Given that the following synonyms for "SourceSystemOfficeKey" DO NOT exist in the "Office" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemOfficeKey" exists in the "Office" metadata
|
||||
Then "SourceSystemOfficeKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemOfficeKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemOfficeKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Office
|
||||
Scenario: SyndicateAgentOption
|
||||
Given that the following synonyms for "SyndicateAgentOption" DO NOT exist in the "Office" metadata
|
||||
| SyndicateMemberOption |
|
||||
When "SyndicateAgentOption" exists in the "Office" metadata
|
||||
Then "SyndicateAgentOption" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "SyndicateAgentOption" MUST NOT exist in the metadata
|
||||
| SyndicateMemberOption |
|
||||
|
||||
@Office
|
||||
Scenario: SyndicateTo
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: OpenHouse
|
||||
|
||||
Background:
|
||||
|
@ -19,42 +19,42 @@ Feature: OpenHouse
|
|||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ListingId
|
||||
Given that the following synonyms for "ListingId" DO NOT exist in the "OpenHouse" metadata
|
||||
When "ListingId" exists in the "OpenHouse" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And the following synonyms for "ListingId" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
When "ListingId" exists in the "OpenHouse" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And "ListingId" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ListingKey
|
||||
Given that the following synonyms for "ListingKey" DO NOT exist in the "OpenHouse" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKey" exists in the "OpenHouse" metadata
|
||||
Then "ListingKey" MUST be "String" data type
|
||||
And the following synonyms for "ListingKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ListingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ListingKeyNumeric
|
||||
Given that the following synonyms for "ListingKeyNumeric" DO NOT exist in the "OpenHouse" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKeyNumeric" exists in the "OpenHouse" metadata
|
||||
Then "ListingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ListingKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "OpenHouse" metadata
|
||||
When "ModificationTimestamp" exists in the "OpenHouse" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "OpenHouse" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@OpenHouse
|
||||
Scenario: OpenHouseAttendedBy
|
||||
|
@ -111,13 +111,13 @@ Feature: OpenHouse
|
|||
|
||||
@OpenHouse @IDX
|
||||
Scenario: OriginalEntryTimestamp
|
||||
Given that the following synonyms for "OriginalEntryTimestamp" DO NOT exist in the "OpenHouse" metadata
|
||||
When "OriginalEntryTimestamp" exists in the "OpenHouse" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "OriginalEntryTimestamp" MUST NOT exist in the metadata
|
||||
| EntryDate |
|
||||
| InputDate |
|
||||
| DateTimeCreated |
|
||||
| CreatedDate. |
|
||||
When "OriginalEntryTimestamp" exists in the "OpenHouse" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: OriginatingSystemID
|
||||
|
@ -127,19 +127,19 @@ Feature: OpenHouse
|
|||
|
||||
@OpenHouse @IDX
|
||||
Scenario: OriginatingSystemKey
|
||||
Given that the following synonyms for "OriginatingSystemKey" DO NOT exist in the "OpenHouse" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemKey" exists in the "OpenHouse" metadata
|
||||
Then "OriginatingSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "OpenHouse" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "OpenHouse" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse @IDX
|
||||
|
@ -150,64 +150,64 @@ Feature: OpenHouse
|
|||
|
||||
@OpenHouse
|
||||
Scenario: ShowingAgentFirstName
|
||||
Given that the following synonyms for "ShowingAgentFirstName" DO NOT exist in the "OpenHouse" metadata
|
||||
| ShowingMemberFirstName |
|
||||
When "ShowingAgentFirstName" exists in the "OpenHouse" metadata
|
||||
Then "ShowingAgentFirstName" MUST be "String" data type
|
||||
And the following synonyms for "ShowingAgentFirstName" MUST NOT exist in the metadata
|
||||
| ShowingMemberFirstName |
|
||||
And "ShowingAgentFirstName" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ShowingAgentKey
|
||||
Given that the following synonyms for "ShowingAgentKey" DO NOT exist in the "OpenHouse" metadata
|
||||
| ShowingMemberKey |
|
||||
When "ShowingAgentKey" exists in the "OpenHouse" metadata
|
||||
Then "ShowingAgentKey" MUST be "String" data type
|
||||
And the following synonyms for "ShowingAgentKey" MUST NOT exist in the metadata
|
||||
| ShowingMemberKey |
|
||||
And "ShowingAgentKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: ShowingAgentKeyNumeric
|
||||
Given that the following synonyms for "ShowingAgentKeyNumeric" DO NOT exist in the "OpenHouse" metadata
|
||||
| ShowingMemberKeyNumeric |
|
||||
When "ShowingAgentKeyNumeric" exists in the "OpenHouse" metadata
|
||||
Then "ShowingAgentKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ShowingAgentKeyNumeric" MUST NOT exist in the metadata
|
||||
| ShowingMemberKeyNumeric |
|
||||
|
||||
@OpenHouse
|
||||
Scenario: ShowingAgentLastName
|
||||
Given that the following synonyms for "ShowingAgentLastName" DO NOT exist in the "OpenHouse" metadata
|
||||
| ShowingMemberLastName |
|
||||
When "ShowingAgentLastName" exists in the "OpenHouse" metadata
|
||||
Then "ShowingAgentLastName" MUST be "String" data type
|
||||
And the following synonyms for "ShowingAgentLastName" MUST NOT exist in the metadata
|
||||
| ShowingMemberLastName |
|
||||
And "ShowingAgentLastName" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@OpenHouse
|
||||
Scenario: ShowingAgentMlsID
|
||||
Given that the following synonyms for "ShowingAgentMlsID" DO NOT exist in the "OpenHouse" metadata
|
||||
| ShowingMemberMlslD |
|
||||
When "ShowingAgentMlsID" exists in the "OpenHouse" metadata
|
||||
Then "ShowingAgentMlsID" MUST be "String" data type
|
||||
And the following synonyms for "ShowingAgentMlsID" MUST NOT exist in the metadata
|
||||
| ShowingMemberMlslD |
|
||||
And "ShowingAgentMlsID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "OpenHouse" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "OpenHouse" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@OpenHouse @IDX
|
||||
Scenario: SourceSystemKey
|
||||
Given that the following synonyms for "SourceSystemKey" DO NOT exist in the "OpenHouse" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemKey" exists in the "OpenHouse" metadata
|
||||
Then "SourceSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OpenHouse
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "OpenHouse" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "OpenHouse" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: OtherPhone
|
||||
|
||||
Background:
|
||||
|
@ -19,15 +19,15 @@ Feature: OtherPhone
|
|||
|
||||
@OtherPhone
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "OtherPhone" metadata
|
||||
When "ModificationTimestamp" exists in the "OtherPhone" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "OtherPhone" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@OtherPhone
|
||||
Scenario: OtherPhoneExt
|
||||
|
@ -37,11 +37,11 @@ Feature: OtherPhone
|
|||
|
||||
@OtherPhone
|
||||
Scenario: OtherPhoneKey
|
||||
Given that the following synonyms for "OtherPhoneKey" DO NOT exist in the "OtherPhone" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "OtherPhoneKey" exists in the "OtherPhone" metadata
|
||||
Then "OtherPhoneKey" MUST be "String" data type
|
||||
And the following synonyms for "OtherPhoneKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "OtherPhoneKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OtherPhone
|
||||
|
@ -57,10 +57,10 @@ Feature: OtherPhone
|
|||
|
||||
@OtherPhone
|
||||
Scenario: OtherPhoneType
|
||||
Given that the following synonyms for "OtherPhoneType" DO NOT exist in the "OtherPhone" metadata
|
||||
| PhoneType |
|
||||
When "OtherPhoneType" exists in the "OtherPhone" metadata
|
||||
Then "OtherPhoneType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "OtherPhoneType" MUST NOT exist in the metadata
|
||||
| PhoneType |
|
||||
|
||||
@OtherPhone
|
||||
Scenario: ResourceName
|
||||
|
@ -69,30 +69,30 @@ Feature: OtherPhone
|
|||
|
||||
@OtherPhone
|
||||
Scenario: ResourceRecordID
|
||||
Given that the following synonyms for "ResourceRecordID" DO NOT exist in the "OtherPhone" metadata
|
||||
When "ResourceRecordID" exists in the "OtherPhone" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordID" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
| AgentID |
|
||||
| OfficeID |
|
||||
| ContactID |
|
||||
When "ResourceRecordID" exists in the "OtherPhone" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And "ResourceRecordID" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OtherPhone
|
||||
Scenario: ResourceRecordKey
|
||||
Given that the following synonyms for "ResourceRecordKey" DO NOT exist in the "OtherPhone" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKey" exists in the "OtherPhone" metadata
|
||||
Then "ResourceRecordKey" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ResourceRecordKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OtherPhone
|
||||
Scenario: ResourceRecordKeyNumeric
|
||||
Given that the following synonyms for "ResourceRecordKeyNumeric" DO NOT exist in the "OtherPhone" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKeyNumeric" exists in the "OtherPhone" metadata
|
||||
Then "ResourceRecordKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ResourceRecordKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: OUID
|
||||
|
||||
Background:
|
||||
|
@ -14,26 +14,26 @@ Feature: OUID
|
|||
|
||||
@OUID
|
||||
Scenario: ChangedByMemberID
|
||||
Given that the following synonyms for "ChangedByMemberID" DO NOT exist in the "OUID" metadata
|
||||
| ChangedByAgentID |
|
||||
When "ChangedByMemberID" exists in the "OUID" metadata
|
||||
Then "ChangedByMemberID" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberID" MUST NOT exist in the metadata
|
||||
| ChangedByAgentID |
|
||||
And "ChangedByMemberID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@OUID
|
||||
Scenario: ChangedByMemberKey
|
||||
Given that the following synonyms for "ChangedByMemberKey" DO NOT exist in the "OUID" metadata
|
||||
| ChangedByAgentKey |
|
||||
When "ChangedByMemberKey" exists in the "OUID" metadata
|
||||
Then "ChangedByMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "ChangedByMemberKey" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKey |
|
||||
And "ChangedByMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@OUID
|
||||
Scenario: ChangedByMemberKeyNumeric
|
||||
Given that the following synonyms for "ChangedByMemberKeyNumeric" DO NOT exist in the "OUID" metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
When "ChangedByMemberKeyNumeric" exists in the "OUID" metadata
|
||||
Then "ChangedByMemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ChangedByMemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| ChangedByAgentKeyNumeric |
|
||||
|
||||
@OUID
|
||||
Scenario: ModificationTimestamp
|
||||
|
@ -76,11 +76,11 @@ Feature: OUID
|
|||
|
||||
@OUID
|
||||
Scenario: OrganizationCarrierRoute
|
||||
Given that the following synonyms for "OrganizationCarrierRoute" DO NOT exist in the "OUID" metadata
|
||||
| RR |
|
||||
| CR |
|
||||
When "OrganizationCarrierRoute" exists in the "OUID" metadata
|
||||
Then "OrganizationCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "OrganizationCarrierRoute" MUST NOT exist in the metadata
|
||||
| RR |
|
||||
| CR |
|
||||
And "OrganizationCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@OUID
|
||||
|
@ -139,11 +139,11 @@ Feature: OUID
|
|||
|
||||
@OUID
|
||||
Scenario: OrganizationContactNamePrefix
|
||||
Given that the following synonyms for "OrganizationContactNamePrefix" DO NOT exist in the "OUID" metadata
|
||||
| Salutation |
|
||||
| Title |
|
||||
When "OrganizationContactNamePrefix" exists in the "OUID" metadata
|
||||
Then "OrganizationContactNamePrefix" MUST be "String" data type
|
||||
And the following synonyms for "OrganizationContactNamePrefix" MUST NOT exist in the metadata
|
||||
| Salutation |
|
||||
| Title |
|
||||
And "OrganizationContactNamePrefix" length SHOULD be equal to the RESO Suggested Max Length of 10
|
||||
|
||||
@OUID
|
||||
|
@ -171,17 +171,17 @@ Feature: OUID
|
|||
|
||||
@OUID
|
||||
Scenario: OrganizationCountyOrParish
|
||||
Given that the following synonyms for "OrganizationCountyOrParish" DO NOT exist in the "OUID" metadata
|
||||
| County |
|
||||
When "OrganizationCountyOrParish" exists in the "OUID" metadata
|
||||
Then "OrganizationCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "OrganizationCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@OUID
|
||||
Scenario: OrganizationMemberCount
|
||||
Given that the following synonyms for "OrganizationMemberCount" DO NOT exist in the "OUID" metadata
|
||||
| OrganizationAgentCount |
|
||||
When "OrganizationMemberCount" exists in the "OUID" metadata
|
||||
Then "OrganizationMemberCount" MUST be "Integer" data type
|
||||
And the following synonyms for "OrganizationMemberCount" MUST NOT exist in the metadata
|
||||
| OrganizationAgentCount |
|
||||
|
||||
@OUID
|
||||
Scenario: OrganizationMlsCode
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: PropertyGreenVerification
|
||||
|
||||
Background:
|
||||
|
@ -25,107 +25,107 @@ Feature: PropertyGreenVerification
|
|||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenBuildingVerificationType
|
||||
Given that the following synonyms for "GreenBuildingVerificationType" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
When "GreenBuildingVerificationType" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenBuildingVerificationType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "GreenBuildingVerificationType" MUST NOT exist in the metadata
|
||||
| GreenBuildingCertification |
|
||||
| GreenBuildingVerification |
|
||||
| GreenBuildingRating |
|
||||
When "GreenBuildingVerificationType" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenBuildingVerificationType" MUST be "Single Enumeration" data type
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationBody
|
||||
Given that the following synonyms for "GreenVerificationBody" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenCertifyingBody |
|
||||
| GreenRatingBody |
|
||||
When "GreenVerificationBody" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationBody" MUST be "String" data type
|
||||
And the following synonyms for "GreenVerificationBody" MUST NOT exist in the metadata
|
||||
| GreenCertifyingBody |
|
||||
| GreenRatingBody |
|
||||
And "GreenVerificationBody" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationMetric
|
||||
Given that the following synonyms for "GreenVerificationMetric" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenCertificationMetric |
|
||||
| GreenRatingMetric |
|
||||
When "GreenVerificationMetric" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationMetric" MUST be "Integer" data type
|
||||
And the following synonyms for "GreenVerificationMetric" MUST NOT exist in the metadata
|
||||
| GreenCertificationMetric |
|
||||
| GreenRatingMetric |
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationRating
|
||||
Given that the following synonyms for "GreenVerificationRating" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenCertificationRating |
|
||||
| GreenRating |
|
||||
When "GreenVerificationRating" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationRating" MUST be "String" data type
|
||||
And the following synonyms for "GreenVerificationRating" MUST NOT exist in the metadata
|
||||
| GreenCertificationRating |
|
||||
| GreenRating |
|
||||
And "GreenVerificationRating" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationSource
|
||||
Given that the following synonyms for "GreenVerificationSource" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenRatingSource |
|
||||
| GreenCertificationSource |
|
||||
When "GreenVerificationSource" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationSource" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "GreenVerificationSource" MUST NOT exist in the metadata
|
||||
| GreenRatingSource |
|
||||
| GreenCertificationSource |
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationStatus
|
||||
Given that the following synonyms for "GreenVerificationStatus" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenRatingStatus |
|
||||
| GreenCertificationStatus |
|
||||
When "GreenVerificationStatus" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationStatus" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "GreenVerificationStatus" MUST NOT exist in the metadata
|
||||
| GreenRatingStatus |
|
||||
| GreenCertificationStatus |
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationURL
|
||||
Given that the following synonyms for "GreenVerificationURL" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenRatingURL |
|
||||
| GreenCertificationURL |
|
||||
When "GreenVerificationURL" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationURL" MUST be "String" data type
|
||||
And the following synonyms for "GreenVerificationURL" MUST NOT exist in the metadata
|
||||
| GreenRatingURL |
|
||||
| GreenCertificationURL |
|
||||
And "GreenVerificationURL" length SHOULD be equal to the RESO Suggested Max Length of 8000
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationVersion
|
||||
Given that the following synonyms for "GreenVerificationVersion" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenRatingVersion |
|
||||
| GreenCertificationVersion |
|
||||
When "GreenVerificationVersion" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationVersion" MUST be "String" data type
|
||||
And the following synonyms for "GreenVerificationVersion" MUST NOT exist in the metadata
|
||||
| GreenRatingVersion |
|
||||
| GreenCertificationVersion |
|
||||
And "GreenVerificationVersion" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: GreenVerificationYear
|
||||
Given that the following synonyms for "GreenVerificationYear" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| GreenYearCertified |
|
||||
| GreenRatingYear |
|
||||
When "GreenVerificationYear" exists in the "PropertyGreenVerification" metadata
|
||||
Then "GreenVerificationYear" MUST be "Integer" data type
|
||||
And the following synonyms for "GreenVerificationYear" MUST NOT exist in the metadata
|
||||
| GreenYearCertified |
|
||||
| GreenRatingYear |
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: ListingId
|
||||
Given that the following synonyms for "ListingId" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
When "ListingId" exists in the "PropertyGreenVerification" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And the following synonyms for "ListingId" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
When "ListingId" exists in the "PropertyGreenVerification" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And "ListingId" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: ListingKey
|
||||
Given that the following synonyms for "ListingKey" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKey" exists in the "PropertyGreenVerification" metadata
|
||||
Then "ListingKey" MUST be "String" data type
|
||||
And the following synonyms for "ListingKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ListingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: ListingKeyNumeric
|
||||
Given that the following synonyms for "ListingKeyNumeric" DO NOT exist in the "PropertyGreenVerification" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKeyNumeric" exists in the "PropertyGreenVerification" metadata
|
||||
Then "ListingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ListingKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@PropertyGreenVerification @IDX
|
||||
Scenario: ModificationTimestamp
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: PropertyPowerProduction
|
||||
|
||||
Background:
|
||||
|
@ -14,30 +14,30 @@ Feature: PropertyPowerProduction
|
|||
|
||||
@PropertyPowerProduction
|
||||
Scenario: ListingId
|
||||
Given that the following synonyms for "ListingId" DO NOT exist in the "PropertyPowerProduction" metadata
|
||||
When "ListingId" exists in the "PropertyPowerProduction" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And the following synonyms for "ListingId" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
When "ListingId" exists in the "PropertyPowerProduction" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And "ListingId" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyPowerProduction
|
||||
Scenario: ListingKey
|
||||
Given that the following synonyms for "ListingKey" DO NOT exist in the "PropertyPowerProduction" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKey" exists in the "PropertyPowerProduction" metadata
|
||||
Then "ListingKey" MUST be "String" data type
|
||||
And the following synonyms for "ListingKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ListingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyPowerProduction
|
||||
Scenario: ListingKeyNumeric
|
||||
Given that the following synonyms for "ListingKeyNumeric" DO NOT exist in the "PropertyPowerProduction" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKeyNumeric" exists in the "PropertyPowerProduction" metadata
|
||||
Then "ListingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ListingKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@PropertyPowerProduction
|
||||
Scenario: ModificationTimestamp
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: PropertyRooms
|
||||
|
||||
Background:
|
||||
|
@ -14,30 +14,30 @@ Feature: PropertyRooms
|
|||
|
||||
@PropertyRooms
|
||||
Scenario: ListingId
|
||||
Given that the following synonyms for "ListingId" DO NOT exist in the "PropertyRooms" metadata
|
||||
When "ListingId" exists in the "PropertyRooms" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And the following synonyms for "ListingId" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
When "ListingId" exists in the "PropertyRooms" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And "ListingId" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyRooms
|
||||
Scenario: ListingKey
|
||||
Given that the following synonyms for "ListingKey" DO NOT exist in the "PropertyRooms" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKey" exists in the "PropertyRooms" metadata
|
||||
Then "ListingKey" MUST be "String" data type
|
||||
And the following synonyms for "ListingKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ListingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyRooms
|
||||
Scenario: ListingKeyNumeric
|
||||
Given that the following synonyms for "ListingKeyNumeric" DO NOT exist in the "PropertyRooms" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKeyNumeric" exists in the "PropertyRooms" metadata
|
||||
Then "ListingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ListingKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@PropertyRooms
|
||||
Scenario: ModificationTimestamp
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: PropertyUnitTypes
|
||||
|
||||
Background:
|
||||
|
@ -14,30 +14,30 @@ Feature: PropertyUnitTypes
|
|||
|
||||
@PropertyUnitTypes
|
||||
Scenario: ListingId
|
||||
Given that the following synonyms for "ListingId" DO NOT exist in the "PropertyUnitTypes" metadata
|
||||
When "ListingId" exists in the "PropertyUnitTypes" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And the following synonyms for "ListingId" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
When "ListingId" exists in the "PropertyUnitTypes" metadata
|
||||
Then "ListingId" MUST be "String" data type
|
||||
And "ListingId" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyUnitTypes
|
||||
Scenario: ListingKey
|
||||
Given that the following synonyms for "ListingKey" DO NOT exist in the "PropertyUnitTypes" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKey" exists in the "PropertyUnitTypes" metadata
|
||||
Then "ListingKey" MUST be "String" data type
|
||||
And the following synonyms for "ListingKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ListingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@PropertyUnitTypes
|
||||
Scenario: ListingKeyNumeric
|
||||
Given that the following synonyms for "ListingKeyNumeric" DO NOT exist in the "PropertyUnitTypes" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ListingKeyNumeric" exists in the "PropertyUnitTypes" metadata
|
||||
Then "ListingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ListingKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@PropertyUnitTypes
|
||||
Scenario: ModificationTimestamp
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Prospecting
|
||||
|
||||
Background:
|
||||
|
@ -133,20 +133,20 @@ Feature: Prospecting
|
|||
|
||||
@Prospecting
|
||||
Scenario: ProspectingKey
|
||||
Given that the following synonyms for "ProspectingKey" DO NOT exist in the "Prospecting" metadata
|
||||
| AutoMailKey |
|
||||
| AutoEmailKey |
|
||||
When "ProspectingKey" exists in the "Prospecting" metadata
|
||||
Then "ProspectingKey" MUST be "String" data type
|
||||
And the following synonyms for "ProspectingKey" MUST NOT exist in the metadata
|
||||
| AutoMailKey |
|
||||
| AutoEmailKey |
|
||||
And "ProspectingKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Prospecting
|
||||
Scenario: ProspectingKeyNumeric
|
||||
Given that the following synonyms for "ProspectingKeyNumeric" DO NOT exist in the "Prospecting" metadata
|
||||
| AutoMailKeyNumeric |
|
||||
| AutoEmailKeyNumeric |
|
||||
When "ProspectingKeyNumeric" exists in the "Prospecting" metadata
|
||||
Then "ProspectingKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ProspectingKeyNumeric" MUST NOT exist in the metadata
|
||||
| AutoMailKeyNumeric |
|
||||
| AutoEmailKeyNumeric |
|
||||
|
||||
@Prospecting
|
||||
Scenario: ReasonActiveOrDisabled
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Queue
|
||||
|
||||
Background:
|
||||
|
@ -30,19 +30,19 @@ Feature: Queue
|
|||
|
||||
@Queue
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Queue" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Queue" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Queue
|
||||
Scenario: OriginatingSystemQueueKey
|
||||
Given that the following synonyms for "OriginatingSystemQueueKey" DO NOT exist in the "Queue" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemQueueKey" exists in the "Queue" metadata
|
||||
Then "OriginatingSystemQueueKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemQueueKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemQueueKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Queue
|
||||
|
@ -68,55 +68,55 @@ Feature: Queue
|
|||
|
||||
@Queue
|
||||
Scenario: ResourceRecordID
|
||||
Given that the following synonyms for "ResourceRecordID" DO NOT exist in the "Queue" metadata
|
||||
When "ResourceRecordID" exists in the "Queue" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordID" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
| AgentID |
|
||||
| OfficeID |
|
||||
| ContactID |
|
||||
When "ResourceRecordID" exists in the "Queue" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And "ResourceRecordID" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Queue
|
||||
Scenario: ResourceRecordKey
|
||||
Given that the following synonyms for "ResourceRecordKey" DO NOT exist in the "Queue" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKey" exists in the "Queue" metadata
|
||||
Then "ResourceRecordKey" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ResourceRecordKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Queue
|
||||
Scenario: ResourceRecordKeyNumeric
|
||||
Given that the following synonyms for "ResourceRecordKeyNumeric" DO NOT exist in the "Queue" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKeyNumeric" exists in the "Queue" metadata
|
||||
Then "ResourceRecordKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ResourceRecordKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@Queue
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Queue" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Queue" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Queue
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Queue" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Queue" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Queue
|
||||
Scenario: SourceSystemQueueKey
|
||||
Given that the following synonyms for "SourceSystemQueueKey" DO NOT exist in the "Queue" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemQueueKey" exists in the "Queue" metadata
|
||||
Then "SourceSystemQueueKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemQueueKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemQueueKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Rules
|
||||
|
||||
Background:
|
||||
|
@ -36,25 +36,25 @@ Feature: Rules
|
|||
|
||||
@Rules
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "Rules" metadata
|
||||
When "ModificationTimestamp" exists in the "Rules" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "Rules" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@Rules
|
||||
Scenario: OriginalEntryTimestamp
|
||||
Given that the following synonyms for "OriginalEntryTimestamp" DO NOT exist in the "Rules" metadata
|
||||
When "OriginalEntryTimestamp" exists in the "Rules" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "OriginalEntryTimestamp" MUST NOT exist in the metadata
|
||||
| EntryDate |
|
||||
| InputDate |
|
||||
| DateTimeCreated |
|
||||
| CreatedDate. |
|
||||
When "OriginalEntryTimestamp" exists in the "Rules" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@Rules
|
||||
Scenario: OriginatingSystemID
|
||||
|
@ -64,11 +64,11 @@ Feature: Rules
|
|||
|
||||
@Rules
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Rules" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Rules" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Rules
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: SavedSearch
|
||||
|
||||
Background:
|
||||
|
@ -19,48 +19,48 @@ Feature: SavedSearch
|
|||
|
||||
@SavedSearch
|
||||
Scenario: MemberKey
|
||||
Given that the following synonyms for "MemberKey" DO NOT exist in the "SavedSearch" metadata
|
||||
| AgentKey |
|
||||
When "MemberKey" exists in the "SavedSearch" metadata
|
||||
Then "MemberKey" MUST be "String" data type
|
||||
And the following synonyms for "MemberKey" MUST NOT exist in the metadata
|
||||
| AgentKey |
|
||||
And "MemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
Scenario: MemberKeyNumeric
|
||||
Given that the following synonyms for "MemberKeyNumeric" DO NOT exist in the "SavedSearch" metadata
|
||||
| AgentKeyNumeric |
|
||||
When "MemberKeyNumeric" exists in the "SavedSearch" metadata
|
||||
Then "MemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "MemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| AgentKeyNumeric |
|
||||
|
||||
@SavedSearch
|
||||
Scenario: MemberMlsId
|
||||
Given that the following synonyms for "MemberMlsId" DO NOT exist in the "SavedSearch" metadata
|
||||
| AgentMlsld |
|
||||
When "MemberMlsId" exists in the "SavedSearch" metadata
|
||||
Then "MemberMlsId" MUST be "String" data type
|
||||
And the following synonyms for "MemberMlsId" MUST NOT exist in the metadata
|
||||
| AgentMlsld |
|
||||
And "MemberMlsId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@SavedSearch
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "SavedSearch" metadata
|
||||
When "ModificationTimestamp" exists in the "SavedSearch" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "SavedSearch" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@SavedSearch
|
||||
Scenario: OriginalEntryTimestamp
|
||||
Given that the following synonyms for "OriginalEntryTimestamp" DO NOT exist in the "SavedSearch" metadata
|
||||
When "OriginalEntryTimestamp" exists in the "SavedSearch" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "OriginalEntryTimestamp" MUST NOT exist in the metadata
|
||||
| EntryDate |
|
||||
| InputDate |
|
||||
| DateTimeCreated |
|
||||
| CreatedDate. |
|
||||
When "OriginalEntryTimestamp" exists in the "SavedSearch" metadata
|
||||
Then "OriginalEntryTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@SavedSearch
|
||||
Scenario: OriginatingSystemID
|
||||
|
@ -70,38 +70,38 @@ Feature: SavedSearch
|
|||
|
||||
@SavedSearch
|
||||
Scenario: OriginatingSystemKey
|
||||
Given that the following synonyms for "OriginatingSystemKey" DO NOT exist in the "SavedSearch" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemKey" exists in the "SavedSearch" metadata
|
||||
Then "OriginatingSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
Scenario: OriginatingSystemMemberKey
|
||||
Given that the following synonyms for "OriginatingSystemMemberKey" DO NOT exist in the "SavedSearch" metadata
|
||||
| OriginatingSystemAgentKey |
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemMemberKey" exists in the "SavedSearch" metadata
|
||||
Then "OriginatingSystemMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemMemberKey" MUST NOT exist in the metadata
|
||||
| OriginatingSystemAgentKey |
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
Scenario: OriginatingSystemMemberName
|
||||
Given that the following synonyms for "OriginatingSystemMemberName" DO NOT exist in the "SavedSearch" metadata
|
||||
When "OriginatingSystemMemberName" exists in the "SavedSearch" metadata
|
||||
Then "OriginatingSystemMemberName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemMemberName" MUST NOT exist in the metadata
|
||||
| OriginatingSystemAgentName |
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemMemberName" exists in the "SavedSearch" metadata
|
||||
Then "OriginatingSystemMemberName" MUST be "String" data type
|
||||
And "OriginatingSystemMemberName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "SavedSearch" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "SavedSearch" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
|
@ -139,10 +139,10 @@ Feature: SavedSearch
|
|||
|
||||
@SavedSearch
|
||||
Scenario: SearchQuery
|
||||
Given that the following synonyms for "SearchQuery" DO NOT exist in the "SavedSearch" metadata
|
||||
| SearchCriteria |
|
||||
When "SearchQuery" exists in the "SavedSearch" metadata
|
||||
Then "SearchQuery" MUST be "String" data type
|
||||
And the following synonyms for "SearchQuery" MUST NOT exist in the metadata
|
||||
| SearchCriteria |
|
||||
And "SearchQuery" length SHOULD be equal to the RESO Suggested Max Length of 8000
|
||||
|
||||
@SavedSearch
|
||||
|
@ -169,25 +169,25 @@ Feature: SavedSearch
|
|||
|
||||
@SavedSearch
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "SavedSearch" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "SavedSearch" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@SavedSearch
|
||||
Scenario: SourceSystemKey
|
||||
Given that the following synonyms for "SourceSystemKey" DO NOT exist in the "SavedSearch" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemKey" exists in the "SavedSearch" metadata
|
||||
Then "SourceSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SavedSearch
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "SavedSearch" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "SavedSearch" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: Showing
|
||||
|
||||
Background:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: SocialMedia
|
||||
|
||||
Background:
|
||||
|
@ -19,15 +19,15 @@ Feature: SocialMedia
|
|||
|
||||
@SocialMedia
|
||||
Scenario: ModificationTimestamp
|
||||
Given that the following synonyms for "ModificationTimestamp" DO NOT exist in the "SocialMedia" metadata
|
||||
When "ModificationTimestamp" exists in the "SocialMedia" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
And the following synonyms for "ModificationTimestamp" MUST NOT exist in the metadata
|
||||
| ModificationDateTime |
|
||||
| DateTimeModified |
|
||||
| ModDate |
|
||||
| DateMod |
|
||||
| UpdateDate |
|
||||
| UpdateTimestamp |
|
||||
When "ModificationTimestamp" exists in the "SocialMedia" metadata
|
||||
Then "ModificationTimestamp" MUST be "Timestamp" data type
|
||||
|
||||
@SocialMedia
|
||||
Scenario: ResourceName
|
||||
|
@ -36,41 +36,41 @@ Feature: SocialMedia
|
|||
|
||||
@SocialMedia
|
||||
Scenario: ResourceRecordID
|
||||
Given that the following synonyms for "ResourceRecordID" DO NOT exist in the "SocialMedia" metadata
|
||||
When "ResourceRecordID" exists in the "SocialMedia" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordID" MUST NOT exist in the metadata
|
||||
| MLNumber |
|
||||
| MLSNumber |
|
||||
| ListingNumber |
|
||||
| AgentID |
|
||||
| OfficeID |
|
||||
| ContactID |
|
||||
When "ResourceRecordID" exists in the "SocialMedia" metadata
|
||||
Then "ResourceRecordID" MUST be "String" data type
|
||||
And "ResourceRecordID" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SocialMedia
|
||||
Scenario: ResourceRecordKey
|
||||
Given that the following synonyms for "ResourceRecordKey" DO NOT exist in the "SocialMedia" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKey" exists in the "SocialMedia" metadata
|
||||
Then "ResourceRecordKey" MUST be "String" data type
|
||||
And the following synonyms for "ResourceRecordKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "ResourceRecordKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SocialMedia
|
||||
Scenario: ResourceRecordKeyNumeric
|
||||
Given that the following synonyms for "ResourceRecordKeyNumeric" DO NOT exist in the "SocialMedia" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "ResourceRecordKeyNumeric" exists in the "SocialMedia" metadata
|
||||
Then "ResourceRecordKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "ResourceRecordKeyNumeric" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
|
||||
@SocialMedia
|
||||
Scenario: SocialMediaKey
|
||||
Given that the following synonyms for "SocialMediaKey" DO NOT exist in the "SocialMedia" metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
When "SocialMediaKey" exists in the "SocialMedia" metadata
|
||||
Then "SocialMediaKey" MUST be "String" data type
|
||||
And the following synonyms for "SocialMediaKey" MUST NOT exist in the metadata
|
||||
| SystemUniqueID |
|
||||
| ImmediateSourceID |
|
||||
And "SocialMediaKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@SocialMedia
|
||||
|
@ -80,10 +80,10 @@ Feature: SocialMedia
|
|||
|
||||
@SocialMedia
|
||||
Scenario: SocialMediaType
|
||||
Given that the following synonyms for "SocialMediaType" DO NOT exist in the "SocialMedia" metadata
|
||||
| MimeType |
|
||||
When "SocialMediaType" exists in the "SocialMedia" metadata
|
||||
Then "SocialMediaType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "SocialMediaType" MUST NOT exist in the metadata
|
||||
| MimeType |
|
||||
|
||||
@SocialMedia
|
||||
Scenario: SocialMediaUrlOrId
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 20210105220129831
|
||||
Feature: TeamMembers
|
||||
|
||||
Background:
|
||||
|
@ -14,33 +14,33 @@ Feature: TeamMembers
|
|||
|
||||
@TeamMembers
|
||||
Scenario: MemberKey
|
||||
Given that the following synonyms for "MemberKey" DO NOT exist in the "TeamMembers" metadata
|
||||
| AgentKey |
|
||||
When "MemberKey" exists in the "TeamMembers" metadata
|
||||
Then "MemberKey" MUST be "String" data type
|
||||
And the following synonyms for "MemberKey" MUST NOT exist in the metadata
|
||||
| AgentKey |
|
||||
And "MemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
Scenario: MemberKeyNumeric
|
||||
Given that the following synonyms for "MemberKeyNumeric" DO NOT exist in the "TeamMembers" metadata
|
||||
| AgentKeyNumeric |
|
||||
When "MemberKeyNumeric" exists in the "TeamMembers" metadata
|
||||
Then "MemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "MemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| AgentKeyNumeric |
|
||||
|
||||
@TeamMembers
|
||||
Scenario: MemberLoginId
|
||||
Given that the following synonyms for "MemberLoginId" DO NOT exist in the "TeamMembers" metadata
|
||||
| AgentLoginId |
|
||||
When "MemberLoginId" exists in the "TeamMembers" metadata
|
||||
Then "MemberLoginId" MUST be "String" data type
|
||||
And the following synonyms for "MemberLoginId" MUST NOT exist in the metadata
|
||||
| AgentLoginId |
|
||||
And "MemberLoginId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@TeamMembers
|
||||
Scenario: MemberMlsId
|
||||
Given that the following synonyms for "MemberMlsId" DO NOT exist in the "TeamMembers" metadata
|
||||
| AgentMlsId |
|
||||
When "MemberMlsId" exists in the "TeamMembers" metadata
|
||||
Then "MemberMlsId" MUST be "String" data type
|
||||
And the following synonyms for "MemberMlsId" MUST NOT exist in the metadata
|
||||
| AgentMlsId |
|
||||
And "MemberMlsId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@TeamMembers
|
||||
|
@ -61,44 +61,44 @@ Feature: TeamMembers
|
|||
|
||||
@TeamMembers
|
||||
Scenario: OriginatingSystemKey
|
||||
Given that the following synonyms for "OriginatingSystemKey" DO NOT exist in the "TeamMembers" metadata
|
||||
| ProviderKey |
|
||||
When "OriginatingSystemKey" exists in the "TeamMembers" metadata
|
||||
Then "OriginatingSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "OriginatingSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "TeamMembers" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "TeamMembers" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "TeamMembers" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "TeamMembers" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@TeamMembers
|
||||
Scenario: SourceSystemKey
|
||||
Given that the following synonyms for "SourceSystemKey" DO NOT exist in the "TeamMembers" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemKey" exists in the "TeamMembers" metadata
|
||||
Then "SourceSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "TeamMembers" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "TeamMembers" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
|
@ -119,38 +119,38 @@ Feature: TeamMembers
|
|||
|
||||
@TeamMembers
|
||||
Scenario: TeamMemberKey
|
||||
Given that the following synonyms for "TeamMemberKey" DO NOT exist in the "TeamMembers" metadata
|
||||
| TeamAgentKey |
|
||||
When "TeamMemberKey" exists in the "TeamMembers" metadata
|
||||
Then "TeamMemberKey" MUST be "String" data type
|
||||
And the following synonyms for "TeamMemberKey" MUST NOT exist in the metadata
|
||||
| TeamAgentKey |
|
||||
And "TeamMemberKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@TeamMembers
|
||||
Scenario: TeamMemberKeyNumeric
|
||||
Given that the following synonyms for "TeamMemberKeyNumeric" DO NOT exist in the "TeamMembers" metadata
|
||||
| TeamAgentKeyNumeric |
|
||||
When "TeamMemberKeyNumeric" exists in the "TeamMembers" metadata
|
||||
Then "TeamMemberKeyNumeric" MUST be "Integer" data type
|
||||
And the following synonyms for "TeamMemberKeyNumeric" MUST NOT exist in the metadata
|
||||
| TeamAgentKeyNumeric |
|
||||
|
||||
@TeamMembers
|
||||
Scenario: TeamMemberNationalAssociationId
|
||||
Given that the following synonyms for "TeamMemberNationalAssociationId" DO NOT exist in the "TeamMembers" metadata
|
||||
| TeamAgentNationalAssociationId |
|
||||
When "TeamMemberNationalAssociationId" exists in the "TeamMembers" metadata
|
||||
Then "TeamMemberNationalAssociationId" MUST be "String" data type
|
||||
And the following synonyms for "TeamMemberNationalAssociationId" MUST NOT exist in the metadata
|
||||
| TeamAgentNationalAssociationId |
|
||||
And "TeamMemberNationalAssociationId" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@TeamMembers
|
||||
Scenario: TeamMemberStateLicense
|
||||
Given that the following synonyms for "TeamMemberStateLicense" DO NOT exist in the "TeamMembers" metadata
|
||||
| TeamAgentStateLicense |
|
||||
When "TeamMemberStateLicense" exists in the "TeamMembers" metadata
|
||||
Then "TeamMemberStateLicense" MUST be "String" data type
|
||||
And the following synonyms for "TeamMemberStateLicense" MUST NOT exist in the metadata
|
||||
| TeamAgentStateLicense |
|
||||
And "TeamMemberStateLicense" length SHOULD be equal to the RESO Suggested Max Length of 50
|
||||
|
||||
@TeamMembers
|
||||
Scenario: TeamMemberType
|
||||
Given that the following synonyms for "TeamMemberType" DO NOT exist in the "TeamMembers" metadata
|
||||
| TeamAgentType |
|
||||
When "TeamMemberType" exists in the "TeamMembers" metadata
|
||||
Then "TeamMemberType" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "TeamMemberType" MUST NOT exist in the metadata
|
||||
| TeamAgentType |
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# This file was autogenerated on: 20211212171220893
|
||||
# This file was autogenerated on: 2021031622034416
|
||||
Feature: Teams
|
||||
|
||||
Background:
|
||||
|
@ -36,11 +36,11 @@ Feature: Teams
|
|||
|
||||
@Teams
|
||||
Scenario: OriginatingSystemName
|
||||
Given that the following synonyms for "OriginatingSystemName" DO NOT exist in the "Teams" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "OriginatingSystemName" exists in the "Teams" metadata
|
||||
Then "OriginatingSystemName" MUST be "String" data type
|
||||
And the following synonyms for "OriginatingSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "OriginatingSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Teams
|
||||
|
@ -50,27 +50,27 @@ Feature: Teams
|
|||
|
||||
@Teams
|
||||
Scenario: SourceSystemID
|
||||
Given that the following synonyms for "SourceSystemID" DO NOT exist in the "Teams" metadata
|
||||
| MLSID |
|
||||
When "SourceSystemID" exists in the "Teams" metadata
|
||||
Then "SourceSystemID" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemID" MUST NOT exist in the metadata
|
||||
| MLSID |
|
||||
And "SourceSystemID" length SHOULD be equal to the RESO Suggested Max Length of 25
|
||||
|
||||
@Teams
|
||||
Scenario: SourceSystemKey
|
||||
Given that the following synonyms for "SourceSystemKey" DO NOT exist in the "Teams" metadata
|
||||
| ProviderKey |
|
||||
When "SourceSystemKey" exists in the "Teams" metadata
|
||||
Then "SourceSystemKey" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemKey" MUST NOT exist in the metadata
|
||||
| ProviderKey |
|
||||
And "SourceSystemKey" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Teams
|
||||
Scenario: SourceSystemName
|
||||
Given that the following synonyms for "SourceSystemName" DO NOT exist in the "Teams" metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
When "SourceSystemName" exists in the "Teams" metadata
|
||||
Then "SourceSystemName" MUST be "String" data type
|
||||
And the following synonyms for "SourceSystemName" MUST NOT exist in the metadata
|
||||
| ProviderName |
|
||||
| MLSID |
|
||||
And "SourceSystemName" length SHOULD be equal to the RESO Suggested Max Length of 255
|
||||
|
||||
@Teams
|
||||
|
@ -87,11 +87,11 @@ Feature: Teams
|
|||
|
||||
@Teams
|
||||
Scenario: TeamCarrierRoute
|
||||
Given that the following synonyms for "TeamCarrierRoute" DO NOT exist in the "Teams" metadata
|
||||
| RR |
|
||||
| CR |
|
||||
When "TeamCarrierRoute" exists in the "Teams" metadata
|
||||
Then "TeamCarrierRoute" MUST be "String" data type
|
||||
And the following synonyms for "TeamCarrierRoute" MUST NOT exist in the metadata
|
||||
| RR |
|
||||
| CR |
|
||||
And "TeamCarrierRoute" length SHOULD be equal to the RESO Suggested Max Length of 9
|
||||
|
||||
@Teams
|
||||
|
@ -107,10 +107,10 @@ Feature: Teams
|
|||
|
||||
@Teams
|
||||
Scenario: TeamCountyOrParish
|
||||
Given that the following synonyms for "TeamCountyOrParish" DO NOT exist in the "Teams" metadata
|
||||
| County |
|
||||
When "TeamCountyOrParish" exists in the "Teams" metadata
|
||||
Then "TeamCountyOrParish" MUST be "Single Enumeration" data type
|
||||
And the following synonyms for "TeamCountyOrParish" MUST NOT exist in the metadata
|
||||
| County |
|
||||
|
||||
@Teams
|
||||
Scenario: TeamDescription
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
Feature: Payloads Sampling (Web API)
|
||||
All Scenarios passing means the given Web API Server has completed data availability testing
|
||||
# SEE: https://docs.google.com/document/d/1btCduOpWWzeadeMcSviA8M9dclIz23P-bPUGKwcD0NY/edit?usp=sharing
|
||||
|
||||
Background:
|
||||
Given a RESOScript file was provided
|
||||
And Client Settings and Parameters were read from the file
|
||||
And a test container was successfully created from the given RESOScript
|
||||
And the test container uses an authorization_code or client_credentials for authentication
|
||||
|
||||
@metadata-validation @payloads-sampling @dd-1.7
|
||||
Scenario: Request and Validate Server Metadata
|
||||
When XML Metadata are requested from the service root in "ClientSettings_WebAPIURI"
|
||||
Then the server responds with a status code of 200
|
||||
And the server has an OData-Version header value of "4.0" or "4.01"
|
||||
And the XML Metadata response is valid XML
|
||||
And the XML Metadata returned by the server are valid
|
||||
And the XML Metadata returned by the server contains Edm metadata
|
||||
And the Edm metadata returned by the server are valid
|
||||
And the metadata contains a valid service document
|
||||
And each resource MUST have a primary key field by the OData specification
|
||||
|
||||
@standard-resource-sampling @dd-1.7 @payloads-sampling
|
||||
Scenario: Standard Resource Sampling
|
||||
Given that metadata have been retrieved from the server and validated
|
||||
And the metadata contains RESO Standard Resources
|
||||
And "payload-samples" has been created in the build directory
|
||||
Then up to 100000 records are sampled from each resource with payload samples stored in "payload-samples"
|
||||
|
||||
@local-resource-sampling @dd-1.7 @payloads-sampling
|
||||
Scenario: Non Standard Resource Sampling - Request Data from Each Server Resource
|
||||
Given that metadata have been retrieved from the server and validated
|
||||
And the metadata contains local resources
|
||||
Then up to 100000 records are sampled from each local resource
|
||||
|
||||
@payloads-sampling @dd-1.7
|
||||
Scenario: A Data Availability Report is Created from Sampled Records
|
||||
Given standard and local resources have been processed
|
||||
Then a data availability report is created in "data-availability-report.json"
|
|
@ -1,5 +1,5 @@
|
|||
Feature: Payloads Sampling (Web API)
|
||||
All Scenarios passing means the given Web API Server has completed data availability testing
|
||||
Feature: IDX Payload Endorsement (Web API)
|
||||
All Scenarios passing means the given Web API server passes the IDX Payloads Endorsement
|
||||
# SEE: https://docs.google.com/document/d/1btCduOpWWzeadeMcSviA8M9dclIz23P-bPUGKwcD0NY/edit?usp=sharing
|
||||
|
||||
Background:
|
||||
|
@ -9,7 +9,7 @@ Feature: Payloads Sampling (Web API)
|
|||
And the test container uses an authorization_code or client_credentials for authentication
|
||||
|
||||
# TODO: tie back into common metadata validation shared scenario
|
||||
@metadata-validation @idx-payload-endorsement @dd-1.7
|
||||
@metadata-validation @idx-payload-endorsement @dd-1.7 @web-api-1.0.2
|
||||
Scenario: Request and Validate Server Metadata
|
||||
When XML Metadata are requested from the service root in "ClientSettings_WebAPIURI"
|
||||
Then the server responds with a status code of 200
|
||||
|
@ -21,7 +21,26 @@ Feature: Payloads Sampling (Web API)
|
|||
And the metadata contains a valid service document
|
||||
And each resource MUST have a primary key field by the OData specification
|
||||
|
||||
@standard-resource-sampling @dd-1.7 @idx-payload-endorsement
|
||||
Scenario: Standard Resource Sampling
|
||||
Given that valid metadata have been requested from the server
|
||||
And the metadata contains RESO Standard Resources
|
||||
And "payload-samples" has been created in the build directory
|
||||
Then up to 10000 records are sampled from each resource with "IDX" payload samples stored in "payload-samples"
|
||||
|
||||
# data are not stored in this case, just sampled and scored
|
||||
@local-resource-sampling @dd-1.7 @idx-payload-endorsement
|
||||
Scenario: Non Standard Resource Sampling - Request Data from Each Server Resource
|
||||
Given that valid metadata have been requested from the server
|
||||
And the metadata contains local resources
|
||||
Then up to 10000 records are sampled from each local resource
|
||||
|
||||
@idx-payload-endorsement @dd-1.7
|
||||
Scenario: A Data Availability Report is Created from Sampled Records
|
||||
Given standard and local resources have been processed
|
||||
Then a data availability report is created in "data-availability-report.json"
|
||||
|
||||
@idx-user-sampling @dd-1.7 @idx-payload-endorsement
|
||||
Scenario: IDX User Sampling
|
||||
Given samples exist in "payload-samples" in the build directory
|
||||
And a RESOScript file was provided for the IDX User
|
||||
|
|
|
@ -19,7 +19,7 @@ Feature: Web API Server Add/Edit Endorsement
|
|||
#
|
||||
# This is without the prefer header and minimal value
|
||||
#
|
||||
@create @create-succeeds @add-edit-endorsement @rcp-010 @2.0.0
|
||||
@create @create-succeeds @add-edit-endorsement @rcp-010 @1.0.2
|
||||
Scenario: Create operation succeeds using a given payload
|
||||
Given valid metadata have been retrieved
|
||||
And request data has been provided in "create-succeeds.json"
|
||||
|
@ -64,7 +64,7 @@ Feature: Web API Server Add/Edit Endorsement
|
|||
# OData-Version: 4.01
|
||||
# Content-Type: application/json
|
||||
# Accept: application/json
|
||||
@create @create-fails @add-edit-endorsement @rcp-010 @2.0.0
|
||||
@create @create-fails @add-edit-endorsement @rcp-010 @1.0.2
|
||||
Scenario: Create operation fails using a given payload
|
||||
Given valid metadata have been retrieved
|
||||
And request data has been provided in "create-fails.json"
|
||||
|
|
|
@ -8,7 +8,7 @@ Feature: Web API Server Core Endorsement
|
|||
And a test container was successfully created from the given RESOScript
|
||||
And the test container uses an authorization_code or client_credentials for authentication
|
||||
|
||||
@metadata-validation @core-endorsement @add-edit-endorsement @2.0.0
|
||||
@metadata-validation @core-endorsement @add-edit-endorsement @1.0.2
|
||||
Scenario: metadata-validation - Request and Validate Server Metadata
|
||||
When XML Metadata are requested from the service root in "ClientSettings_WebAPIURI"
|
||||
Then the server responds with a status code of 200
|
||||
|
@ -22,7 +22,7 @@ Feature: Web API Server Core Endorsement
|
|||
And the metadata contains the "Parameter_EndpointResource" resource
|
||||
And the metadata contains at least one resource from "Parameter_WebAPI102_RequiredResourceList"
|
||||
|
||||
@service-document @core-endorsement @2.0.0
|
||||
@service-document @core-endorsement @1.0.2
|
||||
Scenario: service-document - Service Document Request
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "service-document"
|
||||
|
@ -30,7 +30,7 @@ Feature: Web API Server Core Endorsement
|
|||
And the server has an OData-Version header value of "4.0" or "4.01"
|
||||
And the response is valid JSON
|
||||
|
||||
@fetch-by-key @core-endorsement @2.0.0
|
||||
@fetch-by-key @core-endorsement @1.0.2
|
||||
Scenario: fetch-by-key - fetch by Key Field
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "fetch-by-key"
|
||||
|
@ -40,7 +40,7 @@ Feature: Web API Server Core Endorsement
|
|||
And the response has singleton results in "Parameter_KeyField"
|
||||
And the provided "Parameter_KeyValue" is returned in "Parameter_KeyField"
|
||||
|
||||
@select @core-endorsement @2.0.0
|
||||
@select @core-endorsement @1.0.2
|
||||
Scenario: select - Query Support: $select
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "select"
|
||||
|
@ -51,7 +51,7 @@ Feature: Web API Server Core Endorsement
|
|||
And resource metadata for "Parameter_EndpointResource" contains the fields in the given select list
|
||||
And data are present for fields contained within the given select list
|
||||
|
||||
@top @core-endorsement @2.0.0
|
||||
@top @core-endorsement @1.0.2
|
||||
Scenario: top - Query Support: $top
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "top"
|
||||
|
@ -63,7 +63,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And the number of results is less than or equal to "Parameter_TopCount"
|
||||
|
||||
@skip @core-endorsement @2.0.0
|
||||
@skip @core-endorsement @1.0.2
|
||||
Scenario: skip - Query Support: $skip
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "skip"
|
||||
|
@ -82,7 +82,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And data in the "Parameter_Key" fields are different in the second request than in the first
|
||||
|
||||
@count @core-endorsement @2.0.0
|
||||
@count @core-endorsement @1.0.2
|
||||
Scenario: count - Query Support: $count=true
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "count"
|
||||
|
@ -96,7 +96,7 @@ Feature: Web API Server Core Endorsement
|
|||
# INTEGER COMPARISONS
|
||||
##############################################
|
||||
|
||||
@filter-int-and @core-endorsement @2.0.0
|
||||
@filter-int-and @core-endorsement @1.0.2
|
||||
Scenario: filter-int-and - $filter - Integer Comparison: and
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-and"
|
||||
|
@ -108,7 +108,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "gt" "Parameter_IntegerValueLow" "and" "lt" "Parameter_IntegerValueHigh"
|
||||
|
||||
@filter-int-or @core-endorsement @2.0.0
|
||||
@filter-int-or @core-endorsement @1.0.2
|
||||
Scenario: filter-int-or - $filter - Integer Comparison: or
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-or"
|
||||
|
@ -120,7 +120,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "gt" "Parameter_IntegerValueLow" "or" "lt" "Parameter_IntegerValueHigh"
|
||||
|
||||
@filter-int-not @core-endorsement @2.0.0
|
||||
@filter-int-not @core-endorsement @1.0.2
|
||||
Scenario: filter-int-not - $filter - Integer Comparison: not() operator
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-not"
|
||||
|
@ -132,7 +132,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_FilterNotField" "ne" "Parameter_FilterNotValue"
|
||||
|
||||
@filter-int-eq @core-endorsement @2.0.0
|
||||
@filter-int-eq @core-endorsement @1.0.2
|
||||
Scenario: filter-int-eq - $filter - Integer Comparison: eq
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-eq"
|
||||
|
@ -144,7 +144,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "eq" "Parameter_IntegerValueLow"
|
||||
|
||||
@filter-int-ne @core-endorsement @2.0.0
|
||||
@filter-int-ne @core-endorsement @1.0.2
|
||||
Scenario: filter-int-ne - $filter - Integer Comparison: ne
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-ne"
|
||||
|
@ -156,7 +156,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "ne" "Parameter_IntegerValueLow"
|
||||
|
||||
@filter-int-gt @core-endorsement @2.0.0
|
||||
@filter-int-gt @core-endorsement @1.0.2
|
||||
Scenario: filter-int-gt - $filter - Integer Comparison: gt
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-gt"
|
||||
|
@ -168,7 +168,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "gt" "Parameter_IntegerValueLow"
|
||||
|
||||
@filter-int-ge @core-endorsement @2.0.0
|
||||
@filter-int-ge @core-endorsement @1.0.2
|
||||
Scenario: filter-int-ge - $filter - Integer Comparison: ge
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-ge"
|
||||
|
@ -180,7 +180,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "ge" "Parameter_IntegerValueLow"
|
||||
|
||||
@filter-int-lt @core-endorsement @2.0.0
|
||||
@filter-int-lt @core-endorsement @1.0.2
|
||||
Scenario: filter-int-lt - $filter - Integer Comparison: lt
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-lt"
|
||||
|
@ -192,7 +192,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Integer data in "Parameter_IntegerField" "lt" "Parameter_IntegerValueHigh"
|
||||
|
||||
@filter-int-le @core-endorsement @2.0.0
|
||||
@filter-int-le @core-endorsement @1.0.2
|
||||
Scenario: filter-int-le - $filter - Integer Comparison: le
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-int-le"
|
||||
|
@ -209,7 +209,7 @@ Feature: Web API Server Core Endorsement
|
|||
# DECIMAL COMPARISONS
|
||||
##############################################
|
||||
|
||||
@filter-decimal-ne @core-endorsement @2.0.0
|
||||
@filter-decimal-ne @core-endorsement @1.0.2
|
||||
Scenario: filter-decimal-ne - $filter - Decimal Comparison: ne
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-decimal-ne"
|
||||
|
@ -221,7 +221,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Decimal data in "Parameter_DecimalField" "ne" "Parameter_DecimalValueLow"
|
||||
|
||||
@filter-decimal-gt @core-endorsement @2.0.0
|
||||
@filter-decimal-gt @core-endorsement @1.0.2
|
||||
Scenario: filter-decimal-gt - $filter - Decimal Comparison: gt
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-decimal-gt"
|
||||
|
@ -233,7 +233,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Decimal data in "Parameter_DecimalField" "gt" "Parameter_DecimalValueLow"
|
||||
|
||||
@filter-decimal-ge @core-endorsement @2.0.0
|
||||
@filter-decimal-ge @core-endorsement @1.0.2
|
||||
Scenario: filter-decimal-ge - $filter - Decimal Comparison: ge
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-decimal-ge"
|
||||
|
@ -245,7 +245,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Decimal data in "Parameter_DecimalField" "ge" "Parameter_DecimalValueLow"
|
||||
|
||||
@filter-decimal-lt @core-endorsement @2.0.0
|
||||
@filter-decimal-lt @core-endorsement @1.0.2
|
||||
Scenario: filter-decimal-lt - $filter - Decimal Comparison: lt
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-decimal-lt"
|
||||
|
@ -257,7 +257,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Decimal data in "Parameter_DecimalField" "lt" "Parameter_DecimalValueHigh"
|
||||
|
||||
@filter-decimal-le @core-endorsement @2.0.0
|
||||
@filter-decimal-le @core-endorsement @1.0.2
|
||||
Scenario: filter-decimal-le - $filter - Decimal Comparison: le
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-decimal-le"
|
||||
|
@ -274,7 +274,7 @@ Feature: Web API Server Core Endorsement
|
|||
# ISO 8601 DATES IN 'yyyy-mm-dd' FORMAT
|
||||
##############################################
|
||||
|
||||
@filter-date-eq @core-endorsement @2.0.0
|
||||
@filter-date-eq @core-endorsement @1.0.2
|
||||
Scenario: filter-date-eq - DateField eq 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-eq"
|
||||
|
@ -286,7 +286,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Date data in "Parameter_DateField" "eq" "Parameter_DateValue"
|
||||
|
||||
@filter-date-ne @core-endorsement @2.0.0
|
||||
@filter-date-ne @core-endorsement @1.0.2
|
||||
Scenario: filter-date-ne - DateField ne 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-ne"
|
||||
|
@ -298,7 +298,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Date data in "Parameter_DateField" "ne" "Parameter_DateValue"
|
||||
|
||||
@filter-date-gt @core-endorsement @2.0.0
|
||||
@filter-date-gt @core-endorsement @1.0.2
|
||||
Scenario: filter-date-gt - DateField gt 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-gt"
|
||||
|
@ -310,7 +310,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Date data in "Parameter_DateField" "gt" "Parameter_DateValue"
|
||||
|
||||
@filter-date-ge @core-endorsement @2.0.0
|
||||
@filter-date-ge @core-endorsement @1.0.2
|
||||
Scenario: filter-date-ge - DateField ge 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-ge"
|
||||
|
@ -322,7 +322,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Date data in "Parameter_DateField" "ge" "Parameter_DateValue"
|
||||
|
||||
@filter-date-lt @core-endorsement @2.0.0
|
||||
@filter-date-lt @core-endorsement @1.0.2
|
||||
Scenario: filter-date-gt - DateField lt 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-lt"
|
||||
|
@ -334,7 +334,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Date data in "Parameter_DateField" "lt" "Parameter_DateValue"
|
||||
|
||||
@filter-date-le @core-endorsement @2.0.0
|
||||
@filter-date-le @core-endorsement @1.0.2
|
||||
Scenario: filter-date-le - DateField le 'yyyy-mm-dd' date value
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-date-le"
|
||||
|
@ -351,7 +351,7 @@ Feature: Web API Server Core Endorsement
|
|||
# ISO 8601 Timestamps
|
||||
##############################################
|
||||
|
||||
@filter-datetime-gt @core-endorsement @2.0.0
|
||||
@filter-datetime-gt @core-endorsement @1.0.2
|
||||
Scenario: filter-datetime-lt - TimestampField gt DateTimeOffset
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-datetime-gt"
|
||||
|
@ -363,7 +363,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" "gt" "Parameter_DateTimeValue"
|
||||
|
||||
@filter-datetime-ge @core-endorsement @2.0.0
|
||||
@filter-datetime-ge @core-endorsement @1.0.2
|
||||
Scenario: filter-datetime-gt - TimestampField ge DateTimeOffset
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-datetime-ge"
|
||||
|
@ -375,7 +375,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" "ge" "Parameter_DateTimeValue"
|
||||
|
||||
@filter-datetime-lt-now @core-endorsement @2.0.0
|
||||
@filter-datetime-lt-now @core-endorsement @1.0.2
|
||||
Scenario: filter-datetime-lt-now - TimestampField lt now() DateTimeOffset
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-datetime-lt"
|
||||
|
@ -387,7 +387,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" "lt" now()
|
||||
|
||||
@filter-datetime-le-now @core-endorsement @2.0.0
|
||||
@filter-datetime-le-now @core-endorsement @1.0.2
|
||||
Scenario: filter-datetime-le-now - TimestampField le now() DateTimeOffset
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-datetime-le"
|
||||
|
@ -399,7 +399,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" "le" now()
|
||||
|
||||
@filter-datetime-ne-now @core-endorsement @2.0.0
|
||||
@filter-datetime-ne-now @core-endorsement @1.0.2
|
||||
Scenario: filter-datetime-ne - TimestampField ne now() DateTimeOffset
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-datetime-ne"
|
||||
|
@ -416,7 +416,7 @@ Feature: Web API Server Core Endorsement
|
|||
# ISO 8601 TIMESTAMP SORTING TESTS
|
||||
##############################################
|
||||
|
||||
@orderby-timestamp-asc @core-endorsement @2.0.0
|
||||
@orderby-timestamp-asc @core-endorsement @1.0.2
|
||||
Scenario: orderby-timestamp-asc - Query Support: $orderby ascending
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "orderby-timestamp-asc"
|
||||
|
@ -428,7 +428,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" is sorted in "asc" order
|
||||
|
||||
@orderby-timestamp-desc @core-endorsement @2.0.0
|
||||
@orderby-timestamp-desc @core-endorsement @1.0.2
|
||||
Scenario: orderby-timestamp-desc - Query Support: $orderby timestamp descending
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "orderby-timestamp-desc"
|
||||
|
@ -445,7 +445,7 @@ Feature: Web API Server Core Endorsement
|
|||
# ISO 8601 TIMESTAMP + INTEGER COMPARISONS
|
||||
##############################################
|
||||
|
||||
@orderby-timestamp-asc-filter-int-gt @core-endorsement @2.0.0
|
||||
@orderby-timestamp-asc-filter-int-gt @core-endorsement @1.0.2
|
||||
Scenario: orderby-timestamp-asc-filter-int-gt - Query Support: $orderby timestamp asc
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "orderby-timestamp-asc-filter-int-gt"
|
||||
|
@ -457,7 +457,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And DateTimeOffset data in "Parameter_TimestampField" is sorted in "asc" order
|
||||
|
||||
@orderby-timestamp-desc-filter-int-gt @core-endorsement @2.0.0
|
||||
@orderby-timestamp-desc-filter-int-gt @core-endorsement @1.0.2
|
||||
Scenario: orderby-timestamp-desc-filter-int-gt - Query Support: $orderby desc filtered
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "orderby-timestamp-desc-filter-int-gt"
|
||||
|
@ -474,7 +474,7 @@ Feature: Web API Server Core Endorsement
|
|||
# SINGLE VALUE ENUMERATIONS
|
||||
##############################################
|
||||
|
||||
@filter-enum-single-has @core-endorsement @2.0.0
|
||||
@filter-enum-single-has @core-endorsement @1.0.2
|
||||
Scenario: filter-enum-single-has - Support Single Value Lookups
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-enum-single-has"
|
||||
|
@ -486,7 +486,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Single Valued Enumeration Data in "Parameter_SingleValueLookupField" "has" "Parameter_SingleLookupValue"
|
||||
|
||||
@filter-enum-single-eq @core-endorsement @2.0.0
|
||||
@filter-enum-single-eq @core-endorsement @1.0.2
|
||||
Scenario: filter-enum-single-eq - Query Support: Single Edm.EnumType, eq
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-enum-single-eq"
|
||||
|
@ -496,7 +496,7 @@ Feature: Web API Server Core Endorsement
|
|||
And the response has results
|
||||
And Single Valued Enumeration Data in "Parameter_SingleValueLookupField" "eq" "Parameter_SingleLookupValue"
|
||||
|
||||
@filter-enum-ne @core-endorsement @2.0.0
|
||||
@filter-enum-ne @core-endorsement @1.0.2
|
||||
Scenario: filter-enum-single-ne - Query Support: Single Edm.EnumType, ne
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-enum-single-ne"
|
||||
|
@ -510,7 +510,7 @@ Feature: Web API Server Core Endorsement
|
|||
# MULTI-VALUE ENUMERATIONS - IsFlags
|
||||
##############################################
|
||||
|
||||
@filter-enum-multi-has @core-endorsement @2.0.0
|
||||
@filter-enum-multi-has @core-endorsement @1.0.2
|
||||
Scenario: filter-enum-multi-has - Support Multi Value Lookups
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-enum-multi-has"
|
||||
|
@ -522,7 +522,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Multiple Valued Enumeration Data in "Parameter_MultipleValueLookupField" has "Parameter_MultipleLookupValue1"
|
||||
|
||||
@filter-enum-multi-has-and @core-endorsement @2.0.0
|
||||
@filter-enum-multi-has-and @core-endorsement @1.0.2
|
||||
Scenario: filter-enum-multi-has-and - Support Multi Value Lookups multiple values
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "filter-enum-multi-has-and"
|
||||
|
@ -540,7 +540,7 @@ Feature: Web API Server Core Endorsement
|
|||
# MULTI-VALUE ENUMERATIONS - Collections
|
||||
##############################################
|
||||
|
||||
@filter-coll-enum-any @core-endorsement @2.0.0
|
||||
@filter-coll-enum-any @core-endorsement @1.0.2
|
||||
Scenario: filter-coll-enum-any - Collections for Multi-Enumerations: any()
|
||||
Given valid metadata have been retrieved
|
||||
And field "Parameter_MultipleValueLookupField" in "Parameter_EndpointResource" has Collection of Enumeration data type
|
||||
|
@ -553,7 +553,7 @@ Feature: Web API Server Core Endorsement
|
|||
And data are present for fields contained within the given select list
|
||||
And Multiple Valued Enumeration Data in "Parameter_MultipleValueLookupField" has "Parameter_MultipleLookupValue1"
|
||||
|
||||
@filter-coll-enum-all @core-endorsement @2.0.0
|
||||
@filter-coll-enum-all @core-endorsement @1.0.2
|
||||
Scenario: filter-coll-enum-all - Collections of Multi-Enumerations: all()
|
||||
Given valid metadata have been retrieved
|
||||
And field "Parameter_MultipleValueLookupField" in "Parameter_EndpointResource" has Collection of Enumeration data type
|
||||
|
@ -572,7 +572,7 @@ Feature: Web API Server Core Endorsement
|
|||
# RESPONSE CODE TESTING
|
||||
##############################################
|
||||
|
||||
@response-code-400 @core-endorsement @2.0.0
|
||||
@response-code-400 @core-endorsement @1.0.2
|
||||
Scenario: response-code-400 - 400 Bad Request
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "response-code-400"
|
||||
|
@ -580,7 +580,7 @@ Feature: Web API Server Core Endorsement
|
|||
# Disable this check for now until Olingo-1380 is fixed - see: https://issues.apache.org/jira/browse/OLINGO-1380
|
||||
# And the server has an OData-Version header value of "4.0" or "4.01"
|
||||
|
||||
@response-code-404 @core-endorsement @2.0.0
|
||||
@response-code-404 @core-endorsement @1.0.2
|
||||
Scenario: response-code-404 - 404 Not Found Request
|
||||
Given valid metadata have been retrieved
|
||||
When a GET request is made to the resolved Url in "response-code-404"
|
||||
|
|
|
@ -0,0 +1,119 @@
|
|||
package org.reso.certification.reporting;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.JsonArray;
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import net.masterthought.cucumber.Configuration;
|
||||
import net.masterthought.cucumber.ReportBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.reso.commander.common.Utils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.reso.commander.Commander.NOT_OK;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
public class CertificationReportGenerator {
|
||||
private static final Logger LOG = LogManager.getLogger(CertificationReportGenerator.class);
|
||||
private static final String PATH_TO_JSON_RESULTS = System.getProperty("pathToJsonResults", null);
|
||||
private static final String outputDirectoryName = PATH_TO_JSON_RESULTS.substring(0, PATH_TO_JSON_RESULTS.lastIndexOf(File.separator));
|
||||
private static final boolean USE_MINIMAL_REPORT = Boolean.parseBoolean(System.getProperty("minimal", "false"));
|
||||
private static final String DEFAULT_REPORT_DESCRIPTION = "Certification Report";
|
||||
private static final String projectName = System.getProperty("reportDescription", DEFAULT_REPORT_DESCRIPTION);
|
||||
private static final String MINIMAL_JSON_EXTENSION = "minimal.json";
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
if (PATH_TO_JSON_RESULTS == null || !Files.exists(Paths.get(PATH_TO_JSON_RESULTS))) {
|
||||
LOG.error(getDefaultErrorMessage("path to JSON results does not exist!" +
|
||||
(PATH_TO_JSON_RESULTS != null ? "\npathToJsonResults=\"" + PATH_TO_JSON_RESULTS + "\"" : "")));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
||||
LOG.info("Path to JSON Results is: " + PATH_TO_JSON_RESULTS);
|
||||
|
||||
if (USE_MINIMAL_REPORT) {
|
||||
LOG.info("Using minimal report format...");
|
||||
Utils.createFile(PATH_TO_JSON_RESULTS, filterJSONResults());
|
||||
} else {
|
||||
List<String> jsonFiles = new ArrayList<>();
|
||||
Configuration configuration = new Configuration(new File(outputDirectoryName), projectName);
|
||||
|
||||
jsonFiles.add(PATH_TO_JSON_RESULTS);
|
||||
|
||||
ReportBuilder reportBuilder = new ReportBuilder(jsonFiles, configuration);
|
||||
reportBuilder.generateReports();
|
||||
|
||||
//remove minimal report file
|
||||
if (jsonFiles.size() > 0 && jsonFiles.get(0).contains(MINIMAL_JSON_EXTENSION))
|
||||
new File(jsonFiles.get(0)).deleteOnExit();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters all trailing skipped tests where there are no subsequent errors.
|
||||
* Depends on the output of the Cucumber Reporting JSON plugin
|
||||
*
|
||||
* @return a JSON string with any trailing skipped tests removed
|
||||
*/
|
||||
private static String filterJSONResults() {
|
||||
try {
|
||||
Gson gson = new Gson();
|
||||
JsonArray scenarios = gson.fromJson(new FileReader(PATH_TO_JSON_RESULTS), JsonArray.class);
|
||||
JsonArray filteredScenarios = new JsonArray();
|
||||
JsonArray filteredElements;
|
||||
|
||||
int skippedStepsCount = 0, failedStepsCount = 0;
|
||||
|
||||
for (JsonElement scenario : scenarios) {
|
||||
filteredElements = new JsonArray();
|
||||
JsonArray elements = scenario.getAsJsonObject().getAsJsonArray("elements").getAsJsonArray();
|
||||
for (JsonElement element : elements) {
|
||||
if (!element.getAsJsonObject().get("type").getAsString().contentEquals("background")) {
|
||||
JsonArray steps = element.getAsJsonObject().get("steps").getAsJsonArray();
|
||||
for (JsonElement step : steps) {
|
||||
JsonObject result = step.getAsJsonObject().get("result").getAsJsonObject();
|
||||
if (result.get("status").getAsString().contentEquals("skipped")) {
|
||||
skippedStepsCount++;
|
||||
}
|
||||
|
||||
if (result.get("status").getAsString().contentEquals("failed")) {
|
||||
failedStepsCount++;
|
||||
}
|
||||
}
|
||||
if (skippedStepsCount == 0 || failedStepsCount > 0) {
|
||||
filteredElements.add(element);
|
||||
}
|
||||
skippedStepsCount = 0;
|
||||
failedStepsCount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (filteredElements.size() > 0) {
|
||||
final JsonObject filteredScenario = scenario.deepCopy().getAsJsonObject();
|
||||
filteredScenario.getAsJsonObject().remove("elements");
|
||||
filteredScenario.getAsJsonObject().add("elements", filteredElements);
|
||||
filteredScenarios.add(filteredScenario);
|
||||
}
|
||||
}
|
||||
|
||||
String outputFilename = PATH_TO_JSON_RESULTS
|
||||
.substring(PATH_TO_JSON_RESULTS.lastIndexOf(File.separator) + 1)
|
||||
.replace(".json", MINIMAL_JSON_EXTENSION);
|
||||
return Utils.createFile(outputDirectoryName, outputFilename, filteredScenarios.toString()).toString();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
|
@ -18,7 +18,6 @@ import org.apache.olingo.commons.api.edm.EdmMember;
|
|||
import org.apache.olingo.commons.api.edm.EdmNamed;
|
||||
import org.apache.olingo.commons.api.edm.FullQualifiedName;
|
||||
import org.apache.olingo.commons.api.edm.provider.CsdlEnumMember;
|
||||
import org.junit.AfterClass;
|
||||
import org.reso.certification.containers.WebAPITestContainer;
|
||||
import org.reso.commander.Commander;
|
||||
import org.reso.commander.common.TestUtils;
|
||||
|
@ -30,7 +29,6 @@ import org.reso.models.Settings;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Type;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.*;
|
||||
|
@ -66,7 +64,7 @@ public class DataDictionary {
|
|||
private static final AtomicReference<Map<String, Map<String, Set<String>>>> ignoredItems = new AtomicReference<>(new LinkedHashMap<>());
|
||||
|
||||
private static XMLMetadata referenceMetadata = null;
|
||||
private static boolean isMetadataValid = false;
|
||||
private static boolean areMetadataValid = false;
|
||||
|
||||
//named args
|
||||
private static final String SHOW_RESPONSES_ARG = "showResponses";
|
||||
|
@ -77,10 +75,7 @@ public class DataDictionary {
|
|||
|
||||
//extract any params here
|
||||
private final boolean showResponses = Boolean.parseBoolean(System.getProperty(SHOW_RESPONSES_ARG));
|
||||
|
||||
//strict mode is enabled by default
|
||||
private final boolean strictMode = System.getProperty(USE_STRICT_MODE_ARG) == null || Boolean.parseBoolean(System.getProperty(USE_STRICT_MODE_ARG));
|
||||
|
||||
private final boolean strictMode = Boolean.parseBoolean(System.getProperty(USE_STRICT_MODE_ARG));
|
||||
private final String pathToMetadata = System.getProperty(PATH_TO_METADATA_ARG);
|
||||
private final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_ARG);
|
||||
|
||||
|
@ -115,12 +110,13 @@ public class DataDictionary {
|
|||
public void aRESOScriptFileIsProvided() {
|
||||
if (isUsingRESOScript) {
|
||||
if (container.getPathToRESOScript() == null) {
|
||||
container.setPathToRESOScript(System.getProperty(PATH_TO_RESOSCRIPT_ARG));
|
||||
container.setPathToRESOScript(System.getProperty("pathToRESOScript"));
|
||||
}
|
||||
|
||||
if (container.getPathToRESOScript() == null) {
|
||||
failAndExitWithErrorMessage("pathToRESOScript must be present in command arguments, see README.", scenario);
|
||||
}
|
||||
LOG.debug("Using RESOScript: " + container.getPathToRESOScript());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -128,7 +124,7 @@ public class DataDictionary {
|
|||
public void clientSettingsAndParametersCanBeReadFromTheRESOScript() {
|
||||
if (isUsingRESOScript) {
|
||||
if (container.getSettings() == null) {
|
||||
container.setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG))));
|
||||
container.setSettings(Settings.loadFromRESOScript(new File(System.getProperty("pathToRESOScript"))));
|
||||
if (container.getPathToRESOScript() == null) {
|
||||
failAndExitWithErrorMessage("Settings could not be loaded!", scenario);
|
||||
}
|
||||
|
@ -184,14 +180,11 @@ public class DataDictionary {
|
|||
|
||||
//if we have gotten to this point without exceptions, then metadata are valid
|
||||
container.validateMetadata();
|
||||
isMetadataValid = container.hasValidMetadata();
|
||||
areMetadataValid = container.hasValidMetadata();
|
||||
|
||||
//create metadata report
|
||||
Commander.generateMetadataReport(container.getEdm());
|
||||
|
||||
//the container needs a field map built when the metadata is being loaded from a file
|
||||
container.buildFieldMap();
|
||||
|
||||
} catch (IOException e) {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage(e), scenario);
|
||||
}
|
||||
|
@ -209,9 +202,9 @@ public class DataDictionary {
|
|||
}
|
||||
|
||||
//metadata validation tests
|
||||
TestUtils.assertValidXMLMetadata(container, scenario);
|
||||
TestUtils.assertXmlMetadataContainsEdm(container, scenario);
|
||||
TestUtils.assertXMLMetadataHasValidServiceDocument(container, scenario);
|
||||
TestUtils.assertValidXMLMetadata(container);
|
||||
TestUtils.assertXmlMetadataContainsEdm(container);
|
||||
TestUtils.assertXMLMetadataHasValidServiceDocument(container);
|
||||
|
||||
//build field map and ensure it's not null
|
||||
assertNotNull(container.getFieldMap());
|
||||
|
@ -220,15 +213,7 @@ public class DataDictionary {
|
|||
container.setShouldValidateMetadata(false);
|
||||
|
||||
//if we have gotten to this point without exceptions, then metadata are valid
|
||||
isMetadataValid = container.hasValidMetadata();
|
||||
|
||||
if (!isMetadataValid) {
|
||||
failAndExitWithErrorMessage("OData XML Metadata MUST be valid!", scenario);
|
||||
}
|
||||
|
||||
//save metadata locally
|
||||
Utils.createFile("build" + File.separator + "certification" + File.separator + "results",
|
||||
"metadata.xml", container.getXMLResponseData());
|
||||
areMetadataValid = container.hasValidMetadata();
|
||||
|
||||
//create metadata report
|
||||
Commander.generateMetadataReport(container.getEdm());
|
||||
|
@ -238,7 +223,7 @@ public class DataDictionary {
|
|||
@When("{string} exists in the {string} metadata")
|
||||
public void existsInTheMetadata(String fieldName, String resourceName) {
|
||||
|
||||
if (strictMode && !isMetadataValid) {
|
||||
if (strictMode && !areMetadataValid) {
|
||||
failAndExitWithErrorMessage("Metadata validation failed, but is required to pass when using strict mode!", scenario);
|
||||
}
|
||||
|
||||
|
@ -531,16 +516,12 @@ public class DataDictionary {
|
|||
LOG.info("PASSED: Field \"" + fieldName + "\" only contains Standard Names!");
|
||||
}
|
||||
|
||||
@Given("that the following synonyms for {string} DO NOT exist in the {string} metadata")
|
||||
public void theFollowingSynonymsForDONOTExistInTheMetadata(String fieldName, String resourceName, List<String> synonyms) {
|
||||
if (container.getFieldMap(resourceName) == null) {
|
||||
assumeTrue("\"" + resourceName + "\" not found in metadata!", true);
|
||||
} else {
|
||||
synonyms.forEach(synonym ->
|
||||
assertFalse(wrapColumns(getDefaultErrorMessage("Synonym", "\"" + synonym + "\"", "of fieldName", "\"" + fieldName + "\"", "found in the metadata!",
|
||||
"\nSynonyms are not allowed!")),
|
||||
container.getFieldMap(resourceName).containsKey(synonym)));
|
||||
}
|
||||
@And("the following synonyms for {string} MUST NOT exist in the metadata")
|
||||
public void theFollowingSynonymsForMUSTNOTExistInTheMetadata(String fieldName, List<String> synonyms) {
|
||||
synonyms.forEach(synonym ->
|
||||
assertFalse(wrapColumns(getDefaultErrorMessage("Synonym", "\"" + synonym + "\"", "of fieldName", "\"" + fieldName + "\"", "found in the metadata!",
|
||||
"\nSynonyms are not allowed!")),
|
||||
container.getFieldMap(currentResourceName.get()).containsKey(synonym)));
|
||||
}
|
||||
|
||||
private static int getDistanceThreshold(String word) {
|
||||
|
@ -606,11 +587,9 @@ public class DataDictionary {
|
|||
|
||||
private XMLMetadata getReferenceMetadata() {
|
||||
if (referenceMetadata == null) {
|
||||
URL resource = Thread.currentThread().getContextClassLoader().getResource(REFERENCE_METADATA);
|
||||
assert resource != null;
|
||||
referenceMetadata = Commander
|
||||
.deserializeXMLMetadata(Commander.convertInputStreamToString(Commander.deserializeFileFromPath(resource.getPath())),
|
||||
container.getCommander().getClient());
|
||||
final String xmlMetadata = Commander.convertInputStreamToString(Thread.currentThread().getContextClassLoader().getResourceAsStream(REFERENCE_METADATA));
|
||||
assert xmlMetadata != null : getDefaultErrorMessage("could not load reference metadata from: " + REFERENCE_METADATA);
|
||||
referenceMetadata = Commander.deserializeXMLMetadata(xmlMetadata, container.getCommander().getClient());
|
||||
}
|
||||
return referenceMetadata;
|
||||
}
|
||||
|
|
|
@ -45,61 +45,28 @@ import java.util.stream.Collectors;
|
|||
import static io.restassured.path.json.JsonPath.from;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assume.assumeTrue;
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_DATA_TYPES.STRING_LIST_MULTI;
|
||||
import static org.reso.certification.codegen.WorksheetProcessor.WELL_KNOWN_DATA_TYPES.STRING_LIST_SINGLE;
|
||||
import static org.reso.certification.containers.WebAPITestContainer.EMPTY_STRING;
|
||||
import static org.reso.commander.Commander.NOT_OK;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
import static org.reso.commander.common.TestUtils.failAndExitWithErrorMessage;
|
||||
|
||||
public class DataAvailability {
|
||||
private static final Logger LOG = LogManager.getLogger(DataAvailability.class);
|
||||
public class IDXPayload {
|
||||
private static final Logger LOG = LogManager.getLogger(IDXPayload.class);
|
||||
private static final String MODIFICATION_TIMESTAMP_FIELD = "ModificationTimestamp";
|
||||
private static final String POSTAL_CODE_FIELD = "PostalCode";
|
||||
private static final int TOP_COUNT = 100;
|
||||
|
||||
private static final int MAX_TIMESTAMP_RETRIES = 3;
|
||||
private static final int MAX_RETRIES = 3;
|
||||
|
||||
private static final String BUILD_DIRECTORY_PATH = "build";
|
||||
private static final String CERTIFICATION_PATH = BUILD_DIRECTORY_PATH + File.separator + "certification";
|
||||
public static final String CERTIFICATION_RESULTS_PATH = BUILD_DIRECTORY_PATH + File.separator + "certification" + File.separator + "results";
|
||||
private static final String SAMPLES_DIRECTORY_TEMPLATE = BUILD_DIRECTORY_PATH + File.separator + "%s";
|
||||
private static final String SAMPLES_DIRECTORY_ROOT = "build";
|
||||
private static final String SAMPLES_DIRECTORY_TEMPLATE = SAMPLES_DIRECTORY_ROOT + File.separator + "%s";
|
||||
private static final String PATH_TO_RESOSCRIPT_KEY = "pathToRESOScript";
|
||||
|
||||
private static final String PATH_TO_RESOSCRIPT_ARG = "pathToRESOScript";
|
||||
final String REQUEST_URI_TEMPLATE = "?$filter=%s" + " lt %s&$orderby=%s desc&$top=" + TOP_COUNT;
|
||||
final String COUNT_REQUEST_URI_TEMPLATE = "?$count=true";
|
||||
|
||||
// strict mode is enabled by default
|
||||
private static final String USE_STRICT_MODE_ARG = "strict";
|
||||
private final boolean STRICT_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(USE_STRICT_MODE_ARG, "true"));
|
||||
|
||||
// abTesting mode is disabled by default
|
||||
private static final String A_B_TESTING_MODE_ARG = "abTesting";
|
||||
private static final boolean AB_TESTING_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(A_B_TESTING_MODE_ARG, "false"));
|
||||
|
||||
// OriginatingSystemName query
|
||||
private static final String ORIGINATING_SYSTEM_NAME_FIELD_ARG = "OriginatingSystemName";
|
||||
private static final String ORIGINATING_SYSTEM_NAME_FIELD_VALUE = System.getProperty(ORIGINATING_SYSTEM_NAME_FIELD_ARG, EMPTY_STRING);
|
||||
private static final String ORIGINATING_SYSTEM_NAME_QUERY = ORIGINATING_SYSTEM_NAME_FIELD_ARG + " eq '" + ORIGINATING_SYSTEM_NAME_FIELD_VALUE + "'";
|
||||
private static final boolean USE_ORIGINATING_SYSTEM_NAME_QUERY = ORIGINATING_SYSTEM_NAME_FIELD_VALUE.length() > 0;
|
||||
|
||||
// OriginatingSystemID query
|
||||
private static final String ORIGINATING_SYSTEM_ID_FIELD_ARG = "OriginatingSystemID";
|
||||
private static final String ORIGINATING_SYSTEM_ID_FIELD_VALUE = System.getProperty(ORIGINATING_SYSTEM_ID_FIELD_ARG, EMPTY_STRING);;
|
||||
private static final String ORIGINATING_SYSTEM_ID_QUERY = ORIGINATING_SYSTEM_NAME_FIELD_ARG + " eq '" + ORIGINATING_SYSTEM_NAME_FIELD_VALUE + "'";
|
||||
private static final boolean USE_ORIGINATING_ID_NAME_QUERY = ORIGINATING_SYSTEM_ID_FIELD_VALUE.length() > 0;
|
||||
|
||||
// Query Templates - prefer OriginatingSystemID if both are passed
|
||||
private static final String SAMPLING_REQUEST_URI_TEMPLATE = "?$filter="
|
||||
+ (USE_ORIGINATING_ID_NAME_QUERY ? ORIGINATING_SYSTEM_ID_QUERY + " and "
|
||||
: (USE_ORIGINATING_SYSTEM_NAME_QUERY ? ORIGINATING_SYSTEM_NAME_QUERY + " and " : EMPTY_STRING))
|
||||
+ "%s" + " lt %s&$orderby=%s desc&$top=" + TOP_COUNT;
|
||||
|
||||
private static final String COUNT_REQUEST_URI_TEMPLATE = "?"
|
||||
+ (USE_ORIGINATING_ID_NAME_QUERY ? "$filter=" + ORIGINATING_SYSTEM_ID_QUERY + "&"
|
||||
: (USE_ORIGINATING_SYSTEM_NAME_QUERY ? "$filter=" + ORIGINATING_SYSTEM_NAME_QUERY + "&" : EMPTY_STRING))
|
||||
+ "$count=true";
|
||||
|
||||
private static final String DEBUG_ARG = "debug";
|
||||
private static final boolean DEBUG_MODE_ENABLED = Boolean.parseBoolean(System.getProperty(DEBUG_ARG, "false"));
|
||||
//TODO: get this from the parameters
|
||||
private final static boolean DEBUG = false;
|
||||
|
||||
private static Scenario scenario;
|
||||
|
||||
|
@ -110,54 +77,46 @@ public class DataAvailability {
|
|||
private final static AtomicReference<WebAPITestContainer> container = new AtomicReference<>();
|
||||
private final static AtomicBoolean hasSamplesDirectoryBeenCleared = new AtomicBoolean(false);
|
||||
|
||||
//TODO: compute moving averages and search each payload sample immediately so no collection is needed
|
||||
private final static AtomicReference<Map<String, List<PayloadSample>>> resourcePayloadSampleMap =
|
||||
new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
private final static AtomicReference<Map<String, List<ReferenceStandardField>>> standardFieldCache =
|
||||
new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
private final static AtomicReference<Map<String, Integer>> resourceCounts =
|
||||
new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
private final static AtomicReference<Map<LookupValue, Integer>> resourceFieldLookupTallies =
|
||||
new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
@Inject
|
||||
public DataAvailability(WebAPITestContainer c) {
|
||||
if (container.get() == null) {
|
||||
container.set(c);
|
||||
LOG.info("Using strict mode: " + STRICT_MODE_ENABLED);
|
||||
}
|
||||
public IDXPayload(WebAPITestContainer c) {
|
||||
container.set(c);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void beforeStep(Scenario scenario) {
|
||||
final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_ARG, null);
|
||||
final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_KEY, null);
|
||||
|
||||
if (pathToRESOScript == null) return;
|
||||
|
||||
DataAvailability.scenario = scenario;
|
||||
IDXPayload.scenario = scenario;
|
||||
|
||||
if (!container.get().getIsInitialized()) {
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG))));
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_KEY))));
|
||||
container.get().initialize();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a data availability report for the given samples map
|
||||
*
|
||||
* @param resourcePayloadSamplesMap the samples map to create the report from
|
||||
* @param reportName the name of the report
|
||||
* @param reportName the name of the report
|
||||
*/
|
||||
public void createDataAvailabilityReport(Map<String, List<PayloadSample>> resourcePayloadSamplesMap, String reportName,
|
||||
Map<String, Integer> resourceCounts, Map<LookupValue, Integer> resourceFieldLookupTallies) {
|
||||
|
||||
PayloadSampleReport payloadSampleReport =
|
||||
new PayloadSampleReport(container.get().getEdm(), resourcePayloadSamplesMap, resourceCounts, resourceFieldLookupTallies);
|
||||
public void createDataAvailabilityReport(Map<String, List<PayloadSample>> resourcePayloadSamplesMap,
|
||||
String reportName, Map<String, Integer> resourceCounts) {
|
||||
PayloadSampleReport payloadSampleReport = new PayloadSampleReport(container.get().getEdm(), resourcePayloadSamplesMap, resourceCounts);
|
||||
GsonBuilder gsonBuilder = new GsonBuilder().setPrettyPrinting();
|
||||
gsonBuilder.registerTypeAdapter(PayloadSampleReport.class, payloadSampleReport);
|
||||
|
||||
Utils.createFile(CERTIFICATION_RESULTS_PATH, reportName, gsonBuilder.create().toJson(payloadSampleReport));
|
||||
|
||||
Utils.createFile(SAMPLES_DIRECTORY_ROOT, reportName, gsonBuilder.create().toJson(payloadSampleReport));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -167,16 +126,14 @@ public class DataAvailability {
|
|||
* @return the SHA hash of the given values
|
||||
*/
|
||||
private static String hashValues(String... values) {
|
||||
//noinspection UnstableApiUsage
|
||||
return Hashing.sha256().hashString(String.join(EMPTY_STRING, values), StandardCharsets.UTF_8).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a request URI string, taking into account whether the sampling is being done with an optional
|
||||
* filter, for instance in the shared systems case
|
||||
*
|
||||
* @param resourceName the resource name to query
|
||||
* @param timestampField the timestamp field for the resource
|
||||
* @param resourceName the resource name to query
|
||||
* @param timestampField the timestamp field for the resource
|
||||
* @param lastFetchedDate the last fetched date for filtering
|
||||
* @return a string OData query used for sampling
|
||||
*/
|
||||
|
@ -185,7 +142,7 @@ public class DataAvailability {
|
|||
.newURIBuilder(container.get().getServiceRoot())
|
||||
.appendEntitySetSegment(resourceName).build().toString();
|
||||
|
||||
requestUri += String.format(SAMPLING_REQUEST_URI_TEMPLATE, timestampField,
|
||||
requestUri += String.format(REQUEST_URI_TEMPLATE, timestampField,
|
||||
lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField);
|
||||
|
||||
return requestUri;
|
||||
|
@ -194,7 +151,6 @@ public class DataAvailability {
|
|||
/**
|
||||
* Builds a request URI string for counting the number of available items on a resource, taking into account
|
||||
* whether the sample is being done with an optional filter, for instance in the shared system case
|
||||
*
|
||||
* @param resourceName the resource name to query
|
||||
* @return a request URI string for getting OData counts
|
||||
*/
|
||||
|
@ -210,7 +166,6 @@ public class DataAvailability {
|
|||
|
||||
/**
|
||||
* Queries the server and fetches a resource count for the given resource name
|
||||
*
|
||||
* @param resourceName the resource name to get the count for
|
||||
* @return the count found for the resource, or null if the request did not return a count
|
||||
*/
|
||||
|
@ -253,24 +208,23 @@ public class DataAvailability {
|
|||
final AtomicReference<Map<String, String>> encodedSample = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
final AtomicReference<ODataTransportWrapper> transportWrapper = new AtomicReference<>();
|
||||
final AtomicReference<ResWrap<EntityCollection>> entityCollectionResWrap = new AtomicReference<>();
|
||||
final AtomicReference<ResWrap<EntityCollection>> lastEntityCollectionResWrap = new AtomicReference<>();
|
||||
final AtomicReference<String> timestampField = new AtomicReference<>();
|
||||
final AtomicBoolean hasRecords = new AtomicBoolean(true);
|
||||
final AtomicReference<PayloadSample> payloadSample = new AtomicReference<>();
|
||||
final AtomicReference<Set<PayloadSample>> payloadSamples =
|
||||
new AtomicReference<>(Collections.synchronizedSet(new LinkedHashSet<>()));
|
||||
final AtomicReference<List<PayloadSample>> payloadSamples =
|
||||
new AtomicReference<>(Collections.synchronizedList(new LinkedList<>()));
|
||||
|
||||
boolean hasStandardTimestampField = false;
|
||||
String requestUri;
|
||||
int recordsProcessed = 0;
|
||||
int numTimestampRetries = 0;
|
||||
int numRetries = 0;
|
||||
int lastTimestampCandidateIndex = 0;
|
||||
|
||||
container.get().getEdm().getSchemas().forEach(edmSchema ->
|
||||
edmSchema.getEntityTypes().stream().filter(edmEntityType -> edmEntityType.getName().equals(resourceName))
|
||||
.findFirst().ifPresent(entityType::set));
|
||||
|
||||
//return an empty list if the entity type isn't defined
|
||||
//return null if the entity type isn't defined
|
||||
if (entityType.get() == null) return new ArrayList<>();
|
||||
|
||||
if (entityType.get().getProperty(MODIFICATION_TIMESTAMP_FIELD) == null) {
|
||||
|
@ -282,12 +236,7 @@ public class DataAvailability {
|
|||
if (entityType.get().getProperty(propertyName).getType().getFullQualifiedName().getFullQualifiedNameAsString()
|
||||
.contentEquals(EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName().getFullQualifiedNameAsString())) {
|
||||
scenario.log("Found Edm.DateTimeOffset field " + propertyName + " in the " + resourceName + " resource!\n");
|
||||
|
||||
if (propertyName.toLowerCase(Locale.ROOT).contains("modificationtimestamp")) {
|
||||
timestampCandidateFields.add(0, propertyName);
|
||||
} else {
|
||||
timestampCandidateFields.add (propertyName);
|
||||
}
|
||||
timestampCandidateFields.add(propertyName);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
LOG.error(ex);
|
||||
|
@ -303,18 +252,14 @@ public class DataAvailability {
|
|||
scenario.log("Keys found: " + keyFields.stream().map(EdmKeyPropertyRef::getName).collect(Collectors.joining(", ")));
|
||||
|
||||
//loop and fetch records as long as items are available and we haven't reached our target count yet
|
||||
//TODO: switch to OData Fetch API
|
||||
while (hasRecords.get() && recordsProcessed < targetRecordCount) {
|
||||
|
||||
if (hasStandardTimestampField) {
|
||||
timestampField.set(MODIFICATION_TIMESTAMP_FIELD);
|
||||
} else if (timestampCandidateFields.size() > 0 && lastTimestampCandidateIndex < timestampCandidateFields.size()) {
|
||||
timestampField.set(timestampCandidateFields.get(lastTimestampCandidateIndex++));
|
||||
} else {
|
||||
if (recordsProcessed == 0) {
|
||||
scenario.log(getDefaultErrorMessage("Could not find a suitable timestamp field in the "
|
||||
+ resourceName + " resource to sample with..."));
|
||||
}
|
||||
scenario.log(getDefaultErrorMessage("Could not find a suitable timestamp field in the "
|
||||
+ resourceName + " resource to sample with..."));
|
||||
|
||||
//skip this resource since no suitable fields were found
|
||||
break;
|
||||
|
@ -331,19 +276,19 @@ public class DataAvailability {
|
|||
|
||||
// retries. sometimes requests can time out and fail and we don't want to stop sampling
|
||||
// immediately, but retry a couple of times before we bail
|
||||
if (recordsProcessed == 0 || transportWrapper.get().getResponseData() == null) {
|
||||
if (recordsProcessed == 0 && transportWrapper.get().getResponseData() == null) {
|
||||
//only count retries if we're constantly making requests and not getting anything
|
||||
numTimestampRetries += 1;
|
||||
numRetries += 1;
|
||||
} else {
|
||||
numTimestampRetries = 0;
|
||||
numRetries = 0;
|
||||
}
|
||||
|
||||
if (numTimestampRetries >= MAX_TIMESTAMP_RETRIES) {
|
||||
if (numRetries >= MAX_RETRIES) {
|
||||
if (timestampCandidateFields.size() > 0 && (lastTimestampCandidateIndex < timestampCandidateFields.size())) {
|
||||
LOG.info("Trying next candidate timestamp field: " + timestampCandidateFields.get(lastTimestampCandidateIndex));
|
||||
numTimestampRetries = 0;
|
||||
numRetries = 0;
|
||||
} else {
|
||||
LOG.info("Could not fetch records from the " + resourceName + " resource after " + MAX_TIMESTAMP_RETRIES
|
||||
LOG.info("Could not fetch records from the " + resourceName + " resource after " + MAX_RETRIES
|
||||
+ " tries from the given URL: " + requestUri);
|
||||
break;
|
||||
}
|
||||
|
@ -359,7 +304,6 @@ public class DataAvailability {
|
|||
}
|
||||
break;
|
||||
} else {
|
||||
//TODO: add pluralizer
|
||||
LOG.info("Time taken: "
|
||||
+ (transportWrapper.get().getElapsedTimeMillis() >= 1000 ? (transportWrapper.get().getElapsedTimeMillis() / 1000) + "s"
|
||||
: transportWrapper.get().getElapsedTimeMillis() + "ms"));
|
||||
|
@ -367,22 +311,12 @@ public class DataAvailability {
|
|||
try {
|
||||
payloadSample.get().setResponseSizeBytes(transportWrapper.get().getResponseData().getBytes().length);
|
||||
|
||||
lastEntityCollectionResWrap.set(entityCollectionResWrap.get());
|
||||
|
||||
entityCollectionResWrap.set(container.get().getCommander().getClient()
|
||||
.getDeserializer(ContentType.APPLICATION_JSON)
|
||||
.toEntitySet(new ByteArrayInputStream(transportWrapper.get().getResponseData().getBytes())));
|
||||
|
||||
if (lastEntityCollectionResWrap.get() != null && entityCollectionResWrap.get() != null
|
||||
&& lastEntityCollectionResWrap.get().getPayload().hashCode() == entityCollectionResWrap.get().getPayload().hashCode()) {
|
||||
//if the payload is the same between pages, we need to skip it and subtract some more time
|
||||
LOG.info("Found identical pages. Subtracting one day from the time...");
|
||||
lastFetchedDate.set(lastFetchedDate.get().minus(1, ChronoUnit.DAYS));
|
||||
break;
|
||||
} else if (entityCollectionResWrap.get().getPayload().getEntities().size() > 0) {
|
||||
|
||||
LOG.debug("Hashing " + resourceName + " payload values...");
|
||||
|
||||
if (entityCollectionResWrap.get().getPayload().getEntities().size() > 0) {
|
||||
LOG.info("Hashing " + resourceName + " payload values...");
|
||||
entityCollectionResWrap.get().getPayload().getEntities().forEach(entity -> {
|
||||
encodedSample.set(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
|
||||
|
@ -395,7 +329,7 @@ public class DataAvailability {
|
|||
|| property.isGeospatial() && property.asGeospatial() != null)
|
||||
? property.getValue().toString() : null;
|
||||
|
||||
if (DEBUG_MODE_ENABLED) {
|
||||
if (DEBUG) {
|
||||
if (property.isCollection() && property.asCollection().size() > 0) {
|
||||
LOG.info("Found Collection for field: " + property.getName() + ", value: " + property.asCollection());
|
||||
}
|
||||
|
@ -413,49 +347,8 @@ public class DataAvailability {
|
|||
}
|
||||
}
|
||||
|
||||
//if the field is a lookup field, collect the frequency of each unique set of enumerations for the field
|
||||
if (property.isEnum() || (container.get().getDDCacheProcessor().getStandardFieldCache().containsKey(resourceName)
|
||||
&& container.get().getDDCacheProcessor().getStandardFieldCache().get(resourceName).containsKey(property.getName()))) {
|
||||
ReferenceStandardField standardField = container.get().getDDCacheProcessor().getStandardFieldCache().get(resourceName).get(property.getName());
|
||||
|
||||
//if the field is declared as an OData Edm.EnumType or String List, Single or Multi in the DD, then collect its value
|
||||
if (property.isEnum() || (standardField.getSimpleDataType().contentEquals(STRING_LIST_SINGLE)
|
||||
|| standardField.getSimpleDataType().contentEquals(STRING_LIST_MULTI))) {
|
||||
|
||||
ArrayList<String> values = new ArrayList<>();
|
||||
|
||||
if (value == null) {
|
||||
values.add("NULL_VALUE");
|
||||
} else if (value.contentEquals("[]")) {
|
||||
values.add("EMPTY_LIST");
|
||||
} else {
|
||||
if (property.isCollection()) {
|
||||
if (property.asCollection().size() > 0) {
|
||||
property.asCollection().forEach(v -> values.add(v.toString()));
|
||||
} else {
|
||||
values.add("EMPTY_LIST");
|
||||
}
|
||||
} else {
|
||||
if (value.contains(",")) {
|
||||
values.addAll(Arrays.asList(value.split(",")));
|
||||
} else {
|
||||
values.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
values.forEach(v -> {
|
||||
LookupValue binder = new LookupValue(resourceName, property.getName(), v);
|
||||
resourceFieldLookupTallies.get().putIfAbsent(binder, 0);
|
||||
|
||||
//now increment the lookup value
|
||||
resourceFieldLookupTallies.get().put(binder, resourceFieldLookupTallies.get().get(binder) + 1);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//turn off hashing when DEBUG is true
|
||||
if (!DEBUG_MODE_ENABLED && value != null) {
|
||||
if (!DEBUG && value != null) {
|
||||
if (!(property.getName().contentEquals(timestampField.get())
|
||||
|| property.getName().equals(POSTAL_CODE_FIELD)
|
||||
|| keyFields.stream().reduce(true, (acc, f) -> acc && f.getName().contentEquals(property.getName()), Boolean::logicalAnd))) {
|
||||
|
@ -474,14 +367,17 @@ public class DataAvailability {
|
|||
});
|
||||
payloadSample.get().addSample(encodedSample.get());
|
||||
});
|
||||
LOG.debug("Values encoded!");
|
||||
LOG.info("Values encoded!");
|
||||
|
||||
recordsProcessed += entityCollectionResWrap.get().getPayload().getEntities().size();
|
||||
LOG.info("Records processed: " + recordsProcessed + ". Target record count: " + targetRecordCount + "\n");
|
||||
|
||||
payloadSample.get().setResponseTimeMillis(transportWrapper.get().getElapsedTimeMillis());
|
||||
|
||||
if (AB_TESTING_MODE_ENABLED && encodedResultsDirectoryName != null) {
|
||||
if (encodedResultsDirectoryName != null) {
|
||||
payloadSample.get().setPayloadFields(standardFieldCache.get().get(resourceName).stream()
|
||||
.map(ReferenceStandardField::getStandardName).collect(Collectors.toList()));
|
||||
|
||||
//serialize results once resource processing has finished
|
||||
Utils.createFile(String.format(SAMPLES_DIRECTORY_TEMPLATE, encodedResultsDirectoryName),
|
||||
resourceName + "-" + Utils.getTimestamp() + ".json",
|
||||
|
@ -490,35 +386,44 @@ public class DataAvailability {
|
|||
|
||||
payloadSamples.get().add(payloadSample.get());
|
||||
} else {
|
||||
scenario.log("All available records fetched! Unique Records Processed: " + recordsProcessed);
|
||||
scenario.log("All available records fetched! Total: " + recordsProcessed);
|
||||
hasRecords.set(false);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
scenario.log("Error in fetchAndProcessRecords: " + getDefaultErrorMessage(ex.toString()));
|
||||
scenario.log("Skipping sample...");
|
||||
|
||||
//try adding some time to get unstuck, if possible
|
||||
lastFetchedDate.set(lastFetchedDate.get().plus(1, ChronoUnit.DAYS));
|
||||
lastFetchedDate.set(lastFetchedDate.get().minus(1, ChronoUnit.WEEKS));
|
||||
}
|
||||
}
|
||||
}
|
||||
return payloadSamples.get().parallelStream().collect(Collectors.toList());
|
||||
return payloadSamples.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* fetches and processes records in cases where only sampling is required and encoding is not necessary
|
||||
*
|
||||
* @param resourceName the resource name to sample from
|
||||
* @param targetRecordCount the target record count for the resource (will stop if the end of the records is reached)
|
||||
* @return a list of PayloadSample items
|
||||
*/
|
||||
List<PayloadSample> fetchAndProcessRecords(String resourceName, int targetRecordCount) {
|
||||
return fetchAndProcessRecords(resourceName, targetRecordCount, null);
|
||||
}
|
||||
|
||||
|
||||
/*==================================== TESTS START HERE ====================================*/
|
||||
|
||||
@Given("that metadata have been retrieved from the server and validated")
|
||||
public void thatValidMetadataHaveBeenRetrievedFromTheServerAndValidated() {
|
||||
|
||||
@Given("that valid metadata have been requested from the server")
|
||||
public void thatValidMetadataHaveBeenRequestedFromTheServer() {
|
||||
try {
|
||||
if (!container.get().hasValidMetadata()) {
|
||||
if (container.get().hasValidMetadata()) {
|
||||
if (standardFieldCache.get().size() == 0) {
|
||||
standardFieldCache.get().putAll(DDCacheProcessor.buildCache());
|
||||
}
|
||||
} else {
|
||||
failAndExitWithErrorMessage("Valid metadata was not retrieved from the server. Exiting!", scenario);
|
||||
}
|
||||
|
||||
if (container.get().getDDCacheProcessor() == null) {
|
||||
failAndExitWithErrorMessage("Could not initialize standard field cache!", scenario);
|
||||
}
|
||||
|
||||
} catch (Exception ex) {
|
||||
failAndExitWithErrorMessage(ex.toString(), scenario);
|
||||
}
|
||||
|
@ -526,9 +431,11 @@ public class DataAvailability {
|
|||
|
||||
@And("the metadata contains RESO Standard Resources")
|
||||
public void theMetadataContainsRESOStandardResources() {
|
||||
Set<String> resources = container.get().getEdm().getSchemas().stream().flatMap(schema ->
|
||||
schema.getEntityTypes().stream().map(EdmNamed::getName))
|
||||
.collect(Collectors.toSet());
|
||||
Set<String> resources = container.get().getEdm().getSchemas().stream().map(schema ->
|
||||
schema.getEntityTypes().stream().map(EdmNamed::getName)
|
||||
.collect(Collectors.toSet()))
|
||||
.flatMap(Collection::stream)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
standardResources.set(resources.stream()
|
||||
.filter(DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCES::contains).collect(Collectors.toSet()));
|
||||
|
@ -565,18 +472,27 @@ public class DataAvailability {
|
|||
}
|
||||
}
|
||||
|
||||
@Then("up to {int} records are sampled from each resource with payload samples stored in {string}")
|
||||
public void upToRecordsAreSampledFromEachResourceWithPayloadSamplesStoredIn(int numRecords, String resultsDirectoryName) {
|
||||
@Then("up to {int} records are sampled from each resource with {string} payload samples stored in {string}")
|
||||
public void upToRecordsAreSampledFromEachResourceWithPayloadSamplesStoredIn(int numRecords, String payloadName, String resultsDirectoryName) {
|
||||
assertNotNull(getDefaultErrorMessage("resultsDirectoryName MUST be present!"), resultsDirectoryName);
|
||||
|
||||
if (!hasStandardResources.get()) {
|
||||
scenario.log("No RESO Standard Resources to sample!");
|
||||
assumeTrue(true);
|
||||
} else {
|
||||
Set<String> payloadResources = new LinkedHashSet<>();
|
||||
standardFieldCache.get().forEach((resourceName, fieldList) -> {
|
||||
if (!payloadResources.contains(resourceName) && fieldList.stream().anyMatch(field -> field.getPayloads().contains(payloadName))) {
|
||||
payloadResources.add(resourceName);
|
||||
}
|
||||
});
|
||||
|
||||
standardResources.get().forEach(resourceName -> {
|
||||
resourceCounts.get().put(resourceName, getResourceCount(resourceName));
|
||||
resourcePayloadSampleMap.get().putIfAbsent(resourceName, Collections.synchronizedList(new LinkedList<>()));
|
||||
//only save results to the directory if the resources are part of the given payload
|
||||
resourcePayloadSampleMap.get().put(resourceName, fetchAndProcessRecords(resourceName, numRecords, resultsDirectoryName));
|
||||
resourcePayloadSampleMap.get().put(resourceName,
|
||||
fetchAndProcessRecords(resourceName, numRecords, payloadResources.contains(resourceName) ? resultsDirectoryName : null));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -595,9 +511,21 @@ public class DataAvailability {
|
|||
}
|
||||
}
|
||||
|
||||
@Given("samples exist in {string} in the build directory")
|
||||
public void samplesExistInInTheBuildDirectory(String resultsDirectory) {
|
||||
scenario.log("Samples exist in {string} in the build directory!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@And("a RESOScript file was provided for the IDX User")
|
||||
public void aRESOScriptFileWasProvidedForTheIDXUser() {
|
||||
scenario.log("!!TODO!! A RESOScript file was provided for the IDX User!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@When("samples from {string} are fetched as the representative user for each resource in the {string} payload")
|
||||
public void samplesFromAreFetchedAsTheRepresentativeUserForEachResourceInThePayload(String resultsDirectory, String payloadName) {
|
||||
File f = new File(CERTIFICATION_PATH + File.separator + resultsDirectory);
|
||||
File f = new File(SAMPLES_DIRECTORY_ROOT + File.separator + resultsDirectory);
|
||||
AtomicReference<PayloadSample> payloadSample = new AtomicReference<>();
|
||||
|
||||
if (f.list() == null) return;
|
||||
|
@ -616,23 +544,32 @@ public class DataAvailability {
|
|||
});
|
||||
}
|
||||
|
||||
@Then("each result MUST contain the string version of the key and the following fields")
|
||||
public void eachResultMUSTContainTheStringVersionOfTheKeyAndTheFollowingFields(List<String> requiredFields) {
|
||||
scenario.log("!!TODO!! Each result MUST contain the string version of the key and the following fields!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@And("the {string} payload field values MUST match those in the samples")
|
||||
public void thePayloadFieldValuesMUSTMatchThoseInTheSamples(String arg0) {
|
||||
scenario.log("!!TODO!! The {string} payload field values MUST match those in the samples!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@Given("standard and local resources have been processed")
|
||||
public void standardAndLocalResourcesHaveBeenProcessed() {
|
||||
scenario.log("!!TODO!! Standard and local resources have been processed!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@Then("a data availability report is created in {string}")
|
||||
public void aDataAvailabilityReportIsCreatedIn(String reportFileName) {
|
||||
if (resourcePayloadSampleMap.get() == null) {
|
||||
LOG.info("No resource payload samples found! Skipping...");
|
||||
assumeTrue(true);
|
||||
}
|
||||
try {
|
||||
LOG.info("\n\nCreating data availability report!");
|
||||
createDataAvailabilityReport(resourcePayloadSampleMap.get(), reportFileName, resourceCounts.get(), resourceFieldLookupTallies.get());
|
||||
} catch (Exception ex) {
|
||||
final String errorMsg = "Data Availability Report could not be created.\n" + ex;
|
||||
if (STRICT_MODE_ENABLED) {
|
||||
failAndExitWithErrorMessage(errorMsg, scenario);
|
||||
} else {
|
||||
LOG.error(errorMsg);
|
||||
}
|
||||
}
|
||||
LOG.info("\n\nCreating data availability report!");
|
||||
createDataAvailabilityReport(resourcePayloadSampleMap.get(), reportFileName, resourceCounts.get());
|
||||
}
|
||||
|
||||
@And("{string} has been created in the build directory")
|
|
@ -1,75 +0,0 @@
|
|||
package org.reso.certification.stepdefs;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import io.cucumber.java.Before;
|
||||
import io.cucumber.java.Scenario;
|
||||
import io.cucumber.java.en.And;
|
||||
import io.cucumber.java.en.Given;
|
||||
import io.cucumber.java.en.Then;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.reso.certification.containers.WebAPITestContainer;
|
||||
import org.reso.models.Settings;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static org.junit.Assume.assumeTrue;
|
||||
|
||||
public class IdxPayload {
|
||||
private static final Logger LOG = LogManager.getLogger(IdxPayload.class);
|
||||
private static final String PATH_TO_RESOSCRIPT_KEY = "pathToRESOScript";
|
||||
private static Scenario scenario;
|
||||
|
||||
private final static AtomicReference<WebAPITestContainer> container = new AtomicReference<>();
|
||||
|
||||
@Inject
|
||||
public IdxPayload(WebAPITestContainer c) {
|
||||
container.set(c);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void beforeStep(Scenario scenario) {
|
||||
final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_KEY, null);
|
||||
|
||||
if (pathToRESOScript == null) return;
|
||||
|
||||
IdxPayload.scenario = scenario;
|
||||
|
||||
if (!container.get().getIsInitialized()) {
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_KEY))));
|
||||
container.get().initialize();
|
||||
}
|
||||
}
|
||||
|
||||
@Given("samples exist in {string} in the build directory")
|
||||
public void samplesExistInInTheBuildDirectory(String resultsDirectory) {
|
||||
scenario.log("Samples exist in {string} in the build directory!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@And("a RESOScript file was provided for the IDX User")
|
||||
public void aRESOScriptFileWasProvidedForTheIDXUser() {
|
||||
scenario.log("!!TODO!! A RESOScript file was provided for the IDX User!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@Then("each result MUST contain the string version of the key and the following fields")
|
||||
public void eachResultMUSTContainTheStringVersionOfTheKeyAndTheFollowingFields(List<String> requiredFields) {
|
||||
scenario.log("!!TODO!! Each result MUST contain the string version of the key and the following fields!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@And("the {string} payload field values MUST match those in the samples")
|
||||
public void thePayloadFieldValuesMUSTMatchThoseInTheSamples(String arg0) {
|
||||
scenario.log("!!TODO!! The {string} payload field values MUST match those in the samples!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
|
||||
@Given("standard and local resources have been processed")
|
||||
public void standardAndLocalResourcesHaveBeenProcessed() {
|
||||
scenario.log("Testing complete!");
|
||||
assumeTrue(true);
|
||||
}
|
||||
}
|
|
@ -1,206 +0,0 @@
|
|||
package org.reso.certification.stepdefs;
|
||||
|
||||
import com.google.gson.JsonObject;
|
||||
import com.google.inject.Inject;
|
||||
import io.cucumber.java.Before;
|
||||
import io.cucumber.java.Scenario;
|
||||
import io.cucumber.java.en.And;
|
||||
import io.cucumber.java.en.Then;
|
||||
import io.cucumber.java.en.When;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.api.domain.ClientEntity;
|
||||
import org.apache.olingo.commons.api.edm.*;
|
||||
import org.reso.certification.containers.WebAPITestContainer;
|
||||
import org.reso.commander.common.ODataFetchApi;
|
||||
import org.reso.commander.common.ODataUtils;
|
||||
import org.reso.commander.common.Utils;
|
||||
import org.reso.models.ReferenceStandardField;
|
||||
import org.reso.models.Settings;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.junit.Assume.assumeTrue;
|
||||
import static org.reso.certification.stepdefs.DataAvailability.CERTIFICATION_RESULTS_PATH;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
import static org.reso.commander.common.ODataUtils.*;
|
||||
import static org.reso.commander.common.TestUtils.failAndExitWithErrorMessage;
|
||||
import static org.reso.commander.common.Utils.wrapColumns;
|
||||
|
||||
public class LookupResource {
|
||||
private static final Logger LOG = LogManager.getLogger(LookupResource.class);
|
||||
private static Scenario scenario;
|
||||
private final static AtomicReference<WebAPITestContainer> container = new AtomicReference<>();
|
||||
private static final String PATH_TO_RESOSCRIPT_ARG = "pathToRESOScript";
|
||||
private static final AtomicReference<Map<String, List<ClientEntity>>> lookupResourceCache = new AtomicReference<>(new LinkedHashMap<>());
|
||||
private static final String LOOKUP_RESOURCE_LOOKUP_METADATA_FILE_NAME = "lookup-resource-lookup-metadata.json";
|
||||
|
||||
//TODO: only output file in DEBUG mode
|
||||
//private static final String LOOKUP_RESOURCE_FIELD_METADATA_FILE_NAME = "lookup-resource-field-metadata.json";
|
||||
|
||||
private static final String LOOKUP_NAME_FIELD = "LookupName";
|
||||
|
||||
@Inject
|
||||
public LookupResource(WebAPITestContainer c) {
|
||||
if (container.get() == null) {
|
||||
container.set(c);
|
||||
}
|
||||
}
|
||||
|
||||
@Before
|
||||
public void beforeStep(Scenario scenario) {
|
||||
final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_ARG, null);
|
||||
|
||||
LookupResource.scenario = scenario;
|
||||
|
||||
if (pathToRESOScript != null && !container.get().getIsInitialized()) {
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_ARG))));
|
||||
container.get().initialize();
|
||||
}
|
||||
}
|
||||
|
||||
@Then("valid data is replicated from the {string} Resource")
|
||||
public void validDataIsReplicatedFromTheResource(String resourceName) {
|
||||
if (lookupResourceCache.get() == null) {
|
||||
failAndExitWithErrorMessage("Could not replicate data from resource: " + resourceName, scenario);
|
||||
}
|
||||
|
||||
if (!lookupResourceCache.get().containsKey(resourceName)) {
|
||||
lookupResourceCache.get().put(resourceName, new ArrayList<>());
|
||||
try {
|
||||
final List<ClientEntity> results = ODataFetchApi.replicateDataFromResource(container.get(), resourceName,
|
||||
ODataFetchApi.WebApiReplicationStrategy.TopAndSkip);
|
||||
|
||||
if (results.size() == 0) {
|
||||
failAndExitWithErrorMessage("Could not replicate data from the " + resourceName + " resource!", scenario);
|
||||
}
|
||||
|
||||
lookupResourceCache.get().get(resourceName).addAll(results);
|
||||
|
||||
Utils.createFile(CERTIFICATION_RESULTS_PATH, LOOKUP_RESOURCE_LOOKUP_METADATA_FILE_NAME,
|
||||
ODataUtils.serializeLookupMetadata(container.get().getCommander().getClient(), results).toString());
|
||||
|
||||
} catch (Exception exception) {
|
||||
failAndExitWithErrorMessage("Unable to retrieve data from the Lookup Resource! " + exception.getMessage(), scenario);
|
||||
}
|
||||
} else {
|
||||
LOG.debug("Using cached data from: " + resourceName);
|
||||
}
|
||||
}
|
||||
|
||||
@Then("{string} Resource data and metadata MUST contain the following fields")
|
||||
public void resourceDataAndMetadataMUSTContainTheFollowingFields(String resourceName, List<String> fields) {
|
||||
if (lookupResourceCache.get() == null || lookupResourceCache.get().get(resourceName) == null) {
|
||||
failAndExitWithErrorMessage("Entity Cache could not be created for the " + resourceName + " resource!", scenario);
|
||||
}
|
||||
|
||||
final String mandatoryFields = "'" + String.join(", ", fields) + "'";
|
||||
|
||||
//check metadata
|
||||
scenario.log("Ensuring mandatory fields " + mandatoryFields + " are present in server metadata");
|
||||
assertTrue("The fields " + mandatoryFields + " MUST be present in the server metadata for the " + resourceName + " Resource!",
|
||||
container.get().getFieldMap(resourceName) != null && container.get().getFieldMap(resourceName).keySet().containsAll(fields));
|
||||
|
||||
//check resource data cache
|
||||
scenario.log("Ensuring mandatory fields " + mandatoryFields + " are present in " + resourceName + " Resource data");
|
||||
lookupResourceCache.get().get(resourceName).forEach(clientEntity -> fields.forEach(fieldName -> {
|
||||
if (clientEntity.getProperty(fieldName) == null || clientEntity.getProperty(fieldName).getValue() == null) {
|
||||
failAndExitWithErrorMessage("Missing required field in the " + resourceName + " Resource!", scenario);
|
||||
}
|
||||
}));
|
||||
scenario.log("All mandatory fields present!");
|
||||
}
|
||||
|
||||
@When("the {string} Resource exists in the metadata")
|
||||
public void theResourceExistsInTheMetadata(String resourceName) {
|
||||
boolean hasResource = container.get().getFieldMap(resourceName) != null;
|
||||
assumeTrue("The " + resourceName + " Resource was not found in the default entity container in the metadata!", hasResource);
|
||||
scenario.log("Found " + resourceName + " Resource!");
|
||||
}
|
||||
|
||||
/*
|
||||
<!-- OData annotation for String List, Single field -->
|
||||
<Property Name="OfficeCountyOrParish" Type="Edm.String">
|
||||
<Annotation Term="RESO.OData.Metadata.LookupName" String="CountyOrParish" />
|
||||
</Property>
|
||||
|
||||
<!-- OData annotation for String List, Multi field -->
|
||||
<Property Name="ExteriorFeatures" Type="Collection(Edm.String)">
|
||||
<Annotation Term="RESO.OData.Metadata.LookupName" String="ExteriorFeatures" />
|
||||
</Property>
|
||||
*/
|
||||
@Then("RESO Lookups using String or String Collection data types MUST have the annotation {string}")
|
||||
public void resoLookupsUsingStringOrStringCollectionDataTypesMUSTHaveTheAnnotation(String annotationTerm) {
|
||||
if (container.get().getDDCacheProcessor() == null || container.get().getDDCacheProcessor().getStandardFieldCache() == null) {
|
||||
failAndExitWithErrorMessage("Could not access standard field cache. Check to make sure metadata requests have succeeded.", scenario);
|
||||
}
|
||||
|
||||
final Map<String, Map<String, ReferenceStandardField>> standardLookupFieldCache =
|
||||
container.get().getDDCacheProcessor().getStandardFieldCache();
|
||||
|
||||
final Set<ReferenceStandardField> lookupFields =
|
||||
standardLookupFieldCache.keySet().stream().flatMap(resourceName ->
|
||||
standardLookupFieldCache.get(resourceName).values().stream()
|
||||
.filter(referenceStandardField -> referenceStandardField.getLookupName() != null)).collect(Collectors.toSet());
|
||||
|
||||
final ArrayList<String> fieldsWithMissingAnnotations = new ArrayList<>();
|
||||
lookupFields.forEach(referenceStandardField -> {
|
||||
LOG.debug("Standard Field: { "
|
||||
+ "resourceName: \"" + referenceStandardField.getParentResourceName() + "\""
|
||||
+ ", standardName: \"" + referenceStandardField.getStandardName() + "\""
|
||||
+ ", lookupName: \"" + referenceStandardField.getLookupName() + "\" }");
|
||||
|
||||
EdmElement foundElement = getEdmElement(container.get().getEdm(), referenceStandardField.getParentResourceName(), referenceStandardField.getStandardName());
|
||||
final boolean isStringDataType = foundElement != null &&
|
||||
foundElement.getType().getFullQualifiedName().toString().contentEquals(EdmPrimitiveTypeKind.String.getFullQualifiedName().toString());
|
||||
|
||||
|
||||
if (foundElement != null && isStringDataType && !hasAnnotationTerm(foundElement, annotationTerm)) {
|
||||
fieldsWithMissingAnnotations.add(referenceStandardField.getStandardName());
|
||||
}
|
||||
});
|
||||
|
||||
if (fieldsWithMissingAnnotations.size() > 0) {
|
||||
final String msg = "The following fields are missing the required '" + annotationTerm + "' annotation: "
|
||||
+ wrapColumns(String.join(", ", fieldsWithMissingAnnotations)) + "\n";
|
||||
|
||||
LOG.error(getDefaultErrorMessage(msg));
|
||||
fail(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@And("fields with the annotation term {string} MUST have a LookupName in the Lookup Resource")
|
||||
public void fieldsWithTheAnnotationTermMUSTHaveALookupNameInTheLookupResource(String annotationTerm) {
|
||||
//every item annotated with the annotation should have a corresponding element in the Lookup set
|
||||
final Map<String, Set<EdmElement>> filteredResourceFieldMap =
|
||||
ODataUtils.getEdmElementsWithAnnotation(container.get().getEdm(), annotationTerm);
|
||||
|
||||
final Set<String> lookupNamesFromLookupData = lookupResourceCache.get().values().parallelStream()
|
||||
.flatMap(Collection::parallelStream)
|
||||
.map(clientEntity -> clientEntity.getProperty(LOOKUP_NAME_FIELD).getValue().toString())
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
final Set<String> annotatedLookupNames = filteredResourceFieldMap.values().parallelStream()
|
||||
.flatMap(Collection::parallelStream)
|
||||
.map(edmElement -> getAnnotationValue(edmElement, annotationTerm))
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
final Set<String> missingLookupNames = Utils.getDifference(annotatedLookupNames, lookupNamesFromLookupData);
|
||||
|
||||
if (missingLookupNames.size() > 0) {
|
||||
final String msg = "The following fields have LookupName annotations but are missing from the Lookup Resource: "
|
||||
+ wrapColumns(String.join(", ", missingLookupNames)) + "\n";
|
||||
|
||||
LOG.error(getDefaultErrorMessage(msg));
|
||||
fail(msg);
|
||||
} else {
|
||||
scenario.log("Found all annotated LookupName elements in the Lookup data. Unique count: " + annotatedLookupNames.size());
|
||||
scenario.log("LookupNames: " + wrapColumns(String.join(", ", annotatedLookupNames)));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -3,8 +3,6 @@ package org.reso.certification.stepdefs;
|
|||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.fasterxml.jackson.databind.node.POJONode;
|
||||
import com.google.inject.Inject;
|
||||
import io.cucumber.java.Before;
|
||||
import io.cucumber.java.Scenario;
|
||||
import io.cucumber.java8.En;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
|
@ -36,6 +34,7 @@ import java.util.*;
|
|||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static io.restassured.path.json.JsonPath.from;
|
||||
import static org.junit.Assert.*;
|
||||
|
@ -48,27 +47,24 @@ import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
|||
import static org.reso.commander.common.TestUtils.DateParts.FRACTIONAL;
|
||||
import static org.reso.commander.common.TestUtils.*;
|
||||
import static org.reso.commander.common.TestUtils.Operators.*;
|
||||
import static org.reso.models.Request.loadFromRESOScript;
|
||||
|
||||
/**
|
||||
* Contains the glue code for Web API Core 2.0.0 Certification as well as previous Platinum tests,
|
||||
* Contains the glue code for Web API Core 1.0.2 Certification as well as previous Platinum tests,
|
||||
* which will converted to standalone endorsements, where applicable.
|
||||
*/
|
||||
public class WebAPIServerCore implements En {
|
||||
private static final Logger LOG = LogManager.getLogger(WebAPIServerCore.class);
|
||||
class WebAPIServer implements En {
|
||||
private static final Logger LOG = LogManager.getLogger(WebAPIServer.class);
|
||||
private static final String
|
||||
SHOW_RESPONSES_PARAM = "showResponses",
|
||||
USE_STRING_ENUMS_PARAM = "useStringEnums",
|
||||
USE_COLLECTIONS_PARAM = "useCollections";
|
||||
|
||||
private static final String PATH_TO_RESOSCRIPT_KEY = "pathToRESOScript";
|
||||
private static Scenario scenario;
|
||||
|
||||
//extract any params here
|
||||
private static final boolean showResponses = Boolean.parseBoolean(System.getProperty(SHOW_RESPONSES_PARAM));
|
||||
// boolean used for indicating whether Web API tests are using collections of enums or not
|
||||
// defaults to useCollections=true since IsFlags is being deprecated
|
||||
private static final boolean useCollections = Boolean.parseBoolean(System.getProperty(USE_COLLECTIONS_PARAM, "true"));
|
||||
private static final boolean useStringEnums = Boolean.parseBoolean(System.getProperty(USE_STRING_ENUMS_PARAM, "false"));
|
||||
|
||||
|
||||
/*
|
||||
* Used to store a static instance of the WebAPITestContainer class
|
||||
|
@ -77,55 +73,44 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
//TODO: change this to allow passing of a given set of testing queries
|
||||
//for now this assumes the requests will always be Web API Core Server test queries, but could be $expand, for instance
|
||||
//private static final String WEB_API_CORE_REFERENCE_REQUESTS = "reference-web-api-core-requests.xml";
|
||||
|
||||
@Before
|
||||
public void beforeStep(Scenario scenario) {
|
||||
final String pathToRESOScript = System.getProperty(PATH_TO_RESOSCRIPT_KEY, null);
|
||||
|
||||
if (pathToRESOScript == null) return;
|
||||
|
||||
WebAPIServerCore.scenario = scenario;
|
||||
|
||||
if (!container.get().getIsInitialized()) {
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty(PATH_TO_RESOSCRIPT_KEY))));
|
||||
container.get().initialize();
|
||||
}
|
||||
}
|
||||
private static final String WEB_API_CORE_REFERENCE_REQUESTS = "reference-web-api-core-requests.xml";
|
||||
|
||||
/**
|
||||
* Entry point to the Web API Server tests
|
||||
*/
|
||||
@Inject
|
||||
public WebAPIServerCore(WebAPITestContainer c) {
|
||||
public WebAPIServer(WebAPITestContainer c) {
|
||||
container.set(c);
|
||||
|
||||
container.get().setShowResponses(showResponses);
|
||||
getTestContainer().setShowResponses(showResponses);
|
||||
|
||||
runBackground();
|
||||
|
||||
And("^the XML Metadata returned by the server contains Edm metadata$", () -> {
|
||||
container.get().setEdm(Commander.deserializeEdm(container.get().getXMLResponseData(), container.get().getCommander().getClient()));
|
||||
assertNotNull(getDefaultErrorMessage("Edm deserialized to an empty object!"), container.get().getEdm());
|
||||
getTestContainer().setEdm(
|
||||
Commander.deserializeEdm(getTestContainer().getXMLResponseData(), getTestContainer().getCommander().getClient())
|
||||
);
|
||||
assertNotNull(getDefaultErrorMessage("Edm deserialized to an empty object!"), getTestContainer().getEdm());
|
||||
});
|
||||
|
||||
/*
|
||||
* Edm Metadata Validator
|
||||
*/
|
||||
And("^the Edm metadata returned by the server are valid$", () ->
|
||||
assertTrue("Edm Metadata at the given service root is not valid! " + container.get().getServiceRoot(), container.get().getIsValidEdm()));
|
||||
assertTrue("Edm Metadata at the given service root is not valid! " + getTestContainer().getServiceRoot(),
|
||||
getTestContainer().getIsValidEdm()));
|
||||
|
||||
/*
|
||||
* XML Metadata Validator
|
||||
*/
|
||||
And("^the XML Metadata returned by the server are valid$", () -> {
|
||||
if (!container.get().getHaveMetadataBeenRequested()) {
|
||||
if (!getTestContainer().getHaveMetadataBeenRequested()) {
|
||||
//will lazy-load metadata from the server if not yet requested
|
||||
container.get().getXMLMetadata();
|
||||
getTestContainer().getXMLMetadata();
|
||||
}
|
||||
container.get().validateMetadata();
|
||||
assertTrue("XML Metadata at the given service root is not valid! " + container.get().getServiceRoot(),
|
||||
container.get().getIsValidXMLMetadata());
|
||||
getTestContainer().validateMetadata();
|
||||
assertTrue("XML Metadata at the given service root is not valid! " + getTestContainer().getServiceRoot(),
|
||||
getTestContainer().getIsValidXMLMetadata());
|
||||
});
|
||||
|
||||
/*
|
||||
|
@ -133,9 +118,9 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^the provided \"([^\"]*)\" is returned in \"([^\"]*)\"$", (String parameterUniqueIdValue, String parameterUniqueId) -> {
|
||||
try {
|
||||
String expectedValueAsString = Settings.resolveParametersString(parameterUniqueIdValue, container.get().getSettings());
|
||||
Object resolvedValue = from(container.get().getResponseData())
|
||||
.get(Settings.resolveParametersString(parameterUniqueId, container.get().getSettings()));
|
||||
String expectedValueAsString = Settings.resolveParametersString(parameterUniqueIdValue, getTestContainer().getSettings());
|
||||
Object resolvedValue = from(getTestContainer().getResponseData())
|
||||
.get(Settings.resolveParametersString(parameterUniqueId, getTestContainer().getSettings()));
|
||||
|
||||
//both of the inputs should be present
|
||||
assertNotNull(expectedValueAsString);
|
||||
|
@ -171,15 +156,15 @@ public class WebAPIServerCore implements En {
|
|||
double fill;
|
||||
|
||||
assertNotNull(getDefaultErrorMessage("no fields found within the given $select list. Check request Id:",
|
||||
container.get().getRequest().getRequestId(), "in your .resoscript file!"), container.get().getSelectList());
|
||||
getTestContainer().getRequest().getRequestId(), "in your .resoscript file!"), getTestContainer().getSelectList());
|
||||
|
||||
LOG.info(QueryOption.SELECT + " list is: " + container.get().getSelectList());
|
||||
LOG.info(QueryOption.SELECT + " list is: " + getTestContainer().getSelectList());
|
||||
|
||||
AtomicInteger numResults = new AtomicInteger();
|
||||
//iterate over the items and count the number of fields with data to determine whether there are data present
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
numResults.getAndIncrement();
|
||||
container.get().getSelectList().forEach(field -> {
|
||||
getTestContainer().getSelectList().forEach(field -> {
|
||||
if (item.get(field) != null) {
|
||||
numFieldsWithData.getAndIncrement();
|
||||
}
|
||||
|
@ -187,11 +172,11 @@ public class WebAPIServerCore implements En {
|
|||
});
|
||||
|
||||
LOG.info("Number of Results: " + numResults.get());
|
||||
LOG.info("Number of Fields: " + container.get().getSelectList().size());
|
||||
LOG.info("Number of Fields: " + getTestContainer().getSelectList().size());
|
||||
LOG.info("Fields with Data: " + numFieldsWithData.get());
|
||||
|
||||
if (numResults.get() > 0 && container.get().getSelectList().size() > 0) {
|
||||
fill = ((100.0 * numFieldsWithData.get()) / (numResults.get() * container.get().getSelectList().size()));
|
||||
if (numResults.get() > 0 && getTestContainer().getSelectList().size() > 0) {
|
||||
fill = ((100.0 * numFieldsWithData.get()) / (numResults.get() * getTestContainer().getSelectList().size()));
|
||||
LOG.info("Percent Fill: " + df.format(fill) + "%");
|
||||
} else {
|
||||
LOG.info("Percent Fill: 0% - no fields with data found!");
|
||||
|
@ -209,10 +194,10 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^the results contain at most \"([^\"]*)\" records$", (String parameterTopCount) -> {
|
||||
try {
|
||||
List<String> items = from(container.get().getResponseData()).getList(JSON_VALUE_PATH);
|
||||
List<String> items = from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH);
|
||||
AtomicInteger numResults = new AtomicInteger(items.size());
|
||||
|
||||
int topCount = Integer.parseInt(Settings.resolveParametersString(parameterTopCount, container.get().getSettings()));
|
||||
int topCount = Integer.parseInt(Settings.resolveParametersString(parameterTopCount, getTestContainer().getSettings()));
|
||||
LOG.info("Number of values returned: " + numResults.get() + ", top count is: " + topCount);
|
||||
|
||||
assertTrue(getDefaultErrorMessage("results count must be greater than zero and less than", parameterTopCount + "!"),
|
||||
|
@ -222,25 +207,26 @@ public class WebAPIServerCore implements En {
|
|||
}
|
||||
});
|
||||
|
||||
|
||||
/*
|
||||
* skip - $skip
|
||||
* $skip=*Parameter_TopCount*
|
||||
*/
|
||||
And("^a GET request is made to the resolved Url in \"([^\"]*)\" with \\$skip=\"([^\"]*)\"$", (String requestId, String parameterTopCount) -> {
|
||||
try {
|
||||
int skipCount = Integer.parseInt(Settings.resolveParametersString(parameterTopCount, container.get().getSettings()));
|
||||
int skipCount = Integer.parseInt(Settings.resolveParametersString(parameterTopCount, getTestContainer().getSettings()));
|
||||
LOG.info("Skip count is: " + skipCount);
|
||||
|
||||
container.get().setRequest(requestId);
|
||||
getTestContainer().setRequest(requestId);
|
||||
|
||||
//preserve initial response data for later comparisons
|
||||
container.get().setInitialResponseData(container.get().getResponseData());
|
||||
getTestContainer().setInitialResponseData(getTestContainer().getResponseData());
|
||||
|
||||
//TODO: convert to OData filter factory
|
||||
container.get().setRequestUri(Commander.prepareURI(
|
||||
Settings.resolveParameters(container.get().getSettings().getRequest(requestId), container.get().getSettings()).getRequestUrl()
|
||||
getTestContainer().setRequestUri(Commander.prepareURI(
|
||||
Settings.resolveParameters(getTestContainer().getSettings().getRequest(requestId), getTestContainer().getSettings()).getRequestUrl()
|
||||
+ AMPERSAND + ODATA_QUERY_PARAMS.SKIP + EQUALS + skipCount));
|
||||
container.get().executePreparedRawGetRequest();
|
||||
getTestContainer().executePreparedRawGetRequest();
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -254,15 +240,16 @@ public class WebAPIServerCore implements En {
|
|||
* Rather than returning an integer response, this implementation expects the @odata.count property to be
|
||||
* available when requested, and a $top=0 may be used to restrict the number of items returned as results.
|
||||
*/
|
||||
And("^the \"([^\"]*)\" value is greater than or equal to the number of results$", (String field) ->
|
||||
assertTrue(getDefaultErrorMessage("the @odata.count value MUST be present",
|
||||
"and contain a non-zero value greater than or equal to the number of results!"),
|
||||
TestUtils.validateODataCount(container.get().getResponseData())));
|
||||
And("^the \"([^\"]*)\" value is greater than or equal to the number of results$", (String field) -> {
|
||||
assertTrue(getDefaultErrorMessage("the @odata.count value MUST be present",
|
||||
"and contain a non-zero value greater than or equal to the number of results!"),
|
||||
TestUtils.validateODataCount(getTestContainer().getResponseData()));
|
||||
});
|
||||
|
||||
And("^data in the \"([^\"]*)\" fields are different in the second request than in the first$", (String parameterUniqueId) -> {
|
||||
try {
|
||||
List<POJONode> l1 = from(container.get().getInitialResponseData()).getJsonObject(JSON_VALUE_PATH);
|
||||
List<POJONode> l2 = from(container.get().getResponseData()).getJsonObject(JSON_VALUE_PATH);
|
||||
List<POJONode> l1 = from(getTestContainer().getInitialResponseData()).getJsonObject(JSON_VALUE_PATH);
|
||||
List<POJONode> l2 = from(getTestContainer().getResponseData()).getJsonObject(JSON_VALUE_PATH);
|
||||
|
||||
int combinedCount = l1.size() + l2.size();
|
||||
Set<POJONode> intersection = new LinkedHashSet<>(l1);
|
||||
|
@ -289,12 +276,8 @@ public class WebAPIServerCore implements En {
|
|||
final Set<String> collectionRequestIds = new HashSet<>(Arrays.asList("filter-coll-enum-any", "filter-coll-enum-all"));
|
||||
final Set<String> isFlagsRequestIds = new HashSet<>(Arrays.asList("filter-enum-multi-has", "filter-enum-multi-has-and"));
|
||||
|
||||
if (useStringEnums) {
|
||||
assumeFalse("Using string enumerations. Skipping Test: " + requestId, requestId.contentEquals("filter-enum-single-has"));
|
||||
}
|
||||
|
||||
if (useCollections) {
|
||||
assumeFalse("Using Collections for enumerations. Skipping Test: " + requestId, isFlagsRequestIds.contains(requestId));
|
||||
assumeFalse("Using Collection(Edm.EnumType). Skipping Test: " + requestId, isFlagsRequestIds.contains(requestId));
|
||||
} else {
|
||||
assumeFalse("Using IsFlags=\"true\". Skipping Test: " + requestId, collectionRequestIds.contains(requestId));
|
||||
}
|
||||
|
@ -305,40 +288,37 @@ public class WebAPIServerCore implements En {
|
|||
* Assert response code
|
||||
*/
|
||||
Then("^the server responds with a status code of (\\d+)$", (Integer assertedResponseCode) -> {
|
||||
assertNotNull(getDefaultErrorMessage("request was null! \nCheck RESOScript to make sure requestId exists."), container.get().getRequest());
|
||||
assertNotNull(getDefaultErrorMessage("request was null! \nCheck RESOScript to make sure requestId exists."), getTestContainer().getRequest());
|
||||
try {
|
||||
LOG.info("Asserted Response Code: " + assertedResponseCode + ", Server Response Code: " + container.get().getResponseCode());
|
||||
String errorMessage = "";
|
||||
LOG.info("Asserted Response Code: " + assertedResponseCode + ", Server Response Code: " + getTestContainer().getResponseCode());
|
||||
|
||||
if (container.get().getODataClientErrorException() != null) {
|
||||
if (container.get().getODataClientErrorException().getODataError().getMessage() != null) {
|
||||
errorMessage = container.get().getODataClientErrorException().getODataError().getMessage();
|
||||
|
||||
} else if (container.get().getODataClientErrorException().getMessage() != null) {
|
||||
errorMessage = container.get().getODataClientErrorException().getMessage();
|
||||
if (getTestContainer().getODataClientErrorException() != null) {
|
||||
if (getTestContainer().getODataClientErrorException().getODataError().getMessage() != null) {
|
||||
LOG.error(getDefaultErrorMessage("Request failed with the following message:",
|
||||
getTestContainer().getODataClientErrorException().getODataError().getMessage()));
|
||||
} else if (getTestContainer().getODataClientErrorException().getMessage() != null) {
|
||||
LOG.error(getDefaultErrorMessage("Request failed with the following message:",
|
||||
getTestContainer().getODataClientErrorException().getMessage()));
|
||||
}
|
||||
} else if (container.get().getODataServerErrorException() != null) {
|
||||
errorMessage = container.get().getODataServerErrorException().getMessage();
|
||||
scenario.log(getDefaultErrorMessage("Request failed with the following message:", errorMessage));
|
||||
if (container.get().getODataServerErrorException().toString().contains(String.valueOf(HttpStatus.SC_INTERNAL_SERVER_ERROR))) {
|
||||
container.get().setResponseCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
|
||||
if (getTestContainer().getODataServerErrorException() != null) {
|
||||
LOG.error(getDefaultErrorMessage("Request failed with the following message:",
|
||||
getTestContainer().getODataServerErrorException().toString()));
|
||||
|
||||
if (getTestContainer().getODataServerErrorException().toString().contains(String.valueOf(HttpStatus.SC_INTERNAL_SERVER_ERROR))) {
|
||||
getTestContainer().setResponseCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: clean up logic
|
||||
if (container.get().getResponseCode() != null && assertedResponseCode.intValue() != container.get().getResponseCode().intValue()) {
|
||||
final String responseCodeErrorMessage = getAssertResponseCodeErrorMessage(assertedResponseCode, container.get().getResponseCode());
|
||||
if (errorMessage.length() > 0) {
|
||||
scenario.log(errorMessage);
|
||||
}
|
||||
scenario.log(responseCodeErrorMessage);
|
||||
fail(responseCodeErrorMessage + "\n" + errorMessage);
|
||||
if (getTestContainer().getResponseCode() != null && assertedResponseCode.intValue() != getTestContainer().getResponseCode().intValue()) {
|
||||
fail(getAssertResponseCodeErrorMessage(assertedResponseCode, getTestContainer().getResponseCode()));
|
||||
}
|
||||
|
||||
//if we make it through without failing, things are good
|
||||
assertTrue(container.get().getResponseCode() > 0 && assertedResponseCode > 0);
|
||||
assertTrue(getTestContainer().getResponseCode() > 0 && assertedResponseCode > 0);
|
||||
} catch (Exception ex) {
|
||||
scenario.log(ex.toString());
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
});
|
||||
|
@ -347,15 +327,15 @@ public class WebAPIServerCore implements En {
|
|||
* validate XML wrapper
|
||||
*/
|
||||
And("^the XML Metadata response is valid XML$", () -> {
|
||||
assertNotNull(getDefaultErrorMessage("no XML Response data were found!"), container.get().getXMLResponseData());
|
||||
container.get().validateXMLMetadataXML();
|
||||
assertTrue(getDefaultErrorMessage("invalid XML response!"), container.get().getIsValidXMLMetadataXML());
|
||||
assertNotNull(getDefaultErrorMessage("no XML Response data were found!"), getTestContainer().getXMLResponseData());
|
||||
getTestContainer().validateXMLMetadataXML();
|
||||
assertTrue(getDefaultErrorMessage("invalid XML response!"), getTestContainer().getIsValidXMLMetadataXML());
|
||||
});
|
||||
|
||||
/*
|
||||
* validate JSON wrapper
|
||||
*/
|
||||
And("^the response is valid JSON$", container.get()::validateJSON);
|
||||
And("^the response is valid JSON$", getTestContainer()::validateJSON);
|
||||
|
||||
/*
|
||||
* Assert HTTP Response Code given asserted OData version
|
||||
|
@ -364,15 +344,15 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
Then("^the server responds with a status code of (\\d+) if the server reports OData-Version \"([^\"]*)\"$", (Integer assertedHttpResponseCode, String assertedODataVersion) -> {
|
||||
try {
|
||||
boolean versionsMatch = container.get().getServerODataHeaderVersion().equals(assertedODataVersion),
|
||||
responseCodesMatch = container.get().getResponseCode().intValue() == assertedHttpResponseCode.intValue();
|
||||
boolean versionsMatch = getTestContainer().getServerODataHeaderVersion().equals(assertedODataVersion),
|
||||
responseCodesMatch = getTestContainer().getResponseCode().intValue() == assertedHttpResponseCode.intValue();
|
||||
|
||||
LOG.info("Asserted OData Version: " + assertedODataVersion + ", Server Version: " + container.get().getServerODataHeaderVersion());
|
||||
LOG.info("Asserted OData Version: " + assertedODataVersion + ", Server Version: " + getTestContainer().getServerODataHeaderVersion());
|
||||
|
||||
if (versionsMatch) {
|
||||
LOG.info("Asserted Response Code: " + assertedHttpResponseCode + ", Response code: " + container.get().getResponseCode());
|
||||
LOG.info("Asserted Response Code: " + assertedHttpResponseCode + ", Response code: " + getTestContainer().getResponseCode());
|
||||
assertTrue(getDefaultErrorMessage("asserted response code (" + assertedHttpResponseCode + ")",
|
||||
"does not match the one returned from the server (" + container.get().getResponseCode() + ")!"), responseCodesMatch);
|
||||
"does not match the one returned from the server (" + getTestContainer().getResponseCode() + ")!"), responseCodesMatch);
|
||||
} else {
|
||||
LOG.info("Test skipped! Only applies when the asserted version matches the reported server version.");
|
||||
}
|
||||
|
@ -389,8 +369,8 @@ public class WebAPIServerCore implements En {
|
|||
And("^the response has results$", () -> {
|
||||
try {
|
||||
assertTrue(getDefaultErrorMessage("no results were found in the '" + JSON_VALUE_PATH + "' path of the JSON response!"),
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, Map.class).size() > 0);
|
||||
LOG.info("Results count is: " + from(container.get().getResponseData()).getList(JSON_VALUE_PATH, Map.class).size());
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, Map.class).size() > 0);
|
||||
LOG.info("Results count is: " + from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, Map.class).size());
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -401,8 +381,8 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^the response has singleton results in \"([^\"]*)\"", (String parameterFieldName) -> {
|
||||
try {
|
||||
String value = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
boolean isPresent = from(container.get().getResponseData()).get() != null;
|
||||
String value = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
boolean isPresent = from(getTestContainer().getResponseData()).get() != null;
|
||||
assertTrue(getDefaultErrorMessage("OData singleton results not found for '" + value + "'!"), isPresent);
|
||||
LOG.info("Data are present and response value is: " + value);
|
||||
} catch (Exception ex) {
|
||||
|
@ -415,8 +395,8 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^the number of results is less than or equal to \"([^\"]*)\"$", (String limitField) -> {
|
||||
try {
|
||||
int count = from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).size(),
|
||||
limit = Integer.parseInt(Settings.resolveParametersString(limitField, container.get().getSettings()));
|
||||
int count = from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).size(),
|
||||
limit = Integer.parseInt(Settings.resolveParametersString(limitField, getTestContainer().getSettings()));
|
||||
LOG.info("Results count is: " + count + ", Limit is: " + limit);
|
||||
assertTrue(getDefaultErrorMessage("number of results exceeds that specified in '" + limitField + "'!"), count <= limit);
|
||||
} catch (Exception ex) {
|
||||
|
@ -430,11 +410,11 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^Integer data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
int assertedValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
int assertedValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
|
||||
LOG.info("fieldName: " + fieldName + ", op: " + op + ", assertedValue: " + assertedValue);
|
||||
assertTrue(TestUtils.compareIntegerPayloadToAssertedValue(container.get().getResponseData(), fieldName, op, assertedValue));
|
||||
assertTrue(TestUtils.compareIntegerPayloadToAssertedValue(getTestContainer().getResponseData(), fieldName, op, assertedValue));
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -442,11 +422,11 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
And("^Decimal data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
Double assertedValue = Double.parseDouble(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
Double assertedValue = Double.parseDouble(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
|
||||
LOG.info("fieldName: " + fieldName + ", op: " + op + ", assertedValue: " + assertedValue);
|
||||
assertTrue(TestUtils.compareDecimalPayloadToAssertedValue(container.get().getResponseData(), fieldName, op, assertedValue));
|
||||
assertTrue(TestUtils.compareDecimalPayloadToAssertedValue(getTestContainer().getResponseData(), fieldName, op, assertedValue));
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -458,14 +438,14 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^Integer data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String opLhs, String parameterAssertedLhsValue, String andOrOp, String opRhs, String parameterAssertedRhsValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
Integer assertedLhsValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedLhsValue, container.get().getSettings())),
|
||||
assertedRhsValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedRhsValue, container.get().getSettings()));
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
Integer assertedLhsValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedLhsValue, getTestContainer().getSettings())),
|
||||
assertedRhsValue = Integer.parseInt(Settings.resolveParametersString(parameterAssertedRhsValue, getTestContainer().getSettings()));
|
||||
|
||||
String op = andOrOp.toLowerCase();
|
||||
boolean isAndOp = op.contains(AND);
|
||||
|
||||
//these should default to true when And, and false when Or for the purpose of boolean comparisons
|
||||
//these should default to true when And and false when Or for the purpose of boolean comparisons
|
||||
AtomicBoolean lhsResult = new AtomicBoolean(isAndOp);
|
||||
AtomicBoolean rhsResult = new AtomicBoolean(isAndOp);
|
||||
AtomicBoolean itemResult = new AtomicBoolean(isAndOp);
|
||||
|
@ -475,7 +455,7 @@ public class WebAPIServerCore implements En {
|
|||
rhsValue = new AtomicReference<>();
|
||||
|
||||
//iterate through response data and ensure that with data, the statement fieldName "op" assertValue is true
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
lhsValue.set(Integer.parseInt(item.get(fieldName).toString()));
|
||||
rhsValue.set(Integer.parseInt(item.get(fieldName).toString()));
|
||||
|
||||
|
@ -503,14 +483,14 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^Date data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicReference<Date> fieldValue = new AtomicReference<>();
|
||||
AtomicReference<Date> assertedValue = new AtomicReference<>();
|
||||
|
||||
assertedValue.set(TestUtils.parseDateFromEdmDateString(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings())));
|
||||
assertedValue.set(TestUtils.parseDateFromEdmDateString(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings())));
|
||||
LOG.info("Asserted value is: " + assertedValue.get().toString());
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
try {
|
||||
fieldValue.set(TestUtils.parseDateFromEdmDateTimeOffsetString(item.get(fieldName).toString()));
|
||||
assertTrue(TestUtils.compare(fieldValue.get(), op, assertedValue.get()));
|
||||
|
@ -528,14 +508,14 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^TimeOfDay data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicReference<Time> fieldValue = new AtomicReference<>();
|
||||
AtomicReference<Time> assertedValue = new AtomicReference<>();
|
||||
|
||||
assertedValue.set(TestUtils.parseTimeOfDayFromEdmTimeOfDayString(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings())));
|
||||
assertedValue.set(TestUtils.parseTimeOfDayFromEdmTimeOfDayString(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings())));
|
||||
LOG.info("Asserted value is: " + assertedValue.get().toString());
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
try {
|
||||
fieldValue.set(TestUtils.parseTimeOfDayFromEdmDateTimeOffsetString(item.get(fieldName).toString()));
|
||||
assertTrue(TestUtils.compare(fieldValue.get(), op, assertedValue.get()));
|
||||
|
@ -553,7 +533,7 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^DateTimeOffset data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
TestUtils.assertDateTimeOffset(parameterFieldName, op, parameterAssertedValue, container.get().getResponseData(), container.get().getSettings());
|
||||
TestUtils.assertDateTimeOffset(parameterFieldName, op, parameterAssertedValue, getTestContainer().getResponseData(), getTestContainer().getSettings());
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -564,7 +544,7 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^DateTimeOffset data in \"([^\"]*)\" \"([^\"]*)\" now\\(\\)$", (String parameterFieldName, String op) -> {
|
||||
try {
|
||||
TestUtils.assertDateTimeOffset(parameterFieldName, op, Timestamp.from(Instant.now()), container.get().getResponseData(), container.get().getSettings());
|
||||
TestUtils.assertDateTimeOffset(parameterFieldName, op, Timestamp.from(Instant.now()), getTestContainer().getResponseData(), getTestContainer().getSettings());
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -576,16 +556,16 @@ public class WebAPIServerCore implements En {
|
|||
And("^Single Valued Enumeration Data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
|
||||
final String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
final String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicReference<String> fieldValue = new AtomicReference<>();
|
||||
AtomicReference<String> assertedValue = new AtomicReference<>();
|
||||
|
||||
AtomicBoolean result = new AtomicBoolean(false);
|
||||
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
LOG.info("Asserted value is: " + assertedValue.get());
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
fieldValue.set(item.get(fieldName).toString());
|
||||
if (op.toLowerCase().contentEquals(EQ) || op.toLowerCase().contentEquals(HAS)) {
|
||||
result.set(fieldValue.get().contentEquals(assertedValue.get()));
|
||||
|
@ -606,31 +586,28 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^Multiple Valued Enumeration Data in \"([^\"]*)\" has \"([^\"]*)\"$", (String parameterFieldName, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicReference<String> fieldValue = new AtomicReference<>();
|
||||
AtomicReference<String> assertedValue = new AtomicReference<>();
|
||||
AtomicBoolean result = new AtomicBoolean(true);
|
||||
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
LOG.info("Asserted value is: " + assertedValue.get());
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, ObjectNode.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, ObjectNode.class).forEach(item -> {
|
||||
fieldValue.set(item.get(fieldName).toString());
|
||||
String assertMessage;
|
||||
if (useCollections) {
|
||||
if (item.get(fieldName).isArray()) {
|
||||
result.set(result.get() && TestUtils.testAnyOperator(item, fieldName, assertedValue.get()));
|
||||
assertMessage = "Assert True: " + fieldValue.get() + " contains " + assertedValue.get() + " ==> " + result.get();
|
||||
LOG.info(assertMessage);
|
||||
assertTrue(assertMessage, result.get());
|
||||
LOG.info("Assert True: " + fieldValue.get() + " contains " + assertedValue.get() + " ==> " + result.get());
|
||||
assertTrue(result.get());
|
||||
} else {
|
||||
fail(getDefaultErrorMessage(fieldName, "MUST contain an array of values but found:", item.get(fieldName).toString()));
|
||||
}
|
||||
} else {
|
||||
result.set(fieldValue.get().contains(assertedValue.get()));
|
||||
assertMessage = "Assert True: " + fieldValue.get() + " has " + assertedValue.get() + " ==> " + result.get();
|
||||
LOG.info(assertMessage);
|
||||
assertTrue(assertMessage, result.get());
|
||||
LOG.info("Assert True: " + fieldValue.get() + " has " + assertedValue.get() + " ==> " + result.get());
|
||||
assertTrue(result.get());
|
||||
}
|
||||
});
|
||||
} catch (Exception ex) {
|
||||
|
@ -640,16 +617,16 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
And("^Multiple Valued Enumeration Data in \"([^\"]*)\" is empty OR has \"([^\"]*)\"$", (String parameterFieldName, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicReference<String> fieldValue = new AtomicReference<>();
|
||||
AtomicReference<String> assertedValue = new AtomicReference<>();
|
||||
|
||||
AtomicBoolean result = new AtomicBoolean(true);
|
||||
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
assertedValue.set(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
LOG.info("Asserted value is: " + assertedValue.get());
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, ObjectNode.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, ObjectNode.class).forEach(item -> {
|
||||
fieldValue.set(item.get(fieldName).toString());
|
||||
if (item.get(fieldName).isArray()) {
|
||||
result.set(result.get() && testAllOperator(item, fieldName, assertedValue.get()));
|
||||
|
@ -669,7 +646,7 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^DateTimeOffset data in \"([^\"]*)\" is sorted in \"([^\"]*)\" order$", (String parameterFieldName, String parameterOrderByDirection) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
final String ASC = "asc", DESC = "desc";
|
||||
AtomicReference<String> orderBy = new AtomicReference<>(parameterOrderByDirection.toLowerCase());
|
||||
|
||||
|
@ -680,12 +657,12 @@ public class WebAPIServerCore implements En {
|
|||
AtomicReference<Timestamp> currentValue = new AtomicReference<>();
|
||||
AtomicInteger count = new AtomicInteger(0);
|
||||
|
||||
from(container.get().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
from(getTestContainer().getResponseData()).getList(JSON_VALUE_PATH, HashMap.class).forEach(item -> {
|
||||
try {
|
||||
if (count.get() == 0) {
|
||||
initialValue.set(TestUtils.parseTimestampFromEdmDateTimeOffsetString((String) item.get(fieldName)));
|
||||
initialValue.set(TestUtils.parseTimestampFromEdmDateTimeOffsetString((String)item.get(fieldName)));
|
||||
} else {
|
||||
currentValue.set(TestUtils.parseTimestampFromEdmDateTimeOffsetString((String) item.get(fieldName)));
|
||||
currentValue.set(TestUtils.parseTimestampFromEdmDateTimeOffsetString((String)item.get(fieldName)));
|
||||
if (orderBy.get().equals(ASC)) {
|
||||
assertTrue(TestUtils.compare(initialValue.get(), LESS_THAN_OR_EQUAL, currentValue.get()));
|
||||
} else if (orderBy.get().equals(DESC)) {
|
||||
|
@ -707,15 +684,15 @@ public class WebAPIServerCore implements En {
|
|||
* Date Field comparisons
|
||||
*/
|
||||
And("^\"([^\"]*)\" data in Date Field \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String stringDatePart, String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
AtomicInteger assertedValue = new AtomicInteger();
|
||||
AtomicReference<String> datePart = new AtomicReference<>(stringDatePart.toLowerCase());
|
||||
AtomicReference<String> operator = new AtomicReference<>(op.toLowerCase());
|
||||
|
||||
try {
|
||||
assertedValue.set(Integer.parseInt(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings())));
|
||||
assertedValue.set(Integer.parseInt(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings())));
|
||||
LOG.info("Asserted value is: " + assertedValue.get());
|
||||
assertTrue(TestUtils.compareDatePayloadToAssertedDatePartValue(container.get().getResponseData(), datePart.get(), fieldName, operator.get(), assertedValue.intValue()));
|
||||
assertTrue(TestUtils.compareDatePayloadToAssertedDatePartValue(getTestContainer().getResponseData(), datePart.get(), fieldName, operator.get(), assertedValue.intValue()));
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -726,22 +703,22 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^\"([^\"]*)\" data in Timestamp Field \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String stringDatePart, String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
double assertedValue;
|
||||
String datePart = stringDatePart.toLowerCase();
|
||||
String operator = op.toLowerCase();
|
||||
|
||||
try {
|
||||
assertedValue = Double.parseDouble(Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings()));
|
||||
assertedValue = Double.parseDouble(Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings()));
|
||||
|
||||
if (assertedValue % 1 == 0) LOG.info("Asserted value is: " + (int) assertedValue);
|
||||
else LOG.info("Asserted value is: " + assertedValue);
|
||||
|
||||
//TODO: re-consolidate fractional with other date part ops
|
||||
if (datePart.contentEquals(FRACTIONAL)) {
|
||||
assertTrue(TestUtils.compareFractionalSecondsPayloadToAssertedValue(container.get().getResponseData(), fieldName, operator, assertedValue));
|
||||
assertTrue(TestUtils.compareFractionalSecondsPayloadToAssertedValue(getTestContainer().getResponseData(), fieldName, operator, assertedValue));
|
||||
} else {
|
||||
assertTrue(TestUtils.compareTimestampPayloadToAssertedDatePartValue(container.get().getResponseData(), datePart, fieldName, operator, (int) assertedValue));
|
||||
assertTrue(TestUtils.compareTimestampPayloadToAssertedDatePartValue(getTestContainer().getResponseData(), datePart, fieldName, operator, (int) assertedValue));
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
|
@ -756,9 +733,9 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^String data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String op, String parameterAssertedValue) -> {
|
||||
try {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings());
|
||||
String assertedValue = Settings.resolveParametersString(parameterAssertedValue, container.get().getSettings());
|
||||
assertTrue(TestUtils.compareStringPayloadToAssertedValue(container.get().getResponseData(), fieldName, op, assertedValue));
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings());
|
||||
String assertedValue = Settings.resolveParametersString(parameterAssertedValue, getTestContainer().getSettings());
|
||||
assertTrue(TestUtils.compareStringPayloadToAssertedValue(getTestContainer().getResponseData(), fieldName, op, assertedValue));
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -773,11 +750,11 @@ public class WebAPIServerCore implements En {
|
|||
Checks that metadata are accessible and contain the resource name specified in generic.resoscript
|
||||
*/
|
||||
And("^the metadata contains the \"([^\"]*)\" resource$", (String parameterResourceName) -> {
|
||||
final String resourceName = Settings.resolveParametersString(parameterResourceName, container.get().getSettings());
|
||||
final String resourceName = Settings.resolveParametersString(parameterResourceName, getTestContainer().getSettings());
|
||||
AtomicReference<CsdlEntityContainer> entityContainer = new AtomicReference<>();
|
||||
|
||||
try {
|
||||
entityContainer.set(TestUtils.findDefaultEntityContainer(container.get().getEdm(), container.get().getXMLMetadata()));
|
||||
entityContainer.set(TestUtils.findDefaultEntityContainer(getTestContainer().getEdm(), getTestContainer().getXMLMetadata()));
|
||||
|
||||
assertNotNull(getDefaultErrorMessage("server metadata does not contain the given resource name:", resourceName),
|
||||
entityContainer.get().getEntitySet(resourceName));
|
||||
|
@ -791,20 +768,20 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
/*
|
||||
* Ensures that the server metadata for the given resource in parameterResourceName contains
|
||||
* each of the fields in the given parameterSelectList.
|
||||
* all of the fields in the given parameterSelectList.
|
||||
*/
|
||||
And("^resource metadata for \"([^\"]*)\" contains the fields in the given select list$", (String parameterResourceName) -> {
|
||||
assertTrue(getDefaultErrorMessage("no $select list found for requestId:", container.get().getRequest().getRequestId()),
|
||||
container.get().getSelectList().size() > 0);
|
||||
assertTrue(getDefaultErrorMessage("no $select list found for requestId:", getTestContainer().getRequest().getRequestId()),
|
||||
getTestContainer().getSelectList().size() > 0);
|
||||
|
||||
try {
|
||||
LOG.info("Searching metadata for fields in given select list: " + container.get().getSelectList());
|
||||
container.get().getSelectList().forEach(fieldName -> {
|
||||
//need to skip the expanded field when looking through the metadata
|
||||
if (container.get().getExpandField() != null && !fieldName.contentEquals(container.get().getExpandField())) {
|
||||
LOG.info("Searching metadata for fields in given select list: " + getTestContainer().getSelectList().toString());
|
||||
getTestContainer().getSelectList().forEach(fieldName -> {
|
||||
//need to skip the expand field when looking through the metadata
|
||||
if (getTestContainer().getExpandField() != null && !fieldName.contentEquals(getTestContainer().getExpandField())) {
|
||||
try {
|
||||
assertNotNull(getDefaultErrorMessage("Field name '" + fieldName + "' is not present in server metadata!"),
|
||||
container.get().getCsdlProperty(parameterResourceName, fieldName));
|
||||
getTestContainer().getCsdlProperty(parameterResourceName, fieldName));
|
||||
LOG.info("Found: '" + fieldName.trim() + "'");
|
||||
} catch (Exception ex) {
|
||||
LOG.error(getDefaultErrorMessage(ex));
|
||||
|
@ -824,10 +801,10 @@ public class WebAPIServerCore implements En {
|
|||
When("^the metadata contains a valid service document$", () -> {
|
||||
try {
|
||||
assertNotNull(getDefaultErrorMessage("could not find default entity container for given service root:",
|
||||
container.get().getServiceRoot()), container.get().getEdm().getEntityContainer());
|
||||
LOG.info("Found Default Entity Container: '" + container.get().getEdm().getEntityContainer().getNamespace() + "'");
|
||||
getTestContainer().getServiceRoot()), getTestContainer().getEdm().getEntityContainer());
|
||||
LOG.info("Found Default Entity Container: '" + getTestContainer().getEdm().getEntityContainer().getNamespace() + "'");
|
||||
} catch (ODataClientErrorException cex) {
|
||||
container.get().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
getTestContainer().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
fail(cex.toString());
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
|
@ -838,21 +815,24 @@ public class WebAPIServerCore implements En {
|
|||
* XML Metadata getter
|
||||
*/
|
||||
And("^XML Metadata are requested from the service root in \"([^\"]*)\"$", (String clientSettingsServiceRoot) -> {
|
||||
final String serviceRoot = Settings.resolveParametersString(clientSettingsServiceRoot, container.get().getSettings());
|
||||
final String serviceRoot = Settings.resolveParametersString(clientSettingsServiceRoot, getTestContainer().getSettings());
|
||||
assertEquals(getDefaultErrorMessage("given service root doesn't match the one configured in the Commander"),
|
||||
serviceRoot,
|
||||
container.get().getCommander().getServiceRoot());
|
||||
getTestContainer().getCommander().getServiceRoot());
|
||||
|
||||
try {
|
||||
if (container.get().fetchXMLMetadata() == null) {
|
||||
if (getTestContainer().fetchXMLMetadata() == null) {
|
||||
//force exit rather than allowing the tests to finish
|
||||
failAndExitWithErrorMessage("Could not retrieve valid XML Metadata for given service root: " + serviceRoot, LOG);
|
||||
LOG.error(getDefaultErrorMessage("could not retrieve valid XML Metadata for given service root:", serviceRoot));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
} catch (ODataClientErrorException cex) {
|
||||
container.get().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage(cex), LOG);
|
||||
getTestContainer().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
LOG.error(getDefaultErrorMessage(cex));
|
||||
System.exit(NOT_OK);
|
||||
} catch (Exception ex) {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage(ex), LOG);
|
||||
LOG.error(getDefaultErrorMessage(ex));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -860,10 +840,10 @@ public class WebAPIServerCore implements En {
|
|||
* Tests whether a navigation property can be found in the given resource name.
|
||||
*/
|
||||
And("^an OData NavigationProperty exists for the given \"([^\"]*)\"$", (String parameterEndpointResource) -> {
|
||||
String resourceName = Settings.resolveParametersString(parameterEndpointResource, container.get().getSettings());
|
||||
String resourceName = Settings.resolveParametersString(parameterEndpointResource, getTestContainer().getSettings());
|
||||
|
||||
List<CsdlNavigationProperty> navigationProperties
|
||||
= TestUtils.findNavigationPropertiesForEntityTypeName(container.get().getEdm(), container.get().getXMLMetadata(), resourceName);
|
||||
= TestUtils.findNavigationPropertiesForEntityTypeName(getTestContainer().getEdm(), getTestContainer().getXMLMetadata(), resourceName);
|
||||
|
||||
assertTrue(getDefaultErrorMessage("no navigation properties found for the given '" + resourceName + "' resource!"),
|
||||
navigationProperties.size() > 0);
|
||||
|
@ -880,11 +860,11 @@ public class WebAPIServerCore implements En {
|
|||
* Checks to see whether the expanded field has data
|
||||
*/
|
||||
And("^data and type information exist in the results and within the given \"([^\"]*)\"$", (String parameterExpandField) -> {
|
||||
String expandField = Settings.resolveParametersString(parameterExpandField, container.get().getSettings());
|
||||
String expandField = Settings.resolveParametersString(parameterExpandField, getTestContainer().getSettings());
|
||||
assertFalse(getDefaultErrorMessage("no expand field found for", parameterExpandField), expandField.isEmpty());
|
||||
|
||||
ClientEntitySet results = container.get().getCommander().getClient().getRetrieveRequestFactory()
|
||||
.getEntitySetRequest(container.get().getRequestUri()).execute().getBody();
|
||||
ClientEntitySet results = getTestContainer().getCommander().getClient().getRetrieveRequestFactory()
|
||||
.getEntitySetRequest(getTestContainer().getRequestUri()).execute().getBody();
|
||||
|
||||
LOG.info("Results count is: " + results.getEntities().size());
|
||||
AtomicInteger counter = new AtomicInteger();
|
||||
|
@ -938,7 +918,7 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
And("^the metadata contains at least one resource from \"([^\"]*)\"$", (String parameterRequiredResourceList) -> {
|
||||
String requiredResourceString =
|
||||
Settings.resolveParametersString(parameterRequiredResourceList, container.get().getSettings()).replace(SINGLE_SPACE, EMPTY_STRING);
|
||||
Settings.resolveParametersString(parameterRequiredResourceList, getTestContainer().getSettings()).replace(SINGLE_SPACE, EMPTY_STRING);
|
||||
List<String> requiredResources = Arrays.asList(requiredResourceString.split(FIELD_SEPARATOR));
|
||||
|
||||
LOG.info("Searching the default entity container for one of the following Standard Resources: "
|
||||
|
@ -948,7 +928,7 @@ public class WebAPIServerCore implements En {
|
|||
requiredResources.forEach(requiredResource -> {
|
||||
try {
|
||||
if (!found.get())
|
||||
found.set(found.get() || container.get().getEdm().getEntityContainer().getEntitySet(requiredResource) != null);
|
||||
found.set(found.get() || getTestContainer().getEdm().getEntityContainer().getEntitySet(requiredResource) != null);
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -966,8 +946,8 @@ public class WebAPIServerCore implements En {
|
|||
* latest version of the Data Dictionary, currently 1.7.
|
||||
*/
|
||||
And("^the given \"([^\"]*)\" resource exists within \"([^\"]*)\"$", (String parameterResourceName, String parameterResourceList) -> {
|
||||
String resourceName = Settings.resolveParametersString(parameterResourceName, container.get().getSettings()),
|
||||
allowedResourceString = Settings.resolveParametersString(parameterResourceList, container.get().getSettings()).replace(" ", "");
|
||||
String resourceName = Settings.resolveParametersString(parameterResourceName, getTestContainer().getSettings()),
|
||||
allowedResourceString = Settings.resolveParametersString(parameterResourceList, getTestContainer().getSettings()).replace(" ", "");
|
||||
List<String> allowedResources = new ArrayList<>(Arrays.asList(allowedResourceString.split(FIELD_SEPARATOR)));
|
||||
|
||||
LOG.info("Resource Name: " + resourceName);
|
||||
|
@ -980,25 +960,25 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
|
||||
When("^a GET request is made to the resolved Url in \"([^\"]*)\" using the OData Client$", (String requestId) -> {
|
||||
Request request = container.get().getSettings().getRequest(requestId);
|
||||
String uriString = Settings.resolveParameters(request, container.get().getSettings()).getRequestUrl();
|
||||
Request request = getTestContainer().getSettings().getRequest(requestId);
|
||||
String uriString = Settings.resolveParameters(request, getTestContainer().getSettings()).getRequestUrl();
|
||||
assertTrue(getDefaultErrorMessage("the resolved Url in", "'" + requestId + "'", "was invalid!"), uriString != null && uriString.length() > 0);
|
||||
|
||||
LOG.info("Request Id: " + requestId);
|
||||
try {
|
||||
container.get().setRequest(request);
|
||||
container.get().setRequestUri(prepareUri(uriString));
|
||||
container.get().setClientEntitySetRequest(container.get().getCommander().getClient().getRetrieveRequestFactory().getEntitySetRequest(container.get().getRequestUri()));
|
||||
getTestContainer().setRequest(request);
|
||||
getTestContainer().setRequestUri(prepareUri(uriString));
|
||||
getTestContainer().setClientEntitySetRequest(getTestContainer().getCommander().getClient().getRetrieveRequestFactory().getEntitySetRequest(getTestContainer().getRequestUri()));
|
||||
LOG.info("OData Client Request being made to: " + uriString);
|
||||
container.get().setClientEntitySetRequest(container.get().getClientEntitySetRequest());
|
||||
container.get().setClientEntitySetResponse(container.get().getClientEntitySetRequest().execute());
|
||||
container.get().setResponseCode(container.get().getClientEntitySetResponse().getStatusCode());
|
||||
getTestContainer().setClientEntitySetRequest(getTestContainer().getClientEntitySetRequest());
|
||||
getTestContainer().setClientEntitySetResponse(getTestContainer().getClientEntitySetRequest().execute());
|
||||
getTestContainer().setResponseCode(getTestContainer().getClientEntitySetResponse().getStatusCode());
|
||||
|
||||
ResWrap<EntityCollection> coll = (container.get().getCommander().getClient().getDeserializer(ContentType.JSON).toEntitySet(container.get().getClientEntitySetResponse().getRawResponse()));
|
||||
container.get().setClientEntitySet(container.get().getCommander().getClient().getBinder().getODataEntitySet(coll));
|
||||
ResWrap<EntityCollection> coll = (getTestContainer().getCommander().getClient().getDeserializer(ContentType.JSON).toEntitySet(getTestContainer().getClientEntitySetResponse().getRawResponse()));
|
||||
getTestContainer().setClientEntitySet(getTestContainer().getCommander().getClient().getBinder().getODataEntitySet(coll));
|
||||
} catch (ODataClientErrorException cex) {
|
||||
container.get().setODataClientErrorException(cex);
|
||||
container.get().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
getTestContainer().setODataClientErrorException(cex);
|
||||
getTestContainer().setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
} catch (Exception ex) {
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -1008,23 +988,23 @@ public class WebAPIServerCore implements En {
|
|||
* Uses the OData ClientEntitySet rather than raw JSON responses for comparisons
|
||||
*/
|
||||
And("^client entity set Integer data in \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\"$", (String parameterFieldName, String operator, String parameterFieldValue) -> {
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, container.get().getSettings()),
|
||||
String fieldName = Settings.resolveParametersString(parameterFieldName, getTestContainer().getSettings()),
|
||||
op = operator.trim().toLowerCase();
|
||||
|
||||
Integer fieldValue = Integer.parseInt(Settings.resolveParametersString(parameterFieldValue, container.get().getSettings()));
|
||||
Integer fieldValue = Integer.parseInt(Settings.resolveParametersString(parameterFieldValue, getTestContainer().getSettings()));
|
||||
assertNotNull(fieldValue);
|
||||
|
||||
container.get().getClientEntitySet().getEntities().forEach(entity ->
|
||||
getTestContainer().getClientEntitySet().getEntities().forEach(entity ->
|
||||
assertTrue(compare((Integer) entity.getProperty(fieldName).getValue().asPrimitive().toValue(), op, fieldValue)));
|
||||
|
||||
});
|
||||
|
||||
And("^the OData client response has client entity set data$", () -> {
|
||||
assertNotNull(getDefaultErrorMessage("no entity collection returned in response!"), container.get().getClientEntitySet());
|
||||
assertTrue(getDefaultErrorMessage("no results returned!"), container.get().getClientEntitySet().getCount() > 0);
|
||||
assertNotNull(getDefaultErrorMessage("no entity collection returned in response!"), getTestContainer().getClientEntitySet());
|
||||
assertTrue(getDefaultErrorMessage("no results returned!"), getTestContainer().getClientEntitySet().getCount() > 0);
|
||||
|
||||
if (showResponses) {
|
||||
container.get().getClientEntitySet().getEntities().forEach(entity -> {
|
||||
getTestContainer().getClientEntitySet().getEntities().forEach(entity -> {
|
||||
LOG.info("Entity Type is: " + entity.getTypeName());
|
||||
entity.getProperties().forEach(property -> LOG.info("\tProperty: " + property.toString()));
|
||||
});
|
||||
|
@ -1041,13 +1021,13 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
assertNotNull(getDefaultErrorMessage("must specify an 'OData-Version' in the response header!"
|
||||
+ "\nSee: http://docs.oasis-open.org/odata/odata/v4.0/errata03/os/complete/part1-protocol/odata-v4.0-errata03-os-part1-protocol-complete.html#_Toc453752225\n"),
|
||||
container.get().getServerODataHeaderVersion());
|
||||
getTestContainer().getServerODataHeaderVersion());
|
||||
|
||||
LOG.info("Reported OData-Version header value: '" + container.get().getServerODataHeaderVersion() + "'");
|
||||
LOG.info("Reported OData-Version header value: '" + getTestContainer().getServerODataHeaderVersion() + "'");
|
||||
|
||||
assertTrue(getDefaultErrorMessage("the 'OData-Version' response header must either be", "'" + val1 + "'", "or", "'" + val2 + "'", "(without quotes)."),
|
||||
container.get().getServerODataHeaderVersion().contentEquals(val1)
|
||||
|| container.get().getServerODataHeaderVersion().contentEquals(val2));
|
||||
getTestContainer().getServerODataHeaderVersion().contentEquals(val1)
|
||||
|| getTestContainer().getServerODataHeaderVersion().contentEquals(val2));
|
||||
});
|
||||
|
||||
/*
|
||||
|
@ -1055,12 +1035,12 @@ public class WebAPIServerCore implements En {
|
|||
*/
|
||||
Given("^valid metadata have been retrieved$", () -> {
|
||||
//NOTE: this is here so that tests may be run individually
|
||||
if (!container.get().getHaveMetadataBeenRequested()) {
|
||||
container.get().getXMLMetadata();
|
||||
container.get().validateMetadata();
|
||||
if (!getTestContainer().getHaveMetadataBeenRequested()) {
|
||||
getTestContainer().getXMLMetadata();
|
||||
getTestContainer().validateMetadata();
|
||||
}
|
||||
|
||||
if (!container.get().hasValidMetadata()) {
|
||||
if (!getTestContainer().hasValidMetadata()) {
|
||||
LOG.error(getDefaultErrorMessage("Valid metadata could not be retrieved from the server! Please check the log for more information."));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
@ -1076,13 +1056,13 @@ public class WebAPIServerCore implements En {
|
|||
assumeTrue("Skipping Test: using IsFlags enumerations.", useCollections);
|
||||
|
||||
final String
|
||||
resolvedFieldName = Settings.resolveParametersString(fieldName, container.get().getSettings()),
|
||||
resolvedResourceName = Settings.resolveParametersString(resourceName, container.get().getSettings());
|
||||
resolvedFieldName = Settings.resolveParametersString(fieldName, getTestContainer().getSettings()),
|
||||
resolvedResourceName = Settings.resolveParametersString(resourceName, getTestContainer().getSettings());
|
||||
|
||||
assertNotNull(getDefaultErrorMessage("resolved field name for parameter", "'" + fieldName + "'", "was null!"), resolvedFieldName);
|
||||
assertNotNull(getDefaultErrorMessage("resolved resource name for parameter", "'" + resourceName + "'", "was null!"), resolvedResourceName);
|
||||
|
||||
CsdlProperty csdlProperty = container.get().getCsdlProperty(resolvedResourceName, resolvedFieldName);
|
||||
CsdlProperty csdlProperty = getTestContainer().getCsdlProperty(resolvedResourceName, resolvedFieldName);
|
||||
|
||||
assertNotNull(getDefaultErrorMessage("could not find metadata item for", "'" + resolvedResourceName + "'", "and", "'" + resolvedFieldName + "'!"),
|
||||
csdlProperty);
|
||||
|
@ -1095,37 +1075,42 @@ public class WebAPIServerCore implements En {
|
|||
|
||||
}
|
||||
|
||||
static WebAPITestContainer getTestContainer() {
|
||||
return container.get();
|
||||
}
|
||||
|
||||
/*
|
||||
* Execute Get Request Wrapper
|
||||
*/
|
||||
void prepareAndExecuteRawGetRequest(String requestId) {
|
||||
try {
|
||||
//reset local state each time a get request is run
|
||||
container.get().resetState();
|
||||
//reset local state each time a get request request is run
|
||||
getTestContainer().resetState();
|
||||
|
||||
assertNotNull(getDefaultErrorMessage("request Id cannot be null!"), requestId);
|
||||
|
||||
Request request = container.get().getSettings().getRequest(requestId);
|
||||
Request request = getTestContainer().getSettings().getRequest(requestId);
|
||||
|
||||
if (request == null) {
|
||||
throw new Exception(getDefaultErrorMessage("request for requestId:", requestId, "was null!"));
|
||||
}
|
||||
|
||||
container.get().setRequest(container.get().getSettings().getRequest(requestId));
|
||||
getTestContainer().setRequest(getTestContainer().getSettings().getRequest(requestId));
|
||||
LOG.info("Request ID: " + requestId);
|
||||
|
||||
URI requestUri = prepareURI(container.get().getRequest().getRequestUrl());
|
||||
URI requestUri = prepareURI(getTestContainer().getRequest().getRequestUrl());
|
||||
|
||||
//prepare request URI
|
||||
container.get().setRequestUri(requestUri);
|
||||
scenario.log("Request URI: " + container.get().getRequestUri().toString());
|
||||
getTestContainer().setRequestUri(requestUri);
|
||||
LOG.info("Request URI: " + getTestContainer().getRequestUri().toString());
|
||||
|
||||
//execute request
|
||||
container.get().executePreparedRawGetRequest();
|
||||
getTestContainer().executePreparedRawGetRequest();
|
||||
} catch (MalformedURLException urlException) {
|
||||
LOG.info("Malformed URL was thrown in " + this.getClass() + ": " + urlException + "\nSkipping Request!");
|
||||
LOG.info("Malformed URL was thrown in " + this.getClass() + ": " + urlException.toString()
|
||||
+ "\nSkipping Request!");
|
||||
} catch (Exception ex) {
|
||||
LOG.info("Exception was thrown in " + this.getClass() + "!\n" + ex);
|
||||
LOG.info("Exception was thrown in " + this.getClass() + "!\n" + ex.toString());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1137,29 +1122,33 @@ public class WebAPIServerCore implements En {
|
|||
* Background
|
||||
*/
|
||||
Given("^a RESOScript file was provided$", () -> {
|
||||
if (container.get().getPathToRESOScript() == null) {
|
||||
container.get().setPathToRESOScript(System.getProperty("pathToRESOScript"));
|
||||
LOG.info("Using RESOScript: " + container.get().getPathToRESOScript());
|
||||
if (getTestContainer().getPathToRESOScript() == null) {
|
||||
getTestContainer().setPathToRESOScript(System.getProperty("pathToRESOScript"));
|
||||
LOG.info("Using RESOScript: " + getTestContainer().getPathToRESOScript());
|
||||
}
|
||||
assertNotNull(getDefaultErrorMessage("pathToRESOScript must be present in command arguments, see README"),
|
||||
container.get().getPathToRESOScript());
|
||||
getTestContainer().getPathToRESOScript());
|
||||
});
|
||||
|
||||
And("^Client Settings and Parameters were read from the file$", () -> {
|
||||
if (container.get().getSettings() == null) {
|
||||
container.get().setSettings(Settings.loadFromRESOScript(new File(System.getProperty("pathToRESOScript"))));
|
||||
if (getTestContainer().getSettings() == null) {
|
||||
getTestContainer().setSettings(Settings.loadFromRESOScript(new File(System.getProperty("pathToRESOScript"))));
|
||||
|
||||
getTestContainer().getSettings().setRequests(loadFromRESOScript(new File(Objects.requireNonNull(
|
||||
getClass().getClassLoader().getResource(WEB_API_CORE_REFERENCE_REQUESTS)).getPath()))
|
||||
.parallelStream().map(request -> Settings.resolveParameters(request, getTestContainer().getSettings())).collect(Collectors.toList()));
|
||||
|
||||
LOG.info("Test configuration loaded successfully!");
|
||||
}
|
||||
assertNotNull(getDefaultErrorMessage("Settings could not be loaded."), container.get().getSettings());
|
||||
assertNotNull(getDefaultErrorMessage("Settings could not be loaded."), getTestContainer().getSettings());
|
||||
});
|
||||
|
||||
Given("^a test container was successfully created from the given RESOScript$", () -> {
|
||||
if (!container.get().getIsInitialized()) {
|
||||
container.get().initialize();
|
||||
if (container.get().getCommander().isAuthTokenClient()) {
|
||||
if (!getTestContainer().getIsInitialized()) {
|
||||
getTestContainer().initialize();
|
||||
if (getTestContainer().getCommander().isAuthTokenClient()) {
|
||||
LOG.info("Authentication Type: authorization_code");
|
||||
} else if (container.get().getCommander().isOAuth2Client()) {
|
||||
} else if (getTestContainer().getCommander().isOAuth2Client()) {
|
||||
LOG.info("Authentication Type: client_credentials");
|
||||
}
|
||||
}
|
||||
|
@ -1169,10 +1158,10 @@ public class WebAPIServerCore implements En {
|
|||
* Ensures that the client either uses Authorization Codes or Client Credentials
|
||||
*/
|
||||
And("^the test container uses an authorization_code or client_credentials for authentication$", () -> {
|
||||
assertNotNull(container.get().getCommander());
|
||||
assertNotNull(getTestContainer().getCommander());
|
||||
assertTrue(getDefaultErrorMessage("Commander must either have a valid Authorization Code or Client Credentials configuration."),
|
||||
container.get().getCommander().isAuthTokenClient()
|
||||
|| (container.get().getCommander().isOAuth2Client() && container.get().getCommander().hasValidAuthConfig()));
|
||||
getTestContainer().getCommander().isAuthTokenClient()
|
||||
|| (getTestContainer().getCommander().isOAuth2Client() && getTestContainer().getCommander().hasValidAuthConfig()));
|
||||
});
|
||||
}
|
||||
}
|
|
@ -3,7 +3,6 @@ package org.reso.commander;
|
|||
import org.apache.commons.cli.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.commons.api.edm.Edm;
|
||||
import org.apache.olingo.commons.api.format.ContentType;
|
||||
import org.reso.certification.codegen.*;
|
||||
import org.reso.models.ClientSettings;
|
||||
|
@ -14,12 +13,10 @@ import org.reso.models.Settings;
|
|||
import java.io.File;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.reso.commander.Commander.*;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
import static org.reso.commander.common.Utils.getTimestamp;
|
||||
import static org.reso.commander.common.XMLMetadataToJSONSchemaSerializer.convertEdmToJsonSchemaDocuments;
|
||||
|
||||
/**
|
||||
* Entry point of the RESO Web API Commander, which is a command line OData client that uses the Java Olingo
|
||||
|
@ -82,7 +79,7 @@ public class App {
|
|||
//if we're running a batch, initialize variables from the settings file rather than from command line options
|
||||
Settings settings = null;
|
||||
|
||||
LOG.debug("Service Root is: " + commanderBuilder.serviceRoot);
|
||||
LOG.debug("Service Root is:" + commanderBuilder.serviceRoot);
|
||||
|
||||
//If the RESOScript option was passed, then the correct commander instance should exist at this point
|
||||
if (cmd.hasOption(APP_OPTIONS.ACTIONS.RUN_RESOSCRIPT)) {
|
||||
|
@ -245,11 +242,9 @@ public class App {
|
|||
} catch (Exception ex) {
|
||||
LOG.error(getDefaultErrorMessage(ex));
|
||||
}
|
||||
} else if (cmd.hasOption(APP_OPTIONS.ACTIONS.GENERATE_JSON_SCHEMAS_FROM_XML_METADATA)) {
|
||||
} else if (cmd.hasOption(APP_OPTIONS.ACTIONS.GENERATE_SEED_DATA_SQL)) {
|
||||
try {
|
||||
Edm edm = deserializeEdmFromPath(inputFilename, commander.getClient());
|
||||
final Map<String, String> jsonSchemaMap = convertEdmToJsonSchemaDocuments(edm);
|
||||
//jsonSchemaMap.forEach((model, jsonSchema) -> LOG.info("Model is: " + model + "\nSchema is: " + jsonSchema));
|
||||
DataDictionarySeedDataSqlGenerator generator = new DataDictionarySeedDataSqlGenerator();
|
||||
} catch (Exception ex) {
|
||||
LOG.error(getDefaultErrorMessage(ex));
|
||||
}
|
||||
|
@ -435,7 +430,7 @@ public class App {
|
|||
}
|
||||
} else if (action.matches(ACTIONS.GENERATE_QUERIES)) {
|
||||
validationResponse = validateOptions(cmd, INPUT_FILE);
|
||||
} else if (action.matches(ACTIONS.GENERATE_JSON_SCHEMAS_FROM_XML_METADATA)) {
|
||||
} else if (action.matches(ACTIONS.GENERATE_REFERENCE_DDL) || action.matches(ACTIONS.GENERATE_SEED_DATA_SQL)) {
|
||||
validationResponse = validateOptions(cmd, INPUT_FILE);
|
||||
}
|
||||
|
||||
|
@ -535,14 +530,14 @@ public class App {
|
|||
.desc("Runs commands in RESOScript file given as <inputFile>.").build())
|
||||
.addOption(Option.builder().argName("t").longOpt(ACTIONS.GENERATE_DD_ACCEPTANCE_TESTS)
|
||||
.desc("Generates acceptance tests in the current directory.").build())
|
||||
.addOption(Option.builder().argName("j").longOpt(ACTIONS.GENERATE_JSON_SCHEMAS_FROM_XML_METADATA)
|
||||
.desc("Generates JSON Schema documents from the given XML metadata.").build())
|
||||
.addOption(Option.builder().argName("i").longOpt(ACTIONS.GENERATE_RESOURCE_INFO_MODELS)
|
||||
.desc("Generates Java Models for the Web API Reference Server in the current directory.").build())
|
||||
.addOption(Option.builder().argName("r").longOpt(ACTIONS.GENERATE_REFERENCE_EDMX)
|
||||
.desc("Generates reference metadata in EDMX format.").build())
|
||||
.addOption(Option.builder().argName("k").longOpt(ACTIONS.GENERATE_REFERENCE_DDL)
|
||||
.desc("Generates reference DDL to create a RESO-compliant SQL database. Pass --useKeyNumeric to generate the DB using numeric keys.").build())
|
||||
.addOption(Option.builder().argName("d").longOpt(ACTIONS.GENERATE_SEED_DATA_SQL)
|
||||
.desc("Generates SQL statements to seed data (Data Dictionary 1.7). Pass --useKeyNumeric to generate the DB using numeric keys.").build())
|
||||
.addOption(Option.builder().argName("m").longOpt(ACTIONS.GET_METADATA)
|
||||
.desc("Fetches metadata from <serviceRoot> using <bearerToken> and saves results in <outputFile>.").build())
|
||||
.addOption(Option.builder().argName("g").longOpt(ACTIONS.GENERATE_METADATA_REPORT)
|
||||
|
@ -575,7 +570,7 @@ public class App {
|
|||
public static final String GENERATE_DD_ACCEPTANCE_TESTS = "generateDDAcceptanceTests";
|
||||
public static final String GENERATE_REFERENCE_EDMX = "generateReferenceEDMX";
|
||||
public static final String GENERATE_REFERENCE_DDL = "generateReferenceDDL";
|
||||
public static final String GENERATE_JSON_SCHEMAS_FROM_XML_METADATA = "generateJSONSchemasFromXMLMetadata";
|
||||
public static final String GENERATE_SEED_DATA_SQL = "generateSeedDataSql";
|
||||
public static final String GENERATE_QUERIES = "generateQueries";
|
||||
public static final String RUN_RESOSCRIPT = "runRESOScript";
|
||||
public static final String GET_METADATA = "getMetadata";
|
||||
|
|
|
@ -19,7 +19,7 @@ import org.apache.olingo.commons.api.format.ContentType;
|
|||
import org.reso.auth.OAuth2HttpClientFactory;
|
||||
import org.reso.auth.TokenHttpClientFactory;
|
||||
import org.reso.commander.common.TestUtils;
|
||||
import org.reso.commander.jsonSerializers.MetadataReport;
|
||||
import org.reso.models.MetadataReport;
|
||||
import org.reso.models.ODataTransportWrapper;
|
||||
import org.reso.models.Request;
|
||||
import org.xml.sax.*;
|
||||
|
@ -196,7 +196,7 @@ public class Commander {
|
|||
targetReportFile = new File(fileName.replaceAll(".edmx|.xml", EMPTY_STRING) + ".metadata-report.json");
|
||||
} else {
|
||||
//place unnamed files in the build directory
|
||||
targetReportFile = new File("build" + File.separator + "certification" + File.separator + "results", DEFAULT_FILENAME);
|
||||
targetReportFile = new File("build", DEFAULT_FILENAME);
|
||||
}
|
||||
|
||||
FileUtils.copyInputStreamToFile(new ByteArrayInputStream(gsonBuilder.create().toJson(report).getBytes()), targetReportFile);
|
||||
|
@ -549,24 +549,27 @@ public class Commander {
|
|||
* @return a URI with the metadata path included
|
||||
*/
|
||||
public URI getPathToMetadata(String requestUri) {
|
||||
if (requestUri == null || requestUri.length() == 0) {
|
||||
TestUtils.failAndExitWithErrorMessage("OData service root is missing!", LOG);
|
||||
} else {
|
||||
try {
|
||||
String uri = requestUri;
|
||||
if (!requestUri.contains(METADATA_PATH)) {
|
||||
uri += METADATA_PATH;
|
||||
}
|
||||
return new URI(uri).normalize();
|
||||
} catch (Exception ex) {
|
||||
TestUtils.failAndExitWithErrorMessage("Could not create metadata URI.\n\t" + ex, LOG);
|
||||
}
|
||||
if (requestUri == null) {
|
||||
LOG.error(getDefaultErrorMessage("service root is null!"));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = requestUri;
|
||||
if (!requestUri.contains(METADATA_PATH)) {
|
||||
uri += METADATA_PATH;
|
||||
}
|
||||
return new URI(uri);
|
||||
} catch (Exception ex) {
|
||||
LOG.error(getDefaultErrorMessage("could not create path to metadata.\n" + ex.toString()));
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes an OData GET Request with the current Commander instance
|
||||
* Executes an OData GET Request w ith the current Commander instance
|
||||
* @param wrapper the OData transport wrapper to use for the request
|
||||
* @return and OData transport wrapper with the response, or exception if one was thrown
|
||||
*/
|
||||
|
|
|
@ -11,191 +11,182 @@ import static org.reso.certification.containers.WebAPITestContainer.EMPTY_STRING
|
|||
import static org.reso.commander.common.DataDictionaryMetadata.v1_7.WELL_KNOWN_RESOURCE_KEYS.*;
|
||||
|
||||
public class DataDictionaryMetadata {
|
||||
private static final Logger LOG = LogManager.getLogger(DataDictionaryMetadata.class);
|
||||
private static final Logger LOG = LogManager.getLogger(DataDictionaryMetadata.class);
|
||||
|
||||
public static final class v1_7 {
|
||||
//TODO: clean up
|
||||
public static final Set<String> WELL_KNOWN_RESOURCES = new LinkedHashSet<>(Arrays.asList(
|
||||
PROPERTY,
|
||||
MEMBER,
|
||||
OFFICE,
|
||||
CONTACTS,
|
||||
CONTACT_LISTINGS,
|
||||
HISTORY_TRANSACTIONAL,
|
||||
INTERNET_TRACKING,
|
||||
MEDIA,
|
||||
OPEN_HOUSE,
|
||||
OUID,
|
||||
PROSPECTING,
|
||||
QUEUE,
|
||||
RULES,
|
||||
SAVED_SEARCH,
|
||||
SHOWING,
|
||||
TEAMS,
|
||||
TEAM_MEMBERS,
|
||||
CONTACT_LISTING_NOTES,
|
||||
OTHER_PHONE,
|
||||
PROPERTY_GREEN_VERIFICATION,
|
||||
PROPERTY_POWER_PRODUCTION,
|
||||
PROPERTY_ROOMS,
|
||||
PROPERTY_UNIT_TYPES,
|
||||
SOCIAL_MEDIA,
|
||||
FIELD,
|
||||
LOOKUP
|
||||
));
|
||||
public static final String LOOKUP_FIELDS_AND_VALUES = "Lookup Fields and Values";
|
||||
public static final class v1_7 {
|
||||
//TODO: clean up
|
||||
public static final Set<String> WELL_KNOWN_RESOURCES = new LinkedHashSet<>(Arrays.asList(
|
||||
PROPERTY,
|
||||
MEMBER,
|
||||
OFFICE,
|
||||
CONTACTS,
|
||||
CONTACT_LISTINGS,
|
||||
HISTORY_TRANSACTIONAL,
|
||||
INTERNET_TRACKING,
|
||||
MEDIA,
|
||||
OPEN_HOUSE,
|
||||
OUID,
|
||||
PROSPECTING,
|
||||
QUEUE,
|
||||
RULES,
|
||||
SAVED_SEARCH,
|
||||
SHOWING,
|
||||
TEAMS,
|
||||
TEAM_MEMBERS,
|
||||
CONTACT_LISTING_NOTES,
|
||||
OTHER_PHONE,
|
||||
PROPERTY_GREEN_VERIFICATION,
|
||||
PROPERTY_POWER_PRODUCTION,
|
||||
PROPERTY_ROOMS,
|
||||
PROPERTY_UNIT_TYPES,
|
||||
SOCIAL_MEDIA
|
||||
));
|
||||
public static final String LOOKUP_FIELDS_AND_VALUES = "Lookup Fields and Values";
|
||||
|
||||
//TODO: clean up
|
||||
public static class WELL_KNOWN_RESOURCE_KEYS {
|
||||
public static final String
|
||||
PROPERTY = "Property",
|
||||
MEMBER = "Member",
|
||||
OFFICE = "Office",
|
||||
CONTACTS = "Contacts",
|
||||
CONTACT_LISTINGS = "ContactListings",
|
||||
HISTORY_TRANSACTIONAL = "HistoryTransactional",
|
||||
INTERNET_TRACKING = "InternetTracking",
|
||||
MEDIA = "Media",
|
||||
OPEN_HOUSE = "OpenHouse",
|
||||
OUID = "OUID",
|
||||
PROSPECTING = "Prospecting",
|
||||
QUEUE = "Queue",
|
||||
RULES = "Rules",
|
||||
SAVED_SEARCH = "SavedSearch",
|
||||
SHOWING = "Showing",
|
||||
TEAMS = "Teams",
|
||||
TEAM_MEMBERS = "TeamMembers",
|
||||
CONTACT_LISTING_NOTES = "ContactListingNotes",
|
||||
OTHER_PHONE = "OtherPhone",
|
||||
PROPERTY_GREEN_VERIFICATION = "PropertyGreenVerification",
|
||||
PROPERTY_POWER_PRODUCTION = "PropertyPowerProduction",
|
||||
PROPERTY_ROOMS = "PropertyRooms",
|
||||
PROPERTY_UNIT_TYPES = "PropertyUnitTypes",
|
||||
SOCIAL_MEDIA = "SocialMedia",
|
||||
FIELD = "Field",
|
||||
LOOKUP = "Lookup";
|
||||
//TODO: clean up
|
||||
public static class WELL_KNOWN_RESOURCE_KEYS {
|
||||
public static final String
|
||||
PROPERTY = "Property",
|
||||
MEMBER = "Member",
|
||||
OFFICE = "Office",
|
||||
CONTACTS = "Contacts",
|
||||
CONTACT_LISTINGS = "ContactListings",
|
||||
HISTORY_TRANSACTIONAL = "HistoryTransactional",
|
||||
INTERNET_TRACKING = "InternetTracking",
|
||||
MEDIA = "Media",
|
||||
OPEN_HOUSE = "OpenHouse",
|
||||
OUID = "OUID",
|
||||
PROSPECTING = "Prospecting",
|
||||
QUEUE = "Queue",
|
||||
RULES = "Rules",
|
||||
SAVED_SEARCH = "SavedSearch",
|
||||
SHOWING = "Showing",
|
||||
TEAMS = "Teams",
|
||||
TEAM_MEMBERS = "TeamMembers",
|
||||
CONTACT_LISTING_NOTES = "ContactListingNotes",
|
||||
OTHER_PHONE = "OtherPhone",
|
||||
PROPERTY_GREEN_VERIFICATION = "PropertyGreenVerification",
|
||||
PROPERTY_POWER_PRODUCTION = "PropertyPowerProduction",
|
||||
PROPERTY_ROOMS = "PropertyRooms",
|
||||
PROPERTY_UNIT_TYPES = "PropertyUnitTypes",
|
||||
SOCIAL_MEDIA = "SocialMedia";
|
||||
}
|
||||
|
||||
public static Boolean isPrimaryKeyField(String resource, String fieldName) {
|
||||
return getKeyFieldForResource(resource).contentEquals(fieldName);
|
||||
}
|
||||
public static String getKeyFieldForResource(String resourceName) {
|
||||
switch (resourceName) {
|
||||
case PROPERTY:
|
||||
return "ListingKey";
|
||||
case MEMBER:
|
||||
return "MemberKey";
|
||||
case OFFICE:
|
||||
return "OfficeKey";
|
||||
case CONTACTS:
|
||||
case CONTACT_LISTING_NOTES:
|
||||
return "ContactKey";
|
||||
case CONTACT_LISTINGS:
|
||||
return "ContactListingsKey";
|
||||
case HISTORY_TRANSACTIONAL:
|
||||
return "HistoryTransactionalKey";
|
||||
case INTERNET_TRACKING:
|
||||
return "EventKey";
|
||||
case MEDIA:
|
||||
return "MediaKey";
|
||||
case OPEN_HOUSE:
|
||||
return "OpenHouseKey";
|
||||
case OUID:
|
||||
return "OrganizationUniqueIdKey";
|
||||
case PROSPECTING:
|
||||
return "ProspectingKey";
|
||||
case QUEUE:
|
||||
return "QueueTransactionKey";
|
||||
case RULES:
|
||||
return "RuleKey";
|
||||
case SAVED_SEARCH:
|
||||
return "SavedSearchKey";
|
||||
case SHOWING:
|
||||
return "ShowingKey";
|
||||
case TEAMS:
|
||||
return "TeamKey";
|
||||
case TEAM_MEMBERS:
|
||||
return "TeamMemberKey";
|
||||
case OTHER_PHONE:
|
||||
return "OtherPhoneKey";
|
||||
case PROPERTY_GREEN_VERIFICATION:
|
||||
return "GreenBuildingVerificationKey";
|
||||
case PROPERTY_POWER_PRODUCTION:
|
||||
return "PowerProductionKey";
|
||||
case PROPERTY_ROOMS:
|
||||
return "RoomKey";
|
||||
case PROPERTY_UNIT_TYPES:
|
||||
return "UnitTypeKey";
|
||||
case SOCIAL_MEDIA:
|
||||
return "SocialMediaKey";
|
||||
default:
|
||||
LOG.error("Cannot find key name for resource: " + resourceName);
|
||||
return EMPTY_STRING;
|
||||
}
|
||||
}
|
||||
|
||||
public static Boolean isPrimaryKeyNumericField(String resource, String fieldName) {
|
||||
return getKeyNumericFieldForResource(resource).contentEquals(fieldName);
|
||||
}
|
||||
|
||||
public static String getKeyNumericFieldForResource(String resourceName) {
|
||||
switch (resourceName) {
|
||||
case PROPERTY:
|
||||
return "ListingKeyNumeric";
|
||||
case MEMBER:
|
||||
return "MemberKeyNumeric";
|
||||
case OFFICE:
|
||||
return "OfficeKeyNumeric";
|
||||
case CONTACTS:
|
||||
case CONTACT_LISTING_NOTES:
|
||||
return "ContactKeyNumeric";
|
||||
case CONTACT_LISTINGS:
|
||||
return "ContactListingsKeyNumeric";
|
||||
case HISTORY_TRANSACTIONAL:
|
||||
return "HistoryTransactionalKeyNumeric";
|
||||
case INTERNET_TRACKING:
|
||||
return "EventKeyNumeric";
|
||||
case MEDIA:
|
||||
return "MediaKeyNumeric";
|
||||
case OPEN_HOUSE:
|
||||
return "OpenHouseKeyNumeric";
|
||||
case OUID:
|
||||
return "OrganizationUniqueIdKeyNumeric";
|
||||
case PROSPECTING:
|
||||
return "ProspectingKeyNumeric";
|
||||
case QUEUE:
|
||||
return "QueueTransactionKeyNumeric";
|
||||
case RULES:
|
||||
return "RuleKeyNumeric";
|
||||
case SAVED_SEARCH:
|
||||
return "SavedSearchKeyNumeric";
|
||||
case SHOWING:
|
||||
return "ShowingKeyNumeric";
|
||||
case TEAMS:
|
||||
return "TeamKeyNumeric";
|
||||
case TEAM_MEMBERS:
|
||||
return "TeamMemberKeyNumeric";
|
||||
case OTHER_PHONE:
|
||||
return "OtherPhoneKeyNumeric";
|
||||
case PROPERTY_GREEN_VERIFICATION:
|
||||
return "GreenBuildingVerificationKeyNumeric";
|
||||
case PROPERTY_POWER_PRODUCTION:
|
||||
return "PowerProductionKeyNumeric";
|
||||
case PROPERTY_ROOMS:
|
||||
return "RoomKeyNumeric";
|
||||
case PROPERTY_UNIT_TYPES:
|
||||
return "UnitTypeKeyNumeric";
|
||||
case SOCIAL_MEDIA:
|
||||
return "SocialMediaKeyNumeric";
|
||||
default:
|
||||
LOG.error("Cannot find key name for resource: " + resourceName);
|
||||
return EMPTY_STRING;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static Boolean isPrimaryKeyField(String resource, String fieldName) {
|
||||
return getKeyFieldForResource(resource).contentEquals(fieldName);
|
||||
}
|
||||
|
||||
public static String getKeyFieldForResource(String resourceName) {
|
||||
switch (resourceName) {
|
||||
case PROPERTY:
|
||||
return "ListingKey";
|
||||
case MEMBER:
|
||||
return "MemberKey";
|
||||
case OFFICE:
|
||||
return "OfficeKey";
|
||||
case CONTACTS:
|
||||
case CONTACT_LISTING_NOTES:
|
||||
return "ContactKey";
|
||||
case CONTACT_LISTINGS:
|
||||
return "ContactListingsKey";
|
||||
case HISTORY_TRANSACTIONAL:
|
||||
return "HistoryTransactionalKey";
|
||||
case INTERNET_TRACKING:
|
||||
return "EventKey";
|
||||
case MEDIA:
|
||||
return "MediaKey";
|
||||
case OPEN_HOUSE:
|
||||
return "OpenHouseKey";
|
||||
case OUID:
|
||||
return "OrganizationUniqueIdKey";
|
||||
case PROSPECTING:
|
||||
return "ProspectingKey";
|
||||
case QUEUE:
|
||||
return "QueueTransactionKey";
|
||||
case RULES:
|
||||
return "RuleKey";
|
||||
case SAVED_SEARCH:
|
||||
return "SavedSearchKey";
|
||||
case SHOWING:
|
||||
return "ShowingKey";
|
||||
case TEAMS:
|
||||
return "TeamKey";
|
||||
case TEAM_MEMBERS:
|
||||
return "TeamMemberKey";
|
||||
case OTHER_PHONE:
|
||||
return "OtherPhoneKey";
|
||||
case PROPERTY_GREEN_VERIFICATION:
|
||||
return "GreenBuildingVerificationKey";
|
||||
case PROPERTY_POWER_PRODUCTION:
|
||||
return "PowerProductionKey";
|
||||
case PROPERTY_ROOMS:
|
||||
return "RoomKey";
|
||||
case PROPERTY_UNIT_TYPES:
|
||||
return "UnitTypeKey";
|
||||
case SOCIAL_MEDIA:
|
||||
return "SocialMediaKey";
|
||||
case FIELD:
|
||||
return "FieldKey";
|
||||
case LOOKUP:
|
||||
return "LookupKey";
|
||||
default:
|
||||
LOG.error("Cannot find key name for resource: " + resourceName);
|
||||
return EMPTY_STRING;
|
||||
}
|
||||
}
|
||||
|
||||
public static Boolean isPrimaryKeyNumericField(String resource, String fieldName) {
|
||||
return getKeyNumericFieldForResource(resource).contentEquals(fieldName);
|
||||
}
|
||||
|
||||
public static String getKeyNumericFieldForResource(String resourceName) {
|
||||
switch (resourceName) {
|
||||
case PROPERTY:
|
||||
return "ListingKeyNumeric";
|
||||
case MEMBER:
|
||||
return "MemberKeyNumeric";
|
||||
case OFFICE:
|
||||
return "OfficeKeyNumeric";
|
||||
case CONTACTS:
|
||||
case CONTACT_LISTING_NOTES:
|
||||
return "ContactKeyNumeric";
|
||||
case CONTACT_LISTINGS:
|
||||
return "ContactListingsKeyNumeric";
|
||||
case HISTORY_TRANSACTIONAL:
|
||||
return "HistoryTransactionalKeyNumeric";
|
||||
case INTERNET_TRACKING:
|
||||
return "EventKeyNumeric";
|
||||
case MEDIA:
|
||||
return "MediaKeyNumeric";
|
||||
case OPEN_HOUSE:
|
||||
return "OpenHouseKeyNumeric";
|
||||
case OUID:
|
||||
return "OrganizationUniqueIdKeyNumeric";
|
||||
case PROSPECTING:
|
||||
return "ProspectingKeyNumeric";
|
||||
case QUEUE:
|
||||
return "QueueTransactionKeyNumeric";
|
||||
case RULES:
|
||||
return "RuleKeyNumeric";
|
||||
case SAVED_SEARCH:
|
||||
return "SavedSearchKeyNumeric";
|
||||
case SHOWING:
|
||||
return "ShowingKeyNumeric";
|
||||
case TEAMS:
|
||||
return "TeamKeyNumeric";
|
||||
case TEAM_MEMBERS:
|
||||
return "TeamMemberKeyNumeric";
|
||||
case OTHER_PHONE:
|
||||
return "OtherPhoneKeyNumeric";
|
||||
case PROPERTY_GREEN_VERIFICATION:
|
||||
return "GreenBuildingVerificationKeyNumeric";
|
||||
case PROPERTY_POWER_PRODUCTION:
|
||||
return "PowerProductionKeyNumeric";
|
||||
case PROPERTY_ROOMS:
|
||||
return "RoomKeyNumeric";
|
||||
case PROPERTY_UNIT_TYPES:
|
||||
return "UnitTypeKeyNumeric";
|
||||
case SOCIAL_MEDIA:
|
||||
return "SocialMediaKeyNumeric";
|
||||
default:
|
||||
LOG.error("Cannot find key name for resource: " + resourceName);
|
||||
return EMPTY_STRING;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,328 +0,0 @@
|
|||
package org.reso.commander.common;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.api.ODataClient;
|
||||
import org.apache.olingo.client.api.communication.request.retrieve.ODataEntitySetRequest;
|
||||
import org.apache.olingo.client.api.communication.response.ODataRetrieveResponse;
|
||||
import org.apache.olingo.client.api.domain.ClientEntity;
|
||||
import org.apache.olingo.client.api.domain.ClientEntitySet;
|
||||
import org.apache.olingo.client.api.http.HttpClientException;
|
||||
import org.apache.olingo.commons.api.format.ContentType;
|
||||
import org.apache.olingo.commons.api.http.HttpStatusCode;
|
||||
import org.reso.certification.containers.WebAPITestContainer;
|
||||
|
||||
import java.net.URI;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
public class ODataFetchApi {
|
||||
|
||||
private static final Logger LOG = LogManager.getLogger(ODataFetchApi.class);
|
||||
|
||||
final static int DEFAULT_PAGE_SIZE = 1000;
|
||||
private final static String FILTER_DESCENDING_TEMPLATE = "?$filter=%s" + " lt %s&$orderby=%s desc";
|
||||
private final static String FILTER_ASCENDING_INIT_TEMPLATE = "?$orderby=%s asc";
|
||||
private final static String FILTER_ASCENDING_TEMPLATE = "?$filter=%s" + " gt %s&$orderby=%s asc";
|
||||
private final static String TOP_QUERY_PARAMETER = "&$top=" + DEFAULT_PAGE_SIZE;
|
||||
final static String DEFAULT_TIMESTAMP_FIELD = "ModificationTimestamp";
|
||||
|
||||
|
||||
/***
|
||||
* Gets the total count for the given resource.
|
||||
* @param container the test container.
|
||||
* @param resourceName the name of the resource to get the count for.
|
||||
* @return the total available number of available records.
|
||||
*/
|
||||
public static Integer getResourceCount(WebAPITestContainer container, String resourceName) {
|
||||
final ODataClient client = container.getCommander().getClient();
|
||||
client.getConfiguration().setDefaultPubFormat(ContentType.APPLICATION_JSON);
|
||||
|
||||
ODataEntitySetRequest<ClientEntitySet> request = client.getRetrieveRequestFactory()
|
||||
.getEntitySetRequest(client.newURIBuilder(container.getServiceRoot())
|
||||
.appendEntitySetSegment(resourceName).count(true).top(1).build());
|
||||
|
||||
final ODataRetrieveResponse<ClientEntitySet> response = request.execute();
|
||||
Integer count = 0;
|
||||
|
||||
if (response != null && response.getStatusCode() == HttpStatusCode.OK.getStatusCode() && response.getBody() != null) {
|
||||
count = response.getBody().getCount();
|
||||
LOG.info("Total Count is: " + count);
|
||||
} else {
|
||||
LOG.debug("Couldn't get count! Returning 0.");
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Contains the replication strategies available for the fetch client replication methods.
|
||||
*/
|
||||
public enum WebApiReplicationStrategy {
|
||||
ModificationTimestampDescending,
|
||||
ModificationTimestampAscending,
|
||||
TopAndSkip
|
||||
}
|
||||
|
||||
/**
|
||||
* Replicates data using the given WebApiReplicationStrategy
|
||||
*
|
||||
* @param container the test container
|
||||
* @param resourceName the name of the resource to replicate from
|
||||
* @param strategy the replication strategy
|
||||
* @return a list of ClientEntity items that were replicated.
|
||||
* @throws Exception exceptions are thrown with messages so that the caller can respond and exit or continue,
|
||||
* as needed. Clients can use the included message for the reason for the error.
|
||||
*/
|
||||
public static List<ClientEntity> replicateDataFromResource(WebAPITestContainer container, String resourceName, WebApiReplicationStrategy strategy)
|
||||
throws Exception {
|
||||
LOG.info("Checking metadata for resource: " + resourceName);
|
||||
if (container.getXMLMetadata().getSchemas().parallelStream()
|
||||
.anyMatch(item -> item.getEntityType(resourceName) != null)) {
|
||||
|
||||
LOG.info("Replicating data from " + resourceName + " using strategy: " + strategy.toString());
|
||||
if (strategy == WebApiReplicationStrategy.TopAndSkip)
|
||||
return replicateUsingTopAndSkip(container, resourceName);
|
||||
|
||||
if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending)
|
||||
return replicateUsingModificationTimestampField(container, resourceName, WebApiReplicationStrategy.ModificationTimestampDescending);
|
||||
|
||||
if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending)
|
||||
return replicateUsingModificationTimestampField(container, resourceName, WebApiReplicationStrategy.ModificationTimestampAscending);
|
||||
|
||||
} else {
|
||||
throw new Exception(resourceName + " resource was not found in metadata!");
|
||||
}
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Implementation of an OData client using a TopAndSkip replication strategy.
|
||||
*
|
||||
* @param container the test container.
|
||||
* @param resourceName the name of the resource to replicate from.
|
||||
* @return a list of ClientEntity items that were replicated.
|
||||
* @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations
|
||||
* should bubble any relevant errors up.
|
||||
*/
|
||||
private static List<ClientEntity> replicateUsingTopAndSkip(WebAPITestContainer container, String resourceName) throws Exception {
|
||||
final ODataClient client = container.getCommander().getClient();
|
||||
final String serviceRoot = container.getServiceRoot();
|
||||
|
||||
int pageSize = DEFAULT_PAGE_SIZE;
|
||||
final Integer resourceCount = ODataFetchApi.getResourceCount(container, resourceName);
|
||||
|
||||
final ArrayList<ClientEntity> entities = new ArrayList<>();
|
||||
try {
|
||||
for (int skipAmount = 0; pageSize > 0 && entities.size() <= resourceCount; skipAmount += pageSize) {
|
||||
final URI requestUri = client.newURIBuilder(serviceRoot).appendEntitySetSegment(resourceName).top(pageSize).skip(skipAmount).build();
|
||||
final ODataRetrieveResponse<ClientEntitySet> response = client.getRetrieveRequestFactory().getEntitySetRequest(requestUri).execute();
|
||||
|
||||
LOG.info("Fetching " + resourceName + " Resource data from URL: " + requestUri.toString());
|
||||
|
||||
if (response != null && response.getStatusCode() == HttpStatusCode.OK.getStatusCode() && response.getBody() != null) {
|
||||
pageSize = response.getBody().getEntities().size();
|
||||
if (pageSize > 0) {
|
||||
entities.addAll(response.getBody().getEntities());
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (HttpClientException httpClientException) {
|
||||
final String message = "Could not retrieve data from the " + resourceName + " resource!" + httpClientException.getMessage();
|
||||
LOG.error(message);
|
||||
LOG.error("Cause " + httpClientException.getCause().getMessage());
|
||||
|
||||
throw new Exception(message);
|
||||
}
|
||||
LOG.info("Total records fetched: " + entities.size());
|
||||
return entities;
|
||||
}
|
||||
|
||||
/**
|
||||
* Default ModificationTimestamp replication client.
|
||||
*
|
||||
* @param container the test container.
|
||||
* @param resourceName the name of the resource to replicate from.
|
||||
* @param strategy the replication strategy, either desc or asc.
|
||||
* @return a list of ClientEntity items that were replicated.
|
||||
* @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations
|
||||
* should bubble any relevant errors up.
|
||||
*/
|
||||
private static List<ClientEntity> replicateUsingModificationTimestampField(WebAPITestContainer container, String resourceName, WebApiReplicationStrategy strategy) throws Exception {
|
||||
return replicateUsingTimestampField(container, resourceName, DEFAULT_TIMESTAMP_FIELD, strategy);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* General timestamp replication client.
|
||||
*
|
||||
* @param container the test container.
|
||||
* @param resourceName the name of the resource to replicate from.
|
||||
* @param timestampField the name of the timestamp field to use for comparisons.
|
||||
* @param strategy the replication strantegy, either asc or desc.
|
||||
* @return a list of ClientEntity items that were replicated.
|
||||
* @throws Exception exceptions are thrown so that their messages can be used to fail or continue. Implementations
|
||||
* should bubble any relevant errors up.
|
||||
*/
|
||||
private static List<ClientEntity> replicateUsingTimestampField(WebAPITestContainer container, String resourceName, String timestampField, WebApiReplicationStrategy strategy) throws Exception {
|
||||
final ODataClient client = container.getCommander().getClient();
|
||||
final AtomicReference<OffsetDateTime> lastOffsetDateTime = new AtomicReference<>(OffsetDateTime.now());
|
||||
final int MAX_RETRIES = 3;
|
||||
final int RETRY_SKIP_MS = 1;
|
||||
int numRetries = 0;
|
||||
|
||||
final Integer resourceCount = ODataFetchApi.getResourceCount(container, resourceName);
|
||||
final Set<ClientEntity> entities = new HashSet<>();
|
||||
boolean isInitialRequest = true;
|
||||
try {
|
||||
do {
|
||||
URI requestUri;
|
||||
if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending) {
|
||||
requestUri = TestUtils.prepareUri(buildTimestampDescendingFilterRequestUri(container, resourceName, timestampField, lastOffsetDateTime.get()));
|
||||
} else if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending) {
|
||||
if (isInitialRequest) {
|
||||
requestUri = TestUtils.prepareUri(buildTimestampAscendingInitFilterRequestUri(container, resourceName, timestampField));
|
||||
} else {
|
||||
requestUri = TestUtils.prepareUri(buildTimestampAscendingFilterRequestUri(container, resourceName, timestampField, lastOffsetDateTime.get()));
|
||||
}
|
||||
} else {
|
||||
throw new Exception("Unsupported WebApiReplicationStrategy: " + strategy);
|
||||
}
|
||||
|
||||
LOG.info("Fetching " + resourceName + " Resource data from URL: " + requestUri.toString());
|
||||
final ODataRetrieveResponse<ClientEntitySet> response = client.getRetrieveRequestFactory().getEntitySetRequest(requestUri).execute();
|
||||
final List<ClientEntity> currentPage = response.getBody().getEntities();
|
||||
|
||||
if (currentPage.size() == 0) {
|
||||
LOG.error("Page contained no records, exiting! Request URI: " + requestUri.toString());
|
||||
break;
|
||||
} else {
|
||||
for (ClientEntity clientEntity : currentPage) {
|
||||
try {
|
||||
if (entities.contains(clientEntity)) {
|
||||
LOG.error("Duplicate page detected!");
|
||||
LOG.error("Last Timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT));
|
||||
numRetries++;
|
||||
|
||||
if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending) {
|
||||
LOG.error("\t--> Subtracting " + RETRY_SKIP_MS + "ms from last timestamp...");
|
||||
lastOffsetDateTime.set(lastOffsetDateTime.get().minus(RETRY_SKIP_MS, ChronoUnit.MILLIS));
|
||||
} else {
|
||||
LOG.error("\t--> Adding " + RETRY_SKIP_MS + "ms to last timestamp...");
|
||||
lastOffsetDateTime.set(lastOffsetDateTime.get().plus(RETRY_SKIP_MS, ChronoUnit.MILLIS));
|
||||
}
|
||||
break;
|
||||
} else {
|
||||
entities.add(clientEntity);
|
||||
OffsetDateTime currentOffsetDateTime = OffsetDateTime.parse(clientEntity.getProperty(timestampField).getValue().toString());
|
||||
if (strategy == WebApiReplicationStrategy.ModificationTimestampDescending && currentOffsetDateTime.isBefore(lastOffsetDateTime.get())) {
|
||||
LOG.debug("Current " + timestampField + " field timestamp is: " + currentOffsetDateTime.format(DateTimeFormatter.ISO_INSTANT));
|
||||
LOG.debug("Found earlier timestamp! Last timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT) + "\n");
|
||||
lastOffsetDateTime.set(currentOffsetDateTime);
|
||||
} else if (strategy == WebApiReplicationStrategy.ModificationTimestampAscending) {
|
||||
if (!isInitialRequest && currentOffsetDateTime.isAfter(lastOffsetDateTime.get())) {
|
||||
LOG.debug("Current " + timestampField + " field timestamp is: " + currentOffsetDateTime.format(DateTimeFormatter.ISO_INSTANT));
|
||||
LOG.debug("Found later timestamp! Last timestamp: " + lastOffsetDateTime.get().format(DateTimeFormatter.ISO_INSTANT) + "\n");
|
||||
}
|
||||
lastOffsetDateTime.set(currentOffsetDateTime);
|
||||
}
|
||||
}
|
||||
} catch (DateTimeParseException exception) {
|
||||
LOG.error(exception);
|
||||
throw new Exception("Could not convert " + timestampField + " to timestamp value!");
|
||||
}
|
||||
}
|
||||
}
|
||||
isInitialRequest = false;
|
||||
} while (entities.size() <= resourceCount && numRetries < MAX_RETRIES);
|
||||
|
||||
if (numRetries >= MAX_RETRIES) {
|
||||
LOG.warn("Exceeded maximum number of retries (" + MAX_RETRIES + ")! ");
|
||||
}
|
||||
|
||||
if (entities.size() != resourceCount) {
|
||||
throw new Exception("Could not fetch all records!\n\tTotal Count: " + resourceCount + ". Records fetched: " + entities.size());
|
||||
}
|
||||
|
||||
LOG.info("Records fetched: " + entities.size());
|
||||
|
||||
} catch (HttpClientException httpClientException) {
|
||||
final String message = "Could not retrieve data from the " + resourceName + " resource!" + httpClientException.getMessage();
|
||||
LOG.error(message);
|
||||
LOG.error("Cause " + httpClientException.getCause().getMessage());
|
||||
|
||||
throw new Exception(message);
|
||||
}
|
||||
return new ArrayList<>(entities);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Builds a request URI string, taking into account whether the sampling is being done with an optional
|
||||
* filter, for instance in the shared systems case
|
||||
*
|
||||
* @param resourceName the resource name to query
|
||||
* @param timestampField the timestamp field for the resource
|
||||
* @param lastFetchedDate the last fetched date for filtering
|
||||
* @return a string OData query used for sampling
|
||||
*/
|
||||
public static String buildTimestampDescendingFilterRequestUri(WebAPITestContainer container, String resourceName,
|
||||
String timestampField, OffsetDateTime lastFetchedDate) {
|
||||
String requestUri = container.getCommander().getClient()
|
||||
.newURIBuilder(container.getServiceRoot())
|
||||
.appendEntitySetSegment(resourceName).build().toString();
|
||||
|
||||
requestUri += String.format(FILTER_DESCENDING_TEMPLATE + TOP_QUERY_PARAMETER, timestampField,
|
||||
lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField);
|
||||
|
||||
return requestUri;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a request URI string, taking into account whether the sampling is being done with an optional
|
||||
* filter, for instance in the shared systems case
|
||||
*
|
||||
* @param resourceName the resource name to query
|
||||
* @param timestampField the timestamp field for the resource
|
||||
* @param lastFetchedDate the last fetched date for filtering
|
||||
* @return a string OData query used for sampling
|
||||
*/
|
||||
public static String buildTimestampAscendingFilterRequestUri(WebAPITestContainer container, String resourceName,
|
||||
String timestampField, OffsetDateTime lastFetchedDate) {
|
||||
String requestUri = container.getCommander().getClient()
|
||||
.newURIBuilder(container.getServiceRoot())
|
||||
.appendEntitySetSegment(resourceName).build().toString();
|
||||
|
||||
requestUri += String.format(FILTER_ASCENDING_TEMPLATE + TOP_QUERY_PARAMETER, timestampField,
|
||||
lastFetchedDate.format(DateTimeFormatter.ISO_INSTANT), timestampField);
|
||||
|
||||
return requestUri;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a request URI string, taking into account whether the sampling is being done with an optional
|
||||
* filter, for instance in the shared systems case
|
||||
*
|
||||
* @param resourceName the resource name to query
|
||||
* @param timestampField the timestamp field for the resource
|
||||
* @return a string OData query used for sampling
|
||||
*/
|
||||
public static String buildTimestampAscendingInitFilterRequestUri(WebAPITestContainer container, String resourceName,
|
||||
String timestampField) {
|
||||
String requestUri = container.getCommander().getClient()
|
||||
.newURIBuilder(container.getServiceRoot())
|
||||
.appendEntitySetSegment(resourceName).build().toString();
|
||||
|
||||
requestUri += String.format(FILTER_ASCENDING_INIT_TEMPLATE + TOP_QUERY_PARAMETER, timestampField);
|
||||
|
||||
return requestUri;
|
||||
}
|
||||
}
|
|
@ -1,206 +0,0 @@
|
|||
package org.reso.commander.common;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.JsonArray;
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.api.ODataClient;
|
||||
import org.apache.olingo.client.api.domain.ClientEntity;
|
||||
import org.apache.olingo.client.api.serialization.ODataSerializerException;
|
||||
import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation;
|
||||
import org.apache.olingo.client.core.serialization.JsonSerializer;
|
||||
import org.apache.olingo.commons.api.edm.*;
|
||||
import org.apache.olingo.commons.api.format.ContentType;
|
||||
import org.apache.olingo.commons.core.edm.EdmAnnotationImpl;
|
||||
import org.apache.olingo.commons.core.edm.EdmPropertyImpl;
|
||||
|
||||
import java.io.StringWriter;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class ODataUtils {
|
||||
|
||||
private static final Logger LOG = LogManager.getLogger(ODataUtils.class);
|
||||
|
||||
public static EdmElement getEdmElement(Edm edm, String resourceName, String fieldName) {
|
||||
final Optional<EdmEntitySet> entitySet = Optional.ofNullable(edm.getEntityContainer().getEntitySet(resourceName));
|
||||
|
||||
if (entitySet.isPresent()) {
|
||||
final EdmElement fieldEdm = entitySet.get().getEntityTypeWithAnnotations().getProperty(fieldName);
|
||||
if (fieldEdm != null && fieldEdm.getType().getFullQualifiedName().toString().contentEquals(EdmPrimitiveTypeKind.String.getFullQualifiedName().toString())) {
|
||||
LOG.debug("\nFound field with resource: " + resourceName + " and standard name: " + fieldName);
|
||||
LOG.debug("\t\t Data type is: " + fieldEdm.getType().getFullQualifiedName().toString() + (fieldEdm.isCollection() ? ", Collection: true" : ""));
|
||||
return fieldEdm;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether the element has the given term.
|
||||
*
|
||||
* @param element the Edm element to check.
|
||||
* @param annotationTerm the term to search for.
|
||||
* @return true if the Edm element contains the annotationTerm, false otherwise.
|
||||
*/
|
||||
public static boolean hasAnnotationTerm(EdmElement element, String annotationTerm) {
|
||||
return Optional.ofNullable(getAnnotationValue(element, annotationTerm)).isPresent();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the annotation value for the given annotation term.
|
||||
*
|
||||
* @param element the Edm element to check.
|
||||
* @param annotationTerm the term to search for.
|
||||
* @return a string value, if present, otherwise null.
|
||||
*/
|
||||
public static String getAnnotationValue(EdmElement element, String annotationTerm) {
|
||||
if (element == null || annotationTerm == null) return null;
|
||||
|
||||
final Optional<EdmAnnotation> foundAnnotation = Optional.of((EdmPropertyImpl) element).get().getAnnotations().stream()
|
||||
.filter(edmAnnotation -> {
|
||||
final SneakyAnnotationReader annotationReader = new SneakyAnnotationReader(edmAnnotation);
|
||||
return annotationReader.getTerm() != null && annotationReader.getTerm().contentEquals(annotationTerm);
|
||||
}).findFirst();
|
||||
|
||||
if (foundAnnotation.isPresent()) {
|
||||
final Optional<String> value = Optional.ofNullable(foundAnnotation.get().getExpression().asConstant().getValueAsString());
|
||||
|
||||
if (value.isPresent()) {
|
||||
LOG.debug("Found \"" + annotationTerm + "\" annotation! Value is: " + value);
|
||||
return value.get();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serializes a list of OData ClientEntity items in a JSON Array with those properties.
|
||||
*
|
||||
* @param lookups list of OData ClientEntity results
|
||||
* @param client OData client to use as serializer
|
||||
* @return a JsonArray of results
|
||||
*/
|
||||
public static JsonObject serializeLookupMetadata(ODataClient client, List<ClientEntity> lookups) {
|
||||
final String
|
||||
DESCRIPTION_KEY = "description", DESCRIPTION = "Data Dictionary Lookup Resource Metadata",
|
||||
VERSION_KEY = "version", VERSION = "1.7",
|
||||
GENERATED_ON_KEY = "generatedOn",
|
||||
LOOKUPS_KEY = "lookups";
|
||||
|
||||
JsonObject metadataReport = new JsonObject();
|
||||
metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION);
|
||||
metadataReport.addProperty(VERSION_KEY, VERSION);
|
||||
metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp());
|
||||
|
||||
final JsonArray lookupsArray = new JsonArray();
|
||||
|
||||
try {
|
||||
final Gson gson = new Gson();
|
||||
final JsonSerializer jsonSerializer = new JsonSerializer(false, ContentType.APPLICATION_JSON);
|
||||
lookups.forEach(clientEntity -> {
|
||||
try {
|
||||
StringWriter writer = new StringWriter();
|
||||
jsonSerializer.write(writer, client.getBinder().getEntity(clientEntity));
|
||||
Optional<JsonElement> element = Optional.ofNullable(gson.fromJson(writer.toString(), JsonElement.class));
|
||||
element.ifPresent(lookupsArray::add);
|
||||
} catch (ODataSerializerException e) {
|
||||
LOG.error("ERROR: could not deserialize. Exception: " + e);
|
||||
}
|
||||
});
|
||||
} catch (Exception exception) {
|
||||
LOG.error(exception);
|
||||
}
|
||||
|
||||
metadataReport.add(LOOKUPS_KEY, lookupsArray);
|
||||
return metadataReport;
|
||||
}
|
||||
|
||||
//TODO: Only output the field metadata in DEBUG mode
|
||||
public static JsonObject serializeFieldMetadataForLookupFields(Map<String, Set<EdmElement>> resourceFieldMap) {
|
||||
//TODO: migrate to test file
|
||||
final String LOOKUP_ANNOTATION_TERM = "RESO.OData.Metadata.LookupName";
|
||||
|
||||
final String
|
||||
DESCRIPTION_KEY = "description", DESCRIPTION = "Data Dictionary Lookup Resource Annotated Fields Metadata",
|
||||
VERSION_KEY = "version", VERSION = "1.7",
|
||||
GENERATED_ON_KEY = "generatedOn",
|
||||
FIELDS_KEY = "fields";
|
||||
|
||||
JsonObject metadataReport = new JsonObject();
|
||||
metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION);
|
||||
metadataReport.addProperty(VERSION_KEY, VERSION);
|
||||
metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp());
|
||||
|
||||
JsonArray fieldsArray = new JsonArray();
|
||||
resourceFieldMap.forEach((resourceName, fieldElements) -> fieldElements.forEach(
|
||||
fieldElement -> {
|
||||
JsonObject fieldObject = new JsonObject();
|
||||
fieldObject.addProperty("resourceName", resourceName);
|
||||
fieldObject.addProperty("fieldName:", fieldElement.getName());
|
||||
fieldObject.addProperty("type", getAnnotationValue(fieldElement, LOOKUP_ANNOTATION_TERM));
|
||||
fieldsArray.add(fieldObject);
|
||||
}
|
||||
));
|
||||
|
||||
metadataReport.add(FIELDS_KEY, fieldsArray);
|
||||
return metadataReport;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a Map of EntityDataModel (Edm) elements and annotation value with the given annotation term.
|
||||
*/
|
||||
public static Map<String, Set<EdmElement>> getEdmElementsWithAnnotation(Edm edm, String annotationTerm) {
|
||||
return edm.getSchemas().parallelStream()
|
||||
.filter(edmSchema -> edmSchema != null && edmSchema.getEntityContainer() != null)
|
||||
.flatMap(edmSchema -> edmSchema.getEntityContainer().getEntitySets().parallelStream())
|
||||
.collect(Collectors.toMap(edmEntitySet -> edmEntitySet.getEntityTypeWithAnnotations().getName(),
|
||||
edmEntitySet -> edmEntitySet.getEntityTypeWithAnnotations().getPropertyNames().parallelStream()
|
||||
.map(propertyName -> edmEntitySet.getEntityTypeWithAnnotations().getProperty(propertyName))
|
||||
.filter(edmElement -> getAnnotationValue(edmElement, annotationTerm) != null)
|
||||
.collect(Collectors.toSet())));
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to read OData internal annotation variables.
|
||||
*/
|
||||
public static class SneakyAnnotationReader {
|
||||
Class<? extends EdmAnnotationImpl> object;
|
||||
Field field;
|
||||
EdmAnnotationImpl edmAnnotationImpl;
|
||||
ClientCsdlAnnotation clientCsdlAnnotation;
|
||||
|
||||
/**
|
||||
* Allows the consumer to read internal annotations.
|
||||
*
|
||||
* @param edmAnnotation the annotation to read from
|
||||
*/
|
||||
public SneakyAnnotationReader(EdmAnnotation edmAnnotation) {
|
||||
try {
|
||||
edmAnnotationImpl = ((EdmAnnotationImpl) edmAnnotation);
|
||||
|
||||
// create an object of the class named Class
|
||||
object = edmAnnotationImpl.getClass();
|
||||
|
||||
// access the private variable
|
||||
field = object.getDeclaredField("annotation");
|
||||
// make private field accessible
|
||||
field.setAccessible(true);
|
||||
|
||||
clientCsdlAnnotation = (ClientCsdlAnnotation) field.get(edmAnnotationImpl);
|
||||
|
||||
} catch (Exception ex) {
|
||||
LOG.error(ex);
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
public String getTerm() {
|
||||
return clientCsdlAnnotation.getTerm();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -4,7 +4,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
|
|||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import io.cucumber.java.Scenario;
|
||||
import org.apache.http.Header;
|
||||
import org.apache.http.NameValuePair;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.api.communication.ODataClientErrorException;
|
||||
|
@ -19,8 +18,6 @@ import org.apache.olingo.commons.api.edm.provider.CsdlSchema;
|
|||
import org.apache.olingo.commons.core.edm.primitivetype.EdmDate;
|
||||
import org.apache.olingo.commons.core.edm.primitivetype.EdmDateTimeOffset;
|
||||
import org.apache.olingo.commons.core.edm.primitivetype.EdmTimeOfDay;
|
||||
import org.reso.certification.codegen.DDCacheProcessor;
|
||||
import org.reso.certification.codegen.DataDictionaryCodeGenerator;
|
||||
import org.reso.certification.containers.WebAPITestContainer;
|
||||
import org.reso.commander.Commander;
|
||||
import org.reso.models.Settings;
|
||||
|
@ -65,7 +62,7 @@ public final class TestUtils {
|
|||
return URI.create(
|
||||
queryString.replace(" ", "%20")
|
||||
/* add other handlers here */
|
||||
).normalize();
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -560,16 +557,21 @@ public final class TestUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* Helper method to find headers with a given key in an array of headers
|
||||
* Helper method to find headers with a given key in an an array of headers
|
||||
*
|
||||
* @param key the header to get
|
||||
* @param headers an array containing Header objects
|
||||
* @return the value of the header with key, or null
|
||||
*/
|
||||
public static String getHeaderData(String key, Collection<Header> headers) {
|
||||
return headers.stream()
|
||||
.filter(header -> header.getName().toLowerCase().contains(key.toLowerCase()))
|
||||
.findFirst().map(NameValuePair::getValue).orElse(null);
|
||||
String data = null;
|
||||
|
||||
for (Header header : headers) {
|
||||
if (header.getName().toLowerCase().contains(key.toLowerCase())) {
|
||||
data = header.getValue();
|
||||
}
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -581,7 +583,14 @@ public final class TestUtils {
|
|||
*/
|
||||
public static String getHeaderData(String key, ODataResponse oDataResponse) {
|
||||
if (key == null || oDataResponse.getHeader(key) == null) return null;
|
||||
return oDataResponse.getHeader(key).stream().reduce(String::concat).orElse(null);
|
||||
ArrayList<String> result = new ArrayList<>(oDataResponse.getHeader(key));
|
||||
|
||||
if (result.size() > 0) {
|
||||
return result.get(0);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -701,7 +710,7 @@ public final class TestUtils {
|
|||
*/
|
||||
public static String convertInputStreamToString(InputStream inputStream) {
|
||||
try {
|
||||
InputStreamReader isReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
|
||||
InputStreamReader isReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8.name());
|
||||
BufferedReader reader = new BufferedReader(isReader);
|
||||
StringBuilder sb = new StringBuilder();
|
||||
String str;
|
||||
|
@ -759,98 +768,100 @@ public final class TestUtils {
|
|||
|
||||
/**
|
||||
* Asserts that metadata in the given container are valid. Fetches metadata if not present in the container.
|
||||
*
|
||||
* @param container a test container with a valid config that metadata can be fetched into
|
||||
*/
|
||||
public static void assertValidXMLMetadata(WebAPITestContainer container, Scenario scenario) {
|
||||
public static void assertValidXMLMetadata(WebAPITestContainer container) {
|
||||
try {
|
||||
if (!container.getHaveMetadataBeenRequested()) {
|
||||
//will lazy-load metadata from the server if not yet requested
|
||||
container.fetchXMLMetadata();
|
||||
}
|
||||
container.validateMetadata();
|
||||
if (!container.getIsValidXMLMetadata()) {
|
||||
failAndExitWithErrorMessage("Invalid XML Metadata! Service root: " + container.getServiceRoot(), scenario);
|
||||
}
|
||||
assertTrue("XML Metadata at the given service root is not valid! " + container.getServiceRoot(),
|
||||
container.getIsValidXMLMetadata());
|
||||
} catch (Exception ex) {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage(ex), scenario);
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the given container has XML Metadata that contains an Entity Data Model (Edm)
|
||||
*
|
||||
* @param container the container with XML metadata to validate
|
||||
*/
|
||||
public static void assertXmlMetadataContainsEdm(WebAPITestContainer container, Scenario scenario) {
|
||||
public static void assertXmlMetadataContainsEdm(WebAPITestContainer container) {
|
||||
container.setEdm(Commander.deserializeEdm(container.getXMLResponseData(), container.getCommander().getClient()));
|
||||
if (container.getEdm() == null) {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage("Edm de-serialized to an empty object!"), scenario);
|
||||
}
|
||||
assertNotNull(getDefaultErrorMessage("Edm de-serialized to an empty object!"), container.getEdm());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the Edm in the given container are valid
|
||||
* @param container the container with the XML Metadata to check
|
||||
*/
|
||||
public static void assertValidEdm(WebAPITestContainer container) {
|
||||
assertTrue("Edm Metadata at the given service root is not valid! " + container.getServiceRoot(),
|
||||
container.getIsValidEdm());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that XML Metadata are retrieved from the server
|
||||
*
|
||||
* @param container the container to retrieve metadata with
|
||||
*/
|
||||
public static void assertXMLMetadataAreRequestedFromTheServer(WebAPITestContainer container, Scenario scenario) {
|
||||
if (container == null || container.getCommander() == null) {
|
||||
failAndExitWithErrorMessage("Cannot create Commander instance!", LOG);
|
||||
failAndExitWithErrorMessage("Cannot create Commander instance!", scenario);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!container.getHaveMetadataBeenRequested()) {
|
||||
final String serviceRoot = Settings.resolveParametersString(container.getServiceRoot(), container.getSettings());
|
||||
if (!serviceRoot.contentEquals(container.getCommander().getServiceRoot())) {
|
||||
failAndExitWithErrorMessage("Given service root doesn't match the one configured in the Commander", scenario);
|
||||
failAndExitWithErrorMessage("given service root doesn't match the one configured in the Commander", scenario);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (container.fetchXMLMetadata() == null) {
|
||||
failAndExitWithErrorMessage("Could not retrieve valid XML Metadata for given service root: "
|
||||
+ serviceRoot, LOG);
|
||||
failAndExitWithErrorMessage("could not retrieve valid XML Metadata for given service root: " + serviceRoot, scenario);
|
||||
}
|
||||
|
||||
} catch (ODataClientErrorException cex) {
|
||||
container.setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
failAndExitWithErrorMessage("Could not retrieve valid XML Metadata for given service root: "
|
||||
+ serviceRoot + "\n\tException: " + cex.getMessage(), LOG);
|
||||
failAndExitWithErrorMessage(cex.getMessage(), scenario);
|
||||
} catch (Exception ex) {
|
||||
failAndExitWithErrorMessage("Could not retrieve valid XML Metadata for given service root: "
|
||||
+ serviceRoot + "\n\tException: " + ex, LOG);
|
||||
failAndExitWithErrorMessage(ex.toString(), scenario);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the XML Response in the given container is valid XML
|
||||
* @param container the container with the XML response to validate
|
||||
*/
|
||||
public static void assertXMLResponseIsValidXML(WebAPITestContainer container) {
|
||||
assertNotNull(getDefaultErrorMessage("no XML Response data were found!"), container.getXMLResponseData());
|
||||
container.validateXMLMetadataXML();
|
||||
assertTrue(getDefaultErrorMessage("invalid XML response!"), container.getIsValidXMLMetadataXML());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the XML metadata in the given container has a valid service document
|
||||
*
|
||||
* @param container the container with XML Metadata to validate
|
||||
*/
|
||||
public static void assertXMLMetadataHasValidServiceDocument(WebAPITestContainer container, Scenario scenario) {
|
||||
public static void assertXMLMetadataHasValidServiceDocument(WebAPITestContainer container) {
|
||||
try {
|
||||
if (container == null || container.getEdm() == null || container.getEdm().getEntityContainer() == null) {
|
||||
final String serviceRoot = container != null && container.getServiceRoot() != null
|
||||
? container.getServiceRoot() : "<null>";
|
||||
|
||||
failAndExitWithErrorMessage("Could not find default entity container for given service root: " + serviceRoot, scenario);
|
||||
} else {
|
||||
LOG.info("Found Default Entity Container: '" + container.getEdm().getEntityContainer().getNamespace() + "'");
|
||||
}
|
||||
assertNotNull("ERROR: could not find default entity container for given service root: " +
|
||||
container.getServiceRoot(), container.getEdm().getEntityContainer());
|
||||
LOG.info("Found Default Entity Container: '" + container.getEdm().getEntityContainer().getNamespace() + "'");
|
||||
} catch (ODataClientErrorException cex) {
|
||||
if (container != null) {
|
||||
container.setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
}
|
||||
failAndExitWithErrorMessage(cex.toString(), scenario);
|
||||
container.setResponseCode(cex.getStatusLine().getStatusCode());
|
||||
fail(cex.toString());
|
||||
} catch (Exception ex) {
|
||||
failAndExitWithErrorMessage(getDefaultErrorMessage(ex), scenario);
|
||||
fail(getDefaultErrorMessage(ex));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that valid Metadata have been retrieved. Fetches metadata if not present.
|
||||
*
|
||||
* @param container a test container to validate
|
||||
*/
|
||||
public static void assertValidMetadataHaveBeenRetrieved(WebAPITestContainer container) {
|
||||
|
@ -869,9 +880,8 @@ public final class TestUtils {
|
|||
|
||||
/**
|
||||
* Validates that the given response data have a valid OData count
|
||||
*
|
||||
* @param responseData the data to check for a count against
|
||||
* @return true if the there is a count present, and it's greater than or equal to the number of results
|
||||
* @return true if the there is a count present and it's greater than or equal to the number of results
|
||||
*/
|
||||
public static boolean validateODataCount(String responseData) {
|
||||
List<String> items = from(responseData).getList(JSON_VALUE_PATH);
|
||||
|
@ -892,21 +902,21 @@ public final class TestUtils {
|
|||
* Contains the list of supported operators for use in query expressions.
|
||||
*/
|
||||
public static class Operators {
|
||||
public static final String
|
||||
AND = "and",
|
||||
OR = "or",
|
||||
NE = "ne",
|
||||
EQ = "eq",
|
||||
GREATER_THAN = "gt",
|
||||
GREATER_THAN_OR_EQUAL = "ge",
|
||||
HAS = "has",
|
||||
LESS_THAN = "lt",
|
||||
LESS_THAN_OR_EQUAL = "le",
|
||||
CONTAINS = "contains",
|
||||
ENDS_WITH = "endswith",
|
||||
STARTS_WITH = "startswith",
|
||||
TO_LOWER = "tolower",
|
||||
TO_UPPER = "toupper";
|
||||
public static final String
|
||||
AND = "and",
|
||||
OR = "or",
|
||||
NE = "ne",
|
||||
EQ = "eq",
|
||||
GREATER_THAN = "gt",
|
||||
GREATER_THAN_OR_EQUAL = "ge",
|
||||
HAS = "has",
|
||||
LESS_THAN = "lt",
|
||||
LESS_THAN_OR_EQUAL = "le",
|
||||
CONTAINS = "contains",
|
||||
ENDS_WITH = "endswith",
|
||||
STARTS_WITH = "startswith",
|
||||
TO_LOWER = "tolower",
|
||||
TO_UPPER = "toupper";
|
||||
}
|
||||
|
||||
public static final class DateParts {
|
||||
|
@ -955,26 +965,5 @@ public final class TestUtils {
|
|||
}
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
||||
public static void failAndExitWithErrorMessage(String msg, Logger logger) {
|
||||
if (logger != null) {
|
||||
logger.error(getDefaultErrorMessage(msg));
|
||||
}
|
||||
System.exit(NOT_OK);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a Data Dictionary Cache
|
||||
*
|
||||
* @return a DDProcessor Cache object
|
||||
*/
|
||||
public static DDCacheProcessor buildDataDictionaryCache() {
|
||||
LOG.info("Creating standard field cache...");
|
||||
final DDCacheProcessor cache = new DDCacheProcessor();
|
||||
DataDictionaryCodeGenerator generator = new DataDictionaryCodeGenerator(cache);
|
||||
generator.processWorksheets();
|
||||
LOG.info("Standard field cache created!");
|
||||
return cache;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,34 +1,18 @@
|
|||
package org.reso.commander.common;
|
||||
|
||||
import com.google.common.base.Functions;
|
||||
import com.google.gson.*;
|
||||
import io.cucumber.gherkin.internal.com.eclipsesource.json.Json;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.api.ODataClient;
|
||||
import org.apache.olingo.client.api.domain.ClientEntity;
|
||||
import org.apache.olingo.client.api.serialization.ODataSerializerException;
|
||||
import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation;
|
||||
import org.apache.olingo.client.core.serialization.JsonSerializer;
|
||||
import org.apache.olingo.commons.api.edm.Edm;
|
||||
import org.apache.olingo.commons.api.edm.EdmAnnotation;
|
||||
import org.apache.olingo.commons.api.edm.EdmElement;
|
||||
import org.apache.olingo.commons.api.format.ContentType;
|
||||
import org.apache.olingo.commons.core.edm.EdmAnnotationImpl;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.StringWriter;
|
||||
import java.lang.reflect.Field;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.*;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
|
||||
public class Utils {
|
||||
private static final Logger LOG = LogManager.getLogger(Utils.class);
|
||||
|
@ -40,13 +24,12 @@ public class Utils {
|
|||
* @return date string in yyyyMMddHHMMssS format
|
||||
*/
|
||||
public static String getTimestamp(Date date) {
|
||||
DateFormat df = new SimpleDateFormat("yyyyMMddHHmmssS");
|
||||
DateFormat df = new SimpleDateFormat("yyyyMMddHHMMssS");
|
||||
return df.format(date);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current timestamp
|
||||
*
|
||||
* @return the current timestamp returned as a string
|
||||
*/
|
||||
public static String getTimestamp() {
|
||||
|
@ -55,10 +38,9 @@ public class Utils {
|
|||
|
||||
/**
|
||||
* Creates a file in the given directory with the given content
|
||||
*
|
||||
* @param directoryName the directory name to create the file in
|
||||
* @param fileName the name of the file to create
|
||||
* @param content the content to write to the file
|
||||
* @param fileName the name of the file to create
|
||||
* @param content the content to write to the file
|
||||
*/
|
||||
public static File createFile(String directoryName, String fileName, String content) {
|
||||
if (directoryName == null || fileName == null) return null;
|
||||
|
@ -84,7 +66,6 @@ public class Utils {
|
|||
|
||||
/**
|
||||
* Creates a file in the given directory with the given content
|
||||
*
|
||||
* @param content the content to write to the file
|
||||
*/
|
||||
public static File createFile(String outputPath, String content) {
|
||||
|
@ -107,10 +88,9 @@ public class Utils {
|
|||
|
||||
/**
|
||||
* Removes a directory at the given pathToDirectory.
|
||||
* <p>
|
||||
*
|
||||
* If current user has write access then directory creation will result in True being returned.
|
||||
* Otherwise will return false if the directory couldn't be created for some reason.
|
||||
*
|
||||
* @param pathToDirectory
|
||||
* @return
|
||||
*/
|
||||
|
@ -156,24 +136,19 @@ public class Utils {
|
|||
}
|
||||
|
||||
public static String getIsoTimestamp() {
|
||||
return OffsetDateTime.now().format(DateTimeFormatter.ISO_INSTANT);
|
||||
return getIsoTimestamp(OffsetDateTime.now());
|
||||
}
|
||||
|
||||
public static String getIsoTimestamp(OffsetDateTime fromDate) {
|
||||
return OffsetDateTime.from(fromDate.toInstant()).format(DateTimeFormatter.ISO_INSTANT);
|
||||
return OffsetDateTime.from(fromDate).format(DateTimeFormatter.ISO_INSTANT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the difference of two generic sets.
|
||||
* @param a the minuend set
|
||||
* @param b the subtrahend set
|
||||
* @param <T> the type of set
|
||||
* @return Set of type T that contains A \ B
|
||||
*/
|
||||
public static <T> Set<T> getDifference(Set<T> a, Set<T> b) {
|
||||
return a.parallelStream()
|
||||
.filter(item -> !b.contains(item))
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toSet());
|
||||
public static String getIsoDate() {
|
||||
return getIsoDate(OffsetDateTime.now());
|
||||
}
|
||||
|
||||
public static String getIsoDate(OffsetDateTime fromDate) {
|
||||
return fromDate.format(DateTimeFormatter.ISO_DATE);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,144 +0,0 @@
|
|||
package org.reso.commander.common;
|
||||
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonSerializationContext;
|
||||
import com.google.gson.JsonSerializer;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.commons.api.edm.Edm;
|
||||
import org.apache.olingo.commons.api.edm.EdmElement;
|
||||
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class XMLMetadataToJSONSchemaSerializer implements JsonSerializer<XMLMetadataToJSONSchemaSerializer> {
|
||||
private static final Logger LOG = LogManager.getLogger(XMLMetadataToJSONSchemaSerializer.class);
|
||||
|
||||
private XMLMetadataToJSONSchemaSerializer() {
|
||||
//should not use default constructor
|
||||
}
|
||||
|
||||
/**
|
||||
* Gson invokes this call-back method during serialization when it encounters a field of the
|
||||
* specified type.
|
||||
*
|
||||
* <p>In the implementation of this call-back method, you should consider invoking
|
||||
* {@link JsonSerializationContext#serialize(Object, Type)} method to create JsonElements for any
|
||||
* non-trivial field of the {@code src} object. However, you should never invoke it on the
|
||||
* {@code src} object itself since that will cause an infinite loop (Gson will call your
|
||||
* call-back method again).</p>
|
||||
*
|
||||
* @param src the object that needs to be converted to Json.
|
||||
* @param typeOfSrc the actual type (fully genericized version) of the source object.
|
||||
* @param context
|
||||
* @return a JsonElement corresponding to the specified object.
|
||||
*/
|
||||
@Override
|
||||
public JsonElement serialize(XMLMetadataToJSONSchemaSerializer src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an OData Entity Data Model into a collection of JSON Schema 6 Documents
|
||||
*
|
||||
* @param edm
|
||||
* @return HashMap containing a collection of resource name, JSON Schema pairs
|
||||
*/
|
||||
public static Map<String, String> convertEdmToJsonSchemaDocuments(Edm edm) {
|
||||
final Map<String, String> jsonSchemas = Collections.synchronizedMap(new LinkedHashMap<>());
|
||||
|
||||
final String
|
||||
JSON_SCHEMA_RESOURCE_VALUE_WRAPPER =
|
||||
"{\n" +
|
||||
" \"$id\": \"https://reso.org/data-dictionary/schemas/1.7/%s\",\n" + /* resource name */
|
||||
" \"$schema\": \"https://json-schema.org/draft/2020-12/schema\",\n" +
|
||||
" \"type\": \"array\",\n" +
|
||||
" \"required\": [\"value\", \"@odata.context\" ],\n" +
|
||||
" \"properties\" : {\n" +
|
||||
" \"@odata.context\" : { \"type\": \"string\" }, \n" +
|
||||
" \"value\": { \"type\": \"array\",\n" +
|
||||
" \"items\": { \"$ref\": \"#/$defs/%s\" }, \n" + /* resource name */
|
||||
" }\n" +
|
||||
"},\n",
|
||||
JSON_SCHEMA_TEMPLATE_DEFS =
|
||||
"$defs: {\n" +
|
||||
" \"%s\": { \n" + /* resource name, string */
|
||||
" \"type\": \"object\",\n" +
|
||||
" \"required\" : [ %s ],\n" + /* key fields, string list with quotes */
|
||||
" \"properties\" : { \n" +
|
||||
" %s\n" + /* comma-separated JSON Schema type definition fragments */
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}\n";
|
||||
|
||||
edm.getSchemas().forEach(edmSchema -> {
|
||||
StringBuilder schemaDocument = new StringBuilder();
|
||||
|
||||
//serialize entities (resources) and members (fields)
|
||||
edmSchema.getEntityTypes().forEach(edmEntityType -> {
|
||||
edmEntityType.getPropertyNames().forEach(propertyName -> {
|
||||
final String jsonSchemaFragment = getJsonSchemaType(edmEntityType.getProperty(propertyName));
|
||||
if (jsonSchemaFragment != null) {
|
||||
schemaDocument
|
||||
.append(schemaDocument.length() > 0 ? ",\n" : "")
|
||||
.append(" \"")
|
||||
.append(propertyName)
|
||||
.append("\": ")
|
||||
.append(getJsonSchemaType(edmEntityType.getProperty(propertyName)));
|
||||
}
|
||||
});
|
||||
final String schemaString = String.format(JSON_SCHEMA_RESOURCE_VALUE_WRAPPER, edmEntityType.getName(), schemaDocument.toString());
|
||||
jsonSchemas.put(edmEntityType.getName(), schemaString);
|
||||
});
|
||||
|
||||
// //serialize enum types
|
||||
// edmSchema.getEnumTypes().forEach(edmEnumType -> {
|
||||
// edmEnumType.getMemberNames().forEach(memberName -> {
|
||||
//
|
||||
// });
|
||||
// });
|
||||
});
|
||||
return jsonSchemas;
|
||||
}
|
||||
|
||||
private static String getJsonSchemaType(EdmElement element) {
|
||||
final String fullyQualifiedName = element.getType().getFullQualifiedName().getFullQualifiedNameAsString();
|
||||
|
||||
final String
|
||||
EDM_STRING = EdmPrimitiveTypeKind.String.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_BINARY = EdmPrimitiveTypeKind.Binary.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_SBYTE = EdmPrimitiveTypeKind.SByte.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_DATE_TIME_OFFSET = EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_DATE = EdmPrimitiveTypeKind.Date.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_DECIMAL = EdmPrimitiveTypeKind.Decimal.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_INT_64 = EdmPrimitiveTypeKind.Int64.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_INT_32 = EdmPrimitiveTypeKind.Int32.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_INT_16 = EdmPrimitiveTypeKind.Int16.getFullQualifiedName().getFullQualifiedNameAsString(),
|
||||
EDM_BOOLEAN = EdmPrimitiveTypeKind.Boolean.getFullQualifiedName().getFullQualifiedNameAsString();
|
||||
|
||||
if (fullyQualifiedName.contentEquals(EDM_STRING)
|
||||
|| fullyQualifiedName.contentEquals(EDM_SBYTE)
|
||||
|| fullyQualifiedName.contentEquals(EDM_BINARY)) {
|
||||
return "{ \"type\" : \"string\" }";
|
||||
} else if (fullyQualifiedName.contentEquals(EDM_DATE_TIME_OFFSET)) {
|
||||
return "{ \"type\": \"string\", \"format\": \"date-time\" }";
|
||||
} else if (fullyQualifiedName.contentEquals(EDM_DATE)) {
|
||||
return "{ \"type\": \"string\", \"format\": \"date\" }";
|
||||
} else if (fullyQualifiedName.contentEquals(EDM_DECIMAL)
|
||||
|| fullyQualifiedName.contentEquals(EDM_INT_64)
|
||||
|| fullyQualifiedName.contentEquals(EDM_INT_32)
|
||||
|| fullyQualifiedName.contentEquals(EDM_INT_16)) {
|
||||
return "{ \"type\": \"number\" }";
|
||||
} else if (fullyQualifiedName.contentEquals(EDM_BOOLEAN)) {
|
||||
return "{ \"type\": \"boolean\" }";
|
||||
} else {
|
||||
LOG.error("Unsupported type mapping! Type:" + fullyQualifiedName);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,179 +0,0 @@
|
|||
package org.reso.commander.jsonSerializers;
|
||||
|
||||
import com.google.gson.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.commons.api.edm.EdmAnnotation;
|
||||
import org.apache.olingo.commons.api.edm.EdmElement;
|
||||
import org.apache.olingo.commons.api.edm.EdmProperty;
|
||||
import org.reso.commander.common.ODataUtils;
|
||||
import static org.reso.commander.common.TestUtils.failAndExitWithErrorMessage;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
/**
|
||||
* FieldJson uses a JSON payload with the following structure:
|
||||
* <p>
|
||||
* {
|
||||
* "resourceName": "Property",
|
||||
* "fieldName": "AboveGradeFinishedArea",
|
||||
* "type": "Edm.Decimal"
|
||||
* }
|
||||
*/
|
||||
public final class FieldJson implements JsonSerializer<FieldJson> {
|
||||
private static final Logger LOG = LogManager.getLogger(FieldJson.class);
|
||||
|
||||
static final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
FIELD_NAME_KEY = "fieldName",
|
||||
NULLABLE_KEY = "nullable",
|
||||
MAX_LENGTH_KEY = "maxLength",
|
||||
PRECISION_KEY = "precision",
|
||||
SCALE_KEY = "scale",
|
||||
IS_COLLECTION_KEY = "isCollection",
|
||||
DEFAULT_VALUE_KEY = "defaultValue",
|
||||
UNICODE_KEY = "unicode",
|
||||
TYPE_KEY = "type",
|
||||
TERM_KEY = "term",
|
||||
VALUE_KEY = "value",
|
||||
ANNOTATIONS_KEY = "annotations",
|
||||
FIELDS_KEY = "fields";
|
||||
|
||||
String resourceName;
|
||||
EdmElement edmElement;
|
||||
|
||||
/**
|
||||
* Constructor which takes an edmElement and reads the type from it, then
|
||||
* uses it as the resource name.
|
||||
* @param edmElement edmElement to create FieldJson for
|
||||
*/
|
||||
public FieldJson(EdmElement edmElement) {
|
||||
Optional<EdmElement> element = Optional.ofNullable(edmElement);
|
||||
assert element.isPresent() : "EdmElement cannot be null!";
|
||||
this.edmElement = edmElement;
|
||||
|
||||
Optional<String> resourceName = Optional.ofNullable(edmElement.getType().getName());
|
||||
assert resourceName.isPresent() : "Could not read name from edmElement type!";
|
||||
this.resourceName = resourceName.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor which takes an edmElement and reads the type from it, then
|
||||
* uses it as the resource name.
|
||||
* @param resourceName the resourceName the element belongs to
|
||||
* @param edmElement edmElement to create FieldJson for
|
||||
*/
|
||||
public FieldJson(String resourceName, EdmElement edmElement) {
|
||||
this.resourceName = resourceName;
|
||||
this.edmElement = edmElement;
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata Pretty Printer
|
||||
* @param metadataReport the metadata report
|
||||
* @return a human-friendly string version of the metadata report
|
||||
*/
|
||||
public static String buildReportString(JsonElement metadataReport) {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
metadataReport.getAsJsonObject().get(FIELDS_KEY).getAsJsonArray().forEach(field -> {
|
||||
reportBuilder.append("\nResource: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(RESOURCE_NAME_KEY));
|
||||
reportBuilder.append("\nField: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(FIELD_NAME_KEY));
|
||||
reportBuilder.append("\nType: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY));
|
||||
|
||||
if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) {
|
||||
JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
reportBuilder.append("\n");
|
||||
reportBuilder.append("Annotations:");
|
||||
annotations.forEach(annotation -> {
|
||||
if (annotation.getAsJsonObject().get(TERM_KEY) != null) {
|
||||
reportBuilder.append("\n\tTerm: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY));
|
||||
}
|
||||
|
||||
if (annotation.getAsJsonObject().get(VALUE_KEY) != null) {
|
||||
reportBuilder.append("\n\tValue: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
reportBuilder.append("\n");
|
||||
});
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(FieldJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject field = new JsonObject();
|
||||
|
||||
|
||||
field.addProperty(RESOURCE_NAME_KEY, src.resourceName);
|
||||
field.addProperty(FIELD_NAME_KEY, src.edmElement.getName());
|
||||
|
||||
String typeName;
|
||||
try {
|
||||
typeName = src.edmElement.getType().getFullQualifiedName().getFullQualifiedNameAsString();
|
||||
field.addProperty(TYPE_KEY, typeName);
|
||||
} catch (Exception ex) {
|
||||
//Issue #162: Need to fail on serialization exceptions since Olingo metadata validation might not catch them
|
||||
LOG.error(getDefaultErrorMessage("Field Name:", src.edmElement.getName(), ex.toString()));
|
||||
failAndExitWithErrorMessage(ex.toString(), LOG);
|
||||
}
|
||||
|
||||
field.addProperty(NULLABLE_KEY, ((EdmProperty) src.edmElement).isNullable());
|
||||
field.addProperty(MAX_LENGTH_KEY, ((EdmProperty) src.edmElement).getMaxLength());
|
||||
field.addProperty(SCALE_KEY, ((EdmProperty) src.edmElement).getScale());
|
||||
field.addProperty(PRECISION_KEY, ((EdmProperty) src.edmElement).getPrecision());
|
||||
field.addProperty(DEFAULT_VALUE_KEY, ((EdmProperty) src.edmElement).getDefaultValue());
|
||||
field.addProperty(IS_COLLECTION_KEY, src.edmElement.isCollection());
|
||||
field.addProperty(UNICODE_KEY, ((EdmProperty) src.edmElement).isUnicode());
|
||||
|
||||
//TODO: report issue to Apache
|
||||
// Can only get the annotation term using ((ClientCsdlAnnotation) ((EdmAnnotationImpl)edmAnnotation).annotatable).term
|
||||
// which a private member and cannot be accessed
|
||||
List<EdmAnnotation> annotations = ((EdmProperty) src.edmElement).getAnnotations();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
JsonArray annotationsJsonArray = new JsonArray();
|
||||
annotations.forEach(edmAnnotation -> {
|
||||
if (edmAnnotation.getExpression() != null) {
|
||||
if (edmAnnotation.getExpression().isConstant()) {
|
||||
JsonObject annotation = new JsonObject();
|
||||
if (edmAnnotation.getTerm() != null) {
|
||||
annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
} else {
|
||||
ODataUtils.SneakyAnnotationReader sneakyAnnotationReader = new ODataUtils.SneakyAnnotationReader(edmAnnotation);
|
||||
annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm());
|
||||
}
|
||||
annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString());
|
||||
annotationsJsonArray.add(annotation);
|
||||
} else if (edmAnnotation.getExpression().isDynamic()) {
|
||||
if (edmAnnotation.getExpression().asDynamic().isCollection()) {
|
||||
edmAnnotation.getExpression().asDynamic().asCollection().getItems().forEach(edmExpression -> {
|
||||
//OData Allowed Values come across as Records, in which case their key is "Value"
|
||||
if (edmExpression.asDynamic().isRecord()) {
|
||||
JsonObject annotation = new JsonObject();
|
||||
edmExpression.asDynamic().asRecord().getPropertyValues().forEach(edmPropertyValue -> {
|
||||
annotation.addProperty(TERM_KEY, edmPropertyValue.getProperty());
|
||||
annotation.addProperty(VALUE_KEY, edmPropertyValue.getValue().asConstant().getValueAsString());
|
||||
annotationsJsonArray.add(annotation);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
if (annotationsJsonArray.size() > 0) field.add(ANNOTATIONS_KEY, annotationsJsonArray);
|
||||
}
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
|
@ -1,101 +0,0 @@
|
|||
package org.reso.commander.jsonSerializers;
|
||||
|
||||
import com.google.gson.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.commons.api.edm.EdmEnumType;
|
||||
import org.reso.commander.common.ODataUtils;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
|
||||
/**
|
||||
* LookupJson uses a JSON payload with the following structure:
|
||||
* <p>
|
||||
* {
|
||||
* "lookupName": "org.reso.metadata.enums.CommunityFeatures",
|
||||
* "lookupValue": "Stables",
|
||||
* "type": "Edm.Int32"
|
||||
* }
|
||||
*/
|
||||
public final class LookupJson implements JsonSerializer<LookupJson> {
|
||||
private static final Logger LOG = LogManager.getLogger(LookupJson.class);
|
||||
|
||||
public static final String
|
||||
LOOKUP_NAME_KEY = "lookupName", LOOKUP_VALUE_KEY = "lookupValue",
|
||||
TYPE_KEY = "type", VALUE_KEY = "value", ANNOTATIONS_KEY = "annotations",
|
||||
LOOKUPS_KEY = "lookups", TERM_KEY = "term";
|
||||
|
||||
EdmEnumType edmEnumType;
|
||||
String memberName;
|
||||
|
||||
public LookupJson(String memberName, EdmEnumType edmEnumType) {
|
||||
this.edmEnumType = edmEnumType;
|
||||
this.memberName = memberName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata Pretty Printer
|
||||
* @param metadataReport the metadata report
|
||||
* @return a human-friendly string version of the metadata report
|
||||
*/
|
||||
public static String buildReportString(JsonElement metadataReport) {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
metadataReport.getAsJsonObject().get(LOOKUPS_KEY).getAsJsonArray().forEach(field -> {
|
||||
reportBuilder.append("\nLookup Name: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(LOOKUP_NAME_KEY));
|
||||
reportBuilder.append("\nLookup Value: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(LOOKUP_VALUE_KEY));
|
||||
reportBuilder.append("\nType: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY));
|
||||
|
||||
if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) {
|
||||
JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
reportBuilder.append("\n");
|
||||
reportBuilder.append("Annotations:");
|
||||
annotations.forEach(annotation -> {
|
||||
if (annotation.getAsJsonObject().get(TERM_KEY) != null) {
|
||||
reportBuilder.append("\n\tTerm: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY));
|
||||
}
|
||||
|
||||
if (annotation.getAsJsonObject().get(VALUE_KEY) != null) {
|
||||
reportBuilder.append("\n\tValue: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
reportBuilder.append("\n");
|
||||
});
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(LookupJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject membersJsonObject = new JsonObject();
|
||||
membersJsonObject.addProperty(LOOKUP_NAME_KEY, src.edmEnumType.getFullQualifiedName().toString());
|
||||
membersJsonObject.addProperty(LOOKUP_VALUE_KEY, src.memberName);
|
||||
membersJsonObject.addProperty(TYPE_KEY, src.edmEnumType.getUnderlyingType().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
|
||||
if (src.edmEnumType.getMember(memberName).getAnnotations().size() > 0) {
|
||||
JsonArray annotations = new JsonArray();
|
||||
src.edmEnumType.getMember(memberName).getAnnotations().forEach(edmAnnotation -> {
|
||||
JsonObject annotation = new JsonObject();
|
||||
if (edmAnnotation.getTerm() != null) {
|
||||
annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
} else {
|
||||
ODataUtils.SneakyAnnotationReader sneakyAnnotationReader = new ODataUtils.SneakyAnnotationReader(edmAnnotation);
|
||||
annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm());
|
||||
}
|
||||
|
||||
if (edmAnnotation.getExpression() != null) {
|
||||
annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString());
|
||||
}
|
||||
annotations.add(annotation);
|
||||
});
|
||||
membersJsonObject.add(ANNOTATIONS_KEY, annotations);
|
||||
}
|
||||
return membersJsonObject;
|
||||
}
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
package org.reso.commander.jsonSerializers;
|
||||
|
||||
import com.google.gson.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.commons.api.edm.*;
|
||||
import org.reso.commander.common.Utils;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.Date;
|
||||
|
||||
import static org.reso.commander.Commander.REPORT_DIVIDER;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
public class MetadataReport implements JsonSerializer<MetadataReport> {
|
||||
private static final Logger LOG = LogManager.getLogger(MetadataReport.class);
|
||||
|
||||
private Edm metadata;
|
||||
|
||||
private MetadataReport() {
|
||||
//private default constructor
|
||||
}
|
||||
|
||||
public MetadataReport(Edm metadata) {
|
||||
this.metadata = metadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
|
||||
reportBuilder
|
||||
.append("\n\n" + REPORT_DIVIDER)
|
||||
.append("\nRESO Metadata Report")
|
||||
.append("\n").append(new Date())
|
||||
.append("\n" + REPORT_DIVIDER);
|
||||
|
||||
JsonElement metadataReport = serialize(this, MetadataReport.class, null);
|
||||
reportBuilder.append(FieldJson.buildReportString(metadataReport));
|
||||
reportBuilder.append(LookupJson.buildReportString(metadataReport));
|
||||
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(MetadataReport src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
final String
|
||||
DESCRIPTION_KEY = "description", DESCRIPTION = "RESO Data Dictionary Metadata Report",
|
||||
VERSION_KEY = "version", VERSION = "1.7",
|
||||
GENERATED_ON_KEY = "generatedOn",
|
||||
FIELDS_KEY = "fields",
|
||||
LOOKUPS_KEY = "lookups";
|
||||
|
||||
JsonArray fields = new JsonArray();
|
||||
JsonArray lookups = new JsonArray();
|
||||
|
||||
src.metadata.getSchemas().forEach(edmSchema -> {
|
||||
//serialize entities (resources) and members (fields)
|
||||
edmSchema.getEntityTypes().forEach(edmEntityType -> {
|
||||
edmEntityType.getPropertyNames().forEach(propertyName -> {
|
||||
FieldJson fieldJson = new FieldJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName));
|
||||
fields.add(fieldJson.serialize(fieldJson, FieldJson.class, null));
|
||||
});
|
||||
});
|
||||
|
||||
//serialize enum types
|
||||
edmSchema.getEnumTypes().forEach(edmEnumType -> {
|
||||
edmEnumType.getMemberNames().forEach(memberName -> {
|
||||
LookupJson lookupJson = new LookupJson(memberName, edmEnumType);
|
||||
lookups.add(lookupJson.serialize(lookupJson, LookupJson.class, null));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
JsonObject metadataReport = new JsonObject();
|
||||
metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION);
|
||||
metadataReport.addProperty(VERSION_KEY, VERSION);
|
||||
metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp());
|
||||
metadataReport.add(FIELDS_KEY, fields);
|
||||
metadataReport.add(LOOKUPS_KEY, lookups);
|
||||
return metadataReport;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,134 @@
|
|||
package org.reso.models;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.reflect.TypeToken;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.reso.commander.Commander;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.List;
|
||||
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
/**
|
||||
* Used to deserialize the Data Dictionary reference sheet into a cache of generators
|
||||
*/
|
||||
public class DataGenerator {
|
||||
private static final Logger LOG = LogManager.getLogger(DataGenerator.class);
|
||||
private final static String DATA_GENERATOR_JSON = "RESODataDictionary-1.7.data-generator.json";
|
||||
|
||||
private String description;
|
||||
private String version;
|
||||
private String generatedOn;
|
||||
private List<ResourceInfo> resourceInfo;
|
||||
private List<FieldDataGenerator> fields;
|
||||
|
||||
/**
|
||||
* Creates a nested map of Data Dictionary reference generators where
|
||||
* * the outer map is keyed by resource name
|
||||
* * inner map is keyed by standard field name and returns a generator for that field
|
||||
*
|
||||
* @return nested hashes of standard field generators
|
||||
*/
|
||||
public static DataGenerator deserialize() {
|
||||
final String generatorJson = Commander.convertInputStreamToString(Thread.currentThread().getContextClassLoader().getResourceAsStream(DATA_GENERATOR_JSON));
|
||||
assert generatorJson != null : getDefaultErrorMessage("could not load resource " + DATA_GENERATOR_JSON);
|
||||
|
||||
//final String generatorJson = Commander.convertInputStreamToString(Commander.deserializeFileFromPath(resource.getPath()));
|
||||
|
||||
//note the open braces before getType()
|
||||
Type targetClassType = new TypeToken<DataGenerator>() {}.getType();
|
||||
return new Gson().fromJson(generatorJson, targetClassType);
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public String getVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public String getGeneratedOn() {
|
||||
return generatedOn;
|
||||
}
|
||||
|
||||
public List<ResourceInfo> getResourceInfo() {
|
||||
return resourceInfo;
|
||||
}
|
||||
|
||||
public List<FieldDataGenerator> getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
public static final class FieldDataGenerator {
|
||||
private String fieldName;
|
||||
private String resourceName;
|
||||
private String fakerGeneratorName;
|
||||
private List<String> customExamples;
|
||||
|
||||
public FieldDataGenerator(String fieldName, String resourceName, String fakerGeneratorName, List<String> customExamples) {
|
||||
this.fieldName = fieldName;
|
||||
this.resourceName = resourceName;
|
||||
this.fakerGeneratorName = fakerGeneratorName;
|
||||
this.customExamples = customExamples;
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
public String getResourceName() {
|
||||
return resourceName;
|
||||
}
|
||||
|
||||
public void setResourceName(String resourceName) {
|
||||
this.resourceName = resourceName;
|
||||
}
|
||||
|
||||
public String getFakerGeneratorName() {
|
||||
return fakerGeneratorName;
|
||||
}
|
||||
|
||||
public List<String> getCustomExamples() {
|
||||
return customExamples;
|
||||
}
|
||||
|
||||
public boolean hasFakerGenerator() {
|
||||
return fakerGeneratorName != null && fakerGeneratorName.length() > 0;
|
||||
}
|
||||
|
||||
public boolean hasCustomExamples() {
|
||||
return customExamples != null && customExamples.size() > 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "FieldDataGenerator{" +
|
||||
"fieldName='" + fieldName + '\'' +
|
||||
", resourceName=" + (resourceName == null ? "null" : "'" + resourceName + "'") +
|
||||
", fakerGeneratorName=" + (fakerGeneratorName == null ? "null" : "'" + fakerGeneratorName + "'") +
|
||||
", customExamples=" + customExamples +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
public static final class ResourceInfo {
|
||||
private String resourceName;
|
||||
private Integer recordCount;
|
||||
|
||||
public ResourceInfo(String resourceName,Integer recordCount) {
|
||||
this.resourceName = resourceName;
|
||||
this.recordCount = recordCount;
|
||||
}
|
||||
|
||||
public String getResourceName() {
|
||||
return resourceName;
|
||||
}
|
||||
|
||||
public Integer getRecordCount() {
|
||||
return recordCount;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,46 +0,0 @@
|
|||
package org.reso.models;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class LookupResourceItem {
|
||||
private final String lookupKey;
|
||||
private final String lookupName;
|
||||
private final String lookupValue;
|
||||
private final String lookupStandardName;
|
||||
private final String legacyODataValue;
|
||||
private final Date modificationTimestamp;
|
||||
|
||||
public LookupResourceItem(String lookupKey, String lookupName, String lookupValue,
|
||||
String lookupStandardName, String legacyODataValue, Date modificationTimestamp) {
|
||||
this.lookupKey = lookupKey;
|
||||
this.lookupName = lookupName;
|
||||
this.lookupValue = lookupValue;
|
||||
this.lookupStandardName = lookupStandardName;
|
||||
this.legacyODataValue = legacyODataValue;
|
||||
this.modificationTimestamp = modificationTimestamp;
|
||||
}
|
||||
|
||||
public String getLookupKey() {
|
||||
return lookupKey;
|
||||
}
|
||||
|
||||
public String getLookupName() {
|
||||
return lookupName;
|
||||
}
|
||||
|
||||
public String getLookupValue() {
|
||||
return lookupValue;
|
||||
}
|
||||
|
||||
public String getLookupStandardName() {
|
||||
return lookupStandardName;
|
||||
}
|
||||
|
||||
public String getLegacyODataValue() {
|
||||
return legacyODataValue;
|
||||
}
|
||||
|
||||
public Date getModificationTimestamp() {
|
||||
return modificationTimestamp;
|
||||
}
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
package org.reso.models;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public final class LookupValue {
|
||||
private final String resourceName;
|
||||
private final String fieldName;
|
||||
private final String lookupValue;
|
||||
|
||||
public LookupValue(String resourceName, String fieldName, String lookupValue) {
|
||||
this.resourceName = resourceName;
|
||||
this.fieldName = fieldName;
|
||||
this.lookupValue = lookupValue;
|
||||
}
|
||||
|
||||
public String getResourceName() {
|
||||
return resourceName;
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
public String getLookupValue() {
|
||||
return lookupValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
LookupValue that = (LookupValue) o;
|
||||
return resourceName.equals(that.resourceName) &&
|
||||
fieldName.equals(that.fieldName) &&
|
||||
lookupValue.equals(that.lookupValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(resourceName, fieldName, lookupValue);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,320 @@
|
|||
package org.reso.models;
|
||||
|
||||
import com.google.gson.*;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.olingo.client.core.edm.xml.ClientCsdlAnnotation;
|
||||
import org.apache.olingo.commons.api.edm.*;
|
||||
import org.apache.olingo.commons.core.edm.EdmAnnotationImpl;
|
||||
import org.reso.commander.common.Utils;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import static org.reso.commander.Commander.REPORT_DIVIDER;
|
||||
import static org.reso.commander.common.ErrorMsg.getDefaultErrorMessage;
|
||||
|
||||
public class MetadataReport implements JsonSerializer<MetadataReport> {
|
||||
private static final Logger LOG = LogManager.getLogger(MetadataReport.class);
|
||||
|
||||
private Edm metadata;
|
||||
|
||||
private MetadataReport() {
|
||||
//private default constructor
|
||||
}
|
||||
|
||||
public MetadataReport(Edm metadata) {
|
||||
this.metadata = metadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
|
||||
reportBuilder
|
||||
.append("\n\n" + REPORT_DIVIDER)
|
||||
.append("\nRESO Metadata Report")
|
||||
.append("\n").append(new Date())
|
||||
.append("\n" + REPORT_DIVIDER);
|
||||
|
||||
JsonElement metadataReport = serialize(this, MetadataReport.class, null);
|
||||
reportBuilder.append(FieldJson.buildReportString(metadataReport));
|
||||
reportBuilder.append(LookupJson.buildReportString(metadataReport));
|
||||
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* FieldJson uses a JSON payload with the following structure:
|
||||
*
|
||||
* {
|
||||
* "resourceName": "Property",
|
||||
* "fieldName": "AboveGradeFinishedArea",
|
||||
* "type": "Edm.Decimal"
|
||||
* }
|
||||
*/
|
||||
private static final class FieldJson implements JsonSerializer<FieldJson> {
|
||||
static final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
FIELD_NAME_KEY = "fieldName",
|
||||
NULLABLE_KEY = "nullable",
|
||||
MAX_LENGTH_KEY = "maxLength",
|
||||
PRECISION_KEY = "precision",
|
||||
SCALE_KEY = "scale",
|
||||
IS_COLLECTION_KEY = "isCollection",
|
||||
DEFAULT_VALUE_KEY = "defaultValue",
|
||||
UNICODE_KEY = "unicode",
|
||||
TYPE_KEY = "type",
|
||||
TERM_KEY = "term",
|
||||
VALUE_KEY= "value",
|
||||
ANNOTATIONS_KEY = "annotations",
|
||||
FIELDS_KEY = "fields";
|
||||
|
||||
String resourceName;
|
||||
EdmElement edmElement;
|
||||
|
||||
public FieldJson(String resourceName, EdmElement edmElement) {
|
||||
this.resourceName = resourceName;
|
||||
this.edmElement = edmElement;
|
||||
}
|
||||
|
||||
public static String buildReportString(JsonElement metadataReport) {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
metadataReport.getAsJsonObject().get(FIELDS_KEY).getAsJsonArray().forEach(field -> {
|
||||
reportBuilder.append("\nResource: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(RESOURCE_NAME_KEY));
|
||||
reportBuilder.append("\nField: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(FIELD_NAME_KEY));
|
||||
reportBuilder.append("\nType: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY));
|
||||
|
||||
if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) {
|
||||
JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
reportBuilder.append("\n");
|
||||
reportBuilder.append("Annotations:");
|
||||
annotations.forEach(annotation -> {
|
||||
if (annotation.getAsJsonObject().get(TERM_KEY) != null) {
|
||||
reportBuilder.append("\n\tTerm: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(TERM_KEY));
|
||||
}
|
||||
|
||||
if (annotation.getAsJsonObject().get(VALUE_KEY) != null) {
|
||||
reportBuilder.append("\n\tValue: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
reportBuilder.append("\n");
|
||||
});
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(FieldJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject field = new JsonObject();
|
||||
|
||||
|
||||
field.addProperty(RESOURCE_NAME_KEY, src.resourceName);
|
||||
field.addProperty(FIELD_NAME_KEY, src.edmElement.getName());
|
||||
|
||||
String typeName = null;
|
||||
try {
|
||||
typeName = src.edmElement.getType().getFullQualifiedName().getFullQualifiedNameAsString();
|
||||
field.addProperty(TYPE_KEY, typeName);
|
||||
} catch (Exception ex) {
|
||||
LOG.error(getDefaultErrorMessage("Field Name:", src.edmElement.getName(), ex.toString()));
|
||||
field.addProperty(TYPE_KEY, "UNDEFINED");
|
||||
}
|
||||
|
||||
field.addProperty(NULLABLE_KEY, ((EdmProperty) src.edmElement).isNullable());
|
||||
field.addProperty(MAX_LENGTH_KEY, ((EdmProperty) src.edmElement).getMaxLength());
|
||||
field.addProperty(SCALE_KEY, ((EdmProperty) src.edmElement).getScale());
|
||||
field.addProperty(PRECISION_KEY, ((EdmProperty) src.edmElement).getPrecision());
|
||||
field.addProperty(DEFAULT_VALUE_KEY, ((EdmProperty) src.edmElement).getDefaultValue());
|
||||
field.addProperty(IS_COLLECTION_KEY, src.edmElement.isCollection());
|
||||
field.addProperty(UNICODE_KEY, ((EdmProperty) src.edmElement).isUnicode());
|
||||
|
||||
//TODO: report issue to Apache
|
||||
// Can only get the annotation term using ((ClientCsdlAnnotation) ((EdmAnnotationImpl)edmAnnotation).annotatable).term
|
||||
// which a private member and cannot be accessed
|
||||
List<EdmAnnotation> annotations = ((EdmProperty)src.edmElement).getAnnotations();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
JsonArray annotationsJsonArray = new JsonArray();
|
||||
annotations.forEach(edmAnnotation -> {
|
||||
JsonObject annotation = new JsonObject();
|
||||
if (edmAnnotation.getTerm() != null) {
|
||||
annotation.addProperty(TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
} else {
|
||||
SneakyAnnotationReader sneakyAnnotationReader = new SneakyAnnotationReader(edmAnnotation);
|
||||
annotation.addProperty(TERM_KEY, sneakyAnnotationReader.getTerm());
|
||||
}
|
||||
|
||||
if (edmAnnotation.getExpression() != null) {
|
||||
annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString());
|
||||
}
|
||||
annotationsJsonArray.add(annotation);
|
||||
});
|
||||
field.add(ANNOTATIONS_KEY, annotationsJsonArray);
|
||||
}
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
static class SneakyAnnotationReader {
|
||||
Class<? extends EdmAnnotationImpl> object;
|
||||
Field field;
|
||||
EdmAnnotationImpl edmAnnotationImpl;
|
||||
ClientCsdlAnnotation clientCsdlAnnotation;
|
||||
|
||||
public SneakyAnnotationReader(EdmAnnotation edmAnnotation) {
|
||||
try {
|
||||
edmAnnotationImpl = ((EdmAnnotationImpl)edmAnnotation);
|
||||
|
||||
// create an object of the class named Class
|
||||
object = edmAnnotationImpl.getClass();
|
||||
|
||||
// access the private variable
|
||||
field = object.getDeclaredField("annotation");
|
||||
// make private field accessible
|
||||
field.setAccessible(true);
|
||||
|
||||
clientCsdlAnnotation = (ClientCsdlAnnotation) field.get(edmAnnotationImpl);
|
||||
|
||||
} catch (Exception ex) {
|
||||
LOG.error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
public String getTerm() {
|
||||
return clientCsdlAnnotation.getTerm();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* LookupJson uses a JSON payload with the following structure:
|
||||
*
|
||||
* {
|
||||
* "lookupName": "org.reso.metadata.enums.CommunityFeatures",
|
||||
* "lookupValue": "Stables",
|
||||
* "type": "Edm.Int32"
|
||||
* }
|
||||
*/
|
||||
private static final class LookupJson implements JsonSerializer<LookupJson> {
|
||||
static final String
|
||||
LOOKUP_NAME_KEY = "lookupName", LOOKUP_VALUE_KEY = "lookupValue",
|
||||
TYPE_KEY = "type", VALUE_KEY= "value", ANNOTATIONS_KEY = "annotations",
|
||||
LOOKUPS_KEY = "lookups";
|
||||
|
||||
EdmEnumType edmEnumType;
|
||||
String memberName;
|
||||
|
||||
public LookupJson(String memberName, EdmEnumType edmEnumType) {
|
||||
this.edmEnumType = edmEnumType;
|
||||
this.memberName = memberName;
|
||||
}
|
||||
|
||||
public static String buildReportString(JsonElement metadataReport) {
|
||||
StringBuilder reportBuilder = new StringBuilder();
|
||||
metadataReport.getAsJsonObject().get(LOOKUPS_KEY).getAsJsonArray().forEach(field -> {
|
||||
reportBuilder.append("\nLookup Name: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(LOOKUP_NAME_KEY));
|
||||
reportBuilder.append("\nLookup Value: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(LOOKUP_VALUE_KEY));
|
||||
reportBuilder.append("\nType: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(TYPE_KEY));
|
||||
|
||||
if (field.getAsJsonObject().get(ANNOTATIONS_KEY) != null) {
|
||||
JsonArray annotations = field.getAsJsonObject().get(ANNOTATIONS_KEY).getAsJsonArray();
|
||||
if (annotations != null && annotations.size() > 0) {
|
||||
reportBuilder.append("\n");
|
||||
reportBuilder.append("Annotations:");
|
||||
annotations.forEach(annotation -> {
|
||||
if (annotation.getAsJsonObject().get(FieldJson.TERM_KEY) != null) {
|
||||
reportBuilder.append("\n\tTerm: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(FieldJson.TERM_KEY));
|
||||
}
|
||||
|
||||
if (annotation.getAsJsonObject().get(VALUE_KEY) != null) {
|
||||
reportBuilder.append("\n\tValue: ");
|
||||
reportBuilder.append(annotation.getAsJsonObject().get(VALUE_KEY));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
reportBuilder.append("\n");
|
||||
});
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(LookupJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject membersJsonObject = new JsonObject();
|
||||
membersJsonObject.addProperty(LOOKUP_NAME_KEY, src.edmEnumType.getFullQualifiedName().toString());
|
||||
membersJsonObject.addProperty(LOOKUP_VALUE_KEY, src.memberName);
|
||||
membersJsonObject.addProperty(TYPE_KEY, src.edmEnumType.getUnderlyingType().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
|
||||
if (src.edmEnumType.getMember(memberName).getAnnotations().size() > 0) {
|
||||
JsonArray annotations = new JsonArray();
|
||||
src.edmEnumType.getMember(memberName).getAnnotations().forEach(edmAnnotation -> {
|
||||
JsonObject annotation = new JsonObject();
|
||||
if (edmAnnotation.getTerm() != null) {
|
||||
annotation.addProperty(FieldJson.TERM_KEY, edmAnnotation.getTerm().getFullQualifiedName().getFullQualifiedNameAsString());
|
||||
} else {
|
||||
SneakyAnnotationReader sneakyAnnotationReader = new SneakyAnnotationReader(edmAnnotation);
|
||||
annotation.addProperty(FieldJson.TERM_KEY, sneakyAnnotationReader.getTerm());
|
||||
}
|
||||
|
||||
if (edmAnnotation.getExpression() != null) {
|
||||
annotation.addProperty(VALUE_KEY, edmAnnotation.getExpression().asConstant().getValueAsString());
|
||||
}
|
||||
annotations.add(annotation);
|
||||
});
|
||||
membersJsonObject.add(ANNOTATIONS_KEY, annotations);
|
||||
}
|
||||
return membersJsonObject;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(MetadataReport src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
final String
|
||||
DESCRIPTION_KEY = "description", DESCRIPTION = "RESO Data Dictionary Metadata Report",
|
||||
VERSION_KEY = "version", VERSION = "1.7",
|
||||
GENERATED_ON_KEY = "generatedOn",
|
||||
FIELDS_KEY = "fields",
|
||||
LOOKUPS_KEY = "lookups";
|
||||
|
||||
JsonArray fields = new JsonArray();
|
||||
JsonArray lookups = new JsonArray();
|
||||
|
||||
src.metadata.getSchemas().forEach(edmSchema -> {
|
||||
//serialize entities (resources) and members (fields)
|
||||
edmSchema.getEntityTypes().forEach(edmEntityType -> {
|
||||
edmEntityType.getPropertyNames().forEach(propertyName -> {
|
||||
FieldJson fieldJson = new FieldJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName));
|
||||
fields.add(fieldJson.serialize(fieldJson, FieldJson.class, null));
|
||||
});
|
||||
});
|
||||
|
||||
//serialize enum types
|
||||
edmSchema.getEnumTypes().forEach(edmEnumType -> {
|
||||
edmEnumType.getMemberNames().forEach(memberName -> {
|
||||
LookupJson lookupJson = new LookupJson(memberName, edmEnumType);
|
||||
lookups.add(lookupJson.serialize(lookupJson, LookupJson.class, null));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
JsonObject metadataReport = new JsonObject();
|
||||
metadataReport.addProperty(DESCRIPTION_KEY, DESCRIPTION);
|
||||
metadataReport.addProperty(VERSION_KEY, VERSION);
|
||||
metadataReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp());
|
||||
metadataReport.add(FIELDS_KEY, fields);
|
||||
metadataReport.add(LOOKUPS_KEY, lookups);
|
||||
return metadataReport;
|
||||
}
|
||||
}
|
|
@ -24,6 +24,8 @@ public class PayloadSample implements JsonSerializer<PayloadSample> {
|
|||
//keeps track of the list of key fields found on the server
|
||||
final List<String> keyFields = new LinkedList<>();
|
||||
|
||||
final List<String> payloadFields = new LinkedList<>();
|
||||
|
||||
public PayloadSample(String resourceName, String dateField, List<String> keyFields) {
|
||||
assert resourceName != null : "resourceName MUST be present";
|
||||
this.resourceName = resourceName;
|
||||
|
@ -31,6 +33,10 @@ public class PayloadSample implements JsonSerializer<PayloadSample> {
|
|||
this.keyFields.addAll(keyFields);
|
||||
}
|
||||
|
||||
public void setPayloadFields(List<String> payloadFields) {
|
||||
this.payloadFields.addAll(payloadFields);
|
||||
}
|
||||
|
||||
public void addSample(Map<String, String> sample) {
|
||||
encodedSamples.add(sample);
|
||||
}
|
||||
|
@ -89,7 +95,8 @@ public class PayloadSample implements JsonSerializer<PayloadSample> {
|
|||
RESOURCE_NAME_KEY = "resourceName",
|
||||
DATE_FIELD_KEY = "dateField",
|
||||
KEY_FIELDS_KEY = "keyFields",
|
||||
ENCODED_VALUES_KEY = "encodedValues";
|
||||
ENCODED_VALUES_KEY = "encodedValues",
|
||||
PAYLOAD_FIELDS_KEY = "payloadFields";
|
||||
|
||||
|
||||
JsonObject serialized = new JsonObject();
|
||||
|
@ -105,6 +112,10 @@ public class PayloadSample implements JsonSerializer<PayloadSample> {
|
|||
|
||||
serialized.addProperty(DATE_FIELD_KEY, src.dateField);
|
||||
|
||||
JsonArray payloadFieldsJson = new JsonArray();
|
||||
src.payloadFields.forEach(payloadFieldsJson::add);
|
||||
serialized.add(PAYLOAD_FIELDS_KEY, payloadFieldsJson);
|
||||
|
||||
JsonArray encodedSamplesJson = new JsonArray();
|
||||
src.encodedSamples.forEach(sample -> {
|
||||
JsonObject sampleJson = new JsonObject();
|
||||
|
|
|
@ -10,7 +10,6 @@ import org.reso.commander.common.Utils;
|
|||
import java.lang.reflect.Type;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
@ -19,47 +18,49 @@ import java.util.concurrent.atomic.AtomicReference;
|
|||
public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport> {
|
||||
private static final Logger LOG = LogManager.getLogger(PayloadSampleReport.class);
|
||||
private static final String POSTAL_CODE_KEY = "PostalCode";
|
||||
private static final AtomicReference<Map<String, List<PayloadSample>>> resourcePayloadSamplesMap = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
private static final AtomicReference<Map<String, Map<String, Integer>>> resourceFieldFrequencyMap = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>(new LinkedHashMap<>())));
|
||||
private static final AtomicReference<Map<LookupValue, Integer>> lookupValueFrequencyMap = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
private static final AtomicReference<Map<String, Integer>> resourceCounts = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
private final Map<String, List<PayloadSample>> resourcePayloadSamplesMap = Collections.synchronizedMap(new LinkedHashMap<>());
|
||||
private final Map<String, Map<String, Integer>> resourceFieldTallies = Collections.synchronizedMap(new LinkedHashMap<>(new LinkedHashMap<>()));
|
||||
private final Map<String, Integer> resourceCounts = Collections.synchronizedMap(new LinkedHashMap<>());
|
||||
|
||||
private static Edm metadata;
|
||||
private Edm metadata;
|
||||
|
||||
public PayloadSampleReport(final Edm metadata, final Map<String, List<PayloadSample>> resourcePayloadSamplesMap,
|
||||
final Map<String, Integer> resourceCounts, final Map<LookupValue, Integer> lookupValueFrequencyMap) {
|
||||
PayloadSampleReport.metadata = metadata;
|
||||
PayloadSampleReport.resourcePayloadSamplesMap.get().putAll(resourcePayloadSamplesMap);
|
||||
PayloadSampleReport.resourceFieldFrequencyMap.get().putAll(createResourceFieldTallies(resourcePayloadSamplesMap));
|
||||
PayloadSampleReport.lookupValueFrequencyMap.get().putAll(lookupValueFrequencyMap);
|
||||
PayloadSampleReport.resourceCounts.get().putAll(resourceCounts);
|
||||
private PayloadSampleReport() {
|
||||
//private default constructor
|
||||
}
|
||||
|
||||
public PayloadSampleReport(final Edm metadata, final Map<String, List<PayloadSample>> resourcePayloadSamplesMap, final Map<String, Integer> resourceCounts) {
|
||||
this.metadata = metadata;
|
||||
this.resourcePayloadSamplesMap.putAll(resourcePayloadSamplesMap);
|
||||
resourceFieldTallies.putAll(createResourceFieldTallies(resourcePayloadSamplesMap));
|
||||
|
||||
this.resourceCounts.putAll(resourceCounts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.valueOf(serialize(this, FieldsJson.class, null));
|
||||
return String.valueOf(serialize(this, FieldAvailabilityJson.class, null));
|
||||
}
|
||||
|
||||
/**
|
||||
* FieldsJson uses a JSON payload with the following structure:
|
||||
* <p>
|
||||
* {
|
||||
* "resourceName": "Property",
|
||||
* "fieldName": "AboveGradeFinishedArea",
|
||||
* "availability": 0.1
|
||||
* }
|
||||
* FieldAvailabilityJson uses a JSON payload with the following structure:
|
||||
*
|
||||
* {
|
||||
* "resourceName": "Property",
|
||||
* "fieldName": "AboveGradeFinishedArea",
|
||||
* "availability": 0.1
|
||||
* }
|
||||
*/
|
||||
private static final class FieldsJson implements JsonSerializer<FieldsJson> {
|
||||
private final class FieldAvailabilityJson implements JsonSerializer<FieldAvailabilityJson> {
|
||||
static final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
FIELD_NAME_KEY = "fieldName",
|
||||
FIELDS_KEY = "fields",
|
||||
FREQUENCY_KEY = "frequency";
|
||||
AVAILABILITY_KEY = "availability";
|
||||
|
||||
String resourceName;
|
||||
EdmElement edmElement;
|
||||
|
||||
public FieldsJson(String resourceName, EdmElement edmElement) {
|
||||
public FieldAvailabilityJson(String resourceName, EdmElement edmElement) {
|
||||
this.resourceName = resourceName;
|
||||
this.edmElement = edmElement;
|
||||
}
|
||||
|
@ -71,106 +72,55 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
reportBuilder.append(field.getAsJsonObject().get(RESOURCE_NAME_KEY));
|
||||
reportBuilder.append("\nField: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(FIELD_NAME_KEY));
|
||||
reportBuilder.append("\nFrequency: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(FREQUENCY_KEY));
|
||||
reportBuilder.append("\nAvailability: ");
|
||||
reportBuilder.append(field.getAsJsonObject().get(AVAILABILITY_KEY));
|
||||
reportBuilder.append("\n");
|
||||
});
|
||||
return reportBuilder.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonElement serialize(FieldsJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
public JsonElement serialize(FieldAvailabilityJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject field = new JsonObject();
|
||||
|
||||
int frequency = resourceFieldFrequencyMap.get().get(src.resourceName) != null
|
||||
&& resourceFieldFrequencyMap.get().get(src.resourceName).get(src.edmElement.getName()) != null
|
||||
? resourceFieldFrequencyMap.get().get(src.resourceName).get(src.edmElement.getName()) : 0;
|
||||
int numTimesPresent = resourceFieldTallies.get(src.resourceName) != null
|
||||
&& resourceFieldTallies.get(src.resourceName).get(src.edmElement.getName()) != null
|
||||
? resourceFieldTallies.get(src.resourceName).get(src.edmElement.getName()) : 0;
|
||||
|
||||
int numSamples = resourcePayloadSamplesMap.get(src.resourceName) != null
|
||||
? resourcePayloadSamplesMap.get(src.resourceName).stream().reduce(0, (a, f) -> a + f.encodedSamples.size(), Integer::sum) : 0;
|
||||
|
||||
field.addProperty(RESOURCE_NAME_KEY, src.resourceName);
|
||||
field.addProperty(FIELD_NAME_KEY, src.edmElement.getName());
|
||||
field.addProperty(FREQUENCY_KEY, frequency);
|
||||
field.addProperty(AVAILABILITY_KEY, numSamples > 0 ? (1.0 * numTimesPresent) / numSamples : 0);
|
||||
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* resourceName: "Property",
|
||||
* fieldName: "StateOrProvince",
|
||||
* lookupName: "StateOrProvince",
|
||||
* lookupValue: "CA",
|
||||
* availability: 0.03
|
||||
*/
|
||||
private static final class LookupValuesJson implements JsonSerializer<LookupValuesJson> {
|
||||
final String resourceName, fieldName, lookupValue;
|
||||
final Integer frequency;
|
||||
|
||||
static final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
FIELD_NAME_KEY = "fieldName",
|
||||
LOOKUP_VALUE_KEY = "lookupValue",
|
||||
FREQUENCY_KEY = "frequency";
|
||||
|
||||
public LookupValuesJson(String resourceName, String fieldName, String lookupValue, Integer frequency) {
|
||||
this.resourceName = resourceName;
|
||||
this.fieldName = fieldName;
|
||||
this.lookupValue = lookupValue;
|
||||
this.frequency = frequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gson invokes this call-back method during serialization when it encounters a field of the
|
||||
* specified type.
|
||||
*
|
||||
* <p>In the implementation of this call-back method, you should consider invoking
|
||||
* {@link JsonSerializationContext#serialize(Object, Type)} method to create JsonElements for any
|
||||
* non-trivial field of the {@code src} object. However, you should never invoke it on the
|
||||
* {@code src} object itself since that will cause an infinite loop (Gson will call your
|
||||
* call-back method again).</p>
|
||||
*
|
||||
* @param src the object that needs to be converted to Json.
|
||||
* @param typeOfSrc the actual type (fully genericized version) of the source object.
|
||||
* @param context the context of the request
|
||||
* @return a JsonElement corresponding to the specified object.
|
||||
*/
|
||||
@Override
|
||||
public JsonElement serialize(LookupValuesJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject obj = new JsonObject();
|
||||
|
||||
obj.addProperty(RESOURCE_NAME_KEY, resourceName);
|
||||
obj.addProperty(FIELD_NAME_KEY, fieldName);
|
||||
obj.addProperty(LOOKUP_VALUE_KEY, lookupValue);
|
||||
obj.addProperty(FREQUENCY_KEY, frequency);
|
||||
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
private static Map<String, Map<String, Integer>> createResourceFieldTallies(Map<String, List<PayloadSample>> resourcePayloadSamplesMap) {
|
||||
AtomicReference<Map<String, Map<String, Integer>>> resourceTallies = new AtomicReference<>(Collections.synchronizedMap(new LinkedHashMap<>()));
|
||||
AtomicReference<Map<String, Map<String, Integer>>> resourceTallies = new AtomicReference<>(new LinkedHashMap<>());
|
||||
AtomicInteger numSamples = new AtomicInteger(0);
|
||||
resourcePayloadSamplesMap.keySet().forEach(resourceName -> {
|
||||
LOG.info("Processing resource: " + resourceName);
|
||||
|
||||
//if there are samples for the given resource, sum the tallies, otherwise 0.
|
||||
numSamples.set(resourcePayloadSamplesMap.get(resourceName) != null
|
||||
? resourcePayloadSamplesMap.get(resourceName).stream().reduce(0, (a, f) -> a + f.getSamples().size(), Integer::sum) : 0);
|
||||
|
||||
LOG.info("Sample size: " + numSamples.get());
|
||||
|
||||
//for each resource, go through the keys and tally the data presence counts for each field
|
||||
//as well as the number of samples in each case
|
||||
resourceTallies.get().putIfAbsent(resourceName, new LinkedHashMap<>());
|
||||
if (numSamples.get() > 0) {
|
||||
resourcePayloadSamplesMap.get(resourceName)
|
||||
.forEach(payloadSample -> payloadSample.getSamples()
|
||||
.forEach(sample -> sample
|
||||
.forEach((fieldName, encodedValue) -> {
|
||||
if (encodedValue != null) {
|
||||
resourceTallies.get().get(resourceName).putIfAbsent(fieldName, 0);
|
||||
resourceTallies.get().get(resourceName).put(fieldName, resourceTallies.get().get(resourceName).get(fieldName) + 1);
|
||||
}
|
||||
})));
|
||||
resourcePayloadSamplesMap.get(resourceName).forEach(payloadSample -> {
|
||||
payloadSample.getSamples().forEach(sample -> {
|
||||
sample.forEach((fieldName, encodedValue) -> {
|
||||
if (encodedValue != null) {
|
||||
resourceTallies.get().get(resourceName).putIfAbsent(fieldName, 0);
|
||||
resourceTallies.get().get(resourceName).put(fieldName, resourceTallies.get().get(resourceName).get(fieldName) + 1);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
return resourceTallies.get();
|
||||
|
@ -182,166 +132,93 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
DESCRIPTION_KEY = "description", DESCRIPTION = "RESO Data Availability Report",
|
||||
VERSION_KEY = "version", VERSION = "1.7",
|
||||
GENERATED_ON_KEY = "generatedOn",
|
||||
RESOURCE_INFO_KEY = "resources",
|
||||
FIELDS_KEY = "fields",
|
||||
LOOKUPS_KEY = "lookups",
|
||||
LOOKUP_VALUES_KEY = "lookupValues";
|
||||
RESOURCE_INFO_KEY = "resourceInfo",
|
||||
FIELDS_KEY = "fields";
|
||||
|
||||
//serialize fields
|
||||
JsonArray fields = new JsonArray();
|
||||
metadata.getSchemas().forEach(edmSchema -> {
|
||||
|
||||
src.metadata.getSchemas().forEach(edmSchema -> {
|
||||
//serialize entities (resources) and members (fields)
|
||||
edmSchema.getEntityTypes().forEach(edmEntityType -> edmEntityType.getPropertyNames().forEach(propertyName -> {
|
||||
FieldsJson fieldJson = new FieldsJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName));
|
||||
fields.add(fieldJson.serialize(fieldJson, FieldsJson.class, null));
|
||||
}));
|
||||
});
|
||||
|
||||
//serialize lookups
|
||||
JsonArray lookups = new JsonArray();
|
||||
final Map<String, Map<String, Integer>> resourceFieldLookupTotalsMap = new LinkedHashMap<>();
|
||||
lookupValueFrequencyMap.get().forEach((lookupValue, frequency) -> {
|
||||
resourceFieldLookupTotalsMap.putIfAbsent(lookupValue.getResourceName(), new LinkedHashMap<>());
|
||||
resourceFieldLookupTotalsMap.get(lookupValue.getResourceName()).putIfAbsent(lookupValue.getFieldName(), 0);
|
||||
resourceFieldLookupTotalsMap.get(lookupValue.getResourceName()).put(lookupValue.getFieldName(),
|
||||
resourceFieldLookupTotalsMap.get(lookupValue.getResourceName()).get(lookupValue.getFieldName()) + frequency);
|
||||
});
|
||||
|
||||
resourceFieldLookupTotalsMap.forEach((resourceName, fieldLookupTotalsMap) -> {
|
||||
fieldLookupTotalsMap.forEach((fieldName, numLookupsTotal) -> {
|
||||
LookupsJson lookupsJson = new LookupsJson(resourceName, fieldName, numLookupsTotal);
|
||||
lookups.add(lookupsJson.serialize(lookupsJson, LookupsJson.class, null));
|
||||
edmSchema.getEntityTypes().forEach(edmEntityType -> {
|
||||
edmEntityType.getPropertyNames().forEach(propertyName -> {
|
||||
FieldAvailabilityJson fieldJson = new FieldAvailabilityJson(edmEntityType.getName(), edmEntityType.getProperty(propertyName));
|
||||
fields.add(fieldJson.serialize(fieldJson, FieldAvailabilityJson.class, null));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
//serialize lookup values
|
||||
JsonArray lookupValues = new JsonArray();
|
||||
lookupValueFrequencyMap.get().forEach((lookupValue, frequency) -> {
|
||||
LookupValuesJson lookupValuesJson = new LookupValuesJson(lookupValue.getResourceName(), lookupValue.getFieldName(), lookupValue.getLookupValue(), frequency);
|
||||
lookupValues.add(lookupValuesJson.serialize(lookupValuesJson, LookupValuesJson.class, null));
|
||||
});
|
||||
|
||||
JsonObject availabilityReport = new JsonObject();
|
||||
availabilityReport.addProperty(DESCRIPTION_KEY, DESCRIPTION);
|
||||
availabilityReport.addProperty(VERSION_KEY, VERSION);
|
||||
availabilityReport.addProperty(GENERATED_ON_KEY, Utils.getIsoTimestamp());
|
||||
|
||||
final JsonArray resourceTotalsByResource = new JsonArray();
|
||||
resourcePayloadSamplesMap.get().keySet().forEach(resourceName -> {
|
||||
src.resourcePayloadSamplesMap.keySet().forEach(resourceName -> {
|
||||
Set<String> postalCodes = new LinkedHashSet<>();
|
||||
ResourcesJson resourcesJson = new ResourcesJson(resourceName);
|
||||
ResourceInfo resourceInfo = new ResourceInfo(resourceName);
|
||||
|
||||
int resourceRecordCount = 0;
|
||||
if (resourceCounts.get().get(resourceName) != null) {
|
||||
resourceRecordCount = resourceCounts.get().get(resourceName);
|
||||
if (src.resourceCounts.get(resourceName) != null) {
|
||||
resourceRecordCount = src.resourceCounts.get(resourceName);
|
||||
}
|
||||
resourcesJson.numRecordsTotal.set(resourceRecordCount);
|
||||
resourceInfo.numRecordsTotal.set(resourceRecordCount);
|
||||
|
||||
PayloadSample zerothSample = resourcePayloadSamplesMap.get().get(resourceName) != null
|
||||
&& resourcePayloadSamplesMap.get().get(resourceName).size() > 0
|
||||
? resourcePayloadSamplesMap.get().get(resourceName).get(0) : null;
|
||||
PayloadSample zerothSample = resourcePayloadSamplesMap.get(resourceName) != null
|
||||
&& resourcePayloadSamplesMap.get(resourceName).size() > 0
|
||||
? resourcePayloadSamplesMap.get(resourceName).get(0) : null;
|
||||
|
||||
if (zerothSample != null) {
|
||||
resourcesJson.keyFields.set(zerothSample.keyFields);
|
||||
resourcesJson.dateField.set(zerothSample.dateField);
|
||||
resourceInfo.keyFields.set(zerothSample.keyFields);
|
||||
resourceInfo.dateField.set(zerothSample.dateField);
|
||||
}
|
||||
|
||||
if (resourcePayloadSamplesMap.get().get(resourceName) != null) {
|
||||
if (src.resourcePayloadSamplesMap.get(resourceName) != null) {
|
||||
AtomicReference<OffsetDateTime> offsetDateTime = new AtomicReference<>();
|
||||
|
||||
resourcePayloadSamplesMap.get().get(resourceName).forEach(payloadSample -> {
|
||||
resourcesJson.totalBytesReceived.getAndAdd(payloadSample.getResponseSizeBytes());
|
||||
resourcesJson.totalResponseTimeMillis.getAndAdd(payloadSample.getResponseTimeMillis());
|
||||
resourcesJson.numSamplesProcessed.getAndIncrement();
|
||||
resourcesJson.numRecordsFetched.getAndAdd(payloadSample.encodedSamples.size());
|
||||
src.resourcePayloadSamplesMap.get(resourceName).forEach(payloadSample -> {
|
||||
resourceInfo.totalBytesReceived.getAndAdd(payloadSample.getResponseSizeBytes());
|
||||
resourceInfo.totalResponseTimeMillis.getAndAdd(payloadSample.getResponseTimeMillis());
|
||||
resourceInfo.numSamplesProcessed.getAndIncrement();
|
||||
resourceInfo.numRecordsFetched.getAndAdd(payloadSample.encodedSamples.size());
|
||||
|
||||
payloadSample.encodedSamples.forEach(encodedSample -> {
|
||||
try {
|
||||
offsetDateTime.set(OffsetDateTime.parse(encodedSample.get(payloadSample.dateField)));
|
||||
if (offsetDateTime.get() != null) {
|
||||
if (resourcesJson.dateLow.get() == null) {
|
||||
resourcesJson.dateLow.set(offsetDateTime.get());
|
||||
} else if (offsetDateTime.get().isBefore(resourcesJson.dateLow.get())) {
|
||||
resourcesJson.dateLow.set(offsetDateTime.get());
|
||||
}
|
||||
payloadSample.encodedSamples.forEach(encodedSample -> {
|
||||
offsetDateTime.set(OffsetDateTime.parse(encodedSample.get(payloadSample.dateField)));
|
||||
if (offsetDateTime.get() != null) {
|
||||
if (resourceInfo.dateLow.get() == null) {
|
||||
resourceInfo.dateLow.set(offsetDateTime.get());
|
||||
} else if (offsetDateTime.get().isBefore(resourceInfo.dateLow.get())) {
|
||||
resourceInfo.dateLow.set(offsetDateTime.get());
|
||||
}
|
||||
|
||||
if (resourcesJson.dateHigh.get() == null) {
|
||||
resourcesJson.dateHigh.set(offsetDateTime.get());
|
||||
} else if (offsetDateTime.get().isAfter(resourcesJson.dateHigh.get())) {
|
||||
resourcesJson.dateHigh.set(offsetDateTime.get());
|
||||
}
|
||||
}
|
||||
if (resourceInfo.dateHigh.get() == null) {
|
||||
resourceInfo.dateHigh.set(offsetDateTime.get());
|
||||
} else if (offsetDateTime.get().isAfter(resourceInfo.dateHigh.get())) {
|
||||
resourceInfo.dateHigh.set(offsetDateTime.get());
|
||||
}
|
||||
}
|
||||
|
||||
if (encodedSample.containsKey(POSTAL_CODE_KEY)) {
|
||||
postalCodes.add(encodedSample.get(POSTAL_CODE_KEY));
|
||||
}
|
||||
} catch (DateTimeParseException dateTimeParseException) {
|
||||
LOG.error("Could not parse date for field " + payloadSample.dateField + ", with value: "
|
||||
+ encodedSample.get(payloadSample.dateField) + ". Expected ISO 8601 timestamp format!"
|
||||
);
|
||||
throw dateTimeParseException;
|
||||
}
|
||||
});
|
||||
if (encodedSample.containsKey(POSTAL_CODE_KEY)) {
|
||||
postalCodes.add(encodedSample.get(POSTAL_CODE_KEY));
|
||||
}
|
||||
});
|
||||
|
||||
if (resourcesJson.pageSize.get() == 0) resourcesJson.pageSize.set(payloadSample.getSamples().size());
|
||||
});
|
||||
if (resourceInfo.pageSize.get() == 0) resourceInfo.pageSize.set(payloadSample.getSamples().size());
|
||||
});
|
||||
}
|
||||
if (postalCodes.size() > 0) {
|
||||
resourcesJson.postalCodes.set(postalCodes);
|
||||
resourceInfo.postalCodes.set(postalCodes);
|
||||
}
|
||||
|
||||
resourceTotalsByResource.add(resourcesJson.serialize(resourcesJson, ResourcesJson.class, null));
|
||||
resourceTotalsByResource.add(resourceInfo.serialize(resourceInfo, ResourceInfo.class, null));
|
||||
});
|
||||
|
||||
availabilityReport.add(RESOURCE_INFO_KEY, resourceTotalsByResource);
|
||||
availabilityReport.add(FIELDS_KEY, fields);
|
||||
availabilityReport.add(LOOKUPS_KEY, lookups);
|
||||
availabilityReport.add(LOOKUP_VALUES_KEY, lookupValues);
|
||||
|
||||
return availabilityReport;
|
||||
}
|
||||
|
||||
static final class LookupsJson implements JsonSerializer<LookupsJson> {
|
||||
final String resourceName, fieldName;
|
||||
final Integer numLookupsTotal;
|
||||
|
||||
public LookupsJson(String resourceName, String fieldName, Integer numLookupsTotal) {
|
||||
this.resourceName = resourceName;
|
||||
this.fieldName = fieldName;
|
||||
this.numLookupsTotal = numLookupsTotal;
|
||||
}
|
||||
|
||||
final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
FIELD_NAME_KEY = "fieldName",
|
||||
NUM_LOOKUPS_TOTAL = "numLookupsTotal";
|
||||
|
||||
/**
|
||||
* Gson invokes this call-back method during serialization when it encounters a field of the
|
||||
* specified type.
|
||||
*
|
||||
* <p>In the implementation of this call-back method, you should consider invoking
|
||||
* {@link JsonSerializationContext#serialize(Object, Type)} method to create JsonElements for any
|
||||
* non-trivial field of the {@code src} object. However, you should never invoke it on the
|
||||
* {@code src} object itself since that will cause an infinite loop (Gson will call your
|
||||
* call-back method again).</p>
|
||||
*
|
||||
* @param src the object that needs to be converted to Json.
|
||||
* @param typeOfSrc the actual type (fully genericized version) of the source object.
|
||||
* @param context the context of the request
|
||||
* @return a JsonElement corresponding to the specified object.
|
||||
*/
|
||||
@Override
|
||||
public JsonElement serialize(LookupsJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject obj = new JsonObject();
|
||||
obj.addProperty(RESOURCE_NAME_KEY, src.resourceName);
|
||||
obj.addProperty(FIELD_NAME_KEY, src.fieldName);
|
||||
obj.addProperty(NUM_LOOKUPS_TOTAL, src.numLookupsTotal);
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
static final class ResourcesJson implements JsonSerializer<ResourcesJson> {
|
||||
static final class ResourceInfo implements JsonSerializer<ResourceInfo> {
|
||||
final String
|
||||
RESOURCE_NAME_KEY = "resourceName",
|
||||
RECORD_COUNT_KEY = "recordCount",
|
||||
|
@ -369,7 +246,7 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
final AtomicReference<OffsetDateTime> dateHigh = new AtomicReference<>(null);
|
||||
final AtomicReference<Set<String>> postalCodes = new AtomicReference<>(new LinkedHashSet<>());
|
||||
|
||||
public ResourcesJson(String resourceName) {
|
||||
public ResourceInfo(String resourceName) {
|
||||
this.resourceName.set(resourceName);
|
||||
}
|
||||
|
||||
|
@ -385,11 +262,11 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
*
|
||||
* @param src the object that needs to be converted to Json.
|
||||
* @param typeOfSrc the actual type (fully genericized version) of the source object.
|
||||
* @param context the context of the request
|
||||
* @param context
|
||||
* @return a JsonElement corresponding to the specified object.
|
||||
*/
|
||||
@Override
|
||||
public JsonElement serialize(ResourcesJson src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
public JsonElement serialize(ResourceInfo src, Type typeOfSrc, JsonSerializationContext context) {
|
||||
JsonObject totals = new JsonObject();
|
||||
|
||||
totals.addProperty(RESOURCE_NAME_KEY, src.resourceName.get());
|
||||
|
@ -414,7 +291,7 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
? src.dateLow.get().format(DateTimeFormatter.ISO_INSTANT) : null);
|
||||
|
||||
totals.addProperty(DATE_HIGH_KEY, src.dateHigh.get() != null
|
||||
? src.dateHigh.get().format(DateTimeFormatter.ISO_INSTANT) : null);
|
||||
? src.dateHigh.get().format(DateTimeFormatter.ISO_INSTANT): null);
|
||||
|
||||
JsonArray keyFields = new JsonArray();
|
||||
src.keyFields.get().forEach(keyFields::add);
|
||||
|
@ -429,4 +306,5 @@ public class PayloadSampleReport implements JsonSerializer<PayloadSampleReport>
|
|||
return totals;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -102,14 +102,8 @@ public class ReferenceStandardField {
|
|||
return lookup;
|
||||
}
|
||||
|
||||
public String getLookupName() {
|
||||
String lookupName = getLookup()
|
||||
.replace("<n/a>", "")
|
||||
.replace("Lookups", "").trim();
|
||||
|
||||
if (lookupName.length() == 0) return null;
|
||||
|
||||
return lookupName;
|
||||
public String getLookupStandardName() {
|
||||
return getLookup().replace("Lookups", "").trim();
|
||||
}
|
||||
|
||||
public String getCollection() {
|
||||
|
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"description": "RESO Data Dictionary Metadata Report",
|
||||
"version": "1.7",
|
||||
"generatedOn": "2021-12-13T01:44:51.102Z",
|
||||
"generatedOn": "2021-04-30T12:16:46.822Z",
|
||||
"fields": [
|
||||
{
|
||||
"resourceName": "Property",
|
||||
|
@ -29170,204 +29170,6 @@
|
|||
"value": "The website URL or ID of social media site or account of the member. This is a repeating element. Replace [Type] with any of the options from the SocialMediaType field to create a unique field for that type of social media. For example: SocialMediaFacebookUrlOrID, SocialMediaSkypeUrlOrID, etc."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Field",
|
||||
"fieldName": "FieldKey",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Field Key"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The key used to uniquely identify the Field."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Field",
|
||||
"fieldName": "ResourceName",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Resource Name"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The name of the resource the field belongs to. This will be a RESO Standard Name, when applicable, but may also be a local resource name, for example \"Property.\""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Field",
|
||||
"fieldName": "FieldName",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Field Name"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The name of the field as expressed in the payload. For OData APIs, this field MUST meet certain naming requirements and should be consistent with what’s advertised in the OData XML metadata (to be verified in certification). For example, \"ListPrice.\""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Field",
|
||||
"fieldName": "DisplayName",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Display Name"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The display name for the field. SHOULD be provided in all cases where the use of display names is needed, even if the display name is the same as the underlying field name. The DisplayName MAY be a RESO Standard Display Name or a local one."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Field",
|
||||
"fieldName": "ModificationTimestamp",
|
||||
"type": "Edm.DateTimeOffset",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Modification Timestamp"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The timestamp when the field metadata item was last modified. This is used to help rebuild caches when metadata items change so consumers don\u0027t have to re-pull and reprocess the entire set of metadata when only a small number of changes have been made."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "LookupKey",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Lookup Key"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The key used to uniquely identify the Lookup entry."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "LookupName",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Lookup Name"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "It is called a \"LookupName\" in this proposal because more than one field can have a given lookup, so it refers to the name of the lookup rather than a given field. For example, Listing with CountyOrParish and Office with OfficeCountyOrParish having the same CountyOrParish LookupName. This MUST match the Data Dictionary definition for in cases where the lookup is defined. Vendors MAY add their own enumerations otherwise. The LookupName a given field uses is required to be annotated at the field level in the OData XML Metadata, as outlined later in this proposal."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "LookupValue",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Lookup Value"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The human-friendly display name the data consumer receives in the payload and uses in queries. This MAY be a local name or synonym for a given RESO Data Dictionary lookup item."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "StandardLookupValue",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Standard Lookup Value"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The Data Dictionary LookupDisplayName of the enumerated value. This field is required when the LookupValue for a given item corresponds to a RESO standard value, meaning a standard lookup display name, known synonym, local name, or translation of that value. Local lookups MAY omit this value if they don\u0027t correspond to an existing RESO standard lookup value."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "LegacyODataValue",
|
||||
"type": "Edm.String",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Legacy OData Value"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The Legacy OData lookup value that the server vendor provided in their OData XML Metadata. This value is optional, and has been included in order to provide a stable mechanism for translating OData lookup values to RESO standard lookup display names, as well as for historical data that might have included the OData value at some point, even after the vendor had converted to human friendly display names."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"resourceName": "Lookup",
|
||||
"fieldName": "ModificationTimestamp",
|
||||
"type": "Edm.DateTimeOffset",
|
||||
"nullable": true,
|
||||
"isCollection": false,
|
||||
"unicode": true,
|
||||
"annotations": [
|
||||
{
|
||||
"term": "RESO.OData.Metadata.StandardName",
|
||||
"value": "Modification Timestamp"
|
||||
},
|
||||
{
|
||||
"term": "Core.Description",
|
||||
"value": "The timestamp for when the enumeration value was last modified. The timestamp for when the enumeration value was last modified."
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"lookups": [
|
||||
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
|
@ -17,9 +17,6 @@
|
|||
<Logger name="org.apache.olingo.client.core" level="error" additivity="false">
|
||||
<AppenderRef ref="Log"/>
|
||||
</Logger>
|
||||
<Logger name="org.apache.velocity.runtime" level="error" additivity="false">
|
||||
<AppenderRef ref="Log"/>
|
||||
</Logger>
|
||||
<Root level="all">
|
||||
<AppenderRef ref="Console" level="info"/>
|
||||
<AppenderRef ref="Log" level="error"/>
|
||||
|
|
|
@ -38,7 +38,7 @@
|
|||
<Request
|
||||
RequestId="metadata-request"
|
||||
OutputFile="metadata-request.xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata?$format=application/xml"
|
||||
Url="*ClientSettings_WebAPIURI*/$metadata*Parameter_OptionalMetadataFormatParameter*"
|
||||
/>
|
||||
|
||||
<Request
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
Feature: Web API Container Tests
|
||||
|
||||
Background:
|
||||
Given a Web API test container was created using the RESOScript "mock.web-api-server.core.2.0.0.resoscript"
|
||||
Given a Web API test container was created using the RESOScript "mock.web-api-server.core.1.0.2.resoscript"
|
||||
And a Commander instance exists within the test container
|
||||
|
||||
####################################
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue