Compare commits
11 Commits
feature/co ... main
| Author | SHA1 | Date |
|---|---|---|
| YuCheng Hu | 355ca0bc16 | |
| YuCheng Hu | 4b4adb2cf5 | |
| YuCheng Hu | d2e2f54722 | |
| YuCheng Hu | 40523282fb | |
| YuCheng Hu | 1e14988bb9 | |
| YuCheng Hu | 087c46b273 | |
| michaelpede | 5433467c45 | |
| michaelpede | efccb76206 | |
| michaelpede | 953baf5a64 | |
| michaelpede | 2171cd60a7 | |
| michaelpede | 86f6d28044 | |
@@ -0,0 +1,62 @@
+name: RESO Web API Reference Server Development Environment CI/CD
+
+on:
+  push:
+    branches:
+      - "develop"
+      - "feature/*-development"
+      - "release/*-development"
+      - "hotfix/*-development"
+      - "support/*-development"
+
+jobs:
+  build-and-deploy:
+    runs-on: ubuntu-20.04
+    env:
+      ENVIRONMENT: dev
+      DOCKER_BUILDKIT: 1
+      COMPOSE_FILE: docker-compose.yml:./optional/docker-db-compose.yml
+      SQL_HOST: docker-mysql
+      SQL_USER: root
+      SQL_PASSWORD: root
+      SQL_DB_DRIVER: com.mysql.cj.jdbc.Driver
+      SQL_CONNECTION_STR: jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
+      CERT_REPORT_FILENAME: RESODataDictionary-1.7.metadata-report.json
+    steps:
+
+      - uses: actions/checkout@v2
+      - name: Set up JDK 11
+        uses: actions/setup-java@v2
+        with:
+          java-version: '11'
+          distribution: 'adopt'
+
+      - name: Run ENV setup script
+        run: |
+          chmod +x ./docker/scripts/setup_build_env.sh
+          ./docker/scripts/setup_build_env.sh
+
+      - name: Run build script
+        run: |
+          chmod +x ./docker/scripts/build.sh
+          ./docker/scripts/build.sh
+
+      - name: Run docker-compose to make the images
+        run: docker-compose build
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Push to Dockerhub
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          push: true
+          tags: michaelpede/reso-web-api-reference-server_odata-manager-app:latest
+
+      # We don't really need the SQL server "build" as it's a default Docker build
+      # - name: Run docker build for the WEB API Server Database
+      # run: docker save --output database-server.tar mysql/mysql-server
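Since the build-and-deploy job above only chains the repository's own scripts before pushing the image, a rough local equivalent of its build steps is sketched below (an assumption-laden sketch: it presumes Docker and docker-compose are installed, that you run it from the repository root, and that you skip the DockerHub login/push steps).

```bash
#!/usr/bin/env bash
# Sketch: approximate the build-and-deploy job locally (paths and env values taken from the workflow above).
set -e

export ENVIRONMENT=dev
export DOCKER_BUILDKIT=1
export COMPOSE_FILE=docker-compose.yml:./optional/docker-db-compose.yml

chmod +x ./docker/scripts/setup_build_env.sh ./docker/scripts/build.sh
sudo ./docker/scripts/setup_build_env.sh   # apt-get installs wget, pip, maven, docker-compose (meant for an Ubuntu build server)
./docker/scripts/build.sh                  # builds the WAR: Gradle first, Maven as the fallback
docker-compose build                       # assembles the images listed in COMPOSE_FILE
```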
@@ -1,41 +0,0 @@
-# This workflow will build a package using Gradle and then publish it to GitHub packages when a release is created
-# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#Publishing-using-gradle
-
-name: Gradle Package
-
-on:
-  push:
-    branches:
-      - "develop" # not a branch that currently exists. No need to do this atm.
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up JDK 11
-      uses: actions/setup-java@v2
-      with:
-        java-version: '11'
-        distribution: 'adopt'
-        server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
-        settings-path: ${{ github.workspace }} # location for the settings.xml file
-
-    - name: Build with Gradle
-      run: gradle build
-
-    - name: Create WAR
-      run: gradle war
-
-    # The USERNAME and TOKEN need to correspond to the credentials environment variables used in
-    # the publishing section of your build.gradle
-    - name: Publish to GitHub Packages
-      run: gradle publish
-      env:
-        USERNAME: ${{ github.actor }}
-        TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -36,6 +36,8 @@ bin
 classes
 .DS_Store
 *.local
+/*.sql
+/*.json
 nb-configuration.xml
 .externalToolBuilders
 maven-eclipse.xml
@@ -1,11 +1,11 @@
 FROM tomcat:9
-#FROM tomcat:latest
+#FROM tomcat:latest # latest stopped working, Nov 2021

 ENV JPDA_ADDRESS="*:8000"
 ENV JPDA_TRANSPORT="dt_socket"

 #Not needed while volume mapped for development
-#COPY ./target/RESOservice-1.0.war /usr/local/tomcat/webapps/
-#COPY ./target/RESODataDictionary-1.7.metadata-report.json /usr/local/tomcat/webapps/
+COPY ./target/RESOservice-1.0.war /usr/local/tomcat/webapps/
+COPY ./target/RESODataDictionary-1.7.metadata-report.json /usr/local/tomcat/webapps/

 CMD ["catalina.sh", "jpda", "run"]
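Because the image now bakes in the WAR and the metadata report, and `JPDA_ADDRESS` together with `catalina.sh jpda run` enables remote debugging on port 8000, a local run of the image pushed by the CI workflow might look like the following (a sketch only: the 8080 mapping assumes Tomcat's default HTTP port, and the database settings would normally come from docker-compose and your `.env`).

```bash
# Sketch: run the published reference-server image with the JPDA debug port exposed.
# 8080 = Tomcat HTTP (default), 8000 = JPDA remote debug (JPDA_ADDRESS="*:8000").
docker run --rm -p 8080:8080 -p 8000:8000 \
  michaelpede/reso-web-api-reference-server_odata-manager-app:latest
```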
28 README.md
@@ -18,9 +18,11 @@ Run the `run.sh`

 ## Access the Server

-Assuming you're running the server locally, go to [http://localhost:8080/RESOservice-1.0/$metadata](http://localhost:8080/RESOservice-1.0/$metadata)\
+Assuming you're running the server locally, go to [http://localhost:8080/core/2.0.0/$metadata](http://localhost:8080/core/2.0.0/$metadata)\
 Otherwise, you will have to replace `localhost` with the IP of your Docker machine.

+![](https://cdn.ossez.com/discourse-uploads/optimized/2X/8/881469dc9b204975de209b56f0d897ba8b782347_2_690x439.png)
+
 ## Running with a different database

 If you set the `SQL_HOST` Environment Variable, then the build script will not build the test database.
@@ -31,3 +33,27 @@ You will need to configure the following environment variables, so the server ca
 * SQL_HOST
 * SQL_USER
 * SQL_PASSWORD
+
+## ENVIRONMENT SPECIFIC NOTES
+
+The build scripts were moved to take place in a Docker container so that they would work consistently across environments.
+
+### Windows
+In Windows, running under a Bash shell will work, assuming you meet the above requirements.
+Don't forget to have Docker installed for Windows.
+
+There is an `env-default-windows` file you should rename to `.env` before running the build script.
+
+The `docker/docker-builder` file has a line commented out for Windows users, and a line that needs to be commented out.
+
+### MAC
+
+This has not been tested. Anyone wanting to give feedback would be appreciated.
+
+## Build Failures
+
+If a build fails and you have fixed the source of the error, delete any prior Docker containers before rebuilding with the build scripts.
+
+## Customizing your setup
+
+You can have your own SQL database. Just copy the `env-default` file to `.env` and modify the appropriate properties.
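A minimal sketch of the "Customizing your setup" flow added above, assuming a Linux/macOS shell and with `db.example.com` standing in for your own (hypothetical) MySQL host:

```bash
# Sketch: point the server at your own SQL database instead of the bundled test one.
cp env-default .env    # on Windows, use env-default-windows instead

# Then edit the SQL_* properties in .env, e.g. (hypothetical values):
#   SQL_HOST=db.example.com
#   SQL_USER=reso
#   SQL_PASSWORD=********
#   SQL_CONNECTION_STR=jdbc:mysql://db.example.com/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4

./build.sh             # per the README, with SQL_HOST set the build script skips building the test database
```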
@@ -11,14 +11,14 @@ plugins {
 repositories {
     mavenLocal()
     maven {
-        url = uri('https://repo.maven.apache.org/maven2/')
+        url = uri('https://repo.ossez.com/repository/maven-public/')
     }
 }

 dependencies {
     implementation 'org.postgresql:postgresql:42.2.23'
     implementation 'org.apache.olingo:odata-server-api:4.8.0'
-    implementation 'com.fasterxml.jackson.core:jackson-databind:2.11.1'
+    implementation 'com.fasterxml.jackson.core:jackson-databind:2.15.2'
     implementation 'org.apache.olingo:odata-commons-api:4.8.0'
     implementation 'org.apache.olingo:odata-commons-core:4.8.0'
     implementation 'org.slf4j:slf4j-api:1.7.11'
8 build.sh
@@ -5,6 +5,14 @@ HOME_DIR=`dirname ${REAL_VAR0}`
 TEMP_DIR="${HOME_DIR}/temp"
 SQL_DIR="${HOME_DIR}/sql"

+if [ ! -f "${HOME_DIR}/.env" ]
+then
+  cp "${HOME_DIR}/env-default" "${HOME_DIR}/.env"
+fi
+
+#Needed for Linux builds
+chmod a+x ./docker/scripts/*
+
 docker build -t reso-builder -f docker/docker-builder .
 docker run --name builder --mount type=bind,source="${HOME_DIR}",target=/usr/src/app -t reso-builder
@@ -1,9 +1,11 @@
-FROM ubuntu:20.10
+FROM ubuntu:22.04

 WORKDIR /usr/src/app
-ARG DEBIAN_FRONTEND=noninteractive
+# Comment next line out for Windows builds
+#ARG DEBIAN_FRONTEND=noninteractive
 RUN apt-get update && apt-get install -y wget openjdk-8-jdk curl pip maven docker-compose

+# Needed for Windows builds
 RUN update-java-alternatives -s java-1.8.0-openjdk-amd64

-CMD ./docker/scripts/build.sh
+CMD sh /usr/src/app/docker/scripts/build.sh
@@ -1,18 +1,13 @@
 #!/bin/bash

-REAL_VAR0=`readlink -f $0`
-HOME_DIR_OLD=`dirname ${REAL_VAR0}`
 HOME_DIR=/usr/src/app
+cd "${HOME_DIR}"
 TEMP_DIR="${HOME_DIR}/temp"
 SQL_DIR="${HOME_DIR}/sql"

 # Ensure we have directories set up
 [ -d "${TEMP_DIR}" ] && echo "temp directory found." || mkdir ${TEMP_DIR}
 [ -d "${SQL_DIR}" ] && echo "sql directory found." || mkdir ${SQL_DIR}
-if [ ! -f "${HOME_DIR}/.env" ]
-then
-  cp "${HOME_DIR}/env-default" "${HOME_DIR}/.env"
-fi

 if [ -z "${SQL_HOST}" ]
 then
@@ -42,16 +37,24 @@ fi
 #unzip -d /opt/gradle /tmp/gradle-${VERSION}-bin.zip
 #ln -s /opt/gradle/gradle-${VERSION} /opt/gradle/latest

+if gradle build
+then
+  cp build/libs/RESOservice-1.0.war ./target/core.war
+  cp RESODataDictionary-1.7.metadata-report.json ./target/
+else
+
 #/opt/gradle/latest/bin/gradle build
 #mkdir target
 #cp build/libs/RESOservice-1.0.war ./target/
 #cp RESODataDictionary-1.7.metadata-report.json ./target/

 if ! mvn compile
 then
   echo "Maven could not be found."
   exit
 else
   mvn package
-  cp RESODataDictionary-1.7.metadata-report.json ./target/
+  mv ./target/RESOservice-1.0.war ./target/core.war
+  cp RESODataDictionary-1.7.metadata-report.json ./target/
 fi
+fi
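Worth noting in this hunk: whichever toolchain succeeds, the artifact is now named `core.war`, which is what puts the application under the `/core` context path that the updated README URL (`/core/2.0.0/$metadata`) relies on. A quick post-build sanity check (a sketch):

```bash
# Sketch: after a successful build, both artifacts should be present in ./target
ls target/core.war target/RESODataDictionary-1.7.metadata-report.json
```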
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# This will run commands to setup your build server / Ubuntu 20.10 with the needed prerequisites.
+
+DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y wget pip maven docker-compose
@@ -4,5 +4,4 @@ SQL_USER=root
 SQL_PASSWORD=root
 SQL_DB_DRIVER=com.mysql.cj.jdbc.Driver
 SQL_CONNECTION_STR=jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
-#&user=root&password=root
 CERT_REPORT_FILENAME=RESODataDictionary-1.7.metadata-report.json
@@ -0,0 +1,8 @@
+#Composer file needs a specific separator on Windows.
+COMPOSE_FILE=docker-compose.yml;./optional/docker-db-compose.yml
+SQL_HOST=docker-mysql
+SQL_USER=root
+SQL_PASSWORD=root
+SQL_DB_DRIVER=com.mysql.cj.jdbc.Driver
+SQL_CONNECTION_STR=jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
+CERT_REPORT_FILENAME=RESODataDictionary-1.7.metadata-report.json
4 pom.xml
@@ -11,8 +11,8 @@
     <packaging>war</packaging>

     <properties>
-        <maven.compiler.target>1.8</maven.compiler.target>
-        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+        <maven.compiler.source>11</maven.compiler.source>
         <junit.version>5.7.0</junit.version>
         <javax.version>2.5</javax.version>
         <odata.version>4.8.0</odata.version>
@@ -307,6 +307,7 @@ public class GenericEntityCollectionProcessor implements EntityCollectionProcess
          }
          for (Entity product :productList)
          {
+            // The getValue should already be a String, so the toString should just pass it through, while making the following assignment simple.
             String key = product.getProperty(primaryFieldName).getValue().toString();
             HashMap<String, Object> enumValues = entities.get(key);
             CommonDataProcessing.setEntityEnums(enumValues,product,enumFields);
@@ -32,10 +32,7 @@ import org.slf4j.LoggerFactory;
 import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
+import java.sql.*;
 import java.util.*;

 import static org.reso.service.servlet.RESOservlet.resourceLookup;
@@ -102,12 +99,23 @@ public class GenericEntityProcessor
       response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
    }

-   protected Entity getData(EdmEntitySet edmEntitySet, List<UriParameter> keyPredicates, ResourceInfo resource) throws ODataApplicationException {
+   /**
+    * Reads data from a resource and returns it as a HashMap
+    * @param keyPredicates
+    * @param resource
+    * @return
+    */
+   private HashMap<String,Object> getDataToHash(List<UriParameter> keyPredicates, ResourceInfo resource)
+   {
+      return CommonDataProcessing.translateEntityToMap(this.getData(null, keyPredicates, resource));
+   }
+
+   protected Entity getData(EdmEntitySet edmEntitySet, List<UriParameter> keyPredicates, ResourceInfo resource) {
       ArrayList<FieldInfo> fields = resource.getFieldList();

       Entity product = null;

-      Map<String, String> properties = System.getenv();
       List<FieldInfo> enumFields = CommonDataProcessing.gatherEnumFields(resource);

       try {
@@ -119,18 +127,21 @@ public class GenericEntityProcessor
       // Result set get the result of the SQL query
       String queryString = null;

-      for (final UriParameter key : keyPredicates)
-      {
-         // key
-         String keyName = key.getName(); // .toLowerCase();
-         String keyValue = key.getText();
-         if (sqlCriteria==null)
-         {
-            sqlCriteria = keyName + " = " + keyValue;
-         }
-         else
-         {
-            sqlCriteria = sqlCriteria + " and " + keyName + " = " + keyValue;
-         }
-      }
+      if (null!=keyPredicates)
+      {
+         for (final UriParameter key : keyPredicates)
+         {
+            // key
+            String keyName = key.getName(); // .toLowerCase();
+            String keyValue = key.getText();
+            if (sqlCriteria==null)
+            {
+               sqlCriteria = keyName + " = " + keyValue;
+            }
+            else
+            {
+               sqlCriteria = sqlCriteria + " and " + keyName + " = " + keyValue;
+            }
+         }
+      }

@@ -213,22 +224,23 @@ public class GenericEntityProcessor
       DeserializerResult result = deserializer.entity(requestInputStream, edmEntityType);
       Entity requestEntity = result.getEntity();
       // 2.2 do the creation in backend, which returns the newly created entity
-      //Entity createdEntity = storage.createEntityData(edmEntitySet, requestEntity);
       HashMap<String, Object> mappedObj = CommonDataProcessing.translateEntityToMap(requestEntity);
-      String primaryFieldName = resource.getPrimaryKeyName();
       List<FieldInfo> enumFields = CommonDataProcessing.gatherEnumFields(resource);
-      ArrayList<Object> enumValues = new ArrayList<>();
+      HashMap<String, Object> enumValues = new HashMap<>();
       for (FieldInfo field: enumFields)
       {
+         // We remove all entities that are collections to save to the lookup_value table separately. @TODO: save these values
         if (field.isCollection())
         {
            String fieldName = field.getFieldName();
            Object value = mappedObj.remove(fieldName);
-           enumValues.add(value);
+           enumValues.put(fieldName, value);
         }
      }

      saveData(resource, mappedObj);
+     saveEnumData(resource, enumValues);

      // 3. serialize the response (we have to return the created entity)
      ContextURL contextUrl = ContextURL.with().entitySet(edmEntitySet).build();
@@ -242,7 +254,8 @@ public class GenericEntityProcessor
       //4. configure the response object
       response.setContent(serializedResponse.getContent());
       response.setStatusCode(HttpStatusCode.CREATED.getStatusCode());
-      response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString()); }
+      response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
+   }


   private void saveData(ResourceInfo resource, HashMap<String, Object> mappedObj)
@@ -300,6 +313,75 @@ public class GenericEntityProcessor
       // Result set get the result of the SQL query
    }

+   private void saveEnumData(ResourceInfo resource, HashMap<String, Object> enumValues)
+   {
+      for (String key: enumValues.keySet() )
+      {
+         Object value = enumValues.get(key);
+         saveEnumData(resource, key, value);
+      }
+   }
+
+   /**
+    * Save the Enum values for the enumObject for the resource.
+    * lookup_value table:
+    * +--------------------------+------------+------+-----+---------------------+-------------------------------+
+    * | Field                    | Type       | Null | Key | Default             | Extra                         |
+    * +--------------------------+------------+------+-----+---------------------+-------------------------------+
+    * | LookupValueKey           | text       | YES  |     | NULL                |                               |
+    * | LookupValueKeyNumeric    | bigint(20) | YES  |     | NULL                |                               |
+    * | ResourceName             | text       | YES  |     | NULL                |                               |
+    * | ResourceRecordKey        | text       | YES  |     | NULL                |                               |
+    * | ResourceRecordKeyNumeric | bigint(20) | YES  |     | NULL                |                               |
+    * | LookupKey                | text       | YES  |     | NULL                |                               |
+    * | modificationTimestamp    | timestamp  | NO   |     | current_timestamp() | on update current_timestamp() |
+    * | FieldName                | text       | NO   |     | NULL                |                               |
+    * +--------------------------+------------+------+-----+---------------------+-------------------------------+
+    * @param resource
+    * @param values
+    */
+   private void saveEnumData(ResourceInfo resource, String lookupEnumField, Object values)
+   {
+      String queryString = "insert into lookup_value";
+
+      /**
+       String value = resultSet.getString("LookupKey");
+       String fieldName = resultSet.getString("FieldName");
+       String resourceRecordKey = resultSet.getString("ResourceRecordKey");
+       */
+
+      try
+      {
+         Statement statement = connect.createStatement();
+         List<String> columnNames = Arrays.asList("FieldName","LookupKey");
+
+         ArrayList valueArray;
+
+         if (values instanceof ArrayList)
+         {
+            valueArray = (ArrayList) values;
+         }
+         else
+         {
+            ArrayList temp = new ArrayList();
+            temp.add(values);
+            valueArray = temp;
+         }
+
+         for (Object value : valueArray)
+         {
+            ArrayList<String> columnValues = new ArrayList(Arrays.asList(lookupEnumField,value.toString()));
+         }
+
+      }
+      catch (SQLException e)
+      {
+         LOG.error(e.getMessage());
+      }
+
+   }
+
+
    @Override public void updateEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType requestFormat, ContentType responseFormat)
       throws ODataApplicationException, ODataLibraryException
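The new `saveEnumData` helper only assembles the column names (`FieldName`, `LookupKey`) and values so far; the actual INSERT is not executed yet. For reference, the equivalent statement against the `lookup_value` table documented in the Javadoc would look roughly like this (a sketch using the mysql CLI with the host/credentials from `env-default`; the field/value pair is hypothetical, borrowed from the EnumTest sample code):

```bash
# Sketch: manual insert matching the columns saveEnumData collects (FieldName, LookupKey).
mysql -h docker-mysql -u root -proot reso_data_dictionary_1_7 \
  -e "INSERT INTO lookup_value (FieldName, LookupKey) VALUES ('EnumTest', 'SampleEnumTestEnumValue');"
```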
@@ -35,6 +35,13 @@ public class CommonDataProcessing
    private static final Logger LOG = LoggerFactory.getLogger(CommonDataProcessing.class);
    private static HashMap<String, List<FieldInfo>> resourceEnumFields = new HashMap<>();

+
+   /**
+    * This function will return the Enum fields for a given resource.
+    * It returns from the cache if found, otherwise it finds the Enum fields from the Field list and caches it for later use.
+    * @param resource
+    * @return List<FieldInfo> The Enum fields' FieldInfo values
+    */
    public static List<FieldInfo> gatherEnumFields(ResourceInfo resource)
    {
       String resourceName = resource.getResourceName();
@@ -56,23 +63,35 @@ public class CommonDataProcessing
          }
       }

+      // Put it in the cache
       CommonDataProcessing.resourceEnumFields.put(resourceName, enumFields);

       return enumFields;
    }


+   /**
+    * This will return the value for the field from the result set from the data source.
+    * @param field The field metadata
+    * @param resultSet The data source row
+    * @return A Java Object representing the value. It can be anything, but should be a simple representation for ease of manipulating.
+    * @throws SQLException in case of SQL error from the data source
+    */
    public static Object getFieldValueFromRow(FieldInfo field, ResultSet resultSet) throws SQLException
    {
       String fieldName = field.getFieldName();
       Object value = null;
+      // In case of a String
       if (field.getType().equals(EdmPrimitiveTypeKind.String.getFullQualifiedName()))
       {
          value = resultSet.getString(fieldName);
       }
+      // In case of a DateTime entry
       else if (field.getType().equals(EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName()))
       {
          value = resultSet.getTimestamp(fieldName);
       }
+      // @TODO: More will have to be added here, ie: Integers, as data comes in, we can extend this easily here.
       else
       {
          LOG.info("Field Name: "+field.getFieldName()+" Field type: "+field.getType());
@@ -81,16 +100,32 @@ public class CommonDataProcessing
       return value;
    }


+   /**
+    * Builds an Entity from the row from the Resource's data source
+    * @param resultSet Data source row result
+    * @param resource The resource we're making an Entity for
+    * @param selectLookup An optional lookup of boolean flags that will only fill in the Entity values for entries with True lookup values
+    * @return An Entity representing the data source row
+    * @throws SQLException in case of SQL error from the data source
+    */
    public static Entity getEntityFromRow(ResultSet resultSet, ResourceInfo resource, HashMap<String,Boolean> selectLookup) throws SQLException
    {
       String primaryFieldName = resource.getPrimaryKeyName();
       ArrayList<FieldInfo> fields = resource.getFieldList();

+      // Lookup Key for the primary key
       String lookupKey = null;
+      // We only need to set the entity ID later if we're providing selectLookup and the primary field name is being requested
+      // @TODO: May need different logic here, ie: selectLookup==null || ...
       if (selectLookup!=null && selectLookup.get(primaryFieldName)!=null)
       {
          lookupKey = resultSet.getString(primaryFieldName);
       }

+      // New entity to be populated
       Entity ent = new Entity();

       for (FieldInfo field : fields)
       {
          String fieldName = field.getODATAFieldName();
@@ -98,14 +133,17 @@ public class CommonDataProcessing
          if ( (selectLookup==null || selectLookup.containsKey(fieldName) ))
          {
             value = CommonDataProcessing.getFieldValueFromRow(field, resultSet);
+            // We only load Enums from the lookup_value table. @TODO: This may need revision to accommodate lookups on resource tables
             if (field instanceof EnumFieldInfo)
             {
                LOG.error("ENUMS currently only handles by values in lookup_value table. One must Define if this uses a key a numeric value.");
             }
+            // This is Enums that are bit masks, stored on the resource.
             else if (field.isCollection())
             {
                ent.addProperty(new Property(null, fieldName, ValueType.ENUM, value));
             }
+            // Simply put in primitive values as entity properties.
             else
             {
                ent.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, value));
|
@ -113,6 +151,7 @@ public class CommonDataProcessing
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set the entity ID if the lookupKey is provided in the select lookups
|
||||||
if (lookupKey!=null)
|
if (lookupKey!=null)
|
||||||
{
|
{
|
||||||
ent.setId(createId(resource.getResourcesName(), lookupKey));
|
ent.setId(createId(resource.getResourcesName(), lookupKey));
|
||||||
|
@@ -121,6 +160,16 @@ public class CommonDataProcessing
       return ent;
    }


+   /**
+    * Returns a HashMap representation of a row from the data source, similar to the above function.
+    * Useful for building a simple Lookup cache, apart from Entities
+    * @param resultSet Data source row result
+    * @param resource The resource we're making an Entity for
+    * @param selectLookup An optional lookup of boolean flags that will only fill in the Entity values for entries with True lookup values
+    * @return A HashMap representing the data source row
+    * @throws SQLException in case of SQL error from the data source
+    */
    public static HashMap<String,Object> getObjectFromRow(ResultSet resultSet, ResourceInfo resource, HashMap<String,Boolean> selectLookup) throws SQLException
    {
       String primaryFieldName = resource.getPrimaryKeyName();
@@ -150,6 +199,14 @@ public class CommonDataProcessing
       return ent;
    }


+   /**
+    * For populating entity values Enums based on a potential non-sequential data source query results
+    * @param resultSet Data source row result
+    * @param entities A lookup of HashMap entities to be populated with Enum values
+    * @param enumFields The Enum fields to populate for the resource
+    * @throws SQLException in case of SQL error from the data source
+    */
    public static void getEntityValues(ResultSet resultSet,HashMap<String, HashMap<String, Object>> entities, List<FieldInfo> enumFields) throws SQLException
    {
       HashMap<String, EnumFieldInfo> enumFieldLookup = new HashMap<>();
@@ -207,20 +264,26 @@ public class CommonDataProcessing
       }
    }

-   public static void setEntityEnums(HashMap<String, Object> enumValues, Entity entity, List<FieldInfo> enumFields) throws SQLException
-   {
-      HashMap<String,EnumFieldInfo> enumFieldLookup = new HashMap<>();
-
+
+   /**
+    * Translate the Enum values from a HashMap representation to an Entity representation
+    * @param enumValues The HashMap representation of the Enum values from the data source
+    * @param entity The Entity to populate with Enum values
+    * @param enumFields The Enum fields on the Entity we want values for
+    */
+   public static void setEntityEnums(HashMap<String, Object> enumValues, Entity entity, List<FieldInfo> enumFields)
+   {
      for (FieldInfo field: enumFields)
      {
         EnumFieldInfo enumField = (EnumFieldInfo) field;
         String fieldName = enumField.getFieldName();
-        long totalFlagValues = 3;
+        long totalFlagValues = 0;

        if (field.isFlags())
        {
           try
           {
+             // Builds a bit flag representation of the multiple values.
              Object flagValues = enumValues.get(fieldName);
              ArrayList<Object> flagsArray = (ArrayList<Object>) flagValues;
              for (Object flagObj : flagsArray)
@@ -229,18 +292,21 @@ public class CommonDataProcessing
                 totalFlagValues = totalFlagValues + flagLong;
              }
           }
-          catch (Exception e)
+          catch (Exception e) // In case of casting error. "Should not happen"
           {
              LOG.error(e.getMessage());
           }
        }

+       // There's many ways to represent Enums
        if (field.isCollection())
        {
+          // As a Collection with bit flags
           if (field.isFlags())
           {
              entity.addProperty(new Property(null, fieldName, ValueType.ENUM, totalFlagValues)); // @ToDo: This might not be compatible with anything...
           }
+          // A collection of Primitive types
           else
           {
              entity.addProperty(new Property(null, fieldName, ValueType.COLLECTION_PRIMITIVE, enumValues.get(fieldName)));
@@ -248,10 +314,12 @@ public class CommonDataProcessing
           }
        }
        else
        {
+          // Single value, bit flag representation
           if (field.isFlags())
           {
              entity.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, totalFlagValues));
           }
+          // Single value Primitive
           else
           {
              entity.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, enumValues.get(fieldName)));
@@ -260,6 +328,12 @@ public class CommonDataProcessing
        }
     }


+   /**
+    * Translates an Entity to a HashMap representation
+    * @param entity The Entity to turn into a HashMap
+    * @return The HashMap representation of the Entity
+    */
    public static HashMap<String,Object> translateEntityToMap(Entity entity)
    {
       HashMap<String,Object> result = new HashMap<>();
@@ -276,6 +350,13 @@ public class CommonDataProcessing
       return result;
    }


+   /**
+    * Loads all Resource entries into a List of HashMap representations of the entries. Useful for caching.
+    * @param connect The data source connection
+    * @param resource The Resource to load
+    * @return A List of HashMap representations of the entries
+    */
    public static ArrayList<HashMap<String,Object>> loadAllResource(Connection connect, ResourceInfo resource)
    {
       ArrayList<FieldInfo> fields = resource.getFieldList();
@@ -312,8 +393,15 @@ public class CommonDataProcessing
       }

       return productList;
    }


+   /**
+    * Creates an unique URI identifier for the entity / id
+    * @param entitySetName Name of the Entity set
+    * @param id unique ID of the object
+    * @return unique URI identifier for the entity / id
+    */
    private static URI createId(String entitySetName, Object id) {
       try {
          return new URI(entitySetName + "('" + id + "')");
@@ -61,10 +61,10 @@ public class LookupDefinition extends ResourceInfo
       fieldInfo = new FieldInfo("ModificationTimestamp", EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName());
       list.add(fieldInfo);

+      /**
       //// Enum Test code
       EnumFieldInfo enumFieldInfo = new EnumFieldInfo("EnumTest", EdmPrimitiveTypeKind.Int64.getFullQualifiedName());

-      /**
       enumFieldInfo.setLookupName("EnumTest");
       //enumFieldInfo.setCollection();
       enumFieldInfo.setFlags();
@@ -17,140 +17,118 @@ import java.util.HashMap;
 import static org.reso.service.servlet.RESOservlet.resourceLookup;
 import static org.reso.service.servlet.RESOservlet.getConnection;

(The remainder of this hunk reformats EnumFieldInfo from Allman-style braces to same-line braces with no logic changes; the reformatted class follows.)

 public class EnumFieldInfo extends FieldInfo {
    private String lookupName;
    private final ArrayList<EnumValueInfo> values = new ArrayList<>();
    private final HashMap<String, Object> valueLookup = new HashMap<>();

    private static final Logger LOG = LoggerFactory.getLogger(EnumFieldInfo.class);
    private boolean isCollection = false;
    private boolean isFlags = false;

    private static final String LOOKUP_COLUMN_NAME = "LookupValue";

    public EnumFieldInfo(String fieldName, FullQualifiedName type) {
       super(fieldName, type);
    }

    public void addValue(EnumValueInfo value) {
       values.add(value);
    }

    private void loadValues() {
       ResourceInfo resource = resourceLookup.get("Lookup");
       if (resource != null) {
          Connection connect = getConnection();
          String queryString = null;
          try {
             Statement statement = connect.createStatement();
             HashMap<String, Boolean> selectLookup = new HashMap<>();
             selectLookup.put(LOOKUP_COLUMN_NAME, true);

             queryString = "Select " + LOOKUP_COLUMN_NAME + " from " + resource.getTableName() + " WHERE LookupName = '" + lookupName + "'";
             LOG.debug("Query: " + queryString);

             ResultSet resultSet = statement.executeQuery(queryString);
             while (resultSet.next()) {
                Entity ent = CommonDataProcessing.getEntityFromRow(resultSet, resource, selectLookup);
                Property property = ent.getProperty(LOOKUP_COLUMN_NAME);
                String val = property.getValue().toString();
                values.add(new EnumValueInfo(val));
             }
          } catch (Exception e) {
             LOG.info("Query: " + queryString);
             LOG.error("Error in finding Lookup values for " + lookupName + ": " + e.getMessage());
          }
       }
    }

    public ArrayList<EnumValueInfo> getValues() {
       if (values.size() == 0) {
          EnumValueInfo sampleValue = new EnumValueInfo("Sample" + lookupName + "EnumValue");
          values.add(sampleValue);
       }

       return values;
    }

    public void setLookupName(String name) {
       lookupName = name;
    }

    public FullQualifiedName getType() {
       if (values.size() == 0) {
          getValues();
       }
       if (values.size() > 0) {
          return new FullQualifiedName("org.reso.metadata.enums." + lookupName);
       }

       return super.getType();
    }

    /**
     * Accessor for lookupName
     *
     * @return
     */
    public String getLookupName() {
       return lookupName;
    }

    public boolean isCollection() {
       return isCollection;
    }

    public void setCollection() {
       isCollection = true;
    }

    public void setFlags() {
       isFlags = true;
    }

    public boolean isFlags() {
       return isFlags;
    }

    public Object getValueOf(String enumStringValue) {
       Object value = valueLookup.get(enumStringValue);
       if (value == null) {
          long bitValue = 1;
          for (EnumValueInfo val : values) {
             valueLookup.put(val.getValue(), bitValue);
             if (isFlags) {
                bitValue = bitValue * 2;
             } else {
                bitValue = bitValue + 1;
             }
          }
          value = valueLookup.get(enumStringValue);
       }

       return value;
    }
 }
@@ -12,6 +12,10 @@ import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+import org.reso.service.servlet.RESOservlet;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public class DefinitionBuilder
 {
@@ -31,6 +35,8 @@ public class DefinitionBuilder
          new AbstractMap.SimpleEntry<>("version", true))
          .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

+   private static final Logger LOG = LoggerFactory.getLogger(DefinitionBuilder.class);
+
    // Internals
    private final String fileName;
    private JsonReader reader;
@@ -50,6 +56,7 @@ public class DefinitionBuilder
       }
       catch (FileNotFoundException e)
       {
+         LOG.info("ERROR:",e.getMessage());
         e.printStackTrace();
       }
    }
@@ -23,6 +23,6 @@

    <servlet-mapping>
       <servlet-name>RESOservlet</servlet-name>
-      <url-pattern>/*</url-pattern>
+      <url-pattern>/2.0.0/*</url-pattern>
    </servlet-mapping>
 </web-app>
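With the servlet now mapped to `/2.0.0/*` and the WAR renamed to `core.war`, the metadata document moves to the URL the updated README points at. A quick check against a locally running server (a sketch, assuming Tomcat's default port 8080):

```bash
# Sketch: fetch the OData metadata document from the relocated endpoint.
curl 'http://localhost:8080/core/2.0.0/$metadata'
```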