Compare commits

...

11 Commits

Author SHA1 Message Date
YuCheng Hu 355ca0bc16 update the image to run results 2023-08-29 11:07:44 -04:00
YuCheng Hu 4b4adb2cf5 Fixed the build error because windows WSL in ubuntu version 22.04 2023-08-28 17:48:57 -04:00
YuCheng Hu d2e2f54722 format the source code for the issue we have 2023-08-28 17:46:18 -04:00
YuCheng Hu 40523282fb Change Java version to 11 2023-08-28 17:16:57 -04:00
YuCheng Hu 1e14988bb9 Update the package for jackson to version 2.15.2 2023-08-28 17:16:39 -04:00
YuCheng Hu 087c46b273 Change to use our maven repo 2023-08-28 16:14:12 -04:00
michaelpede 5433467c45 Progress on saving & changing URI to include version 2021-11-08 11:57:00 -08:00
michaelpede efccb76206 Tweaking scripts for Windows build (Windows is not expected to be a large target environment, but nice to have for a development test server.) 2021-11-03 23:46:34 -07:00
michaelpede 953baf5a64 Build to DockerHub 2021-11-02 09:58:05 -07:00
michaelpede 2171cd60a7 Notes and changes for deploying to different environments. 2021-10-26 15:36:30 -07:00
michaelpede 86f6d28044 Default ENV changes 2021-10-26 10:31:33 -07:00
20 changed files with 439 additions and 209 deletions

62
.github/workflows/commit-dev.yml vendored Normal file

@@ -0,0 +1,62 @@
name: RESO Web API Reference Server Development Environment CI/CD
on:
push:
branches:
- "develop"
- "feature/*-development"
- "release/*-development"
- "hotfix/*-development"
- "support/*-development"
jobs:
build-and-deploy:
runs-on: ubuntu-20.04
env:
ENVIRONMENT: dev
DOCKER_BUILDKIT: 1
COMPOSE_FILE: docker-compose.yml:./optional/docker-db-compose.yml
SQL_HOST: docker-mysql
SQL_USER: root
SQL_PASSWORD: root
SQL_DB_DRIVER: com.mysql.cj.jdbc.Driver
SQL_CONNECTION_STR: jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
CERT_REPORT_FILENAME: RESODataDictionary-1.7.metadata-report.json
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v2
with:
java-version: '11'
distribution: 'adopt'
- name: Run ENV setup script
run: |
chmod +x ./docker/scripts/setup_build_env.sh
./docker/scripts/setup_build_env.sh
- name: Run build script
run: |
chmod +x ./docker/scripts/build.sh
./docker/scripts/build.sh
- name: Run docker-compose to make the images
run: docker-compose build
-
name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Push to Dockerhub
id: docker_build
uses: docker/build-push-action@v2
with:
push: true
tags: michaelpede/reso-web-api-reference-server_odata-manager-app:latest
# We don't really need the SQL server "build" as it's a default Docker build
# - name: Run docker build for the WEB API Server Database
# run: docker save --output database-server.tar mysql/mysql-server


@@ -1,41 +0,0 @@
# This workflow will build a package using Gradle and then publish it to GitHub packages when a release is created
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#Publishing-using-gradle
name: Gradle Package
on:
push:
branches:
- "develop" # not a branch that currently exists. No need to do this atm.
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v2
with:
java-version: '11'
distribution: 'adopt'
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
settings-path: ${{ github.workspace }} # location for the settings.xml file
- name: Build with Gradle
run: gradle build
- name: Create WAR
run: gradle war
# The USERNAME and TOKEN need to correspond to the credentials environment variables used in
# the publishing section of your build.gradle
- name: Publish to GitHub Packages
run: gradle publish
env:
USERNAME: ${{ github.actor }}
TOKEN: ${{ secrets.GITHUB_TOKEN }}

2
.gitignore vendored

@@ -36,6 +36,8 @@ bin
classes
.DS_Store
*.local
/*.sql
/*.json
nb-configuration.xml
.externalToolBuilders
maven-eclipse.xml


@@ -1,11 +1,11 @@
FROM tomcat:9
#FROM tomcat:latest
#FROM tomcat:latest # latest stopped working, Nov 2021
ENV JPDA_ADDRESS="*:8000"
ENV JPDA_TRANSPORT="dt_socket"
#Not needed while volume mapped for development
#COPY ./target/RESOservice-1.0.war /usr/local/tomcat/webapps/
#COPY ./target/RESODataDictionary-1.7.metadata-report.json /usr/local/tomcat/webapps/
COPY ./target/RESOservice-1.0.war /usr/local/tomcat/webapps/
COPY ./target/RESODataDictionary-1.7.metadata-report.json /usr/local/tomcat/webapps/
CMD ["catalina.sh", "jpda", "run"]


@@ -18,9 +18,11 @@ Run the `run.sh`
## Access the Server
Assuming you're running the server locally, go to [http://localhost:8080/RESOservice-1.0/$metadata](http://localhost:8080/RESOservice-1.0/$metadata)\
Assuming you're running the server locally, go to [http://localhost:8080/core/2.0.0/$metadata](http://localhost:8080/core/2.0.0/$metadata)\
Otherwise, you will have to replace `localhost` with the IP of your Docker machine.
![](https://cdn.ossez.com/discourse-uploads/optimized/2X/8/881469dc9b204975de209b56f0d897ba8b782347_2_690x439.png)
## Running with a different database
If you set the `SQL_HOST` Environment Variable, then the build script will not build the test database.
@@ -31,3 +33,27 @@ You will need to configure the following environment variables, so the server ca
* SQL_HOST
* SQL_USER
* SQL_PASSWORD
## ENVIRONMENT SPECIFIC NOTES
The build scripts were moved to run inside a Docker container so that they work consistently across environments.
### Windows
On Windows, running under a Bash shell will work, assuming you meet the above requirements.
Don't forget to have Docker installed for Windows.
There is an `env-default-windows` file you should rename to `.env` before running the build script.
The `docker/docker-builder` file has one line commented out for Windows users and another line that needs to be commented out.
### MAC
This has not been tested. Feedback from anyone willing to try it would be appreciated.
## Build Failures
If a build fails and you have fixed the source of the error, delete any prior Docker containers before rebuilding with the build scripts.
## Customizing your setup
You can have your own SQL database. Just copy the `env-default` file to `.env` and modify the appropriate properties.
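
As a rough illustration of how these `SQL_*` variables might be consumed, the sketch below reads them with `System.getenv()` (the same mechanism the processors later in this compare use) and opens a JDBC connection. The variable names and the MySQL connection string come from the `env-default` files shown further down; the class and method names here are hypothetical and not part of the reference server.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

// Hypothetical helper (not part of the reference server) sketching how the
// SQL_* variables from the env-default files in this compare could be consumed.
public class EnvConnectionSketch {
    public static Connection open() throws ClassNotFoundException, SQLException {
        // e.g. com.mysql.cj.jdbc.Driver, as set in env-default
        Class.forName(System.getenv("SQL_DB_DRIVER"));

        // Prefer the full connection string when it is provided...
        String url = System.getenv("SQL_CONNECTION_STR");
        if (url == null) {
            // ...otherwise assume a plain MySQL URL built from SQL_HOST
            // (the database name below is taken from env-default).
            url = "jdbc:mysql://" + System.getenv("SQL_HOST") + "/reso_data_dictionary_1_7";
        }
        return DriverManager.getConnection(url,
                System.getenv("SQL_USER"),
                System.getenv("SQL_PASSWORD"));
    }
}
```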


@@ -11,14 +11,14 @@ plugins {
repositories {
mavenLocal()
maven {
url = uri('https://repo.maven.apache.org/maven2/')
url = uri('https://repo.ossez.com/repository/maven-public/')
}
}
dependencies {
implementation 'org.postgresql:postgresql:42.2.23'
implementation 'org.apache.olingo:odata-server-api:4.8.0'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.11.1'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.15.2'
implementation 'org.apache.olingo:odata-commons-api:4.8.0'
implementation 'org.apache.olingo:odata-commons-core:4.8.0'
implementation 'org.slf4j:slf4j-api:1.7.11'


@@ -5,6 +5,14 @@ HOME_DIR=`dirname ${REAL_VAR0}`
TEMP_DIR="${HOME_DIR}/temp"
SQL_DIR="${HOME_DIR}/sql"
if [ ! -f "${HOME_DIR}/.env" ]
then
cp "${HOME_DIR}/env-default" "${HOME_DIR}/.env"
fi
#Needed for Linux builds
chmod a+x ./docker/scripts/*
docker build -t reso-builder -f docker/docker-builder .
docker run --name builder --mount type=bind,source="${HOME_DIR}",target=/usr/src/app -t reso-builder


@@ -1,9 +1,11 @@
FROM ubuntu:20.10
FROM ubuntu:22.04
WORKDIR /usr/src/app
ARG DEBIAN_FRONTEND=noninteractive
# Comment next line out for Windows builds
#ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y wget openjdk-8-jdk curl pip maven docker-compose
# Needed for Windows builds
RUN update-java-alternatives -s java-1.8.0-openjdk-amd64
CMD ./docker/scripts/build.sh
CMD sh /usr/src/app/docker/scripts/build.sh


@@ -1,18 +1,13 @@
#!/bin/bash
REAL_VAR0=`readlink -f $0`
HOME_DIR_OLD=`dirname ${REAL_VAR0}`
HOME_DIR=/usr/src/app
cd "${HOME_DIR}"
TEMP_DIR="${HOME_DIR}/temp"
SQL_DIR="${HOME_DIR}/sql"
# Ensure we have directories set up
[ -d "${TEMP_DIR}" ] && echo "temp directory found." || mkdir ${TEMP_DIR}
[ -d "${SQL_DIR}" ] && echo "sql directory found." || mkdir ${SQL_DIR}
if [ ! -f "${HOME_DIR}/.env" ]
then
cp "${HOME_DIR}/env-default" "${HOME_DIR}/.env"
fi
if [ -z "${SQL_HOST}" ]
then
@@ -42,16 +37,24 @@ fi
#unzip -d /opt/gradle /tmp/gradle-${VERSION}-bin.zip
#ln -s /opt/gradle/gradle-${VERSION} /opt/gradle/latest
if gradle build
then
cp build/libs/RESOservice-1.0.war ./target/core.war
cp RESODataDictionary-1.7.metadata-report.json ./target/
else
#/opt/gradle/latest/bin/gradle build
#mkdir target
#cp build/libs/RESOservice-1.0.war ./target/
#cp RESODataDictionary-1.7.metadata-report.json ./target/
if ! mvn compile
then
echo "Maven could not be found."
exit
else
mvn package
cp RESODataDictionary-1.7.metadata-report.json ./target/
if ! mvn compile
then
echo "Maven could not be found."
exit
else
mvn package
mv ./target/RESOservice-1.0.war ./target/core.war
cp RESODataDictionary-1.7.metadata-report.json ./target/
fi
fi


@@ -0,0 +1,5 @@
#!/bin/bash
# This will run commands to set up your build server / Ubuntu 20.10 with the needed prerequisites.
DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y wget pip maven docker-compose


@@ -4,5 +4,4 @@ SQL_USER=root
SQL_PASSWORD=root
SQL_DB_DRIVER=com.mysql.cj.jdbc.Driver
SQL_CONNECTION_STR=jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
#&user=root&password=root
CERT_REPORT_FILENAME=RESODataDictionary-1.7.metadata-report.json

8
env-default-windows Normal file

@@ -0,0 +1,8 @@
#Compose file needs a specific separator on Windows.
COMPOSE_FILE=docker-compose.yml;./optional/docker-db-compose.yml
SQL_HOST=docker-mysql
SQL_USER=root
SQL_PASSWORD=root
SQL_DB_DRIVER=com.mysql.cj.jdbc.Driver
SQL_CONNECTION_STR=jdbc:mysql://docker-mysql/reso_data_dictionary_1_7?autoReconnect=true&maxReconnects=4
CERT_REPORT_FILENAME=RESODataDictionary-1.7.metadata-report.json


@@ -11,8 +11,8 @@
<packaging>war</packaging>
<properties>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
<maven.compiler.source>11</maven.compiler.source>
<junit.version>5.7.0</junit.version>
<javax.version>2.5</javax.version>
<odata.version>4.8.0</odata.version>


@@ -307,6 +307,7 @@ public class GenericEntityCollectionProcessor implements EntityCollectionProcess
}
for (Entity product :productList)
{
// getValue() should already return a String, so toString() just passes it through while keeping the following assignment simple.
String key = product.getProperty(primaryFieldName).getValue().toString();
HashMap<String, Object> enumValues = entities.get(key);
CommonDataProcessing.setEntityEnums(enumValues,product,enumFields);


@@ -32,10 +32,7 @@ import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.*;
import java.util.*;
import static org.reso.service.servlet.RESOservlet.resourceLookup;
@@ -102,12 +99,23 @@ public class GenericEntityProcessor implements EntityProcessor
response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
}
protected Entity getData(EdmEntitySet edmEntitySet, List<UriParameter> keyPredicates, ResourceInfo resource) throws ODataApplicationException {
/**
* Reads data from a resource and returns it as a HashMap
* @param keyPredicates the key predicates identifying the requested record
* @param resource the resource to read from
* @return a HashMap of field names to values for the matching entity
*/
private HashMap<String,Object> getDataToHash(List<UriParameter> keyPredicates, ResourceInfo resource)
{
return CommonDataProcessing.translateEntityToMap(this.getData(null, keyPredicates, resource));
}
protected Entity getData(EdmEntitySet edmEntitySet, List<UriParameter> keyPredicates, ResourceInfo resource) {
ArrayList<FieldInfo> fields = resource.getFieldList();
Entity product = null;
Map<String, String> properties = System.getenv();
List<FieldInfo> enumFields = CommonDataProcessing.gatherEnumFields(resource);
try {
@@ -119,18 +127,21 @@ public class GenericEntityProcessor implements EntityProcessor
// Result set get the result of the SQL query
String queryString = null;
for (final UriParameter key : keyPredicates)
if (null!=keyPredicates)
{
// key
String keyName = key.getName(); // .toLowerCase();
String keyValue = key.getText();
if (sqlCriteria==null)
for (final UriParameter key : keyPredicates)
{
sqlCriteria = keyName + " = " + keyValue;
}
else
{
sqlCriteria = sqlCriteria + " and " + keyName + " = " + keyValue;
// key
String keyName = key.getName(); // .toLowerCase();
String keyValue = key.getText();
if (sqlCriteria==null)
{
sqlCriteria = keyName + " = " + keyValue;
}
else
{
sqlCriteria = sqlCriteria + " and " + keyName + " = " + keyValue;
}
}
}
@@ -213,22 +224,23 @@ public class GenericEntityProcessor implements EntityProcessor
DeserializerResult result = deserializer.entity(requestInputStream, edmEntityType);
Entity requestEntity = result.getEntity();
// 2.2 do the creation in backend, which returns the newly created entity
//Entity createdEntity = storage.createEntityData(edmEntitySet, requestEntity);
HashMap<String, Object> mappedObj = CommonDataProcessing.translateEntityToMap(requestEntity);
String primaryFieldName = resource.getPrimaryKeyName();
List<FieldInfo> enumFields = CommonDataProcessing.gatherEnumFields(resource);
ArrayList<Object> enumValues = new ArrayList<>();
HashMap<String, Object> enumValues = new HashMap<>();
for (FieldInfo field: enumFields)
{
// We remove all fields that are collections so they can be saved to the lookup_value table separately. @TODO: save these values
if (field.isCollection())
{
String fieldName = field.getFieldName();
Object value = mappedObj.remove(fieldName);
enumValues.add(value);
enumValues.put(fieldName, value);
}
}
saveData(resource, mappedObj);
saveEnumData(resource, enumValues);
// 3. serialize the response (we have to return the created entity)
ContextURL contextUrl = ContextURL.with().entitySet(edmEntitySet).build();
@@ -242,7 +254,8 @@ public class GenericEntityProcessor implements EntityProcessor
//4. configure the response object
response.setContent(serializedResponse.getContent());
response.setStatusCode(HttpStatusCode.CREATED.getStatusCode());
response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString()); }
response.setHeader(HttpHeader.CONTENT_TYPE, responseFormat.toContentTypeString());
}
private void saveData(ResourceInfo resource, HashMap<String, Object> mappedObj)
@@ -300,6 +313,75 @@ public class GenericEntityProcessor implements EntityProcessor
// Result set get the result of the SQL query
}
private void saveEnumData(ResourceInfo resource, HashMap<String, Object> enumValues)
{
for (String key: enumValues.keySet() )
{
Object value = enumValues.get(key);
saveEnumData(resource, key, value);
}
}
/**
* Save the Enum values for the enumObject for the resource.
* lookup_value table:
* +--------------------------+------------+------+-----+---------------------+-------------------------------+
* | Field | Type | Null | Key | Default | Extra |
* +--------------------------+------------+------+-----+---------------------+-------------------------------+
* | LookupValueKey | text | YES | | NULL | |
* | LookupValueKeyNumeric | bigint(20) | YES | | NULL | |
* | ResourceName | text | YES | | NULL | |
* | ResourceRecordKey | text | YES | | NULL | |
* | ResourceRecordKeyNumeric | bigint(20) | YES | | NULL | |
* | LookupKey | text | YES | | NULL | |
* | modificationTimestamp | timestamp | NO | | current_timestamp() | on update current_timestamp() |
* | FieldName | text | NO | | NULL | |
* +--------------------------+------------+------+-----+---------------------+-------------------------------+
* @param resource the resource the enum values belong to
* @param lookupEnumField the enum field (FieldName) the values are for
* @param values a single value or an ArrayList of values to save
*/
private void saveEnumData(ResourceInfo resource, String lookupEnumField, Object values)
{
String queryString = "insert into lookup_value";
/**
String value = resultSet.getString("LookupKey");
String fieldName = resultSet.getString("FieldName");
String resourceRecordKey = resultSet.getString("ResourceRecordKey");
*/
try
{
Statement statement = connect.createStatement();
List<String> columnNames = Arrays.asList("FieldName","LookupKey");
ArrayList valueArray;
if (values instanceof ArrayList)
{
valueArray = (ArrayList) values;
}
else
{
ArrayList temp = new ArrayList();
temp.add(values);
valueArray = temp;
}
for (Object value : valueArray)
{
ArrayList<String> columnValues = new ArrayList(Arrays.asList(lookupEnumField,value.toString()));
}
}
catch (SQLException e)
{
LOG.error(e.getMessage());
}
}
@Override public void updateEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType requestFormat, ContentType responseFormat)
throws ODataApplicationException, ODataLibraryException
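
The `saveEnumData(ResourceInfo, String, Object)` method above normalizes the incoming values and assembles `FieldName`/`LookupKey` column values but, as the earlier `@TODO` notes, it does not yet execute an insert. The sketch below is one hedged way the write to `lookup_value` could be completed under that assumption; it is not the project's implementation, and it only covers the two columns the method already prepares.

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

// Hypothetical sketch of finishing the insert that saveEnumData leaves as a @TODO.
// Only the FieldName and LookupKey columns from the documented lookup_value schema are written.
final class LookupValueWriterSketch {
    static void save(Connection connect, String lookupEnumField, List<?> values) throws SQLException {
        String sql = "insert into lookup_value (FieldName, LookupKey) values (?, ?)";
        try (PreparedStatement insert = connect.prepareStatement(sql)) {
            for (Object value : values) {
                insert.setString(1, lookupEnumField);  // which enum field these values belong to
                insert.setString(2, value.toString()); // the lookup value being saved
                insert.addBatch();
            }
            insert.executeBatch(); // one round trip for all values of this field
        }
    }
}
```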


@@ -35,6 +35,13 @@ public class CommonDataProcessing
private static final Logger LOG = LoggerFactory.getLogger(CommonDataProcessing.class);
private static HashMap<String, List<FieldInfo>> resourceEnumFields = new HashMap<>();
/**
* This function will return the Enum fields for a given resource.
* It returns from the cache if found, otherwise it finds the Enum fields from the Field list and caches it for later use.
* @param resource
* @return List<FieldInfo> The Enum fields' FieldInfo values
*/
public static List<FieldInfo> gatherEnumFields(ResourceInfo resource)
{
String resourceName = resource.getResourceName();
@@ -56,23 +63,35 @@ public class CommonDataProcessing
}
}
// Put it in the cache
CommonDataProcessing.resourceEnumFields.put(resourceName, enumFields);
return enumFields;
}
/**
* This will return the value for the field from the result set from the data source.
* @param field The field metadata
* @param resultSet The data source row
* @return A Java Object representing the value. It can be anything, but should be a simple representation for ease of manipulation.
* @throws SQLException in case of SQL error from the data source
*/
public static Object getFieldValueFromRow(FieldInfo field, ResultSet resultSet) throws SQLException
{
String fieldName = field.getFieldName();
Object value = null;
// In case of a String
if (field.getType().equals(EdmPrimitiveTypeKind.String.getFullQualifiedName()))
{
value = resultSet.getString(fieldName);
}
// In case of a DateTime entry
else if (field.getType().equals(EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName()))
{
value = resultSet.getTimestamp(fieldName);
}
// @TODO: More types will have to be added here (e.g. Integers) as data comes in; this can easily be extended here.
else
{
LOG.info("Field Name: "+field.getFieldName()+" Field type: "+field.getType());
@@ -81,16 +100,32 @@ public class CommonDataProcessing
return value;
}
/**
* Builds an Entity from the row from the Resource's data source
* @param resultSet Data source row result
* @param resource The resource we're making an Entity for
* @param selectLookup An optional lookup of boolean flags that will only fill in the Entity values for entries with True lookup values
* @return An Entity representing the data source row
* @throws SQLException in case of SQL error from the data source
*/
public static Entity getEntityFromRow(ResultSet resultSet, ResourceInfo resource, HashMap<String,Boolean> selectLookup) throws SQLException
{
String primaryFieldName = resource.getPrimaryKeyName();
ArrayList<FieldInfo> fields = resource.getFieldList();
// Lookup Key for the primary key
String lookupKey = null;
// We only need to set the entity ID later if we're providing selectLookup and the primary field name is being requested
// @TODO: May need different logic here, ie: selectLookup==null || ...
if (selectLookup!=null && selectLookup.get(primaryFieldName)!=null)
{
lookupKey = resultSet.getString(primaryFieldName);
}
// New entity to be populated
Entity ent = new Entity();
for (FieldInfo field : fields)
{
String fieldName = field.getODATAFieldName();
@@ -98,14 +133,17 @@
if ( (selectLookup==null || selectLookup.containsKey(fieldName) ))
{
value = CommonDataProcessing.getFieldValueFromRow(field, resultSet);
// We only load Enums from the lookup_value table. @TODO: This may need revision to accommodate lookups on resource tables
if (field instanceof EnumFieldInfo)
{
LOG.error("ENUMS currently only handles by values in lookup_value table. One must Define if this uses a key a numeric value.");
}
// This is Enums that are bit masks, stored on the resource.
else if (field.isCollection())
{
ent.addProperty(new Property(null, fieldName, ValueType.ENUM, value));
}
// Simply put in primitive values as entity properties.
else
{
ent.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, value));
@@ -113,6 +151,7 @@
}
}
// Set the entity ID if the lookupKey is provided in the select lookups
if (lookupKey!=null)
{
ent.setId(createId(resource.getResourcesName(), lookupKey));
@@ -121,6 +160,16 @@
return ent;
}
/**
* Returns a HashMap representation of a row from the data source, similar to the above function.
* Useful for building a simple Lookup cache without constructing Entities
* @param resultSet Data source row result
* @param resource The resource we're making an Entity for
* @param selectLookup An optional lookup of boolean flags that will only fill in the Entity values for entries with True lookup values
* @return A HashMap representing the data source row
* @throws SQLException in case of SQL error from the data source
*/
public static HashMap<String,Object> getObjectFromRow(ResultSet resultSet, ResourceInfo resource, HashMap<String,Boolean> selectLookup) throws SQLException
{
String primaryFieldName = resource.getPrimaryKeyName();
@@ -150,6 +199,14 @@ public class CommonDataProcessing
return ent;
}
/**
* For populating entity Enum values based on potentially non-sequential data source query results
* @param resultSet Data source row result
* @param entities A lookup of HashMap entities to be populated with Enum values
* @param enumFields The Enum fields to populate for the resource
* @throws SQLException in case of SQL error from the data source
*/
public static void getEntityValues(ResultSet resultSet,HashMap<String, HashMap<String, Object>> entities, List<FieldInfo> enumFields) throws SQLException
{
HashMap<String, EnumFieldInfo> enumFieldLookup = new HashMap<>();
@@ -207,20 +264,26 @@ public class CommonDataProcessing
}
}
public static void setEntityEnums(HashMap<String, Object> enumValues, Entity entity, List<FieldInfo> enumFields) throws SQLException
{
HashMap<String,EnumFieldInfo> enumFieldLookup = new HashMap<>();
/**
* Translate the Enum values from a HashMap representation to an Entity representation
* @param enumValues The HashMap representation of the Enum values from the data source
* @param entity The Entity to populate with Enum values
* @param enumFields The Enum fields on the Entity we want values for
*/
public static void setEntityEnums(HashMap<String, Object> enumValues, Entity entity, List<FieldInfo> enumFields)
{
for (FieldInfo field: enumFields)
{
EnumFieldInfo enumField = (EnumFieldInfo) field;
String fieldName = enumField.getFieldName();
long totalFlagValues = 3;
long totalFlagValues = 0;
if (field.isFlags())
{
try
{
// Builds a bit flag representation of the multiple values.
Object flagValues = enumValues.get(fieldName);
ArrayList<Object> flagsArray = (ArrayList<Object>) flagValues;
for (Object flagObj : flagsArray)
@@ -229,18 +292,21 @@ public class CommonDataProcessing
totalFlagValues = totalFlagValues + flagLong;
}
}
catch (Exception e)
catch (Exception e) // In case of casting error. "Should not happen"
{
LOG.error(e.getMessage());
}
}
// There's many ways to represent Enums
if (field.isCollection())
{
// As a Collection with bit flags
if (field.isFlags())
{
entity.addProperty(new Property(null, fieldName, ValueType.ENUM, totalFlagValues)); // @ToDo: This might not be compatible with anything...
}
// A collection of Primitive types
else
{
entity.addProperty(new Property(null, fieldName, ValueType.COLLECTION_PRIMITIVE, enumValues.get(fieldName)));
@@ -248,10 +314,12 @@
}
else
{
// Single value, bit flag representation
if (field.isFlags())
{
entity.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, totalFlagValues));
}
// Single value Primitive
else
{
entity.addProperty(new Property(null, fieldName, ValueType.PRIMITIVE, enumValues.get(fieldName)));
@@ -260,6 +328,12 @@
}
}
/**
* Translates an Entity to a HashMap representation
* @param entity The Entity to turn into a HashMap
* @return The HashMap representation of the Entity
*/
public static HashMap<String,Object> translateEntityToMap(Entity entity)
{
HashMap<String,Object> result = new HashMap<>();
@@ -276,6 +350,13 @@
return result;
}
/**
* Loads all Resource entries into a List of HashMap representations of the entries. Useful for caching.
* @param connect The data source connection
* @param resource The Resource to load
* @return A List of HashMap representations of the entries
*/
public static ArrayList<HashMap<String,Object>> loadAllResource(Connection connect, ResourceInfo resource)
{
ArrayList<FieldInfo> fields = resource.getFieldList();
@@ -312,8 +393,15 @@ public class CommonDataProcessing
}
return productList;
}
/**
* Creates a unique URI identifier for the entity / id
* @param entitySetName Name of the Entity set
* @param id unique ID of the object
* @return unique URI identifier for the entity / id
*/
private static URI createId(String entitySetName, Object id) {
try {
return new URI(entitySetName + "('" + id + "')");
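
To make the flag handling in `setEntityEnums` above concrete: when a field `isFlags()`, the values returned by `getValueOf` (assigned as powers of two in `EnumFieldInfo`, shown in the next file) are summed into a single `totalFlagValues` bit mask. Below is a small illustrative sketch; the lookup value names are hypothetical, not data from this repository.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustration only: mirrors the flag accumulation in setEntityEnums using
// hypothetical lookup values instead of rows read from lookup_value.
public class FlagSumSketch {
    public static void main(String[] args) {
        // getValueOf assigns powers of two when isFlags is set: 1, 2, 4, ...
        Map<String, Long> valueLookup = new HashMap<>();
        valueLookup.put("SampleValueA", 1L);
        valueLookup.put("SampleValueB", 2L);
        valueLookup.put("SampleValueC", 4L);

        // A multi-valued enum field as it comes back from the data source.
        List<String> flagValues = Arrays.asList("SampleValueA", "SampleValueC");

        long totalFlagValues = 0;
        for (String flag : flagValues) {
            totalFlagValues += valueLookup.get(flag); // 1 + 4
        }
        System.out.println(totalFlagValues); // 5, stored as a single ENUM/PRIMITIVE property
    }
}
```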


@@ -61,10 +61,10 @@ public class LookupDefinition extends ResourceInfo
fieldInfo = new FieldInfo("ModificationTimestamp", EdmPrimitiveTypeKind.DateTimeOffset.getFullQualifiedName());
list.add(fieldInfo);
/**
//// Enum Test code
EnumFieldInfo enumFieldInfo = new EnumFieldInfo("EnumTest", EdmPrimitiveTypeKind.Int64.getFullQualifiedName());
/**
enumFieldInfo.setLookupName("EnumTest");
//enumFieldInfo.setCollection();
enumFieldInfo.setFlags();


@@ -17,140 +17,118 @@ import java.util.HashMap;
import static org.reso.service.servlet.RESOservlet.resourceLookup;
import static org.reso.service.servlet.RESOservlet.getConnection;
public class EnumFieldInfo extends FieldInfo
{
private String lookupName;
private final ArrayList<EnumValueInfo> values = new ArrayList<>();
private final HashMap<String,Object> valueLookup = new HashMap<>();
public class EnumFieldInfo extends FieldInfo {
private String lookupName;
private final ArrayList<EnumValueInfo> values = new ArrayList<>();
private final HashMap<String, Object> valueLookup = new HashMap<>();
private static final Logger LOG = LoggerFactory.getLogger(EnumFieldInfo.class);
private boolean isCollection = false;
private boolean isFlags = false;
private static final Logger LOG = LoggerFactory.getLogger(EnumFieldInfo.class);
private boolean isCollection = false;
private boolean isFlags = false;
private static final String LOOKUP_COLUMN_NAME = "LookupValue";
private static final String LOOKUP_COLUMN_NAME = "LookupValue";
public EnumFieldInfo(String fieldName, FullQualifiedName type)
{
super(fieldName, type);
}
public EnumFieldInfo(String fieldName, FullQualifiedName type) {
super(fieldName, type);
}
public void addValue(EnumValueInfo value)
{
values.add(value);
}
public void addValue(EnumValueInfo value) {
values.add(value);
}
private void loadValues()
{
ResourceInfo resource = resourceLookup.get("Lookup");
if (resource!=null)
{
Connection connect = getConnection();
String queryString = null;
try
{
Statement statement = connect.createStatement();
HashMap<String,Boolean> selectLookup = new HashMap<>();
selectLookup.put(LOOKUP_COLUMN_NAME, true);
private void loadValues() {
ResourceInfo resource = resourceLookup.get("Lookup");
if (resource != null) {
Connection connect = getConnection();
String queryString = null;
try {
Statement statement = connect.createStatement();
HashMap<String, Boolean> selectLookup = new HashMap<>();
selectLookup.put(LOOKUP_COLUMN_NAME, true);
queryString = "Select "+LOOKUP_COLUMN_NAME+" from "+resource.getTableName()+" WHERE LookupName = '"+lookupName+"'";
LOG.debug("Query: "+queryString);
queryString = "Select " + LOOKUP_COLUMN_NAME + " from " + resource.getTableName() + " WHERE LookupName = '" + lookupName + "'";
LOG.debug("Query: " + queryString);
ResultSet resultSet = statement.executeQuery(queryString);
while (resultSet.next())
{
Entity ent = CommonDataProcessing.getEntityFromRow(resultSet, resource, selectLookup);
Property property = ent.getProperty(LOOKUP_COLUMN_NAME);
String val = property.getValue().toString();
values.add( new EnumValueInfo(val) );
ResultSet resultSet = statement.executeQuery(queryString);
while (resultSet.next()) {
Entity ent = CommonDataProcessing.getEntityFromRow(resultSet, resource, selectLookup);
Property property = ent.getProperty(LOOKUP_COLUMN_NAME);
String val = property.getValue().toString();
values.add(new EnumValueInfo(val));
}
} catch (Exception e) {
LOG.info("Query: " + queryString);
LOG.error("Error in finding Lookup values for " + lookupName + ": " + e.getMessage());
}
}
catch (Exception e)
{
LOG.info("Query: "+queryString);
LOG.error("Error in finding Lookup values for "+lookupName+": "+e.getMessage());
}
}
}
}
}
public ArrayList<EnumValueInfo> getValues()
{
if (values.size()==0)
{
EnumValueInfo sampleValue = new EnumValueInfo("Sample"+lookupName+"EnumValue");
values.add(sampleValue);
}
public ArrayList<EnumValueInfo> getValues() {
if (values.size() == 0) {
EnumValueInfo sampleValue = new EnumValueInfo("Sample" + lookupName + "EnumValue");
values.add(sampleValue);
}
return values;
}
return values;
}
public void setLookupName(String name) { lookupName=name; }
public void setLookupName(String name) {
lookupName = name;
}
public FullQualifiedName getType()
{
if (values.size()==0)
{
getValues();
}
if (values.size()>0)
{
return new FullQualifiedName("org.reso.metadata.enums." + lookupName);
}
public FullQualifiedName getType() {
if (values.size() == 0) {
getValues();
}
if (values.size() > 0) {
return new FullQualifiedName("org.reso.metadata.enums." + lookupName);
}
return super.getType();
}
return super.getType();
}
/**
* Accessor for lookupName
* @return
*/
public String getLookupName()
{
return lookupName;
}
/**
* Accessor for lookupName
*
* @return
*/
public String getLookupName() {
return lookupName;
}
public boolean isCollection()
{
return isCollection;
}
public boolean isCollection() {
return isCollection;
}
public void setCollection()
{
isCollection = true;
}
public void setCollection() {
isCollection = true;
}
public void setFlags()
{
isFlags = true;
}
public void setFlags() {
isFlags = true;
}
public boolean isFlags()
{
return isFlags;
}
public boolean isFlags() {
return isFlags;
}
public Object getValueOf(String enumStringValue)
{
Object value = valueLookup.get(enumStringValue);
if (value==null)
{
long bitValue = 1;
for (EnumValueInfo val: values)
{
valueLookup.put(val.getValue(),bitValue);
if (isFlags)
{
bitValue = bitValue * 2;
public Object getValueOf(String enumStringValue) {
Object value = valueLookup.get(enumStringValue);
if (value == null) {
long bitValue = 1;
for (EnumValueInfo val : values) {
valueLookup.put(val.getValue(), bitValue);
if (isFlags) {
bitValue = bitValue * 2;
} else {
bitValue = bitValue + 1;
}
}
else
{
bitValue = bitValue+1;
}
}
value = valueLookup.get(enumStringValue);
}
value = valueLookup.get(enumStringValue);
}
return value;
}
return value;
}
}
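
The `loadValues()` method above builds its `LookupValue` query by string concatenation. As a hedged aside, the same query can also be expressed with a bound parameter for `LookupName`; the sketch below is an assumption about how that might look, not code from this repository (the table name still has to be concatenated, since JDBC parameters cannot bind identifiers). The table and column names are taken from the constants and calls in this file.

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

// Sketch of the lookup query issued by loadValues, with LookupName bound as a parameter.
final class LookupQuerySketch {
    static List<String> loadLookupValues(Connection connect, String tableName, String lookupName)
            throws SQLException {
        List<String> values = new ArrayList<>();
        String sql = "select LookupValue from " + tableName + " where LookupName = ?";
        try (PreparedStatement statement = connect.prepareStatement(sql)) {
            statement.setString(1, lookupName); // e.g. the field's lookup name
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    values.add(resultSet.getString("LookupValue"));
                }
            }
        }
        return values;
    }
}
```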


@@ -12,6 +12,10 @@ import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.reso.service.servlet.RESOservlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DefinitionBuilder
{
@@ -31,6 +35,8 @@ public class DefinitionBuilder
new AbstractMap.SimpleEntry<>("version", true))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
private static final Logger LOG = LoggerFactory.getLogger(DefinitionBuilder.class);
// Internals
private final String fileName;
private JsonReader reader;
@@ -50,6 +56,7 @@ public class DefinitionBuilder
}
catch (FileNotFoundException e)
{
LOG.info("ERROR:",e.getMessage());
e.printStackTrace();
}
}


@@ -23,6 +23,6 @@
<servlet-mapping>
<servlet-name>RESOservlet</servlet-name>
<url-pattern>/*</url-pattern>
<url-pattern>/2.0.0/*</url-pattern>
</servlet-mapping>
</web-app>