merge master

Strong Liu 2013-03-25 13:52:05 +08:00
commit 72c5f8f5e9
97 changed files with 6757 additions and 3949 deletions

View File

@@ -84,7 +84,7 @@ task jaxb {
	// input schemas
	cfgXsd = file( 'src/main/resources/org/hibernate/hibernate-configuration-4.0.xsd')
	hbmXsd = file( 'src/main/resources/org/hibernate/hibernate-mapping-4.0.xsd' )
-	ormXsd = file( 'src/main/resources/org/hibernate/ejb/orm_2_0.xsd' )
+	ormXsd = file( 'src/main/resources/org/hibernate/jpa/orm_2_1.xsd' )
	// input bindings
	cfgXjb = file( 'src/main/xjb/hbm-configuration-bindings.xjb' )

View File

@@ -49,26 +49,34 @@ public class InvalidMappingException extends MappingException {
		this.path=path;
	}

-	public InvalidMappingException(String customMessage, XmlDocument xmlDocument, Throwable cause) {
-		this( customMessage, xmlDocument.getOrigin().getType(), xmlDocument.getOrigin().getName(), cause );
-	}
-
-	public InvalidMappingException(String customMessage, XmlDocument xmlDocument) {
-		this( customMessage, xmlDocument.getOrigin().getType(), xmlDocument.getOrigin().getName() );
-	}
-
-	public InvalidMappingException(String customMessage, Origin origin) {
-		this( customMessage, origin.getType().toString(), origin.getName() );
-	}
-
-	public InvalidMappingException(String type, String path) {
-		this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path);
-	}
+//	public InvalidMappingException(String customMessage, XmlDocument xmlDocument, Throwable cause) {
+//		this( customMessage, xmlDocument.getOrigin().getType(), xmlDocument.getOrigin().getName(), cause );
+//	}
+//
+//	public InvalidMappingException(String customMessage, XmlDocument xmlDocument) {
+//		this( customMessage, xmlDocument.getOrigin().getType(), xmlDocument.getOrigin().getName() );
+//	}
+//
+//	public InvalidMappingException(String customMessage, Origin origin) {
+//		this( customMessage, origin.getType().toString(), origin.getName() );
+//	}
+//
+//	public InvalidMappingException(String type, String path) {
+//		this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path);
+//	}

	public InvalidMappingException(String type, String path, Throwable cause) {
		this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path, cause);
	}

+	public InvalidMappingException(String message, Origin origin, Exception cause) {
+		this( message, origin.getType().name(), origin.getName(), cause );
+	}
+
+	public InvalidMappingException(String message, Origin origin) {
+		this( message, origin, null );
+	}
+
	public String getType() {
		return type;
	}

View File

@@ -155,6 +155,7 @@ import org.hibernate.id.SequenceHiLoGenerator;
import org.hibernate.id.TableHiLoGenerator;
import org.hibernate.id.enhanced.SequenceStyleGenerator;
import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Any;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.DependantValue;
@@ -2090,12 +2091,12 @@ public final class AnnotationBinder {
		if ( naturalIdAnn != null ) {
			if ( joinColumns != null ) {
				for ( Ejb3Column column : joinColumns ) {
-					column.addUniqueKey( "_UniqueKey", inSecondPass );
+					column.addUniqueKey( StringHelper.randomFixedLengthHex("UK_"), inSecondPass );
				}
			}
			else {
				for ( Ejb3Column column : columns ) {
-					column.addUniqueKey( "_UniqueKey", inSecondPass );
+					column.addUniqueKey( StringHelper.randomFixedLengthHex("UK_"), inSecondPass );
				}
			}
		}

View File

@@ -112,11 +112,11 @@ import org.hibernate.internal.util.collections.JoinedIterator;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.internal.util.xml.ErrorLogger;
import org.hibernate.internal.util.xml.MappingReader;
-import org.hibernate.internal.util.xml.Origin;
-import org.hibernate.internal.util.xml.OriginImpl;
import org.hibernate.internal.util.xml.XMLHelper;
import org.hibernate.internal.util.xml.XmlDocument;
import org.hibernate.internal.util.xml.XmlDocumentImpl;
+import org.hibernate.jaxb.spi.Origin;
+import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.mapping.AuxiliaryDatabaseObject;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
@@ -473,12 +473,12 @@ public class Configuration implements Serializable {
		catch ( FileNotFoundException e ) {
			throw new MappingNotFoundException( "file", xmlFile.toString() );
		}
-		add( inputSource, "file", name );
+		add( inputSource, SourceType.FILE, name );
		return this;
	}

-	private XmlDocument add(InputSource inputSource, String originType, String originName) {
-		return add( inputSource, new OriginImpl( originType, originName ) );
+	private XmlDocument add(InputSource inputSource, SourceType originType, String originName) {
+		return add( inputSource, new Origin( originType, originName ) );
	}

	private XmlDocument add(InputSource inputSource, Origin origin) {
@@ -549,7 +549,7 @@ public class Configuration implements Serializable {
		}

		LOG.readingMappingsFromFile( xmlFile.getPath() );
-		XmlDocument metadataXml = add( inputSource, "file", name );
+		XmlDocument metadataXml = add( inputSource, SourceType.FILE, name );

		try {
			LOG.debugf( "Writing cache file for: %s to: %s", xmlFile, cachedFile );
@@ -592,7 +592,7 @@ public class Configuration implements Serializable {
		LOG.readingCachedMappings( cachedFile );
		Document document = ( Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) );
-		add( new XmlDocumentImpl( document, "file", xmlFile.getAbsolutePath() ) );
+		add( new XmlDocumentImpl( document, SourceType.FILE, xmlFile.getAbsolutePath() ) );
		return this;
	}
@@ -622,7 +622,7 @@ public class Configuration implements Serializable {
	public Configuration addXML(String xml) throws MappingException {
		LOG.debugf( "Mapping XML:\n%s", xml );
		final InputSource inputSource = new InputSource( new StringReader( xml ) );
-		add( inputSource, "string", "XML String" );
+		add( inputSource, SourceType.STRING, "XML String" );
		return this;
	}
@@ -640,7 +640,7 @@ public class Configuration implements Serializable {
		LOG.debugf( "Reading mapping document from URL : %s", urlExternalForm );
		try {
-			add( url.openStream(), "URL", urlExternalForm );
+			add( url.openStream(), SourceType.URL, urlExternalForm );
		}
		catch ( IOException e ) {
			throw new InvalidMappingException( "Unable to open url stream [" + urlExternalForm + "]", "URL", urlExternalForm, e );
@@ -648,7 +648,7 @@ public class Configuration implements Serializable {
		return this;
	}

-	private XmlDocument add(InputStream inputStream, final String type, final String name) {
+	private XmlDocument add(InputStream inputStream, final SourceType type, final String name) {
		final InputSource inputSource = new InputSource( inputStream );
		try {
			return add( inputSource, type, name );
@@ -675,7 +675,7 @@ public class Configuration implements Serializable {
		LOG.debugf( "Mapping Document:\n%s", doc );
		final Document document = xmlHelper.createDOMReader().read( doc );
-		add( new XmlDocumentImpl( document, "unknown", null ) );
+		add( new XmlDocumentImpl( document, SourceType.DOM, null ) );
		return this;
	}
@@ -689,7 +689,7 @@ public class Configuration implements Serializable {
	 * processing the contained mapping document.
	 */
	public Configuration addInputStream(InputStream xmlInputStream) throws MappingException {
-		add( xmlInputStream, "input stream", null );
+		add( xmlInputStream, SourceType.INPUT_STREAM, null );
		return this;
	}
@@ -708,7 +708,7 @@ public class Configuration implements Serializable {
		if ( resourceInputStream == null ) {
			throw new MappingNotFoundException( "resource", resourceName );
		}
-		add( resourceInputStream, "resource", resourceName );
+		add( resourceInputStream, SourceType.RESOURCE, resourceName );
		return this;
	}
@@ -734,7 +734,7 @@ public class Configuration implements Serializable {
		if ( resourceInputStream == null ) {
			throw new MappingNotFoundException( "resource", resourceName );
		}
-		add( resourceInputStream, "resource", resourceName );
+		add( resourceInputStream, SourceType.RESOURCE, resourceName );
		return this;
	}
@@ -1186,6 +1186,21 @@ public class Configuration implements Serializable {
						table.isQuoted()
				);

+				Iterator uniqueIter = table.getUniqueKeyIterator();
+				while ( uniqueIter.hasNext() ) {
+					final UniqueKey uniqueKey = (UniqueKey) uniqueIter.next();
+					// Skip if index already exists
+					if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
+						final IndexMetadata meta = tableInfo.getIndexMetadata( uniqueKey.getName() );
+						if ( meta != null ) {
+							continue;
+						}
+					}
+					String constraintString = uniqueKey.sqlCreateString( dialect,
+							mapping, tableCatalog, tableSchema );
+					if (constraintString != null) script.add( constraintString );
+				}
+
				if ( dialect.hasAlterTable() ) {
					Iterator subIter = table.getForeignKeyIterator();
					while ( subIter.hasNext() ) {
@@ -1378,11 +1393,9 @@ public class Configuration implements Serializable {
		for ( Map.Entry<Table, List<UniqueConstraintHolder>> tableListEntry : uniqueConstraintHoldersByTable.entrySet() ) {
			final Table table = tableListEntry.getKey();
			final List<UniqueConstraintHolder> uniqueConstraints = tableListEntry.getValue();
-			int uniqueIndexPerTable = 0;
			for ( UniqueConstraintHolder holder : uniqueConstraints ) {
-				uniqueIndexPerTable++;
				final String keyName = StringHelper.isEmpty( holder.getName() )
-						? "UK_" + table.getName() + "_" + uniqueIndexPerTable
+						? StringHelper.randomFixedLengthHex("UK_")
						: holder.getName();
				buildUniqueKeyFromColumnNames( table, keyName, holder.getColumns() );
			}
@@ -3537,7 +3550,7 @@ public class Configuration implements Serializable {
			}
			catch ( MappingException me ) {
				throw new InvalidMappingException(
-						metadataXml.getOrigin().getType(),
+						metadataXml.getOrigin().getType().name(),
						metadataXml.getOrigin().getName(),
						me
				);

View File

@@ -56,37 +56,57 @@ public class EJB3DTDEntityResolver extends DTDEntityResolver {
	@Override
	public InputSource resolveEntity(String publicId, String systemId) {
		LOG.tracev( "Resolving XML entity {0} : {1}", publicId, systemId );
-		InputSource is = super.resolveEntity( publicId, systemId );
-		if ( is == null ) {
-			if ( systemId != null ) {
-				if ( systemId.endsWith( "orm_1_0.xsd" ) ) {
-					InputStream dtdStream = getStreamFromClasspath( "orm_1_0.xsd" );
-					final InputSource source = buildInputSource( publicId, systemId, dtdStream, false );
-					if (source != null) return source;
-				}
-				else if ( systemId.endsWith( "orm_2_0.xsd" ) ) {
-					InputStream dtdStream = getStreamFromClasspath( "orm_2_0.xsd" );
-					final InputSource source = buildInputSource( publicId, systemId, dtdStream, false );
-					if (source != null) return source;
-				}
-				else if ( systemId.endsWith( "persistence_1_0.xsd" ) ) {
-					InputStream dtdStream = getStreamFromClasspath( "persistence_1_0.xsd" );
-					final InputSource source = buildInputSource( publicId, systemId, dtdStream, true );
-					if (source != null) return source;
-				}
-				else if ( systemId.endsWith( "persistence_2_0.xsd" ) ) {
-					InputStream dtdStream = getStreamFromClasspath( "persistence_2_0.xsd" );
-					final InputSource source = buildInputSource( publicId, systemId, dtdStream, true );
-					if (source != null) return source;
-				}
-			}
-		}
-		else {
-			resolved = true;
-			return is;
-		}
-		//use the default behavior
-		return null;
+		if ( systemId != null ) {
+			if ( systemId.endsWith( "orm_2_1.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "orm_2_1.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, false );
+				if ( source != null ) {
+					return source;
+				}
+			}
+			else if ( systemId.endsWith( "orm_2_0.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "orm_2_0.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, false );
+				if ( source != null ) {
+					return source;
+				}
+			}
+			else if ( systemId.endsWith( "orm_1_0.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "orm_1_0.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, false );
+				if ( source != null ) {
+					return source;
+				}
+			}
+			else if ( systemId.endsWith( "persistence_2_1.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "persistence_2_1.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, true );
+				if ( source != null ) {
+					return source;
+				}
+			}
+			else if ( systemId.endsWith( "persistence_2_0.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "persistence_2_0.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, true );
+				if ( source != null ) {
+					return source;
+				}
+			}
+			else if ( systemId.endsWith( "persistence_1_0.xsd" ) ) {
+				InputStream dtdStream = getStreamFromClasspath( "persistence_1_0.xsd" );
+				final InputSource source = buildInputSource( publicId, systemId, dtdStream, true );
+				if ( source != null ) {
+					return source;
+				}
+			}
+		}
+
+		// because the old code did this too (in terms of setting resolved)
+		InputSource source = super.resolveEntity( publicId, systemId );
+		if ( source != null ) {
+			resolved = true;
+		}
+		return source;
	}

	private InputSource buildInputSource(String publicId, String systemId, InputStream dtdStream, boolean resolved) {
@@ -103,8 +123,8 @@ public class EJB3DTDEntityResolver extends DTDEntityResolver {
	}

	private InputStream getStreamFromClasspath(String fileName) {
-		LOG.trace( "Recognized JPA ORM namespace; attempting to resolve on classpath under org/hibernate/ejb" );
-		String path = "org/hibernate/ejb/" + fileName;
+		LOG.trace( "Recognized JPA ORM namespace; attempting to resolve on classpath under org/hibernate/jpa" );
+		String path = "org/hibernate/jpa/" + fileName;
		InputStream dtdStream = resolveInHibernateNamespace( path );
		return dtdStream;
	}

View File

@@ -2247,7 +2247,7 @@ public final class HbmBinder {
		}
		else if ( "natural-id".equals( name ) ) {
			UniqueKey uk = new UniqueKey();
-			uk.setName("_UniqueKey");
+			uk.setName(StringHelper.randomFixedLengthHex("UK_"));
			uk.setTable(table);
			//by default, natural-ids are "immutable" (constant)
			boolean mutableId = "true".equals( subnode.attributeValue("mutable") );

View File

@@ -65,10 +65,12 @@ import javax.persistence.ExcludeDefaultListeners;
import javax.persistence.ExcludeSuperclassListeners;
import javax.persistence.FetchType;
import javax.persistence.FieldResult;
+import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.IdClass;
+import javax.persistence.Index;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
@@ -227,6 +229,8 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
		annotationToXml.put( MapKeyJoinColumns.class, "map-key-join-column" );
		annotationToXml.put( OrderColumn.class, "order-column" );
		annotationToXml.put( Cacheable.class, "cacheable" );
+		annotationToXml.put( Index.class, "index" );
+		annotationToXml.put( ForeignKey.class, "foreign-key" );
	}

	private XMLContext xmlContext;
@@ -659,6 +663,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
				annotation.setValue( "schema", defaults.getSchema() );
			}
			buildUniqueConstraints( annotation, subelement );
+			buildIndex( annotation, subelement );
			annotation.setValue( "joinColumns", getJoinColumns( subelement, false ) );
			annotation.setValue( "inverseJoinColumns", getJoinColumns( subelement, true ) );
			return AnnotationFactory.create( annotation );
@@ -1069,6 +1074,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
					annotation.setValue( "joinColumns", joinColumns );
				}
				buildUniqueConstraints( annotation, subelement );
+				buildIndex( annotation, subelement );
				annotationList.add( AnnotationFactory.create( annotation ) );
			}
		}
@@ -2298,6 +2304,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
				annotation.setValue( "schema", defaults.getSchema() );
			}
			buildUniqueConstraints( annotation, subelement );
+			buildIndex( annotation, subelement );
			return AnnotationFactory.create( annotation );
		}
	}
@@ -2321,6 +2328,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
				annotation.setValue( "schema", defaults.getSchema() );
			}
			buildUniqueConstraints( annotation, element );
+			buildIndex( annotation, element );
			annotation.setValue( "pkJoinColumns", buildPrimaryKeyJoinColumns( element ) );
			secondaryTables.add( (SecondaryTable) AnnotationFactory.create( annotation ) );
		}
@@ -2376,7 +2384,19 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
			}
		}
	}

+	private static void buildIndex(AnnotationDescriptor annotation, Element element){
+		List indexElementList = element.elements( "index" );
+		Index[] indexes = new Index[indexElementList.size()];
+		for(int i=0;i<indexElementList.size();i++){
+			Element subelement = (Element)indexElementList.get( i );
+			AnnotationDescriptor indexAnn = new AnnotationDescriptor( Index.class );
+			copyStringAttribute( indexAnn, subelement, "name", false );
+			copyStringAttribute( indexAnn, subelement, "column-list", true );
+			copyBooleanAttribute( indexAnn, subelement, "unique" );
+			indexes[i] = AnnotationFactory.create( indexAnn );
+		}
+		annotation.setValue( "indexes", indexes );
+	}
+
	private static void buildUniqueConstraints(AnnotationDescriptor annotation, Element element) {
		List uniqueConstraintElementList = element.elements( "unique-constraint" );
		UniqueConstraint[] uniqueConstraints = new UniqueConstraint[uniqueConstraintElementList.size()];

View File

@@ -29,6 +29,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.StringTokenizer;
+import java.util.UUID;

import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.collections.ArrayHelper;
@@ -763,4 +764,18 @@ public final class StringHelper {
	public static String[] toArrayElement(String s) {
		return ( s == null || s.length() == 0 ) ? new String[0] : new String[] { s };
	}

+	// Oracle restricts identifier lengths to 30. Rather than tie this to
+	// Dialect, simply restrict randomly-generated constraint names across
+	// the board.
+	private static final int MAX_NAME_LENGTH = 30;
+
+	public static String randomFixedLengthHex(String prefix) {
+		int length = MAX_NAME_LENGTH - prefix.length();
+		String s = UUID.randomUUID().toString();
+		s = s.replace( "-", "" );
+		if (s.length() > length) {
+			s = s.substring( 0, length );
+		}
+		return prefix + s;
+	}
}
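
For illustration only (not part of the diff): a rough sketch of what the new helper produces. The variable name below is made up; only the prefix convention and the 30-character cap come from the code above.

	// Hypothetical usage of randomFixedLengthHex; the hex digits are random per call.
	String ukName = StringHelper.randomFixedLengthHex( "UK_" );
	// -> "UK_" followed by 27 hex characters, i.e. at most MAX_NAME_LENGTH (30) characters,
	//    so the generated unique-key names stay within Oracle's identifier limit.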

View File

@@ -0,0 +1,147 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.EntityDeclaration;
import javax.xml.stream.events.EntityReference;
import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.EventReaderDelegate;
/**
* Base for XMLEventReader that implements the {@link #getElementText()} and {@link #nextTag()} APIs in a
* way that is agnostic from the rest of the XMLEventReader implementation. Both will use the subclasses
* {@link #internalNextEvent()} as the exclusive way to read events.
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/BaseXMLEventReader.java
*
* @author Eric Dalquist
*/
public abstract class BaseXMLEventReader extends EventReaderDelegate {
private XMLEvent previousEvent;
public BaseXMLEventReader(XMLEventReader reader) {
super(reader);
}
/**
* Subclass's version of {@link #nextEvent()}, called by {@link #next()}
*/
protected abstract XMLEvent internalNextEvent() throws XMLStreamException;
/**
* @return The XMLEvent returned by the last call to {@link #internalNextEvent()}
*/
protected final XMLEvent getPreviousEvent() {
return this.previousEvent;
}
@Override
public final XMLEvent nextEvent() throws XMLStreamException {
this.previousEvent = this.internalNextEvent();
return this.previousEvent;
}
/* (non-Javadoc)
* @see java.util.Iterator#next()
*/
@Override
public final Object next() {
try {
return this.nextEvent();
}
catch (XMLStreamException e) {
return null;
}
}
/* (non-Javadoc)
* @see javax.xml.stream.XMLEventReader#getElementText()
*/
@Override
public final String getElementText() throws XMLStreamException {
XMLEvent event = this.previousEvent;
if (event == null) {
throw new XMLStreamException("Must be on START_ELEMENT to read next text, element was null");
}
if (!event.isStartElement()) {
throw new XMLStreamException("Must be on START_ELEMENT to read next text", event.getLocation());
}
final StringBuilder text = new StringBuilder();
while (!event.isEndDocument()) {
switch (event.getEventType()) {
case XMLStreamConstants.CHARACTERS:
case XMLStreamConstants.SPACE:
case XMLStreamConstants.CDATA: {
final Characters characters = event.asCharacters();
text.append(characters.getData());
break;
}
case XMLStreamConstants.ENTITY_REFERENCE: {
final EntityReference entityReference = (EntityReference)event;
final EntityDeclaration declaration = entityReference.getDeclaration();
text.append(declaration.getReplacementText());
break;
}
case XMLStreamConstants.COMMENT:
case XMLStreamConstants.PROCESSING_INSTRUCTION: {
//Ignore
break;
}
default: {
throw new XMLStreamException("Unexpected event type '" + XMLStreamConstantsUtils.getEventName(event.getEventType()) + "' encountered. Found event: " + event, event.getLocation());
}
}
event = this.nextEvent();
}
return text.toString();
}
/* (non-Javadoc)
* @see javax.xml.stream.XMLEventReader#nextTag()
*/
@Override
public final XMLEvent nextTag() throws XMLStreamException {
XMLEvent event = this.nextEvent();
while ((event.isCharacters() && event.asCharacters().isWhiteSpace())
|| event.isProcessingInstruction()
|| event.getEventType() == XMLStreamConstants.COMMENT) {
event = this.nextEvent();
}
if (!event.isStartElement() && !event.isEndElement()) {
throw new XMLStreamException("Unexpected event type '" + XMLStreamConstantsUtils.getEventName(event.getEventType()) + "' encountered. Found event: " + event, event.getLocation());
}
return event;
}
}
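
A minimal sketch (not part of this commit) of the contract the javadoc above describes: a subclass only has to supply internalNextEvent(), and the base class routes nextEvent(), getElementText() and nextTag() through it. The class name is hypothetical; it reuses the imports already present in this file.

	// Hypothetical pass-through subclass, for illustration only.
	public class PassThroughXMLEventReader extends BaseXMLEventReader {
		public PassThroughXMLEventReader(XMLEventReader reader) {
			super( reader );
		}

		@Override
		protected XMLEvent internalNextEvent() throws XMLStreamException {
			// delegate straight to the wrapped reader; the base class still records
			// the previous event, so getElementText() and nextTag() keep working
			return getParent().nextEvent();
		}
	}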

View File

@@ -0,0 +1,196 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.XMLEvent;
/**
* Buffers XML events for later re-reading
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/BufferedXMLEventReader.java
*
* @author Eric Dalquist
*/
public class BufferedXMLEventReader extends BaseXMLEventReader {
private final LinkedList<XMLEvent> eventBuffer = new LinkedList<XMLEvent>();
private int eventLimit = 0;
private ListIterator<XMLEvent> bufferReader = null;
/**
* Create new buffering reader, no buffering is done until {@link #mark(int)} is called.
*/
public BufferedXMLEventReader(XMLEventReader reader) {
super(reader);
}
/**
* Create new buffering reader. Calls {@link #mark(int)} with the specified event limit
* @see #mark(int)
*/
public BufferedXMLEventReader(XMLEventReader reader, int eventLimit) {
super(reader);
this.eventLimit = eventLimit;
}
/**
* @return A copy of the current buffer
*/
public List<XMLEvent> getBuffer() {
return new ArrayList<XMLEvent>(this.eventBuffer);
}
/* (non-Javadoc)
* @see org.jasig.portal.xml.stream.BaseXMLEventReader#internalNextEvent()
*/
@Override
protected XMLEvent internalNextEvent() throws XMLStreamException {
//If there is an iterator to read from, reset() was called; use the iterator
//until it runs out of events.
if (this.bufferReader != null) {
final XMLEvent event = this.bufferReader.next();
//If nothing left in the iterator, remove the reference and fall through to direct reading
if (!this.bufferReader.hasNext()) {
this.bufferReader = null;
}
return event;
}
//Get the next event from the underlying reader
final XMLEvent event = this.getParent().nextEvent();
//if buffering add the event
if (this.eventLimit != 0) {
this.eventBuffer.offer(event);
//If limited buffer size and buffer is too big trim the buffer.
if (this.eventLimit > 0 && this.eventBuffer.size() > this.eventLimit) {
this.eventBuffer.poll();
}
}
return event;
}
@Override
public boolean hasNext() {
return this.bufferReader != null || super.hasNext();
}
@Override
public XMLEvent peek() throws XMLStreamException {
if (this.bufferReader != null) {
final XMLEvent event = this.bufferReader.next();
this.bufferReader.previous(); //move the iterator back
return event;
}
return super.peek();
}
/**
* Same as calling {@link #mark(int)} with -1.
*/
public void mark() {
this.mark(-1);
}
/**
* Start buffering events
* @param eventLimit the maximum number of events to buffer. -1 will buffer all events, 0 will buffer no events.
*/
public void mark(int eventLimit) {
this.eventLimit = eventLimit;
//Buffering no events now, clear the buffer and buffered reader
if (this.eventLimit == 0) {
this.eventBuffer.clear();
this.bufferReader = null;
}
//Buffering limited set of events, lets trim the buffer if needed
else if (this.eventLimit > 0) {
//If there is an iterator check its current position and calculate the new iterator start position
int iteratorIndex = 0;
if (this.bufferReader != null) {
final int nextIndex = this.bufferReader.nextIndex();
iteratorIndex = Math.max(0, nextIndex - (this.eventBuffer.size() - this.eventLimit));
}
//Trim the buffer until it is not larger than the limit
while (this.eventBuffer.size() > this.eventLimit) {
this.eventBuffer.poll();
}
//If there is an iterator re-create it using the newly calculated index
if (this.bufferReader != null) {
this.bufferReader = this.eventBuffer.listIterator(iteratorIndex);
}
}
}
/**
* Reset the reader to the start of the buffered events.
*/
public void reset() {
if (this.eventBuffer.isEmpty()) {
this.bufferReader = null;
}
else {
this.bufferReader = this.eventBuffer.listIterator();
}
}
@Override
public void close() throws XMLStreamException {
this.mark(0);
super.close();
}
/**
* @return The number of events in the buffer.
*/
public int bufferSize() {
return this.eventBuffer.size();
}
/**
* If reading from the buffer after a {@link #reset()} call an {@link IllegalStateException} will be thrown.
*/
@Override
public void remove() {
if (this.bufferReader != null && this.bufferReader.hasNext()) {
throw new IllegalStateException("Cannot remove a buffered element");
}
super.remove();
}
}
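
A short usage sketch, illustrative only: the XML literal and variable names are invented, the calls would sit in a method declared to throw XMLStreamException, and the usual javax.xml.stream and java.io.StringReader imports are assumed. It buffers everything, reads part of the stream, then rewinds and replays it.

	// -1 = unlimited buffering; 0 would disable buffering entirely.
	XMLInputFactory factory = XMLInputFactory.newInstance();
	XMLEventReader raw = factory.createXMLEventReader( new StringReader( "<doc><a/></doc>" ) );
	BufferedXMLEventReader buffered = new BufferedXMLEventReader( raw, -1 );

	XMLEvent first = buffered.nextEvent();   // START_DOCUMENT, now held in the buffer
	buffered.nextEvent();                    // <doc>
	buffered.reset();                        // rewind to the start of the buffer
	assert buffered.nextEvent().getEventType() == first.getEventType();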

View File

@@ -58,9 +58,7 @@ public class ErrorLogger implements ErrorHandler, Serializable {
		this.file = file;
	}

-	/**
-	 * {@inheritDoc}
-	 */
+	@Override
	public void error(SAXParseException error) {
		if ( this.errors == null ) {
			errors = new ArrayList<SAXParseException>();
@@ -68,23 +66,16 @@ public class ErrorLogger implements ErrorHandler, Serializable {
		errors.add( error );
	}

-	/**
-	 * {@inheritDoc}
-	 */
+	@Override
	public void fatalError(SAXParseException error) {
		error( error );
	}

-	/**
-	 * {@inheritDoc}
-	 */
+	@Override
	public void warning(SAXParseException warn) {
		LOG.parsingXmlWarning( warn.getLineNumber(), warn.getMessage() );
	}

-	/**
-	 * @return returns a list of encountered xml parsing errors, or the empty list if there was no error
-	 */
	public List<SAXParseException> getErrors() {
		return errors;
	}

View File

@@ -0,0 +1,141 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import java.util.Deque;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
/**
* Base class for {@link XMLEventReader}s that want to modify or remove events from the reader stream.
* If a {@link StartElement} event is removed the subclass's {@link #filterEvent(XMLEvent, boolean)} will
* not see any events until after the matching {@link EndElement} event.
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/FilteringXMLEventReader.java
*
* @author Eric Dalquist
*/
public abstract class FilteringXMLEventReader extends BaseXMLEventReader {
private final Deque<QName> prunedElements = new LinkedList<QName>();
private XMLEvent peekedEvent = null;
public FilteringXMLEventReader(XMLEventReader reader) {
super(reader);
}
@Override
protected final XMLEvent internalNextEvent() throws XMLStreamException {
return this.internalNext(false);
}
@Override
public boolean hasNext() {
try {
return peekedEvent != null || (super.hasNext() && this.peek() != null);
}
catch (XMLStreamException e) {
throw new RuntimeException(e.getMessage(), e);
}
catch (NoSuchElementException e) {
return false;
}
}
@Override
public final XMLEvent peek() throws XMLStreamException {
if (peekedEvent != null) {
return peekedEvent;
}
peekedEvent = internalNext(true);
return peekedEvent;
}
protected final XMLEvent internalNext(boolean peek) throws XMLStreamException {
XMLEvent event = null;
if (peekedEvent != null) {
event = peekedEvent;
peekedEvent = null;
return event;
}
do {
event = super.getParent().nextEvent();
//If there are pruned elements in the queue filtering events is still needed
if (!prunedElements.isEmpty()) {
//If another start element add it to the queue
if (event.isStartElement()) {
final StartElement startElement = event.asStartElement();
prunedElements.push(startElement.getName());
}
//If end element pop the newest name of the queue and double check that the start/end elements match up
else if (event.isEndElement()) {
final QName startElementName = prunedElements.pop();
final EndElement endElement = event.asEndElement();
final QName endElementName = endElement.getName();
if (!startElementName.equals(endElementName)) {
throw new IllegalArgumentException("Malformed XMLEvent stream. Expected end element for " + startElementName + " but found end element for " + endElementName);
}
}
event = null;
}
else {
final XMLEvent filteredEvent = this.filterEvent(event, peek);
//If the event is being removed and it is a start element all elements until the matching
//end element need to be removed as well
if (filteredEvent == null && event.isStartElement()) {
final StartElement startElement = event.asStartElement();
final QName name = startElement.getName();
prunedElements.push(name);
}
event = filteredEvent;
}
}
while (event == null);
return event;
}
/**
* @param event The current event
* @param peek If the event is from a {@link #peek()} call
* @return The event to return, if null is returned the event is dropped from the stream and the next event will be used.
*/
protected abstract XMLEvent filterEvent(XMLEvent event, boolean peek);
}
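
A sketch of the intended use (MappingReader later in this commit subclasses it in exactly this shape to strip comments before handing the stream to dom4j); someReader is a placeholder for any XMLEventReader, and javax.xml.stream.XMLStreamConstants is assumed to be imported:

	// Returning null from filterEvent() drops the event; if the dropped event is a
	// start element, everything up to its matching end element is dropped as well.
	XMLEventReader filtered = new FilteringXMLEventReader( someReader ) {
		@Override
		protected XMLEvent filterEvent(XMLEvent event, boolean peek) {
			return event.getEventType() == XMLStreamConstants.COMMENT ? null : event;
		}
	};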

View File

@@ -0,0 +1,148 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import javax.xml.stream.XMLResolver;
import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.xml.sax.InputSource;
import org.jboss.logging.Logger;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ConfigHelper;
/**
* @author Steve Ebersole
*/
public class LocalXmlResourceResolver implements javax.xml.stream.XMLResolver {
private static final CoreMessageLogger log = Logger.getMessageLogger(
CoreMessageLogger.class,
MappingReader.class.getName()
);
public static final LocalXmlResourceResolver INSTANCE = new LocalXmlResourceResolver();
/**
* Namespace for the orm.xml xsd for jpa 1.0 and 2.0
*/
public static final String INITIAL_JPA_ORM_NS = "http://java.sun.com/xml/ns/persistence/orm";
/**
* Namespace for the orm.xml xsd for jpa 2.1
*/
public static final String SECOND_JPA_ORM_NS = "http://xmlns.jcp.org/xml/ns/persistence/orm";
public static final String HIBERNATE_MAPPING_DTD_URL_BASE = "http://www.hibernate.org/dtd/";
public static final String LEGACY_HIBERNATE_MAPPING_DTD_URL_BASE = "http://hibernate.sourceforge.net/";
public static final String CLASSPATH_EXTENSION_URL_BASE = "classpath://";
@Override
public Object resolveEntity(String publicID, String systemID, String baseURI, String namespace) throws XMLStreamException {
log.tracef( "In resolveEntity(%s, %s, %s, %s)", publicID, systemID, baseURI, namespace );
if ( namespace != null ) {
log.debugf( "Interpreting namespace : %s", namespace );
if ( INITIAL_JPA_ORM_NS.equals( namespace ) ) {
return openUrlStream( MappingReader.SupportedOrmXsdVersion.ORM_2_0.getSchemaUrl() );
}
else if ( SECOND_JPA_ORM_NS.equals( namespace ) ) {
return openUrlStream( MappingReader.SupportedOrmXsdVersion.ORM_2_1.getSchemaUrl() );
}
}
if ( systemID != null ) {
log.debugf( "Interpreting systemID : %s", systemID );
InputStream stream = null;
if ( systemID.startsWith( HIBERNATE_MAPPING_DTD_URL_BASE ) ) {
log.debug( "Recognized hibernate namespace; attempting to resolve on classpath under org/hibernate/" );
stream = resolveOnClassPath( systemID, HIBERNATE_MAPPING_DTD_URL_BASE );
}
else if ( systemID.startsWith( LEGACY_HIBERNATE_MAPPING_DTD_URL_BASE ) ) {
log.recognizedObsoleteHibernateNamespace( LEGACY_HIBERNATE_MAPPING_DTD_URL_BASE, HIBERNATE_MAPPING_DTD_URL_BASE );
log.debug( "Attempting to resolve on classpath under org/hibernate/" );
stream = resolveOnClassPath( systemID, LEGACY_HIBERNATE_MAPPING_DTD_URL_BASE );
}
else if ( systemID.startsWith( CLASSPATH_EXTENSION_URL_BASE ) ) {
log.debug( "Recognized local namespace; attempting to resolve on classpath" );
final String path = systemID.substring( CLASSPATH_EXTENSION_URL_BASE.length() );
stream = resolveInLocalNamespace( path );
if ( stream == null ) {
log.debugf( "Unable to resolve [%s] on classpath", systemID );
}
else {
log.debugf( "Resolved [%s] on classpath", systemID );
}
}
if ( stream != null ) {
return stream;
}
}
return null;
}
private InputStream openUrlStream(URL url) {
try {
return url.openStream();
}
catch (IOException e) {
throw new XmlInfrastructureException( "Could not open url stream : " + url.toExternalForm(), e );
}
}
private InputStream resolveOnClassPath(String systemID, String namespace) {
final String relativeResourceName = systemID.substring( namespace.length() );
final String path = "org/hibernate/" + relativeResourceName;
InputStream dtdStream = resolveInHibernateNamespace( path );
if ( dtdStream == null ) {
log.debugf( "Unable to locate [%s] on classpath", systemID );
if ( relativeResourceName.contains( "2.0" ) ) {
log.usingOldDtd();
}
return null;
}
else {
log.debugf( "Located [%s] in classpath", systemID );
return dtdStream;
}
}
private InputStream resolveInHibernateNamespace(String path) {
return this.getClass().getClassLoader().getResourceAsStream( path );
}
private InputStream resolveInLocalNamespace(String path) {
try {
return ConfigHelper.getUserResourceAsStream( path );
}
catch ( Throwable t ) {
return null;
}
}
}
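
For reference, wiring the resolver into a StAX factory takes two lines; this mirrors what MappingReader.buildStaxFactory() does later in this commit:

	// Register the resolver so JPA orm.xml namespaces and Hibernate DTD/XSD system ids
	// are resolved from the classpath instead of being fetched over the network.
	XMLInputFactory staxFactory = XMLInputFactory.newInstance();
	staxFactory.setXMLResolver( LocalXmlResourceResolver.INSTANCE );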

View File

@@ -23,17 +23,36 @@
 */
package org.hibernate.internal.util.xml;

+import javax.xml.XMLConstants;
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.events.Attribute;
+import javax.xml.stream.events.XMLEvent;
+import javax.xml.transform.stax.StAXSource;
+import javax.xml.transform.stream.StreamSource;
+import javax.xml.validation.Schema;
+import javax.xml.validation.SchemaFactory;
+import javax.xml.validation.Validator;
+import java.io.IOException;
+import java.io.InputStream;
import java.io.StringReader;
+import java.net.URL;

import org.dom4j.Document;
import org.dom4j.io.SAXReader;
-import org.jboss.logging.Logger;
+import org.dom4j.io.STAXEventReader;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
+import org.jboss.logging.Logger;

import org.hibernate.InvalidMappingException;
import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.jaxb.spi.Origin;

/**
 * Handles reading mapping documents, both {@code hbm} and {@code orm} varieties.
@@ -41,7 +60,6 @@ import org.hibernate.internal.CoreMessageLogger;
 * @author Steve Ebersole
 */
public class MappingReader {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			MappingReader.class.getName()
@@ -59,10 +77,241 @@ public class MappingReader {
	private MappingReader() {
	}
public XmlDocument readMappingDocument(InputSource source, Origin origin) {
XMLEventReader staxReader = buildStaxEventReader( source, origin );
try {
return read( staxReader, origin );
}
finally {
try {
staxReader.close();
}
catch ( Exception ignore ) {
}
}
}
private XMLEventReader buildStaxEventReader(InputSource source, Origin origin) {
XMLEventReader reader = null;
if ( source.getByteStream() != null ) {
try {
reader = staxFactory().createXMLEventReader( source.getByteStream() );
}
catch (XMLStreamException e) {
throw new XmlInfrastructureException(
"Unable to create stax reader, origin = " + toLoggableString( origin ),
e
);
}
}
else if ( source.getCharacterStream() != null ) {
try {
reader = staxFactory().createXMLEventReader( source.getCharacterStream() );
}
catch (XMLStreamException e) {
throw new XmlInfrastructureException(
"Unable to create stax reader, origin = " + toLoggableString( origin ),
e
);
}
}
// todo : try to interpret the InputSource SystemId or Origin path?
if ( reader == null ) {
throw new XmlInfrastructureException( "Unable to convert SAX InputStream into StAX XMLEventReader" );
}
// For performance we wrap the reader in a buffered reader
return new BufferedXMLEventReader( reader );
}
private XMLInputFactory staxFactory;
private XMLInputFactory staxFactory() {
if ( staxFactory == null ) {
staxFactory = buildStaxFactory();
}
return staxFactory;
}
@SuppressWarnings( { "UnnecessaryLocalVariable" })
private XMLInputFactory buildStaxFactory() {
XMLInputFactory staxFactory = XMLInputFactory.newInstance();
staxFactory.setXMLResolver( LocalXmlResourceResolver.INSTANCE );
return staxFactory;
}
private String toLoggableString(Origin origin) {
return "[type=" + origin.getType() + ", name=" + origin.getName() + "]";
}
private static final QName ORM_VERSION_ATTRIBUTE_QNAME = new QName( "version" );
private XmlDocument read(XMLEventReader staxEventReader, Origin origin) {
XMLEvent event;
try {
event = staxEventReader.peek();
while ( event != null && !event.isStartElement() ) {
staxEventReader.nextEvent();
event = staxEventReader.peek();
}
}
catch ( Exception e ) {
throw new InvalidMappingException( "Error accessing stax stream", origin, e );
}
if ( event == null ) {
throw new InvalidMappingException( "Could not locate root element", origin );
}
final String rootElementName = event.asStartElement().getName().getLocalPart();
if ( "entity-mappings".equals( rootElementName ) ) {
final Attribute attribute = event.asStartElement().getAttributeByName( ORM_VERSION_ATTRIBUTE_QNAME );
final String explicitVersion = attribute == null ? null : attribute.getValue();
validateMapping(
SupportedOrmXsdVersion.parse( explicitVersion, origin ),
staxEventReader,
origin
);
}
return new XmlDocumentImpl( toDom4jDocument( staxEventReader, origin ), origin );
}
private Document toDom4jDocument(XMLEventReader staxEventReader, Origin origin) {
STAXEventReader dom4jStaxEventReader = new STAXEventReader();
try {
// the dom4j converter class is touchy about comments (aka, comments make it implode)
// so wrap the event stream in a filtering stream to filter out comment events
staxEventReader = new FilteringXMLEventReader( staxEventReader ) {
@Override
protected XMLEvent filterEvent(XMLEvent event, boolean peek) {
return event.getEventType() == XMLStreamConstants.COMMENT
? null
: event;
}
};
return dom4jStaxEventReader.readDocument( staxEventReader );
}
catch (XMLStreamException e) {
throw new InvalidMappingException( "Unable to read StAX source as dom4j Document for processing", origin, e );
}
}
public static void validateMapping(SupportedOrmXsdVersion xsdVersion, XMLEventReader staxEventReader, Origin origin) {
final Validator validator = xsdVersion.getSchema().newValidator();
final StAXSource staxSource;
try {
staxSource = new StAXSource( staxEventReader );
}
catch (XMLStreamException e) {
throw new InvalidMappingException( "Unable to generate StAXSource from mapping", origin, e );
}
try {
validator.validate( staxSource );
}
catch (SAXException e) {
throw new InvalidMappingException( "SAXException performing validation", origin, e );
}
catch (IOException e) {
throw new InvalidMappingException( "IOException performing validation", origin, e );
}
}
public static enum SupportedOrmXsdVersion {
ORM_1_0( "org/hibernate/jpa/orm_1_0.xsd" ),
ORM_2_0( "org/hibernate/jpa/orm_2_0.xsd" ),
ORM_2_1( "org/hibernate/jpa/orm_2_1.xsd" ),
HBM_4_0( "org/hibernate/hibernate-mapping-4.0.xsd");
private final String schemaResourceName;
private SupportedOrmXsdVersion(String schemaResourceName) {
this.schemaResourceName = schemaResourceName;
}
public static SupportedOrmXsdVersion parse(String name, Origin origin) {
if ( "1.0".equals( name ) ) {
return ORM_1_0;
}
else if ( "2.0".equals( name ) ) {
return ORM_2_0;
}
else if ( "2.1".equals( name ) ) {
return ORM_2_1;
}
throw new UnsupportedOrmXsdVersionException( name, origin );
}
private URL schemaUrl;
public URL getSchemaUrl() {
if ( schemaUrl == null ) {
schemaUrl = resolveLocalSchemaUrl( schemaResourceName );
}
return schemaUrl;
}
private Schema schema;
public Schema getSchema() {
if ( schema == null ) {
schema = resolveLocalSchema( getSchemaUrl() );
}
return schema;
}
}
private static URL resolveLocalSchemaUrl(String schemaName) {
URL url = MappingReader.class.getClassLoader().getResource( schemaName );
if ( url == null ) {
throw new XmlInfrastructureException( "Unable to locate schema [" + schemaName + "] via classpath" );
}
return url;
}
private static Schema resolveLocalSchema(URL schemaUrl) {
try {
InputStream schemaStream = schemaUrl.openStream();
try {
StreamSource source = new StreamSource(schemaUrl.openStream());
SchemaFactory schemaFactory = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
return schemaFactory.newSchema(source);
}
catch ( Exception e ) {
throw new XmlInfrastructureException( "Unable to load schema [" + schemaUrl.toExternalForm() + "]", e );
}
finally {
try {
schemaStream.close();
}
catch ( IOException e ) {
LOG.debugf( "Problem closing schema stream - %s", e.toString() );
}
}
}
catch ( IOException e ) {
throw new XmlInfrastructureException( "Stream error handling schema url [" + schemaUrl.toExternalForm() + "]" );
}
}
public XmlDocument readMappingDocument(EntityResolver entityResolver, InputSource source, Origin origin) {
+return legacyReadMappingDocument( entityResolver, source, origin );
+// return readMappingDocument( source, origin );
+}
+
+private XmlDocument legacyReadMappingDocument(EntityResolver entityResolver, InputSource source, Origin origin) {
// IMPL NOTE : this is the legacy logic as pulled from the old AnnotationConfiguration code
Exception failure;
ErrorLogger errorHandler = new ErrorLogger();
SAXReader saxReader = new SAXReader();
@@ -73,193 +322,81 @@ public class MappingReader {
Document document = null;
try {
-// first try with orm 2.0 xsd validation
-setValidationFor( saxReader, "orm_2_0.xsd" );
+// first try with orm 2.1 xsd validation
+setValidationFor( saxReader, "orm_2_1.xsd" );
document = saxReader.read( source );
if ( errorHandler.hasErrors() ) {
throw errorHandler.getErrors().get( 0 );
}
-return new XmlDocumentImpl( document, origin.getType(), origin.getName() );
+return new XmlDocumentImpl( document, origin );
}
-catch ( Exception orm2Problem ) {
+catch ( Exception e ) {
if ( LOG.isDebugEnabled() ) {
-LOG.debugf( "Problem parsing XML using orm 2 xsd : %s", orm2Problem.getMessage() );
+LOG.debugf( "Problem parsing XML using orm 2.1 xsd, trying 2.0 xsd : %s", e.getMessage() );
}
-failure = orm2Problem;
+failure = e;
errorHandler.reset();
if ( document != null ) {
-// next try with orm 1.0 xsd validation
+// next try with orm 2.0 xsd validation
try {
-setValidationFor( saxReader, "orm_1_0.xsd" );
+setValidationFor( saxReader, "orm_2_0.xsd" );
document = saxReader.read( new StringReader( document.asXML() ) );
if ( errorHandler.hasErrors() ) {
errorHandler.logErrors();
throw errorHandler.getErrors().get( 0 );
}
-return new XmlDocumentImpl( document, origin.getType(), origin.getName() );
+return new XmlDocumentImpl( document, origin );
}
-catch ( Exception orm1Problem ) {
+catch ( Exception e2 ) {
if ( LOG.isDebugEnabled() ) {
-LOG.debugf( "Problem parsing XML using orm 1 xsd : %s", orm1Problem.getMessage() );
+LOG.debugf( "Problem parsing XML using orm 2.0 xsd, trying 1.0 xsd : %s", e2.getMessage() );
}
+errorHandler.reset();
+
+if ( document != null ) {
+// next try with orm 1.0 xsd validation
+try {
+setValidationFor( saxReader, "orm_1_0.xsd" );
+document = saxReader.read( new StringReader( document.asXML() ) );
+if ( errorHandler.hasErrors() ) {
+errorHandler.logErrors();
+throw errorHandler.getErrors().get( 0 );
+}
+return new XmlDocumentImpl( document, origin );
+}
+catch ( Exception e3 ) {
+if ( LOG.isDebugEnabled() ) {
+LOG.debugf( "Problem parsing XML using orm 1.0 xsd : %s", e3.getMessage() );
+}
+}
+}
}
}
}
-throw new InvalidMappingException( "Unable to read XML", origin.getType(), origin.getName(), failure );
+throw new InvalidMappingException( "Unable to read XML", origin, failure );
}
private void setValidationFor(SAXReader saxReader, String xsd) {
try {
saxReader.setFeature( "http://apache.org/xml/features/validation/schema", true );
-//saxReader.setFeature( "http://apache.org/xml/features/validation/dynamic", true );
-//set the default schema locators
-saxReader.setProperty(
-"http://apache.org/xml/properties/schema/external-schemaLocation",
-"http://java.sun.com/xml/ns/persistence/orm " + xsd
-);
+// saxReader.setFeature( "http://apache.org/xml/features/validation/dynamic", true );
+if ( "orm_2_1.xsd".equals( xsd ) ) {
+saxReader.setProperty(
+"http://apache.org/xml/properties/schema/external-schemaLocation",
+LocalXmlResourceResolver.SECOND_JPA_ORM_NS + " " + xsd
+);
+}
+else {
+saxReader.setProperty(
+"http://apache.org/xml/properties/schema/external-schemaLocation",
+LocalXmlResourceResolver.INITIAL_JPA_ORM_NS + " " + xsd
+);
+}
}
catch ( SAXException e ) {
saxReader.setValidation( false );
}
}
// this is the version of the code I'd like to use, but it unfortunately works very differently between
// JDK 1.5 and JDK 1.6. On 1.5 the validation "passes" even with invalid content.
//
// Options:
// 1) continue using the code above
// 2) Document the issue on 1.5 and how to fix (specifying alternate SchemaFactory instance)
// 3) Use a specific JAXP library (Xerces2, Saxon, Jing, MSV) and its SchemaFactory instance directly
// public XmlDocument readMappingDocument(EntityResolver entityResolver, InputSource source, Origin origin) {
// ErrorLogger errorHandler = new ErrorLogger();
//
// SAXReader saxReader = new SAXReader( new DOMDocumentFactory() );
// saxReader.setEntityResolver( entityResolver );
// saxReader.setErrorHandler( errorHandler );
// saxReader.setMergeAdjacentText( true );
//
// Document documentTree = null;
//
// // IMPL NOTE : here we enable DTD validation in case the mapping is a HBM file. This will validate
// // the document as it is parsed. This is needed because the DTD defines default values that have to be
// // applied as the document is parsed, so that's something we need to account for as we (if we) transition
// // to XSD.
// saxReader.setValidation( true );
// try {
// documentTree = saxReader.read( source );
// }
// catch ( DocumentException e ) {
// // we had issues reading the input, most likely malformed document or validation error against DTD
// throw new InvalidMappingException( "Unable to read XML", origin.getType(), origin.getName(), e );
// }
//
// Element rootElement = documentTree.getRootElement();
// if ( rootElement == null ) {
// throw new InvalidMappingException( "No root element", origin.getType(), origin.getName() );
// }
//
// if ( "entity-mappings".equals( rootElement.getName() ) ) {
// final String explicitVersion = rootElement.attributeValue( "version" );
// final String xsdVersionString = explicitVersion == null ? ASSUMED_ORM_XSD_VERSION : explicitVersion;
// final SupportedOrmXsdVersion xsdVersion = SupportedOrmXsdVersion.parse( xsdVersionString );
// final Schema schema = xsdVersion == SupportedOrmXsdVersion.ORM_1_0 ? orm1Schema() : orm2Schema();
// try {
// schema.newValidator().validate( new DOMSource( (org.w3c.dom.Document) documentTree ) );
// }
// catch ( SAXException e ) {
// throw new InvalidMappingException( "Validation problem", origin.getType(), origin.getName(), e );
// }
// catch ( IOException e ) {
// throw new InvalidMappingException( "Validation problem", origin.getType(), origin.getName(), e );
// }
// }
// else {
// if ( errorHandler.getError() != null ) {
// throw new InvalidMappingException(
// "Error validating hibernate-mapping against DTD",
// origin.getType(),
// origin.getName(),
// errorHandler.getError()
// );
// }
// }
//
// return new XmlDocumentImpl( documentTree, origin );
// }
//
// public static enum SupportedOrmXsdVersion {
// ORM_1_0,
// ORM_2_0;
//
// public static SupportedOrmXsdVersion parse(String name) {
// if ( "1.0".equals( name ) ) {
// return ORM_1_0;
// }
// else if ( "2.0".equals( name ) ) {
// return ORM_2_0;
// }
// throw new IllegalArgumentException( "Unsupported orm.xml XSD version encountered [" + name + "]" );
// }
// }
//
//
// public static final String ORM_1_SCHEMA_NAME = "org/hibernate/ejb/orm_1_0.xsd";
// public static final String ORM_2_SCHEMA_NAME = "org/hibernate/ejb/orm_2_0.xsd";
//
// private static Schema orm1Schema;
//
// private static Schema orm1Schema() {
// if ( orm1Schema == null ) {
// orm1Schema = resolveLocalSchema( ORM_1_SCHEMA_NAME );
// }
// return orm1Schema;
// }
//
// private static Schema orm2Schema;
//
// private static Schema orm2Schema() {
// if ( orm2Schema == null ) {
// orm2Schema = resolveLocalSchema( ORM_2_SCHEMA_NAME );
// }
// return orm2Schema;
// }
//
// private static Schema resolveLocalSchema(String schemaName) {
// return resolveLocalSchema( schemaName, XMLConstants.W3C_XML_SCHEMA_NS_URI );
// }
//
// private static Schema resolveLocalSchema(String schemaName, String schemaLanguage) {
// URL url = ConfigHelper.findAsResource( schemaName );
// if ( url == null ) {
// throw new MappingException( "Unable to locate schema [" + schemaName + "] via classpath" );
// }
// try {
// InputStream schemaStream = url.openStream();
// try {
// StreamSource source = new StreamSource(url.openStream());
// SchemaFactory schemaFactory = SchemaFactory.newInstance( schemaLanguage );
// return schemaFactory.newSchema(source);
// }
// catch ( SAXException e ) {
// throw new MappingException( "Unable to load schema [" + schemaName + "]", e );
// }
// catch ( IOException e ) {
// throw new MappingException( "Unable to load schema [" + schemaName + "]", e );
// }
// finally {
// try {
// schemaStream.close();
// }
// catch ( IOException e ) {
// log.warn( "Problem closing schema stream [{}]", e.toString() );
// }
// }
// }
// catch ( IOException e ) {
// throw new MappingException( "Stream error handling schema url [" + url.toExternalForm() + "]" );
// }
//
// }
} }
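The commented-out note above flags that JAXP schema validation behaves differently on JDK 1.5 unless an alternate SchemaFactory is supplied (option 2 in the list). A minimal sketch of that workaround, assuming Xerces is on the classpath; the JAXP lookup property and the Xerces factory class name are standard, while the resource path and class below are illustrative rather than anything this commit ships:

import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;

public class PinnedSchemaFactorySketch {
	public static Schema loadOrmSchema() throws Exception {
		// JAXP consults this system property before falling back to the platform default factory,
		// so pinning it to the Xerces implementation gives consistent XSD validation on 1.5 and 1.6
		System.setProperty(
				"javax.xml.validation.SchemaFactory:" + XMLConstants.W3C_XML_SCHEMA_NS_URI,
				"org.apache.xerces.jaxp.validation.XMLSchemaFactory" );
		SchemaFactory factory = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
		// resolve the schema from the classpath, much like the resolveLocalSchema() sketch above
		return factory.newSchema( new StreamSource(
				PinnedSchemaFactorySketch.class.getClassLoader()
						.getResourceAsStream( "org/hibernate/jpa/orm_2_1.xsd" ) ) );
	}
}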

View File

@ -0,0 +1,43 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import org.hibernate.HibernateException;
import org.hibernate.jaxb.spi.Origin;
/**
* @author Steve Ebersole
*/
public class UnsupportedOrmXsdVersionException extends HibernateException {
public UnsupportedOrmXsdVersionException(String requestedVersion, Origin origin) {
super(
String.format(
"Encountered unsupported orm.xml xsd version [%s] in mapping document [type=%s, name=%s]",
requestedVersion,
origin.getType(),
origin.getName()
)
);
}
}

View File

@ -0,0 +1,69 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
import javax.xml.stream.XMLStreamConstants;
/**
* Helper for converting the numeric StAX {@link XMLStreamConstants} event ids into human readable names.
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/XMLStreamConstantsUtils.java
*
* @author Eric Dalquist
*/
public class XMLStreamConstantsUtils {
/**
* Get the human readable event name for the numeric event id
*/
public static String getEventName(int eventId) {
switch (eventId) {
case XMLStreamConstants.START_ELEMENT:
return "StartElementEvent";
case XMLStreamConstants.END_ELEMENT:
return "EndElementEvent";
case XMLStreamConstants.PROCESSING_INSTRUCTION:
return "ProcessingInstructionEvent";
case XMLStreamConstants.CHARACTERS:
return "CharacterEvent";
case XMLStreamConstants.COMMENT:
return "CommentEvent";
case XMLStreamConstants.START_DOCUMENT:
return "StartDocumentEvent";
case XMLStreamConstants.END_DOCUMENT:
return "EndDocumentEvent";
case XMLStreamConstants.ENTITY_REFERENCE:
return "EntityReferenceEvent";
case XMLStreamConstants.ATTRIBUTE:
return "AttributeBase";
case XMLStreamConstants.DTD:
return "DTDEvent";
case XMLStreamConstants.CDATA:
return "CDATA";
}
return "UNKNOWN_EVENT_TYPE";
}
}
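A short usage sketch for the helper above; the document content and the println target are hypothetical, the point is only that trace output reads "StartElementEvent" and friends instead of bare integer constants:

import java.io.StringReader;

import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.XMLEvent;

import org.hibernate.internal.util.xml.XMLStreamConstantsUtils;

public class EventNameSketch {
	public static void main(String[] args) throws XMLStreamException {
		XMLEventReader reader = XMLInputFactory.newInstance()
				.createXMLEventReader( new StringReader( "<entity-mappings/>" ) );
		while ( reader.hasNext() ) {
			XMLEvent event = reader.nextEvent();
			// prints StartDocumentEvent, StartElementEvent, EndElementEvent, EndDocumentEvent
			System.out.println( XMLStreamConstantsUtils.getEventName( event.getEventType() ) );
		}
	}
}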

View File

@ -27,6 +27,8 @@ import java.io.Serializable;
import org.dom4j.Document; import org.dom4j.Document;
import org.hibernate.jaxb.spi.Origin;
/** /**
* Describes a parsed xml document. * Describes a parsed xml document.
* *

View File

@ -27,6 +27,9 @@ import java.io.Serializable;
import org.dom4j.Document; import org.dom4j.Document;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
/** /**
* Basic implementation of {@link XmlDocument} * Basic implementation of {@link XmlDocument}
* *
@ -36,8 +39,8 @@ public class XmlDocumentImpl implements XmlDocument, Serializable {
private final Document documentTree; private final Document documentTree;
private final Origin origin; private final Origin origin;
public XmlDocumentImpl(Document documentTree, String originType, String originName) { public XmlDocumentImpl(Document documentTree, SourceType originType, String originName) {
this( documentTree, new OriginImpl( originType, originName ) ); this( documentTree, new Origin( originType, originName ) );
} }
public XmlDocumentImpl(Document documentTree, Origin origin) { public XmlDocumentImpl(Document documentTree, Origin origin) {

View File

@ -1,7 +1,7 @@
/* /*
* Hibernate, Relational Persistence for Idiomatic Java * Hibernate, Relational Persistence for Idiomatic Java
* *
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as * Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc. * distributed under license by Red Hat Inc.
@ -23,33 +23,19 @@
*/ */
package org.hibernate.internal.util.xml; package org.hibernate.internal.util.xml;
import java.io.Serializable; import org.hibernate.HibernateException;
/** /**
* Basic implementation of {@link Origin} * An error using XML infrastructure (jaxp, stax, etc).
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class OriginImpl implements Origin, Serializable { public class XmlInfrastructureException extends HibernateException {
private final String type; public XmlInfrastructureException(String message) {
private final String name; super( message );
public OriginImpl(String type, String name) {
this.type = type;
this.name = name;
} }
/** public XmlInfrastructureException(String message, Throwable root) {
* {@inheritDoc} super( message, root );
*/
public String getType() {
return type;
}
/**
* {@inheritDoc}
*/
public String getName() {
return name;
} }
} }

View File

@ -41,16 +41,22 @@ import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.StartElement; import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent; import javax.xml.stream.events.XMLEvent;
import javax.xml.transform.dom.DOMSource; import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamSource; import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema; import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory; import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import org.w3c.dom.Document; import org.w3c.dom.Document;
import org.w3c.dom.Element; import org.w3c.dom.Element;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
import org.hibernate.InvalidMappingException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
import org.hibernate.internal.util.xml.MappingReader;
import org.hibernate.jaxb.spi.JaxbRoot; import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin; import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping; import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;
@ -68,7 +74,7 @@ import org.hibernate.service.ServiceRegistry;
public class JaxbMappingProcessor { public class JaxbMappingProcessor {
private static final Logger log = Logger.getLogger( JaxbMappingProcessor.class ); private static final Logger log = Logger.getLogger( JaxbMappingProcessor.class );
public static final String ASSUMED_ORM_XSD_VERSION = "2.0"; public static final String ASSUMED_ORM_XSD_VERSION = "2.1";
public static final String VALIDATE_XML_SETTING = "hibernate.xml.validate"; public static final String VALIDATE_XML_SETTING = "hibernate.xml.validate";
public static final String HIBERNATE_MAPPING_URI = "http://www.hibernate.org/xsd/hibernate-mapping"; public static final String HIBERNATE_MAPPING_URI = "http://www.hibernate.org/xsd/hibernate-mapping";
@ -123,6 +129,7 @@ public class JaxbMappingProcessor {
@SuppressWarnings( { "UnnecessaryLocalVariable" }) @SuppressWarnings( { "UnnecessaryLocalVariable" })
private XMLInputFactory buildStaxFactory() { private XMLInputFactory buildStaxFactory() {
XMLInputFactory staxFactory = XMLInputFactory.newInstance(); XMLInputFactory staxFactory = XMLInputFactory.newInstance();
staxFactory.setXMLResolver( LocalXmlResourceResolver.INSTANCE );
return staxFactory; return staxFactory;
} }
@ -154,7 +161,23 @@ public class JaxbMappingProcessor {
if ( "entity-mappings".equals( elementName ) ) { if ( "entity-mappings".equals( elementName ) ) {
final Attribute attribute = event.asStartElement().getAttributeByName( ORM_VERSION_ATTRIBUTE_QNAME ); final Attribute attribute = event.asStartElement().getAttributeByName( ORM_VERSION_ATTRIBUTE_QNAME );
final String explicitVersion = attribute == null ? null : attribute.getValue(); final String explicitVersion = attribute == null ? null : attribute.getValue();
validationSchema = validateXml ? resolveSupportedOrmXsd( explicitVersion ) : null; if ( !"2.1".equals( explicitVersion ) ) {
if ( validateXml ) {
MappingReader.validateMapping(
MappingReader.SupportedOrmXsdVersion.parse( explicitVersion, origin ),
staxEventReader,
origin
);
}
staxEventReader = new LegacyJPAEventReader(
staxEventReader,
LocalXmlResourceResolver.SECOND_JPA_ORM_NS
);
validationSchema = null; //disable JAXB validation
}
else {
validationSchema = validateXml ? resolveSupportedOrmXsd( explicitVersion, origin ) : null;
}
jaxbTarget = JaxbEntityMappings.class; jaxbTarget = JaxbEntityMappings.class;
} }
else { else {
@ -163,7 +186,7 @@ public class JaxbMappingProcessor {
log.debug( "HBM mapping document did not define namespaces; wrapping in custom event reader to introduce namespace information" ); log.debug( "HBM mapping document did not define namespaces; wrapping in custom event reader to introduce namespace information" );
staxEventReader = new NamespaceAddingEventReader( staxEventReader, HIBERNATE_MAPPING_URI ); staxEventReader = new NamespaceAddingEventReader( staxEventReader, HIBERNATE_MAPPING_URI );
} }
validationSchema = validateXml ? hbmSchema() : null; validationSchema = validateXml ? MappingReader.SupportedOrmXsdVersion.HBM_4_0.getSchema() : null;
jaxbTarget = JaxbHibernateMapping.class; jaxbTarget = JaxbHibernateMapping.class;
} }
@ -177,6 +200,7 @@ public class JaxbMappingProcessor {
target = unmarshaller.unmarshal( staxEventReader ); target = unmarshaller.unmarshal( staxEventReader );
} }
catch ( JAXBException e ) { catch ( JAXBException e ) {
e.printStackTrace();
StringBuilder builder = new StringBuilder(); StringBuilder builder = new StringBuilder();
builder.append( "Unable to perform unmarshalling at line number " ); builder.append( "Unable to perform unmarshalling at line number " );
builder.append( handler.getLineNumber() ); builder.append( handler.getLineNumber() );
@ -190,6 +214,7 @@ public class JaxbMappingProcessor {
return new JaxbRoot( target, origin ); return new JaxbRoot( target, origin );
} }
private boolean isNamespaced(StartElement startElement) { private boolean isNamespaced(StartElement startElement) {
return ! "".equals( startElement.getName().getNamespaceURI() ); return ! "".equals( startElement.getName().getNamespaceURI() );
} }
@ -206,7 +231,7 @@ public class JaxbMappingProcessor {
if ( "entity-mappings".equals( rootElement.getNodeName() ) ) { if ( "entity-mappings".equals( rootElement.getNodeName() ) ) {
final String explicitVersion = rootElement.getAttribute( "version" ); final String explicitVersion = rootElement.getAttribute( "version" );
validationSchema = validateXml ? resolveSupportedOrmXsd( explicitVersion ) : null; validationSchema = validateXml ? resolveSupportedOrmXsd( explicitVersion, origin ) : null;
jaxbTarget = JaxbEntityMappings.class; jaxbTarget = JaxbEntityMappings.class;
} }
else { else {
@ -228,20 +253,14 @@ public class JaxbMappingProcessor {
return new JaxbRoot( target, origin ); return new JaxbRoot( target, origin );
} }
private Schema resolveSupportedOrmXsd(String explicitVersion) { private Schema resolveSupportedOrmXsd(String explicitVersion, Origin origin) {
final String xsdVersionString = explicitVersion == null ? ASSUMED_ORM_XSD_VERSION : explicitVersion; if( StringHelper.isEmpty(explicitVersion)){
if ( "1.0".equals( xsdVersionString ) ) { return MappingReader.SupportedOrmXsdVersion.ORM_2_1.getSchema();
return orm1Schema();
} }
else if ( "2.0".equals( xsdVersionString ) ) { return MappingReader.SupportedOrmXsdVersion.parse( explicitVersion, origin ).getSchema();
return orm2Schema();
}
throw new IllegalArgumentException( "Unsupported orm.xml XSD version encountered [" + xsdVersionString + "]" );
} }
public static final String HBM_SCHEMA_NAME = "org/hibernate/hibernate-mapping-4.0.xsd"; public static final String HBM_SCHEMA_NAME = "org/hibernate/hibernate-mapping-4.0.xsd";
public static final String ORM_1_SCHEMA_NAME = "org/hibernate/ejb/orm_1_0.xsd";
public static final String ORM_2_SCHEMA_NAME = "org/hibernate/ejb/orm_2_0.xsd";
private Schema hbmSchema; private Schema hbmSchema;
@ -252,23 +271,6 @@ public class JaxbMappingProcessor {
return hbmSchema; return hbmSchema;
} }
private Schema orm1Schema;
private Schema orm1Schema() {
if ( orm1Schema == null ) {
orm1Schema = resolveLocalSchema( ORM_1_SCHEMA_NAME );
}
return orm1Schema;
}
private Schema orm2Schema;
private Schema orm2Schema() {
if ( orm2Schema == null ) {
orm2Schema = resolveLocalSchema( ORM_2_SCHEMA_NAME );
}
return orm2Schema;
}
private Schema resolveLocalSchema(String schemaName) { private Schema resolveLocalSchema(String schemaName) {
return resolveLocalSchema( schemaName, XMLConstants.W3C_XML_SCHEMA_NS_URI ); return resolveLocalSchema( schemaName, XMLConstants.W3C_XML_SCHEMA_NS_URI );

View File

@ -0,0 +1,89 @@
package org.hibernate.jaxb.internal;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.Namespace;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.EventReaderDelegate;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
public class LegacyJPAEventReader extends EventReaderDelegate {
private final XMLEventFactory xmlEventFactory;
private final String namespaceUri;
public LegacyJPAEventReader(XMLEventReader reader, String namespaceUri) {
this( reader, XMLEventFactory.newInstance(), namespaceUri );
}
public LegacyJPAEventReader(XMLEventReader reader, XMLEventFactory xmlEventFactory, String namespaceUri) {
super( reader );
this.xmlEventFactory = xmlEventFactory;
this.namespaceUri = namespaceUri;
}
private StartElement withNamespace(StartElement startElement) {
// otherwise, wrap the start element event to provide a default namespace mapping
final List<Namespace> namespaces = new ArrayList<Namespace>();
namespaces.add( xmlEventFactory.createNamespace( "", namespaceUri ) );
Iterator<?> originalNamespaces = startElement.getNamespaces();
while ( originalNamespaces.hasNext() ) {
Namespace ns = (Namespace) originalNamespaces.next();
if ( !LocalXmlResourceResolver.INITIAL_JPA_ORM_NS.equals( ns.getNamespaceURI() ) ) {
namespaces.add( ns );
}
}
Iterator<?> attributes;
if ( "entity-mappings".equals( startElement.getName().getLocalPart() ) ) {
List st = new ArrayList();
Iterator itr = startElement.getAttributes();
while ( itr.hasNext() ) {
Attribute obj = (Attribute) itr.next();
if ( "version".equals( obj.getName().getLocalPart() ) ) {
if ( "".equals( obj.getName().getPrefix() ) ) {
st.add( xmlEventFactory.createAttribute( obj.getName(), "2.1" ) );
}
}
else {
st.add( obj );
}
}
attributes = st.iterator();
} else {
attributes = startElement.getAttributes();
}
return xmlEventFactory.createStartElement(
new QName( namespaceUri, startElement.getName().getLocalPart() ),
attributes,
namespaces.iterator()
);
}
@Override
public XMLEvent nextEvent() throws XMLStreamException {
return wrap( super.nextEvent() );
}
private XMLEvent wrap(XMLEvent event) {
if ( event.isStartElement() ) {
return withNamespace( event.asStartElement() );
}
return event;
}
@Override
public XMLEvent peek() throws XMLStreamException {
return wrap( super.peek() );
}
}

View File

@ -35,6 +35,8 @@ import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent; import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.EventReaderDelegate; import javax.xml.stream.util.EventReaderDelegate;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
/** /**
* Used to wrap a StAX {@link XMLEventReader} in order to introduce namespaces into the underlying document. This * Used to wrap a StAX {@link XMLEventReader} in order to introduce namespaces into the underlying document. This
* is intended as a temporary migration feature to allow legacy HBM mapping documents (DTD-based) to continue to * is intended as a temporary migration feature to allow legacy HBM mapping documents (DTD-based) to continue to
@ -62,7 +64,10 @@ public class NamespaceAddingEventReader extends EventReaderDelegate {
namespaces.add( xmlEventFactory.createNamespace( "", namespaceUri ) ); namespaces.add( xmlEventFactory.createNamespace( "", namespaceUri ) );
Iterator<?> originalNamespaces = startElement.getNamespaces(); Iterator<?> originalNamespaces = startElement.getNamespaces();
while ( originalNamespaces.hasNext() ) { while ( originalNamespaces.hasNext() ) {
namespaces.add( (Namespace) originalNamespaces.next() ); Namespace ns = (Namespace) originalNamespaces.next();
if ( !LocalXmlResourceResolver.INITIAL_JPA_ORM_NS.equals( ns.getNamespaceURI() ) ) {
namespaces.add( ns );
}
} }
return xmlEventFactory.createStartElement( return xmlEventFactory.createStartElement(
new QName( namespaceUri, startElement.getName().getLocalPart() ), new QName( namespaceUri, startElement.getName().getLocalPart() ),
@ -73,7 +78,10 @@ public class NamespaceAddingEventReader extends EventReaderDelegate {
@Override @Override
public XMLEvent nextEvent() throws XMLStreamException { public XMLEvent nextEvent() throws XMLStreamException {
XMLEvent event = super.nextEvent(); return wrap( super.nextEvent() );
}
private XMLEvent wrap(XMLEvent event) {
if ( event.isStartElement() ) { if ( event.isStartElement() ) {
return withNamespace( event.asStartElement() ); return withNamespace( event.asStartElement() );
} }
@ -82,12 +90,6 @@ public class NamespaceAddingEventReader extends EventReaderDelegate {
@Override @Override
public XMLEvent peek() throws XMLStreamException { public XMLEvent peek() throws XMLStreamException {
XMLEvent event = super.peek(); return wrap( super.peek() );
if ( event.isStartElement() ) {
return withNamespace( event.asStartElement() );
}
else {
return event;
}
} }
} }
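As the javadoc above explains, the wrapper simply decorates a StAX reader so that legacy, namespace-less hbm.xml content can be unmarshalled against the namespaced JAXB model. A hedged usage sketch; the JAXB wiring below is illustrative (JaxbMappingProcessor in this commit does the equivalent internally), and the package of NamespaceAddingEventReader is assumed from the surrounding diff:

import java.io.InputStream;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;

import org.hibernate.jaxb.internal.NamespaceAddingEventReader;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;

public class NamespaceWrappingSketch {
	static JaxbHibernateMapping readLegacyHbm(InputStream hbmXml) throws Exception {
		XMLEventReader raw = XMLInputFactory.newInstance().createXMLEventReader( hbmXml );
		// legacy DTD-based hbm.xml carries no namespace, so introduce the hibernate-mapping one
		XMLEventReader namespaced = new NamespaceAddingEventReader(
				raw, "http://www.hibernate.org/xsd/hibernate-mapping" );
		Unmarshaller unmarshaller = JAXBContext.newInstance( JaxbHibernateMapping.class ).createUnmarshaller();
		return (JaxbHibernateMapping) unmarshaller.unmarshal( namespaced );
	}
}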

View File

@ -212,7 +212,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
} }
public void setRole(String role) { public void setRole(String role) {
this.role = role==null ? null : role.intern(); this.role = role;
} }
public void setSorted(boolean sorted) { public void setSorted(boolean sorted) {
@ -549,7 +549,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
} }
public void setLoaderName(String name) { public void setLoaderName(String name) {
this.loaderName = name==null ? null : name.intern(); this.loaderName = name;
} }
public String getReferencedPropertyName() { public String getReferencedPropertyName() {
@ -557,7 +557,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
} }
public void setReferencedPropertyName(String propertyRef) { public void setReferencedPropertyName(String propertyRef) {
this.referencedPropertyName = propertyRef==null ? null : propertyRef.intern(); this.referencedPropertyName = propertyRef;
} }
public boolean isOptimisticLocked() { public boolean isOptimisticLocked() {

View File

@ -36,6 +36,7 @@ import org.hibernate.HibernateException;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.Mapping;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.tool.hbm2ddl.ColumnMetadata; import org.hibernate.tool.hbm2ddl.ColumnMetadata;
import org.hibernate.tool.hbm2ddl.TableMetadata; import org.hibernate.tool.hbm2ddl.TableMetadata;
@ -395,8 +396,6 @@ public class Table implements RelationalModel, Serializable {
Iterator iter = getColumnIterator(); Iterator iter = getColumnIterator();
List results = new ArrayList(); List results = new ArrayList();
int uniqueIndexInteger = 0;
while ( iter.hasNext() ) { while ( iter.hasNext() ) {
Column column = (Column) iter.next(); Column column = (Column) iter.next();
@ -423,9 +422,8 @@ public class Table implements RelationalModel, Serializable {
} }
if ( column.isUnique() ) { if ( column.isUnique() ) {
uniqueIndexInteger++;
UniqueKey uk = getOrCreateUniqueKey( UniqueKey uk = getOrCreateUniqueKey(
"UK_" + name + "_" + uniqueIndexInteger); StringHelper.randomFixedLengthHex("UK_"));
uk.addColumn( column ); uk.addColumn( column );
alter.append( dialect.getUniqueDelegate() alter.append( dialect.getUniqueDelegate()
.applyUniqueToColumn( column ) ); .applyUniqueToColumn( column ) );
@ -494,7 +492,6 @@ public class Table implements RelationalModel, Serializable {
} }
Iterator iter = getColumnIterator(); Iterator iter = getColumnIterator();
int uniqueIndexInteger = 0;
while ( iter.hasNext() ) { while ( iter.hasNext() ) {
Column col = (Column) iter.next(); Column col = (Column) iter.next();
@ -528,9 +525,8 @@ public class Table implements RelationalModel, Serializable {
} }
if ( col.isUnique() ) { if ( col.isUnique() ) {
uniqueIndexInteger++;
UniqueKey uk = getOrCreateUniqueKey( UniqueKey uk = getOrCreateUniqueKey(
"uc_" + name + "_" + uniqueIndexInteger); StringHelper.randomFixedLengthHex("UK_"));
uk.addColumn( col ); uk.addColumn( col );
buf.append( dialect.getUniqueDelegate() buf.append( dialect.getUniqueDelegate()
.applyUniqueToColumn( col ) ); .applyUniqueToColumn( col ) );
@ -653,7 +649,7 @@ public class Table implements RelationalModel, Serializable {
} }
public UniqueKey createUniqueKey(List keyColumns) { public UniqueKey createUniqueKey(List keyColumns) {
String keyName = "UK_" + uniqueColumnString( keyColumns.iterator() ); String keyName = StringHelper.randomFixedLengthHex("UK_");
UniqueKey uk = getOrCreateUniqueKey( keyName ); UniqueKey uk = getOrCreateUniqueKey( keyName );
uk.addColumns( keyColumns.iterator() ); uk.addColumns( keyColumns.iterator() );
return uk; return uk;
@ -693,9 +689,7 @@ public class Table implements RelationalModel, Serializable {
fk.setName( keyName ); fk.setName( keyName );
} }
else { else {
fk.setName( "FK" + uniqueColumnString( keyColumns.iterator(), referencedEntityName ) ); fk.setName( StringHelper.randomFixedLengthHex("FK_") );
//TODO: add referencedClass to disambiguate to FKs on the same
// columns, pointing to different tables
} }
fk.setTable( this ); fk.setTable( this );
foreignKeys.put( key, fk ); foreignKeys.put( key, fk );
@ -714,22 +708,6 @@ public class Table implements RelationalModel, Serializable {
} }
public String uniqueColumnString(Iterator iterator) {
return uniqueColumnString( iterator, null );
}
public String uniqueColumnString(Iterator iterator, String referencedEntityName) {
int result = 0;
if ( referencedEntityName != null ) {
result += referencedEntityName.hashCode();
}
while ( iterator.hasNext() ) {
result += iterator.next().hashCode();
}
return ( Integer.toHexString( name.hashCode() ) + Integer.toHexString( result ) ).toUpperCase();
}
public String getSchema() { public String getSchema() {
return schema; return schema;
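The changes above replace the counter- and hashCode-based constraint names with StringHelper.randomFixedLengthHex(prefix), whose implementation is not part of this diff. A purely illustrative sketch of what such a prefix-plus-fixed-length-hex generator might look like (not the shipped implementation):

import java.util.UUID;

public final class RandomHexNameSketch {
	private RandomHexNameSketch() {
	}

	// e.g. randomFixedLengthHex( "UK_" ) -> "UK_3f2a9c81d4e6b7a0"
	static String randomFixedLengthHex(String prefix) {
		// sixteen hex characters taken from a random UUID: every call yields a distinct,
		// fixed-length name that stays well inside common identifier length limits
		return prefix + String.format( "%016x", UUID.randomUUID().getMostSignificantBits() );
	}
}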

View File

@ -6,7 +6,7 @@
jaxb:extensionBindingPrefixes="inheritance" jaxb:extensionBindingPrefixes="inheritance"
version="2.1"> version="2.1">
<jaxb:bindings schemaLocation="../resources/org/hibernate/ejb/orm_2_0.xsd" node="/xsd:schema"> <jaxb:bindings schemaLocation="../resources/org/hibernate/jpa/orm_2_1.xsd" node="/xsd:schema">
<jaxb:schemaBindings> <jaxb:schemaBindings>
<jaxb:nameXmlTransform> <jaxb:nameXmlTransform>
<jaxb:typeName prefix="Jaxb"/> <jaxb:typeName prefix="Jaxb"/>

View File

@ -30,6 +30,7 @@ import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.internal.MetadataImpl; import org.hibernate.metamodel.internal.MetadataImpl;
import org.hibernate.metamodel.spi.binding.EntityBinding; import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.source.MappingException; import org.hibernate.metamodel.spi.source.MappingException;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseUnitTestCase; import org.hibernate.testing.junit4.BaseUnitTestCase;
import static junit.framework.Assert.assertNotNull; import static junit.framework.Assert.assertNotNull;
@ -58,6 +59,7 @@ public class OrmXmlParserTests extends BaseUnitTestCase {
} }
@Test(expected = MappingException.class) @Test(expected = MappingException.class)
@FailureExpectedWithNewMetamodel(message = "JAXB validation is disabled ATM to support both JPA 2.1 and previous orm.xml")
public void testInvalidOrmXmlThrowsException() { public void testInvalidOrmXmlThrowsException() {
MetadataSources sources = new MetadataSources( new StandardServiceRegistryBuilder().build() ); MetadataSources sources = new MetadataSources( new StandardServiceRegistryBuilder().build() );
sources.addResource( "org/hibernate/metamodel/internal/source/annotations/xml/orm-invalid.xml" ); sources.addResource( "org/hibernate/metamodel/internal/source/annotations/xml/orm-invalid.xml" );

View File

@ -39,6 +39,10 @@ import org.jboss.jandex.Indexer;
import org.hibernate.AnnotationException; import org.hibernate.AnnotationException;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.internal.JaxbMappingProcessor;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings; import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder; import org.hibernate.testing.ServiceRegistryBuilder;
@ -50,8 +54,9 @@ import static org.junit.Assert.fail;
* @author Strong Liu * @author Strong Liu
*/ */
public abstract class AbstractMockerTest { public abstract class AbstractMockerTest {
private static final String ORM1_MAPPING_XSD = "org/hibernate/ejb/orm_1_0.xsd"; private static final String ORM1_MAPPING_XSD = "org/hibernate/jpa/orm_1_0.xsd";
private static final String ORM2_MAPPING_XSD = "org/hibernate/ejb/orm_2_0.xsd"; private static final String ORM2_MAPPING_XSD = "org/hibernate/jpa/orm_2_0.xsd";
private static final String ORM2_1_MAPPING_XSD = "org/hibernate/jpa/orm_2_1.xsd";
private IndexBuilder indexBuilder; private IndexBuilder indexBuilder;
private Index index; private Index index;
private ServiceRegistry serviceRegistry; private ServiceRegistry serviceRegistry;
@ -69,23 +74,14 @@ public abstract class AbstractMockerTest {
ClassLoaderService classLoaderService = getServiceRegistry().getService( ClassLoaderService.class ); ClassLoaderService classLoaderService = getServiceRegistry().getService( ClassLoaderService.class );
List<JaxbEntityMappings> xmlEntityMappingsList = new ArrayList<JaxbEntityMappings>(); List<JaxbEntityMappings> xmlEntityMappingsList = new ArrayList<JaxbEntityMappings>();
for ( String fileName : mappingFiles ) { for ( String fileName : mappingFiles ) {
JaxbEntityMappings entityMappings; JaxbMappingProcessor processor = new JaxbMappingProcessor( getServiceRegistry() );
try { JaxbRoot jaxbRoot = processor.unmarshal(
entityMappings = XmlHelper.unmarshallXml( classLoaderService.locateResourceStream( packagePrefix + fileName ),
packagePrefix + fileName, ORM2_MAPPING_XSD, JaxbEntityMappings.class, classLoaderService new Origin( SourceType.FILE, packagePrefix + fileName )
).getRoot(); );
} JaxbEntityMappings entityMappings = (JaxbEntityMappings)jaxbRoot.getRoot();
catch ( JAXBException orm2Exception ) {
// if we cannot parse against orm_2_0.xsd we try orm_1_0.xsd for backwards compatibility
try {
entityMappings = XmlHelper.unmarshallXml(
packagePrefix + fileName, ORM1_MAPPING_XSD, JaxbEntityMappings.class, classLoaderService
).getRoot();
}
catch ( JAXBException orm1Exception ) {
throw new AnnotationException( "Unable to parse xml configuration.", orm1Exception );
}
}
xmlEntityMappingsList.add( entityMappings ); xmlEntityMappingsList.add( entityMappings );
} }
return new EntityMappingsMocker( xmlEntityMappingsList, getIndex(), getServiceRegistry() ); return new EntityMappingsMocker( xmlEntityMappingsList, getIndex(), getServiceRegistry() );

View File

@ -0,0 +1,143 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.test.annotations.index.jpa;
import java.util.Iterator;
import org.junit.Test;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Bag;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Set;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
public abstract class AbstractJPAIndexTest extends BaseCoreFunctionalTestCase {
@Test
public void testTableIndex() {
PersistentClass entity = configuration().getClassMapping( Car.class.getName() );
Iterator itr = entity.getTable().getUniqueKeyIterator();
assertTrue( itr.hasNext() );
UniqueKey uk = (UniqueKey) itr.next();
assertFalse( itr.hasNext() );
assertTrue( StringHelper.isNotEmpty( uk.getName() ) );
assertEquals( 2, uk.getColumnSpan() );
Column column = (Column) uk.getColumns().get( 0 );
assertEquals( "brand", column.getName() );
column = (Column) uk.getColumns().get( 1 );
assertEquals( "producer", column.getName() );
assertSame( entity.getTable(), uk.getTable() );
itr = entity.getTable().getIndexIterator();
assertTrue( itr.hasNext() );
Index index = (Index)itr.next();
assertFalse( itr.hasNext() );
assertEquals( "Car_idx", index.getName() );
assertEquals( 1, index.getColumnSpan() );
column = index.getColumnIterator().next();
assertEquals( "since", column.getName() );
assertSame( entity.getTable(), index.getTable() );
}
@Test
public void testSecondaryTableIndex(){
PersistentClass entity = configuration().getClassMapping( Car.class.getName() );
Join join = (Join)entity.getJoinIterator().next();
Iterator<Index> itr = join.getTable().getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 2, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "dealer_name", column.getName() );
column = columnIterator.next();
assertEquals( "rate", column.getName() );
assertSame( join.getTable(), index.getTable() );
}
@Test
public void testCollectionTableIndex(){
PersistentClass entity = configuration().getClassMapping( Car.class.getName() );
Property property = entity.getProperty( "otherDealers" );
Set set = (Set)property.getValue();
Table collectionTable = set.getCollectionTable();
Iterator<Index> itr = collectionTable.getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 1, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "name", column.getName() );
assertSame( collectionTable, index.getTable() );
}
@Test
public void testJoinTableIndex(){
PersistentClass entity = configuration().getClassMapping( Importer.class.getName() );
Property property = entity.getProperty( "cars" );
Bag set = (Bag)property.getValue();
Table collectionTable = set.getCollectionTable();
Iterator<Index> itr = collectionTable.getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 1, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "importers_id", column.getName() );
assertSame( collectionTable, index.getTable() );
}
// @Test
// public void testTableGeneratorIndex(){
// //todo
// }
}

View File

@ -23,9 +23,20 @@
*/ */
package org.hibernate.test.annotations.index.jpa; package org.hibernate.test.annotations.index.jpa;
import java.util.List;
import java.util.Set;
import javax.persistence.AttributeOverride;
import javax.persistence.AttributeOverrides;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Embedded;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.Id; import javax.persistence.Id;
import javax.persistence.Index; import javax.persistence.Index;
import javax.persistence.ManyToMany;
import javax.persistence.SecondaryTable;
import javax.persistence.Table; import javax.persistence.Table;
@ -33,13 +44,85 @@ import javax.persistence.Table;
* @author Strong Liu <stliu@hibernate.org> * @author Strong Liu <stliu@hibernate.org>
*/ */
@Entity @Entity
@Table( indexes = {@Index( unique = true, columnList = "brand, producer") @Table(indexes = {
, @Index( name = "Car_idx", columnList = "since DESC")}) @Index(unique = true, columnList = "brand, producer")
, @Index(name = "Car_idx", columnList = "since DESC")
})
@SecondaryTable(name = "T_DEALER", indexes = @Index(columnList = "dealer_name ASC, rate DESC"))
public class Car { public class Car {
@Id @Id
long id; private long id;
String brand; private String brand;
String producer; private String producer;
long since; private long since;
@AttributeOverrides({
@AttributeOverride(name = "name", column = @Column(name = "dealer_name", table = "T_DEALER")),
@AttributeOverride(name = "rate", column = @Column(table = "T_DEALER"))
})
@Embedded
private Dealer dealer;
@ElementCollection
@CollectionTable(name = "CAR_DEALTERS", indexes = @Index(columnList = "name"))
private Set<Dealer> otherDealers;
@ManyToMany(cascade = CascadeType.ALL, mappedBy = "cars")
private List<Importer> importers;
public String getBrand() {
return brand;
}
public void setBrand(String brand) {
this.brand = brand;
}
public Dealer getDealer() {
return dealer;
}
public void setDealer(Dealer dealer) {
this.dealer = dealer;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public List<Importer> getImporters() {
return importers;
}
public void setImporters(List<Importer> importers) {
this.importers = importers;
}
public Set<Dealer> getOtherDealers() {
return otherDealers;
}
public void setOtherDealers(Set<Dealer> otherDealers) {
this.otherDealers = otherDealers;
}
public String getProducer() {
return producer;
}
public void setProducer(String producer) {
this.producer = producer;
}
public long getSince() {
return since;
}
public void setSince(long since) {
this.since = since;
}
} }

View File

@ -23,11 +23,30 @@
*/ */
package org.hibernate.test.annotations.index.jpa; package org.hibernate.test.annotations.index.jpa;
import javax.persistence.Entity; import java.io.Serializable;
import javax.persistence.Embeddable;
/** /**
* @author Strong Liu <stliu@hibernate.org> * @author Strong Liu <stliu@hibernate.org>
*/ */
@Entity @Embeddable
public class Dealer { public class Dealer implements Serializable {
private String name;
private long rate;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public long getRate() {
return rate;
}
public void setRate(long rate) {
this.rate = rate;
}
} }

View File

@ -0,0 +1,70 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.test.annotations.index.jpa;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
@Entity
public class Importer {
@Id
private long id;
private String name;
@ManyToMany(cascade = CascadeType.ALL)
@JoinTable( name = "CAR_IMPORTER",indexes = @Index(columnList = "importers_id"))
private List<Car> cars;
public List<Car> getCars() {
return cars;
}
public void setCars(List<Car> cars) {
this.cars = cars;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}

View File

@ -23,144 +23,21 @@
*/ */
package org.hibernate.test.annotations.index.jpa; package org.hibernate.test.annotations.index.jpa;
import java.util.Iterator;
import org.junit.Test;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Bag;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Set;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.test.annotations.embedded.Address;
import org.hibernate.test.annotations.embedded.AddressType;
import org.hibernate.test.annotations.embedded.Book;
import org.hibernate.test.annotations.embedded.Person;
import org.hibernate.test.annotations.embedded.Summary;
import org.hibernate.test.annotations.embedded.WealthyPerson;
import org.hibernate.test.event.collection.detached.Alias;
import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/** /**
* @author Strong Liu <stliu@hibernate.org> * @author Strong Liu <stliu@hibernate.org>
*/ */
@FailureExpectedWithNewMetamodel @FailureExpectedWithNewMetamodel
public class IndexTest extends BaseCoreFunctionalTestCase { public class IndexTest extends AbstractJPAIndexTest {
@Override @Override
protected Class<?>[] getAnnotatedClasses() { protected Class<?>[] getAnnotatedClasses() {
return new Class[] { Car.class, return new Class[] {
Book.class, Car.class,
Summary.class, Dealer.class,
WealthyPerson.class, Importer.class
Person.class,
AddressType.class,
Address.class,
Alias.class,
org.hibernate.test.event.collection.detached.Character.class
}; };
} }
@Test
public void testTableIndex() {
PersistentClass entity = configuration().getClassMapping( Car.class.getName() );
Iterator itr = entity.getTable().getUniqueKeyIterator();
assertTrue( itr.hasNext() );
UniqueKey uk = (UniqueKey) itr.next();
assertFalse( itr.hasNext() );
assertTrue( StringHelper.isNotEmpty( uk.getName() ) );
assertEquals( 2, uk.getColumnSpan() );
Column column = (Column) uk.getColumns().get( 0 );
assertEquals( "brand", column.getName() );
column = (Column) uk.getColumns().get( 1 );
assertEquals( "producer", column.getName() );
assertSame( entity.getTable(), uk.getTable() );
itr = entity.getTable().getIndexIterator();
assertTrue( itr.hasNext() );
Index index = (Index)itr.next();
assertFalse( itr.hasNext() );
assertEquals( "Car_idx", index.getName() );
assertEquals( 1, index.getColumnSpan() );
column = index.getColumnIterator().next();
assertEquals( "since", column.getName() );
assertSame( entity.getTable(), index.getTable() );
}
@Test
public void testSecondaryTableIndex(){
PersistentClass entity = configuration().getClassMapping( Book.class.getName() );
Join join = (Join)entity.getJoinIterator().next();
Iterator<Index> itr = join.getTable().getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 2, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "summ_size", column.getName() );
column = columnIterator.next();
assertEquals( "text", column.getName() );
assertSame( join.getTable(), index.getTable() );
}
@Test
public void testCollectionTableIndex(){
PersistentClass entity = configuration().getClassMapping( WealthyPerson.class.getName() );
Property property = entity.getProperty( "explicitVacationHomes" );
Set set = (Set)property.getValue();
Table collectionTable = set.getCollectionTable();
Iterator<Index> itr = collectionTable.getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 2, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "countryName", column.getName() );
column = columnIterator.next();
assertEquals( "type_id", column.getName() );
assertSame( collectionTable, index.getTable() );
}
@Test
public void testJoinTableIndex(){
PersistentClass entity = configuration().getClassMapping( Alias.class.getName() );
Property property = entity.getProperty( "characters" );
Bag set = (Bag)property.getValue();
Table collectionTable = set.getCollectionTable();
Iterator<Index> itr = collectionTable.getIndexIterator();
assertTrue( itr.hasNext() );
Index index = itr.next();
assertFalse( itr.hasNext() );
assertTrue( "index name is not generated", StringHelper.isNotEmpty( index.getName() ) );
assertEquals( 1, index.getColumnSpan() );
Iterator<Column> columnIterator = index.getColumnIterator();
Column column = columnIterator.next();
assertEquals( "characters_id", column.getName() );
assertSame( collectionTable, index.getTable() );
}
// @Test
public void testTableGeneratorIndex(){
//todo
}
} }

View File

@ -0,0 +1,15 @@
package org.hibernate.test.annotations.index.jpa;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
@FailureExpectedWithNewMetamodel
public class OrmXmlIndexTest extends AbstractJPAIndexTest {
@Override
protected String[] getXmlFiles() {
return new String[] { "org/hibernate/test/annotations/index/jpa/orm-index.xml" };
}
}

View File

@ -25,14 +25,16 @@ package org.hibernate.test.annotations.xml.ejb3;
import java.io.InputStream; import java.io.InputStream;
import org.hibernate.InvalidMappingException;
import org.hibernate.cfg.Configuration;
import org.hibernate.internal.util.xml.UnsupportedOrmXsdVersionException;
import org.junit.Test; import org.junit.Test;
import org.hibernate.MappingException;
import org.hibernate.cfg.Configuration;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail;
@TestForIssue(jiraKey = "HHH-6271") @TestForIssue(jiraKey = "HHH-6271")
public class NonExistentOrmVersionTest extends BaseCoreFunctionalTestCase { public class NonExistentOrmVersionTest extends BaseCoreFunctionalTestCase {
@ -45,14 +47,11 @@ public class NonExistentOrmVersionTest extends BaseCoreFunctionalTestCase {
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFileName ); InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFileName );
config.addInputStream( is ); config.addInputStream( is );
config.buildMappings(); config.buildMappings();
fail( "Expecting failure due to unsupported xsd version" );
} }
catch ( MappingException mappingException ) { catch ( InvalidMappingException expected ) {
Throwable cause = mappingException.getCause(); }
assertTrue( catch ( UnsupportedOrmXsdVersionException expected ) {
cause.getMessage().contains(
"Value '3.0' of attribute 'version' of element 'entity-mappings' is not valid"
)
);
} }
} }
} }

View File

@ -20,57 +20,103 @@
*/ */
package org.hibernate.test.constraint; package org.hibernate.test.constraint;
import javax.persistence.Entity; import static org.junit.Assert.assertEquals;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import org.junit.Test;
import org.hibernate.metamodel.spi.relational.Column;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
/** import java.util.Iterator;
* HHH-7797 re-wrote the way dialects handle unique constraints. Test
* variations of unique & not null to ensure the constraints are created import javax.persistence.Entity;
* correctly for each dialect. import javax.persistence.GeneratedValue;
* import javax.persistence.Id;
* @author Brett Meyer import javax.persistence.OneToOne;
*/ import javax.persistence.Table;
@TestForIssue( jiraKey = "HHH-7797" ) import javax.persistence.UniqueConstraint;
import org.junit.Test;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
@FailureExpectedWithNewMetamodel
public class ConstraintTest extends BaseCoreFunctionalTestCase { public class ConstraintTest extends BaseCoreFunctionalTestCase {
private static final int MAX_NAME_LENGTH = 30;
private static final String EXPLICIT_FK_NAME = "fk_explicit";
private static final String EXPLICIT_UK_NAME = "uk_explicit";
@Override @Override
protected Class<?>[] getAnnotatedClasses() { protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] { return new Class<?>[] {
Entity1.class DataPoint.class, DataPoint2.class
}; };
} }
@Test @Test
public void testConstraints() { @TestForIssue( jiraKey = "HHH-7797" )
public void testUniqueConstraints() {
Column column = SchemaUtil.getColumn( Entity1.class, "foo1", metadata() ); Column column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
.getProperty( "foo1" ).getColumnIterator().next();
assertFalse( column.isNullable() ); assertFalse( column.isNullable() );
assertTrue( column.isUnique() ); assertTrue( column.isUnique() );
column = SchemaUtil.getColumn( Entity1.class, "foo2", metadata() ); column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
.getProperty( "foo2" ).getColumnIterator().next();
assertTrue( column.isNullable() ); assertTrue( column.isNullable() );
assertTrue( column.isUnique() ); assertTrue( column.isUnique() );
column = SchemaUtil.getColumn( Entity1.class, "id", metadata() ); column = (Column) configuration().getClassMapping( DataPoint.class.getName() )
.getProperty( "id" ).getColumnIterator().next();
assertFalse( column.isNullable() ); assertFalse( column.isNullable() );
assertTrue( column.isUnique() ); assertTrue( column.isUnique() );
} }
@Test
@TestForIssue( jiraKey = "HHH-1904" )
public void testConstraintNameLength() {
Iterator<org.hibernate.mapping.Table> tableItr = configuration().getTableMappings();
while (tableItr.hasNext()) {
org.hibernate.mapping.Table table = tableItr.next();
Iterator fkItr = table.getForeignKeyIterator();
while (fkItr.hasNext()) {
ForeignKey fk = (ForeignKey) fkItr.next();
assertTrue( fk.getName().length() <= MAX_NAME_LENGTH );
// ensure the randomly generated constraint name is not used
// when one is given explicitly
Column column = fk.getColumn( 0 );
if ( column.getName().equals( "explicit" ) ) {
assertEquals( fk.getName(), EXPLICIT_FK_NAME );
}
}
Iterator ukItr = table.getUniqueKeyIterator();
while (ukItr.hasNext()) {
UniqueKey uk = (UniqueKey) ukItr.next();
assertTrue( uk.getName().length() <= MAX_NAME_LENGTH );
// ensure the randomly generated constraint name is not used
// when one is given explicitly
Column column = uk.getColumn( 0 );
if ( column.getName().equals( "explicit" ) ) {
assertEquals( uk.getName(), EXPLICIT_UK_NAME );
}
}
}
}
@Entity @Entity
@Table( name = "Entity1" ) @Table( name = "DataPoint", uniqueConstraints = {
public static class Entity1 { @UniqueConstraint( name = EXPLICIT_UK_NAME, columnNames = { "explicit" } )
} )
public static class DataPoint {
@Id @Id
@GeneratedValue @GeneratedValue
@javax.persistence.Column( nullable = false, unique = true) @javax.persistence.Column( nullable = false, unique = true)
@ -81,5 +127,22 @@ public class ConstraintTest extends BaseCoreFunctionalTestCase {
@javax.persistence.Column( nullable = true, unique = true) @javax.persistence.Column( nullable = true, unique = true)
public String foo2; public String foo2;
public String explicit;
}
@Entity
@Table( name = "DataPoint2" )
public static class DataPoint2 {
@Id
@GeneratedValue
public long id;
@OneToOne
public DataPoint dp;
@OneToOne
@org.hibernate.annotations.ForeignKey(name = EXPLICIT_FK_NAME)
public DataPoint explicit;
} }
} }

View File

@ -12,6 +12,7 @@ import org.hibernate.Hibernate;
import org.hibernate.ScrollableResults; import org.hibernate.ScrollableResults;
import org.hibernate.Session; import org.hibernate.Session;
import org.hibernate.Transaction; import org.hibernate.Transaction;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.dialect.Oracle8iDialect; import org.hibernate.dialect.Oracle8iDialect;
import org.hibernate.dialect.SQLServerDialect; import org.hibernate.dialect.SQLServerDialect;
@ -42,7 +43,7 @@ public class HQLScrollFetchTest extends BaseCoreFunctionalTestCase {
} }
@Test @Test
@SkipForDialect( { SQLServerDialect.class, Oracle8iDialect.class, H2Dialect.class } ) @SkipForDialect( { SQLServerDialect.class, Oracle8iDialect.class, H2Dialect.class, DB2Dialect.class } )
public void testScroll() { public void testScroll() {
Session s = openSession(); Session s = openSession();
ScrollableResults results = s.createQuery( QUERY ).scroll(); ScrollableResults results = s.createQuery( QUERY ).scroll();

View File

@ -1,6 +1,8 @@
package org.hibernate.test.querycache; package org.hibernate.test.querycache;
import java.io.Serializable; import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Embeddable; import javax.persistence.Embeddable;
@Embeddable @Embeddable
@ -8,13 +10,20 @@ public class StringCompositeKey implements Serializable {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private String substation; private String substation;
private String deviceType; private String deviceType;
private String device; private String device;
private String analog;
public String getSubstation() { // For some dialects, the sum of the primary key column lengths cannot
// be larger than 255 (DB2). Restrict them to a sufficiently
// small size. See HHH-8085.
@Column( length = 50 )
public String getSubstation() {
return substation; return substation;
} }
@ -22,6 +31,7 @@ public class StringCompositeKey implements Serializable {
this.substation = substation; this.substation = substation;
} }
@Column( length = 50 )
public String getDeviceType() { public String getDeviceType() {
return deviceType; return deviceType;
} }
@ -30,6 +40,7 @@ public class StringCompositeKey implements Serializable {
this.deviceType = deviceType; this.deviceType = deviceType;
} }
@Column( length = 50 )
public String getDevice() { public String getDevice() {
return device; return device;
} }
@ -38,6 +49,7 @@ public class StringCompositeKey implements Serializable {
this.device = device; this.device = device;
} }
@Column( length = 50 )
public String getAnalog() { public String getAnalog() {
return analog; return analog;
} }
@ -45,6 +57,4 @@ public class StringCompositeKey implements Serializable {
public void setAnalog(String analog) { public void setAnalog(String analog) {
this.analog = analog; this.analog = analog;
} }
private String analog;
} }
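(As a quick check of the restriction the comment describes: with each of the four key properties capped at 50 characters, the composite key totals at most 4 x 50 = 200 characters, comfortably below the 255 limit cited for DB2, assuming single-byte characters; multi-byte encodings would call for smaller caps.)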

View File

@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<entity-mappings xmlns="http://xmlns.jcp.org/xml/ns/persistence/orm"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="2.1" >
<package>org.hibernate.test.annotations.index.jpa</package>
<!--<access>FIELD</access>-->
<entity class="Car">
<table name="T_FATHER">
<index unique="true" column-list="brand, producer"/>
<index name="Car_idx" column-list="since DESC"/>
</table>
<secondary-table name="T_DEALER">
<index column-list="dealer_name ASC, rate DESC"/>
</secondary-table>
<attributes>
<id name="id">
<generated-value strategy="AUTO"/>
</id>
<basic name="brand"/>
<basic name="producer"/>
<basic name="since"/>
<many-to-many name="importers" mapped-by="cars">
<cascade><cascade-all/></cascade>
</many-to-many>
<element-collection name="otherDealers">
<collection-table name="CAR_DEALTERS">
<index column-list="name"/>
</collection-table>
</element-collection>
<embedded name="dealer">
<attribute-override name="name">
<column name="dealer_name" table="T_DEALER"/>
</attribute-override>
<attribute-override name="rate">
<column table="T_DEALER"/>
</attribute-override>
</embedded>
</attributes>
</entity>
<entity class="Importer">
<attributes>
<id name="id"/>
<basic name="name"/>
<many-to-many name="cars">
<join-table name="CAR_IMPORTER">
<index column-list="importers_id"/>
</join-table>
<cascade><cascade-all/></cascade>
</many-to-many>
</attributes>
</entity>
<embeddable class="Dealer">
<attributes>
<basic name="name"/>
<basic name="rate"/>
</attributes>
</embeddable>
</entity-mappings>
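For readers more familiar with the annotation form, the <index> elements above correspond to the JPA 2.1 javax.persistence.Index annotation. A minimal sketch of the equivalent declarations (entity body trimmed to the id; names are taken from the XML, everything else is assumed):

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.SecondaryTable;
import javax.persistence.Table;

@Entity
@Table( name = "T_FATHER", indexes = {
		@Index( unique = true, columnList = "brand, producer" ),
		@Index( name = "Car_idx", columnList = "since DESC" )
} )
@SecondaryTable( name = "T_DEALER", indexes = {
		@Index( columnList = "dealer_name ASC, rate DESC" )
} )
public class Car {
	@Id
	@GeneratedValue
	private long id;
	// brand, producer, since, importers, otherDealers and dealer omitted for brevity
}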

View File

@ -26,10 +26,12 @@ package org.hibernate.ejb.packaging;
import java.io.InputStream; import java.io.InputStream;
/** /**
* @deprecated Use {@link org.hibernate.jpa.packaging.spi.NamedInputStream} instead * @deprecated Doubly deprecated actually :) Moved to {@link org.hibernate.jpa.boot.spi.NamedInputStream}
* due to package renaming (org.hibernate.ejb -> org.hibernate.jpa). But also, the role fulfilled by this class
* was moved to the new {@link org.hibernate.jpa.boot.spi.InputStreamAccess} contract.
*/ */
@Deprecated @Deprecated
public class NamedInputStream extends org.hibernate.jpa.packaging.spi.NamedInputStream { public class NamedInputStream extends org.hibernate.jpa.boot.spi.NamedInputStream {
public NamedInputStream(String name, InputStream stream) { public NamedInputStream(String name, InputStream stream) {
super( name, stream ); super( name, stream );
} }

View File

@ -24,8 +24,8 @@
package org.hibernate.ejb.packaging; package org.hibernate.ejb.packaging;
/** /**
* @deprecated Use {@link org.hibernate.jpa.packaging.spi.Scanner} instead * @deprecated Use {@link org.hibernate.jpa.boot.scan.spi.Scanner} instead
*/ */
@Deprecated @Deprecated
public interface Scanner extends org.hibernate.jpa.packaging.spi.Scanner { public interface Scanner extends org.hibernate.jpa.boot.scan.spi.Scanner {
} }

View File

@ -1,8 +1,10 @@
/* /*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as * Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC. * distributed under license by Red Hat Inc.
* *
* This copyrighted material is made available to anyone wishing to use, modify, * This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU * copy, or redistribute it subject to the terms and conditions of the GNU
@ -19,29 +21,26 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.jpa.packaging.internal; package org.hibernate.jpa.boot.archive.internal;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
* @author Brett Meyer * @author Steve Ebersole
*/ */
public class JarVisitorFactory { public class ArchiveHelper {
private static final Logger log = Logger.getLogger( ArchiveHelper.class );
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(EntityManagerMessageLogger.class,
JarVisitorFactory.class.getName());
/** /**
* Get the JAR URL of the JAR containing the given entry * Get the JAR URL of the JAR containing the given entry
@ -51,7 +50,6 @@ public class JarVisitorFactory {
* @param entry file known to be in the JAR * @param entry file known to be in the JAR
* @return the JAR URL * @return the JAR URL
* @throws IllegalArgumentException if no URL is found * @throws IllegalArgumentException if no URL is found
* TODO move to a ScannerHelper service?
*/ */
public static URL getJarURLFromURLEntry(URL url, String entry) throws IllegalArgumentException { public static URL getJarURLFromURLEntry(URL url, String entry) throws IllegalArgumentException {
URL jarUrl; URL jarUrl;
@ -106,7 +104,7 @@ public class JarVisitorFactory {
"Unable to determine JAR Url from " + url + ". Cause: " + e.getMessage() "Unable to determine JAR Url from " + url + ". Cause: " + e.getMessage()
); );
} }
LOG.trace("JAR URL from URL Entry: " + url + " >> " + jarUrl); log.trace("JAR URL from URL Entry: " + url + " >> " + jarUrl);
return jarUrl; return jarUrl;
} }
@ -114,7 +112,6 @@ public class JarVisitorFactory {
* get the URL from a given path string * get the URL from a given path string
* *
* @throws IllegalArgumentException if something goes wrong * @throws IllegalArgumentException if something goes wrong
* TODO move to a ScannerHelper service?
*/ */
public static URL getURLFromPath(String jarPath) { public static URL getURLFromPath(String jarPath) {
URL jarUrl; URL jarUrl;
@ -134,70 +131,40 @@ public class JarVisitorFactory {
return jarUrl; return jarUrl;
} }
/** public static String unqualifiedJarFileName(URL jarUrl) {
* Get a JarVisitor to the jar <code>jarPath</code> applying the given filters // todo : weak algorithm subject to AOOBE
* String fileName = jarUrl.getFile();
* Method used in a non-managed environment int exclamation = fileName.lastIndexOf( "!" );
* if (exclamation != -1) {
* @throws IllegalArgumentException if the jarPath is incorrect fileName = fileName.substring( 0, exclamation );
*/
public static JarVisitor getVisitor(String jarPath, Filter[] filters) throws IllegalArgumentException {
File file = new File( jarPath );
if ( file.isFile() ) {
return new InputStreamZippedJarVisitor( jarPath, filters );
} }
else {
return new ExplodedJarVisitor( jarPath, filters ); int slash = fileName.lastIndexOf( "/" );
if ( slash != -1 ) {
fileName = fileName.substring(
fileName.lastIndexOf( "/" ) + 1,
fileName.length()
);
}
if ( fileName.length() > 4 && fileName.endsWith( "ar" ) && fileName.charAt( fileName.length() - 4 ) == '.' ) {
fileName = fileName.substring( 0, fileName.length() - 4 );
}
return fileName;
}
public static byte[] getBytesFromInputStreamSafely(InputStream inputStream) {
try {
return getBytesFromInputStream( inputStream );
}
catch (IOException e) {
throw new ArchiveException( "Unable to extract bytes from InputStream", e );
} }
} }
/**
* Build a JarVisitor on the given JAR URL applying the given filters
*
* @throws IllegalArgumentException if the URL is malformed
*/
public static JarVisitor getVisitor(URL jarUrl, Filter[] filters) throws IllegalArgumentException {
return getVisitor( jarUrl, filters, "" );
}
public static JarVisitor getVisitor(URL jarUrl, Filter[] filters, String entry) throws IllegalArgumentException {
String protocol = jarUrl.getProtocol();
if ( "jar".equals( protocol ) ) {
return new JarProtocolVisitor( jarUrl, filters, entry );
}
else if ( StringHelper.isEmpty( protocol ) || "file".equals( protocol ) || "vfszip".equals( protocol ) || "vfsfile".equals( protocol ) ) {
File file;
try {
final String filePart = jarUrl.getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
//unescaped (from the container), keep as is
file = new File( jarUrl.getFile() );
}
else {
file = new File( jarUrl.toURI().getSchemeSpecificPart() );
}
}
catch (URISyntaxException e) {
throw new IllegalArgumentException(
"Unable to visit JAR " + jarUrl + ". Cause: " + e.getMessage(), e
);
}
if ( file.isDirectory() ) {
return new ExplodedJarVisitor( jarUrl, filters, entry );
}
else {
return new FileZippedJarVisitor( jarUrl, filters, entry );
}
}
else {
//let's assume the url can return the jar as a zip stream
return new InputStreamZippedJarVisitor( jarUrl, filters, entry );
}
}
// Optimized by HHH-7835
public static byte[] getBytesFromInputStream(InputStream inputStream) throws IOException { public static byte[] getBytesFromInputStream(InputStream inputStream) throws IOException {
// Optimized by HHH-7835
int size; int size;
List<byte[]> data = new LinkedList<byte[]>(); List<byte[]> data = new LinkedList<byte[]>();
int bufferSize = 4096; int bufferSize = 4096;
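(By way of illustration of the new helper, with a hypothetical path: for the URL file:/opt/deploy/orders.jar, unqualifiedJarFileName() returns "orders"; the directory portion and the ".jar" extension are stripped. For a jar:-style URL, everything from the last '!' onward is dropped first, then the same trimming is applied.)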

View File

@ -0,0 +1,212 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.internal;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Enumeration;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.boot.archive.spi.AbstractArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
import org.hibernate.jpa.boot.internal.FileInputStreamAccess;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* @author Steve Ebersole
*/
public class ExplodedArchiveDescriptor extends AbstractArchiveDescriptor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(
EntityManagerMessageLogger.class,
ExplodedArchiveDescriptor.class.getName()
);
public ExplodedArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL archiveUrl,
String entryBasePrefix) {
super( archiveDescriptorFactory, archiveUrl, entryBasePrefix );
}
@Override
public void visitArchive(ArchiveContext context) {
final File rootDirectory = resolveRootDirectory();
if ( rootDirectory == null ) {
return;
}
if ( rootDirectory.isDirectory() ) {
processDirectory( rootDirectory, null, context );
}
else {
//assume zipped file
processZippedRoot( rootDirectory, context );
}
}
private File resolveRootDirectory() {
final File archiveUrlDirectory;
try {
final String filePart = getArchiveUrl().getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
//unescaped (from the container), keep as is
archiveUrlDirectory = new File( filePart );
}
else {
archiveUrlDirectory = new File( getArchiveUrl().toURI().getSchemeSpecificPart() );
}
}
catch (URISyntaxException e) {
LOG.malformedUrl( getArchiveUrl(), e );
return null;
}
if ( !archiveUrlDirectory.exists() ) {
LOG.explodedJarDoesNotExist( getArchiveUrl() );
return null;
}
if ( !archiveUrlDirectory.isDirectory() ) {
LOG.explodedJarNotDirectory( getArchiveUrl() );
return null;
}
final String entryBase = getEntryBasePrefix();
if ( entryBase != null && entryBase.length() > 0 && ! "/".equals( entryBase ) ) {
return new File( archiveUrlDirectory, entryBase );
}
else {
return archiveUrlDirectory;
}
}
private void processDirectory(
File directory,
String path,
ArchiveContext context) {
if ( directory == null ) {
return;
}
final File[] files = directory.listFiles();
if ( files == null ) {
return;
}
path = path == null ? "" : path + "/";
for ( final File localFile : files ) {
if ( !localFile.exists() ) {
// should never happen conceptually, but...
continue;
}
if ( localFile.isDirectory() ) {
processDirectory( localFile, path + localFile.getName(), context );
continue;
}
final String name = localFile.getAbsolutePath();
final String relativeName = path + localFile.getName();
final InputStreamAccess inputStreamAccess = new FileInputStreamAccess( name, localFile );
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return name;
}
@Override
public String getNameWithinArchive() {
return relativeName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
context.obtainArchiveEntryHandler( entry ).handleEntry( entry, context );
}
}
private void processZippedRoot(File rootFile, ArchiveContext context) {
try {
final JarFile jarFile = new JarFile(rootFile);
final Enumeration<? extends ZipEntry> entries = jarFile.entries();
while ( entries.hasMoreElements() ) {
final ZipEntry zipEntry = entries.nextElement();
if ( zipEntry.isDirectory() ) {
continue;
}
final String name = extractName( zipEntry );
final String relativeName = extractRelativeName( zipEntry );
final InputStreamAccess inputStreamAccess;
try {
inputStreamAccess = buildByteBasedInputStreamAccess( name, jarFile.getInputStream( zipEntry ) );
}
catch (IOException e) {
throw new ArchiveException(
String.format(
"Unable to access stream from jar file [%s] for entry [%s]",
jarFile.getName(),
zipEntry.getName()
)
);
}
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return name;
}
@Override
public String getNameWithinArchive() {
return relativeName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
context.obtainArchiveEntryHandler( entry ).handleEntry( entry, context );
}
}
catch (IOException e) {
throw new ArchiveException( "Error accessing jar file [" + rootFile.getAbsolutePath() + "]", e );
}
}
}

View File

@ -0,0 +1,193 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.internal;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Enumeration;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.boot.archive.spi.AbstractArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntryHandler;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
/**
* An ArchiveDescriptor implementation leveraging the {@link JarFile} API for processing.
*
* @author Steve Ebersole
*/
public class JarFileBasedArchiveDescriptor extends AbstractArchiveDescriptor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(
EntityManagerMessageLogger.class,
JarFileBasedArchiveDescriptor.class.getName()
);
public JarFileBasedArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL archiveUrl,
String entry) {
super( archiveDescriptorFactory, archiveUrl, entry );
}
@Override
public void visitArchive(ArchiveContext context) {
final JarFile jarFile = resolveJarFileReference();
if ( jarFile == null ) {
return;
}
final Enumeration<? extends ZipEntry> zipEntries = jarFile.entries();
while ( zipEntries.hasMoreElements() ) {
final ZipEntry zipEntry = zipEntries.nextElement();
final String entryName = extractName( zipEntry );
if ( getEntryBasePrefix() != null && ! entryName.startsWith( getEntryBasePrefix() ) ) {
continue;
}
if ( zipEntry.isDirectory() ) {
continue;
}
if ( entryName.equals( getEntryBasePrefix() ) ) {
// exact match, might be a nested jar entry (ie from jar:file:..../foo.ear!/bar.jar)
//
// This algorithm assumes that the zipped file is only the URL root (including entry), not
// just any random entry
try {
InputStream is = new BufferedInputStream( jarFile.getInputStream( zipEntry ) );
try {
final JarInputStream jarInputStream = new JarInputStream( is );
ZipEntry subZipEntry = jarInputStream.getNextEntry();
while ( subZipEntry != null ) {
if ( ! subZipEntry.isDirectory() ) {
final String name = extractName( subZipEntry );
final String relativeName = extractRelativeName( subZipEntry );
final InputStreamAccess inputStreamAccess
= buildByteBasedInputStreamAccess( name, jarInputStream );
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return name;
}
@Override
public String getNameWithinArchive() {
return relativeName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
final ArchiveEntryHandler entryHandler = context.obtainArchiveEntryHandler( entry );
entryHandler.handleEntry( entry, context );
}
subZipEntry = jarInputStream.getNextEntry();
}
}
finally {
is.close();
}
}
catch (Exception e) {
throw new ArchiveException( "Error accessing JarFile entry [" + zipEntry.getName() + "]", e );
}
}
else {
final String name = extractName( zipEntry );
final String relativeName = extractRelativeName( zipEntry );
final InputStreamAccess inputStreamAccess;
try {
inputStreamAccess = buildByteBasedInputStreamAccess( name, jarFile.getInputStream( zipEntry ) );
}
catch (IOException e) {
throw new ArchiveException(
String.format(
"Unable to access stream from jar file [%s] for entry [%s]",
jarFile.getName(),
zipEntry.getName()
)
);
}
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return name;
}
@Override
public String getNameWithinArchive() {
return relativeName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
final ArchiveEntryHandler entryHandler = context.obtainArchiveEntryHandler( entry );
entryHandler.handleEntry( entry, context );
}
}
}
private JarFile resolveJarFileReference() {
try {
String filePart = getArchiveUrl().getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
// unescaped (from the container), keep as is
return new JarFile( getArchiveUrl().getFile() );
}
else {
return new JarFile( getArchiveUrl().toURI().getSchemeSpecificPart() );
}
}
catch (IOException e) {
LOG.unableToFindFile( getArchiveUrl(), e );
}
catch (URISyntaxException e) {
LOG.malformedUrlWarning( getArchiveUrl(), e );
}
return null;
}
}

View File

@ -0,0 +1,168 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.internal;
import java.io.IOException;
import java.net.URL;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.boot.archive.spi.AbstractArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* An ArchiveDescriptor implementation that works on archives accessible through a {@link java.util.jar.JarInputStream}.
* NOTE : This is a less efficient implementation than {@link JarFileBasedArchiveDescriptor}
*
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public class JarInputStreamBasedArchiveDescriptor extends AbstractArchiveDescriptor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(
EntityManagerMessageLogger.class,
JarInputStreamBasedArchiveDescriptor.class.getName()
);
public JarInputStreamBasedArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL url,
String entry) {
super( archiveDescriptorFactory, url, entry );
}
@Override
public void visitArchive(ArchiveContext context) {
final JarInputStream jarInputStream;
try {
jarInputStream = new JarInputStream( getArchiveUrl().openStream() );
}
catch (Exception e) {
//really should catch IOException but Eclipse is buggy and raises NPE...
LOG.unableToFindFile( getArchiveUrl(), e );
return;
}
try {
JarEntry jarEntry;
while ( ( jarEntry = jarInputStream.getNextJarEntry() ) != null ) {
String jarEntryName = jarEntry.getName();
if ( getEntryBasePrefix() != null && ! jarEntryName.startsWith( getEntryBasePrefix() ) ) {
continue;
}
if ( jarEntry.isDirectory() ) {
continue;
}
if ( jarEntryName.equals( getEntryBasePrefix() ) ) {
// exact match, might be a nested jar entry (ie from jar:file:..../foo.ear!/bar.jar)
//
// This algorithm assumes that the zipped file is only the URL root (including entry), not
// just any random entry
try {
final JarInputStream subJarInputStream = new JarInputStream( jarInputStream );
try {
ZipEntry subZipEntry = jarInputStream.getNextEntry();
while (subZipEntry != null) {
if ( ! subZipEntry.isDirectory() ) {
final String subName = extractName( subZipEntry );
final InputStreamAccess inputStreamAccess
= buildByteBasedInputStreamAccess( subName, subJarInputStream );
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return subName;
}
@Override
public String getNameWithinArchive() {
return subName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
context.obtainArchiveEntryHandler( entry ).handleEntry( entry, context );
}
subZipEntry = jarInputStream.getNextJarEntry();
}
}
finally {
subJarInputStream.close();
}
}
catch (Exception e) {
throw new ArchiveException( "Error accessing nested jar", e );
}
}
else {
final String entryName = extractName( jarEntry );
final InputStreamAccess inputStreamAccess
= buildByteBasedInputStreamAccess( entryName, jarInputStream );
final String relativeName = extractRelativeName( jarEntry );
final ArchiveEntry entry = new ArchiveEntry() {
@Override
public String getName() {
return entryName;
}
@Override
public String getNameWithinArchive() {
return relativeName;
}
@Override
public InputStreamAccess getStreamAccess() {
return inputStreamAccess;
}
};
context.obtainArchiveEntryHandler( entry ).handleEntry( entry, context );
}
}
jarInputStream.close();
}
catch (IOException ioe) {
throw new ArchiveException(
String.format( "Error accessing JarInputStream [%s]", getArchiveUrl() ),
ioe
);
}
}
}

View File

@ -0,0 +1,71 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.internal;
import java.net.URL;
import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
/**
* An ArchiveDescriptor implementation for handling archives whose URL reports a JAR protocol (i.e., jar://).
*
* @author Steve Ebersole
*/
public class JarProtocolArchiveDescriptor implements ArchiveDescriptor {
private final ArchiveDescriptor delegateDescriptor;
public JarProtocolArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL url,
String incomingEntry) {
if ( incomingEntry != null && incomingEntry.length() > 0 ) {
throw new IllegalArgumentException( "jar:jar: not supported: " + url );
}
final String urlFile = url.getFile();
final int subEntryIndex = urlFile.lastIndexOf( "!" );
if ( subEntryIndex == -1 ) {
throw new AssertionFailure( "JAR URL does not contain '!/' :" + url );
}
final String subEntry;
if ( subEntryIndex + 1 >= urlFile.length() ) {
subEntry = "";
}
else {
subEntry = urlFile.substring( subEntryIndex + 1 );
}
URL fileUrl = archiveDescriptorFactory.getJarURLFromURLEntry( url, subEntry );
delegateDescriptor = archiveDescriptorFactory.buildArchiveDescriptor( fileUrl, subEntry );
}
@Override
public void visitArchive(ArchiveContext context) {
delegateDescriptor.visitArchive( context );
}
}
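(Worked example, with a hypothetical URL: for jar:file:/opt/app.ear!/lib/orders.jar, url.getFile() is file:/opt/app.ear!/lib/orders.jar, so the split at the last '!' yields the sub-entry /lib/orders.jar; getJarURLFromURLEntry() then presumably resolves the enclosing file:/opt/app.ear URL, and the delegate descriptor is built for that URL with the sub-entry as its base prefix, so visiting the jar: URL effectively visits the nested jar inside the ear.)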

View File

@ -0,0 +1,105 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.internal;
import java.io.File;
import java.net.URISyntaxException;
import java.net.URL;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
/**
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public class StandardArchiveDescriptorFactory implements ArchiveDescriptorFactory {
public static final StandardArchiveDescriptorFactory INSTANCE = new StandardArchiveDescriptorFactory();
@Override
public ArchiveDescriptor buildArchiveDescriptor(URL url) {
return buildArchiveDescriptor( url, "" );
}
@Override
public ArchiveDescriptor buildArchiveDescriptor(URL url, String entry) {
final String protocol = url.getProtocol();
if ( "jar".equals( protocol ) ) {
return new JarProtocolArchiveDescriptor( this, url, entry );
}
else if ( StringHelper.isEmpty( protocol )
|| "file".equals( protocol )
|| "vfszip".equals( protocol )
|| "vfsfile".equals( protocol ) ) {
final File file;
try {
final String filePart = url.getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
//unescaped (from the container), keep as is
file = new File( url.getFile() );
}
else {
file = new File( url.toURI().getSchemeSpecificPart() );
}
if ( ! file.exists() ) {
throw new IllegalArgumentException(
String.format(
"File [%s] referenced by given URL [%s] does not exist",
filePart,
url.toExternalForm()
)
);
}
}
catch (URISyntaxException e) {
throw new IllegalArgumentException(
"Unable to visit JAR " + url + ". Cause: " + e.getMessage(), e
);
}
if ( file.isDirectory() ) {
return new ExplodedArchiveDescriptor( this, url, entry );
}
else {
return new JarFileBasedArchiveDescriptor( this, url, entry );
}
}
else {
//let's assume the url can return the jar as a zip stream
return new JarInputStreamBasedArchiveDescriptor( this, url, entry );
}
}
@Override
public URL getJarURLFromURLEntry(URL url, String entry) throws IllegalArgumentException {
return ArchiveHelper.getJarURLFromURLEntry( url, entry );
}
@Override
public URL getURLFromPath(String jarPath) {
return ArchiveHelper.getURLFromPath( jarPath );
}
}

View File

@ -0,0 +1,92 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.spi;
import java.io.InputStream;
import java.net.URL;
import java.util.zip.ZipEntry;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.jpa.boot.internal.ByteArrayInputStreamAccess;
import org.hibernate.jpa.boot.archive.internal.ArchiveHelper;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
/**
* @author Steve Ebersole
*/
public abstract class AbstractArchiveDescriptor implements ArchiveDescriptor {
private final ArchiveDescriptorFactory archiveDescriptorFactory;
private final URL archiveUrl;
private final String entryBasePrefix;
protected AbstractArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL archiveUrl,
String entryBasePrefix) {
this.archiveDescriptorFactory = archiveDescriptorFactory;
this.archiveUrl = archiveUrl;
this.entryBasePrefix = normalizeEntryBasePrefix( entryBasePrefix );
}
private static String normalizeEntryBasePrefix(String entryBasePrefix) {
if ( StringHelper.isEmpty( entryBasePrefix ) || entryBasePrefix.length() == 1 ) {
return null;
}
return entryBasePrefix.startsWith( "/" ) ? entryBasePrefix.substring( 1 ) : entryBasePrefix;
}
protected ArchiveDescriptorFactory getArchiveDescriptorFactory() {
return archiveDescriptorFactory;
}
protected URL getArchiveUrl() {
return archiveUrl;
}
protected String getEntryBasePrefix() {
return entryBasePrefix;
}
protected String extractRelativeName(ZipEntry zipEntry) {
final String entryName = extractName( zipEntry );
return entryBasePrefix == null ? entryName : entryName.substring( entryBasePrefix.length() );
}
protected String extractName(ZipEntry zipEntry) {
return normalizePathName( zipEntry.getName() );
}
protected String normalizePathName(String pathName) {
return pathName.startsWith( "/" ) ? pathName.substring( 1 ) : pathName;
}
protected InputStreamAccess buildByteBasedInputStreamAccess(final String name, InputStream inputStream) {
// because of how jar InputStreams work we need to extract the bytes immediately. However, we
// do delay the creation of the ByteArrayInputStreams until needed
final byte[] bytes = ArchiveHelper.getBytesFromInputStreamSafely( inputStream );
return new ByteArrayInputStreamAccess( name, bytes );
}
}

View File

@ -0,0 +1,37 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.spi;
import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
/**
* @author Steve Ebersole
*/
public interface ArchiveContext {
public PersistenceUnitDescriptor getPersistenceUnitDescriptor();
public boolean isRootUrl();
public ArchiveEntryHandler obtainArchiveEntryHandler(ArchiveEntry entry);
}

View File

@ -1,8 +1,10 @@
/* /*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as * Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC. * distributed under license by Red Hat Inc.
* *
* This copyrighted material is made available to anyone wishing to use, modify, * This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU * copy, or redistribute it subject to the terms and conditions of the GNU
@ -19,22 +21,14 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.jpa.packaging.internal; package org.hibernate.jpa.boot.archive.spi;
/** /**
* Filter used when searching elements in a JAR * Contract for visiting an archive, which might be a jar, a zip, an exploded directory, etc.
* *
* @author Steve Ebersole
* @author Emmanuel Bernard * @author Emmanuel Bernard
*/ */
public abstract class Filter { public interface ArchiveDescriptor {
private boolean retrieveStream; public void visitArchive(ArchiveContext archiveContext);
protected Filter(boolean retrieveStream) {
this.retrieveStream = retrieveStream;
}
public boolean getStream() {
return retrieveStream;
}
} }

View File

@ -1,8 +1,10 @@
/* /*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as * Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC. * distributed under license by Red Hat Inc.
* *
* This copyrighted material is made available to anyone wishing to use, modify, * This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU * copy, or redistribute it subject to the terms and conditions of the GNU
@ -19,25 +21,19 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.jpa.packaging.internal; package org.hibernate.jpa.boot.archive.spi;
import java.net.URL;
/** /**
* Filter use to match a file by its name * Contract for building ArchiveDescriptor instances.
* *
* @author Emmanuel Bernard * @author Steve Ebersole
*/ */
public abstract class FileFilter extends Filter { public interface ArchiveDescriptorFactory {
public ArchiveDescriptor buildArchiveDescriptor(URL url);
public ArchiveDescriptor buildArchiveDescriptor(URL jarUrl, String entry);
/** public URL getJarURLFromURLEntry(URL url, String entry) throws IllegalArgumentException;
* @param retrieveStream Give back an open stream to the matching element or not public URL getURLFromPath(String jarPath);
*/ }
public FileFilter(boolean retrieveStream) {
super( retrieveStream );
}
/**
* Return true if the fully qualified file name match
*/
public abstract boolean accept(String name);
}

View File

@ -1,7 +1,7 @@
/* /*
* Hibernate, Relational Persistence for Idiomatic Java * Hibernate, Relational Persistence for Idiomatic Java
* *
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as * Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc. * distributed under license by Red Hat Inc.
@ -21,29 +21,35 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.internal.util.xml; package org.hibernate.jpa.boot.archive.spi;
import java.io.Serializable; import org.hibernate.jpa.boot.spi.InputStreamAccess;
/** /**
* Describes the origin of an xml document * Represent an entry in the archive.
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public interface Origin extends Serializable { public interface ArchiveEntry {
/** /**
* Retrieve the type of origin. This is not a discrete set, but might be something like * Get the entry's name
* {@code file} for file protocol URLs, or {@code resource} for classpath resource lookups.
* *
* @return The origin type. * @return
*/
public String getType();
/**
* The name of the document origin. Interpretation is relative to the type, but might be the
* resource name or file URL.
*
* @return The name.
*/ */
public String getName(); public String getName();
/**
* Get the relative name of the entry within the archive. Typically what we are looking for here is
* the ClassLoader resource lookup name.
*
* @return
*/
public String getNameWithinArchive();
/**
* Get access to the stream for the entry
*
* @return
*/
public InputStreamAccess getStreamAccess();
} }
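(Concretely, with hypothetical paths: for a class file visited in an exploded directory, getName() ends up being the absolute file path, say /opt/deploy/app/com/acme/Order.class per the ExplodedArchiveDescriptor above, while getNameWithinArchive() is the root-relative form com/acme/Order.class, i.e. exactly what a ClassLoader resource lookup expects.)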

View File

@ -0,0 +1,33 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.archive.spi;
/**
* Handler for archive entries, based on the classified type of the entry
*
* @author Steve Ebersole
*/
public interface ArchiveEntryHandler {
public void handleEntry(ArchiveEntry entry, ArchiveContext context);
}
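Bringing the SPI pieces above together, here is a minimal usage sketch that walks an archive and prints each entry's in-archive name. It is not part of this commit; the file URL, the class name and the null persistence-unit descriptor are assumptions for illustration (the handler below never consults the descriptor):

import java.net.URL;

import org.hibernate.jpa.boot.archive.internal.StandardArchiveDescriptorFactory;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntryHandler;
import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;

public class ArchiveWalkExample {
	public static void main(String[] args) throws Exception {
		// the factory picks the descriptor implementation from the URL protocol
		// and from whether the path points at a directory or a jar file
		final ArchiveDescriptor descriptor = StandardArchiveDescriptorFactory.INSTANCE
				.buildArchiveDescriptor( new URL( "file:/tmp/orders.jar" ) );

		// a throw-away context: every entry is handled by printing its name
		descriptor.visitArchive(
				new ArchiveContext() {
					@Override
					public PersistenceUnitDescriptor getPersistenceUnitDescriptor() {
						return null; // not needed by the handler below
					}

					@Override
					public boolean isRootUrl() {
						return true;
					}

					@Override
					public ArchiveEntryHandler obtainArchiveEntryHandler(ArchiveEntry entry) {
						return new ArchiveEntryHandler() {
							@Override
							public void handleEntry(ArchiveEntry archiveEntry, ArchiveContext context) {
								System.out.println( archiveEntry.getNameWithinArchive() );
							}
						};
					}
				}
		);
	}
}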

View File

@ -1,8 +1,10 @@
/* /*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as * Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC. * distributed under license by Red Hat Inc.
* *
* This copyrighted material is made available to anyone wishing to use, modify, * This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU * copy, or redistribute it subject to the terms and conditions of the GNU
@ -19,20 +21,19 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.jpa.packaging.internal; package org.hibernate.jpa.boot.archive.spi;
import org.hibernate.HibernateException;
/** /**
* Filter on class elements * @author Steve Ebersole
*
* @author Emmanuel Bernard
* @see JavaElementFilter
*/ */
public abstract class ClassFilter extends JavaElementFilter { public class ArchiveException extends HibernateException {
/** public ArchiveException(String message) {
* @see JavaElementFilter#JavaElementFilter(boolean, Class[]) super( message );
*/
protected ClassFilter(boolean retrieveStream, Class[] annotations) {
super( retrieveStream, annotations );
} }
}
public ArchiveException(String message, Throwable root) {
super( message, root );
}
}

View File

@ -0,0 +1,60 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.internal;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.NamedInputStream;
/**
* An InputStreamAccess implementation based on a byte array
*
* @author Steve Ebersole
*/
public class ByteArrayInputStreamAccess implements InputStreamAccess {
private final String name;
private final byte[] bytes;
public ByteArrayInputStreamAccess(String name, byte[] bytes) {
this.name = name;
this.bytes = bytes;
}
@Override
public String getStreamName() {
return name;
}
@Override
public InputStream accessInputStream() {
return new ByteArrayInputStream( bytes );
}
@Override
public NamedInputStream asNamedInputStream() {
return new NamedInputStream( getStreamName(), accessInputStream() );
}
}
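A small sketch of why the byte-based form is handy (names and values assumed): because the bytes are captured once up front, the logical stream can be re-opened as often as needed, which a raw jar entry stream would not allow:

import java.io.InputStream;

import org.hibernate.jpa.boot.internal.ByteArrayInputStreamAccess;
import org.hibernate.jpa.boot.spi.InputStreamAccess;

public class InputStreamAccessExample {
	public static void main(String[] args) throws Exception {
		final byte[] bytes = "<entity-mappings/>".getBytes( "UTF-8" );
		final InputStreamAccess access = new ByteArrayInputStreamAccess( "META-INF/orm.xml", bytes );

		// the underlying bytes can be re-read as many times as needed
		InputStream first = access.accessInputStream();   // e.g. to sniff the xsd version
		InputStream second = access.accessInputStream();  // e.g. to perform the actual binding
		first.close();
		second.close();

		// and bridged back to the legacy shape where older code still expects it
		access.asNamedInputStream();
	}
}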

View File

@ -1,8 +1,10 @@
/* /*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as * Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution * indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are * statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC. * distributed under license by Red Hat Inc.
* *
* This copyrighted material is made available to anyone wishing to use, modify, * This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU * copy, or redistribute it subject to the terms and conditions of the GNU
@ -19,44 +21,48 @@
* 51 Franklin Street, Fifth Floor * 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA * Boston, MA 02110-1301 USA
*/ */
package org.hibernate.jpa.packaging.internal; package org.hibernate.jpa.boot.internal;
import java.io.InputStream;
import org.hibernate.jpa.boot.spi.ClassDescriptor;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
/** /**
* Represent a JAR entry * @author Steve Ebersole
* Contains a name and an optional Input stream to the entry
*
* @author Emmanuel Bernard
*/ */
public class Entry { public class ClassDescriptorImpl implements ClassDescriptor {
private String name; private final String name;
private InputStream is; private final InputStreamAccess streamAccess;
public Entry(String name, InputStream is) { public ClassDescriptorImpl(String name, InputStreamAccess streamAccess) {
this.name = name; this.name = name;
this.is = is; this.streamAccess = streamAccess;
} }
@Override
public String getName() { public String getName() {
return name; return name;
} }
public InputStream getInputStream() { @Override
return is; public InputStreamAccess getStreamAccess() {
return streamAccess;
} }
@Override
public boolean equals(Object o) { public boolean equals(Object o) {
if ( this == o ) return true; if ( this == o ) {
if ( o == null || getClass() != o.getClass() ) return false; return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final Entry entry = (Entry) o; ClassDescriptorImpl that = (ClassDescriptorImpl) o;
return name.equals( that.name );
if ( !name.equals( entry.name ) ) return false;
return true;
} }
@Override
public int hashCode() { public int hashCode() {
return name.hashCode(); return name.hashCode();
} }
} }

View File

@ -23,12 +23,17 @@
*/ */
package org.hibernate.jpa.boot.internal; package org.hibernate.jpa.boot.internal;
import javax.persistence.AttributeConverter;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityNotFoundException;
import javax.persistence.PersistenceException;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.Serializable; import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.net.URL; import java.net.URL;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -42,18 +47,18 @@ import java.util.Properties;
import java.util.Set; import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import javax.persistence.AttributeConverter; import org.jboss.jandex.AnnotationInstance;
import javax.persistence.Converter; import org.jboss.jandex.ClassInfo;
import javax.persistence.Embeddable; import org.jboss.jandex.CompositeIndex;
import javax.persistence.Entity; import org.jboss.jandex.DotName;
import javax.persistence.EntityManagerFactory; import org.jboss.jandex.Index;
import javax.persistence.EntityNotFoundException; import org.jboss.jandex.IndexView;
import javax.persistence.MappedSuperclass; import org.jboss.jandex.Indexer;
import javax.persistence.PersistenceException;
import javax.persistence.spi.PersistenceUnitTransactionType; import org.jboss.logging.Logger;
import javax.sql.DataSource;
import org.hibernate.Interceptor; import org.hibernate.Interceptor;
import org.hibernate.InvalidMappingException;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.MappingNotFoundException; import org.hibernate.MappingNotFoundException;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
@ -86,29 +91,29 @@ import org.hibernate.jpa.boot.spi.IntegratorProvider;
import org.hibernate.jpa.boot.spi.JpaUnifiedSettingsBuilder; import org.hibernate.jpa.boot.spi.JpaUnifiedSettingsBuilder;
import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor; import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
import org.hibernate.jpa.event.spi.JpaIntegrator; import org.hibernate.jpa.event.spi.JpaIntegrator;
import org.hibernate.jpa.internal.schemagen.JpaSchemaGenerator;
import org.hibernate.jpa.internal.EntityManagerFactoryImpl; import org.hibernate.jpa.internal.EntityManagerFactoryImpl;
import org.hibernate.jpa.internal.EntityManagerMessageLogger; import org.hibernate.jpa.internal.EntityManagerMessageLogger;
import org.hibernate.jpa.internal.schemagen.JpaSchemaGenerator;
import org.hibernate.jpa.internal.util.LogHelper; import org.hibernate.jpa.internal.util.LogHelper;
import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper; import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper;
import org.hibernate.jpa.packaging.internal.NativeScanner; import org.hibernate.jpa.boot.scan.internal.StandardScanOptions;
import org.hibernate.jpa.packaging.spi.NamedInputStream; import org.hibernate.jpa.boot.scan.internal.StandardScanner;
import org.hibernate.jpa.packaging.spi.Scanner; import org.hibernate.jpa.boot.spi.ClassDescriptor;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
import org.hibernate.jpa.boot.spi.NamedInputStream;
import org.hibernate.jpa.boot.spi.PackageDescriptor;
import org.hibernate.jpa.boot.scan.spi.ScanOptions;
import org.hibernate.jpa.boot.scan.spi.ScanResult;
import org.hibernate.jpa.boot.scan.spi.Scanner;
import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider; import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames; import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames;
import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper; import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper;
import org.hibernate.proxy.EntityNotFoundDelegate; import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.secure.internal.JACCConfiguration; import org.hibernate.secure.internal.JACCConfiguration;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryImplementor; import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.CompositeIndex;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;
import org.jboss.logging.Logger;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
@ -203,13 +208,14 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Next we do a preliminary pass at metadata processing, which involves: // Next we do a preliminary pass at metadata processing, which involves:
// 1) scanning // 1) scanning
ScanResult scanResult = scan( bootstrapServiceRegistry ); final ScanResult scanResult = scan( bootstrapServiceRegistry );
final DeploymentResources deploymentResources = buildDeploymentResources( scanResult, bootstrapServiceRegistry );
// 2) building a Jandex index // 2) building a Jandex index
Set<String> collectedManagedClassNames = collectManagedClassNames( scanResult ); final IndexView jandexIndex = locateOrBuildJandexIndex( deploymentResources );
IndexView jandexIndex = locateOrBuildJandexIndex( collectedManagedClassNames, scanResult.getPackageNames(), bootstrapServiceRegistry );
// 3) building "metadata sources" to keep for later to use in building the SessionFactory // 3) building "metadata sources" to keep for later to use in building the SessionFactory
metadataSources = prepareMetadataSources( jandexIndex, collectedManagedClassNames, scanResult, bootstrapServiceRegistry ); metadataSources = prepareMetadataSources( jandexIndex, deploymentResources, bootstrapServiceRegistry );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
withValidatorFactory( configurationValues.get( AvailableSettings.VALIDATION_FACTORY ) ); withValidatorFactory( configurationValues.get( AvailableSettings.VALIDATION_FACTORY ) );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -221,6 +227,241 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
} }
} }
private static interface DeploymentResources {
public Iterable<ClassDescriptor> getClassDescriptors();
public Iterable<PackageDescriptor> getPackageDescriptors();
public Iterable<MappingFileDescriptor> getMappingFileDescriptors();
}
private DeploymentResources buildDeploymentResources(
ScanResult scanResult,
BootstrapServiceRegistry bootstrapServiceRegistry) {
// mapping files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
final ArrayList<MappingFileDescriptor> mappingFileDescriptors = new ArrayList<MappingFileDescriptor>();
final Set<String> nonLocatedMappingFileNames = new HashSet<String>();
final List<String> explicitMappingFileNames = persistenceUnit.getMappingFileNames();
if ( explicitMappingFileNames != null ) {
nonLocatedMappingFileNames.addAll( explicitMappingFileNames );
}
for ( MappingFileDescriptor mappingFileDescriptor : scanResult.getLocatedMappingFiles() ) {
mappingFileDescriptors.add( mappingFileDescriptor );
nonLocatedMappingFileNames.remove( mappingFileDescriptor.getName() );
}
for ( String name : nonLocatedMappingFileNames ) {
MappingFileDescriptor descriptor = buildMappingFileDescriptor( name, bootstrapServiceRegistry );
mappingFileDescriptors.add( descriptor );
}
// classes and packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
final HashMap<String, ClassDescriptor> classDescriptorMap = new HashMap<String, ClassDescriptor>();
final HashMap<String, PackageDescriptor> packageDescriptorMap = new HashMap<String, PackageDescriptor>();
for ( ClassDescriptor classDescriptor : scanResult.getLocatedClasses() ) {
classDescriptorMap.put( classDescriptor.getName(), classDescriptor );
}
for ( PackageDescriptor packageDescriptor : scanResult.getLocatedPackages() ) {
packageDescriptorMap.put( packageDescriptor.getName(), packageDescriptor );
}
final List<String> explicitClassNames = persistenceUnit.getManagedClassNames();
if ( explicitClassNames != null ) {
for ( String explicitClassName : explicitClassNames ) {
// IMPL NOTE : explicitClassNames can contain class or package names!!!
if ( classDescriptorMap.containsKey( explicitClassName ) ) {
continue;
}
if ( packageDescriptorMap.containsKey( explicitClassName ) ) {
continue;
}
// try it as a class name first...
final String classFileName = explicitClassName.replace( '.', '/' ) + ".class";
final URL classFileUrl = bootstrapServiceRegistry.getService( ClassLoaderService.class )
.locateResource( classFileName );
if ( classFileUrl != null ) {
classDescriptorMap.put(
explicitClassName,
new ClassDescriptorImpl( explicitClassName, new UrlInputStreamAccess( classFileUrl ) )
);
continue;
}
// otherwise, try it as a package name
final String packageInfoFileName = explicitClassName.replace( '.', '/' ) + "/package-info.class";
final URL packageInfoFileUrl = bootstrapServiceRegistry.getService( ClassLoaderService.class )
.locateResource( packageInfoFileName );
if ( packageInfoFileUrl != null ) {
packageDescriptorMap.put(
explicitClassName,
new PackageDescriptorImpl( explicitClassName, new UrlInputStreamAccess( packageInfoFileUrl ) )
);
continue;
}
LOG.debugf(
"Unable to resolve class [%s] named in persistence unit [%s]",
explicitClassName,
persistenceUnit.getName()
);
}
}
return new DeploymentResources() {
@Override
public Iterable<ClassDescriptor> getClassDescriptors() {
return classDescriptorMap.values();
}
@Override
public Iterable<PackageDescriptor> getPackageDescriptors() {
return packageDescriptorMap.values();
}
@Override
public Iterable<MappingFileDescriptor> getMappingFileDescriptors() {
return mappingFileDescriptors;
}
};
}
private MappingFileDescriptor buildMappingFileDescriptor(
String name,
BootstrapServiceRegistry bootstrapServiceRegistry) {
final URL url = bootstrapServiceRegistry.getService( ClassLoaderService.class ).locateResource( name );
if ( url == null ) {
throw persistenceException( "Unable to resolve named mapping-file [" + name + "]" );
}
return new MappingFileDescriptorImpl( name, new UrlInputStreamAccess( url ) );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// temporary!
@SuppressWarnings("unchecked")
public Map getConfigurationValues() {
return Collections.unmodifiableMap( configurationValues );
}
public Configuration getHibernateConfiguration() {
return hibernateConfiguration;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@SuppressWarnings("unchecked")
private MetadataSources prepareMetadataSources(
IndexView jandexIndex,
DeploymentResources deploymentResources,
BootstrapServiceRegistry bootstrapServiceRegistry) {
// todo : this needs to tie into the metamodel branch...
MetadataSources metadataSources = new MetadataSources();
for ( ClassDescriptor classDescriptor : deploymentResources.getClassDescriptors() ) {
final String className = classDescriptor.getName();
final ClassInfo classInfo = jandexIndex.getClassByName( DotName.createSimple( className ) );
if ( classInfo == null ) {
// Not really sure what this means. Most likely it is explicitly listed in the persistence unit,
// but mapped via mapping file. Anyway, assume it's a mapping class...
metadataSources.annotatedMappingClassNames.add( className );
continue;
}
// logic here assumes an entity is not also a converter...
AnnotationInstance converterAnnotation = JandexHelper.getSingleAnnotation(
classInfo.annotations(),
JPADotNames.CONVERTER
);
if ( converterAnnotation != null ) {
metadataSources.converterDescriptors.add(
new MetadataSources.ConverterDescriptor(
className,
JandexHelper.getValue( converterAnnotation, "autoApply", boolean.class )
)
);
}
else {
metadataSources.annotatedMappingClassNames.add( className );
}
}
for ( PackageDescriptor packageDescriptor : deploymentResources.getPackageDescriptors() ) {
metadataSources.packageNames.add( packageDescriptor.getName() );
}
for ( MappingFileDescriptor mappingFileDescriptor : deploymentResources.getMappingFileDescriptors() ) {
metadataSources.namedMappingFileInputStreams.add( mappingFileDescriptor.getStreamAccess().asNamedInputStream() );
}
final String explicitHbmXmls = (String) configurationValues.remove( AvailableSettings.HBXML_FILES );
if ( explicitHbmXmls != null ) {
metadataSources.mappingFileResources.addAll( Arrays.asList( StringHelper.split( ", ", explicitHbmXmls ) ) );
}
final List<String> explicitOrmXml = (List<String>) configurationValues.remove( AvailableSettings.XML_FILE_NAMES );
if ( explicitOrmXml != null ) {
metadataSources.mappingFileResources.addAll( explicitOrmXml );
}
return metadataSources;
}
private IndexView locateOrBuildJandexIndex(DeploymentResources deploymentResources) {
// for now create a whole new Index to work with, eventually we need to:
// 1) accept an Index as an incoming config value
// 2) pass that Index along to the metamodel code...
IndexView jandexIndex = (IndexView) configurationValues.get( JANDEX_INDEX );
if ( jandexIndex == null ) {
jandexIndex = buildJandexIndex( deploymentResources );
}
return jandexIndex;
}
private IndexView buildJandexIndex(DeploymentResources deploymentResources) {
Indexer indexer = new Indexer();
for ( ClassDescriptor classDescriptor : deploymentResources.getClassDescriptors() ) {
indexStream( indexer, classDescriptor.getStreamAccess() );
}
for ( PackageDescriptor packageDescriptor : deploymentResources.getPackageDescriptors() ) {
indexStream( indexer, packageDescriptor.getStreamAccess() );
}
// for now we just skip entities defined in (1) orm.xml files and (2) hbm.xml files. this part really needs
// metamodel branch...
// for now, we also need to wrap this in a CompositeIndex until Jandex is updated to use a common interface
// between the 2...
return indexer.complete();
}
private void indexStream(Indexer indexer, InputStreamAccess streamAccess) {
try {
InputStream stream = streamAccess.accessInputStream();
try {
indexer.index( stream );
}
finally {
try {
stream.close();
}
catch (Exception ignore) {
}
}
}
catch ( IOException e ) {
throw persistenceException( "Unable to index from stream " + streamAccess.getStreamName(), e );
}
}
/** /**
* Builds the {@link BootstrapServiceRegistry} used to eventually build the {@link org.hibernate.boot.registry.StandardServiceRegistryBuilder}; mainly * Builds the {@link BootstrapServiceRegistry} used to eventually build the {@link org.hibernate.boot.registry.StandardServiceRegistryBuilder}; mainly
* used here during instantiation to define class-loading behavior. * used here during instantiation to define class-loading behavior.
@ -360,7 +601,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
= (JaxbHibernateConfiguration.JaxbSessionFactory.JaxbClassCache) cacheDeclaration; = (JaxbHibernateConfiguration.JaxbSessionFactory.JaxbClassCache) cacheDeclaration;
cacheRegionDefinitions.add( cacheRegionDefinitions.add(
new CacheRegionDefinition( new CacheRegionDefinition(
CacheRegionDefinition.CacheRegionType.ENTITY, CacheRegionDefinition.CacheType.ENTITY,
jaxbClassCache.getClazz(), jaxbClassCache.getClazz(),
jaxbClassCache.getUsage().value(), jaxbClassCache.getUsage().value(),
jaxbClassCache.getRegion(), jaxbClassCache.getRegion(),
@ -373,7 +614,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
= (JaxbHibernateConfiguration.JaxbSessionFactory.JaxbCollectionCache) cacheDeclaration; = (JaxbHibernateConfiguration.JaxbSessionFactory.JaxbCollectionCache) cacheDeclaration;
cacheRegionDefinitions.add( cacheRegionDefinitions.add(
new CacheRegionDefinition( new CacheRegionDefinition(
CacheRegionDefinition.CacheRegionType.COLLECTION, CacheRegionDefinition.CacheType.COLLECTION,
jaxbCollectionCache.getCollection(), jaxbCollectionCache.getCollection(),
jaxbCollectionCache.getUsage().value(), jaxbCollectionCache.getUsage().value(),
jaxbCollectionCache.getRegion(), jaxbCollectionCache.getRegion(),
@ -398,98 +639,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
} }
} }
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// temporary!
@SuppressWarnings("unchecked")
public Map getConfigurationValues() {
return Collections.unmodifiableMap( configurationValues );
}
public Configuration getHibernateConfiguration() {
return hibernateConfiguration;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@SuppressWarnings("unchecked")
private MetadataSources prepareMetadataSources(
IndexView jandexIndex,
Set<String> collectedManagedClassNames,
ScanResult scanResult,
BootstrapServiceRegistry bootstrapServiceRegistry) {
// todo : this needs to tie into the metamodel branch...
MetadataSources metadataSources = new MetadataSources();
for ( String className : collectedManagedClassNames ) {
final ClassInfo classInfo = jandexIndex.getClassByName( DotName.createSimple( className ) );
if ( classInfo == null ) {
// Not really sure what this means. Most likely it is explicitly listed in the persistence unit,
// but mapped via mapping file. Anyway, assume it's a mapping class...
metadataSources.annotatedMappingClassNames.add( className );
continue;
}
// logic here assumes an entity is not also a converter...
AnnotationInstance converterAnnotation = JandexHelper.getSingleAnnotation(
classInfo.annotations(),
JPADotNames.CONVERTER
);
if ( converterAnnotation != null ) {
metadataSources.converterDescriptors.add(
new MetadataSources.ConverterDescriptor(
className,
JandexHelper.getValue( converterAnnotation, "autoApply", boolean.class )
)
);
}
else {
metadataSources.annotatedMappingClassNames.add( className );
}
}
metadataSources.packageNames.addAll( scanResult.getPackageNames() );
metadataSources.namedMappingFileInputStreams.addAll( scanResult.getHbmFiles() );
metadataSources.mappingFileResources.addAll( scanResult.getMappingFiles() );
final String explicitHbmXmls = (String) configurationValues.remove( AvailableSettings.HBXML_FILES );
if ( explicitHbmXmls != null ) {
metadataSources.mappingFileResources.addAll( Arrays.asList( StringHelper.split( ", ", explicitHbmXmls ) ) );
}
final List<String> explicitOrmXml = (List<String>) configurationValues.remove( AvailableSettings.XML_FILE_NAMES );
if ( explicitOrmXml != null ) {
metadataSources.mappingFileResources.addAll( explicitOrmXml );
}
return metadataSources;
}
private Set<String> collectManagedClassNames(ScanResult scanResult) {
Set<String> collectedNames = new HashSet<String>();
if ( persistenceUnit.getManagedClassNames() != null ) {
collectedNames.addAll( persistenceUnit.getManagedClassNames() );
}
collectedNames.addAll( scanResult.getManagedClassNames() );
return collectedNames;
}
private IndexView locateOrBuildJandexIndex(
Set<String> collectedManagedClassNames,
List<String> packageNames,
BootstrapServiceRegistry bootstrapServiceRegistry) {
// for now create a whole new Index to work with, eventually we need to:
// 1) accept an Index as an incoming config value
// 2) pass that Index along to the metamodel code...
//
// (1) is mocked up here, but JBoss AS does not currently pass in any Index to use...
IndexView jandexIndex = (IndexView) configurationValues.get( JANDEX_INDEX );
if ( jandexIndex == null ) {
jandexIndex = buildJandexIndex( collectedManagedClassNames, packageNames, bootstrapServiceRegistry );
}
return jandexIndex;
}
private IndexView buildJandexIndex(Set<String> classNamesSource, List<String> packageNames, BootstrapServiceRegistry bootstrapServiceRegistry) { private IndexView buildJandexIndex(Set<String> classNamesSource, List<String> packageNames, BootstrapServiceRegistry bootstrapServiceRegistry) {
Indexer indexer = new Indexer(); Indexer indexer = new Indexer();
@ -551,11 +700,11 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
} }
} }
private void addCacheRegionDefinition(String role, String value, CacheRegionDefinition.CacheRegionType cacheType) { private void addCacheRegionDefinition(String role, String value, CacheRegionDefinition.CacheType cacheType) {
final StringTokenizer params = new StringTokenizer( value, ";, " ); final StringTokenizer params = new StringTokenizer( value, ";, " );
if ( !params.hasMoreTokens() ) { if ( !params.hasMoreTokens() ) {
StringBuilder error = new StringBuilder( "Illegal usage of " ); StringBuilder error = new StringBuilder( "Illegal usage of " );
if ( cacheType == CacheRegionDefinition.CacheRegionType.ENTITY ) { if ( cacheType == CacheRegionDefinition.CacheType.ENTITY ) {
error.append( AvailableSettings.CLASS_CACHE_PREFIX ) error.append( AvailableSettings.CLASS_CACHE_PREFIX )
.append( ": " ) .append( ": " )
.append( AvailableSettings.CLASS_CACHE_PREFIX ); .append( AvailableSettings.CLASS_CACHE_PREFIX );
@ -579,7 +728,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
region = params.nextToken(); region = params.nextToken();
} }
boolean lazyProperty = true; boolean lazyProperty = true;
if ( cacheType == CacheRegionDefinition.CacheRegionType.ENTITY ) { if ( cacheType == CacheRegionDefinition.CacheType.ENTITY ) {
if ( params.hasMoreTokens() ) { if ( params.hasMoreTokens() ) {
lazyProperty = "all".equalsIgnoreCase( params.nextToken() ); lazyProperty = "all".equalsIgnoreCase( params.nextToken() );
} }
@ -594,37 +743,24 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private ScanResult scan(BootstrapServiceRegistry bootstrapServiceRegistry) { private ScanResult scan(BootstrapServiceRegistry bootstrapServiceRegistry) {
Scanner scanner = locateOrBuildScanner( bootstrapServiceRegistry ); final Scanner scanner = locateOrBuildScanner( bootstrapServiceRegistry );
ScanningContext scanningContext = new ScanningContext(); final ScanOptions scanOptions = determineScanOptions();
final ScanResult scanResult = new ScanResult(); return scanner.scan( persistenceUnit, scanOptions );
if ( persistenceUnit.getMappingFileNames() != null ) { }
scanResult.getMappingFiles().addAll( persistenceUnit.getMappingFileNames() );
}
// dunno, but the old code did it... private ScanOptions determineScanOptions() {
scanningContext.setSearchOrm( ! scanResult.getMappingFiles().contains( META_INF_ORM_XML ) ); return new StandardScanOptions(
(String) configurationValues.get( AvailableSettings.AUTODETECTION ),
if ( persistenceUnit.getJarFileUrls() != null ) { persistenceUnit.isExcludeUnlistedClasses()
prepareAutoDetectionSettings( scanningContext, false ); );
for ( URL jar : persistenceUnit.getJarFileUrls() ) {
scanningContext.setUrl( jar );
scanInContext( scanner, scanningContext, scanResult );
}
}
prepareAutoDetectionSettings( scanningContext, persistenceUnit.isExcludeUnlistedClasses() );
scanningContext.setUrl( persistenceUnit.getPersistenceUnitRootUrl() );
scanInContext( scanner, scanningContext, scanResult );
return scanResult;
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private Scanner locateOrBuildScanner(BootstrapServiceRegistry bootstrapServiceRegistry) { private Scanner locateOrBuildScanner(BootstrapServiceRegistry bootstrapServiceRegistry) {
final Object value = configurationValues.remove( AvailableSettings.SCANNER ); final Object value = configurationValues.remove( AvailableSettings.SCANNER );
if ( value == null ) { if ( value == null ) {
return new NativeScanner(); return new StandardScanner();
} }
if ( Scanner.class.isInstance( value ) ) { if ( Scanner.class.isInstance( value ) ) {
@ -660,91 +796,6 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
} }
} }
private void prepareAutoDetectionSettings(ScanningContext context, boolean excludeUnlistedClasses) {
final String detectionSetting = (String) configurationValues.get( AvailableSettings.AUTODETECTION );
if ( detectionSetting == null ) {
if ( excludeUnlistedClasses ) {
context.setDetectClasses( false );
context.setDetectHbmFiles( false );
}
else {
context.setDetectClasses( true );
context.setDetectHbmFiles( true );
}
}
else {
for ( String token : StringHelper.split( ", ", detectionSetting ) ) {
if ( "class".equalsIgnoreCase( token ) ) {
context.setDetectClasses( true );
}
if ( "hbm".equalsIgnoreCase( token ) ) {
context.setDetectClasses( true );
}
}
}
}
private void scanInContext(
Scanner scanner,
ScanningContext scanningContext,
ScanResult scanResult) {
if ( scanningContext.getUrl() == null ) {
// not sure i like just ignoring this being null, but this is exactly what the old code does...
LOG.containerProvidingNullPersistenceUnitRootUrl();
return;
}
if ( scanningContext.getUrl().getProtocol().equalsIgnoreCase( "bundle" ) ) {
// TODO: Is there a way to scan the root bundle URL in OSGi containers?
// Although the URL provides a stream handler that works for finding
// resources in a specific Bundle, the root one does not work.
return;
}
try {
if ( scanningContext.isDetectClasses() ) {
Set<Package> matchingPackages = scanner.getPackagesInJar( scanningContext.url, new HashSet<Class<? extends Annotation>>(0) );
for ( Package pkg : matchingPackages ) {
scanResult.getPackageNames().add( pkg.getName() );
}
Set<Class<? extends Annotation>> annotationsToLookFor = new HashSet<Class<? extends Annotation>>();
annotationsToLookFor.add( Entity.class );
annotationsToLookFor.add( MappedSuperclass.class );
annotationsToLookFor.add( Embeddable.class );
annotationsToLookFor.add( Converter.class );
Set<Class<?>> matchingClasses = scanner.getClassesInJar( scanningContext.url, annotationsToLookFor );
for ( Class<?> clazz : matchingClasses ) {
scanResult.getManagedClassNames().add( clazz.getName() );
}
}
Set<String> patterns = new HashSet<String>();
if ( scanningContext.isSearchOrm() ) {
patterns.add( META_INF_ORM_XML );
}
if ( scanningContext.isDetectHbmFiles() ) {
patterns.add( "**/*.hbm.xml" );
}
if ( ! scanResult.getMappingFiles().isEmpty() ) {
patterns.addAll( scanResult.getMappingFiles() );
}
if ( patterns.size() != 0 ) {
Set<NamedInputStream> files = scanner.getFilesInJar( scanningContext.getUrl(), patterns );
for ( NamedInputStream file : files ) {
scanResult.getHbmFiles().add( file );
scanResult.getMappingFiles().remove( file.getName() );
}
}
}
catch (PersistenceException e ) {
throw e;
}
catch ( RuntimeException e ) {
throw persistenceException( "error trying to scan url: " + scanningContext.getUrl().toString(), e );
}
}
@Override @Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) { public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
this.validatorFactory = validatorFactory; this.validatorFactory = validatorFactory;
@ -886,14 +937,14 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
addCacheRegionDefinition( addCacheRegionDefinition(
keyString.substring( AvailableSettings.CLASS_CACHE_PREFIX.length() + 1 ), keyString.substring( AvailableSettings.CLASS_CACHE_PREFIX.length() + 1 ),
(String) entry.getValue(), (String) entry.getValue(),
CacheRegionDefinition.CacheRegionType.ENTITY CacheRegionDefinition.CacheType.ENTITY
); );
} }
else if ( keyString.startsWith( AvailableSettings.COLLECTION_CACHE_PREFIX ) ) { else if ( keyString.startsWith( AvailableSettings.COLLECTION_CACHE_PREFIX ) ) {
addCacheRegionDefinition( addCacheRegionDefinition(
keyString.substring( AvailableSettings.COLLECTION_CACHE_PREFIX.length() + 1 ), keyString.substring( AvailableSettings.COLLECTION_CACHE_PREFIX.length() + 1 ),
(String) entry.getValue(), (String) entry.getValue(),
CacheRegionDefinition.CacheRegionType.COLLECTION CacheRegionDefinition.CacheType.COLLECTION
); );
} }
else if ( keyString.startsWith( AvailableSettings.JACC_PREFIX ) else if ( keyString.startsWith( AvailableSettings.JACC_PREFIX )
@ -1006,30 +1057,30 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
if ( jaccDefinitions != null ) { if ( jaccDefinitions != null ) {
for ( JaccDefinition jaccDefinition : jaccDefinitions ) { for ( JaccDefinition jaccDefinition : jaccDefinitions ) {
JACCConfiguration jaccCfg = new JACCConfiguration( jaccDefinition.getContextId() ); JACCConfiguration jaccCfg = new JACCConfiguration( jaccDefinition.contextId );
jaccCfg.addPermission( jaccCfg.addPermission(
jaccDefinition.getRole(), jaccDefinition.role,
jaccDefinition.getClazz(), jaccDefinition.clazz,
jaccDefinition.getActions() jaccDefinition.actions
); );
} }
} }
if ( cacheRegionDefinitions != null ) { if ( cacheRegionDefinitions != null ) {
for ( CacheRegionDefinition cacheRegionDefinition : cacheRegionDefinitions ) { for ( CacheRegionDefinition cacheRegionDefinition : cacheRegionDefinitions ) {
if ( cacheRegionDefinition.getRegionType() == CacheRegionDefinition.CacheRegionType.ENTITY ) { if ( cacheRegionDefinition.cacheType == CacheRegionDefinition.CacheType.ENTITY ) {
cfg.setCacheConcurrencyStrategy( cfg.setCacheConcurrencyStrategy(
cacheRegionDefinition.getRole(), cacheRegionDefinition.role,
cacheRegionDefinition.getUsage(), cacheRegionDefinition.usage,
cacheRegionDefinition.getRegion(), cacheRegionDefinition.region,
cacheRegionDefinition.isCacheLazy() cacheRegionDefinition.cacheLazy
); );
} }
else { else {
cfg.setCollectionCacheConcurrencyStrategy( cfg.setCollectionCacheConcurrencyStrategy(
cacheRegionDefinition.getRole(), cacheRegionDefinition.role,
cacheRegionDefinition.getUsage(), cacheRegionDefinition.usage,
cacheRegionDefinition.getRegion() cacheRegionDefinition.region
); );
} }
} }
@ -1116,14 +1167,28 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
//addInputStream has the responsibility to close the stream //addInputStream has the responsibility to close the stream
cfg.addInputStream( new BufferedInputStream( namedInputStream.getStream() ) ); cfg.addInputStream( new BufferedInputStream( namedInputStream.getStream() ) );
} }
catch (MappingException me) { catch ( InvalidMappingException e ) {
//try our best to give the file name // try our best to give the file name
if ( StringHelper.isEmpty( namedInputStream.getName() ) ) { if ( StringHelper.isNotEmpty( namedInputStream.getName() ) ) {
throw me; throw new InvalidMappingException(
"Error while parsing file: " + namedInputStream.getName(),
e.getType(),
e.getPath(),
e
);
} }
else { else {
throw e;
}
}
catch (MappingException me) {
// try our best to give the file name
if ( StringHelper.isNotEmpty( namedInputStream.getName() ) ) {
throw new MappingException("Error while parsing file: " + namedInputStream.getName(), me ); throw new MappingException("Error while parsing file: " + namedInputStream.getName(), me );
} }
else {
throw me;
}
} }
} }
for ( String packageName : metadataSources.packageNames ) { for ( String packageName : metadataSources.packageNames ) {
@ -1162,65 +1227,39 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
return "[PersistenceUnit: " + persistenceUnit.getName() + "] "; return "[PersistenceUnit: " + persistenceUnit.getName() + "] ";
} }
public static class ScanningContext { public static class CacheRegionDefinition {
private URL url; public static enum CacheType { ENTITY, COLLECTION }
private boolean detectClasses;
private boolean detectHbmFiles;
private boolean searchOrm;
public URL getUrl() { public final CacheType cacheType;
return url; public final String role;
} public final String usage;
public final String region;
public final boolean cacheLazy;
public void setUrl(URL url) { public CacheRegionDefinition(
this.url = url; CacheType cacheType,
} String role,
String usage,
public boolean isDetectClasses() { String region, boolean cacheLazy) {
return detectClasses; this.cacheType = cacheType;
} this.role = role;
this.usage = usage;
public void setDetectClasses(boolean detectClasses) { this.region = region;
this.detectClasses = detectClasses; this.cacheLazy = cacheLazy;
}
public boolean isDetectHbmFiles() {
return detectHbmFiles;
}
public void setDetectHbmFiles(boolean detectHbmFiles) {
this.detectHbmFiles = detectHbmFiles;
}
public boolean isSearchOrm() {
return searchOrm;
}
public void setSearchOrm(boolean searchOrm) {
this.searchOrm = searchOrm;
} }
} }
private static class ScanResult { public static class JaccDefinition {
private final List<String> managedClassNames = new ArrayList<String>(); public final String contextId;
private final List<String> packageNames = new ArrayList<String>(); public final String role;
private final List<NamedInputStream> hbmFiles = new ArrayList<NamedInputStream>(); public final String clazz;
private final List<String> mappingFiles = new ArrayList<String>(); public final String actions;
public List<String> getManagedClassNames() { public JaccDefinition(String contextId, String role, String clazz, String actions) {
return managedClassNames; this.contextId = contextId;
} this.role = role;
this.clazz = clazz;
public List<String> getPackageNames() { this.actions = actions;
return packageNames;
}
public List<NamedInputStream> getHbmFiles() {
return hbmFiles;
}
public List<String> getMappingFiles() {
return mappingFiles;
} }
} }
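A hedged illustration (the property value is invented) of how addCacheRegionDefinition() above tokenizes a class-cache setting into the new public-field CacheRegionDefinition: the value is split on ";, ", with usage first, then an optional region, then the lazy-property flag for entities.

import org.hibernate.jpa.boot.internal.EntityManagerFactoryBuilderImpl.CacheRegionDefinition;

public class CacheRegionDefinitionSketch {
	public static CacheRegionDefinition example() {
		// assumed setting: hibernate.ejb.classcache.com.acme.Item = "read-write, com.acme.Item.Cache, all"
		return new CacheRegionDefinition(
				CacheRegionDefinition.CacheType.ENTITY,
				"com.acme.Item",          // role, taken from the key after CLASS_CACHE_PREFIX
				"read-write",             // usage (first token)
				"com.acme.Item.Cache",    // region (second token, optional)
				true                      // cacheLazy: "all" keeps lazy properties cached
		);
	}
}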
View File
@ -0,0 +1,77 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.internal;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.hibernate.HibernateException;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.NamedInputStream;
/**
* An InputStreamAccess implementation based on a File reference
*
* @author Steve Ebersole
*/
public class FileInputStreamAccess implements InputStreamAccess {
private final String name;
private final File file;
public FileInputStreamAccess(String name, File file) {
this.name = name;
this.file = file;
if ( ! file.exists() ) {
throw new HibernateException( "File must exist : " + file.getAbsolutePath() );
}
}
@Override
public String getStreamName() {
return name;
}
@Override
public InputStream accessInputStream() {
try {
return new BufferedInputStream( new FileInputStream( file ) );
}
catch (FileNotFoundException e) {
// should never ever ever happen, but...
throw new ArchiveException(
"File believed to exist based on File.exists threw error when passed to FileInputStream ctor",
e
);
}
}
@Override
public NamedInputStream asNamedInputStream() {
return new NamedInputStream( getStreamName(), accessInputStream() );
}
}
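A minimal usage sketch under assumed inputs: obtaining and closing the stream behind the new InputStreamAccess contract. The file and the read step are placeholders.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;

import org.hibernate.jpa.boot.internal.FileInputStreamAccess;
import org.hibernate.jpa.boot.spi.InputStreamAccess;

public class FileInputStreamAccessSketch {
	public static void read(File mappingFile) throws IOException {
		// the name passed here is what getStreamName() later reports
		final InputStreamAccess access = new FileInputStreamAccess( mappingFile.getName(), mappingFile );
		final InputStream stream = access.accessInputStream();
		try {
			// consume the stream, e.g. hand it to a mapping binder
		}
		finally {
			stream.close();
		}
	}
}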
View File
@ -0,0 +1,73 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.internal;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
/**
* @author Steve Ebersole
*/
public class MappingFileDescriptorImpl implements MappingFileDescriptor {
private final String name;
private final InputStreamAccess streamAccess;
public MappingFileDescriptorImpl(String name, InputStreamAccess streamAccess) {
this.name = name;
this.streamAccess = streamAccess;
}
@Override
public String getName() {
return name;
}
@Override
public InputStreamAccess getStreamAccess() {
return streamAccess;
}
// @Override
// public boolean equals(Object o) {
// if ( this == o ) {
// return true;
// }
// if ( o == null || getClass() != o.getClass() ) {
// return false;
// }
//
// MappingFileDescriptorImpl that = (MappingFileDescriptorImpl) o;
//
// return name.equals( that.name )
// && streamAccess.getStreamName().equals( that.streamAccess.getStreamName() );
//
// }
//
// @Override
// public int hashCode() {
// int result = name.hashCode();
// result = 31 * result + streamAccess.getStreamName().hashCode();
// return result;
// }
}
View File
@ -0,0 +1,68 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.internal;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.PackageDescriptor;
/**
* @author Steve Ebersole
*/
public class PackageDescriptorImpl implements PackageDescriptor {
private final String name;
private final InputStreamAccess streamAccess;
public PackageDescriptorImpl(String name, InputStreamAccess streamAccess) {
this.name = name;
this.streamAccess = streamAccess;
}
@Override
public String getName() {
return name;
}
@Override
public InputStreamAccess getStreamAccess() {
return streamAccess;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
PackageDescriptorImpl that = (PackageDescriptorImpl) o;
return name.equals( that.name );
}
@Override
public int hashCode() {
return name.hashCode();
}
}
View File
@ -54,8 +54,8 @@ import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException; import org.xml.sax.SAXParseException;
import org.hibernate.jpa.AvailableSettings; import org.hibernate.jpa.AvailableSettings;
import org.hibernate.jpa.boot.archive.internal.ArchiveHelper;
import org.hibernate.jpa.internal.EntityManagerMessageLogger; import org.hibernate.jpa.internal.EntityManagerMessageLogger;
import org.hibernate.jpa.packaging.internal.JarVisitorFactory;
import org.hibernate.jpa.internal.util.ConfigurationHelper; import org.hibernate.jpa.internal.util.ConfigurationHelper;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl; import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
@ -121,7 +121,7 @@ public class PersistenceXmlParser {
final Element element = (Element) children.item( i ); final Element element = (Element) children.item( i );
final String tag = element.getTagName(); final String tag = element.getTagName();
if ( tag.equals( "persistence-unit" ) ) { if ( tag.equals( "persistence-unit" ) ) {
final URL puRootUrl = JarVisitorFactory.getJarURLFromURLEntry( xmlUrl, "/META-INF/persistence.xml" ); final URL puRootUrl = ArchiveHelper.getJarURLFromURLEntry( xmlUrl, "/META-INF/persistence.xml" );
ParsedPersistenceXmlDescriptor persistenceUnit = new ParsedPersistenceXmlDescriptor( puRootUrl ); ParsedPersistenceXmlDescriptor persistenceUnit = new ParsedPersistenceXmlDescriptor( puRootUrl );
bindPersistenceUnit( persistenceUnit, element ); bindPersistenceUnit( persistenceUnit, element );
@ -214,7 +214,7 @@ public class PersistenceXmlParser {
persistenceUnit.addMappingFiles( extractContent( element ) ); persistenceUnit.addMappingFiles( extractContent( element ) );
} }
else if ( tag.equals( "jar-file" ) ) { else if ( tag.equals( "jar-file" ) ) {
persistenceUnit.addJarFileUrl( JarVisitorFactory.getURLFromPath( extractContent( element ) ) ); persistenceUnit.addJarFileUrl( ArchiveHelper.getURLFromPath( extractContent( element ) ) );
} }
else if ( tag.equals( "exclude-unlisted-classes" ) ) { else if ( tag.equals( "exclude-unlisted-classes" ) ) {
persistenceUnit.setExcludeUnlistedClasses( true ); persistenceUnit.setExcludeUnlistedClasses( true );
View File
@ -0,0 +1,62 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.internal;
import java.io.InputStream;
import java.net.URL;
import org.hibernate.HibernateException;
import org.hibernate.jpa.boot.spi.InputStreamAccess;
import org.hibernate.jpa.boot.spi.NamedInputStream;
/**
* @author Steve Ebersole
*/
public class UrlInputStreamAccess implements InputStreamAccess {
private final URL url;
public UrlInputStreamAccess(URL url) {
this.url = url;
}
@Override
public String getStreamName() {
return url.toExternalForm();
}
@Override
public InputStream accessInputStream() {
try {
return url.openStream();
}
catch (Exception e) {
throw new HibernateException( "Could not open url stream : " + url.toExternalForm(), e );
}
}
@Override
public NamedInputStream asNamedInputStream() {
return new NamedInputStream( getStreamName(), accessInputStream() );
}
}
View File
@ -0,0 +1,67 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.internal;
import org.hibernate.jpa.boot.scan.spi.ScanOptions;
/**
* @author Steve Ebersole
*/
public class StandardScanOptions implements ScanOptions {
private final boolean detectClassesInRoot;
private final boolean detectClassesInNonRoot;
private final boolean detectHibernateMappingFiles;
public StandardScanOptions() {
this( "hbm,class", false );
}
public StandardScanOptions(String explicitDetectionSetting, boolean persistenceUnitExcludeUnlistedClassesValue) {
if ( explicitDetectionSetting == null ) {
detectHibernateMappingFiles = true;
detectClassesInRoot = ! persistenceUnitExcludeUnlistedClassesValue;
detectClassesInNonRoot = true;
}
else {
detectHibernateMappingFiles = explicitDetectionSetting.contains( "hbm" );
detectClassesInRoot = explicitDetectionSetting.contains( "class" );
detectClassesInNonRoot = detectClassesInRoot;
}
}
@Override
public boolean canDetectUnlistedClassesInRoot() {
return detectClassesInRoot;
}
@Override
public boolean canDetectUnlistedClassesInNonRoot() {
return detectClassesInNonRoot;
}
@Override
public boolean canDetectHibernateMappingFiles() {
return detectHibernateMappingFiles;
}
}
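A short sketch of how the AvailableSettings.AUTODETECTION value maps onto these options; the constructor only checks for the "hbm" and "class" tokens, everything else below is illustrative.

import org.hibernate.jpa.boot.scan.internal.StandardScanOptions;
import org.hibernate.jpa.boot.scan.spi.ScanOptions;

public class ScanOptionsSketch {
	public static void main(String[] args) {
		// default: detect both hbm.xml files and annotated classes
		ScanOptions everything = new StandardScanOptions();
		// explicit "hbm": only Hibernate mapping files are auto-detected
		ScanOptions hbmOnly = new StandardScanOptions( "hbm", false );
		// no explicit setting plus <exclude-unlisted-classes/>: classes in the root are not detected
		ScanOptions excludeUnlisted = new StandardScanOptions( null, true );
	}
}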
View File
@ -0,0 +1,40 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.internal;
import org.hibernate.jpa.boot.archive.internal.StandardArchiveDescriptorFactory;
import org.hibernate.jpa.boot.scan.spi.AbstractScannerImpl;
/**
* Standard implementation of the Scanner contract, supporting typical archive walking where the
* URLs being processed can be handled as normal files.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
public class StandardScanner extends AbstractScannerImpl {
public StandardScanner() {
super( StandardArchiveDescriptorFactory.INSTANCE );
}
}
View File
@ -0,0 +1,67 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntryHandler;
/**
* Base class for commonality between handling class file entries and handling package-info file entries.
*
* @author Steve Ebersole
*/
public abstract class AbstractJavaArtifactArchiveEntryHandler implements ArchiveEntryHandler {
private final ScanOptions scanOptions;
protected AbstractJavaArtifactArchiveEntryHandler(ScanOptions scanOptions) {
this.scanOptions = scanOptions;
}
/**
* Check to see if the incoming name (class/package name) is either:<ul>
* <li>explicitly listed in a {@code <class/>} entry within the {@code <persistence-unit/>}</li>
* <li>detectable according to the scan options (root vs. non-root archives)</li>
* </ul>
*
* @param context Information about the archive. Mainly whether it is the root of the PU
* @param name The class/package name
*
* @return {@code true} if the named class/package is either detectable or explicitly listed; {@code false}
* otherwise.
*/
protected boolean isListedOrDetectable(ArchiveContext context, String name) {
// IMPL NOTE : only make the isExplicitlyListed call when needed, since it can take time in a PU
// with lots of listed classes. The other conditions are simple boolean flag checks.
if ( context.isRootUrl() ) {
return scanOptions.canDetectUnlistedClassesInRoot() || isExplicitlyListed( context, name );
}
else {
return scanOptions.canDetectUnlistedClassesInNonRoot() || isExplicitlyListed( context, name );
}
}
private boolean isExplicitlyListed(ArchiveContext context, String name) {
return context.getPersistenceUnitDescriptor().getManagedClassNames().contains( name );
}
}
View File
@ -0,0 +1,302 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptor;
import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptorFactory;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntryHandler;
import org.hibernate.jpa.boot.internal.ClassDescriptorImpl;
import org.hibernate.jpa.boot.internal.MappingFileDescriptorImpl;
import org.hibernate.jpa.boot.internal.PackageDescriptorImpl;
import org.hibernate.jpa.boot.spi.ClassDescriptor;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
import org.hibernate.jpa.boot.spi.PackageDescriptor;
import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
/**
* @author Steve Ebersole
*/
public abstract class AbstractScannerImpl implements Scanner {
private final ArchiveDescriptorFactory archiveDescriptorFactory;
private final Map<URL, ArchiveDescriptorInfo> archiveDescriptorCache = new HashMap<URL, ArchiveDescriptorInfo>();
protected AbstractScannerImpl(ArchiveDescriptorFactory archiveDescriptorFactory) {
this.archiveDescriptorFactory = archiveDescriptorFactory;
}
@Override
public ScanResult scan(PersistenceUnitDescriptor persistenceUnit, ScanOptions scanOptions) {
final ResultCollector resultCollector = new ResultCollector( scanOptions );
if ( persistenceUnit.getJarFileUrls() != null ) {
for ( URL url : persistenceUnit.getJarFileUrls() ) {
final ArchiveDescriptor descriptor = buildArchiveDescriptor( url, false, scanOptions );
final ArchiveContext context = buildArchiveContext( persistenceUnit, false, resultCollector );
descriptor.visitArchive( context );
}
}
if ( persistenceUnit.getPersistenceUnitRootUrl() != null ) {
final ArchiveDescriptor descriptor = buildArchiveDescriptor( persistenceUnit.getPersistenceUnitRootUrl(), true, scanOptions );
final ArchiveContext context = buildArchiveContext( persistenceUnit, true, resultCollector ); // root archive: mark the context as root so unlisted-class detection honors the root setting
descriptor.visitArchive( context );
}
return ScanResultImpl.from( resultCollector );
}
private ArchiveContext buildArchiveContext(
PersistenceUnitDescriptor persistenceUnit,
boolean isRoot,
ArchiveEntryHandlers entryHandlers) {
return new ArchiveContextImpl( persistenceUnit, isRoot, entryHandlers );
}
protected static interface ArchiveEntryHandlers {
public ArchiveEntryHandler getClassFileHandler();
public ArchiveEntryHandler getPackageInfoHandler();
public ArchiveEntryHandler getFileHandler();
}
private ArchiveDescriptor buildArchiveDescriptor(URL url, boolean isRootUrl, ScanOptions scanOptions) {
final ArchiveDescriptor descriptor;
final ArchiveDescriptorInfo descriptorInfo = archiveDescriptorCache.get( url );
if ( descriptorInfo == null ) {
descriptor = archiveDescriptorFactory.buildArchiveDescriptor( url );
archiveDescriptorCache.put(
url,
new ArchiveDescriptorInfo( descriptor, isRootUrl, scanOptions )
);
}
else {
validateReuse( descriptorInfo, isRootUrl, scanOptions );
descriptor = descriptorInfo.archiveDescriptor;
}
return descriptor;
}
public static class ResultCollector
implements ArchiveEntryHandlers,
PackageInfoArchiveEntryHandler.Callback,
ClassFileArchiveEntryHandler.Callback,
NonClassFileArchiveEntryHandler.Callback {
private final ClassFileArchiveEntryHandler classFileHandler;
private final PackageInfoArchiveEntryHandler packageInfoHandler;
private final NonClassFileArchiveEntryHandler fileHandler;
private final Set<PackageDescriptor> packageDescriptorSet = new HashSet<PackageDescriptor>();
private final Set<ClassDescriptor> classDescriptorSet = new HashSet<ClassDescriptor>();
private final Set<MappingFileDescriptor> mappingFileSet = new HashSet<MappingFileDescriptor>();
public ResultCollector(ScanOptions scanOptions) {
this.classFileHandler = new ClassFileArchiveEntryHandler( scanOptions, this );
this.packageInfoHandler = new PackageInfoArchiveEntryHandler( scanOptions, this );
this.fileHandler = new NonClassFileArchiveEntryHandler( scanOptions, this );
}
@Override
public ArchiveEntryHandler getClassFileHandler() {
return classFileHandler;
}
@Override
public ArchiveEntryHandler getPackageInfoHandler() {
return packageInfoHandler;
}
@Override
public ArchiveEntryHandler getFileHandler() {
return fileHandler;
}
@Override
public void locatedPackage(PackageDescriptor packageDescriptor) {
if ( PackageDescriptorImpl.class.isInstance( packageDescriptor ) ) {
packageDescriptorSet.add( packageDescriptor );
}
else {
// to make sure we have proper equals/hashcode
packageDescriptorSet.add(
new PackageDescriptorImpl(
packageDescriptor.getName(),
packageDescriptor.getStreamAccess()
)
);
}
}
@Override
public void locatedClass(ClassDescriptor classDescriptor) {
if ( ClassDescriptorImpl.class.isInstance( classDescriptor ) ) {
classDescriptorSet.add( classDescriptor );
}
else {
// to make sure we have proper equals/hashcode
classDescriptorSet.add(
new ClassDescriptorImpl(
classDescriptor.getName(),
classDescriptor.getStreamAccess()
)
);
}
}
@Override
public void locatedMappingFile(MappingFileDescriptor mappingFileDescriptor) {
if ( MappingFileDescriptorImpl.class.isInstance( mappingFileDescriptor ) ) {
mappingFileSet.add( mappingFileDescriptor );
}
else {
// to make sure we have proper equals/hashcode
mappingFileSet.add(
new MappingFileDescriptorImpl(
mappingFileDescriptor.getName(),
mappingFileDescriptor.getStreamAccess()
)
);
}
}
public Set<PackageDescriptor> getPackageDescriptorSet() {
return packageDescriptorSet;
}
public Set<ClassDescriptor> getClassDescriptorSet() {
return classDescriptorSet;
}
public Set<MappingFileDescriptor> getMappingFileSet() {
return mappingFileSet;
}
}
private static class ArchiveDescriptorInfo {
private final ArchiveDescriptor archiveDescriptor;
private final boolean isRoot;
private final ScanOptions scanOptions;
private ArchiveDescriptorInfo(
ArchiveDescriptor archiveDescriptor,
boolean isRoot,
ScanOptions scanOptions) {
this.archiveDescriptor = archiveDescriptor;
this.isRoot = isRoot;
this.scanOptions = scanOptions;
}
}
protected void validateReuse(ArchiveDescriptorInfo descriptor, boolean root, ScanOptions options) {
// is it really reasonable that a single url be processed multiple times?
// for now, throw an exception, mainly because I am interested in situations where this might happen
throw new IllegalStateException( "ArchiveDescriptor reused; can URLs be processed multiple times?" );
}
public static class ArchiveContextImpl implements ArchiveContext {
private final PersistenceUnitDescriptor persistenceUnitDescriptor;
private final boolean isRootUrl;
private final ArchiveEntryHandlers entryHandlers;
public ArchiveContextImpl(
PersistenceUnitDescriptor persistenceUnitDescriptor,
boolean isRootUrl,
ArchiveEntryHandlers entryHandlers) {
this.persistenceUnitDescriptor = persistenceUnitDescriptor;
this.isRootUrl = isRootUrl;
this.entryHandlers = entryHandlers;
}
@Override
public PersistenceUnitDescriptor getPersistenceUnitDescriptor() {
return persistenceUnitDescriptor;
}
@Override
public boolean isRootUrl() {
return isRootUrl;
}
@Override
public ArchiveEntryHandler obtainArchiveEntryHandler(ArchiveEntry entry) {
final String nameWithinArchive = entry.getNameWithinArchive();
if ( nameWithinArchive.endsWith( "package-info.class" ) ) {
return entryHandlers.getPackageInfoHandler();
}
else if ( nameWithinArchive.endsWith( ".class" ) ) {
return entryHandlers.getClassFileHandler();
}
else {
return entryHandlers.getFileHandler();
}
}
}
private static class ScanResultImpl implements ScanResult {
private final Set<PackageDescriptor> packageDescriptorSet;
private final Set<ClassDescriptor> classDescriptorSet;
private final Set<MappingFileDescriptor> mappingFileSet;
private ScanResultImpl(
Set<PackageDescriptor> packageDescriptorSet,
Set<ClassDescriptor> classDescriptorSet,
Set<MappingFileDescriptor> mappingFileSet) {
this.packageDescriptorSet = packageDescriptorSet;
this.classDescriptorSet = classDescriptorSet;
this.mappingFileSet = mappingFileSet;
}
private static ScanResult from(ResultCollector resultCollector) {
return new ScanResultImpl(
Collections.unmodifiableSet( resultCollector.packageDescriptorSet ),
Collections.unmodifiableSet( resultCollector.classDescriptorSet ),
Collections.unmodifiableSet( resultCollector.mappingFileSet )
);
}
@Override
public Set<PackageDescriptor> getLocatedPackages() {
return packageDescriptorSet;
}
@Override
public Set<ClassDescriptor> getLocatedClasses() {
return classDescriptorSet;
}
@Override
public Set<MappingFileDescriptor> getLocatedMappingFiles() {
return mappingFileSet;
}
}
}
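Taken together, the pieces above sketch the scanning flow: a ResultCollector supplies the three entry handlers, an ArchiveContextImpl routes each archive entry to the right handler, and ScanResultImpl snapshots what the collector gathered. The following is only an illustrative sketch of how a scanner implementation might drive them; the visitArchive( ArchiveContext ) call and the getPersistenceUnitRootUrl() accessor are assumptions based on the surrounding SPI, not code shown in this diff.

public ScanResult scan(PersistenceUnitDescriptor persistenceUnit, ScanOptions options) {
	final ResultCollector resultCollector = new ResultCollector( options );

	// visit the persistence unit root; each entry is dispatched via obtainArchiveEntryHandler(..)
	final ArchiveContext rootContext = new ArchiveContextImpl( persistenceUnit, true, resultCollector );
	buildArchiveDescriptor( persistenceUnit.getPersistenceUnitRootUrl(), true, options )
			.visitArchive( rootContext );

	// any <jar-file> urls would be visited the same way, with isRootUrl = false

	return ScanResultImpl.from( resultCollector );
}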

View File

@ -0,0 +1,125 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import javax.persistence.Converter;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MappedSuperclass;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveException;
import org.hibernate.jpa.boot.internal.ClassDescriptorImpl;
import org.hibernate.jpa.boot.spi.ClassDescriptor;
/**
* Defines handling and filtering for class file entries within an archive
*
* @author Steve Ebersole
*/
public class ClassFileArchiveEntryHandler extends AbstractJavaArtifactArchiveEntryHandler {
private final Callback callback;
/**
* Contract for the thing interested in being notified about accepted class descriptors.
*/
public static interface Callback {
public void locatedClass(ClassDescriptor classDescriptor);
}
public ClassFileArchiveEntryHandler(ScanOptions scanOptions, Callback callback) {
super( scanOptions );
this.callback = callback;
}
@Override
public void handleEntry(ArchiveEntry entry, ArchiveContext context) {
final ClassFile classFile = toClassFile( entry );
final ClassDescriptor classDescriptor = toClassDescriptor( classFile, entry );
if ( ! isListedOrDetectable( context, classDescriptor.getName() ) ) {
return;
}
// we are only interested in classes with certain annotations, so see if the ClassDescriptor
// represents a class which contains any of those annotations
if ( ! containsClassAnnotationsOfInterest( classFile ) ) {
return;
}
notifyMatchedClass( classDescriptor );
}
private ClassFile toClassFile(ArchiveEntry entry) {
final InputStream inputStream = entry.getStreamAccess().accessInputStream();
final DataInputStream dataInputStream = new DataInputStream( inputStream );
try {
return new ClassFile( dataInputStream );
}
catch (IOException e) {
throw new ArchiveException( "Could not build ClassFile" );
}
finally {
try {
dataInputStream.close();
}
catch (Exception ignore) {
}
try {
inputStream.close();
}
catch (IOException ignore) {
}
}
}
@SuppressWarnings("SimplifiableIfStatement")
private boolean containsClassAnnotationsOfInterest(ClassFile cf) {
final AnnotationsAttribute visibleAnnotations = (AnnotationsAttribute) cf.getAttribute( AnnotationsAttribute.visibleTag );
if ( visibleAnnotations == null ) {
return false;
}
return visibleAnnotations.getAnnotation( Entity.class.getName() ) != null
|| visibleAnnotations.getAnnotation( MappedSuperclass.class.getName() ) != null
|| visibleAnnotations.getAnnotation( Embeddable.class.getName() ) != null
|| visibleAnnotations.getAnnotation( Converter.class.getName() ) != null;
}
protected ClassDescriptor toClassDescriptor(ClassFile classFile, ArchiveEntry entry) {
return new ClassDescriptorImpl( classFile.getName(), entry.getStreamAccess() );
}
protected final void notifyMatchedClass(ClassDescriptor classDescriptor) {
callback.locatedClass( classDescriptor );
}
}
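To make the flow above concrete, a hypothetical walkthrough of handleEntry(..) for a single entry:

// entry "com/acme/Order.class" is parsed into a Javassist ClassFile named "com.acme.Order"
//   -> skipped unless the class is explicitly listed or unlisted detection is allowed for this archive
//   -> skipped unless it carries @Entity, @MappedSuperclass, @Embeddable or @Converter
//   -> otherwise the Callback is notified with the resulting ClassDescriptorImpl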

View File

@ -0,0 +1,87 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntryHandler;
import org.hibernate.jpa.boot.internal.MappingFileDescriptorImpl;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
/**
 * Defines handling and filtering for all non-class-file entries within an archive (note that package-info is technically also a class file)
*
* @author Steve Ebersole
*/
public class NonClassFileArchiveEntryHandler implements ArchiveEntryHandler {
private final ScanOptions scanOptions;
private final Callback callback;
/**
* Contract for the thing interested in being notified about accepted mapping file descriptors.
*/
public static interface Callback {
public void locatedMappingFile(MappingFileDescriptor mappingFileDescriptor);
}
public NonClassFileArchiveEntryHandler(ScanOptions scanOptions, Callback callback) {
this.scanOptions = scanOptions;
this.callback = callback;
}
@Override
public void handleEntry(ArchiveEntry entry, ArchiveContext context) {
if ( acceptAsMappingFile( entry, context) ) {
notifyMatchedMappingFile( entry );
}
}
@SuppressWarnings("SimplifiableIfStatement")
private boolean acceptAsMappingFile(ArchiveEntry entry, ArchiveContext context) {
if ( entry.getName().endsWith( "hbm.xml" ) ) {
return scanOptions.canDetectHibernateMappingFiles();
}
// todo : should really do this case-insensitively
if ( entry.getName().endsWith( "META-INF/orm.xml" ) ) {
if ( context.getPersistenceUnitDescriptor().getMappingFileNames().contains( "META-INF/orm.xml" ) ) {
// if the user explicitly listed META-INF/orm.xml, only accept the root one
//
// not sure why exactly, but this is what the old code does
return context.isRootUrl();
}
return true;
}
return context.getPersistenceUnitDescriptor().getMappingFileNames().contains( entry.getNameWithinArchive() );
}
protected final void notifyMatchedMappingFile(ArchiveEntry entry) {
callback.locatedMappingFile( toMappingFileDescriptor( entry ) );
}
protected MappingFileDescriptor toMappingFileDescriptor(ArchiveEntry entry) {
return new MappingFileDescriptorImpl( entry.getNameWithinArchive(), entry.getStreamAccess() );
}
}
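For illustration, how acceptAsMappingFile(..) treats a few hypothetical entry names:

// "com/acme/Order.hbm.xml"  -> accepted only when scanOptions.canDetectHibernateMappingFiles() is true
// "META-INF/orm.xml"        -> accepted; if the unit explicitly lists META-INF/orm.xml, only the root
//                              archive's copy is accepted
// "com/acme/custom-orm.xml" -> accepted only when listed among the unit's mapping file names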

View File

@ -0,0 +1,79 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import org.hibernate.jpa.boot.archive.spi.ArchiveContext;
import org.hibernate.jpa.boot.archive.spi.ArchiveEntry;
import org.hibernate.jpa.boot.internal.PackageDescriptorImpl;
import org.hibernate.jpa.boot.spi.PackageDescriptor;
import static java.io.File.separatorChar;
/**
* Defines handling and filtering for package-info file entries within an archive
*
* @author Steve Ebersole
*/
public class PackageInfoArchiveEntryHandler extends AbstractJavaArtifactArchiveEntryHandler {
private final Callback callback;
/**
* Contract for the thing interested in being notified about accepted package-info descriptors.
*/
public static interface Callback {
public void locatedPackage(PackageDescriptor packageDescriptor);
}
public PackageInfoArchiveEntryHandler(ScanOptions scanOptions, Callback callback) {
super( scanOptions );
this.callback = callback;
}
@Override
public void handleEntry(ArchiveEntry entry, ArchiveContext context) {
if ( entry.getNameWithinArchive().equals( "package-info.class" ) ) {
// the old code skipped package-info in the root package/dir...
return;
}
if ( ! isListedOrDetectable( context, entry.getName() ) ) {
// the package is not explicitly listed, and we are not allowed to detect it.
return;
}
notifyMatchedPackage( toPackageDescriptor( entry ) );
}
protected PackageDescriptor toPackageDescriptor(ArchiveEntry entry) {
final String packageInfoFilePath = entry.getNameWithinArchive();
final String packageName = packageInfoFilePath.substring( 0, packageInfoFilePath.lastIndexOf( '/' ) )
.replace( separatorChar, '.' );
return new PackageDescriptorImpl( packageName, entry.getStreamAccess() );
}
protected final void notifyMatchedPackage(PackageDescriptor packageDescriptor) {
callback.locatedPackage( packageDescriptor );
}
}
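A worked example of toPackageDescriptor(..), assuming '/' is the in-archive separator:

// entry.getNameWithinArchive()  = "org/hibernate/example/package-info.class"
// substring up to the last '/'  -> "org/hibernate/example"
// separators replaced with '.'  -> "org.hibernate.example" (the package name passed to PackageDescriptorImpl)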

View File

Removed (org.hibernate.jpa.packaging.internal.PackageFilter); the license header changes from the 2009 Red Hat Middleware LLC form to the standard 2013 Red Hat Inc. header used by the new files above:

package org.hibernate.jpa.packaging.internal;

/**
 * Filter on package element
 *
 * @author Emmanuel Bernard
 * @see JavaElementFilter
 */
public abstract class PackageFilter extends JavaElementFilter {
	/**
	 * @see JavaElementFilter#JavaElementFilter(boolean, Class[])
	 */
	protected PackageFilter(boolean retrieveStream, Class[] annotations) {
		super( retrieveStream, annotations );
	}
}

Added (org.hibernate.jpa.boot.scan.spi.ScanOptions):

package org.hibernate.jpa.boot.scan.spi;

/**
 * @author Steve Ebersole
 */
public interface ScanOptions {
	public boolean canDetectUnlistedClassesInRoot();
	public boolean canDetectUnlistedClassesInNonRoot();

	public boolean canDetectHibernateMappingFiles();
}
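A ScanOptions implementation only needs to carry three flags. The class below is a hypothetical sketch (its name, package placement and constructor are not part of this diff):

package org.hibernate.jpa.boot.scan.spi;

public class SimpleScanOptions implements ScanOptions {
	private final boolean detectClassesInRoot;
	private final boolean detectClassesInNonRoot;
	private final boolean detectHibernateMappingFiles;

	public SimpleScanOptions(
			boolean detectClassesInRoot,
			boolean detectClassesInNonRoot,
			boolean detectHibernateMappingFiles) {
		this.detectClassesInRoot = detectClassesInRoot;
		this.detectClassesInNonRoot = detectClassesInNonRoot;
		this.detectHibernateMappingFiles = detectHibernateMappingFiles;
	}

	@Override
	public boolean canDetectUnlistedClassesInRoot() {
		return detectClassesInRoot;
	}

	@Override
	public boolean canDetectUnlistedClassesInNonRoot() {
		return detectClassesInNonRoot;
	}

	@Override
	public boolean canDetectHibernateMappingFiles() {
		return detectHibernateMappingFiles;
	}
}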

View File

@ -0,0 +1,41 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.scan.spi;
import java.util.Set;
import org.hibernate.jpa.boot.spi.ClassDescriptor;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
import org.hibernate.jpa.boot.spi.PackageDescriptor;
/**
* Defines the result of scanning
*
* @author Steve Ebersole
*/
public interface ScanResult {
public Set<PackageDescriptor> getLocatedPackages();
public Set<ClassDescriptor> getLocatedClasses();
public Set<MappingFileDescriptor> getLocatedMappingFiles();
}

View File

Removed (org.hibernate.jpa.packaging.internal.JarVisitor); the license header changes from the 2009 Red Hat Middleware LLC form to the 2012 Red Hat Inc. header:

package org.hibernate.jpa.packaging.internal;

import java.io.IOException;
import java.util.Set;

/**
 * @author Emmanuel Bernard
 */
public interface JarVisitor {
	/**
	 * Get the unqualified Jar name (ie wo path and wo extension)
	 *
	 * @return the unqualified jar name.
	 */
	String getUnqualifiedJarName();

	Filter[] getFilters();

	/**
	 * Return the matching entries for each filter, in the same order the filters were passed
	 *
	 * @return array of Set of JarVisitor.Entry
	 * @throws java.io.IOException if something went wrong
	 */
	Set[] getMatchingEntries() throws IOException;
}

Added (org.hibernate.jpa.boot.scan.spi.Scanner):

package org.hibernate.jpa.boot.scan.spi;

import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;

/**
 * Defines the contract for Hibernate to be able to scan for classes, packages and resources inside a
 * persistence unit.
 *
 * @author Emmanuel Bernard
 * @author Steve Ebersole
 */
public interface Scanner {
	/**
	 * Perform the scanning against the described persistence unit using the defined options, and return the scan
	 * results.
	 *
	 * @param persistenceUnit The description of the persistence unit.
	 * @param options The scan options
	 *
	 * @return The scan results.
	 */
	public ScanResult scan(PersistenceUnitDescriptor persistenceUnit, ScanOptions options);
}
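Usage, in outline; every concrete name here is a placeholder, since the diff only defines the interfaces (SimpleScanOptions is the sketch shown earlier):

public void collectManagedClasses(Scanner scanner, PersistenceUnitDescriptor persistenceUnit) {
	// detect unlisted classes in the root archive only, and pick up hbm.xml files
	ScanOptions options = new SimpleScanOptions( true, false, true );
	ScanResult result = scanner.scan( persistenceUnit, options );
	for ( ClassDescriptor descriptor : result.getLocatedClasses() ) {
		// hand each located class (descriptor.getName()) to the annotation binder
	}
}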

View File

@ -0,0 +1,34 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.spi;
/**
 * Describes a class discovered while scanning a persistence unit.
*
* @author Steve Ebersole
*/
public interface ClassDescriptor {
public String getName();
public InputStreamAccess getStreamAccess();
}

View File

@ -0,0 +1,52 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.spi;
import java.io.InputStream;
/**
* Contract for building InputStreams, especially in on-demand situations
*
* @author Steve Ebersole
*/
public interface InputStreamAccess {
/**
* Get the name of the resource backing the stream
*
* @return The backing resource name
*/
public String getStreamName();
/**
 * Get access to the stream. Can be called multiple times; a different stream instance should be returned each time.
*
* @return The stream
*/
public InputStream accessInputStream();
/**
* @deprecated Needed until we can remove NamedInputStream
*/
public NamedInputStream asNamedInputStream();
}
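For illustration, a hypothetical URL-backed implementation of this contract (not part of the commit); note that accessInputStream() opens a fresh stream on every call, as the javadoc requires:

package org.hibernate.jpa.boot.spi;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

public class UrlInputStreamAccess implements InputStreamAccess {
	private final URL url;

	public UrlInputStreamAccess(URL url) {
		this.url = url;
	}

	@Override
	public String getStreamName() {
		return url.toExternalForm();
	}

	@Override
	public InputStream accessInputStream() {
		try {
			// a new stream instance per call, per the contract above
			return url.openStream();
		}
		catch (IOException e) {
			throw new RuntimeException( "Could not open stream for " + url, e );
		}
	}

	@Override
	public NamedInputStream asNamedInputStream() {
		return new NamedInputStream( getStreamName(), accessInputStream() );
	}
}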

View File

@ -0,0 +1,32 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.spi;
/**
* @author Steve Ebersole
*/
public interface MappingFileDescriptor {
public String getName();
public InputStreamAccess getStreamAccess();
}

View File

Old (org.hibernate.jpa.packaging.spi):

package org.hibernate.jpa.packaging.spi;

import java.io.InputStream;

/**
 * @author Emmanuel Bernard
 */
public class NamedInputStream {
	public NamedInputStream(String name, InputStream stream) {
		this.name = name;
		this.stream = stream;
	}

	private String name;
	private InputStream stream;

	public InputStream getStream() {
		return stream;
	}

	public void setStream(InputStream stream) {
		this.stream = stream;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}
}

New (org.hibernate.jpa.boot.spi, now immutable and deprecated):

package org.hibernate.jpa.boot.spi;

import java.io.InputStream;

/**
 * Bundles together a stream and the name that was used to locate it. The name is often useful for logging.
 *
 * @deprecated Use {@link org.hibernate.jpa.boot.spi.InputStreamAccess} instead.
 *
 * @author Emmanuel Bernard
 * @author Steve Ebersole
 */
@Deprecated
public class NamedInputStream {
	private final String name;
	private final InputStream stream;

	public NamedInputStream(String name, InputStream stream) {
		this.name = name;
		this.stream = stream;
	}

	public InputStream getStream() {
		return stream;
	}

	public String getName() {
		return name;
	}
}

View File

@ -0,0 +1,34 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.boot.spi;
/**
 * Describes a package (via its package-info) discovered while scanning a persistence unit.
*
* @author Steve Ebersole
*/
public interface PackageDescriptor {
public String getName();
public InputStreamAccess getStreamAccess();
}

View File

@ -1,262 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import org.jboss.logging.Logger;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* Parse a JAR of any form (zip file, exploded directory, ...)
* apply a set of filters (File filter, Class filter, Package filter)
* and return the appropriate matching sets of elements
*
* @author Emmanuel Bernard
*/
public abstract class AbstractJarVisitor implements JarVisitor {
//TODO shortcut when filters are null or empty
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(EntityManagerMessageLogger.class,
AbstractJarVisitor.class.getName());
protected String unqualifiedJarName;
protected URL jarUrl;
protected boolean done = false;
private List<Filter> filters = new ArrayList<Filter>();
private Set<FileFilter> fileFilters = new HashSet<FileFilter>();
private Set<JavaElementFilter> classFilters = new HashSet<JavaElementFilter>();
private Set<JavaElementFilter> packageFilters = new HashSet<JavaElementFilter>();
private Set[] entries;
/**
* Build a jar visitor from its jar string path
*/
private AbstractJarVisitor(String jarPath) {
this.jarUrl = JarVisitorFactory.getURLFromPath( jarPath );
unqualify();
}
protected AbstractJarVisitor(String fileName, Filter[] filters) {
this( fileName );
initFilters( filters );
}
private void initFilters(Filter[] filters) {
for ( Filter filter : filters ) {
if ( filter instanceof FileFilter ) {
fileFilters.add( (FileFilter) filter );
}
else if ( filter instanceof ClassFilter ) {
classFilters.add( (ClassFilter) filter );
}
else if ( filter instanceof PackageFilter ) {
packageFilters.add( (PackageFilter) filter );
}
else {
throw new AssertionError( "Unknown filter type: " + filter.getClass().getName() );
}
this.filters.add( filter );
}
int size = this.filters.size();
this.entries = new Set[ size ];
for ( int index = 0; index < size ; index++ ) {
this.entries[index] = new HashSet<Entry>();
}
}
protected AbstractJarVisitor(URL url, Filter[] filters) {
this( url );
initFilters( filters );
}
private AbstractJarVisitor(URL url) {
jarUrl = url;
unqualify();
}
protected void unqualify() {
//FIXME weak algorithm subject to AOOBE
String fileName = jarUrl.getFile();
int exclamation = fileName.lastIndexOf( "!" );
if (exclamation != -1) fileName = fileName.substring( 0, exclamation );
int slash = fileName.lastIndexOf( "/" );
if ( slash != -1 ) {
fileName = fileName.substring(
fileName.lastIndexOf( "/" ) + 1,
fileName.length()
);
}
if ( fileName.length() > 4 && fileName.endsWith( "ar" ) && fileName.charAt( fileName.length() - 4 ) == '.' ) {
fileName = fileName.substring( 0, fileName.length() - 4 );
}
unqualifiedJarName = fileName;
LOG.debugf("Searching mapped entities in jar/par: %s", jarUrl);
}
/**
* Get the unqualified Jar name (ie wo path and wo extension)
*/
public String getUnqualifiedJarName() {
return unqualifiedJarName;
}
public Filter[] getFilters() {
return filters.toArray( new Filter[ filters.size() ] );
}
/**
* Return the matching entries for each filter in the same order the filter where passed
*
* @return array of Set of JarVisitor.Entry
* @throws IOException if something went wrong
*/
public Set[] getMatchingEntries() throws IOException {
if ( !done ) {
//avoid url access and so on
if ( filters.size() > 0 ) doProcessElements();
done = true;
}
return entries;
}
protected abstract void doProcessElements() throws IOException;
//TODO avoid 2 input stream when not needed
protected final void addElement(String entryName, InputStream is, InputStream secondIs) throws IOException {
int entryNameLength = entryName.length();
if ( entryName.endsWith( "package-info.class" ) ) {
String name;
if ( entryNameLength == "package-info.class".length() ) {
name = "";
}
else {
name = entryName.substring( 0, entryNameLength - ".package-info.class".length() ).replace( '/', '.' );
}
executeJavaElementFilter( name, packageFilters, is, secondIs );
}
else if ( entryName.endsWith( ".class" ) ) {
String name = entryName.substring( 0, entryNameLength - ".class".length() ).replace( '/', '.' );
LOG.debugf("Filtering: %s", name);
executeJavaElementFilter( name, classFilters, is, secondIs );
}
else {
String name = entryName;
boolean accepted = false;
for ( FileFilter filter : fileFilters ) {
if ( filter.accept( name ) ) {
accepted = true;
InputStream localIs;
if ( filter.getStream() ) {
localIs = secondIs;
}
else {
localIs = null;
secondIs.close();
}
is.close();
LOG.debugf("File Filter matched for %s", name);
Entry entry = new Entry( name, localIs );
int index = this.filters.indexOf( filter );
this.entries[index].add( entry );
}
}
if (!accepted) {
//not accepted free resources
is.close();
secondIs.close();
}
}
}
private void executeJavaElementFilter(
String name, Set<JavaElementFilter> filters, InputStream is, InputStream secondIs
) throws IOException {
boolean accepted = false;
for ( JavaElementFilter filter : filters ) {
if ( filter.accept( name ) ) {
//FIXME cannot currently have a class filtered twice but matching once
// need to copy the is
boolean match = checkAnnotationMatching( is, filter );
if ( match ) {
accepted = true;
InputStream localIs;
if ( filter.getStream() ) {
localIs = secondIs;
}
else {
localIs = null;
secondIs.close();
}
LOG.debugf("Java element filter matched for %s", name);
Entry entry = new Entry( name, localIs );
int index = this.filters.indexOf( filter );
this.entries[index].add( entry );
break; //we matched
}
}
}
if (!accepted) {
is.close();
secondIs.close();
}
}
private boolean checkAnnotationMatching(InputStream is, JavaElementFilter filter) throws IOException {
if ( filter.getAnnotations().length == 0 ) {
is.close();
return true;
}
DataInputStream dstream = new DataInputStream( is );
ClassFile cf = null;
try {
cf = new ClassFile( dstream );
}
finally {
dstream.close();
is.close();
}
boolean match = false;
AnnotationsAttribute visible = (AnnotationsAttribute) cf.getAttribute( AnnotationsAttribute.visibleTag );
if ( visible != null ) {
for ( Class annotation : filter.getAnnotations() ) {
match = visible.getAnnotation( annotation.getName() ) != null;
if ( match ) break;
}
}
return match;
}
}

View File

@ -1,138 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Enumeration;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* @author Emmanuel Bernard
*/
public class ExplodedJarVisitor extends AbstractJarVisitor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(EntityManagerMessageLogger.class,
ExplodedJarVisitor.class.getName());
private String entry;
public ExplodedJarVisitor(URL url, Filter[] filters, String entry) {
super( url, filters );
this.entry = entry;
}
public ExplodedJarVisitor(String fileName, Filter[] filters) {
super( fileName, filters );
}
@Override
protected void doProcessElements() throws IOException {
File jarFile;
try {
String filePart = jarUrl.getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
//unescaped (from the container), keep as is
jarFile = new File( jarUrl.getFile() );
}
else {
jarFile = new File( jarUrl.toURI().getSchemeSpecificPart() );
}
}
catch (URISyntaxException e) {
LOG.malformedUrl(jarUrl, e);
return;
}
if ( !jarFile.exists() ) {
LOG.explodedJarDoesNotExist(jarUrl);
return;
}
if ( !jarFile.isDirectory() ) {
LOG.explodedJarNotDirectory(jarUrl);
return;
}
File rootFile;
if (entry != null && entry.length() > 0 && ! "/".equals( entry ) ) {
rootFile = new File(jarFile, entry);
}
else {
rootFile = jarFile;
}
if ( rootFile.isDirectory() ) {
getClassNamesInTree( rootFile, null );
}
else {
//assume zipped file
processZippedRoot(rootFile);
}
}
//FIXME shameful copy of FileZippedJarVisitor.doProcess()
//TODO long term fix is to introduce a process interface (closure like) to addElements and then share the code
private void processZippedRoot(File rootFile) throws IOException {
JarFile jarFile = new JarFile(rootFile);
Enumeration<? extends ZipEntry> entries = jarFile.entries();
while ( entries.hasMoreElements() ) {
ZipEntry zipEntry = entries.nextElement();
String name = zipEntry.getName();
if ( !zipEntry.isDirectory() ) {
//build relative name
if ( name.startsWith( "/" ) ) name = name.substring( 1 );
addElement(
name,
new BufferedInputStream( jarFile.getInputStream( zipEntry ) ),
new BufferedInputStream( jarFile.getInputStream( zipEntry ) )
);
}
}
}
private void getClassNamesInTree(File jarFile, String header) throws IOException {
File[] files = jarFile.listFiles();
header = header == null ? "" : header + "/";
for ( File localFile : files ) {
if ( !localFile.isDirectory() ) {
String entryName = localFile.getName();
addElement(
header + entryName,
new BufferedInputStream( new FileInputStream( localFile ) ),
new BufferedInputStream( new FileInputStream( localFile ) )
);
}
else {
getClassNamesInTree( localFile, header + localFile.getName() );
}
}
}
}

View File

@ -1,133 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Enumeration;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* Work on a JAR that can be accessed through a File
*
* @author Emmanuel Bernard
*/
public class FileZippedJarVisitor extends AbstractJarVisitor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(EntityManagerMessageLogger.class,
FileZippedJarVisitor.class.getName());
private String entry;
public FileZippedJarVisitor(String fileName, Filter[] filters) {
super( fileName, filters );
}
public FileZippedJarVisitor(URL url, Filter[] filters, String entry) {
super( url, filters );
this.entry = entry;
}
@Override
protected void doProcessElements() throws IOException {
JarFile jarFile;
try {
String filePart = jarUrl.getFile();
if ( filePart != null && filePart.indexOf( ' ' ) != -1 ) {
//unescaped (from the container), keep as is
jarFile = new JarFile( jarUrl.getFile() );
}
else {
jarFile = new JarFile( jarUrl.toURI().getSchemeSpecificPart() );
}
}
catch (IOException ze) {
LOG.unableToFindFile(jarUrl, ze);
return;
}
catch (URISyntaxException e) {
LOG.malformedUrlWarning(jarUrl, e);
return;
}
if ( entry != null && entry.length() == 1 ) entry = null; //no entry
if ( entry != null && entry.startsWith( "/" ) ) entry = entry.substring( 1 ); //remove '/' header
Enumeration<? extends ZipEntry> entries = jarFile.entries();
while ( entries.hasMoreElements() ) {
ZipEntry zipEntry = entries.nextElement();
String name = zipEntry.getName();
if ( entry != null && ! name.startsWith( entry ) ) continue; //filter it out
if ( !zipEntry.isDirectory() ) {
if ( name.equals( entry ) ) {
//exact match, might be a nested jar entry (ie from jar:file:..../foo.ear!/bar.jar)
/*
* This algorithm assumes that the zipped file is only the URL root (including entry), not just any random entry
*/
InputStream is = null;
try {
is = new BufferedInputStream( jarFile.getInputStream( zipEntry ) );
JarInputStream jis = new JarInputStream( is );
ZipEntry subZipEntry = jis.getNextEntry();
while (subZipEntry != null) {
if ( ! subZipEntry.isDirectory() ) {
//FIXME copy sucks
byte[] entryBytes = JarVisitorFactory.getBytesFromInputStream( jis );
String subname = subZipEntry.getName();
if ( subname.startsWith( "/" ) ) subname = subname.substring( 1 );
addElement(
subname,
new ByteArrayInputStream(entryBytes),
new ByteArrayInputStream(entryBytes)
);
}
subZipEntry = jis.getNextEntry();
}
}
finally {
if ( is != null) is.close();
}
}
else {
//build relative name
if (entry != null) name = name.substring( entry.length() );
if ( name.startsWith( "/" ) ) name = name.substring( 1 );
addElement(
name,
new BufferedInputStream( jarFile.getInputStream( zipEntry ) ),
new BufferedInputStream( jarFile.getInputStream( zipEntry ) )
);
}
}
}
}
}

View File

@ -1,121 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URL;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import org.jboss.logging.Logger;
import org.hibernate.jpa.internal.EntityManagerMessageLogger;
/**
* Work on a JAR that can only be accessed through a inputstream
* This is less efficient than the {@link FileZippedJarVisitor}
*
* @author Emmanuel Bernard
*/
public class InputStreamZippedJarVisitor extends AbstractJarVisitor {
private static final EntityManagerMessageLogger LOG = Logger.getMessageLogger(EntityManagerMessageLogger.class,
InputStreamZippedJarVisitor.class.getName());
private String entry;
public InputStreamZippedJarVisitor(URL url, Filter[] filters, String entry) {
super( url, filters );
this.entry = entry;
}
public InputStreamZippedJarVisitor(String fileName, Filter[] filters) {
super( fileName, filters );
}
@Override
protected void doProcessElements() throws IOException {
JarInputStream jis;
try {
jis = new JarInputStream( jarUrl.openStream() );
}
catch (Exception ze) {
//really should catch IOException but Eclipse is buggy and raise NPE...
LOG.unableToFindFile(jarUrl, ze);
return;
}
if ( entry != null && entry.length() == 1 ) entry = null; //no entry
if ( entry != null && entry.startsWith( "/" ) ) entry = entry.substring( 1 ); //remove '/' header
JarEntry jarEntry;
while ( ( jarEntry = jis.getNextJarEntry() ) != null ) {
String name = jarEntry.getName();
if ( entry != null && ! name.startsWith( entry ) ) continue; //filter it out
if ( !jarEntry.isDirectory() ) {
if ( name.equals( entry ) ) {
//exact match, might be a nested jar entry (ie from jar:file:..../foo.ear!/bar.jar)
/*
* This algorithm assumes that the zipped file is only the URL root (including entry), not just any random entry
*/
JarInputStream subJis = null;
try {
subJis = new JarInputStream( jis );
ZipEntry subZipEntry = jis.getNextEntry();
while (subZipEntry != null) {
if ( ! subZipEntry.isDirectory() ) {
//FIXME copy sucks
byte[] entryBytes = JarVisitorFactory.getBytesFromInputStream( jis );
String subname = subZipEntry.getName();
if ( subname.startsWith( "/" ) ) subname = subname.substring( 1 );
addElement(
subname,
new ByteArrayInputStream(entryBytes),
new ByteArrayInputStream(entryBytes)
);
}
subZipEntry = jis.getNextJarEntry();
}
}
finally {
if (subJis != null) subJis.close();
}
}
else {
byte[] entryBytes = JarVisitorFactory.getBytesFromInputStream( jis );
//build relative name
if (entry != null) name = name.substring( entry.length() );
if ( name.startsWith( "/" ) ) name = name.substring( 1 );
//this is bad cause we actually read everything instead of walking it lazily
addElement(
name,
new ByteArrayInputStream( entryBytes ),
new ByteArrayInputStream( entryBytes )
);
}
}
}
jis.close();
}
}

View File

@ -1,73 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.IOException;
import java.net.URL;
import java.util.Set;
import org.hibernate.annotations.common.AssertionFailure;
/**
* @author Emmanuel Bernard
*/
public class JarProtocolVisitor implements JarVisitor {
private JarVisitor delegate;
private URL jarUrl;
private Filter[] filters;
public JarProtocolVisitor(URL url, Filter[] filters, String entry) {
this.jarUrl = url;
this.filters = filters;
if (entry != null && entry.length() > 0) throw new IllegalArgumentException( "jar:jar: not supported: " + jarUrl );
init();
}
private void init() {
String file = jarUrl.getFile();
String entry;
int subEntryIndex = file.lastIndexOf( "!" );
if (subEntryIndex == -1) throw new AssertionFailure("JAR URL does not contain '!/' :" + jarUrl);
if ( subEntryIndex + 1 >= file.length() ) {
entry = "";
}
else {
entry = file.substring( subEntryIndex + 1 );
}
URL fileUrl = JarVisitorFactory.getJarURLFromURLEntry( jarUrl, entry );
delegate = JarVisitorFactory.getVisitor( fileUrl, filters, entry );
}
public String getUnqualifiedJarName() {
return delegate.getUnqualifiedJarName();
}
public Filter[] getFilters() {
return delegate.getFilters();
}
public Set[] getMatchingEntries() throws IOException {
return delegate.getMatchingEntries();
}
}

View File

@ -1,52 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
/**
* Filter a Java element (class or package per fully qualified name and annotation existence)
* At least 1 annotation has to annotate the element and the accept method must match
* If none annotations are passed, only the accept method must pass.
*
* @author Emmanuel Bernard
*/
public abstract class JavaElementFilter extends Filter {
private Class[] annotations;
/**
* @param retrieveStream Give back an open stream to the matching element or not
* @param annotations Array of annotations that must be present to match (1 of them should annotate the element
*/
protected JavaElementFilter(boolean retrieveStream, Class[] annotations) {
super( retrieveStream );
this.annotations = annotations == null ? new Class[]{} : annotations;
}
public Class[] getAnnotations() {
return annotations;
}
/**
* Return true if the fully qualified name match
*/
public abstract boolean accept(String javaElementName);
}

View File

@ -1,248 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.internal;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.Converter;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MappedSuperclass;
import org.hibernate.AssertionFailure;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.jpa.packaging.spi.NamedInputStream;
import org.hibernate.jpa.packaging.spi.Scanner;
/**
* @author Emmanuel Bernard
*/
public class NativeScanner implements Scanner {
private static final String META_INF_ORM_XML = "META-INF/orm.xml";
private Map<URL, StateJarVisitor> visitors = new HashMap<URL, StateJarVisitor>();
private static final int PACKAGE_FILTER_INDEX = 0;
private static final int CLASS_FILTER_INDEX = 1;
private static final int FILE_FILTER_INDEX = 2;
/**
* This implementation does not honor the list of annotations and return everything.
* Must strictly be used by HEM
*/
public Set<Package> getPackagesInJar(URL jarToScan, Set<Class<? extends Annotation>> annotationsToLookFor) {
if ( annotationsToLookFor.size() > 0 ) {
throw new AssertionFailure( "Improper use of NativeScanner: must not filter packages" );
}
JarVisitor jarVisitor = getVisitor( jarToScan );
final Set<Entry> packageEntries;
try {
packageEntries = ( Set<Entry> ) jarVisitor.getMatchingEntries()[PACKAGE_FILTER_INDEX];
}
catch ( IOException e ) {
throw new RuntimeException( "Error while reading " + jarToScan.toString(), e );
}
Set<Package> packages = new HashSet<Package>( packageEntries.size() );
for ( Entry entry : packageEntries ) {
try {
packages.add( ReflectHelper.classForName( entry.getName() + ".package-info" ).getPackage() );
}
catch ( ClassNotFoundException e ) {
//should never happen, if it happens, simply ignore the flawed package
}
}
return packages;
}
/**
* Build a JarVisitor with some assumptions wrt the scanning
* This helps do one scan instead of several
*/
private JarVisitor getVisitor(URL jar) {
StateJarVisitor stateJarVisitor = visitors.get( jar );
if ( stateJarVisitor == null ) {
Filter[] filters = new Filter[3];
filters[PACKAGE_FILTER_INDEX] = new PackageFilter( false, null ) {
public boolean accept(String javaElementName) {
return true;
}
};
filters[CLASS_FILTER_INDEX] = new ClassFilter(
false, new Class[] {
Entity.class,
MappedSuperclass.class,
Embeddable.class
}
) {
public boolean accept(String javaElementName) {
return true;
}
};
filters[FILE_FILTER_INDEX] = new FileFilter( true ) {
public boolean accept(String javaElementName) {
return javaElementName.endsWith( "hbm.xml" )
|| javaElementName.endsWith( META_INF_ORM_XML );
}
};
stateJarVisitor = new StateJarVisitor( JarVisitorFactory.getVisitor( jar, filters ) );
visitors.put( jar, stateJarVisitor );
}
return stateJarVisitor.visitor;
}
public Set<Class<?>> getClassesInJar(URL jarToScan, Set<Class<? extends Annotation>> annotationsToLookFor) {
if ( isValidForClasses( annotationsToLookFor ) ) {
throw new AssertionFailure(
"Improper use of NativeScanner: "
+ "must not filter classes by other annotations than Entity, MappedSuperclass, embeddable"
);
}
JarVisitor jarVisitor = getVisitor( jarToScan );
final Set<Entry> classesEntry;
try {
classesEntry = ( Set<Entry> ) jarVisitor.getMatchingEntries()[CLASS_FILTER_INDEX];
}
catch ( IOException e ) {
throw new RuntimeException( "Error while reading " + jarToScan.toString(), e );
}
Set<Class<?>> classes = new HashSet<Class<?>>( classesEntry.size() );
for ( Entry entry : classesEntry ) {
try {
classes.add( ReflectHelper.classForName( entry.getName() ) );
}
catch ( ClassNotFoundException e ) {
//should never happen, if it happens, simply ignore the flawed package
}
}
return classes;
}
private boolean isValidForClasses(Set<Class<? extends Annotation>> annotationsToLookFor) {
return annotationsToLookFor.size() != 4
|| !annotationsToLookFor.contains( Entity.class )
|| !annotationsToLookFor.contains( MappedSuperclass.class )
|| !annotationsToLookFor.contains( Embeddable.class )
|| !annotationsToLookFor.contains( Converter.class );
}
/**
* support for patterns is primitive:
* - **\/*.hbm.xml
* Other patterns will not be found
*/
public Set<NamedInputStream> getFilesInJar(URL jarToScan, Set<String> filePatterns) {
StringBuilder sb = new StringBuilder("URL: ").append( jarToScan )
.append( "\n" );
for (String pattern : filePatterns) {
sb.append( " " ).append( pattern ).append( "\n" );
}
JarVisitor jarVisitor = getVisitor( jarToScan );
//state visitor available
final StateJarVisitor stateVisitor = visitors.get( jarToScan );
if ( stateVisitor.hasReadFiles ) {
throw new AssertionFailure( "Cannot read files twice on NativeScanner" );
}
stateVisitor.hasReadFiles = true;
Set<String> endWiths = new HashSet<String>();
Set<String> exacts = new HashSet<String>();
for ( String pattern : filePatterns ) {
if ( pattern.startsWith( "**/*" ) ) {
final String patternTail = pattern.substring( 4, pattern.length() );
if ( !patternTail.equals( ".hbm.xml" ) ) {
throw new AssertionFailure(
"Improper use of NativeScanner: "
+ "must not filter files via pattern other than .hbm.xml"
);
}
endWiths.add( patternTail );
}
else {
exacts.add( pattern );
}
}
final Set<Entry> fileEntries;
try {
fileEntries = ( Set<Entry> ) jarVisitor.getMatchingEntries()[FILE_FILTER_INDEX];
}
catch ( IOException e ) {
throw new RuntimeException( "Error while reading " + jarToScan.toString(), e );
}
Set<NamedInputStream> files = new HashSet<NamedInputStream>( fileEntries.size() );
Set<Entry> leftOver = new HashSet<Entry>( fileEntries );
for ( Entry entry : fileEntries ) {
boolean done = false;
for ( String exact : exacts ) {
if ( entry.getName().equals( exact ) ) {
files.add( new NamedInputStream( entry.getName(), entry.getInputStream() ) );
leftOver.remove( entry );
done = true;
}
}
if (done) continue;
for ( String endWithPattern : endWiths ) {
if ( entry.getName().endsWith( endWithPattern ) ) {
files.add( new NamedInputStream( entry.getName(), entry.getInputStream() ) );
leftOver.remove( entry );
}
}
}
for ( Entry entry : leftOver ) {
try {
entry.getInputStream().close();
}
catch ( IOException e ) {
//swallow as we don't care about these files
}
}
return files;
}
public Set<NamedInputStream> getFilesInClasspath(Set<String> filePatterns) {
throw new AssertionFailure( "Not implemented" );
}
public String getUnqualifiedJarName(URL jarToScan) {
JarVisitor jarVisitor = getVisitor( jarToScan );
return jarVisitor.getUnqualifiedJarName();
}
private static class StateJarVisitor {
StateJarVisitor(JarVisitor visitor) {
this.visitor = visitor;
}
JarVisitor visitor;
boolean hasReadFiles = false;
}
}

View File

@ -1,67 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.packaging.spi;
import java.lang.annotation.Annotation;
import java.net.URL;
import java.util.Set;
/**
* @author Emmanuel Bernard
*/
public interface Scanner {
/**
* return all packages in the jar matching one of these annotations
* if annotationsToLookFor is empty, return all packages
*/
Set<Package> getPackagesInJar(URL jartoScan, Set<Class<? extends Annotation>> annotationsToLookFor);
/**
* return all classes in the jar matching one of these annotations
* if annotationsToLookFor is empty, return all classes
*/
Set<Class<?>> getClassesInJar(URL jartoScan, Set<Class<? extends Annotation>> annotationsToLookFor);
/**
* return all files in the jar matching one of these file names
* if filePatterns is empty, return all files
* e.g. **\/*.hbm.xml, META-INF/orm.xml
*/
Set<NamedInputStream> getFilesInJar(URL jartoScan, Set<String> filePatterns);
/**
* Return all files in the classpath (i.e. PU visibility) matching one of these file names;
* if filePatterns is empty, return all files.
* The use case is really an exact file name.
*
* NOT USED by HEM at the moment. We use exact file search via getResourceAsStream for now.
*/
Set<NamedInputStream> getFilesInClasspath(Set<String> filePatterns);
/**
* return the unqualified JAR name, e.g. customer-model.jar or store.war
*/
String getUnqualifiedJarName(URL jarUrl);
}
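The legacy contract above is removed by this commit in favor of org.hibernate.jpa.boot.scan.spi.Scanner. A rough sketch of the replacement entry point, pieced together from the updated tests later in this diff (the jar URL and descriptor construction are assumptions, not a prescribed bootstrap path; the types live under org.hibernate.jpa.boot):

// one scan() call replaces the per-type getPackagesInJar()/getClassesInJar()/getFilesInJar() methods
PersistenceUnitDescriptor descriptor = new ParsedPersistenceXmlDescriptor( new URL( "file:/tmp/model.jar" ) );
ScanOptions options = new StandardScanOptions( "hbm,class", descriptor.isExcludeUnlistedClasses() );
ScanResult result = new StandardScanner().scan( descriptor, options );
// results come back as descriptors, e.g. result.getLocatedClasses() and result.getLocatedMappingFiles()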

View File

@ -0,0 +1,67 @@
package org.hibernate.jpa.test;
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import static java.io.File.separatorChar;
/**
* @author Steve Ebersole
*/
public class TestHelper {
private static URL RESOLVED_TEST_ROOT_URL;
public static URL determineTestRootUrl() {
if ( RESOLVED_TEST_ROOT_URL == null ) {
RESOLVED_TEST_ROOT_URL = resolveRootUrl( TestHelper.class );
}
return RESOLVED_TEST_ROOT_URL;
}
public static URL resolveRootUrl(Class knownClass) {
final String knownClassFileName = '/' + knownClass.getName().replace( '.', separatorChar ) + ".class";
final URL knownClassFileUrl = TestHelper.class.getResource( knownClassFileName );
final String knownClassFileUrlString = knownClassFileUrl.toExternalForm();
// to start, strip off the class file name
String rootUrlString = knownClassFileUrlString.substring( 0, knownClassFileUrlString.lastIndexOf( separatorChar ) );
// then strip off each package dir
final String packageName = knownClass.getPackage().getName();
for ( String packageNamePart : packageName.split( "\\." ) ) {
rootUrlString = rootUrlString.substring( 0, rootUrlString.lastIndexOf( separatorChar ) );
}
try {
return new URL( rootUrlString );
}
catch (MalformedURLException e) {
throw new RuntimeException( "Could not convert class base url as string to URL ref", e );
}
}
}
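A worked example of the URL arithmetic above (the path is illustrative and depends on the build layout):

// e.g. TestHelper.class is loaded from file:/.../target/test-classes/org/hibernate/jpa/test/TestHelper.class
URL root = TestHelper.resolveRootUrl( TestHelper.class );
// step 1: strip the class file name -> file:/.../target/test-classes/org/hibernate/jpa/test
// step 2: strip one segment per package part (org, hibernate, jpa, test) -> file:/.../target/test-classes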

View File

@ -1,19 +1,17 @@
package org.hibernate.jpa.test.packaging; package org.hibernate.jpa.test.packaging;
import java.lang.annotation.Annotation; import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
import java.net.URL; import org.hibernate.jpa.boot.scan.internal.StandardScanner;
import java.util.Set; import org.hibernate.jpa.boot.scan.spi.ScanOptions;
import org.hibernate.jpa.boot.scan.spi.ScanResult;
import org.hibernate.jpa.packaging.internal.NativeScanner; import org.hibernate.jpa.boot.scan.spi.Scanner;
import org.hibernate.jpa.packaging.spi.NamedInputStream;
import org.hibernate.jpa.packaging.spi.Scanner;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
*/ */
public class CustomScanner implements Scanner { public class CustomScanner implements Scanner {
public static boolean isUsed = false; public static boolean isUsed = false;
private Scanner scanner = new NativeScanner(); private Scanner delegate = new StandardScanner();
public static boolean isUsed() { public static boolean isUsed() {
return isUsed; return isUsed;
@ -23,28 +21,9 @@ public class CustomScanner implements Scanner {
isUsed = false; isUsed = false;
} }
public Set<Package> getPackagesInJar(URL jartoScan, Set<Class<? extends Annotation>> annotationsToLookFor) { @Override
public ScanResult scan(PersistenceUnitDescriptor persistenceUnit, ScanOptions options) {
isUsed = true; isUsed = true;
return scanner.getPackagesInJar( jartoScan, annotationsToLookFor ); return delegate.scan( persistenceUnit, options );
}
public Set<Class<?>> getClassesInJar(URL jartoScan, Set<Class<? extends Annotation>> annotationsToLookFor) {
isUsed = true;
return scanner.getClassesInJar( jartoScan, annotationsToLookFor );
}
public Set<NamedInputStream> getFilesInJar(URL jartoScan, Set<String> filePatterns) {
isUsed = true;
return scanner.getFilesInJar( jartoScan, filePatterns );
}
public Set<NamedInputStream> getFilesInClasspath(Set<String> filePatterns) {
isUsed = true;
return scanner.getFilesInClasspath( filePatterns );
}
public String getUnqualifiedJarName(URL jarUrl) {
isUsed = true;
return scanner.getUnqualifiedJarName( jarUrl );
} }
} }

View File

@ -23,12 +23,6 @@
*/ */
package org.hibernate.jpa.test.packaging; package org.hibernate.jpa.test.packaging;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
@ -38,29 +32,32 @@ import java.net.URL;
import java.net.URLConnection; import java.net.URLConnection;
import java.net.URLStreamHandler; import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory; import java.net.URLStreamHandlerFactory;
import java.util.Set;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MappedSuperclass;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.packaging.internal.ClassFilter; import org.hibernate.jpa.boot.archive.internal.ArchiveHelper;
import org.hibernate.jpa.packaging.internal.Entry; import org.hibernate.jpa.boot.archive.internal.ExplodedArchiveDescriptor;
import org.hibernate.jpa.packaging.internal.ExplodedJarVisitor; import org.hibernate.jpa.boot.archive.internal.JarFileBasedArchiveDescriptor;
import org.hibernate.jpa.packaging.internal.FileFilter; import org.hibernate.jpa.boot.archive.internal.JarInputStreamBasedArchiveDescriptor;
import org.hibernate.jpa.packaging.internal.FileZippedJarVisitor; import org.hibernate.jpa.boot.archive.internal.JarProtocolArchiveDescriptor;
import org.hibernate.jpa.packaging.internal.Filter; import org.hibernate.jpa.boot.archive.internal.StandardArchiveDescriptorFactory;
import org.hibernate.jpa.packaging.internal.InputStreamZippedJarVisitor; import org.hibernate.jpa.boot.archive.spi.ArchiveDescriptor;
import org.hibernate.jpa.packaging.internal.JarProtocolVisitor; import org.hibernate.jpa.boot.internal.ClassDescriptorImpl;
import org.hibernate.jpa.packaging.internal.JarVisitor; import org.hibernate.jpa.boot.scan.internal.StandardScanOptions;
import org.hibernate.jpa.packaging.internal.JarVisitorFactory; import org.hibernate.jpa.boot.scan.spi.AbstractScannerImpl;
import org.hibernate.jpa.packaging.internal.PackageFilter; import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
import org.hibernate.jpa.test.PersistenceUnitDescriptorAdapter;
import org.hibernate.jpa.test.pack.defaultpar.Version; import org.hibernate.jpa.test.pack.defaultpar.Version;
import org.hibernate.jpa.test.pack.explodedpar.Carpet; import org.hibernate.jpa.test.pack.explodedpar.Carpet;
import org.junit.Test;
import org.hibernate.testing.RequiresDialect; import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
@ -72,7 +69,7 @@ import org.junit.Test;
public class JarVisitorTest extends PackagingTestCase { public class JarVisitorTest extends PackagingTestCase {
@Test @Test
public void testHttp() throws Exception { public void testHttp() throws Exception {
URL url = JarVisitorFactory.getJarURLFromURLEntry( URL url = ArchiveHelper.getJarURLFromURLEntry(
new URL( new URL(
"jar:http://www.ibiblio.org/maven/hibernate/jars/hibernate-annotations-3.0beta1.jar!/META-INF/persistence.xml" "jar:http://www.ibiblio.org/maven/hibernate/jars/hibernate-annotations-3.0beta1.jar!/META-INF/persistence.xml"
), ),
@ -86,10 +83,18 @@ public class JarVisitorTest extends PackagingTestCase {
//fail silently //fail silently
return; return;
} }
JarVisitor visitor = JarVisitorFactory.getVisitor( url, getFilters() ); ArchiveDescriptor archiveDescriptor = StandardArchiveDescriptorFactory.INSTANCE.buildArchiveDescriptor( url );
assertEquals( 0, visitor.getMatchingEntries()[0].size() ); AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
assertEquals( 0, visitor.getMatchingEntries()[1].size() ); archiveDescriptor.visitArchive(
assertEquals( 0, visitor.getMatchingEntries()[2].size() ); new AbstractScannerImpl.ArchiveContextImpl(
new PersistenceUnitDescriptorAdapter(),
true,
resultCollector
)
);
assertEquals( 0, resultCollector.getClassDescriptorSet().size() );
assertEquals( 0, resultCollector.getPackageDescriptorSet().size() );
assertEquals( 0, resultCollector.getMappingFileSet().size() );
} }
@Test @Test
@ -97,20 +102,40 @@ public class JarVisitorTest extends PackagingTestCase {
File defaultPar = buildDefaultPar(); File defaultPar = buildDefaultPar();
addPackageToClasspath( defaultPar ); addPackageToClasspath( defaultPar );
Filter[] filters = getFilters(); ArchiveDescriptor archiveDescriptor = new JarInputStreamBasedArchiveDescriptor(
JarVisitor jarVisitor = new InputStreamZippedJarVisitor( defaultPar.toURL(), filters, "" ); StandardArchiveDescriptorFactory.INSTANCE,
assertEquals( "defaultpar", jarVisitor.getUnqualifiedJarName() ); defaultPar.toURL(),
Set entries = jarVisitor.getMatchingEntries()[1]; ""
assertEquals( 3, entries.size() ); );
Entry entry = new Entry( org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class.getName(), null );
assertTrue( entries.contains( entry ) ); AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
entry = new Entry( Version.class.getName(), null ); archiveDescriptor.visitArchive(
assertTrue( entries.contains( entry ) ); new AbstractScannerImpl.ArchiveContextImpl(
assertNull( ( ( Entry ) entries.iterator().next() ).getInputStream() ); new PersistenceUnitDescriptorAdapter(),
assertEquals( 2, jarVisitor.getMatchingEntries()[2].size() ); true,
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) { resultCollector
assertNotNull( localEntry.getInputStream() ); )
localEntry.getInputStream().close(); );
validateResults( resultCollector, org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class, Version.class );
}
private void validateResults(AbstractScannerImpl.ResultCollector resultCollector, Class... expectedClasses) throws IOException {
assertEquals( 3, resultCollector.getClassDescriptorSet().size() );
for ( Class expectedClass : expectedClasses ) {
assertTrue(
resultCollector.getClassDescriptorSet().contains(
new ClassDescriptorImpl( expectedClass.getName(), null )
)
);
}
assertEquals( 2, resultCollector.getMappingFileSet().size() );
for ( MappingFileDescriptor mappingFileDescriptor : resultCollector.getMappingFileSet() ) {
assertNotNull( mappingFileDescriptor.getStreamAccess() );
final InputStream stream = mappingFileDescriptor.getStreamAccess().accessInputStream();
assertNotNull( stream );
stream.close();
} }
} }
@ -122,41 +147,39 @@ public class JarVisitorTest extends PackagingTestCase {
addPackageToClasspath( nestedEar ); addPackageToClasspath( nestedEar );
String jarFileName = nestedEar.toURL().toExternalForm() + "!/defaultpar.par"; String jarFileName = nestedEar.toURL().toExternalForm() + "!/defaultpar.par";
Filter[] filters = getFilters();
JarVisitor jarVisitor = new JarProtocolVisitor( new URL( jarFileName ), filters, "" ); JarProtocolArchiveDescriptor archiveDescriptor = new JarProtocolArchiveDescriptor(
//TODO should we fix the name here to reach defaultpar rather than nestedjar ?? StandardArchiveDescriptorFactory.INSTANCE,
//assertEquals( "defaultpar", jarVisitor.getUnqualifiedJarName() ); new URL( jarFileName ),
Set entries = jarVisitor.getMatchingEntries()[1]; ""
assertEquals( 3, entries.size() ); );
Entry entry = new Entry( org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class.getName(), null ); AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
assertTrue( entries.contains( entry ) ); archiveDescriptor.visitArchive(
entry = new Entry( Version.class.getName(), null ); new AbstractScannerImpl.ArchiveContextImpl(
assertTrue( entries.contains( entry ) ); new PersistenceUnitDescriptorAdapter(),
assertNull( ( ( Entry ) entries.iterator().next() ).getInputStream() ); true,
assertEquals( 2, jarVisitor.getMatchingEntries()[2].size() ); resultCollector
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) { )
assertNotNull( localEntry.getInputStream() ); );
localEntry.getInputStream().close();
} validateResults( resultCollector, org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class, Version.class );
jarFileName = nestedEarDir.toURL().toExternalForm() + "!/defaultpar.par"; jarFileName = nestedEarDir.toURL().toExternalForm() + "!/defaultpar.par";
//JarVisitor jarVisitor = new ZippedJarVisitor( jarFileName, true, true ); archiveDescriptor = new JarProtocolArchiveDescriptor(
filters = getFilters(); StandardArchiveDescriptorFactory.INSTANCE,
jarVisitor = new JarProtocolVisitor( new URL( jarFileName ), filters, "" ); new URL( jarFileName ),
//TODO should we fix the name here to reach defaultpar rather than nestedjar ?? ""
//assertEquals( "defaultpar", jarVisitor.getUnqualifiedJarName() ); );
entries = jarVisitor.getMatchingEntries()[1]; resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
assertEquals( 3, entries.size() ); archiveDescriptor.visitArchive(
entry = new Entry( org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class.getName(), null ); new AbstractScannerImpl.ArchiveContextImpl(
assertTrue( entries.contains( entry ) ); new PersistenceUnitDescriptorAdapter(),
entry = new Entry( Version.class.getName(), null ); true,
assertTrue( entries.contains( entry ) ); resultCollector
assertNull( ( ( Entry ) entries.iterator().next() ).getInputStream() ); )
assertEquals( 2, jarVisitor.getMatchingEntries()[2].size() ); );
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) {
assertNotNull( localEntry.getInputStream() ); validateResults( resultCollector, org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class, Version.class );
localEntry.getInputStream().close();
}
} }
@Test @Test
@ -165,21 +188,26 @@ public class JarVisitorTest extends PackagingTestCase {
addPackageToClasspath( war ); addPackageToClasspath( war );
String jarFileName = war.toURL().toExternalForm() + "!/WEB-INF/classes"; String jarFileName = war.toURL().toExternalForm() + "!/WEB-INF/classes";
Filter[] filters = getFilters(); JarProtocolArchiveDescriptor archiveDescriptor = new JarProtocolArchiveDescriptor(
JarVisitor jarVisitor = new JarProtocolVisitor( new URL( jarFileName ), filters, "" ); StandardArchiveDescriptorFactory.INSTANCE,
assertEquals( "war", jarVisitor.getUnqualifiedJarName() ); new URL( jarFileName ),
Set entries = jarVisitor.getMatchingEntries()[1]; ""
assertEquals( 3, entries.size() ); );
Entry entry = new Entry( org.hibernate.jpa.test.pack.war.ApplicationServer.class.getName(), null );
assertTrue( entries.contains( entry ) ); AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
entry = new Entry( org.hibernate.jpa.test.pack.war.Version.class.getName(), null ); archiveDescriptor.visitArchive(
assertTrue( entries.contains( entry ) ); new AbstractScannerImpl.ArchiveContextImpl(
assertNull( ( ( Entry ) entries.iterator().next() ).getInputStream() ); new PersistenceUnitDescriptorAdapter(),
assertEquals( 2, jarVisitor.getMatchingEntries()[2].size() ); true,
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) { resultCollector
assertNotNull( localEntry.getInputStream() ); )
localEntry.getInputStream().close(); );
}
validateResults(
resultCollector,
org.hibernate.jpa.test.pack.war.ApplicationServer.class,
org.hibernate.jpa.test.pack.war.Version.class
);
} }
@Test @Test
@ -187,21 +215,21 @@ public class JarVisitorTest extends PackagingTestCase {
File defaultPar = buildDefaultPar(); File defaultPar = buildDefaultPar();
addPackageToClasspath( defaultPar ); addPackageToClasspath( defaultPar );
Filter[] filters = getFilters(); JarFileBasedArchiveDescriptor archiveDescriptor = new JarFileBasedArchiveDescriptor(
JarVisitor jarVisitor = new FileZippedJarVisitor( defaultPar.toURL(), filters, "" ); StandardArchiveDescriptorFactory.INSTANCE,
assertEquals( "defaultpar", jarVisitor.getUnqualifiedJarName() ); defaultPar.toURL(),
Set entries = jarVisitor.getMatchingEntries()[1]; ""
assertEquals( 3, entries.size() ); );
Entry entry = new Entry( org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class.getName(), null ); AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
assertTrue( entries.contains( entry ) ); archiveDescriptor.visitArchive(
entry = new Entry( Version.class.getName(), null ); new AbstractScannerImpl.ArchiveContextImpl(
assertTrue( entries.contains( entry ) ); new PersistenceUnitDescriptorAdapter(),
assertNull( ( ( Entry ) entries.iterator().next() ).getInputStream() ); true,
assertEquals( 2, jarVisitor.getMatchingEntries()[2].size() ); resultCollector
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) { )
assertNotNull( localEntry.getInputStream() ); );
localEntry.getInputStream().close();
} validateResults( resultCollector, org.hibernate.jpa.test.pack.defaultpar.ApplicationServer.class, Version.class );
} }
@Test @Test
@ -209,32 +237,50 @@ public class JarVisitorTest extends PackagingTestCase {
File explodedPar = buildExplodedPar(); File explodedPar = buildExplodedPar();
addPackageToClasspath( explodedPar ); addPackageToClasspath( explodedPar );
Filter[] filters = getFilters();
String dirPath = explodedPar.toURL().toExternalForm(); String dirPath = explodedPar.toURL().toExternalForm();
// TODO - shouldn't ExplodedJarVisitor take care of a trailing slash? // TODO - shouldn't ExplodedJarVisitor take care of a trailing slash?
if ( dirPath.endsWith( "/" ) ) { if ( dirPath.endsWith( "/" ) ) {
dirPath = dirPath.substring( 0, dirPath.length() - 1 ); dirPath = dirPath.substring( 0, dirPath.length() - 1 );
} }
JarVisitor jarVisitor = new ExplodedJarVisitor( dirPath, filters );
assertEquals( "explodedpar", jarVisitor.getUnqualifiedJarName() );
Set[] entries = jarVisitor.getMatchingEntries();
assertEquals( 1, entries[1].size() );
assertEquals( 1, entries[0].size() );
assertEquals( 1, entries[2].size() );
Entry entry = new Entry( Carpet.class.getName(), null ); ExplodedArchiveDescriptor archiveDescriptor = new ExplodedArchiveDescriptor(
assertTrue( entries[1].contains( entry ) ); StandardArchiveDescriptorFactory.INSTANCE,
for ( Entry localEntry : ( Set<Entry> ) jarVisitor.getMatchingEntries()[2] ) { ArchiveHelper.getURLFromPath( dirPath ),
assertNotNull( localEntry.getInputStream() ); ""
localEntry.getInputStream().close(); );
AbstractScannerImpl.ResultCollector resultCollector = new AbstractScannerImpl.ResultCollector( new StandardScanOptions() );
archiveDescriptor.visitArchive(
new AbstractScannerImpl.ArchiveContextImpl(
new PersistenceUnitDescriptorAdapter(),
true,
resultCollector
)
);
assertEquals( 1, resultCollector.getClassDescriptorSet().size() );
assertEquals( 1, resultCollector.getPackageDescriptorSet().size() );
assertEquals( 1, resultCollector.getMappingFileSet().size() );
assertTrue(
resultCollector.getClassDescriptorSet().contains(
new ClassDescriptorImpl( Carpet.class.getName(), null )
)
);
for ( MappingFileDescriptor mappingFileDescriptor : resultCollector.getMappingFileSet() ) {
assertNotNull( mappingFileDescriptor.getStreamAccess() );
final InputStream stream = mappingFileDescriptor.getStreamAccess().accessInputStream();
assertNotNull( stream );
stream.close();
} }
} }
@Test @Test
@TestForIssue(jiraKey = "HHH-6806") @TestForIssue(jiraKey = "HHH-6806")
public void testJarVisitorFactory() throws Exception{ public void testJarVisitorFactory() throws Exception {
final File explodedPar = buildExplodedPar();
addPackageToClasspath( buildExplodedPar(), buildDefaultPar() ); final File defaultPar = buildDefaultPar();
addPackageToClasspath( explodedPar, defaultPar );
//setting URL to accept vfs based protocol //setting URL to accept vfs based protocol
URL.setURLStreamHandlerFactory(new URLStreamHandlerFactory() { URL.setURLStreamHandlerFactory(new URLStreamHandlerFactory() {
@ -250,21 +296,21 @@ public class JarVisitorTest extends PackagingTestCase {
} }
}); });
URL jarUrl = new URL ("file:./target/packages/defaultpar.par"); URL jarUrl = defaultPar.toURL();
JarVisitor jarVisitor = JarVisitorFactory.getVisitor(jarUrl, getFilters(), null); ArchiveDescriptor descriptor = StandardArchiveDescriptorFactory.INSTANCE.buildArchiveDescriptor( jarUrl );
assertEquals(FileZippedJarVisitor.class.getName(), jarVisitor.getClass().getName()); assertEquals( JarFileBasedArchiveDescriptor.class.getName(), descriptor.getClass().getName() );
jarUrl = new URL ("file:./target/packages/explodedpar"); jarUrl = explodedPar.toURL();
jarVisitor = JarVisitorFactory.getVisitor(jarUrl, getFilters(), null); descriptor = StandardArchiveDescriptorFactory.INSTANCE.buildArchiveDescriptor( jarUrl );
assertEquals(ExplodedJarVisitor.class.getName(), jarVisitor.getClass().getName()); assertEquals( ExplodedArchiveDescriptor.class.getName(), descriptor.getClass().getName() );
jarUrl = new URL ("vfszip:./target/packages/defaultpar.par"); jarUrl = new URL( defaultPar.toURL().toExternalForm().replace( "file:", "vfszip:" ) );
jarVisitor = JarVisitorFactory.getVisitor(jarUrl, getFilters(), null); descriptor = StandardArchiveDescriptorFactory.INSTANCE.buildArchiveDescriptor( jarUrl );
assertEquals(FileZippedJarVisitor.class.getName(), jarVisitor.getClass().getName()); assertEquals( JarFileBasedArchiveDescriptor.class.getName(), descriptor.getClass().getName());
jarUrl = new URL ("vfsfile:./target/packages/explodedpar"); jarUrl = new URL( explodedPar.toURL().toExternalForm().replace( "file:", "vfsfile:" ) );
jarVisitor = JarVisitorFactory.getVisitor(jarUrl, getFilters(), null); descriptor = StandardArchiveDescriptorFactory.INSTANCE.buildArchiveDescriptor( jarUrl );
assertEquals(ExplodedJarVisitor.class.getName(), jarVisitor.getClass().getName()); assertEquals( ExplodedArchiveDescriptor.class.getName(), descriptor.getClass().getName() );
} }
@Test @Test
@ -315,36 +361,29 @@ public class JarVisitorTest extends PackagingTestCase {
@Test @Test
@TestForIssue(jiraKey = "HHH-7835") @TestForIssue(jiraKey = "HHH-7835")
public void testGetBytesFromInputStream() { public void testGetBytesFromInputStream() throws Exception {
try { File file = buildLargeJar();
File file = buildLargeJar();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
InputStream stream = new BufferedInputStream( InputStream stream = new BufferedInputStream(
new FileInputStream( file ) ); new FileInputStream( file ) );
int oldLength = getBytesFromInputStream( stream ).length; int oldLength = getBytesFromInputStream( stream ).length;
stream.close(); stream.close();
long oldTime = System.currentTimeMillis() - start; long oldTime = System.currentTimeMillis() - start;
start = System.currentTimeMillis(); start = System.currentTimeMillis();
stream = new BufferedInputStream( new FileInputStream( file ) ); stream = new BufferedInputStream( new FileInputStream( file ) );
int newLength = JarVisitorFactory.getBytesFromInputStream( int newLength = ArchiveHelper.getBytesFromInputStream( stream ).length;
stream ).length; stream.close();
stream.close(); long newTime = System.currentTimeMillis() - start;
long newTime = System.currentTimeMillis() - start;
assertEquals( oldLength, newLength ); assertEquals( oldLength, newLength );
assertTrue( oldTime > newTime ); assertTrue( oldTime > newTime );
}
catch ( Exception e ) {
fail( e.getMessage() );
}
} }
// This is the old getBytesFromInputStream from JarVisitorFactory before // This is the old getBytesFromInputStream from JarVisitorFactory before
// it was changed by HHH-7835. Use it as a regression test. // it was changed by HHH-7835. Use it as a regression test.
private byte[] getBytesFromInputStream( private byte[] getBytesFromInputStream(InputStream inputStream) throws IOException {
InputStream inputStream) throws IOException {
int size; int size;
byte[] entryBytes = new byte[0]; byte[] entryBytes = new byte[0];
@ -363,46 +402,16 @@ public class JarVisitorTest extends PackagingTestCase {
@Test @Test
@TestForIssue(jiraKey = "HHH-7835") @TestForIssue(jiraKey = "HHH-7835")
public void testGetBytesFromZeroInputStream() { public void testGetBytesFromZeroInputStream() throws Exception {
try { // Ensure that JarVisitorFactory#getBytesFromInputStream
// Ensure that JarVisitorFactory#getBytesFromInputStream // can handle 0 length streams gracefully.
// can handle 0 length streams gracefully. URL emptyTxtUrl = getClass().getResource( "/org/hibernate/jpa/test/packaging/empty.txt" );
InputStream emptyStream = new BufferedInputStream( if ( emptyTxtUrl == null ) {
new FileInputStream( new File( throw new RuntimeException( "Bah!" );
"src/test/resources/org/hibernate/jpa/test/packaging/empty.txt" ) ) );
int length = JarVisitorFactory.getBytesFromInputStream(
emptyStream ).length;
assertEquals( length, 0 );
emptyStream.close();
} }
catch ( Exception e ) { InputStream emptyStream = new BufferedInputStream( emptyTxtUrl.openStream() );
fail( e.getMessage() ); int length = ArchiveHelper.getBytesFromInputStream( emptyStream ).length;
} assertEquals( length, 0 );
} emptyStream.close();
private Filter[] getFilters() {
return new Filter[] {
new PackageFilter( false, null ) {
public boolean accept(String javaElementName) {
return true;
}
},
new ClassFilter(
false, new Class[] {
Entity.class,
MappedSuperclass.class,
Embeddable.class
}
) {
public boolean accept(String javaElementName) {
return true;
}
},
new FileFilter( true ) {
public boolean accept(String javaElementName) {
return javaElementName.endsWith( "hbm.xml" ) || javaElementName.endsWith( "META-INF/orm.xml" );
}
}
};
} }
} }

View File

@ -45,6 +45,7 @@ import org.hibernate.jpa.test.Cat;
import org.hibernate.jpa.test.Distributor; import org.hibernate.jpa.test.Distributor;
import org.hibernate.jpa.test.Item; import org.hibernate.jpa.test.Item;
import org.hibernate.jpa.test.Kitten; import org.hibernate.jpa.test.Kitten;
import org.hibernate.jpa.test.TestHelper;
import org.hibernate.jpa.test.pack.cfgxmlpar.Morito; import org.hibernate.jpa.test.pack.cfgxmlpar.Morito;
import org.hibernate.jpa.test.pack.defaultpar.ApplicationServer; import org.hibernate.jpa.test.pack.defaultpar.ApplicationServer;
import org.hibernate.jpa.test.pack.defaultpar.IncrementListener; import org.hibernate.jpa.test.pack.defaultpar.IncrementListener;
@ -204,6 +205,10 @@ public abstract class PackagingTestCase extends BaseCoreFunctionalTestCase {
} }
protected File buildExplicitPar() { protected File buildExplicitPar() {
// explicitpar/persistence.xml references externaljar.jar, so build that from here.
// This is the reason tests were failing after a clean, at least on my (Steve's) local system.
buildExternalJar();
String fileName = "explicitpar.par"; String fileName = "explicitpar.par";
JavaArchive archive = ShrinkWrap.create( JavaArchive.class, fileName ); JavaArchive archive = ShrinkWrap.create( JavaArchive.class, fileName );
archive.addClasses( archive.addClasses(
@ -342,8 +347,10 @@ public abstract class PackagingTestCase extends BaseCoreFunctionalTestCase {
// Build a large jar by adding a lorem ipsum file repeatedly. // Build a large jar by adding a lorem ipsum file repeatedly.
for ( int i = 0; i < 100; i++ ) { for ( int i = 0; i < 100; i++ ) {
ArchivePath path = ArchivePaths.create( "META-INF/file" + i ); ArchivePath path = ArchivePaths.create( "META-INF/file" + i );
archive.addAsResource( new File( "src/test/resources/org/hibernate/jpa/test/packaging/loremipsum.txt" ), archive.addAsResource(
path ); "org/hibernate/jpa/test/packaging/loremipsum.txt",
path
);
} }
File testPackage = new File( packageTargetDir, fileName ); File testPackage = new File( packageTargetDir, fileName );

View File

@ -23,31 +23,32 @@
*/ */
package org.hibernate.jpa.test.packaging; package org.hibernate.jpa.test.packaging;
import java.io.File;
import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Converter;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.EntityManagerFactory; import javax.persistence.EntityManagerFactory;
import javax.persistence.MappedSuperclass;
import javax.persistence.Persistence; import javax.persistence.Persistence;
import java.io.File;
import org.junit.Test; import java.io.InputStream;
import java.util.HashMap;
import org.hibernate.jpa.AvailableSettings; import org.hibernate.jpa.AvailableSettings;
import org.hibernate.jpa.packaging.internal.NativeScanner; import org.hibernate.jpa.boot.internal.ParsedPersistenceXmlDescriptor;
import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor;
import org.hibernate.jpa.boot.scan.internal.StandardScanOptions;
import org.hibernate.jpa.boot.scan.internal.StandardScanner;
import org.hibernate.jpa.boot.spi.ClassDescriptor;
import org.hibernate.jpa.boot.spi.MappingFileDescriptor;
import org.hibernate.jpa.boot.spi.NamedInputStream;
import org.hibernate.jpa.boot.scan.spi.ScanOptions;
import org.hibernate.jpa.boot.scan.spi.ScanResult;
import org.hibernate.jpa.boot.scan.spi.Scanner;
import org.hibernate.jpa.test.pack.defaultpar.ApplicationServer; import org.hibernate.jpa.test.pack.defaultpar.ApplicationServer;
import org.hibernate.jpa.packaging.spi.NamedInputStream;
import org.hibernate.jpa.packaging.spi.Scanner;
import org.hibernate.jpa.test.pack.defaultpar.Version; import org.hibernate.jpa.test.pack.defaultpar.Version;
import org.junit.Test;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
@ -59,32 +60,39 @@ public class ScannerTest extends PackagingTestCase {
File defaultPar = buildDefaultPar(); File defaultPar = buildDefaultPar();
addPackageToClasspath( defaultPar ); addPackageToClasspath( defaultPar );
Scanner scanner = new NativeScanner(); PersistenceUnitDescriptor descriptor = new ParsedPersistenceXmlDescriptor( defaultPar.toURL() );
assertEquals( "defaultpar", scanner.getUnqualifiedJarName( defaultPar.toURL() ) ); ScanOptions options = new StandardScanOptions( "hbm,class", descriptor.isExcludeUnlistedClasses() );
Scanner scanner = new StandardScanner();
ScanResult scanResult = scanner.scan( descriptor, options );
Set<Class<? extends Annotation>> annotationsToLookFor = new HashSet<Class<? extends Annotation>>( 3 ); assertEquals( 3, scanResult.getLocatedClasses().size() );
annotationsToLookFor.add( Entity.class ); assertClassesContained( scanResult, ApplicationServer.class );
annotationsToLookFor.add( MappedSuperclass.class ); assertClassesContained( scanResult, Version.class );
annotationsToLookFor.add( Embeddable.class );
annotationsToLookFor.add( Converter.class );
final Set<Class<?>> classes = scanner.getClassesInJar( defaultPar.toURL(), annotationsToLookFor );
assertEquals( 3, classes.size() ); assertEquals( 2, scanResult.getLocatedMappingFiles().size() );
assertTrue( classes.contains( ApplicationServer.class ) ); for ( MappingFileDescriptor mappingFileDescriptor : scanResult.getLocatedMappingFiles() ) {
assertTrue( classes.contains( Version.class ) ); assertNotNull( mappingFileDescriptor.getName() );
assertNotNull( mappingFileDescriptor.getStreamAccess() );
Set<String> filePatterns = new HashSet<String>( 2 ); InputStream stream = mappingFileDescriptor.getStreamAccess().accessInputStream();
filePatterns.add( "**/*.hbm.xml" ); assertNotNull( stream );
filePatterns.add( "META-INF/orm.xml" ); stream.close();
final Set<NamedInputStream> files = scanner.getFilesInJar( defaultPar.toURL(), filePatterns ); NamedInputStream namedInputStream = mappingFileDescriptor.getStreamAccess().asNamedInputStream();
assertNotNull( namedInputStream );
assertEquals( 2, files.size() ); stream = namedInputStream.getStream();
for ( NamedInputStream file : files ) { assertNotNull( stream );
assertNotNull( file.getStream() ); stream.close();
file.getStream().close();
} }
} }
private void assertClassesContained(ScanResult scanResult, Class classToCheckFor) {
for ( ClassDescriptor classDescriptor : scanResult.getLocatedClasses() ) {
if ( classDescriptor.getName().equals( classToCheckFor.getName() ) ) {
return;
}
}
fail( "ScanResult did not contain expected Class : " + classToCheckFor.getName() );
}
@Test @Test
public void testCustomScanner() throws Exception { public void testCustomScanner() throws Exception {
File defaultPar = buildDefaultPar(); File defaultPar = buildDefaultPar();

View File

@ -50,6 +50,7 @@ import org.hibernate.persister.collection.AbstractCollectionPersister;
* @author Hernán Chanfreau * @author Hernán Chanfreau
* @author Steve Ebersole * @author Steve Ebersole
* @author Michal Skowronek (mskowr at o2 dot pl) * @author Michal Skowronek (mskowr at o2 dot pl)
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/ */
public abstract class BaseEnversCollectionEventListener extends BaseEnversEventListener { public abstract class BaseEnversCollectionEventListener extends BaseEnversEventListener {
protected BaseEnversCollectionEventListener(AuditConfiguration enversConfiguration) { protected BaseEnversCollectionEventListener(AuditConfiguration enversConfiguration) {
@ -65,15 +66,12 @@ public abstract class BaseEnversCollectionEventListener extends BaseEnversEventL
PersistentCollection newColl, PersistentCollection newColl,
Serializable oldColl, Serializable oldColl,
CollectionEntry collectionEntry) { CollectionEntry collectionEntry) {
String entityName = event.getAffectedOwnerEntityName(); if ( shouldGenerateRevision( event ) ) {
if ( ! getAuditConfiguration().getGlobalCfg().isGenerateRevisionsForCollections() ) {
return;
}
if ( getAuditConfiguration().getEntCfg().isVersioned( entityName ) ) {
checkIfTransactionInProgress(event.getSession()); checkIfTransactionInProgress(event.getSession());
AuditProcess auditProcess = getAuditConfiguration().getSyncManager().get(event.getSession()); AuditProcess auditProcess = getAuditConfiguration().getSyncManager().get(event.getSession());
String entityName = event.getAffectedOwnerEntityName();
String ownerEntityName = ((AbstractCollectionPersister) collectionEntry.getLoadedPersister()).getOwnerEntityName(); String ownerEntityName = ((AbstractCollectionPersister) collectionEntry.getLoadedPersister()).getOwnerEntityName();
String referencingPropertyName = collectionEntry.getRole().substring(ownerEntityName.length() + 1); String referencingPropertyName = collectionEntry.getRole().substring(ownerEntityName.length() + 1);
@ -123,6 +121,27 @@ public abstract class BaseEnversCollectionEventListener extends BaseEnversEventL
} }
} }
/**
* Forces persistent collection initialization.
* @param event Collection event.
* @return Stored snapshot.
*/
protected Serializable initializeCollection(AbstractCollectionEvent event) {
event.getCollection().forceInitialization();
return event.getCollection().getStoredSnapshot();
}
/**
* Checks whether a modification of a not-owned relation field should trigger a new revision and whether the owner entity is versioned.
* @param event Collection event.
* @return {@code true} if revision based on given event should be generated, {@code false} otherwise.
*/
protected boolean shouldGenerateRevision(AbstractCollectionEvent event) {
final String entityName = event.getAffectedOwnerEntityName();
return getAuditConfiguration().getGlobalCfg().isGenerateRevisionsForCollections()
&& getAuditConfiguration().getEntCfg().isVersioned( entityName );
}
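// Illustrative only (not part of the helpers above): subclasses can combine the two methods,
// as the pre-collection-remove listener later in this diff does:
//
//     if ( !event.getCollection().wasInitialized() && shouldGenerateRevision( event ) ) {
//         oldColl = initializeCollection( event );   // fresh snapshot for audit data
//     }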
/** /**
* Looks up a relation description corresponding to the given property in the given entity. If no description is * Looks up a relation description corresponding to the given property in the given entity. If no description is
* found in the given entity, the parent entity is checked (so that inherited relations work). * found in the given entity, the parent entity is checked (so that inherited relations work).

View File

@ -23,6 +23,8 @@
*/ */
package org.hibernate.envers.event; package org.hibernate.envers.event;
import java.io.Serializable;
import org.hibernate.engine.spi.CollectionEntry; import org.hibernate.engine.spi.CollectionEntry;
import org.hibernate.envers.configuration.AuditConfiguration; import org.hibernate.envers.configuration.AuditConfiguration;
import org.hibernate.event.spi.PreCollectionRemoveEvent; import org.hibernate.event.spi.PreCollectionRemoveEvent;
@ -32,6 +34,7 @@ import org.hibernate.event.spi.PreCollectionRemoveEventListener;
* @author Adam Warski (adam at warski dot org) * @author Adam Warski (adam at warski dot org)
* @author Hernán Chanfreau * @author Hernán Chanfreau
* @author Steve Ebersole * @author Steve Ebersole
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/ */
public class EnversPreCollectionRemoveEventListenerImpl public class EnversPreCollectionRemoveEventListenerImpl
extends BaseEnversCollectionEventListener extends BaseEnversCollectionEventListener
@ -45,7 +48,12 @@ public class EnversPreCollectionRemoveEventListenerImpl
public void onPreRemoveCollection(PreCollectionRemoveEvent event) { public void onPreRemoveCollection(PreCollectionRemoveEvent event) {
CollectionEntry collectionEntry = getCollectionEntry( event ); CollectionEntry collectionEntry = getCollectionEntry( event );
if ( collectionEntry != null && !collectionEntry.getLoadedPersister().isInverse() ) { if ( collectionEntry != null && !collectionEntry.getLoadedPersister().isInverse() ) {
onCollectionAction( event, null, collectionEntry.getSnapshot(), collectionEntry ); Serializable oldColl = collectionEntry.getSnapshot();
if ( !event.getCollection().wasInitialized() && shouldGenerateRevision( event ) ) {
// In case of an uninitialized collection we need a fresh snapshot to properly calculate audit data.
oldColl = initializeCollection( event );
}
onCollectionAction( event, null, oldColl, collectionEntry );
} }
} }
} }

View File

@ -0,0 +1,95 @@
package org.hibernate.envers.test.entities.manytomany.unidirectional;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import org.hibernate.envers.Audited;
import org.hibernate.envers.test.entities.StrTestEntity;
/**
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/
@Entity
@Audited
public class JoinTableEntity implements Serializable {
@Id
@GeneratedValue
private Long id;
private String data;
@ManyToMany
@JoinTable(name = "test_join_table",
joinColumns = @JoinColumn(name = "assoc_id1"),
inverseJoinColumns = @JoinColumn(name = "assoc_id2")
)
private Set<StrTestEntity> references = new HashSet<StrTestEntity>();
public JoinTableEntity() {
}
public JoinTableEntity(String data) {
this.data = data;
}
public JoinTableEntity(Long id, String data) {
this.id = id;
this.data = data;
}
@Override
public boolean equals(Object o) {
if ( this == o ) return true;
if ( !( o instanceof JoinTableEntity ) ) return false;
JoinTableEntity that = (JoinTableEntity) o;
if ( data != null ? !data.equals( that.data ) : that.data != null ) return false;
if ( id != null ? !id.equals( that.id ) : that.id != null ) return false;
return true;
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + ( data != null ? data.hashCode() : 0 );
return result;
}
@Override
public String toString() {
return "JoinTableEntity(id = " + id + ", data = " + data + ")";
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Set<StrTestEntity> getReferences() {
return references;
}
public void setReferences(Set<StrTestEntity> references) {
this.references = references;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
}

View File

@ -0,0 +1,137 @@
package org.hibernate.envers.test.integration.manytomany.unidirectional;
import java.util.Arrays;
import java.util.HashSet;
import javax.persistence.EntityManager;
import org.junit.Assert;
import org.junit.Test;
import org.hibernate.envers.test.BaseEnversJPAFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
import org.hibernate.envers.test.entities.manytomany.unidirectional.JoinTableEntity;
import org.hibernate.testing.TestForIssue;
/**
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/
@TestForIssue( jiraKey = "HHH-8087" )
public class JoinTableDetachedTest extends BaseEnversJPAFunctionalTestCase {
private Long collectionEntityId = null;
private Integer element1Id = null;
private Integer element2Id = null;
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] { JoinTableEntity.class, StrTestEntity.class };
}
@Test
@Priority(10)
public void initData() {
EntityManager em = getEntityManager();
// Revision 1 - addition
em.getTransaction().begin();
JoinTableEntity collectionEntity = new JoinTableEntity( "some data" );
StrTestEntity element1 = new StrTestEntity( "str1" );
StrTestEntity element2 = new StrTestEntity( "str2" );
collectionEntity.getReferences().add( element1 );
collectionEntity.getReferences().add( element2 );
em.persist( element1 );
em.persist( element2 );
em.persist( collectionEntity );
em.getTransaction().commit();
collectionEntityId = collectionEntity.getId();
element1Id = element1.getId();
element2Id = element2.getId();
em.close();
em = getEntityManager();
// Revision 2 - simple modification
em.getTransaction().begin();
collectionEntity = em.find( JoinTableEntity.class, collectionEntity.getId() );
collectionEntity.setData( "some other data" );
collectionEntity = em.merge( collectionEntity );
em.getTransaction().commit();
em.close();
em = getEntityManager();
// Revision 3 - remove detached object from collection
em.getTransaction().begin();
collectionEntity = em.find( JoinTableEntity.class, collectionEntity.getId() );
collectionEntity.getReferences().remove( element1 );
collectionEntity = em.merge( collectionEntity );
em.getTransaction().commit();
em.close();
em = getEntityManager();
// Revision 4 - replace the collection
em.getTransaction().begin();
collectionEntity = em.find( JoinTableEntity.class, collectionEntity.getId() );
collectionEntity.setReferences( new HashSet<StrTestEntity>() );
collectionEntity = em.merge( collectionEntity );
em.getTransaction().commit();
em.close();
em = getEntityManager();
// Revision 5 - add to collection
em.getTransaction().begin();
collectionEntity = em.find( JoinTableEntity.class, collectionEntity.getId() );
collectionEntity.getReferences().add( element1 );
collectionEntity = em.merge( collectionEntity );
em.getTransaction().commit();
em.close();
}
@Test
public void testRevisionsCounts() {
Assert.assertEquals( Arrays.asList( 1, 2, 3, 4, 5 ), getAuditReader().getRevisions(JoinTableEntity.class, collectionEntityId ) );
Assert.assertEquals( Arrays.asList( 1 ), getAuditReader().getRevisions(StrTestEntity.class, element1Id ) );
Assert.assertEquals( Arrays.asList( 1 ), getAuditReader().getRevisions(StrTestEntity.class, element2Id ) );
}
@Test
public void testHistoryOfCollectionEntity() {
// Revision 1
JoinTableEntity collectionEntity = new JoinTableEntity( collectionEntityId, "some data" );
StrTestEntity element1 = new StrTestEntity( "str1", element1Id );
StrTestEntity element2 = new StrTestEntity( "str2", element2Id );
collectionEntity.getReferences().add( element1 );
collectionEntity.getReferences().add( element2 );
JoinTableEntity ver1 = getAuditReader().find( JoinTableEntity.class, collectionEntityId, 1 );
Assert.assertEquals( collectionEntity, ver1 );
Assert.assertEquals( collectionEntity.getReferences(), ver1.getReferences() );
// Revision 2
collectionEntity.setData( "some other data" );
JoinTableEntity ver2 = getAuditReader().find( JoinTableEntity.class, collectionEntityId, 2 );
Assert.assertEquals( collectionEntity, ver2 );
Assert.assertEquals( collectionEntity.getReferences(), ver2.getReferences() );
// Revision 3
collectionEntity.getReferences().remove( element1 );
JoinTableEntity ver3 = getAuditReader().find( JoinTableEntity.class, collectionEntityId, 3 );
Assert.assertEquals( collectionEntity, ver3 );
Assert.assertEquals( collectionEntity.getReferences(), ver3.getReferences() );
// Revision 4
collectionEntity.setReferences( new HashSet<StrTestEntity>() );
JoinTableEntity ver4 = getAuditReader().find( JoinTableEntity.class, collectionEntityId, 4 );
Assert.assertEquals( collectionEntity, ver4 );
Assert.assertEquals( collectionEntity.getReferences(), ver4.getReferences() );
// Revision 5
collectionEntity.getReferences().add( element1 );
JoinTableEntity ver5 = getAuditReader().find( JoinTableEntity.class, collectionEntityId, 5 );
Assert.assertEquals( collectionEntity, ver5 );
Assert.assertEquals( collectionEntity.getReferences(), ver5.getReferences() );
}
}

View File

@ -58,7 +58,7 @@ ext {
// javax // javax
jpa: 'org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Draft-14', jpa: 'org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Draft-14',
jta: 'org.jboss.spec.javax.transaction:jboss-transaction-api_1.1_spec:1.0.0.Final', jta: 'org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.0.0.Alpha1',
validation: 'javax.validation:validation-api:1.0.0.GA', validation: 'javax.validation:validation-api:1.0.0.GA',
jacc: 'org.jboss.spec.javax.security.jacc:jboss-jacc-api_1.4_spec:1.0.2.Final', jacc: 'org.jboss.spec.javax.security.jacc:jboss-jacc-api_1.4_spec:1.0.2.Final',