Methods Summary |
---|
protected void | add(org.dom4j.Document doc)
HbmBinder.bindRoot( doc, createMappings(), CollectionHelper.EMPTY_MAP );
|
public void | addAuxiliaryDatabaseObject(org.hibernate.mapping.AuxiliaryDatabaseObject object)
auxiliaryDatabaseObjects.add( object );
|
public org.hibernate.cfg.Configuration | addCacheableFile(java.io.File xmlFile)Add a cached mapping file. A cached file is a serialized representation
of the DOM structure of a particular mapping. It is saved from a previous
call as a file with the name xmlFile + ".bin" where xmlFile is
the name of the original mapping file.
If a cached xmlFile + ".bin" exists and is newer than
xmlFile the ".bin" file will be read directly. Otherwise
xmlFile is read and then serialized to xmlFile + ".bin" for use
the next time.
try {
File cachedFile = new File( xmlFile.getAbsolutePath() + ".bin" );
org.dom4j.Document doc = null;
final boolean useCachedFile = xmlFile.exists() &&
cachedFile.exists() &&
xmlFile.lastModified() < cachedFile.lastModified();
if ( useCachedFile ) {
try {
log.info( "Reading mappings from cache file: " + cachedFile );
doc = ( org.dom4j.Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) );
}
catch ( SerializationException e ) {
log.warn( "Could not deserialize cache file: " + cachedFile.getPath(), e );
}
catch ( FileNotFoundException e ) {
log.warn( "I/O reported cached file could not be found : " + cachedFile.getPath(), e );
}
}
// if doc is null, then for whatever reason, the cached file cannot be used...
if ( doc == null ) {
if ( !xmlFile.exists() ) {
throw new MappingNotFoundException( "file", xmlFile.toString() );
}
log.info( "Reading mappings from file: " + xmlFile );
List errors = new ArrayList();
try {
doc = xmlHelper.createSAXReader( xmlFile.getAbsolutePath(), errors, entityResolver ).read( xmlFile );
if ( errors.size() != 0 ) {
throw new MappingException( "invalid mapping", ( Throwable ) errors.get( 0 ) );
}
}
catch( DocumentException e){
throw new MappingException( "invalid mapping", e );
}
try {
log.debug( "Writing cache file for: " + xmlFile + " to: " + cachedFile );
SerializationHelper.serialize( ( Serializable ) doc, new FileOutputStream( cachedFile ) );
}
catch ( SerializationException e ) {
log.warn( "Could not write cached file: " + cachedFile, e );
}
catch ( FileNotFoundException e ) {
log.warn( "I/O reported error writing cached file : " + cachedFile.getPath(), e );
}
}
add( doc );
return this;
}
catch ( InvalidMappingException e ) {
throw e;
}
catch ( MappingNotFoundException e ) {
throw e;
}
catch ( Exception e ) {
throw new InvalidMappingException( "file", xmlFile.toString(), e );
}
|
public org.hibernate.cfg.Configuration | addCacheableFile(java.lang.String xmlFile)Add a cacheable mapping file.
return addCacheableFile( new File( xmlFile ) );
|
public org.hibernate.cfg.Configuration | addClass(java.lang.Class persistentClass)Read a mapping as an application resource using the convention that a class
named foo.bar.Foo is mapped by a file foo/bar/Foo.hbm.xml
which can be resolved as a classpath resource.
String mappingResourceName = persistentClass.getName().replace( '.', '/' ) + ".hbm.xml";
log.info( "Reading mappings from resource: " + mappingResourceName );
return addResource( mappingResourceName, persistentClass.getClassLoader() );
|
public org.hibernate.cfg.Configuration | addDirectory(java.io.File dir)Read all mapping documents from a directory tree.
Assumes that any file named *.hbm.xml is a mapping document.
File[] files = dir.listFiles();
for ( int i = 0; i < files.length ; i++ ) {
if ( files[i].isDirectory() ) {
addDirectory( files[i] );
}
else if ( files[i].getName().endsWith( ".hbm.xml" ) ) {
addFile( files[i] );
}
}
return this;
|
public org.hibernate.cfg.Configuration | addDocument(org.w3c.dom.Document doc)Read mappings from a DOM Document
if ( log.isDebugEnabled() ) {
log.debug( "Mapping document:\n" + doc );
}
add( xmlHelper.createDOMReader().read( doc ) );
return this;
|
public org.hibernate.cfg.Configuration | addFile(java.lang.String xmlFile)Read mappings from a particular XML file
return addFile( new File( xmlFile ) );
|
public org.hibernate.cfg.Configuration | addFile(java.io.File xmlFile)Read mappings from a particular XML file
log.info( "Reading mappings from file: " + xmlFile.getPath() );
if ( !xmlFile.exists() ) {
throw new MappingNotFoundException( "file", xmlFile.toString() );
}
try {
List errors = new ArrayList();
org.dom4j.Document doc = xmlHelper.createSAXReader( xmlFile.toString(), errors, entityResolver ).read( xmlFile );
if ( errors.size() != 0 ) {
throw new InvalidMappingException( "file", xmlFile.toString(), ( Throwable ) errors.get( 0 ) );
}
add( doc );
return this;
}
catch ( InvalidMappingException e ) {
throw e;
}
catch ( MappingNotFoundException e ) {
throw e;
}
catch ( Exception e ) {
throw new InvalidMappingException( "file", xmlFile.toString(), e );
}
|
public void | addFilterDefinition(org.hibernate.engine.FilterDefinition definition)
filterDefinitions.put( definition.getFilterName(), definition );
|
public org.hibernate.cfg.Configuration | addInputStream(java.io.InputStream xmlInputStream)Read mappings from an {@link java.io.InputStream}.
try {
List errors = new ArrayList();
org.dom4j.Document doc = xmlHelper.createSAXReader( "XML InputStream", errors, entityResolver )
.read( new InputSource( xmlInputStream ) );
if ( errors.size() != 0 ) {
throw new InvalidMappingException( "invalid mapping", null, (Throwable) errors.get( 0 ) );
}
add( doc );
return this;
}
catch (DocumentException e) {
throw new InvalidMappingException( "input stream", null, e );
}
finally {
try {
xmlInputStream.close();
}
catch (IOException ioe) {
log.warn( "Could not close input stream", ioe );
}
}
|
public org.hibernate.cfg.Configuration | addJar(java.io.File jar)Read all mappings from a jar file
Assumes that any file named *.hbm.xml is a mapping document.
log.info( "Searching for mapping documents in jar: " + jar.getName() );
JarFile jarFile = null;
try {
try {
jarFile = new JarFile( jar );
}
catch (IOException ioe) {
throw new InvalidMappingException(
"Could not read mapping documents from jar: " + jar.getName(), "jar", jar.getName(),
ioe
);
}
Enumeration jarEntries = jarFile.entries();
while ( jarEntries.hasMoreElements() ) {
ZipEntry ze = (ZipEntry) jarEntries.nextElement();
if ( ze.getName().endsWith( ".hbm.xml" ) ) {
log.info( "Found mapping document in jar: " + ze.getName() );
try {
addInputStream( jarFile.getInputStream( ze ) );
}
catch (Exception e) {
throw new InvalidMappingException(
"Could not read mapping documents from jar: " + jar.getName(),
"jar",
jar.getName(),
e
);
}
}
}
}
finally {
try {
if ( jarFile != null ) {
jarFile.close();
}
}
catch (IOException ioe) {
log.error("could not close jar", ioe);
}
}
return this;
|
public org.hibernate.cfg.Configuration | addProperties(java.util.Properties extraProperties)Set the given properties
this.properties.putAll( extraProperties );
return this;
|
private void | addProperties(org.dom4j.Element parent)
Iterator iter = parent.elementIterator( "property" );
while ( iter.hasNext() ) {
Element node = (Element) iter.next();
String name = node.attributeValue( "name" );
String value = node.getText().trim();
log.debug( name + "=" + value );
properties.setProperty( name, value );
if ( !name.startsWith( "hibernate" ) ) {
properties.setProperty( "hibernate." + name, value );
}
}
Environment.verifyProperties( properties );
|
public org.hibernate.cfg.Configuration | addResource(java.lang.String resourceName, java.lang.ClassLoader classLoader)Read mappings as an application resource (i.e. classpath lookup).
log.info( "Reading mappings from resource: " + resourceName );
InputStream rsrc = classLoader.getResourceAsStream( resourceName );
if ( rsrc == null ) {
throw new MappingNotFoundException( "resource", resourceName );
}
try {
return addInputStream( rsrc );
}
catch (MappingException me) {
throw new InvalidMappingException( "resource", resourceName, me );
}
|
public org.hibernate.cfg.Configuration | addResource(java.lang.String resourceName)Read mappings as an application resource (i.e. classpath lookup)
trying different classloaders.
log.info( "Reading mappings from resource : " + resourceName );
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
InputStream rsrc = null;
if (contextClassLoader!=null) {
rsrc = contextClassLoader.getResourceAsStream( resourceName );
}
if ( rsrc == null ) {
rsrc = Environment.class.getClassLoader().getResourceAsStream( resourceName );
}
if ( rsrc == null ) {
throw new MappingNotFoundException( "resource", resourceName );
}
try {
return addInputStream( rsrc );
}
catch (MappingException me) {
throw new InvalidMappingException( "resource", resourceName, me );
}
|
public void | addSqlFunction(java.lang.String functionName, org.hibernate.dialect.function.SQLFunction function)
sqlFunctions.put( functionName, function );
|
public org.hibernate.cfg.Configuration | addURL(java.net.URL url)Read mappings from a URL
if ( log.isDebugEnabled() ) {
log.debug( "Reading mapping document from URL:" + url.toExternalForm() );
}
try {
addInputStream( url.openStream() );
}
catch ( InvalidMappingException e ) {
throw new InvalidMappingException( "URL", url.toExternalForm(), e.getCause() );
}
catch (Exception e) {
throw new InvalidMappingException( "URL", url.toExternalForm(), e );
}
return this;
|
public org.hibernate.cfg.Configuration | addXML(java.lang.String xml)Read mappings from a String
if ( log.isDebugEnabled() ) {
log.debug( "Mapping XML:\n" + xml );
}
try {
List errors = new ArrayList();
org.dom4j.Document doc = xmlHelper.createSAXReader( "XML String", errors, entityResolver )
.read( new StringReader( xml ) );
if ( errors.size() != 0 ) {
throw new MappingException( "invalid mapping", (Throwable) errors.get( 0 ) );
}
add( doc );
}
catch (DocumentException e) {
throw new MappingException( "Could not parse mapping document in XML string", e );
}
return this;
|
public org.hibernate.engine.Mapping | buildMapping()
return new Mapping() {
/**
* Returns the identifier type of a mapped class
*/
public Type getIdentifierType(String persistentClass) throws MappingException {
PersistentClass pc = ( (PersistentClass) classes.get( persistentClass ) );
if ( pc == null ) {
throw new MappingException( "persistent class not known: " + persistentClass );
}
return pc.getIdentifier().getType();
}
public String getIdentifierPropertyName(String persistentClass) throws MappingException {
final PersistentClass pc = (PersistentClass) classes.get( persistentClass );
if ( pc == null ) {
throw new MappingException( "persistent class not known: " + persistentClass );
}
if ( !pc.hasIdentifierProperty() ) {
return null;
}
return pc.getIdentifierProperty().getName();
}
public Type getReferencedPropertyType(String persistentClass, String propertyName) throws MappingException {
final PersistentClass pc = (PersistentClass) classes.get( persistentClass );
if ( pc == null ) {
throw new MappingException( "persistent class not known: " + persistentClass );
}
Property prop = pc.getReferencedProperty( propertyName );
if ( prop == null ) {
throw new MappingException(
"property not known: " +
persistentClass + '.' + propertyName
);
}
return prop.getType();
}
};
|
public void | buildMappings()Call this to ensure the mappings are fully compiled/built. Useful to ensure getting
access to all information in the metamodel when calling e.g. getClassMappings().
secondPassCompile();
|
public org.hibernate.SessionFactory | buildSessionFactory()Instantiate a new SessionFactory, using the properties and
mappings in this configuration. The SessionFactory will be
immutable, so changes made to the Configuration after
building the SessionFactory will not affect it.
log.debug( "Preparing to build session factory with filters : " + filterDefinitions );
secondPassCompile();
validate();
Environment.verifyProperties( properties );
Properties copy = new Properties();
copy.putAll( properties );
PropertiesHelper.resolvePlaceHolders( copy );
Settings settings = buildSettings( copy );
return new SessionFactoryImpl(
this,
mapping,
settings,
getInitializedEventListeners()
);
|
public Settings | buildSettings()Create an object-oriented view of the configuration properties
Properties clone = ( Properties ) properties.clone();
PropertiesHelper.resolvePlaceHolders( clone );
return settingsFactory.buildSettings( clone );
|
public Settings | buildSettings(java.util.Properties props)
return settingsFactory.buildSettings( props );
|
public org.hibernate.cfg.Configuration | configure()Use the mappings and properties specified in an application
resource named hibernate.cfg.xml.
configure( "/hibernate.cfg.xml" );
return this;
|
public org.hibernate.cfg.Configuration | configure(java.lang.String resource)Use the mappings and properties specified in the given application
resource. The format of the resource is defined in
hibernate-configuration-3.0.dtd.
The resource is found via getConfigurationInputStream(resource).
log.info( "configuring from resource: " + resource );
InputStream stream = getConfigurationInputStream( resource );
return doConfigure( stream, resource );
|
public org.hibernate.cfg.Configuration | configure(java.net.URL url)Use the mappings and properties specified in the given document.
The format of the document is defined in
hibernate-configuration-3.0.dtd.
log.info( "configuring from url: " + url.toString() );
try {
return doConfigure( url.openStream(), url.toString() );
}
catch (IOException ioe) {
throw new HibernateException( "could not configure from URL: " + url, ioe );
}
|
public org.hibernate.cfg.Configuration | configure(java.io.File configFile)Use the mappings and properties specified in the given application
file. The format of the file is defined in
hibernate-configuration-3.0.dtd.
log.info( "configuring from file: " + configFile.getName() );
try {
return doConfigure( new FileInputStream( configFile ), configFile.toString() );
}
catch (FileNotFoundException fnfe) {
throw new HibernateException( "could not find file: " + configFile, fnfe );
}
|
public org.hibernate.cfg.Configuration | configure(org.w3c.dom.Document document)Use the mappings and properties specified in the given XML document.
The format of the file is defined in
hibernate-configuration-3.0.dtd.
log.info( "configuring from XML document" );
return doConfigure( xmlHelper.createDOMReader().read( document ) );
|
public Mappings | createMappings()Create a new Mappings to add class and collection
mappings to.
return new Mappings(
classes,
collections,
tables,
namedQueries,
namedSqlQueries,
sqlResultSetMappings,
imports,
secondPasses,
propertyReferences,
namingStrategy,
typeDefs,
filterDefinitions,
extendsQueue,
auxiliaryDatabaseObjects,
tableNameBinding,
columnNameBindingPerTable
);
|
protected org.hibernate.cfg.Configuration | doConfigure(java.io.InputStream stream, java.lang.String resourceName)Use the mappings and properties specified in the given application
resource. The format of the resource is defined in
hibernate-configuration-3.0.dtd.
org.dom4j.Document doc;
try {
List errors = new ArrayList();
doc = xmlHelper.createSAXReader( resourceName, errors, entityResolver )
.read( new InputSource( stream ) );
if ( errors.size() != 0 ) {
throw new MappingException(
"invalid configuration",
(Throwable) errors.get( 0 )
);
}
}
catch (DocumentException e) {
throw new HibernateException(
"Could not parse configuration: " + resourceName,
e
);
}
finally {
try {
stream.close();
}
catch (IOException ioe) {
log.warn( "could not close input stream for: " + resourceName, ioe );
}
}
return doConfigure( doc );
|
protected org.hibernate.cfg.Configuration | doConfigure(org.dom4j.Document doc)
Element sfNode = doc.getRootElement().element( "session-factory" );
String name = sfNode.attributeValue( "name" );
if ( name != null ) {
properties.setProperty( Environment.SESSION_FACTORY_NAME, name );
}
addProperties( sfNode );
parseSessionFactory( sfNode, name );
Element secNode = doc.getRootElement().element( "security" );
if ( secNode != null ) {
parseSecurity( secNode );
}
log.info( "Configured SessionFactory: " + name );
log.debug( "properties: " + properties );
return this;
|
protected org.dom4j.Document | findPossibleExtends()Find the first possible element in the queue of extends.
// Iterator iter = extendsQueue.iterator();
Iterator iter = extendsQueue.keySet().iterator();
while ( iter.hasNext() ) {
final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iter.next();
if ( getClassMapping( entry.getExplicitName() ) != null ) {
// found
iter.remove();
return entry.getDocument();
}
else if ( getClassMapping( HbmBinder.getClassName( entry.getExplicitName(), entry.getMappingPackage() ) ) != null ) {
// found
iter.remove();
return entry.getDocument();
}
}
return null;
|
public java.lang.String[] | generateDropSchemaScript(org.hibernate.dialect.Dialect dialect)Generate DDL for dropping tables
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
ArrayList script = new ArrayList( 50 );
// drop them in reverse order in case db needs it done that way...
ListIterator itr = auxiliaryDatabaseObjects.listIterator( auxiliaryDatabaseObjects.size() );
while ( itr.hasPrevious() ) {
AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.previous();
if ( object.appliesToDialect( dialect ) ) {
script.add( object.sqlDropString( dialect, defaultCatalog, defaultSchema ) );
}
}
if ( dialect.dropConstraints() ) {
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
Iterator subIter = table.getForeignKeyIterator();
while ( subIter.hasNext() ) {
ForeignKey fk = (ForeignKey) subIter.next();
if ( fk.isPhysicalConstraint() ) {
script.add(
fk.sqlDropString(
dialect,
defaultCatalog,
defaultSchema
)
);
}
}
}
}
}
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
/*Iterator subIter = table.getIndexIterator();
while ( subIter.hasNext() ) {
Index index = (Index) subIter.next();
if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) {
script.add( index.sqlDropString(dialect) );
}
}*/
script.add(
table.sqlDropString(
dialect,
defaultCatalog,
defaultSchema
)
);
}
}
iter = iterateGenerators( dialect );
while ( iter.hasNext() ) {
String[] lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlDropStrings( dialect );
for ( int i = 0; i < lines.length ; i++ ) {
script.add( lines[i] );
}
}
return ArrayHelper.toStringArray( script );
|
public java.lang.String[] | generateSchemaCreationScript(org.hibernate.dialect.Dialect dialect)Generate DDL for creating tables
secondPassCompile();
ArrayList script = new ArrayList( 50 );
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
script.add(
table.sqlCreateString(
dialect,
mapping,
defaultCatalog,
defaultSchema
)
);
Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
while ( comments.hasNext() ) {
script.add( comments.next() );
}
}
}
iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
if ( !dialect.supportsUniqueConstraintInCreateAlterTable() ) {
Iterator subIter = table.getUniqueKeyIterator();
while ( subIter.hasNext() ) {
UniqueKey uk = (UniqueKey) subIter.next();
String constraintString = uk.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema );
if (constraintString != null) script.add( constraintString );
}
}
Iterator subIter = table.getIndexIterator();
while ( subIter.hasNext() ) {
Index index = (Index) subIter.next();
script.add(
index.sqlCreateString(
dialect,
mapping,
defaultCatalog,
defaultSchema
)
);
}
if ( dialect.hasAlterTable() ) {
subIter = table.getForeignKeyIterator();
while ( subIter.hasNext() ) {
ForeignKey fk = (ForeignKey) subIter.next();
if ( fk.isPhysicalConstraint() ) {
script.add(
fk.sqlCreateString(
dialect, mapping,
defaultCatalog,
defaultSchema
)
);
}
}
}
}
}
iter = iterateGenerators( dialect );
while ( iter.hasNext() ) {
String[] lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlCreateStrings( dialect );
for ( int i = 0; i < lines.length ; i++ ) {
script.add( lines[i] );
}
}
Iterator itr = auxiliaryDatabaseObjects.iterator();
while ( itr.hasNext() ) {
AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.next();
if ( object.appliesToDialect( dialect ) ) {
script.add( object.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema ) );
}
}
return ArrayHelper.toStringArray( script );
|
public java.lang.String[] | generateSchemaUpdateScript(org.hibernate.dialect.Dialect dialect, org.hibernate.tool.hbm2ddl.DatabaseMetadata databaseMetadata)Generate DDL for altering tables
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
ArrayList script = new ArrayList( 50 );
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
( table.getSchema() == null ) ? defaultSchema : table.getSchema(),
( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog(),
table.isQuoted()
);
if ( tableInfo == null ) {
script.add(
table.sqlCreateString(
dialect,
mapping,
defaultCatalog,
defaultSchema
)
);
}
else {
Iterator subiter = table.sqlAlterStrings(
dialect,
mapping,
tableInfo,
defaultCatalog,
defaultSchema
);
while ( subiter.hasNext() ) {
script.add( subiter.next() );
}
}
Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
while ( comments.hasNext() ) {
script.add( comments.next() );
}
}
}
iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
table.getSchema(),
table.getCatalog(),
table.isQuoted()
);
if ( dialect.hasAlterTable() ) {
Iterator subIter = table.getForeignKeyIterator();
while ( subIter.hasNext() ) {
ForeignKey fk = (ForeignKey) subIter.next();
if ( fk.isPhysicalConstraint() ) {
boolean create = tableInfo == null || (
tableInfo.getForeignKeyMetadata( fk.getName() ) == null && (
//Icky workaround for MySQL bug:
!( dialect instanceof MySQLDialect ) ||
tableInfo.getIndexMetadata( fk.getName() ) == null
)
);
if ( create ) {
script.add(
fk.sqlCreateString(
dialect,
mapping,
defaultCatalog,
defaultSchema
)
);
}
}
}
}
}
/*//broken, 'cos we don't generate these with names in SchemaExport
subIter = table.getIndexIterator();
while ( subIter.hasNext() ) {
Index index = (Index) subIter.next();
if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) {
if ( tableInfo==null || tableInfo.getIndexMetadata( index.getFilterName() ) == null ) {
script.add( index.sqlCreateString(dialect, mapping) );
}
}
}
//broken, 'cos we don't generate these with names in SchemaExport
subIter = table.getUniqueKeyIterator();
while ( subIter.hasNext() ) {
UniqueKey uk = (UniqueKey) subIter.next();
if ( tableInfo==null || tableInfo.getIndexMetadata( uk.getFilterName() ) == null ) {
script.add( uk.sqlCreateString(dialect, mapping) );
}
}*/
}
iter = iterateGenerators( dialect );
while ( iter.hasNext() ) {
PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next();
Object key = generator.generatorKey();
if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) {
String[] lines = generator.sqlCreateStrings( dialect );
for ( int i = 0; i < lines.length ; i++ ) {
script.add( lines[i] );
}
}
}
return ArrayHelper.toStringArray( script );
|
public org.hibernate.mapping.PersistentClass | getClassMapping(java.lang.String entityName)Get the mapping for a particular entity
return (PersistentClass) classes.get( entityName );
|
public java.util.Iterator | getClassMappings()Iterate the entity mappings
return classes.values().iterator();
|
public org.hibernate.mapping.Collection | getCollectionMapping(java.lang.String role)Get the mapping for a particular collection role
return (Collection) collections.get( role );
|
public java.util.Iterator | getCollectionMappings()Iterate the collection mappings
return collections.values().iterator();
|
protected java.io.InputStream | getConfigurationInputStream(java.lang.String resource)Get the configuration file as an InputStream. Might be overridden
by subclasses to allow the configuration to be located by some arbitrary
mechanism.
log.info( "Configuration resource: " + resource );
return ConfigHelper.getResourceAsStream( resource );
|
public org.hibernate.proxy.EntityNotFoundDelegate | getEntityNotFoundDelegate()Retrieve the user-supplied delegate to handle non-existent entity
scenarios. May be null.
return entityNotFoundDelegate;
|
public org.xml.sax.EntityResolver | getEntityResolver()
return entityResolver;
|
public org.hibernate.event.EventListeners | getEventListeners()
return eventListeners;
|
public java.util.Map | getFilterDefinitions()
return filterDefinitions;
|
public java.util.Map | getImports()Get the query language imports
return imports;
|
private org.hibernate.event.EventListeners | getInitializedEventListeners()
EventListeners result = (EventListeners) eventListeners.shallowCopy();
result.initializeListeners( this );
return result;
|
public org.hibernate.Interceptor | getInterceptor()Return the configured Interceptor
return interceptor;
|
public java.util.Map | getNamedQueries()Get the named queries
return namedQueries;
|
public java.util.Map | getNamedSQLQueries()
return namedSqlQueries;
|
public NamingStrategy | getNamingStrategy()
return namingStrategy;
|
public java.util.Properties | getProperties()Get all properties
return properties;
|
public java.lang.String | getProperty(java.lang.String propertyName)Get a property
return properties.getProperty( propertyName );
|
org.hibernate.mapping.RootClass | getRootClassMapping(java.lang.String clazz)
try {
return (RootClass) getClassMapping( clazz );
}
catch (ClassCastException cce) {
throw new MappingException( "You may only specify a cache for root <class> mappings" );
}
|
public java.util.Map | getSqlFunctions()
return sqlFunctions;
|
public java.util.Map | getSqlResultSetMappings()
return sqlResultSetMappings;
|
public java.util.Iterator | getTableMappings()Iterate the table mappings
return tables.values().iterator();
|
private java.util.Iterator | iterateGenerators(org.hibernate.dialect.Dialect dialect)
TreeMap generators = new TreeMap();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
Iterator iter = classes.values().iterator();
while ( iter.hasNext() ) {
PersistentClass pc = (PersistentClass) iter.next();
if ( !pc.isInherited() ) {
IdentifierGenerator ig = pc.getIdentifier()
.createIdentifierGenerator(
dialect,
defaultCatalog,
defaultSchema,
(RootClass) pc
);
if ( ig instanceof PersistentIdentifierGenerator ) {
generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig );
}
}
}
iter = collections.values().iterator();
while ( iter.hasNext() ) {
Collection collection = (Collection) iter.next();
if ( collection.isIdentified() ) {
IdentifierGenerator ig = ( (IdentifierCollection) collection ).getIdentifier()
.createIdentifierGenerator(
dialect,
defaultCatalog,
defaultSchema,
null
);
if ( ig instanceof PersistentIdentifierGenerator ) {
generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig );
}
}
}
return generators.values().iterator();
|
public org.hibernate.cfg.Configuration | mergeProperties(java.util.Properties properties)Adds the incoming properties to the internal properties structure,
as long as the internal structure does not already contain an
entry for the given key.
Iterator itr = properties.entrySet().iterator();
while ( itr.hasNext() ) {
final Map.Entry entry = ( Map.Entry ) itr.next();
if ( this.properties.containsKey( entry.getKey() ) ) {
continue;
}
this.properties.setProperty( ( String ) entry.getKey(), ( String ) entry.getValue() );
}
return this;
|
private void | parseEvent(org.dom4j.Element element)
String type = element.attributeValue( "type" );
List listeners = element.elements();
String[] listenerClasses = new String[ listeners.size() ];
for ( int i = 0; i < listeners.size() ; i++ ) {
listenerClasses[i] = ( (Element) listeners.get( i ) ).attributeValue( "class" );
}
log.debug( "Event listeners: " + type + "=" + StringHelper.toString( listenerClasses ) );
setListeners( type, listenerClasses );
|
private void | parseListener(org.dom4j.Element element)
String type = element.attributeValue( "type" );
if ( type == null ) {
throw new MappingException( "No type specified for listener" );
}
String impl = element.attributeValue( "class" );
log.debug( "Event listener: " + type + "=" + impl );
setListeners( type, new String[]{impl} );
|
protected void | parseMappingElement(org.dom4j.Element subelement, java.lang.String name)
Attribute rsrc = subelement.attribute( "resource" );
Attribute file = subelement.attribute( "file" );
Attribute jar = subelement.attribute( "jar" );
Attribute pkg = subelement.attribute( "package" );
Attribute clazz = subelement.attribute( "class" );
if ( rsrc != null ) {
log.debug( name + "<-" + rsrc );
addResource( rsrc.getValue() );
}
else if ( jar != null ) {
log.debug( name + "<-" + jar );
addJar( new File( jar.getValue() ) );
}
else if ( pkg != null ) {
throw new MappingException(
"An AnnotationConfiguration instance is required to use <mapping package=\"" +
pkg.getValue() + "\"/>"
);
}
else if ( clazz != null ) {
throw new MappingException(
"An AnnotationConfiguration instance is required to use <mapping class=\"" +
clazz.getValue() + "\"/>"
);
}
else {
if ( file == null ) {
throw new MappingException(
"<mapping> element in configuration specifies no attributes"
);
}
log.debug( name + "<-" + file );
addFile( file.getValue() );
}
|
private void | parseSecurity(org.dom4j.Element secNode)
String contextId = secNode.attributeValue( "context" );
setProperty(Environment.JACC_CONTEXTID, contextId);
log.info( "JACC contextID: " + contextId );
JACCConfiguration jcfg = new JACCConfiguration( contextId );
Iterator grantElements = secNode.elementIterator();
while ( grantElements.hasNext() ) {
Element grantElement = (Element) grantElements.next();
String elementName = grantElement.getName();
if ( "grant".equals( elementName ) ) {
jcfg.addPermission(
grantElement.attributeValue( "role" ),
grantElement.attributeValue( "entity-name" ),
grantElement.attributeValue( "actions" )
);
}
}
|
private void | parseSessionFactory(org.dom4j.Element sfNode, java.lang.String name)
Iterator elements = sfNode.elementIterator();
while ( elements.hasNext() ) {
Element subelement = (Element) elements.next();
String subelementName = subelement.getName();
if ( "mapping".equals( subelementName ) ) {
parseMappingElement( subelement, name );
}
else if ( "class-cache".equals( subelementName ) ) {
String className = subelement.attributeValue( "class" );
Attribute regionNode = subelement.attribute( "region" );
final String region = ( regionNode == null ) ? className : regionNode.getValue();
boolean includeLazy = !"non-lazy".equals( subelement.attributeValue( "include" ) );
setCacheConcurrencyStrategy( className, subelement.attributeValue( "usage" ), region, includeLazy );
}
else if ( "collection-cache".equals( subelementName ) ) {
String role = subelement.attributeValue( "collection" );
Attribute regionNode = subelement.attribute( "region" );
final String region = ( regionNode == null ) ? role : regionNode.getValue();
setCollectionCacheConcurrencyStrategy( role, subelement.attributeValue( "usage" ), region );
}
else if ( "listener".equals( subelementName ) ) {
parseListener( subelement );
}
else if ( "event".equals( subelementName ) ) {
parseEvent( subelement );
}
}
|
private void | processExtendsQueue()Try to empty the extends queue.
// todo : would love to have this work on a notification basis
// where the successful binding of an entity/subclass would
// emit a notification which the extendsQueue entries could
// react to...
// Drain the queue: each successful bind of a queued document may make
// further queued documents bindable, so keep asking until none qualify.
for ( org.dom4j.Document bindable = findPossibleExtends(); bindable != null; bindable = findPossibleExtends() ) {
	add( bindable );
}
// Anything still queued names a superclass that was never mapped.
if ( !extendsQueue.isEmpty() ) {
	// Iterator iterator = extendsQueue.iterator();
	StringBuffer message = new StringBuffer( "Following superclasses referenced in extends not found: " );
	Iterator entries = extendsQueue.keySet().iterator();
	while ( entries.hasNext() ) {
		final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) entries.next();
		message.append( entry.getExplicitName() );
		if ( entry.getMappingPackage() != null ) {
			message.append( "[" ).append( entry.getMappingPackage() ).append( "]" );
		}
		if ( entries.hasNext() ) {
			message.append( "," );
		}
	}
	throw new MappingException( message.toString() );
}
|
private void | readObject(java.io.ObjectInputStream ois)
// Custom deserialization hook: after restoring the default serialized
// state, rebuild the members that do not travel through serialization.
ois.defaultReadObject();
this.mapping = buildMapping();
xmlHelper = new XMLHelper();
|
protected void | reset()
// Re-initialize all mapping and configuration state to empty defaults so
// the Configuration can be populated from scratch.
classes = new HashMap();
imports = new HashMap();
collections = new HashMap();
// TreeMap keeps table mappings in a stable sorted order.
tables = new TreeMap();
namedQueries = new HashMap();
namedSqlQueries = new HashMap();
sqlResultSetMappings = new HashMap();
xmlHelper = new XMLHelper();
typeDefs = new HashMap();
propertyReferences = new ArrayList();
secondPasses = new ArrayList();
interceptor = EmptyInterceptor.INSTANCE;
properties = Environment.getProperties();
entityResolver = XMLHelper.DEFAULT_DTD_RESOLVER;
eventListeners = new EventListeners();
filterDefinitions = new HashMap();
// extendsQueue = new ArrayList();
// Map used as a set: keys are ExtendsQueueEntry instances (see the
// keySet() iteration in processExtendsQueue()).
extendsQueue = new HashMap();
auxiliaryDatabaseObjects = new ArrayList();
tableNameBinding = new HashMap();
columnNameBindingPerTable = new HashMap();
namingStrategy = DefaultNamingStrategy.INSTANCE;
sqlFunctions = new HashMap();
|
protected void | secondPassCompile()
// Resolve everything that could not be bound on the first mapping pass.
// The order of the phases below is significant: extends targets first,
// then non-query second passes, then the remaining (query/resultset)
// second passes, then property references, and finally foreign keys.
log.debug( "processing extends queue" );
processExtendsQueue();
log.debug( "processing collection mappings" );
Iterator iter = secondPasses.iterator();
while ( iter.hasNext() ) {
SecondPass sp = (SecondPass) iter.next();
// Query second passes are deferred to the next loop; everything else
// runs now and is removed from the list.
if ( ! (sp instanceof QuerySecondPass) ) {
sp.doSecondPass( classes );
iter.remove();
}
}
log.debug( "processing native query and ResultSetMapping mappings" );
// Only QuerySecondPass instances remain at this point.
iter = secondPasses.iterator();
while ( iter.hasNext() ) {
SecondPass sp = (SecondPass) iter.next();
sp.doSecondPass( classes );
iter.remove();
}
log.debug( "processing association property references" );
iter = propertyReferences.iterator();
while ( iter.hasNext() ) {
Mappings.PropertyReference upr = (Mappings.PropertyReference) iter.next();
PersistentClass clazz = getClassMapping( upr.referencedClass );
if ( clazz == null ) {
throw new MappingException(
"property-ref to unmapped class: " +
upr.referencedClass
);
}
Property prop = clazz.getReferencedProperty( upr.propertyName );
// A unique property-ref implies an alternate unique key on the column(s).
if ( upr.unique ) {
( (SimpleValue) prop.getValue() ).setAlternateUniqueKey( true );
}
}
//TODO: Somehow add the newly created foreign keys to the internal collection
log.debug( "processing foreign key constraints" );
iter = getTableMappings();
// 'done' prevents the same foreign key from being processed twice across
// the recursive calls in secondPassCompileForeignKeys().
Set done = new HashSet();
while ( iter.hasNext() ) {
secondPassCompileForeignKeys( (Table) iter.next(), done );
}
|
protected void | secondPassCompileForeignKeys(org.hibernate.mapping.Table table, java.util.Set done)
// Create the foreign keys of the given table and resolve each one to its
// referenced table. 'done' accumulates already-processed ForeignKey
// instances so the recursion below never handles one twice.
table.createForeignKeys();
Iterator iter = table.getForeignKeyIterator();
while ( iter.hasNext() ) {
ForeignKey fk = (ForeignKey) iter.next();
if ( !done.contains( fk ) ) {
done.add( fk );
final String referencedEntityName = fk.getReferencedEntityName();
if ( referencedEntityName == null ) {
throw new MappingException(
"An association from the table " +
fk.getTable().getName() +
" does not specify the referenced entity"
);
}
if ( log.isDebugEnabled() ) {
log.debug( "resolving reference to class: " + referencedEntityName );
}
PersistentClass referencedClass = (PersistentClass) classes.get( referencedEntityName );
if ( referencedClass == null ) {
throw new MappingException(
"An association from the table " +
fk.getTable().getName() +
" refers to an unmapped class: " +
referencedEntityName
);
}
// For a joined subclass, process the superclass table's keys first so
// the referenced key columns exist before this key is aligned.
if ( referencedClass.isJoinedSubclass() ) {
secondPassCompileForeignKeys( referencedClass.getSuperclass().getTable(), done );
}
fk.setReferencedTable( referencedClass.getTable() );
fk.alignColumns();
}
}
|
public org.hibernate.cfg.Configuration | setCacheConcurrencyStrategy(java.lang.String clazz, java.lang.String concurrencyStrategy)Set up a cache for an entity class
// Convenience overload: the cache region defaults to the class name.
// Returns this Configuration to allow method chaining.
setCacheConcurrencyStrategy( clazz, concurrencyStrategy, clazz );
return this;
|
public void | setCacheConcurrencyStrategy(java.lang.String clazz, java.lang.String concurrencyStrategy, java.lang.String region)
// Convenience overload: lazy properties are treated as cacheable by default.
setCacheConcurrencyStrategy( clazz, concurrencyStrategy, region, true );
|
void | setCacheConcurrencyStrategy(java.lang.String clazz, java.lang.String concurrencyStrategy, java.lang.String region, boolean includeLazy)
// Apply cache settings to the root class mapping of the given entity.
// The entity's mapping must already have been added, otherwise this fails.
RootClass rootClass = getRootClassMapping( clazz );
if ( rootClass == null ) {
throw new MappingException( "Cannot cache an unknown entity: " + clazz );
}
rootClass.setCacheConcurrencyStrategy( concurrencyStrategy );
rootClass.setCacheRegionName( region );
rootClass.setLazyPropertiesCacheable( includeLazy );
|
public org.hibernate.cfg.Configuration | setCollectionCacheConcurrencyStrategy(java.lang.String collectionRole, java.lang.String concurrencyStrategy)Set up a cache for a collection role
// Convenience overload: the cache region defaults to the collection role.
// Returns this Configuration to allow method chaining.
setCollectionCacheConcurrencyStrategy( collectionRole, concurrencyStrategy, collectionRole );
return this;
|
public void | setCollectionCacheConcurrencyStrategy(java.lang.String collectionRole, java.lang.String concurrencyStrategy, java.lang.String region)
// Apply cache settings to the mapping for the given collection role.
// The collection's mapping must already have been added, otherwise this fails.
Collection collection = getCollectionMapping( collectionRole );
if ( collection == null ) {
throw new MappingException( "Cannot cache an unknown collection: " + collectionRole );
}
collection.setCacheConcurrencyStrategy( concurrencyStrategy );
collection.setCacheRegionName( region );
|
public void | setEntityNotFoundDelegate(org.hibernate.proxy.EntityNotFoundDelegate entityNotFoundDelegate)Specify a user-supplied delegate to be used to handle scenarios where an entity could not be
located by specified id. This is mainly intended for EJB3 implementations to be able to
control how proxy initialization errors should be handled...
// Simple setter: store the delegate for later use.
this.entityNotFoundDelegate = entityNotFoundDelegate;
|
public void | setEntityResolver(org.xml.sax.EntityResolver entityResolver)Set a custom entity resolver. This entity resolver must be
set before addXXX(misc) call.
Default value is {@link org.hibernate.util.DTDEntityResolver}
// Simple setter: must be called before any addXXX(...) mapping call so
// the XML parsing performed there uses this resolver.
this.entityResolver = entityResolver;
|
public org.hibernate.cfg.Configuration | setInterceptor(org.hibernate.Interceptor interceptor)Configure an Interceptor
// Simple setter; returns this Configuration to allow method chaining.
this.interceptor = interceptor;
return this;
|
public void | setListener(java.lang.String type, java.lang.Object listener)
// Register a single listener instance for the given event type, or reset
// the type to an empty listener array when 'listener' is null.
if ( listener == null ) {
	// BUG FIX: the original called setListener( type, null ) here, i.e.
	// itself, causing infinite recursion (StackOverflowError). Delegate
	// to the array overload instead; the (Object[]) cast selects the
	// Object[] overload over the String[] one.
	setListeners( type, (Object[]) null );
}
else {
	// Build a one-element array of the listener interface type expected
	// for this event, then register it through the array overload.
	Object[] listeners = (Object[]) Array.newInstance( eventListeners.getListenerClassFor(type), 1 );
	listeners[0] = listener;
	setListeners( type, listeners );
}
|
public void | setListeners(java.lang.String type, java.lang.String[] listenerClasses)
// Instantiate each named listener class via its no-arg constructor and
// register the resulting instances for the given event type.
Object[] instances = (Object[]) Array.newInstance( eventListeners.getListenerClassFor( type ), listenerClasses.length );
for ( int i = 0; i < instances.length; i++ ) {
	try {
		instances[i] = ReflectHelper.classForName( listenerClasses[i] ).newInstance();
	}
	catch ( Exception e ) {
		// Wrap any reflection failure, preserving the offending class name
		// and the original cause.
		throw new MappingException(
				"Unable to instantiate specified event (" + type + ") listener class: " + listenerClasses[i],
				e
		);
	}
}
setListeners( type, instances );
|
public void | setListeners(java.lang.String type, java.lang.Object[] listeners)
// Register the given listener instances for the named event type.
// Contract (per every branch below): a null 'listeners' array resets the
// event type to an empty listener array; otherwise the array is cast to
// the listener interface for that type and installed as-is.
// Note: "create" / "create-onflush" map to the persist / persist-on-flush
// listener groups, and "save", "update" and "save-update" all use
// SaveOrUpdateEventListener. Unrecognized types only log a warning.
if ( "auto-flush".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setAutoFlushEventListeners( new AutoFlushEventListener[]{} );
}
else {
eventListeners.setAutoFlushEventListeners( (AutoFlushEventListener[]) listeners );
}
}
else if ( "merge".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setMergeEventListeners( new MergeEventListener[]{} );
}
else {
eventListeners.setMergeEventListeners( (MergeEventListener[]) listeners );
}
}
else if ( "create".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPersistEventListeners( new PersistEventListener[]{} );
}
else {
eventListeners.setPersistEventListeners( (PersistEventListener[]) listeners );
}
}
else if ( "create-onflush".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPersistOnFlushEventListeners( new PersistEventListener[]{} );
}
else {
eventListeners.setPersistOnFlushEventListeners( (PersistEventListener[]) listeners );
}
}
else if ( "delete".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setDeleteEventListeners( new DeleteEventListener[]{} );
}
else {
eventListeners.setDeleteEventListeners( (DeleteEventListener[]) listeners );
}
}
else if ( "dirty-check".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setDirtyCheckEventListeners( new DirtyCheckEventListener[]{} );
}
else {
eventListeners.setDirtyCheckEventListeners( (DirtyCheckEventListener[]) listeners );
}
}
else if ( "evict".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setEvictEventListeners( new EvictEventListener[]{} );
}
else {
eventListeners.setEvictEventListeners( (EvictEventListener[]) listeners );
}
}
else if ( "flush".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setFlushEventListeners( new FlushEventListener[]{} );
}
else {
eventListeners.setFlushEventListeners( (FlushEventListener[]) listeners );
}
}
else if ( "flush-entity".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setFlushEntityEventListeners( new FlushEntityEventListener[]{} );
}
else {
eventListeners.setFlushEntityEventListeners( (FlushEntityEventListener[]) listeners );
}
}
else if ( "load".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setLoadEventListeners( new LoadEventListener[]{} );
}
else {
eventListeners.setLoadEventListeners( (LoadEventListener[]) listeners );
}
}
else if ( "load-collection".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setInitializeCollectionEventListeners(
new InitializeCollectionEventListener[]{}
);
}
else {
eventListeners.setInitializeCollectionEventListeners(
(InitializeCollectionEventListener[]) listeners
);
}
}
else if ( "lock".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setLockEventListeners( new LockEventListener[]{} );
}
else {
eventListeners.setLockEventListeners( (LockEventListener[]) listeners );
}
}
else if ( "refresh".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setRefreshEventListeners( new RefreshEventListener[]{} );
}
else {
eventListeners.setRefreshEventListeners( (RefreshEventListener[]) listeners );
}
}
else if ( "replicate".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setReplicateEventListeners( new ReplicateEventListener[]{} );
}
else {
eventListeners.setReplicateEventListeners( (ReplicateEventListener[]) listeners );
}
}
else if ( "save-update".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setSaveOrUpdateEventListeners( new SaveOrUpdateEventListener[]{} );
}
else {
eventListeners.setSaveOrUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners );
}
}
else if ( "save".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setSaveEventListeners( new SaveOrUpdateEventListener[]{} );
}
else {
eventListeners.setSaveEventListeners( (SaveOrUpdateEventListener[]) listeners );
}
}
else if ( "update".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setUpdateEventListeners( new SaveOrUpdateEventListener[]{} );
}
else {
eventListeners.setUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners );
}
}
else if ( "pre-load".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPreLoadEventListeners( new PreLoadEventListener[]{} );
}
else {
eventListeners.setPreLoadEventListeners( (PreLoadEventListener[]) listeners );
}
}
else if ( "pre-update".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPreUpdateEventListeners( new PreUpdateEventListener[]{} );
}
else {
eventListeners.setPreUpdateEventListeners( (PreUpdateEventListener[]) listeners );
}
}
else if ( "pre-delete".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPreDeleteEventListeners( new PreDeleteEventListener[]{} );
}
else {
eventListeners.setPreDeleteEventListeners( (PreDeleteEventListener[]) listeners );
}
}
else if ( "pre-insert".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPreInsertEventListeners( new PreInsertEventListener[]{} );
}
else {
eventListeners.setPreInsertEventListeners( (PreInsertEventListener[]) listeners );
}
}
else if ( "post-load".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostLoadEventListeners( new PostLoadEventListener[]{} );
}
else {
eventListeners.setPostLoadEventListeners( (PostLoadEventListener[]) listeners );
}
}
else if ( "post-update".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostUpdateEventListeners( new PostUpdateEventListener[]{} );
}
else {
eventListeners.setPostUpdateEventListeners( (PostUpdateEventListener[]) listeners );
}
}
else if ( "post-delete".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostDeleteEventListeners( new PostDeleteEventListener[]{} );
}
else {
eventListeners.setPostDeleteEventListeners( (PostDeleteEventListener[]) listeners );
}
}
else if ( "post-insert".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostInsertEventListeners( new PostInsertEventListener[]{} );
}
else {
eventListeners.setPostInsertEventListeners( (PostInsertEventListener[]) listeners );
}
}
else if ( "post-commit-update".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostCommitUpdateEventListeners(
new PostUpdateEventListener[]{}
);
}
else {
eventListeners.setPostCommitUpdateEventListeners( (PostUpdateEventListener[]) listeners );
}
}
else if ( "post-commit-delete".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostCommitDeleteEventListeners(
new PostDeleteEventListener[]{}
);
}
else {
eventListeners.setPostCommitDeleteEventListeners( (PostDeleteEventListener[]) listeners );
}
}
else if ( "post-commit-insert".equals( type ) ) {
if ( listeners == null ) {
eventListeners.setPostCommitInsertEventListeners(
new PostInsertEventListener[]{}
);
}
else {
eventListeners.setPostCommitInsertEventListeners( (PostInsertEventListener[]) listeners );
}
}
else {
// Unknown event type names are not fatal; warn and continue.
log.warn( "Unrecognized listener type [" + type + "]" );
}
|
public org.hibernate.cfg.Configuration | setNamingStrategy(NamingStrategy namingStrategy)Set a custom naming strategy
// Simple setter; returns this Configuration to allow method chaining.
this.namingStrategy = namingStrategy;
return this;
|
public org.hibernate.cfg.Configuration | setProperties(java.util.Properties properties)Specify a completely new set of properties
// Replaces the entire properties object (does not merge with existing
// properties); returns this Configuration to allow method chaining.
this.properties = properties;
return this;
|
public org.hibernate.cfg.Configuration | setProperty(java.lang.String propertyName, java.lang.String value)Set a property
// Sets a single property on the current properties object; returns this
// Configuration to allow method chaining.
properties.setProperty( propertyName, value );
return this;
|
private void | validate()
// Ask every mapped entity class, then every mapped collection, to
// validate itself against the runtime mapping.
for ( Iterator classMappings = classes.values().iterator(); classMappings.hasNext(); ) {
	( (PersistentClass) classMappings.next() ).validate( mapping );
}
for ( Iterator collectionMappings = collections.values().iterator(); collectionMappings.hasNext(); ) {
	( (Collection) collectionMappings.next() ).validate( mapping );
}
|
public void | validateSchema(org.hibernate.dialect.Dialect dialect, org.hibernate.tool.hbm2ddl.DatabaseMetadata databaseMetadata)
// Validate the mapped model against actual database metadata: every
// physical table must exist with compatible columns, and every persistent
// id generator's backing sequence/table must exist. Throws
// HibernateException on the first discrepancy found.
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
// Non-physical tables (e.g. subselect-mapped) have nothing to check.
if ( table.isPhysicalTable() ) {
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
// Fall back to the configured default schema/catalog when the
// mapping does not specify them explicitly.
( table.getSchema() == null ) ? defaultSchema : table.getSchema(),
( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog(),
table.isQuoted());
if ( tableInfo == null ) {
throw new HibernateException( "Missing table: " + table.getName() );
}
else {
table.validateColumns( dialect, mapping, tableInfo );
}
}
}
// Check that the schema object behind each persistent id generator exists.
iter = iterateGenerators( dialect );
while ( iter.hasNext() ) {
PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next();
Object key = generator.generatorKey();
if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) {
throw new HibernateException( "Missing sequence or table: " + key );
}
}
|