File: SchemaManager.java | Category: Glassfish v2 API | Size: 36823 bytes | Date: Tue May 22 16:54:54 BST 2007 | Package: oracle.toplink.essentials.tools.schemaframework

SchemaManager

public class SchemaManager extends Object

Purpose: Define all user level protocol for development time database manipulation.

Responsibilities:

  • Define protocol for schema creation.
  • Define any useful testing specific protocol.

Fields Summary
protected DatabaseSessionImpl
session
protected Writer
createSchemaWriter
protected Writer
dropSchemaWriter
protected boolean
createSQLFiles
protected TableCreator
defaultTableCreator
Constructors Summary
/**
 * Construct a SchemaManager bound to the given session implementation.
 *
 * @param session the session used for all subsequent schema operations
 */
public SchemaManager(DatabaseSessionImpl session) {
    this.session = session;
}
/**
 * Construct a SchemaManager from the public session interface.
 *
 * @param session the session; must actually be a {@code DatabaseSessionImpl},
 *        otherwise a ClassCastException is thrown here
 */
public SchemaManager(DatabaseSession session) {
    this.session = ((DatabaseSessionImpl)session);
}
Methods Summary
public voidalterSequence(oracle.toplink.essentials.tools.schemaframework.SequenceDefinition sequenceDefinition)
Use the definition to alter sequence.

        if (!sequenceDefinition.isAlterSupported()) {
            return;
        }
        if (shouldWriteToDatabase()) {
            sequenceDefinition.alterOnDatabase(getSession());
        } else {
            sequenceDefinition.alter(getSession(), createSchemaWriter);
        }
    
public voidappendToDDLWriter(java.lang.String stringToWrite)
PUBLIC: If the schema manager is writing to a writer, append this string to that writer.

        // If this method is called, we know that it is the old case and
        // it would not matter which schemaWriter we use as both the 
        // create and drop schemaWriters are essentially the same. 
        // So just pick one.
        appendToDDLWriter(createSchemaWriter, stringToWrite);
    
public voidappendToDDLWriter(java.io.Writer schemaWriter, java.lang.String stringToWrite)

        if (schemaWriter == null) {
            return;//do nothing.  Ignore append request
        }

        try {
            schemaWriter.write(stringToWrite);
            schemaWriter.flush();
        } catch (java.io.IOException ioException) {
            throw ValidationException.fileError(ioException);
        }
    
public voidbuildFieldTypes(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDef)
INTERNAL: builds the field names based on the type read in from the builder

        tableDef.buildFieldTypes(getSession());
    
protected oracle.toplink.essentials.tools.schemaframework.SequenceDefinitionbuildSequenceDefinition(oracle.toplink.essentials.sequencing.Sequence sequence)

        if (sequence instanceof DefaultSequence) {
            String name = sequence.getName();
            int size = sequence.getPreallocationSize();
            int initialValue = sequence.getInitialValue();
            sequence = getSession().getDatasourcePlatform().getDefaultSequence();

            if (sequence instanceof TableSequence) {
                TableSequence tableSequence = (TableSequence)sequence;
                return new TableSequenceDefinition(name, tableSequence);
            } else if (sequence instanceof NativeSequence) {
                if (getSession().getDatasourcePlatform().isOracle()) { 
                    return new OracleSequenceDefinition(name, size, initialValue);
                } else if (getSession().getDatasourcePlatform().isTimesTen()) {
                    return new TimesTenSequenceDefinition(name, size, initialValue);
                } else {
                    return null;
                }
            } else {
                return null;
            }
        } else if (sequence instanceof TableSequence) {
            TableSequence tableSequence = (TableSequence)sequence;

            return new TableSequenceDefinition(tableSequence);
        } else if (sequence instanceof NativeSequence) {
            if (getSession().getDatasourcePlatform().isOracle()) {
                NativeSequence nativeSequence = (NativeSequence)sequence;
                return new OracleSequenceDefinition(nativeSequence);
            } else if (getSession().getDatasourcePlatform().isTimesTen()) {
                NativeSequence nativeSequence = (NativeSequence)sequence;
                return new TimesTenSequenceDefinition(nativeSequence);
            } else {
                return null;
            }
        } else {
            return null;
        }
    
private voidbuildTableAndSequenceDefinitions(java.util.HashSet sequenceDefinitions, java.util.HashSet processedSequenceNames, java.util.HashMap tableDefinitions)

        Iterator descriptors = getSession().getDescriptors().values().iterator();

        while (descriptors.hasNext()) {
            ClassDescriptor descriptor = (ClassDescriptor)descriptors.next();

            if (descriptor.usesSequenceNumbers()) {
                String seqName = descriptor.getSequenceNumberName();

                if (seqName == null) {
                    seqName = getSession().getDatasourcePlatform().getDefaultSequence().getName();
                }

                if (processedSequenceNames.contains(seqName)) {
                    continue;
                }

                processedSequenceNames.add(seqName);

                Sequence sequence = getSession().getDatasourcePlatform().getSequence(seqName);

                if (sequence.shouldAcquireValueAfterInsert()) {
                    continue;
                }

                SequenceDefinition sequenceDefinition = buildSequenceDefinition(sequence);

                if (sequenceDefinition == null) {
                    continue;
                }

                sequenceDefinitions.add(sequenceDefinition);

                TableDefinition tableDefinition = sequenceDefinition.buildTableDefinition();

                if (tableDefinition != null) {
                    String tableName = tableDefinition.getName();
                    TableDefinition otherTableDefinition = (TableDefinition)tableDefinitions.get(tableName);

                    if (otherTableDefinition != null) {
                        // check for a conflict; if there is one - throw a ValidationException
                    } else {
                        tableDefinitions.put(tableName, tableDefinition);
                    }
                }
            }
        }
    
public voidcloseDDLWriter()
PUBLIC: Close the schema writer.

        closeDDLWriter(createSchemaWriter);
        closeDDLWriter(dropSchemaWriter);
        createSchemaWriter = null;
        dropSchemaWriter = null;
    
public voidcloseDDLWriter(java.io.Writer schemaWriter)

        if (schemaWriter == null) {
            return;
        }

        try {
            schemaWriter.flush();
            schemaWriter.close();
        } catch (java.io.IOException ioException) {
            throw ValidationException.fileError(ioException);
        }
    
public voidcreateConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)
Use the table definition to add the constraints to the database, this is normally done in two steps to avoid dependencies.

        if (shouldWriteToDatabase()) {
            tableDefinition.createConstraintsOnDatabase(getSession());
        } else {
            tableDefinition.setCreateSQLFiles(createSQLFiles);
            tableDefinition.createConstraints(getSession(), createSchemaWriter);
        }
    
public voidcreateDefaultTables()
Create the default table schema for the TopLink project this session associated with.

        //Create each table w/o throwing exception and/or exit if some of them are already existed in the db. 
        //If a table is already existed, skip the creation.

        boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
        getSession().getSessionLog().setShouldLogExceptionStackTrace(false);

        try {
            TableCreator tableCreator = getDefaultTableCreator();
            tableCreator.createTables(session, this);
        } catch (DatabaseException ex) {
            // Ignore error
        } finally {
            getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
        }
    
voidcreateForeignConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)

        if (shouldWriteToDatabase()) {
            tableDefinition.createForeignConstraintsOnDatabase(getSession());
        } else {
            tableDefinition.setCreateSQLFiles(createSQLFiles);
            tableDefinition.createForeignConstraints(getSession(), createSchemaWriter);
        }
    
public voidcreateObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseObjectDefinition)
Use the definition object to create the schema entity on the database. This is used for creating tables, views, procedures ... etc ...

        if (shouldWriteToDatabase()) {
            databaseObjectDefinition.createOnDatabase(getSession());
        } else {
            databaseObjectDefinition.createObject(getSession(), createSchemaWriter);
            if (createSQLFiles){
                this.appendToDDLWriter(createSchemaWriter, getSession().getPlatform().getStoredProcedureTerminationToken());
            }
            this.appendToDDLWriter(createSchemaWriter, "\n");
        }
    
protected voidcreateOrReplaceSequences(boolean create)
Common implementor for createSequence and replaceSequence

        Sequencing sequencing = getSession().getSequencing();

        if ((sequencing == null) || (sequencing.whenShouldAcquireValueForAll() == Sequencing.AFTER_INSERT)) {
            // Not required on Sybase native etc.
            return;
        }

        // Prepare table and sequence definitions
        // table name mapped to TableDefinition
        HashMap tableDefinitions = new HashMap();

        // sequence name to SequenceDefinition
        HashSet sequenceDefinitions = new HashSet();

        // remember the processed - to handle each sequence just once.
        HashSet processedSequenceNames = new HashSet();
        buildTableAndSequenceDefinitions(sequenceDefinitions, processedSequenceNames, tableDefinitions);
        processTableDefinitions(tableDefinitions, create);
        processSequenceDefinitions(sequenceDefinitions, create);
    
protected voidcreateOrReplaceSequences(boolean create, boolean drop)
Common implementor for createSequence and replaceSequence

        Sequencing sequencing = getSession().getSequencing();

        if ((sequencing == null) || (sequencing.whenShouldAcquireValueForAll() == Sequencing.AFTER_INSERT)) {
            // Not required on Sybase native etc.
            return;
        }

        // Prepare table and sequence definitions
        // table name mapped to TableDefinition
        HashMap tableDefinitions = new HashMap();

        // sequence name to SequenceDefinition
        HashSet sequenceDefinitions = new HashSet();

        // remember the processed - to handle each sequence just once.
        HashSet processedSequenceNames = new HashSet();

        buildTableAndSequenceDefinitions(sequenceDefinitions, processedSequenceNames, tableDefinitions);
        processTableDefinitions(tableDefinitions, create);
        processSequenceDefinitions(sequenceDefinitions, drop);
    
public voidcreateSequences()
Create all the receiver's sequences on the database for all of the loaded descriptors.

        createOrReplaceSequences(true);
    
voidcreateUniqueConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)

        if (shouldWriteToDatabase()) {
            tableDefinition.createUniqueConstraintsOnDatabase(getSession());
        } else {
            tableDefinition.setCreateSQLFiles(createSQLFiles);
            tableDefinition.createUniqueConstraints(getSession(), createSchemaWriter);
        }
    
public voiddropConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)
Use the table definition to drop the constraints from the table, this is normally done in two steps to avoid dependencies.

        if (shouldWriteToDatabase()) {
            tableDefinition.dropConstraintsOnDatabase(getSession());
        } else {
            tableDefinition.setCreateSQLFiles(createSQLFiles);
            tableDefinition.dropConstraints(getSession(), getDropSchemaWriter());
        }
    
public voiddropObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseObjectDefinition)
Use the definition object to drop the schema entity from the database. This is used for droping tables, views, procedures ... etc ...

        if (shouldWriteToDatabase()) {
            databaseObjectDefinition.dropFromDatabase(getSession());
        } else {
            Writer dropSchemaWriter = getDropSchemaWriter();
            databaseObjectDefinition.dropObject(getSession(), dropSchemaWriter);
            if (createSQLFiles){
                this.appendToDDLWriter(dropSchemaWriter, getSession().getPlatform().getStoredProcedureTerminationToken());
            }
            this.appendToDDLWriter(dropSchemaWriter, "\n");
        }
    
public voiddropTable(java.lang.String tableName)
Drop (delete) the table named tableName from the database.

        TableDefinition tableDefinition;

        tableDefinition = new TableDefinition();
        tableDefinition.setName(tableName);
        dropObject(tableDefinition);
    
public voidfinalize()
INTERNAL: Close the schema writer when the schema manger is garbage collected

        try {
            this.closeDDLWriter();
        } catch (ValidationException exception) {
            // do nothing
        }
    
protected oracle.toplink.essentials.internal.databaseaccess.DatabaseAccessorgetAccessor()
Return the appropriate accessor. Assume we are dealing with a JDBC accessor.

        return (DatabaseAccessor)getSession().getAccessor();
    
public java.util.VectorgetAllColumnNames(java.lang.String tableName)
Get a description of table columns available in a catalog.

Each column description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. COLUMN_NAME String => column name
  5. DATA_TYPE short => SQL type from java.sql.Types
  6. TYPE_NAME String => Data source dependent type name
  7. COLUMN_SIZE int => column size. For char or date types this is the maximum number of characters, for numeric or decimal types this is precision.
  8. BUFFER_LENGTH is not used.
  9. DECIMAL_DIGITS int => the number of fractional digits
  10. NUM_PREC_RADIX int => Radix (typically either 10 or 2)
  11. NULLABLE int => is NULL allowed?
    • columnNoNulls - might not allow NULL values
    • columnNullable - definitely allows NULL values
    • columnNullableUnknown - nullability unknown
  12. REMARKS String => comment describing column (may be null)
  13. COLUMN_DEF String => default value (may be null)
  14. SQL_DATA_TYPE int => unused
  15. SQL_DATETIME_SUB int => unused
  16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column
  17. ORDINAL_POSITION int => index of column in table (starting at 1)
  18. IS_NULLABLE String => "NO" means column definitely does not allow NULL values; "YES" means the column might allow NULL values. An empty string means nobody knows.

param
tableName a table name pattern
return
a Vector of DatabaseRows.

        return getAccessor().getColumnInfo(null, null, tableName, null, getSession());
    
public java.util.VectorgetAllColumnNames(java.lang.String creatorName, java.lang.String tableName)
Get a description of table columns available in a catalog.

Each column description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. COLUMN_NAME String => column name
  5. DATA_TYPE short => SQL type from java.sql.Types
  6. TYPE_NAME String => Data source dependent type name
  7. COLUMN_SIZE int => column size. For char or date types this is the maximum number of characters, for numeric or decimal types this is precision.
  8. BUFFER_LENGTH is not used.
  9. DECIMAL_DIGITS int => the number of fractional digits
  10. NUM_PREC_RADIX int => Radix (typically either 10 or 2)
  11. NULLABLE int => is NULL allowed?
    • columnNoNulls - might not allow NULL values
    • columnNullable - definitely allows NULL values
    • columnNullableUnknown - nullability unknown
  12. REMARKS String => comment describing column (may be null)
  13. COLUMN_DEF String => default value (may be null)
  14. SQL_DATA_TYPE int => unused
  15. SQL_DATETIME_SUB int => unused
  16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column
  17. ORDINAL_POSITION int => index of column in table (starting at 1)
  18. IS_NULLABLE String => "NO" means column definitely does not allow NULL values; "YES" means the column might allow NULL values. An empty string means nobody knows.

param
creatorName a schema name pattern; "" retrieves those without a schema
param
tableName a table name pattern
return
a Vector of DatabaseRows.

        return getAccessor().getColumnInfo(null, creatorName, tableName, null, getSession());
    
public java.util.VectorgetAllTableNames()
Get a description of tables available in a catalog.

Each table description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
  5. REMARKS String => explanatory comment on the table

Note: Some databases may not return information for all tables.

return
a Vector of DatabaseRows.

        return getAccessor().getTableInfo(null, null, null, null, getSession());
    
public java.util.VectorgetAllTableNames(java.lang.String creatorName)
Get a description of table columns available in a catalog.

Each column description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. COLUMN_NAME String => column name
  5. DATA_TYPE short => SQL type from java.sql.Types
  6. TYPE_NAME String => Data source dependent type name
  7. COLUMN_SIZE int => column size. For char or date types this is the maximum number of characters, for numeric or decimal types this is precision.
  8. BUFFER_LENGTH is not used.
  9. DECIMAL_DIGITS int => the number of fractional digits
  10. NUM_PREC_RADIX int => Radix (typically either 10 or 2)
  11. NULLABLE int => is NULL allowed?
    • columnNoNulls - might not allow NULL values
    • columnNullable - definitely allows NULL values
    • columnNullableUnknown - nullability unknown
  12. REMARKS String => comment describing column (may be null)
  13. COLUMN_DEF String => default value (may be null)
  14. SQL_DATA_TYPE int => unused
  15. SQL_DATETIME_SUB int => unused
  16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column
  17. ORDINAL_POSITION int => index of column in table (starting at 1)
  18. IS_NULLABLE String => "NO" means column definitely does not allow NULL values; "YES" means the column might allow NULL values. An empty string means nobody knows.

param
creatorName a schema name pattern; "" retrieves those without a schema
return
a Vector of DatabaseRows.

        return getAccessor().getTableInfo(null, creatorName, null, null, getSession());
    
public java.util.VectorgetColumnInfo(java.lang.String catalog, java.lang.String schema, java.lang.String tableName, java.lang.String columnName)
Get a description of table columns available in a catalog.

Only column descriptions matching the catalog, schema, table and column name criteria are returned. They are ordered by TABLE_SCHEM, TABLE_NAME and ORDINAL_POSITION.

Each column description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. COLUMN_NAME String => column name
  5. DATA_TYPE short => SQL type from java.sql.Types
  6. TYPE_NAME String => Data source dependent type name
  7. COLUMN_SIZE int => column size. For char or date types this is the maximum number of characters, for numeric or decimal types this is precision.
  8. BUFFER_LENGTH is not used.
  9. DECIMAL_DIGITS int => the number of fractional digits
  10. NUM_PREC_RADIX int => Radix (typically either 10 or 2)
  11. NULLABLE int => is NULL allowed?
    • columnNoNulls - might not allow NULL values
    • columnNullable - definitely allows NULL values
    • columnNullableUnknown - nullability unknown
  12. REMARKS String => comment describing column (may be null)
  13. COLUMN_DEF String => default value (may be null)
  14. SQL_DATA_TYPE int => unused
  15. SQL_DATETIME_SUB int => unused
  16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column
  17. ORDINAL_POSITION int => index of column in table (starting at 1)
  18. IS_NULLABLE String => "NO" means column definitely does not allow NULL values; "YES" means the column might allow NULL values. An empty string means nobody knows.

param
catalog a catalog name; "" retrieves those without a catalog; null means drop catalog name from the selection criteria
param
schemaPattern a schema name pattern; "" retrieves those without a schema
param
tableNamePattern a table name pattern
param
columnNamePattern a column name pattern
return
a Vector of DatabaseRows.

        return getAccessor().getColumnInfo(catalog, schema, tableName, columnName, getSession());
    
protected oracle.toplink.essentials.tools.schemaframework.TableCreatorgetDefaultTableCreator()
Construct the default TableCreator. If the default TableCreator is already created, just returns it.

        if(defaultTableCreator == null) {
            defaultTableCreator = new DefaultTableGenerator(session.getProject()).generateDefaultTableCreator();
            defaultTableCreator.setIgnoreDatabaseException(true);
        }
        return defaultTableCreator;
    
protected java.io.WritergetDropSchemaWriter()

        if (null == dropSchemaWriter) {
            return createSchemaWriter;
        } else {
            return dropSchemaWriter;
        }
    
public oracle.toplink.essentials.internal.sessions.AbstractSessiongetSession()

        return session;
    
public java.util.VectorgetTableInfo(java.lang.String catalog, java.lang.String schema, java.lang.String tableName, java.lang.String[] types)
Get a description of tables available in a catalog.

Only table descriptions matching the catalog, schema, table name and type criteria are returned. They are ordered by TABLE_TYPE, TABLE_SCHEM and TABLE_NAME.

Each table description has the following columns:

  1. TABLE_CAT String => table catalog (may be null)
  2. TABLE_SCHEM String => table schema (may be null)
  3. TABLE_NAME String => table name
  4. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
  5. REMARKS String => explanatory comment on the table

Note: Some databases may not return information for all tables.

param
catalog a catalog name; "" retrieves those without a catalog; null means drop catalog name from the selection criteria
param
schemaPattern a schema name pattern; "" retrieves those without a schema
param
tableNamePattern a table name pattern
param
types a list of table types to include; null returns all types
return
a Vector of DatabaseRows.

        return getAccessor().getTableInfo(catalog, schema, tableName, types, getSession());
    
public voidoutputCreateDDLToFile(java.lang.String fileName)

        try {
            this.createSchemaWriter = new java.io.FileWriter(fileName);
        } catch (java.io.IOException ioException) {
            throw ValidationException.fileError(ioException);
        }
    
public voidoutputCreateDDLToWriter(java.io.Writer createWriter)

        this.createSchemaWriter = createWriter;
    
public voidoutputDDLToDatabase()
PUBLIC: Output all DDL statements directly to the database.

        this.createSchemaWriter = null;
        this.dropSchemaWriter = null;
    
public voidoutputDDLToFile(java.lang.String fileName)
PUBLIC: Output all DDL statements to a file writer specified by the name in the parameter.

        try {
            this.createSchemaWriter = new java.io.FileWriter(fileName);
        } catch (java.io.IOException ioException) {
            throw ValidationException.fileError(ioException);
        }
    
public voidoutputDDLToWriter(java.io.Writer schemaWriter)
PUBLIC: Output all DDL statements to a writer specified in the parameter.

        this.createSchemaWriter = schemaWriter;
    
public voidoutputDropDDLToFile(java.lang.String fileName)

        try {
            this.dropSchemaWriter = new java.io.FileWriter(fileName);
        } catch (java.io.IOException ioException) {
            throw ValidationException.fileError(ioException);
        }
    
public voidoutputDropDDLToWriter(java.io.Writer dropWriter)

        this.dropSchemaWriter = dropWriter;
    
private voidprocessSequenceDefinitions(java.util.HashSet sequenceDefinitions, boolean create)


        // create sequence objects
        Iterator itSequenceDefinitions = sequenceDefinitions.iterator();

        while (itSequenceDefinitions.hasNext()) {
            SequenceDefinition sequenceDefinition = (SequenceDefinition)itSequenceDefinitions.next();

            if (!create) {
                try {
                    dropObject(sequenceDefinition);
                } catch (DatabaseException exception) {
                    // Ignore sequence not found for first creation
                }
            }

            createObject(sequenceDefinition);
        }
    
private voidprocessTableDefinitions(java.util.HashMap tableDefinitions, boolean create)


        // create tables
        Iterator itTableDefinitions = tableDefinitions.values().iterator();

        while (itTableDefinitions.hasNext()) {
            TableDefinition tableDefinition = (TableDefinition)itTableDefinitions.next();

            // CR 3870467, do not log stack
            boolean shouldLogExceptionStackTrace = session.getSessionLog().shouldLogExceptionStackTrace();

            if (shouldLogExceptionStackTrace) {
                session.getSessionLog().setShouldLogExceptionStackTrace(false);
            }

            if (create) {
                try {
                    createObject(tableDefinition);
                } catch (DatabaseException exception) {
                    // Ignore already created
                } finally {
                    if (shouldLogExceptionStackTrace) {
                        session.getSessionLog().setShouldLogExceptionStackTrace(true);
                    }
                }
            } else {
                try {
                    dropObject(tableDefinition);
                } catch (DatabaseException exception) {
                    // Ignore table not found for first creation
                } finally {
                    if (shouldLogExceptionStackTrace) {
                        session.getSessionLog().setShouldLogExceptionStackTrace(true);
                    }
                }

                createObject(tableDefinition);
            }
        }
    
public voidreplaceDefaultTables()
Drop and recreate the default table schema for the TopLink project this session associated with.

        boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
        getSession().getSessionLog().setShouldLogExceptionStackTrace(false);

        try {
            TableCreator tableCreator = getDefaultTableCreator();
            tableCreator.replaceTables(session, this);
        } catch (DatabaseException exception) {
            // Ignore error
        } finally {
            getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
        }
    
public voidreplaceDefaultTables(boolean keepSequenceTables)
Drop and recreate the default table schema for the TopLink project this session associated with.

        boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
        getSession().getSessionLog().setShouldLogExceptionStackTrace(false);

        try {
            TableCreator tableCreator = getDefaultTableCreator();
            tableCreator.replaceTables(session, this, keepSequenceTables);
        } catch (DatabaseException exception) {
            // Ignore error
        } finally {
            getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
        }
    
public voidreplaceObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseDefinition)
Use the definition object to drop and recreate the schema entity on the database. This is used for dropping tables, views, procedures ... etc ... This handles and ignore any database error while droping incase the object did not previously exist.

        // CR 3870467, do not log stack
        boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();

        if (shouldLogExceptionStackTrace) {
            getSession().getSessionLog().setShouldLogExceptionStackTrace(false);
        }

        try {
            dropObject(databaseDefinition);
        } catch (DatabaseException exception) {
            // Ignore error
        } finally {
            if (shouldLogExceptionStackTrace) {
                getSession().getSessionLog().setShouldLogExceptionStackTrace(true);
            }
        }

        createObject(databaseDefinition);
    
public voidreplaceSequences()
Drop and recreate all the receiver's sequences on the database for all of the loaded descriptors.

        createOrReplaceSequences(false);
    
public voidsetCreateSQLFiles(boolean genFlag)

        this.createSQLFiles = genFlag;
    
public voidsetSession(oracle.toplink.essentials.internal.sessions.DatabaseSessionImpl session)

        this.session = session;
    
public booleanshouldWriteToDatabase()
PUBLIC: Return true if this SchemaManager should write to the database directly

        return ((this.createSchemaWriter == null) && (this.dropSchemaWriter == null));