Methods Summary |
---|
public void | alterSequence(oracle.toplink.essentials.tools.schemaframework.SequenceDefinition sequenceDefinition)Use the definition to alter the sequence.
if (!sequenceDefinition.isAlterSupported()) {
return;
}
if (shouldWriteToDatabase()) {
sequenceDefinition.alterOnDatabase(getSession());
} else {
sequenceDefinition.alter(getSession(), createSchemaWriter);
}
|
public void | appendToDDLWriter(java.lang.String stringToWrite)PUBLIC: If the schema manager is writing to a writer, append this string
to that writer.
// If this method is called, we know that it is the old case and
// it would not matter which schemaWriter we use as both the
// create and drop schemaWriters are essentially the same.
// So just pick one.
appendToDDLWriter(createSchemaWriter, stringToWrite);
|
public void | appendToDDLWriter(java.io.Writer schemaWriter, java.lang.String stringToWrite)
if (schemaWriter == null) {
return;//do nothing. Ignore append request
}
try {
schemaWriter.write(stringToWrite);
schemaWriter.flush();
} catch (java.io.IOException ioException) {
throw ValidationException.fileError(ioException);
}
|
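A minimal usage sketch for the appendToDDLWriter variants, assuming SchemaManager lives in the oracle.toplink.essentials.tools.schemaframework package alongside the other classes shown here and that a configured instance is obtained elsewhere (the helper class and method names are illustrative only):

import java.io.StringWriter;
import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class AppendToWriterExample {
    /** Starts an in-memory DDL script with a header comment; DDL generated afterwards is appended to it. */
    public static StringWriter startScript(SchemaManager manager) {
        StringWriter script = new StringWriter();
        manager.outputDDLToWriter(script);  // route DDL to the writer instead of the database
        manager.appendToDDLWriter("-- schema script generated by SchemaManager\n");
        return script;                      // caller generates DDL, then calls closeDDLWriter()
    }
}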
public void | buildFieldTypes(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDef)INTERNAL:
Builds the field types based on the types read in from the builder.
tableDef.buildFieldTypes(getSession());
|
protected oracle.toplink.essentials.tools.schemaframework.SequenceDefinition | buildSequenceDefinition(oracle.toplink.essentials.sequencing.Sequence sequence)
if (sequence instanceof DefaultSequence) {
String name = sequence.getName();
int size = sequence.getPreallocationSize();
int initialValue = sequence.getInitialValue();
sequence = getSession().getDatasourcePlatform().getDefaultSequence();
if (sequence instanceof TableSequence) {
TableSequence tableSequence = (TableSequence)sequence;
return new TableSequenceDefinition(name, tableSequence);
} else if (sequence instanceof NativeSequence) {
if (getSession().getDatasourcePlatform().isOracle()) {
return new OracleSequenceDefinition(name, size, initialValue);
} else if (getSession().getDatasourcePlatform().isTimesTen()) {
return new TimesTenSequenceDefinition(name, size, initialValue);
} else {
return null;
}
} else {
return null;
}
} else if (sequence instanceof TableSequence) {
TableSequence tableSequence = (TableSequence)sequence;
return new TableSequenceDefinition(tableSequence);
} else if (sequence instanceof NativeSequence) {
if (getSession().getDatasourcePlatform().isOracle()) {
NativeSequence nativeSequence = (NativeSequence)sequence;
return new OracleSequenceDefinition(nativeSequence);
} else if (getSession().getDatasourcePlatform().isTimesTen()) {
NativeSequence nativeSequence = (NativeSequence)sequence;
return new TimesTenSequenceDefinition(nativeSequence);
} else {
return null;
}
} else {
return null;
}
|
private void | buildTableAndSequenceDefinitions(java.util.HashSet sequenceDefinitions, java.util.HashSet processedSequenceNames, java.util.HashMap tableDefinitions)
Iterator descriptors = getSession().getDescriptors().values().iterator();
while (descriptors.hasNext()) {
ClassDescriptor descriptor = (ClassDescriptor)descriptors.next();
if (descriptor.usesSequenceNumbers()) {
String seqName = descriptor.getSequenceNumberName();
if (seqName == null) {
seqName = getSession().getDatasourcePlatform().getDefaultSequence().getName();
}
if (processedSequenceNames.contains(seqName)) {
continue;
}
processedSequenceNames.add(seqName);
Sequence sequence = getSession().getDatasourcePlatform().getSequence(seqName);
if (sequence.shouldAcquireValueAfterInsert()) {
continue;
}
SequenceDefinition sequenceDefinition = buildSequenceDefinition(sequence);
if (sequenceDefinition == null) {
continue;
}
sequenceDefinitions.add(sequenceDefinition);
TableDefinition tableDefinition = sequenceDefinition.buildTableDefinition();
if (tableDefinition != null) {
String tableName = tableDefinition.getName();
TableDefinition otherTableDefinition = (TableDefinition)tableDefinitions.get(tableName);
if (otherTableDefinition != null) {
// check for a conflict; if there is one - throw a ValidationException
} else {
tableDefinitions.put(tableName, tableDefinition);
}
}
}
}
|
public void | closeDDLWriter()PUBLIC:
Close the schema writer.
closeDDLWriter(createSchemaWriter);
closeDDLWriter(dropSchemaWriter);
createSchemaWriter = null;
dropSchemaWriter = null;
|
public void | closeDDLWriter(java.io.Writer schemaWriter)
if (schemaWriter == null) {
return;
}
try {
schemaWriter.flush();
schemaWriter.close();
} catch (java.io.IOException ioException) {
throw ValidationException.fileError(ioException);
}
|
public void | createConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)Use the table definition to add the constraints to the database; this is normally done
in two steps to avoid dependencies.
if (shouldWriteToDatabase()) {
tableDefinition.createConstraintsOnDatabase(getSession());
} else {
tableDefinition.setCreateSQLFiles(createSQLFiles);
tableDefinition.createConstraints(getSession(), createSchemaWriter);
}
|
public void | createDefaultTables()Create the default table schema for the TopLink project this session is associated with.
// Create each table without throwing an exception or exiting if some of them already exist in the database.
// If a table already exists, skip its creation.
boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
getSession().getSessionLog().setShouldLogExceptionStackTrace(false);
try {
TableCreator tableCreator = getDefaultTableCreator();
tableCreator.createTables(session, this);
} catch (DatabaseException ex) {
// Ignore error
} finally {
getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
}
|
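A sketch of the direct-to-database path, under the same SchemaManager assumptions as above; whether a separate createSequences() call is needed in addition to createDefaultTables() depends on the sequencing configuration and is shown only to illustrate the API:

import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class CreateDefaultTablesExample {
    /** Creates the default schema directly on the database; existing tables are skipped per the code above. */
    public static void createSchema(SchemaManager manager) {
        manager.outputDDLToDatabase();  // clear any writers so statements are executed, not written to a file
        manager.createDefaultTables();  // database errors for pre-existing tables are ignored
        manager.createSequences();      // create sequences/sequence tables for the loaded descriptors (may be redundant)
    }
}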
void | createForeignConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)
if (shouldWriteToDatabase()) {
tableDefinition.createForeignConstraintsOnDatabase(getSession());
} else {
tableDefinition.setCreateSQLFiles(createSQLFiles);
tableDefinition.createForeignConstraints(getSession(), createSchemaWriter);
}
|
public void | createObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseObjectDefinition)Use the definition object to create the schema entity on the database.
This is used for creating tables, views, procedures, etc.
if (shouldWriteToDatabase()) {
databaseObjectDefinition.createOnDatabase(getSession());
} else {
databaseObjectDefinition.createObject(getSession(), createSchemaWriter);
if (createSQLFiles){
this.appendToDDLWriter(createSchemaWriter, getSession().getPlatform().getStoredProcedureTerminationToken());
}
this.appendToDDLWriter(createSchemaWriter, "\n");
}
|
protected void | createOrReplaceSequences(boolean create)Common implementation for createSequences and replaceSequences.
Sequencing sequencing = getSession().getSequencing();
if ((sequencing == null) || (sequencing.whenShouldAcquireValueForAll() == Sequencing.AFTER_INSERT)) {
// Not required on Sybase native etc.
return;
}
// Prepare table and sequence definitions
// table name mapped to TableDefinition
HashMap tableDefinitions = new HashMap();
// sequence name to SequenceDefinition
HashSet sequenceDefinitions = new HashSet();
// remember the processed sequence names - to handle each sequence just once.
HashSet processedSequenceNames = new HashSet();
buildTableAndSequenceDefinitions(sequenceDefinitions, processedSequenceNames, tableDefinitions);
processTableDefinitions(tableDefinitions, create);
processSequenceDefinitions(sequenceDefinitions, create);
|
protected void | createOrReplaceSequences(boolean create, boolean drop)Common implementation for createSequences and replaceSequences.
Sequencing sequencing = getSession().getSequencing();
if ((sequencing == null) || (sequencing.whenShouldAcquireValueForAll() == Sequencing.AFTER_INSERT)) {
// Not required on Sybase native etc.
return;
}
// Prepare table and sequence definitions
// table name mapped to TableDefinition
HashMap tableDefinitions = new HashMap();
// sequence name to SequenceDefinition
HashSet sequenceDefinitions = new HashSet();
// remember the processed sequence names - to handle each sequence just once.
HashSet processedSequenceNames = new HashSet();
buildTableAndSequenceDefinitions(sequenceDefinitions, processedSequenceNames, tableDefinitions);
processTableDefinitions(tableDefinitions, create);
processSequenceDefinitions(sequenceDefinitions, drop);
|
public void | createSequences()Create all the receiver's sequences on the database for all of the loaded descriptors.
createOrReplaceSequences(true);
|
void | createUniqueConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)
if (shouldWriteToDatabase()) {
tableDefinition.createUniqueConstraintsOnDatabase(getSession());
} else {
tableDefinition.setCreateSQLFiles(createSQLFiles);
tableDefinition.createUniqueConstraints(getSession(), createSchemaWriter);
}
|
public void | dropConstraints(oracle.toplink.essentials.tools.schemaframework.TableDefinition tableDefinition)Use the table definition to drop the constraints from the table; this is normally done
in two steps to avoid dependencies.
if (shouldWriteToDatabase()) {
tableDefinition.dropConstraintsOnDatabase(getSession());
} else {
tableDefinition.setCreateSQLFiles(createSQLFiles);
tableDefinition.dropConstraints(getSession(), getDropSchemaWriter());
}
|
public void | dropObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseObjectDefinition)Use the definition object to drop the schema entity from the database.
This is used for dropping tables, views, procedures, etc.
if (shouldWriteToDatabase()) {
databaseObjectDefinition.dropFromDatabase(getSession());
} else {
Writer dropSchemaWriter = getDropSchemaWriter();
databaseObjectDefinition.dropObject(getSession(), dropSchemaWriter);
if (createSQLFiles){
this.appendToDDLWriter(dropSchemaWriter, getSession().getPlatform().getStoredProcedureTerminationToken());
}
this.appendToDDLWriter(dropSchemaWriter, "\n");
}
|
public void | dropTable(java.lang.String tableName)Drop (delete) the table named tableName from the database.
TableDefinition tableDefinition;
tableDefinition = new TableDefinition();
tableDefinition.setName(tableName);
dropObject(tableDefinition);
|
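A defensive cleanup sketch built on dropTable; the DatabaseException package name is an assumption (the class is referenced unqualified in the method bodies above):

import oracle.toplink.essentials.exceptions.DatabaseException; // package assumed
import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class DropTableExample {
    /** Drops a table by name, ignoring the error raised when it does not exist. */
    public static void dropIfPresent(SchemaManager manager, String tableName) {
        try {
            manager.dropTable(tableName);  // builds a TableDefinition internally and calls dropObject
        } catch (DatabaseException ignored) {
            // table did not exist - nothing to drop
        }
    }
}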
public void | finalize()INTERNAL:
Close the schema writers when the schema manager is garbage collected.
try {
this.closeDDLWriter();
} catch (ValidationException exception) {
// do nothing
}
|
protected oracle.toplink.essentials.internal.databaseaccess.DatabaseAccessor | getAccessor()Return the appropriate accessor.
Assume we are dealing with a JDBC accessor.
return (DatabaseAccessor)getSession().getAccessor();
|
public java.util.Vector | getAllColumnNames(java.lang.String tableName)Get a description of table columns available in a catalog.
Each column description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- COLUMN_NAME String => column name
- DATA_TYPE short => SQL type from java.sql.Types
- TYPE_NAME String => Data source dependent type name
- COLUMN_SIZE int => column size. For char or date
types this is the maximum number of characters, for numeric or
decimal types this is precision.
- BUFFER_LENGTH is not used.
- DECIMAL_DIGITS int => the number of fractional digits
- NUM_PREC_RADIX int => Radix (typically either 10 or 2)
- NULLABLE int => is NULL allowed?
- columnNoNulls - might not allow NULL values
- columnNullable - definitely allows NULL values
- columnNullableUnknown - nullability unknown
- REMARKS String => comment describing column (may be null)
- COLUMN_DEF String => default value (may be null)
- SQL_DATA_TYPE int => unused
- SQL_DATETIME_SUB int => unused
- CHAR_OCTET_LENGTH int => for char types the
maximum number of bytes in the column
- ORDINAL_POSITION int => index of column in table
(starting at 1)
- IS_NULLABLE String => "NO" means column definitely
does not allow NULL values; "YES" means the column might
allow NULL values. An empty string means nobody knows.
return getAccessor().getColumnInfo(null, null, tableName, null, getSession());
|
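A read-only metadata sketch; the elements of the returned Vector are printed as-is because their concrete row type is not part of this summary:

import java.util.Iterator;
import java.util.Vector;
import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class ColumnInfoExample {
    /** Prints one metadata row per column of the given table. */
    public static void printColumns(SchemaManager manager, String tableName) {
        Vector columns = manager.getAllColumnNames(tableName);
        for (Iterator it = columns.iterator(); it.hasNext();) {
            System.out.println(it.next()); // one column description per element
        }
    }
}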
public java.util.Vector | getAllColumnNames(java.lang.String creatorName, java.lang.String tableName)Get a description of table columns available in a catalog.
Each column description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- COLUMN_NAME String => column name
- DATA_TYPE short => SQL type from java.sql.Types
- TYPE_NAME String => Data source dependent type name
- COLUMN_SIZE int => column size. For char or date
types this is the maximum number of characters, for numeric or
decimal types this is precision.
- BUFFER_LENGTH is not used.
- DECIMAL_DIGITS int => the number of fractional digits
- NUM_PREC_RADIX int => Radix (typically either 10 or 2)
- NULLABLE int => is NULL allowed?
- columnNoNulls - might not allow NULL values
- columnNullable - definitely allows NULL values
- columnNullableUnknown - nullability unknown
- REMARKS String => comment describing column (may be null)
- COLUMN_DEF String => default value (may be null)
- SQL_DATA_TYPE int => unused
- SQL_DATETIME_SUB int => unused
- CHAR_OCTET_LENGTH int => for char types the
maximum number of bytes in the column
- ORDINAL_POSITION int => index of column in table
(starting at 1)
- IS_NULLABLE String => "NO" means column definitely
does not allow NULL values; "YES" means the column might
allow NULL values. An empty string means nobody knows.
return getAccessor().getColumnInfo(null, creatorName, tableName, null, getSession());
|
public java.util.Vector | getAllTableNames()Get a description of tables available in a catalog.
Each table description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- TABLE_TYPE String => table type. Typical types are "TABLE",
"VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
"LOCAL TEMPORARY", "ALIAS", "SYNONYM".
- REMARKS String => explanatory comment on the table
Note: Some databases may not return information for
all tables.
return getAccessor().getTableInfo(null, null, null, null, getSession());
|
public java.util.Vector | getAllTableNames(java.lang.String creatorName)Get a description of tables available in a catalog.
Each table description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- TABLE_TYPE String => table type. Typical types are "TABLE",
"VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
"LOCAL TEMPORARY", "ALIAS", "SYNONYM".
- REMARKS String => explanatory comment on the table
Note: Some databases may not return information for
all tables.
return getAccessor().getTableInfo(null, creatorName, null, null, getSession());
|
public java.util.Vector | getColumnInfo(java.lang.String catalog, java.lang.String schema, java.lang.String tableName, java.lang.String columnName)Get a description of table columns available in a catalog.
Only column descriptions matching the catalog, schema, table
and column name criteria are returned. They are ordered by
TABLE_SCHEM, TABLE_NAME and ORDINAL_POSITION.
Each column description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- COLUMN_NAME String => column name
- DATA_TYPE short => SQL type from java.sql.Types
- TYPE_NAME String => Data source dependent type name
- COLUMN_SIZE int => column size. For char or date
types this is the maximum number of characters, for numeric or
decimal types this is precision.
- BUFFER_LENGTH is not used.
- DECIMAL_DIGITS int => the number of fractional digits
- NUM_PREC_RADIX int => Radix (typically either 10 or 2)
- NULLABLE int => is NULL allowed?
- columnNoNulls - might not allow NULL values
- columnNullable - definitely allows NULL values
- columnNullableUnknown - nullability unknown
- REMARKS String => comment describing column (may be null)
- COLUMN_DEF String => default value (may be null)
- SQL_DATA_TYPE int => unused
- SQL_DATETIME_SUB int => unused
- CHAR_OCTET_LENGTH int => for char types the
maximum number of bytes in the column
- ORDINAL_POSITION int => index of column in table
(starting at 1)
- IS_NULLABLE String => "NO" means column definitely
does not allow NULL values; "YES" means the column might
allow NULL values. An empty string means nobody knows.
return getAccessor().getColumnInfo(catalog, schema, tableName, columnName, getSession());
|
protected oracle.toplink.essentials.tools.schemaframework.TableCreator | getDefaultTableCreator()Construct the default TableCreator.
If the default TableCreator has already been created, just return it.
if(defaultTableCreator == null) {
defaultTableCreator = new DefaultTableGenerator(session.getProject()).generateDefaultTableCreator();
defaultTableCreator.setIgnoreDatabaseException(true);
}
return defaultTableCreator;
|
protected java.io.Writer | getDropSchemaWriter()
if (null == dropSchemaWriter) {
return createSchemaWriter;
} else {
return dropSchemaWriter;
}
|
public oracle.toplink.essentials.internal.sessions.AbstractSession | getSession()
return session;
|
public java.util.Vector | getTableInfo(java.lang.String catalog, java.lang.String schema, java.lang.String tableName, java.lang.String[] types)Get a description of tables available in a catalog.
Only table descriptions matching the catalog, schema, table
name and type criteria are returned. They are ordered by
TABLE_TYPE, TABLE_SCHEM and TABLE_NAME.
Each table description has the following columns:
- TABLE_CAT String => table catalog (may be null)
- TABLE_SCHEM String => table schema (may be null)
- TABLE_NAME String => table name
- TABLE_TYPE String => table type. Typical types are "TABLE",
"VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
"LOCAL TEMPORARY", "ALIAS", "SYNONYM".
- REMARKS String => explanatory comment on the table
Note: Some databases may not return information for
all tables.
return getAccessor().getTableInfo(catalog, schema, tableName, types, getSession());
|
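The types filter can be used to restrict the listing to base tables; a minimal sketch under the same assumptions as the examples above:

import java.util.Iterator;
import java.util.Vector;
import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class TableInfoExample {
    /** Lists base tables only, excluding views, synonyms, and system tables. */
    public static void printTables(SchemaManager manager) {
        Vector tables = manager.getTableInfo(null, null, null, new String[] { "TABLE" });
        for (Iterator it = tables.iterator(); it.hasNext();) {
            System.out.println(it.next()); // one table description per element
        }
    }
}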
public void | outputCreateDDLToFile(java.lang.String fileName)
try {
this.createSchemaWriter = new java.io.FileWriter(fileName);
} catch (java.io.IOException ioException) {
throw ValidationException.fileError(ioException);
}
|
public void | outputCreateDDLToWriter(java.io.Writer createWriter)
this.createSchemaWriter = createWriter;
|
public void | outputDDLToDatabase()PUBLIC:
Output all DDL statements directly to the database.
this.createSchemaWriter = null;
this.dropSchemaWriter = null;
|
public void | outputDDLToFile(java.lang.String fileName)PUBLIC:
Output all DDL statements to a file writer specified by the name in the parameter.
try {
this.createSchemaWriter = new java.io.FileWriter(fileName);
} catch (java.io.IOException ioException) {
throw ValidationException.fileError(ioException);
}
|
public void | outputDDLToWriter(java.io.Writer schemaWriter)PUBLIC:
Output all DDL statements to a writer specified in the parameter.
this.createSchemaWriter = schemaWriter;
|
public void | outputDropDDLToFile(java.lang.String fileName)
try {
this.dropSchemaWriter = new java.io.FileWriter(fileName);
} catch (java.io.IOException ioException) {
throw ValidationException.fileError(ioException);
}
|
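A script-generation sketch combining the create and drop writers; the file names are hypothetical, and the routing of DROP statements to the drop writer is inferred from the dropObject body above:

import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class DdlScriptExample {
    /** Writes CREATE statements to one file and DROP statements to another. */
    public static void generateScripts(SchemaManager manager) {
        manager.setCreateSQLFiles(true);                    // append the platform termination token after each statement
        manager.outputCreateDDLToFile("create-schema.sql"); // hypothetical file name
        manager.outputDropDDLToFile("drop-schema.sql");     // hypothetical file name
        manager.replaceDefaultTables();                     // DROPs go to the drop writer, CREATEs to the create writer
        manager.closeDDLWriter();                           // flush and close both writers
    }
}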
public void | outputDropDDLToWriter(java.io.Writer dropWriter)
this.dropSchemaWriter = dropWriter;
|
private void | processSequenceDefinitions(java.util.HashSet sequenceDefinitions, boolean create)
// create sequence objects
Iterator itSequenceDefinitions = sequenceDefinitions.iterator();
while (itSequenceDefinitions.hasNext()) {
SequenceDefinition sequenceDefinition = (SequenceDefinition)itSequenceDefinitions.next();
if (!create) {
try {
dropObject(sequenceDefinition);
} catch (DatabaseException exception) {
// Ignore sequence not found for first creation
}
}
createObject(sequenceDefinition);
}
|
private void | processTableDefinitions(java.util.HashMap tableDefinitions, boolean create)
// create tables
Iterator itTableDefinitions = tableDefinitions.values().iterator();
while (itTableDefinitions.hasNext()) {
TableDefinition tableDefinition = (TableDefinition)itTableDefinitions.next();
// CR 3870467, do not log stack
boolean shouldLogExceptionStackTrace = session.getSessionLog().shouldLogExceptionStackTrace();
if (shouldLogExceptionStackTrace) {
session.getSessionLog().setShouldLogExceptionStackTrace(false);
}
if (create) {
try {
createObject(tableDefinition);
} catch (DatabaseException exception) {
// Ignore already created
} finally {
if (shouldLogExceptionStackTrace) {
session.getSessionLog().setShouldLogExceptionStackTrace(true);
}
}
} else {
try {
dropObject(tableDefinition);
} catch (DatabaseException exception) {
// Ignore table not found for first creation
} finally {
if (shouldLogExceptionStackTrace) {
session.getSessionLog().setShouldLogExceptionStackTrace(true);
}
}
createObject(tableDefinition);
}
}
|
public void | replaceDefaultTables()Drop and recreate the default table schema for the TopLink project this session is associated with.
boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
getSession().getSessionLog().setShouldLogExceptionStackTrace(false);
try {
TableCreator tableCreator = getDefaultTableCreator();
tableCreator.replaceTables(session, this);
} catch (DatabaseException exception) {
// Ignore error
} finally {
getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
}
|
public void | replaceDefaultTables(boolean keepSequenceTables)Drop and recreate the default table schema for the TopLink project this session is associated with.
boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
getSession().getSessionLog().setShouldLogExceptionStackTrace(false);
try {
TableCreator tableCreator = getDefaultTableCreator();
tableCreator.replaceTables(session, this, keepSequenceTables);
} catch (DatabaseException exception) {
// Ignore error
} finally {
getSession().getSessionLog().setShouldLogExceptionStackTrace(shouldLogExceptionStackTrace);
}
|
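A sketch of the keepSequenceTables variant, executed directly against the database; the interpretation of the flag is taken from its name, and the construction of the SchemaManager instance is assumed:

import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class ReplaceTablesExample {
    /** Rebuilds the default tables while leaving the sequence table(s) in place. */
    public static void rebuildKeepingSequences(SchemaManager manager) {
        manager.outputDDLToDatabase();       // execute statements directly
        manager.replaceDefaultTables(true);  // true = keep sequence tables, so current sequence values are preserved
    }
}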
public void | replaceObject(oracle.toplink.essentials.tools.schemaframework.DatabaseObjectDefinition databaseDefinition)Use the definition object to drop and recreate the schema entity on the database.
This is used for dropping tables, views, procedures, etc.
This handles and ignores any database error while dropping, in case the object did not previously exist.
// CR 3870467, do not log stack
boolean shouldLogExceptionStackTrace = getSession().getSessionLog().shouldLogExceptionStackTrace();
if (shouldLogExceptionStackTrace) {
getSession().getSessionLog().setShouldLogExceptionStackTrace(false);
}
try {
dropObject(databaseDefinition);
} catch (DatabaseException exception) {
// Ignore error
} finally {
if (shouldLogExceptionStackTrace) {
getSession().getSessionLog().setShouldLogExceptionStackTrace(true);
}
}
createObject(databaseDefinition);
|
public void | replaceSequences()Drop and recreate all the receiver's sequences on the database for all of the loaded descriptors.
createOrReplaceSequences(false);
|
public void | setCreateSQLFiles(boolean genFlag)
this.createSQLFiles = genFlag;
|
public void | setSession(oracle.toplink.essentials.internal.sessions.DatabaseSessionImpl session)
this.session = session;
|
public boolean | shouldWriteToDatabase()PUBLIC:
Return true if this SchemaManager should write to the database directly
return ((this.createSchemaWriter == null) && (this.dropSchemaWriter == null));
|
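The output-selection methods above determine the result of shouldWriteToDatabase(); a small sketch under the same assumptions as the earlier examples (run with -ea to enable the assertions):

import java.io.StringWriter;
import oracle.toplink.essentials.tools.schemaframework.SchemaManager;

public class OutputModeExample {
    /** Shows how the output methods flip between database and writer mode. */
    public static void showModes(SchemaManager manager) {
        manager.outputDDLToDatabase();
        assert manager.shouldWriteToDatabase();   // both writers are null: DDL is executed directly

        manager.outputDDLToWriter(new StringWriter());
        assert !manager.shouldWriteToDatabase();  // a create writer is set: DDL is captured as text
    }
}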