The primary use case: in most large IT organizations, many internal applications use some kind of RDBMS as a back-end, and over the years this adds up to hundreds of databases. Operations are typically siloed, with each department focusing only on its own needs, so you end up seeing a lot of duplicated data. In my case, I ended up analyzing a very large database: hundreds of schemas and tablespaces, thousands of tables, a five-digit number of columns, plus views, materialized views, stored procedures, and more. I noticed that many people use Oracle SQL Developer for this kind of analysis and keep jumping from one table or view to other tables in other schemas. After seeing this, I wrote a small database walker. Its primary purpose is to crawl the entire Oracle data dictionary and produce XML, which I feed to Solr so I can build a simple Google-like search interface (using Solr's default Velocity-template web UI) for tables, columns, schemas, primary keys, and more. I will host the entire project on GitHub. In this post, I am including the Oracle table metadata part only (table columns, primary keys, imported and exported keys, and column metadata; I wrote additional code to pull stored procedure source and other objects).
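For context, here is a minimal sketch of the driver loop that walks the data dictionary and calls tableInfo() for each table. This loop is illustrative rather than the project's exact code: the JDBC URL, credentials, and schema name are placeholders, and it assumes the tableInfo() method shown afterwards lives in the same class.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class DbWalker {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details.
        Connection conn = DriverManager.getConnection(
                "jdbc:oracle:thin:@dbhost:1521:ORCL", "user", "password");
        try {
            DatabaseMetaData meta = conn.getMetaData();
            // List every table and view in the given schema.
            ResultSet tables = meta.getTables(
                    null, "MYSCHEMA", "%", new String[] { "TABLE", "VIEW" });
            while (tables.next()) {
                tableInfo(meta,
                        tables.getString("TABLE_NAME"),
                        tables.getString("TABLE_TYPE"),
                        tables.getString("TABLE_SCHEM"));
            }
            tables.close();
        } finally {
            conn.close();
        }
    }
}

The metadata extraction itself happens in tableInfo():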
import java.io.File;
import java.io.FileWriter;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.util.HashSet;

// Small helper: writes one Solr <field> element. (Values are written as-is;
// a production version should XML-escape them.)
private static void writeField(FileWriter fw, String name, String value) throws Exception {
    fw.write("<field name=\"" + name + "\">");
    fw.write(value);
    fw.write("</field>\n");
}

// stageDir is a class-level field holding the output directory; LogicalMetadata
// is a project class that maps physical columns to the logical data model.
public static void tableInfo(DatabaseMetaData meta, String tableName,
        String tableType, String schemaName) throws Exception {
    String catalog = null;
    String schemaPattern = schemaName;
    String tableNamePattern = tableName;
    String columnNamePattern = null;
    String outputFile = stageDir + schemaName + "_" + tableName + ".xml";

    // Skip tables that were already crawled in an earlier run.
    File f = new File(outputFile);
    if (f.exists()) {
        System.out.print("Skipping->" + outputFile);
        return;
    }
    FileWriter fw;
    try {
        fw = new FileWriter(outputFile);
    } catch (Exception e) {
        System.out.print("Error ... Skipping->" + outputFile);
        return;
    }

    // Solr XML update format: one <doc> per table inside an <add> element.
    fw.write("<add>\n");
    fw.write("<doc>\n");

    // Table-level fields.
    writeField(fw, "id", tableName);
    writeField(fw, "tableName", tableName);
    writeField(fw, "tableType", tableType);
    writeField(fw, "schemaName", schemaName);

    // Column metadata: in getColumns() results, COLUMN_NAME is column 4
    // and TYPE_NAME is column 6.
    ResultSet result = meta.getColumns(
            catalog, schemaPattern, tableNamePattern, columnNamePattern);
    while (result.next()) {
        String columnName = result.getString(4);
        String columnTypeStr = result.getString(6);
        writeField(fw, "colName", columnName);
        writeField(fw, "colMeta", columnName + "," + columnTypeStr);

        // Pull logical (data-model) names for this column, if available:
        // index 4 is the attribute name, index 6 its description.
        String[] logicalData = LogicalMetadata.getLogicalData(schemaName, tableName, columnName);
        if (logicalData != null && logicalData.length >= 7) {
            writeField(fw, columnName + "_lan", logicalData[4]); // logical attribute name
            writeField(fw, columnName + "_lad", logicalData[6]); // logical attribute description
        }
    }
    result.close();

    // Primary key columns, deduplicated via a HashSet.
    ResultSet result1 = meta.getPrimaryKeys(catalog, schemaName, tableNamePattern);
    HashSet<String> set = new HashSet<String>();
    while (result1.next()) {
        String columnName = result1.getString(4); // COLUMN_NAME
        if (!set.contains(columnName)) {
            writeField(fw, "primaryKey", columnName);
            set.add(columnName);
        }
    }
    result1.close();
    set.clear();

    // Exported keys: foreign keys in other tables that reference this table.
    ResultSet rs = meta.getExportedKeys(catalog, schemaPattern, tableNamePattern);
    while (rs.next()) {
        String fkTableName = rs.getString("FKTABLE_NAME");
        String fkColumnName = rs.getString("FKCOLUMN_NAME");
        int fkSequence = rs.getInt("KEY_SEQ");
        writeField(fw, "ExportedKeys_Table_Colum_Seq",
                fkTableName + "." + fkColumnName + "." + fkSequence);
    }
    rs.close();

    // Imported keys: foreign keys in this table that reference other tables.
    ResultSet foreignKeys = meta.getImportedKeys(catalog, schemaName, tableNamePattern);
    while (foreignKeys.next()) {
        String fkTableName = foreignKeys.getString("FKTABLE_NAME");
        String fkColumnName = foreignKeys.getString("FKCOLUMN_NAME");
        String pkTableName = foreignKeys.getString("PKTABLE_NAME");
        String pkColumnName = foreignKeys.getString("PKCOLUMN_NAME");
        writeField(fw, "ImportedKeys_Table_Colum_Seq",
                fkTableName + "." + fkColumnName + "." + pkTableName + "." + pkColumnName);
    }
    foreignKeys.close();

    fw.write("</doc>\n");
    fw.write("</add>\n");
    fw.flush();
    fw.close();
}
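For a hypothetical HR.EMPLOYEES table, the generated file would look roughly like this (all values below are illustrative):

<add>
<doc>
<field name="id">EMPLOYEES</field>
<field name="tableName">EMPLOYEES</field>
<field name="tableType">TABLE</field>
<field name="schemaName">HR</field>
<field name="colName">EMPLOYEE_ID</field>
<field name="colMeta">EMPLOYEE_ID,NUMBER</field>
<field name="colName">DEPARTMENT_ID</field>
<field name="colMeta">DEPARTMENT_ID,NUMBER</field>
<field name="primaryKey">EMPLOYEE_ID</field>
<field name="ExportedKeys_Table_Colum_Seq">JOB_HISTORY.EMPLOYEE_ID.1</field>
<field name="ImportedKeys_Table_Colum_Seq">EMPLOYEES.DEPARTMENT_ID.DEPARTMENTS.DEPARTMENT_ID</field>
</doc>
</add>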
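Feeding the staged files into Solr is just an HTTP POST of each XML file to Solr's XML update handler, followed by a commit. Here is a minimal sketch, assuming a Solr instance at http://localhost:8983/solr; the update URL and staging directory are placeholders:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class SolrLoader {

    // POSTs one XML body to Solr's XML update handler.
    static void post(String updateUrl, byte[] body) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(updateUrl).openConnection();
        conn.setDoOutput(true);
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "text/xml; charset=UTF-8");
        OutputStream out = conn.getOutputStream();
        out.write(body);
        out.close();
        if (conn.getResponseCode() != 200) {
            System.out.println("Solr returned HTTP " + conn.getResponseCode());
        }
        conn.disconnect();
    }

    public static void main(String[] args) throws Exception {
        String updateUrl = "http://localhost:8983/solr/update"; // placeholder Solr URL
        DirectoryStream<Path> files =
                Files.newDirectoryStream(Paths.get("/tmp/stage"), "*.xml"); // placeholder stage dir
        for (Path p : files) {
            post(updateUrl, Files.readAllBytes(p));
        }
        files.close();
        // Commit so the new documents become searchable.
        post(updateUrl, "<commit/>".getBytes("UTF-8"));
    }
}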