/** Load the Database Info */
public DatabaseMeta loadDatabaseMeta(ObjectId id_database) throws KettleException {
  DatabaseMeta databaseMeta = new DatabaseMeta();
  try {
    RowMetaAndData r = getDatabase(id_database);
    if (r != null) {
      ObjectId id_database_type =
          new LongObjectId(
              r.getInteger(KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, 0)); // con_type
      String dbTypeDesc = getDatabaseTypeCode(id_database_type);
      if (dbTypeDesc != null) {
        databaseMeta.setDatabaseInterface(DatabaseMeta.getDatabaseInterface(dbTypeDesc));
        databaseMeta.setAttributes(new Properties()); // new attributes
      }

      databaseMeta.setObjectId(id_database);
      databaseMeta.setName(r.getString(KettleDatabaseRepository.FIELD_DATABASE_NAME, ""));

      ObjectId id_database_contype =
          new LongObjectId(
              r.getInteger(KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, 0)); // con_access
      databaseMeta.setAccessType(
          DatabaseMeta.getAccessType(getDatabaseConTypeCode(id_database_contype)));

      databaseMeta.setHostname(r.getString(KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ""));
      databaseMeta.setDBName(r.getString(KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ""));
      databaseMeta.setDBPort(r.getString(KettleDatabaseRepository.FIELD_DATABASE_PORT, ""));
      databaseMeta.setUsername(r.getString(KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ""));
      databaseMeta.setPassword(
          Encr.decryptPasswordOptionallyEncrypted(
              r.getString(KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, "")));
      databaseMeta.setServername(r.getString(KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ""));
      databaseMeta.setDataTablespace(r.getString(KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ""));
      databaseMeta.setIndexTablespace(r.getString(KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ""));

      // Also, load all the properties we can find...
      final Collection<RowMetaAndData> attrs =
          repository.connectionDelegate.getDatabaseAttributes(id_database);
      for (RowMetaAndData row : attrs) {
        String code = row.getString(KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, "");
        String attribute =
            row.getString(KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, "");
        databaseMeta.getAttributes().put(code, Const.NVL(attribute, ""));
      }
    }

    return databaseMeta;
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Error loading database connection from repository (id_database=" + id_database + ")",
        dbe);
  }
}
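A minimal usage sketch (an assumption, not taken from the original source): the id value below is purely illustrative, and the call is assumed to be made from code that already holds this delegate. The getters simply mirror the setters populated above from the R_DATABASE row.

// Hedged usage sketch; the id value is illustrative and the call site is assumed.
ObjectId id_database = new LongObjectId(1L); // hypothetical connection id
DatabaseMeta databaseMeta = loadDatabaseMeta(id_database);
// Name and host come straight from the R_DATABASE columns read above.
String summary = databaseMeta.getName() + " @ " + databaseMeta.getHostname();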
// Load user with login from repository, don't verify password...
public UserInfo(Repository rep, String login) throws KettleException {
  try {
    long id_profile;

    setID(rep.getUserID(login));
    if (getID() > 0) {
      RowMetaAndData r = rep.getUser(getID());
      if (r != null) {
        this.login = r.getString("LOGIN", null);
        password = Encr.decryptPassword(r.getString("PASSWORD", null));
        name = r.getString("NAME", null);
        description = r.getString("DESCRIPTION", null);
        enabled = r.getBoolean("ENABLED", false);
        id_profile = r.getInteger("ID_PROFILE", 0);
        profile = new ProfileMeta(rep, id_profile);
      } else {
        setID(-1L);
        throw new KettleDatabaseException(
            Messages.getString("UserInfo.Error.UserNotFound", login));
      }
    } else {
      setID(-1L);
      throw new KettleDatabaseException(Messages.getString("UserInfo.Error.UserNotFound", login));
    }
  } catch (KettleDatabaseException dbe) {
    rep.log.logError(
        toString(), Messages.getString("UserInfo.Error.UserNotLoaded", login, dbe.getMessage()));
    throw new KettleException(Messages.getString("UserInfo.Error.UserNotLoaded", login, ""), dbe);
  }
}
public synchronized int getNrDatabases() throws KettleException {
  int retval = 0;

  String sql = "SELECT COUNT(*) FROM " + quoteTable(KettleDatabaseRepository.TABLE_R_DATABASE);
  RowMetaAndData r = repository.connectionDelegate.getOneRow(sql);
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}
public synchronized int getNrDatabaseAttributes(ObjectId id_database) throws KettleException {
  int retval = 0;

  String sql =
      "SELECT COUNT(*) FROM "
          + quoteTable(KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE)
          + " WHERE "
          + quote(KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE)
          + " = "
          + id_database;
  RowMetaAndData r = repository.connectionDelegate.getOneRow(sql);
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}
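Note that this counter inlines the id directly into the SQL text, while the other counters below bind it as a parameter. A hedged alternative sketch (the method name is hypothetical, not the original implementation) showing the same query in the parameterized style used by getNrStepAttributes and getNrDatabases(ObjectId):

// Hedged alternative sketch: same count, but with the id bound as a parameter
// in the style of the other counters in this delegate; the method name is hypothetical.
public synchronized int getNrDatabaseAttributesParameterized(ObjectId id_database)
    throws KettleException {
  int retval = 0;

  RowMetaAndData par = repository.connectionDelegate.getParameterMetaData(id_database);
  String sql =
      "SELECT COUNT(*) FROM "
          + quoteTable(KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE)
          + " WHERE "
          + quote(KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE)
          + " = ? ";
  RowMetaAndData r =
      repository.connectionDelegate.getOneRow(sql, par.getRowMeta(), par.getData());
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}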
public synchronized int getNrStepAttributes(ObjectId id_step) throws KettleException {
  int retval = 0;

  RowMetaAndData par = repository.connectionDelegate.getParameterMetaData(id_step);
  String sql =
      "SELECT COUNT(*) FROM "
          + quoteTable(KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE)
          + " WHERE "
          + quote(KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP)
          + " = ? ";
  RowMetaAndData r =
      repository.connectionDelegate.getOneRow(sql, par.getRowMeta(), par.getData());
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}
public Collection<RowMetaAndData> getDatabaseAttributes()
    throws KettleDatabaseException, KettleValueException {
  List<RowMetaAndData> attrs = new ArrayList<RowMetaAndData>();
  List<Object[]> rows =
      repository.connectionDelegate.getRows(
          "SELECT * FROM " + quoteTable(KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE), 0);
  for (Object[] row : rows) {
    RowMetaAndData rowWithMeta =
        new RowMetaAndData(repository.connectionDelegate.getReturnRowMeta(), row);
    long id =
        rowWithMeta.getInteger(
            quote(KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE), 0);
    if (id > 0) {
      attrs.add(rowWithMeta);
    }
  }
  return attrs;
}
public synchronized int getNrDatabases(ObjectId id_transformation) throws KettleException {
  int retval = 0;

  RowMetaAndData transIdRow =
      repository.connectionDelegate.getParameterMetaData(id_transformation);
  String sql =
      "SELECT COUNT(*) FROM "
          + quoteTable(KettleDatabaseRepository.TABLE_R_STEP_DATABASE)
          + " WHERE "
          + quote(KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION)
          + " = ? ";
  RowMetaAndData r =
      repository.connectionDelegate.getOneRow(sql, transIdRow.getRowMeta(), transIdRow.getData());
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}
/**
 * Create a new step by loading its metadata from the repository.
 *
 * @param stepId the id of the step to load
 * @param databases the list of database connections to reference
 * @param partitionSchemas the list of partition schemas to reference
 * @throws KettleException
 */
public StepMeta loadStepMeta(
    ObjectId stepId, List<DatabaseMeta> databases, List<PartitionSchema> partitionSchemas)
    throws KettleException {
  StepMeta stepMeta = new StepMeta();
  PluginRegistry registry = PluginRegistry.getInstance();

  try {
    RowMetaAndData r = getStep(stepId);
    if (r != null) {
      stepMeta.setObjectId(stepId);

      stepMeta.setName(r.getString(KettleDatabaseRepository.FIELD_STEP_NAME, null));
      stepMeta.setDescription(r.getString(KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, null));

      long id_step_type = r.getInteger(KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, -1L);
      RowMetaAndData steptyperow = getStepType(new LongObjectId(id_step_type));

      stepMeta.setStepID(steptyperow.getString(KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, null));
      stepMeta.setDistributes(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, true));

      int copies = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_COPIES, 1);
      String copiesString = r.getString(KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, null);
      if (!Const.isEmpty(copiesString)) {
        stepMeta.setCopiesString(copiesString);
      } else {
        stepMeta.setCopies(copies);
      }

      int x = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 0);
      int y = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 0);
      stepMeta.setLocation(new Point(x, y));
      stepMeta.setDraw(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, false));

      // Generate the appropriate class...
      PluginInterface sp = registry.findPluginWithId(StepPluginType.class, stepMeta.getStepID());
      if (sp != null) {
        stepMeta.setStepMetaInterface((StepMetaInterface) registry.loadClass(sp));
      } else {
        throw new KettlePluginLoaderException(
            stepMeta.getStepID(),
            BaseMessages.getString(
                PKG, "StepMeta.Exception.UnableToLoadClass", stepMeta.getStepID() + Const.CR));
      }

      if (stepMeta.getStepMetaInterface() != null) {
        // Read the step info from the repository!
        readRepCompatibleStepMeta(
            stepMeta.getStepMetaInterface(), repository, stepMeta.getObjectId(), databases);
        stepMeta
            .getStepMetaInterface()
            .readRep(repository, repository.metaStore, stepMeta.getObjectId(), databases);
      }

      // Get the partitioning as well...
      //
      stepMeta.setStepPartitioningMeta(loadStepPartitioningMeta(stepMeta.getObjectId()));
      stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(partitionSchemas);

      // Get the cluster schema name
      //
      stepMeta.setClusterSchemaName(repository.getStepAttributeString(stepId, "cluster_schema"));

      // Are we using a custom row distribution plugin?
      //
      String rowDistributionCode =
          repository.getStepAttributeString(stepId, 0, "row_distribution_code");
      RowDistributionInterface rowDistribution =
          PluginRegistry.getInstance()
              .loadClass(
                  RowDistributionPluginType.class,
                  rowDistributionCode,
                  RowDistributionInterface.class);
      stepMeta.setRowDistribution(rowDistribution);

      // Load the attribute groups map
      //
      stepMeta.setAttributesMap(loadStepAttributesMap(stepId));

      // Done!
      //
      return stepMeta;
    } else {
      throw new KettleException(
          BaseMessages.getString(
              PKG, "StepMeta.Exception.StepInfoCouldNotBeFound", String.valueOf(stepId)));
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        BaseMessages.getString(
            PKG, "StepMeta.Exception.StepCouldNotBeLoaded", String.valueOf(stepMeta.getObjectId())),
        dbe);
  }
}
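A hedged usage sketch (assumed, not from the original source): the lists passed in are typically the shared objects already loaded for the transformation being read, and the step id here is purely illustrative.

// Hedged usage sketch; stepId is illustrative, and readDatabases() is assumed to be
// available on the repository for loading the shared connections first.
ObjectId stepId = new LongObjectId(1L);
List<DatabaseMeta> databases = repository.readDatabases();
List<PartitionSchema> partitionSchemas = new ArrayList<PartitionSchema>();
StepMeta stepMeta = loadStepMeta(stepId, databases, partitionSchemas);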
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  result.setResult(false);

  // see PDI-10270, PDI-10644 for details
  boolean oldBehavior =
      "Y".equalsIgnoreCase(
          getVariable(Const.KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES, "N"));

  String countSQLStatement = null;
  long rowsCount = 0;
  long errCount = 0;

  boolean successOK = false;

  int nrRowsLimit = Const.toInt(environmentSubstitute(limit), 0);
  if (log.isDetailed()) {
    logDetailed(
        BaseMessages.getString(PKG, "JobEntryEvalTableContent.Log.nrRowsLimit", "" + nrRowsLimit));
  }

  if (connection != null) {
    Database db = new Database(this, connection);
    db.shareVariablesWith(this);
    try {
      db.connect(parentJob.getTransactionId(), null);

      if (iscustomSQL) {
        String realCustomSQL = customSQL;
        if (isUseVars) {
          realCustomSQL = environmentSubstitute(realCustomSQL);
        }
        if (log.isDebug()) {
          logDebug(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.EnteredCustomSQL", realCustomSQL));
        }

        if (!Const.isEmpty(realCustomSQL)) {
          countSQLStatement = realCustomSQL;
        } else {
          errCount++;
          logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.Error.NoCustomSQL"));
        }
      } else {
        String realTablename = environmentSubstitute(tablename);
        String realSchemaname = environmentSubstitute(schemaname);

        if (!Const.isEmpty(realTablename)) {
          if (!Const.isEmpty(realSchemaname)) {
            countSQLStatement =
                selectCount
                    + db.getDatabaseMeta()
                        .getQuotedSchemaTableCombination(realSchemaname, realTablename);
          } else {
            countSQLStatement = selectCount + db.getDatabaseMeta().quoteField(realTablename);
          }
        } else {
          errCount++;
          logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.Error.NoTableName"));
        }
      }

      if (countSQLStatement != null) {
        if (log.isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.RunSQLStatement", countSQLStatement));
        }

        if (iscustomSQL) {
          if (isClearResultList) {
            result.getRows().clear();
          }

          List<Object[]> ar = db.getRows(countSQLStatement, 0);
          if (ar != null) {
            rowsCount = ar.size();

            // Add the returned rows to the result
            RowMetaInterface rowMeta = db.getQueryFields(countSQLStatement, false);

            List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
            for (int i = 0; i < ar.size(); i++) {
              rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
            }
            if (isAddRowsResult && iscustomSQL) {
              if (rows != null) {
                result.getRows().addAll(rows);
              }
            }
          } else {
            if (log.isDebug()) {
              logDebug(
                  BaseMessages.getString(
                      PKG,
                      "JobEntryEvalTableContent.Log.customSQLreturnedNothing",
                      countSQLStatement));
            }
          }
        } else {
          RowMetaAndData row = db.getOneRow(countSQLStatement);
          if (row != null) {
            rowsCount = row.getInteger(0);
          }
        }
        if (log.isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.NrRowsReturned", "" + rowsCount));
        }

        switch (successCondition) {
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
            successOK = (rowsCount == nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
            successOK = (rowsCount != nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
            successOK = (rowsCount < nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
            successOK = (rowsCount <= nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
            successOK = (rowsCount > nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
            successOK = (rowsCount >= nrRowsLimit);
            break;
          default:
            break;
        }

        if (!successOK && oldBehavior) {
          errCount++;
        }
      } // end if countSQLStatement != null
    } catch (KettleException dbe) {
      errCount++;
      logError(
          BaseMessages.getString(
              PKG, "JobEntryEvalTableContent.Error.RunningEntry", dbe.getMessage()));
    } finally {
      if (db != null) {
        db.disconnect();
      }
    }
  } else {
    errCount++;
    logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.NoDbConnection"));
  }

  result.setResult(successOK);
  result.setNrLinesRead(rowsCount);
  result.setNrErrors(errCount);

  return result;
}
protected boolean SQLDataOK(
    Result result,
    long nrRowsLimit,
    String realSchemaName,
    String realTableName,
    String customSQL)
    throws KettleException {
  String countStatement = null;
  long rowsCount = 0;
  boolean successOK = false;
  List<Object[]> ar = null;
  RowMetaInterface rowMeta = null;
  Database db = new Database(this, connection);
  db.shareVariablesWith(this);
  try {
    db.connect(parentJob.getTransactionId(), null);
    if (iscustomSQL) {
      countStatement = customSQL;
    } else {
      if (!Const.isEmpty(realSchemaName)) {
        countStatement =
            selectCount
                + db.getDatabaseMeta()
                    .getQuotedSchemaTableCombination(realSchemaName, realTableName);
      } else {
        countStatement = selectCount + db.getDatabaseMeta().quoteField(realTableName);
      }
    }

    if (countStatement != null) {
      if (log.isDetailed()) {
        logDetailed(
            BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.RunSQLStatement", countStatement));
      }

      if (iscustomSQL) {
        ar = db.getRows(countStatement, 0);
        if (ar != null) {
          rowsCount = ar.size();
        } else {
          if (log.isDebug()) {
            logDebug(
                BaseMessages.getString(
                    PKG, "JobEntryWaitForSQL.Log.customSQLreturnedNothing", countStatement));
          }
        }
      } else {
        RowMetaAndData row = db.getOneRow(countStatement);
        if (row != null) {
          rowsCount = row.getInteger(0);
        }
      }
      if (log.isDetailed()) {
        logDetailed(
            BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.NrRowsReturned", "" + rowsCount));
      }

      switch (successCondition) {
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
          successOK = (rowsCount == nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
          successOK = (rowsCount != nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
          successOK = (rowsCount < nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
          successOK = (rowsCount <= nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
          successOK = (rowsCount > nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
          successOK = (rowsCount >= nrRowsLimit);
          break;
        default:
          break;
      }
    } // end if countStatement != null
  } catch (KettleDatabaseException dbe) {
    logError(
        BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.RunningEntry", dbe.getMessage()));
  } finally {
    if (db != null) {
      if (isAddRowsResult && iscustomSQL && ar != null) {
        rowMeta = db.getQueryFields(countStatement, false);
      }
      db.disconnect();
    }
  }

  if (successOK) {
    // Add the returned rows to the result
    if (isAddRowsResult && iscustomSQL && ar != null) {
      List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
      for (int i = 0; i < ar.size(); i++) {
        rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
      }
      if (rows != null) {
        result.getRows().addAll(rows);
      }
    }
  }
  return successOK;
}
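Both execute() and SQLDataOK() above repeat the same success-condition switch. A hedged refactoring sketch follows (a hypothetical helper, not part of the original classes; the condition codes are illustrative placeholders rather than the constants actually defined by JobEntryEvalTableContent or JobEntryWaitForSQL), showing how that comparison could be centralized in one place.

// Hypothetical helper, not from the original source: centralizes the row-count
// comparison duplicated in the two switch statements above. The codes below are
// illustrative placeholders, not the values defined by the real job entries.
final class RowCountCondition {
  static final int EQUAL = 0;
  static final int DIFFERENT = 1;
  static final int SMALLER = 2;
  static final int SMALLER_EQUAL = 3;
  static final int GREATER = 4;
  static final int GREATER_EQUAL = 5;

  static boolean isMet(int condition, long rowsCount, long nrRowsLimit) {
    switch (condition) {
      case EQUAL:
        return rowsCount == nrRowsLimit;
      case DIFFERENT:
        return rowsCount != nrRowsLimit;
      case SMALLER:
        return rowsCount < nrRowsLimit;
      case SMALLER_EQUAL:
        return rowsCount <= nrRowsLimit;
      case GREATER:
        return rowsCount > nrRowsLimit;
      case GREATER_EQUAL:
        return rowsCount >= nrRowsLimit;
      default:
        return false;
    }
  }
}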