public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { for (int i = 0; i < fieldName.length; i++) { if (fieldName[i] != null && fieldName[i].length() != 0) { rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldName[i]); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "field_type", ValueMeta.getTypeDesc(fieldType[i])); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "field_length", fieldLength[i]); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", fieldPrecision[i]); // $NON-NLS-1$ } } rep.saveStepAttribute( id_transformation, id_step, "select_unspecified", selectingAndSortingUnspecifiedFields); //$NON-NLS-1$ } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "MappingInputMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$ } }
/**
 * Restores this SNMPTrap job entry's settings from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_jobentry the job entry to load
 * @param databases available database connections (unused)
 * @param slaveServers available slave servers (unused)
 * @throws KettleException when any attribute cannot be read
 */
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    final ObjectId id = id_jobentry;
    port = rep.getJobEntryAttributeString(id, "port");
    serverName = rep.getJobEntryAttributeString(id, "servername");
    oid = rep.getJobEntryAttributeString(id, "oid");
    message = rep.getJobEntryAttributeString(id, "message");
    comString = rep.getJobEntryAttributeString(id, "comstring");
    timeout = rep.getJobEntryAttributeString(id, "timeout");
    nrretry = rep.getJobEntryAttributeString(id, "nrretry");
    targettype = rep.getJobEntryAttributeString(id, "targettype");
    user = rep.getJobEntryAttributeString(id, "user");
    passphrase = rep.getJobEntryAttributeString(id, "passphrase");
    engineid = rep.getJobEntryAttributeString(id, "engineid");
  } catch (KettleException e) {
    throw new KettleException(
        "Unable to load job entry of type 'SNMPTrap' from the repository for id_jobentry="
            + id_jobentry,
        e);
  }
}
/**
 * Saves this XSLT job entry's configuration, its parameter list and output properties to the
 * repository.
 *
 * @param rep the repository to write to
 * @param metaStore unused here, required by the interface
 * @param id_job the owning job
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "xmlfilename", xmlfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "xslfilename", xslfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "outputfilename", outputfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "iffileexists", iffileexists);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
    rep.saveJobEntryAttribute(
        id_job, getObjectId(), "filenamesfromprevious", filenamesfromprevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "xsltfactory", xsltfactory);
    // Indexed parameter (field -> name) pairs.
    for (int i = 0; i < parameterName.length; i++) {
      rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_field", parameterField[i]);
      rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_name", parameterName[i]);
    }
    // Indexed output-property (name -> value) pairs.
    for (int i = 0; i < outputPropertyName.length; i++) {
      rep.saveJobEntryAttribute(
          id_job, getObjectId(), i, "output_property_name", outputPropertyName[i]);
      rep.saveJobEntryAttribute(
          id_job, getObjectId(), i, "output_property_value", outputPropertyValue[i]);
    }
  } catch (KettleException dbe) {
    // Broadened from KettleDatabaseException: previously any other KettleException escaped
    // without the id_job context being attached to it.
    throw new KettleException(
        "Unable to save job entry of type 'xslt' to the repository for id_job=" + id_job, dbe);
  }
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException { try { argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous"); includeSubfolders = rep.getJobEntryAttributeBoolean(id_jobentry, "include_subfolders"); // How many arguments? int argnr = rep.countNrJobEntryAttributes(id_jobentry, "name"); arguments = new String[argnr]; filemasks = new String[argnr]; // Read them all... for (int a = 0; a < argnr; a++) { arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "name"); filemasks[a] = rep.getJobEntryAttributeString(id_jobentry, a, "filemask"); } } catch (KettleException dbe) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.UnableToLoadFromRepo", String.valueOf(id_jobentry)), dbe); } }
/**
 * Restores this "evaluate table content" job entry from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_jobentry the job entry to load
 * @param databases connections available for resolving the stored database reference
 * @param slaveServers available slave servers (unused)
 * @throws KettleException when any attribute cannot be read
 */
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    connection =
        rep.loadDatabaseMetaFromJobEntryAttribute(
            id_jobentry, "connection", "id_database", databases);
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    // NVL guards against a missing attribute so the lookup always gets a non-null code.
    successCondition =
        getSuccessConditionByCode(
            Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
    limit = rep.getJobEntryAttributeString(id_jobentry, "limit");
    iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
    isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
    isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
    isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
    customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
  } catch (KettleException dbe) {
    // Broadened from KettleDatabaseException: previously non-database KettleExceptions escaped
    // without the localized context message being attached.
    throw new KettleException(
        BaseMessages.getString(PKG, "JobEntryEvalTableContent.UnableLoadRep", "" + id_jobentry),
        dbe);
  }
}
/**
 * Persists the per-field formatting metadata and the row limit for this step.
 *
 * @param rep the repository to write to
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  try {
    for (int idx = 0; idx < fieldName.length; idx++) {
      String name = fieldName[idx];
      if (name == null || name.length() == 0) {
        continue; // skip empty field slots
      }
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_name", name);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_type", fieldType[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_format", fieldFormat[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_currency", currency[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_decimal", decimal[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_group", group[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_nullif", value[idx]);
      rep.saveStepAttribute(id_transformation, id_step, idx, "field_length", fieldLength[idx]);
      rep.saveStepAttribute(
          id_transformation, id_step, idx, "field_precision", fieldPrecision[idx]);
      rep.saveStepAttribute(
          id_transformation, id_step, idx, "set_empty_string", setEmptyString[idx]);
    }
    rep.saveStepAttribute(id_transformation, id_step, "limit", rowLimit);
  } catch (Exception e) {
    throw new KettleException(
        "Unable to save step information to the repository for id_step=" + id_step, e);
  }
}
/**
 * Persists the connection settings (host, port, buffer size, compression flag) for this step.
 * Repository errors propagate to the caller as KettleException.
 *
 * @param rep the repository to write to
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when an attribute cannot be stored
 */
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  final ObjectId transId = id_transformation;
  rep.saveStepAttribute(transId, id_step, "hostname", hostname);
  rep.saveStepAttribute(transId, id_step, "port", port);
  rep.saveStepAttribute(transId, id_step, "buffer_size", bufferSize);
  rep.saveStepAttribute(transId, id_step, "compressed", compressed);
}
/**
 * Restores the connection settings (host, port, buffer size, compression flag) for this step.
 * Repository errors propagate to the caller as KettleException.
 *
 * @param rep the repository to read from
 * @param id_step the step whose attributes are read
 * @param databases available database connections (unused)
 * @param counters shared counters (unused)
 * @throws KettleException when an attribute cannot be read
 */
public void readRep(
    Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters)
    throws KettleException {
  final ObjectId stepId = id_step;
  hostname = rep.getStepAttributeString(stepId, "hostname");
  port = rep.getStepAttributeString(stepId, "port");
  bufferSize = rep.getStepAttributeString(stepId, "buffer_size");
  compressed = rep.getStepAttributeBoolean(stepId, "compressed");
}
// // Save the attributes of this job entry // public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException { try { rep.saveJobEntryAttribute(id_job, getObjectId(), "maximumTimeout", maximumTimeout); rep.saveJobEntryAttribute(id_job, getObjectId(), "scaletime", scaleTime); } catch (KettleDatabaseException dbe) { throw new KettleException( BaseMessages.getString(PKG, "JobEntryDelay.UnableToSaveToRepo.Label") + id_job, dbe); } }
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta); rep.saveStepAttribute(id_transformation, id_step, "commit", commitSize); rep.saveStepAttribute(id_transformation, id_step, "sql_field", sqlField); rep.saveStepAttribute(id_transformation, id_step, "insert_field", insertField); rep.saveStepAttribute(id_transformation, id_step, "update_field", updateField); rep.saveStepAttribute(id_transformation, id_step, "delete_field", deleteField); rep.saveStepAttribute(id_transformation, id_step, "read_field", readField); // Also, save the step-database relationship! if (databaseMeta != null) { rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId()); } rep.saveStepAttribute(id_transformation, id_step, "sqlFromfile", sqlFromfile); rep.saveStepAttribute(id_transformation, id_step, "sendOneStatement", sendOneStatement); } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "ExecSQLRowMeta.Exception.UnableToSaveStepInfo") + id_step, e); } }
/**
 * Restores the "execute SQL row" step configuration from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_step the step whose attributes are read
 * @param databases connections available for resolving the stored database reference
 * @throws KettleException when any attribute cannot be read
 */
public void readRep(
    Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases)
    throws KettleException {
  try {
    databaseMeta = rep.loadDatabaseMetaFromStepAttribute(id_step, "id_connection", databases);
    commitSize = (int) rep.getStepAttributeInteger(id_step, "commit");
    sqlField = rep.getStepAttributeString(id_step, "sql_field");
    insertField = rep.getStepAttributeString(id_step, "insert_field");
    updateField = rep.getStepAttributeString(id_step, "update_field");
    deleteField = rep.getStepAttributeString(id_step, "delete_field");
    readField = rep.getStepAttributeString(id_step, "read_field");
    sqlFromfile = rep.getStepAttributeBoolean(id_step, "sqlFromfile");
    // Default "send one statement" to true when the attribute is absent so transformations
    // saved before the flag existed keep their original behavior.
    if (Const.isEmpty(rep.getStepAttributeString(id_step, "sendOneStatement"))) {
      sendOneStatement = true;
    } else {
      sendOneStatement = rep.getStepAttributeBoolean(id_step, "sendOneStatement");
    }
  } catch (Exception e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "ExecSQLRowMeta.Exception.UnexpectedErrorReadingStepInfo"), e);
  }
}
public void saveRep(Repository rep, ObjectId id_job) throws KettleException { try { rep.saveStepAttribute(id_job, getObjectId(), "recordset_name", recordsetName); // private Text algType; //NaiveBayes, Logistic // private Text dependentVar; // 1 // private Text independentVar; // 2 rep.saveStepAttribute(id_job, getObjectId(), "model", model); rep.saveStepAttribute(id_job, getObjectId(), "independentVar", independentVar); rep.saveStepAttribute(id_job, getObjectId(), "classifyType", classifyType); rep.saveStepAttribute(id_job, getObjectId(), "dataType", dataType); rep.saveStepAttribute(id_job, getObjectId(), "alpha", alpha); rep.saveStepAttribute(id_job, getObjectId(), "passes", passes); rep.saveStepAttribute(id_job, getObjectId(), "ridge", ridge); rep.saveStepAttribute(id_job, getObjectId(), "epsilon", epsilon); rep.saveStepAttribute(id_job, getObjectId(), "maxIter", maxIter); } catch (Exception e) { throw new KettleException("Unable to save info into repository" + id_job, e); } }
public void readRep( Repository rep, long id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException { try { long id_connection = rep.getStepAttributeInteger(id_step, "id_connection"); // $NON-NLS-1$ databaseMeta = DatabaseMeta.findDatabase(databases, id_connection); commitSize = (int) rep.getStepAttributeInteger(id_step, "commit"); // $NON-NLS-1$ schemaName = rep.getStepAttributeString(id_step, "schema"); // $NON-NLS-1$ tableName = rep.getStepAttributeString(id_step, "table"); // $NON-NLS-1$ int nrkeys = rep.countNrStepAttributes(id_step, "key_name"); // $NON-NLS-1$ allocate(nrkeys); for (int i = 0; i < nrkeys; i++) { keyStream[i] = rep.getStepAttributeString(id_step, i, "key_name"); // $NON-NLS-1$ keyLookup[i] = rep.getStepAttributeString(id_step, i, "key_field"); // $NON-NLS-1$ keyCondition[i] = rep.getStepAttributeString(id_step, i, "key_condition"); // $NON-NLS-1$ keyStream2[i] = rep.getStepAttributeString(id_step, i, "key_name2"); // $NON-NLS-1$ } } catch (Exception e) { throw new KettleException( Messages.getString("DeleteMeta.Exception.UnexpectedErrorInReadingStepInfo"), e); //$NON-NLS-1$ } }
public void saveRep(Repository rep, ObjectId id_job) throws KettleException { try { rep.saveStepAttribute(id_job, getObjectId(), "fields", fields); // $NON-NLS-1$ rep.saveStepAttribute(id_job, getObjectId(), "datasetName", datasetName); // $NON-NLS-1$ rep.saveStepAttribute(id_job, getObjectId(), "recordset_name", recordsetName); // $NON-NLS-1$ // rep.saveStepAttribute(id_job, getObjectId(), "recordName", recordName); //$NON-NLS-1$ // rep.saveStepAttribute(id_job, getObjectId(), "recordDef", recordDef); //$NON-NLS-1$ } catch (Exception e) { throw new KettleException("Unable to save info into repository" + id_job, e); } }
/**
 * Persists the rule-engine step configuration (rule file, main step, rule-results step, output
 * type) to the repository.
 *
 * @param rep the repository to write to
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  try {
    rep.saveStepAttribute(id_transformation, id_step, "rule_file_name", ruleFilename);
    rep.saveStepAttribute(id_transformation, id_step, "rule_step_main", stepMain);
    rep.saveStepAttribute(id_transformation, id_step, "rule_step_rule_results", stepRuleResults);
    rep.saveStepAttribute(id_transformation, id_step, "output_type", outputType);
  } catch (KettleException dbe) {
    // Broadened from KettleDatabaseException: previously other KettleExceptions escaped
    // without the id_step context being attached.
    throw new KettleException(
        "Unable to save rule engine step information to the repository, id_step=" + id_step, dbe);
  }
}
/**
 * Persists the recordset execution configuration (recordset name/content, break criteria, and
 * the "all"/"run local" flags) to the repository.
 *
 * @param rep the repository to write to
 * @param id_job the owning object id
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException {
  try {
    rep.saveStepAttribute(id_job, getObjectId(), "recordset_name", recordsetName);
    rep.saveStepAttribute(id_job, getObjectId(), "recordset", recordset);
    rep.saveStepAttribute(id_job, getObjectId(), "breakCriteria", breakCriteria);
    // Flags are stored via their string representations.
    rep.saveStepAttribute(id_job, getObjectId(), "isAll", this.getIsAllString());
    rep.saveStepAttribute(id_job, getObjectId(), "runLocal", this.getIsRunLocalString());
  } catch (Exception e) {
    // Fixed: the message previously lacked a separator before the id, yielding e.g.
    // "Unable to save info into repository123".
    throw new KettleException("Unable to save info into repository for id_job=" + id_job, e);
  }
}
/**
 * Persists the HBase output configuration to the repository. Optional settings are written only
 * when non-empty; the WAL flag is always written; the field mapping delegates its own save.
 *
 * @param rep the repository to write to
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  final ObjectId transId = id_transformation;

  if (!Const.isEmpty(m_zookeeperHosts)) {
    rep.saveStepAttribute(transId, id_step, 0, "zookeeper_hosts", m_zookeeperHosts);
  }
  if (!Const.isEmpty(m_zookeeperPort)) {
    rep.saveStepAttribute(transId, id_step, 0, "zookeeper_port", m_zookeeperPort);
  }
  if (!Const.isEmpty(m_coreConfigURL)) {
    rep.saveStepAttribute(transId, id_step, 0, "core_config_url", m_coreConfigURL);
  }
  if (!Const.isEmpty(m_defaultConfigURL)) {
    rep.saveStepAttribute(transId, id_step, 0, "default_config_url", m_defaultConfigURL);
  }
  if (!Const.isEmpty(m_targetTableName)) {
    rep.saveStepAttribute(transId, id_step, 0, "target_table_name", m_targetTableName);
  }
  if (!Const.isEmpty(m_targetMappingName)) {
    rep.saveStepAttribute(transId, id_step, 0, "target_mapping_name", m_targetMappingName);
  }
  if (!Const.isEmpty(m_writeBufferSize)) {
    rep.saveStepAttribute(transId, id_step, 0, "write_buffer_size", m_writeBufferSize);
  }

  // Always record whether writing to the write-ahead log is disabled.
  rep.saveStepAttribute(transId, id_step, 0, "disable_wal", m_disableWriteToWAL);

  if (m_mapping != null) {
    m_mapping.saveRep(rep, transId, id_step);
  }
}
public void saveRep(Repository rep, long id_transformation, long id_step) throws KettleException { try { rep.saveStepAttribute( id_transformation, id_step, "id_connection", databaseMeta == null ? -1 : databaseMeta.getID()); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "commit", commitSize); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "schema", schemaName); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "table", tableName); // $NON-NLS-1$ for (int i = 0; i < keyStream.length; i++) { rep.saveStepAttribute( id_transformation, id_step, i, "key_name", keyStream[i]); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "key_field", keyLookup[i]); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "key_condition", keyCondition[i]); // $NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, i, "key_name2", keyStream2[i]); // $NON-NLS-1$ } // Also, save the step-database relationship! if (databaseMeta != null) rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getID()); } catch (Exception e) { throw new KettleException( Messages.getString("DeleteMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$ } }
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta); rep.saveStepAttribute(id_transformation, id_step, "commit", commitSize); rep.saveStepAttribute(id_transformation, id_step, "schema", schemaName); rep.saveStepAttribute(id_transformation, id_step, "table", tableName); for (int i = 0; i < keyStream.length; i++) { rep.saveStepAttribute(id_transformation, id_step, i, "key_name", keyStream[i]); rep.saveStepAttribute(id_transformation, id_step, i, "key_field", keyLookup[i]); rep.saveStepAttribute(id_transformation, id_step, i, "key_condition", keyCondition[i]); rep.saveStepAttribute(id_transformation, id_step, i, "key_name2", keyStream2[i]); } // Also, save the step-database relationship! if (databaseMeta != null) { rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId()); } } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "DeleteMeta.Exception.UnableToSaveStepInfo") + id_step, e); } }
// Rogers (2012): Persistir campos no repositorio @Override public void saveRep(Repository repository, ObjectId idOfTransformation, ObjectId idOfStep) throws KettleException { repository.saveStepAttribute( idOfTransformation, idOfStep, Field.ENDPOINT_URI.name(), endpointUri); repository.saveStepAttribute( idOfTransformation, idOfStep, Field.DEFAULT_GRAPH.name(), defaultGraph); repository.saveStepAttribute( idOfTransformation, idOfStep, Field.QUERY_STRING.name(), queryString); repository.saveStepAttribute( idOfTransformation, idOfStep, Field.PREFIXES.name(), new XStream().toXML(prefixes)); repository.saveStepAttribute(idOfTransformation, idOfStep, Field.VAR_RESULT.name(), varResult); }
/**
 * Restores the "get sequence" step configuration (value name, slave server, sequence name,
 * increment) from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_step the step whose attributes are read
 * @param databases available database connections (unused)
 * @throws KettleException when any attribute cannot be read
 */
public void readRep(
    Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases)
    throws KettleException {
  try {
    final ObjectId stepId = id_step;
    valuename = rep.getStepAttributeString(stepId, "valuename");
    slaveServerName = rep.getStepAttributeString(stepId, "slave");
    sequenceName = rep.getStepAttributeString(stepId, "seqname");
    increment = rep.getStepAttributeString(stepId, "increment");
  } catch (Exception e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "GetSequenceMeta.Exception.UnableToReadStepInfo") + id_step,
        e);
  }
}
/**
 * Persists the "get sequence" step configuration (value name, slave server, sequence name,
 * increment) to the repository.
 *
 * @param rep the repository to write to
 * @param metaStore unused here, required by the interface
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(
    Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  try {
    final ObjectId transId = id_transformation;
    rep.saveStepAttribute(transId, id_step, "valuename", valuename);
    rep.saveStepAttribute(transId, id_step, "slave", slaveServerName);
    rep.saveStepAttribute(transId, id_step, "seqname", sequenceName);
    rep.saveStepAttribute(transId, id_step, "increment", increment);
  } catch (Exception e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "GetSequenceMeta.Exception.UnableToSaveStepInfo") + id_step,
        e);
  }
}
/**
 * Saves the shared objects referenced by a transformation (databases, slave servers, cluster
 * schemas, partition schemas) before the transformation itself is stored.
 *
 * <p>Each object is saved only when it changed or has no object ID yet (e.g. freshly imported).
 * Database connections are additionally subject to a write-ACL check; a denied update is logged
 * as an error rather than thrown.
 *
 * @param element the repository element; must be a TransMeta
 * @param versionComment comment recorded with each saved object's new version
 * @throws KettleException when saving any of the shared objects fails
 */
public void saveSharedObjects(
    final RepositoryElementInterface element, final String versionComment)
    throws KettleException {
  TransMeta transMeta = (TransMeta) element;
  // First store the databases and other depending objects in the transformation.
  //
  // Only store if the database has actually changed or doesn't have an object ID (imported)
  //
  for (DatabaseMeta databaseMeta : transMeta.getDatabases()) {
    if (databaseMeta.hasChanged() || databaseMeta.getObjectId() == null) {
      // A connection without an ID is new (never stored), so no ACL exists yet to check;
      // otherwise require WRITE permission on the existing repository object.
      if (databaseMeta.getObjectId() == null
          || unifiedRepositoryConnectionAclService.hasAccess(
              databaseMeta.getObjectId(), RepositoryFilePermission.WRITE)) {
        repo.save(databaseMeta, versionComment, null);
      } else {
        // Access denied: log and continue with the remaining shared objects.
        log.logError(
            BaseMessages.getString(
                PKG,
                "PurRepository.ERROR_0004_DATABASE_UPDATE_ACCESS_DENIED",
                databaseMeta.getName()));
      }
    }
  }
  // Store the slave servers...
  //
  for (SlaveServer slaveServer : transMeta.getSlaveServers()) {
    if (slaveServer.hasChanged() || slaveServer.getObjectId() == null) {
      repo.save(slaveServer, versionComment, null);
    }
  }

  // Store the cluster schemas
  //
  for (ClusterSchema clusterSchema : transMeta.getClusterSchemas()) {
    if (clusterSchema.hasChanged() || clusterSchema.getObjectId() == null) {
      repo.save(clusterSchema, versionComment, null);
    }
  }

  // Save the partition schemas
  //
  for (PartitionSchema partitionSchema : transMeta.getPartitionSchemas()) {
    if (partitionSchema.hasChanged() || partitionSchema.getObjectId() == null) {
      repo.save(partitionSchema, versionComment, null);
    }
  }
}
/**
 * Persists the "web service available" step configuration (URL field, timeouts, result field
 * name) to the repository.
 *
 * @param rep the repository to write to
 * @param metaStore unused here, required by the interface
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(
    Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  try {
    final ObjectId transId = id_transformation;
    rep.saveStepAttribute(transId, id_step, "urlField", urlField);
    rep.saveStepAttribute(transId, id_step, "connectTimeOut", connectTimeOut);
    rep.saveStepAttribute(transId, id_step, "readTimeOut", readTimeOut);
    rep.saveStepAttribute(transId, id_step, "resultfieldname", resultfieldname);
  } catch (Exception e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "WebServiceAvailableMeta.Exception.UnableToSaveStepInfo")
            + id_step,
        e);
  }
}
/**
 * Restores the "web service available" step configuration (URL field, timeouts, result field
 * name) from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_step the step whose attributes are read
 * @param databases available database connections (unused)
 * @throws KettleException when any attribute cannot be read
 */
public void readRep(
    Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases)
    throws KettleException {
  try {
    final ObjectId stepId = id_step;
    urlField = rep.getStepAttributeString(stepId, "urlField");
    connectTimeOut = rep.getStepAttributeString(stepId, "connectTimeOut");
    readTimeOut = rep.getStepAttributeString(stepId, "readTimeOut");
    resultfieldname = rep.getStepAttributeString(stepId, "resultfieldname");
  } catch (Exception e) {
    throw new KettleException(
        BaseMessages.getString(
            PKG, "WebServiceAvailableMeta.Exception.UnexpectedErrorReadingStepInfo"),
        e);
  }
}
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException { try { rep.saveJobEntryAttribute(id_job, getObjectId(), "hostname", hostname); rep.saveJobEntryAttribute(id_job, getObjectId(), "nbr_packets", nbrPackets); // TODO: The following line may be removed 3 versions after 2.5.0 rep.saveJobEntryAttribute(id_job, getObjectId(), "nbrpaquets", nbrPackets); rep.saveJobEntryAttribute(id_job, getObjectId(), "timeout", timeout); rep.saveJobEntryAttribute(id_job, getObjectId(), "pingtype", pingtype); } catch (KettleDatabaseException dbe) { throw new KettleException( "Unable to save job entry of type 'ping' to the repository for id_job=" + id_job, dbe); } }
/**
 * Persists the "replace value" step configuration: the variable-usage flag and, per field, the
 * replacement value, mask, and empty-string flag.
 *
 * @param rep the repository to write to
 * @param metaStore unused here, required by the interface
 * @param id_transformation the owning transformation
 * @param id_step the step whose attributes are saved
 * @throws KettleException when any attribute cannot be stored
 */
public void saveRep(
    Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step)
    throws KettleException {
  try {
    final ObjectId transId = id_transformation;
    rep.saveStepAttribute(transId, id_step, "usevar", usevar);
    for (int idx = 0; idx < fieldName.length; idx++) {
      rep.saveStepAttribute(transId, id_step, idx, "field_name", fieldName[idx]);
      rep.saveStepAttribute(transId, id_step, idx, "replace_value", replaceValue[idx]);
      rep.saveStepAttribute(transId, id_step, idx, "replace_mask", replaceMask[idx]);
      rep.saveStepAttribute(transId, id_step, idx, "set_empty_string", setEmptyString[idx]);
    }
  } catch (Exception e) {
    throw new KettleException(
        "Unable to save step information to the repository for id_step=" + id_step, e);
  }
}
/**
 * Restores the "delay" job entry settings (timeout and time scale) from the repository.
 *
 * @param rep the repository to read from
 * @param metaStore unused here, required by the interface
 * @param id_jobentry the job entry to load
 * @param databases available database connections (unused)
 * @param slaveServers available slave servers (unused)
 * @throws KettleException when any attribute cannot be read
 */
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximumTimeout");
    scaleTime = (int) rep.getJobEntryAttributeInteger(id_jobentry, "scaletime");
  } catch (KettleException dbe) {
    // Broadened from KettleDatabaseException: previously other KettleExceptions escaped
    // without the localized id_jobentry context being attached.
    throw new KettleException(
        BaseMessages.getString(PKG, "JobEntryDelay.UnableToLoadFromRepo.Label") + id_jobentry,
        dbe);
  }
}
@Override public void lookupRepositoryReferences(Repository repository) throws KettleException { // The correct reference is stored in the trans name and directory attributes... // RepositoryDirectoryInterface repositoryDirectoryInterface = RepositoryImportLocation.getRepositoryImportLocation().findDirectory(directoryPath); transObjectId = repository.getTransformationID(transName, repositoryDirectoryInterface); }
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { for (int i = 0; i < fieldInStream.length; i++) { rep.saveStepAttribute( id_transformation, id_step, i, "in_stream_name", fieldInStream[i]); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, i, "out_stream_name", fieldOutStream[i]); rep.saveStepAttribute(id_transformation, id_step, i, "cut_from", cutFrom[i]); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, i, "cut_to", cutTo[i]); // $NON-NLS-1$ } } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "StringCutMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$ } }