public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    includeSubfolders = rep.getJobEntryAttributeBoolean(id_jobentry, "include_subfolders");

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "name");
    arguments = new String[argnr];
    filemasks = new String[argnr];

    // Read them all...
    for (int a = 0; a < argnr; a++) {
      arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "name");
      filemasks[a] = rep.getJobEntryAttributeString(id_jobentry, a, "filemask");
    }
  } catch (KettleException dbe) {
    throw new KettleException(
        BaseMessages.getString(
            PKG, "JobEntryCheckFilesLocked.UnableToLoadFromRepo", String.valueOf(id_jobentry)),
        dbe);
  }
}
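// Illustrative counterpart, not part of the original listing: a minimal saveRep sketch showing
// how the attributes read above would typically be written back, assuming the standard
// Repository.saveJobEntryAttribute overloads (plain and indexed) and JobEntryBase.getObjectId().
// Field and attribute names simply mirror the loadRep above.
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "include_subfolders", includeSubfolders);

    // Store each argument/filemask pair under the same indexed attribute codes used when loading.
    for (int i = 0; i < arguments.length; i++) {
      rep.saveJobEntryAttribute(id_job, getObjectId(), i, "name", arguments[i]);
      rep.saveJobEntryAttribute(id_job, getObjectId(), i, "filemask", filemasks[i]);
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Unable to save job entry of type 'CheckFilesLocked' to the repository for id_job="
            + id_job,
        dbe);
  }
}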
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    port = rep.getJobEntryAttributeString(id_jobentry, "port");
    serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
    oid = rep.getJobEntryAttributeString(id_jobentry, "oid");
    message = rep.getJobEntryAttributeString(id_jobentry, "message");
    comString = rep.getJobEntryAttributeString(id_jobentry, "comstring");
    timeout = rep.getJobEntryAttributeString(id_jobentry, "timeout");
    nrretry = rep.getJobEntryAttributeString(id_jobentry, "nrretry");
    targettype = rep.getJobEntryAttributeString(id_jobentry, "targettype");
    user = rep.getJobEntryAttributeString(id_jobentry, "user");
    passphrase = rep.getJobEntryAttributeString(id_jobentry, "passphrase");
    engineid = rep.getJobEntryAttributeString(id_jobentry, "engineid");
  } catch (KettleException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'SNMPTrap' from the repository for id_jobentry="
            + id_jobentry,
        dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    connection =
        rep.loadDatabaseMetaFromJobEntryAttribute(
            id_jobentry, "connection", "id_database", databases);
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    successCondition =
        getSuccessConditionByCode(
            Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
    limit = rep.getJobEntryAttributeString(id_jobentry, "limit");
    iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
    isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
    isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
    isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
    customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        BaseMessages.getString(PKG, "JobEntryEvalTableContent.UnableLoadRep", "" + id_jobentry),
        dbe);
  }
}
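// Illustrative counterpart, not part of the original listing: a minimal saveRep sketch for the
// entry above, assuming the Repository interface exposes saveDatabaseMetaJobEntryAttribute as
// the write-side twin of loadDatabaseMetaFromJobEntryAttribute, plus the usual
// saveJobEntryAttribute overloads. getSuccessConditionCode is assumed here as the inverse of
// getSuccessConditionByCode; attribute codes mirror the loadRep above.
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    // Persist the database connection reference alongside the plain attributes.
    rep.saveDatabaseMetaJobEntryAttribute(
        id_job, getObjectId(), "connection", "id_database", connection);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
    rep.saveJobEntryAttribute(
        id_job, getObjectId(), "success_condition", getSuccessConditionCode(successCondition));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "limit", limit);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "is_custom_sql", iscustomSQL);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "is_usevars", isUseVars);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "add_rows_result", isAddRowsResult);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_result_rows", isClearResultList);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "custom_sql", customSQL);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Unable to save job entry attributes to the repository for id_job=" + id_job, dbe);
  }
}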
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
    enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
    lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
    limitlines = rep.getJobEntryAttributeString(id_jobentry, "limitlines");
    listcolumn = rep.getJobEntryAttributeString(id_jobentry, "listcolumn");
    highpriority = rep.getJobEntryAttributeBoolean(id_jobentry, "highpriority");
    optionenclosed = rep.getJobEntryAttributeBoolean(id_jobentry, "optionenclosed");
    outdumpvalue = (int) rep.getJobEntryAttributeInteger(id_jobentry, "outdumpvalue");
    iffileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
    addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    connection =
        rep.loadDatabaseMetaFromJobEntryAttribute(
            id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'MySQL bulk file' from the repository for id_jobentry="
            + id_jobentry,
        dbe);
  }
}
@Override
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    connection =
        rep.loadDatabaseMetaFromJobEntryAttribute(
            id_jobentry, "connection", "id_database", databases);
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    successCondition =
        getSuccessConditionByCode(
            Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
    rowsCountValue = rep.getJobEntryAttributeString(id_jobentry, "rows_count_value");
    iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
    isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
    isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
    customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
    maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximum_timeout");
    checkCycleTime = rep.getJobEntryAttributeString(id_jobentry, "check_cycle_time");
    successOnTimeout = rep.getJobEntryAttributeBoolean(id_jobentry, "success_on_timeout");
    isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        BaseMessages.getString(PKG, "JobEntryWaitForSQL.UnableLoadRep", "" + id_jobentry), dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximum_timeout");
    checkCycleTime = rep.getJobEntryAttributeString(id_jobentry, "check_cycle_time");
    successOnTimeout = rep.getJobEntryAttributeBoolean(id_jobentry, "success_on_timeout");
    fileSizeCheck = rep.getJobEntryAttributeBoolean(id_jobentry, "file_size_check");
    addFilenameToResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_filename_result");
  } catch (KettleException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'wait for file' from the repository for id_jobentry="
            + id_jobentry,
        dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    hostname = rep.getJobEntryAttributeString(id_jobentry, "hostname");
    nbrPackets = rep.getJobEntryAttributeString(id_jobentry, "nbr_packets");

    // TODO: The following lines may be removed 3 versions after 2.5.0
    String nbrPaquets = rep.getJobEntryAttributeString(id_jobentry, "nbrpaquets");
    if (nbrPackets == null && nbrPaquets != null) {
      // If only "nbrpaquets" exists, the entry was saved by a version 2.5.0 ping job entry.
      nbrPackets = nbrPaquets;
    }
    timeout = rep.getJobEntryAttributeString(id_jobentry, "timeout");
    pingtype = rep.getJobEntryAttributeString(id_jobentry, "pingtype");
    if (Const.isEmpty(pingtype)) {
      pingtype = classicPing;
      ipingtype = iclassicPing;
    } else {
      if (pingtype.equals(systemPing)) {
        ipingtype = isystemPing;
      } else if (pingtype.equals(bothPings)) {
        ipingtype = ibothPings;
      } else {
        ipingtype = iclassicPing;
      }
    }
  } catch (KettleException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'ping' from the repository for id_jobentry="
            + id_jobentry,
        dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    replaceVars = rep.getJobEntryAttributeBoolean(id_jobentry, "replacevars");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    fileVariableType =
        getVariableType(rep.getJobEntryAttributeString(id_jobentry, "file_variable_type"));

    // How many variables?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "variable_name");
    variableName = new String[argnr];
    variableValue = new String[argnr];
    variableType = new int[argnr];

    // Read them all...
    for (int a = 0; a < argnr; a++) {
      variableName[a] = rep.getJobEntryAttributeString(id_jobentry, a, "variable_name");
      variableValue[a] = rep.getJobEntryAttributeString(id_jobentry, a, "variable_value");
      variableType[a] =
          getVariableType(rep.getJobEntryAttributeString(id_jobentry, a, "variable_type"));
    }
  } catch (KettleException dbe) {
    throw new KettleException(
        BaseMessages.getString(
            PKG,
            "JobEntrySetVariables.Meta.UnableLoadRep",
            String.valueOf(id_jobentry),
            dbe.getMessage()),
        dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximumTimeout");
    scaleTime = (int) rep.getJobEntryAttributeInteger(id_jobentry, "scaletime");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        BaseMessages.getString(PKG, "JobEntryDelay.UnableToLoadFromRepo.Label") + id_jobentry,
        dbe);
  }
}
public void loadRep(
    Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    super.loadRep(rep, id_jobentry, databases, slaveServers);
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename"); // $NON-NLS-1$
  } catch (KettleException dbe) {
    throw new KettleException(
        Messages.getString(
            "JobEntryFileExists.ERROR_0002_Cannot_Load_Job_From_Repository",
            Long.toString(id_jobentry)),
        dbe); //$NON-NLS-1$
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    xmlfilename = rep.getJobEntryAttributeString(id_jobentry, "xmlfilename");
    xslfilename = rep.getJobEntryAttributeString(id_jobentry, "xslfilename");
    outputfilename = rep.getJobEntryAttributeString(id_jobentry, "outputfilename");
    iffileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
    addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    filenamesfromprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "filenamesfromprevious");
    xsltfactory = rep.getJobEntryAttributeString(id_jobentry, "xsltfactory");
    if (xsltfactory == null) {
      xsltfactory = FACTORY_JAXP;
    }

    int nrparams = rep.countNrJobEntryAttributes(id_jobentry, "param_name");
    int nroutputprops = rep.countNrJobEntryAttributes(id_jobentry, "output_property_name");
    allocate(nrparams, nroutputprops);

    for (int i = 0; i < nrparams; i++) {
      parameterField[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_field");
      parameterName[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_name");
    }
    for (int i = 0; i < nroutputprops; i++) {
      outputPropertyName[i] =
          rep.getJobEntryAttributeString(id_jobentry, i, "output_property_name");
      outputPropertyValue[i] =
          rep.getJobEntryAttributeString(id_jobentry, i, "output_property_value");
    }
  } catch (KettleException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'xslt' from the repository for id_jobentry="
            + id_jobentry,
        dbe);
  }
}
// Load the job entry from the repository
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    setFileName(rep.getJobEntryAttributeString(id_jobentry, "file_name"));
    setWorkDirectory(rep.getJobEntryAttributeString(id_jobentry, "work_directory"));
    argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
    setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
    setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
    addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
    addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
    logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
    logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
    logFileLevel =
        LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
    insertScript = rep.getJobEntryAttributeBoolean(id_jobentry, "insertScript");
    script = rep.getJobEntryAttributeString(id_jobentry, "script");

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
    arguments = new String[argnr];

    // Read them all...
    for (int a = 0; a < argnr; a++) {
      arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'shell' from the repository with id_jobentry="
            + id_jobentry,
        dbe);
  }
}
public void loadRep(
    Repository rep,
    IMetaStore metaStore,
    ObjectId id_jobentry,
    List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers)
    throws KettleException {
  try {
    serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
    userName = rep.getJobEntryAttributeString(id_jobentry, "username");
    password =
        Encr.decryptPasswordOptionallyEncrypted(
            rep.getJobEntryAttributeString(id_jobentry, "password"));
    serverPort = rep.getJobEntryAttributeString(id_jobentry, "serverport");
    ftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "ftpdirectory");
    localDirectory = rep.getJobEntryAttributeString(id_jobentry, "localdirectory");
    wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
    onlyGettingNewFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "only_new");
    usehttpproxy = rep.getJobEntryAttributeBoolean(id_jobentry, "usehttpproxy");
    httpproxyhost = rep.getJobEntryAttributeString(id_jobentry, "httpproxyhost");
    httpproxyusername = rep.getJobEntryAttributeString(id_jobentry, "httpproxyusername");
    httpProxyPassword = rep.getJobEntryAttributeString(id_jobentry, "httpproxypassword");
    publicpublickey = rep.getJobEntryAttributeBoolean(id_jobentry, "publicpublickey");
    keyFilename = rep.getJobEntryAttributeString(id_jobentry, "keyfilename");
    keyFilePass = rep.getJobEntryAttributeString(id_jobentry, "keyfilepass");
    useBasicAuthentication =
        rep.getJobEntryAttributeBoolean(id_jobentry, "usebasicauthentication");
    createRemoteFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "createremotefolder");
    afterFtpPut = rep.getJobEntryAttributeString(id_jobentry, "afterftpput");
    destinationfolder = rep.getJobEntryAttributeString(id_jobentry, "destinationfolder");
    createDestinationFolder =
        rep.getJobEntryAttributeBoolean(id_jobentry, "createdestinationfolder");
    cachehostkey = rep.getJobEntryAttributeBoolean(id_jobentry, "cachehostkey");
    timeout = (int) rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
  } catch (KettleException dbe) {
    throw new KettleException(
        BaseMessages.getString(
            PKG, "JobSSH2PUT.Log.UnableLoadRep", "" + id_jobentry, dbe.getMessage()));
  }
}