/**
 * Fill TO config for the Kafka connector: set the target topic on the
 * job's TO-side config and register that topic with the Kafka test utility.
 *
 * @param job MJob object to fill
 */
protected void fillKafkaToConfig(MJob job) {
  MConfigList kafkaToConfig = job.getToJobConfig();
  kafkaToConfig.getStringInput("toJobConfig.topic").setValue(topic);

  // The test utility needs the topic created up front; pass it as a
  // single-element list.
  List<String> topicNames = new ArrayList<>(1);
  topicNames.add(topic);
  testUtil.initTopicList(topicNames);
}
/**
 * Print a full description of the given job to the console: the header
 * block (name, enabled flag, creation/last-update user and timestamp),
 * the driver-level configs, and the FROM- and TO-side connector configs.
 *
 * @param job job whose details should be displayed
 */
private void displayJob(MJob job) {
  // Locale-sensitive short date+time rendering for the audit timestamps.
  // NOTE(review): legacy java.text.DateFormat is not thread-safe, but this
  // instance is method-local, so that is not a concern here.
  DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
  printlnResource(
      Constants.RES_SHOW_PROMPT_JOB_INFO,
      job.getName(),
      job.getEnabled(),
      job.getCreationUser(),
      formatter.format(job.getCreationDate()),
      job.getLastUpdateUser(),
      formatter.format(job.getLastUpdateDate()));
  // Driver configs apply to the job as a whole (not to either connector).
  displayConfig(job.getDriverConfig().getConfigs(), client.getDriverConfigBundle());
  // FROM side: link name, then the connector-specific job configs rendered
  // with that connector's resource bundle.
  printlnResource(Constants.RES_SHOW_PROMPT_JOB_FROM_LID_INFO, job.getFromLinkName());
  displayConfig(
      job.getFromJobConfig().getConfigs(),
      client.getConnectorConfigBundle(job.getFromConnectorName()));
  // TO side: same structure as the FROM side.
  printlnResource(Constants.RES_SHOW_PROMPT_JOB_TO_LID_INFO, job.getToLinkName());
  displayConfig(
      job.getToJobConfig().getConfigs(),
      client.getConnectorConfigBundle(job.getToConnectorName()));
}
/**
 * Fill TO config with a specific storage and output type.
 *
 * @param job MJob object to fill
 * @param output Output type that should be set
 */
protected void fillHdfsToConfig(MJob job, ToFormat output) {
  MConfigList hdfsToConfig = job.getToJobConfig();
  // The two inputs are independent; order of assignment does not matter.
  hdfsToConfig.getStringInput("toJobConfig.outputDirectory").setValue(getMapreduceDirectory());
  hdfsToConfig.getEnumInput("toJobConfig.outputFormat").setValue(output);
}
/**
 * Fill TO config for the RDBMS connector: point the job at this
 * test's target table.
 *
 * @param job MJob object to fill
 */
protected void fillRdbmsToConfig(MJob job) {
  MConfigList rdbmsToConfig = job.getToJobConfig();
  String targetTable = getTableName().getTableName();
  rdbmsToConfig.getStringInput("toJobConfig.tableName").setValue(targetTable);
}