/**
 * Creates a cache builder task for the specified cache (or cache candidate).
 *
 * @param subcommand subcommand name
 * @param bean current importer script
 * @param location cache location
 * @param info cache information
 * @return a task that executes the cache builder, or {@code null} if there is nothing to do
 * @throws IOException if failed to start execution
 */
protected Callable<?> createCacheBuilder(
        final String subcommand,
        ImportBean bean,
        final URI location,
        final CacheInfo info) throws IOException {
    assert subcommand != null;
    assert bean != null;
    assert location != null;
    assert info != null;
    List<String> command = new ArrayList<>();
    command.add(cacheBuildCommand);
    command.add(subcommand);
    command.add(bean.getBatchId());
    command.add(bean.getJobflowId());
    command.add(bean.getExecutionId());
    command.add(location.toString());
    command.add(info.getModelClassName());
    command.add(info.getTableName());
    LOG.info("TG-EXTRACTOR-12001",
            subcommand, info.getId(), info.getTableName(),
            bean.getTargetName(), bean.getBatchId(), bean.getJobflowId(), bean.getExecutionId(),
            command);
    final ProcessBuilder builder = new ProcessBuilder(command);
    builder.directory(new File(System.getProperty("user.home", ".")));
    return new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            LOG.info("TG-EXTRACTOR-12003", subcommand, info.getId(), info.getTableName());
            Process process = builder
                    .redirectOutput(Redirect.INHERIT)
                    .redirectError(Redirect.INHERIT)
                    .start();
            try {
                int exitCode = process.waitFor();
                if (exitCode != 0) {
                    throw new IOException(MessageFormat.format(
                            "Cache builder returned an unexpected exit code: {0}", exitCode));
                }
                LOG.info("TG-EXTRACTOR-12004", subcommand, info.getId(), info.getTableName());
            } catch (Exception e) {
                throw new BulkLoaderSystemException(e, DfsFileImport.class, "TG-EXTRACTOR-12005",
                        subcommand, info.getId(), info.getTableName());
            } finally {
                process.destroy();
            }
            return null;
        }
    };
}
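The returned task is meant to be run by the caller, typically on an ExecutorService. The following self-contained sketch illustrates the same wrap-a-forked-process-in-a-Callable pattern; it is an outline only, not part of DfsFileImport, and the "true" command is a placeholder assumption standing in for the external cache build command.

import java.io.IOException;
import java.lang.ProcessBuilder.Redirect;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ProcessTaskSketch {
    public static void main(String[] args) throws Exception {
        // Wrap an external command in a Callable, mirroring createCacheBuilder() above.
        Callable<Void> task = new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                Process process = new ProcessBuilder("true") // placeholder command
                        .redirectOutput(Redirect.INHERIT)
                        .redirectError(Redirect.INHERIT)
                        .start();
                try {
                    int exitCode = process.waitFor();
                    if (exitCode != 0) {
                        throw new IOException("unexpected exit code: " + exitCode);
                    }
                } finally {
                    process.destroy();
                }
                return null;
            }
        };
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            Future<Void> future = executor.submit(task);
            future.get(); // task failures surface here as ExecutionException
        } finally {
            executor.shutdown();
        }
    }
}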
/**
 * Updates a cache.
 * @throws Exception if failed
 */
@Test
public void update_cache_rebuild() throws Exception {
    ImportBean bean = createBean();
    Map<String, ImportTargetTableBean> targetTable = new HashMap<String, ImportTargetTableBean>();
    final ImportTargetTableBean tb1 = new ImportTargetTableBean();
    tb1.setCacheId("tb1");
    tb1.setDfsFilePath("tb1");
    tb1.setImportTargetType(ImportTarget1.class);
    tb1.setImportTargetColumns(Arrays.asList("A"));
    tb1.setSearchCondition("");
    targetTable.put("__TG_TEST1", tb1);

    Connection conn = DBConnection.getConnection();
    final Calendar last = offset(-1);
    try {
        LocalCacheInfoRepository repo = new LocalCacheInfoRepository(conn);
        repo.putCacheInfo(new LocalCacheInfo(
                tb1.getCacheId(),
                null,
                last,
                "__TG_TEST1",
                tb1.getDfsFilePath()));
    } finally {
        conn.close();
    }

    bean.setTargetTable(targetTable);
    ImportProtocolDecide service = new ImportProtocolDecide() {
        @Override
        protected Map<String, CacheInfo> collectRemoteCacheInfo(ImportBean ignored)
                throws BulkLoaderSystemException {
            return Collections.singletonMap("tb1", new CacheInfo(
                    CacheInfo.FEATURE_VERSION,
                    tb1.getCacheId(),
                    last,
                    "__TG_TEST1",
                    tb1.getImportTargetColumns(),
                    tb1.getImportTargetType().getName(),
                    new ImportTarget1().__tgc__DataModelVersion()));
        }
    };
    service.execute(bean);

    assertThat(tb1.getImportProtocol().getKind(), is(FileProtocol.Kind.UPDATE_CACHE));
    assertThat(tb1.getImportProtocol().getLocation(), is(tb1.getDfsFilePath()));
    assertThat(tb1.getStartTimestamp(), is(notNullValue()));
    CacheInfo info = tb1.getImportProtocol().getInfo();
    assertThat(info, is(notNullValue()));
    assertThat(info.getId(), is("tb1"));
    assertThat(info.getFeatureVersion(), is(CacheInfo.FEATURE_VERSION));
    assertThat(info.getTimestamp(), is(not(nullValue())));
    assertThat(info.getTableName(), is("__TG_TEST1"));
    assertThat(info.getColumnNames(), is((Object) new HashSet<String>(tb1.getImportTargetColumns())));
    assertThat(info.getModelClassName(), is(ImportTarget1.class.getName()));
    assertThat(info.getModelClassVersion(), is(new ImportTarget1().__tgc__DataModelVersion()));
}
private Callable<?> createCacheBuilder(
        FileProtocol protocol,
        ImportBean bean,
        String user,
        long recordCount) throws BulkLoaderSystemException {
    assert protocol != null;
    assert bean != null;
    assert user != null;
    CacheInfo info = protocol.getInfo();
    URI location = resolveLocation(bean, user, protocol.getLocation());
    assert info != null;
    try {
        switch (protocol.getKind()) {
        case CREATE_CACHE:
            return createCacheBuilder(CacheBuildClient.SUBCOMMAND_CREATE, bean, location, info);
        case UPDATE_CACHE:
            if (recordCount > 0) {
                return createCacheBuilder(CacheBuildClient.SUBCOMMAND_UPDATE, bean, location, info);
            } else {
                return null;
            }
        default:
            throw new AssertionError(protocol);
        }
    } catch (IOException e) {
        throw new BulkLoaderSystemException(e, getClass(), "TG-EXTRACTOR-12002",
                protocol.getKind(),
                info.getId(),
                info.getTableName(),
                bean.getTargetName(),
                bean.getBatchId(),
                bean.getJobflowId(),
                bean.getExecutionId());
    }
}
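The null result gives callers a simple skip contract: CREATE_CACHE always produces a task, while UPDATE_CACHE produces one only when records were actually transferred. A hypothetical caller fragment, assuming protocol, bean, user, and recordCount are in scope:

// Hypothetical caller fragment: all names in scope are assumptions for illustration.
Callable<?> builder = createCacheBuilder(protocol, bean, user, recordCount);
if (builder != null) {
    builder.call(); // or submit it to an ExecutorService, as in the sketch above
}
// a null result means UPDATE_CACHE with recordCount == 0: nothing to rebuild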
/**
 * Creates a new cache.
 * @throws Exception if failed
 */
@Test
public void create_cache() throws Exception {
    ImportBean bean = createBean();
    Map<String, ImportTargetTableBean> targetTable = new HashMap<String, ImportTargetTableBean>();
    ImportTargetTableBean tb1 = new ImportTargetTableBean();
    tb1.setCacheId("tb1");
    tb1.setDfsFilePath("tb1");
    tb1.setImportTargetType(ImportTarget1.class);
    tb1.setImportTargetColumns(Arrays.asList("A"));
    tb1.setSearchCondition("");
    targetTable.put("__TG_TEST1", tb1);
    bean.setTargetTable(targetTable);

    ImportProtocolDecide service = new ImportProtocolDecide() {
        @Override
        protected Map<String, CacheInfo> collectRemoteCacheInfo(ImportBean ignored)
                throws BulkLoaderSystemException {
            return Collections.emptyMap();
        }
    };
    service.execute(bean);

    assertThat(tb1.getImportProtocol().getKind(), is(FileProtocol.Kind.CREATE_CACHE));
    assertThat(tb1.getImportProtocol().getLocation(), is(tb1.getDfsFilePath()));
    assertThat(tb1.getStartTimestamp(), is(nullValue()));
    CacheInfo info = tb1.getImportProtocol().getInfo();
    assertThat(info, is(notNullValue()));
    assertThat(info.getId(), is("tb1"));
    assertThat(info.getFeatureVersion(), is(CacheInfo.FEATURE_VERSION));
    assertThat(info.getTimestamp(), is(not(nullValue())));
    assertThat(info.getTableName(), is("__TG_TEST1"));
    assertThat(info.getColumnNames(), is((Object) new HashSet<String>(tb1.getImportTargetColumns())));
    assertThat(info.getModelClassName(), is(ImportTarget1.class.getName()));
    assertThat(info.getModelClassVersion(), is(new ImportTarget1().__tgc__DataModelVersion()));
}
private long putCachePatch(
        FileProtocol protocol,
        InputStream content,
        ImportBean bean,
        String user) throws BulkLoaderSystemException {
    assert protocol != null;
    assert content != null;
    assert bean != null;
    assert user != null;
    assert protocol.getKind() == FileProtocol.Kind.CREATE_CACHE
        || protocol.getKind() == FileProtocol.Kind.UPDATE_CACHE;
    CacheInfo info = protocol.getInfo();
    assert info != null;
    ImportTargetTableBean targetTableBean = bean.getTargetTable(info.getTableName());
    if (targetTableBean == null) {
        // Abort if the DSL has no definition for the table corresponding to this entry.
        throw new BulkLoaderSystemException(getClass(), "TG-EXTRACTOR-02001",
                MessageFormat.format(
                        "The DSL has no table definition corresponding to the entry. Table name: {0}",
                        info.getTableName()));
    }
    URI dfsFilePath = resolveLocation(bean, user, protocol.getLocation());
    try (CacheStorage storage = new CacheStorage(new Configuration(), dfsFilePath)) {
        LOG.info("TG-EXTRACTOR-11001", info.getId(), info.getTableName(), storage.getPatchProperties());
        storage.putPatchCacheInfo(info);
        LOG.info("TG-EXTRACTOR-11002", info.getId(), info.getTableName(), storage.getPatchProperties());
        Class<?> targetTableModel = targetTableBean.getImportTargetType();
        Path targetUri = storage.getPatchContents("0");
        LOG.info("TG-EXTRACTOR-11003", info.getId(), info.getTableName(), targetUri);
        long recordCount = write(targetTableModel, targetUri.toUri(), content);
        LOG.info("TG-EXTRACTOR-11004", info.getId(), info.getTableName(), targetUri, recordCount);
        LOG.info("TG-PROFILE-01002",
                bean.getTargetName(), bean.getBatchId(), bean.getJobflowId(), bean.getExecutionId(),
                info.getTableName(), recordCount);
        return recordCount;
    } catch (IOException e) {
        throw new BulkLoaderSystemException(e, getClass(), "TG-EXTRACTOR-11005",
                info.getId(), info.getTableName(), dfsFilePath);
    }
}
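The write(targetTableModel, uri, content) call above is defined elsewhere in DfsFileImport; it stores the transferred content as model records at the patch location and returns the record count. As a rough, hypothetical stand-in for that step, the sketch below copies line-oriented content to a Hadoop Path and counts the lines. The real method writes typed model objects, so treat this only as an outline of the copy-and-count contract.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class PatchWriteSketch {

    private PatchWriteSketch() {
    }

    // Hypothetical stand-in for write(): stores one record per line and counts them.
    public static long write(Path target, InputStream content) throws IOException {
        FileSystem fs = target.getFileSystem(new Configuration());
        long records = 0;
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(content, StandardCharsets.UTF_8));
                OutputStream out = fs.create(target)) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                out.write(line.getBytes(StandardCharsets.UTF_8));
                out.write('\n');
                records++;
            }
        }
        return records;
    }
}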