@Before
public void setUp() throws InterruptedException
{
    jdbcDataFetcher = new JdbcDataFetcher(
        derbyConnectorRule.getMetadataConnectorConfig(),
        "tableName",
        "keyColumn",
        "valueColumn",
        100
    );

    // open a handle to the embedded Derby instance and create the lookup table
    handle = derbyConnectorRule.getConnector().getDBI().open();
    Assert.assertEquals(
        0,
        handle.createStatement(
                  String.format(
                      "CREATE TABLE %s (%s VARCHAR(64), %s VARCHAR(64))",
                      tableName,
                      keyColumn,
                      valueColumn
                  )
              )
              .setQueryTimeout(1)
              .execute()
    );
    handle.createStatement(String.format("TRUNCATE TABLE %s", tableName))
          .setQueryTimeout(1)
          .execute();

    // seed the table with the expected lookup pairs
    for (Map.Entry<String, String> entry : lookupMap.entrySet()) {
        insertValues(entry.getKey(), entry.getValue(), handle);
    }
    handle.commit();
}
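// The fixtures referenced above are assumed to be declared roughly as
// follows. The field names mirror the string literals passed to
// JdbcDataFetcher, DerbyConnectorRule is taken to be the JUnit @Rule that
// provisions an embedded Derby metadata store, and the lookupMap contents
// are hypothetical. A minimal sketch, not the original file:

@Rule
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule =
    new TestDerbyConnector.DerbyConnectorRule();

private static final String tableName = "tableName";    // assumed to match the literal above
private static final String keyColumn = "keyColumn";
private static final String valueColumn = "valueColumn";

// hypothetical sample data; any key/value pairs would do
private final Map<String, String> lookupMap =
    ImmutableMap.of("foo", "bar", "bad", "bar", "how about that", "foo");

private JdbcDataFetcher jdbcDataFetcher;
private Handle handle;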
private void insertValues(final String key, final String val, Handle handle)
{
    // delete-then-insert so repeated calls with the same key behave as an upsert
    handle.createStatement(String.format("DELETE FROM %s WHERE %s='%s'", tableName, keyColumn, key))
          .setQueryTimeout(1)
          .execute();
    final String query = String.format(
        "INSERT INTO %s (%s, %s) VALUES ('%s', '%s')",
        tableName,
        keyColumn,
        valueColumn,
        key,
        val
    );
    Assert.assertEquals(1, handle.createStatement(query).setQueryTimeout(1).execute());
    handle.commit();
}
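// Example of how a test might exercise the fetcher once setUp() has
// populated the table. fetchAll() returning the key/value pairs as an
// Iterable of Map entries matches my reading of Druid's DataFetcher
// interface, but treat this as a sketch rather than the exact test:

@Test
public void testFetchAll()
{
    Map<String, String> fetched = new HashMap<>();
    for (Map.Entry<String, String> entry : jdbcDataFetcher.fetchAll()) {
        fetched.put(entry.getKey(), entry.getValue());
    }
    // everything inserted in setUp() should come back unchanged
    Assert.assertEquals(lookupMap, fetched);
}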
@Override
public <T extends Schedule> void updateSchedules(
        int projId,
        List<T> schedules,
        ScheduleUpdateAction<T> func)
    throws ResourceConflictException
{
    Map<String, Integer> oldScheduleNames = idNameListToHashMap(dao.getScheduleNames(projId));

    // Concurrent calls of updateSchedules don't happen because holding
    // ProjectControlStore means that the project is locked.
    //
    // However, ScheduleExecutor modifies schedules without locking the
    // project; it locks individual schedules instead. To avoid concurrent
    // updates of schedules, we need to lock each schedule before UPDATE.
    for (T schedule : schedules) {
        Integer matchedSchedId = oldScheduleNames.get(schedule.getWorkflowName());
        if (matchedSchedId != null) {
            // found the same name. lock it and update
            ScheduleStatus status = dao.lockScheduleById(matchedSchedId);
            if (status != null) {
                ScheduleTime newSchedule = func.apply(status, schedule);
                dao.updateScheduleById(
                        matchedSchedId,
                        schedule.getWorkflowDefinitionId(),
                        newSchedule.getRunTime().getEpochSecond(),
                        newSchedule.getTime().getEpochSecond());
                oldScheduleNames.remove(schedule.getWorkflowName());
            }
        }
        else {
            // name not found. insert a new entry.
            catchConflict(() ->
                    dao.insertSchedule(
                            projId,
                            schedule.getWorkflowDefinitionId(),
                            schedule.getNextRunTime().getEpochSecond(),
                            schedule.getNextScheduleTime().getEpochSecond()),
                    "workflow_definition_id=%d", schedule.getWorkflowDefinitionId());
        }
    }

    // delete unused schedules
    if (!oldScheduleNames.isEmpty()) {
        // these names don't exist any more
        handle.createStatement(
                "delete from schedules" +
                " where id " + inLargeIdListExpression(oldScheduleNames.values()))
            .execute();
    }
}
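// inLargeIdListExpression is assumed to render an id collection into a SQL
// "in (...)" clause so the delete above can drop all stale schedules in one
// statement. A simplified, hypothetical sketch of such a helper (the real
// one may chunk very large lists to stay within database limits):

private static String inLargeIdListExpression(Collection<Integer> ids)
{
    StringBuilder sb = new StringBuilder("in (");
    boolean first = true;
    for (int id : ids) {
        if (!first) {
            sb.append(", ");
        }
        sb.append(id);  // ids are integers, so no quoting or escaping is needed
        first = false;
    }
    return sb.append(")").toString();
}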
@After
public void tearDown()
{
    handle.createStatement("DROP TABLE " + tableName).setQueryTimeout(1).execute();
    handle.close();
}