@Test public void testInvalidTransitions() throws Exception { Feed mockEntity = new Feed(); mockEntity.setName("test"); storeEntity(EntityType.FEED, "test"); StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUBMIT, listener); // Attempt suspending a submitted entity try { StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUSPEND, listener); Assert.fail("Exception expected"); } catch (InvalidStateTransitionException e) { // Do nothing } StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SCHEDULE, listener); // Attempt resuming a scheduled entity try { StateService.get().handleStateChange(mockEntity, EntityState.EVENT.RESUME, listener); Assert.fail("Exception expected"); } catch (InvalidStateTransitionException e) { // Do nothing } // Attempt scheduling a cluster Cluster mockCluster = new Cluster(); mockCluster.setName("test"); StateService.get().handleStateChange(mockCluster, EntityState.EVENT.SUBMIT, listener); try { StateService.get().handleStateChange(mockCluster, EntityState.EVENT.SCHEDULE, listener); Assert.fail("Exception expected"); } catch (FalconException e) { // Do nothing } }
/**
 * Unmarshals the entity definition found at the classpath {@code resource},
 * renames it to {@code name}, and publishes it to the shared
 * {@link ConfigurationStore}, removing any previously stored entity of the
 * same type and name first.
 *
 * @param type     entity type to store (CLUSTER, FEED or PROCESS)
 * @param name     name assigned to the unmarshalled entity before publishing
 * @param resource classpath resource containing the entity XML definition
 * @throws IllegalArgumentException if {@code type} is not CLUSTER, FEED or PROCESS
 * @throws Exception if unmarshalling or store operations fail
 */
protected void storeEntity(EntityType type, String name, String resource) throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    store.remove(type, name);
    switch (type) {
    case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        cluster.setName(name);
        store.publish(type, cluster);
        break;
    case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        feed.setName(name);
        store.publish(type, feed);
        break;
    case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        process.setName(name);
        store.publish(type, process);
        break;
    default:
        // Was a silent no-op, which left the store with the entity removed but
        // nothing republished. Fail loudly instead, matching the four-argument
        // overload of this method.
        throw new IllegalArgumentException("Unhandled type: " + type);
    }
}
/**
 * Unmarshals the entity definition found at the classpath {@code resource},
 * optionally renames it, publishes it to the shared {@link ConfigurationStore},
 * and returns the published entity.
 *
 * @param type          entity type to store (CLUSTER, FEED or PROCESS)
 * @param name          name to assign before publishing; if {@code null}, any
 *                      existing entity is NOT removed and the unmarshalled name
 *                      is kept as-is
 * @param resource      classpath resource containing the entity XML definition
 * @param writeEndpoint for CLUSTER only: if non-null, the WRITE interface
 *                      endpoint is overridden and placeholder libext jars for
 *                      feed retention/replication are created on that filesystem
 * @return the published entity
 * @throws IllegalArgumentException if {@code type} is not CLUSTER, FEED or PROCESS
 * @throws Exception if unmarshalling, store, or filesystem operations fail
 */
protected Entity storeEntity(EntityType type, String name, String resource, String writeEndpoint) throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    switch (type) {
    case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
            store.remove(type, name);
            cluster.setName(name);
        }
        store.publish(type, cluster);
        if (writeEndpoint != null) {
            // NOTE(review): the endpoint is mutated AFTER publish — presumably the
            // store keeps the live object reference so this is still visible to
            // readers; confirm before reordering.
            ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(writeEndpoint);
            FileSystem fs = new Path(writeEndpoint).getFileSystem(EmbeddedCluster.newConfiguration());
            // Create empty placeholder ext.jar files so feed retention/replication
            // workflows find their libext entries under the cluster working dir.
            fs.create(
                    new Path(
                            ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                            "libext/FEED/retention/ext.jar"))
                    .close();
            fs.create(
                    new Path(
                            ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                            "libext/FEED/replication/ext.jar"))
                    .close();
        }
        return cluster;
    case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
            store.remove(type, name);
            feed.setName(name);
        }
        store.publish(type, feed);
        return feed;
    case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
            store.remove(type, name);
            process.setName(name);
        }
        store.publish(type, process);
        return process;
    default:
    }
    // Reached only for entity types not handled above.
    throw new IllegalArgumentException("Unhandled type: " + type);
}
/**
 * Builds an in-memory Feed named {@code name} with a wildcard ACL
 * (owner "user", group "hdfs"), blank clusters, and a single templatized
 * DATA location.
 */
private Feed buildFeed(String name) {
    Feed feed = new Feed();
    feed.setName(name);

    org.apache.falcon.entity.v0.feed.ACL acl = new org.apache.falcon.entity.v0.feed.ACL();
    acl.setOwner("user");
    acl.setGroup("hdfs");
    acl.setPermission("*");
    feed.setACL(acl);

    feed.setClusters(createBlankClusters());

    feed.setLocations(new Locations());
    feed.getLocations()
            .getLocations()
            .add(createLocation(
                    LocationType.DATA,
                    "/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}"));
    return feed;
}