@Test public void testInvalidTransitions() throws Exception { Feed mockEntity = new Feed(); mockEntity.setName("test"); storeEntity(EntityType.FEED, "test"); StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUBMIT, listener); // Attempt suspending a submitted entity try { StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUSPEND, listener); Assert.fail("Exception expected"); } catch (InvalidStateTransitionException e) { // Do nothing } StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SCHEDULE, listener); // Attempt resuming a scheduled entity try { StateService.get().handleStateChange(mockEntity, EntityState.EVENT.RESUME, listener); Assert.fail("Exception expected"); } catch (InvalidStateTransitionException e) { // Do nothing } // Attempt scheduling a cluster Cluster mockCluster = new Cluster(); mockCluster.setName("test"); StateService.get().handleStateChange(mockCluster, EntityState.EVENT.SUBMIT, listener); try { StateService.get().handleStateChange(mockCluster, EntityState.EVENT.SCHEDULE, listener); Assert.fail("Exception expected"); } catch (FalconException e) { // Do nothing } }
/**
 * Submits a cluster and a feed, then fetches the feed definition through the REST API and
 * verifies the returned XML unmarshals to a Feed with the expected name.
 */
@Test
public void testGetEntityDefinition() throws Exception {
  ClientResponse response;
  Map<String, String> overlay = getUniqueOverlay();
  response = submitToFalcon(CLUSTER_FILE_TEMPLATE, overlay, EntityType.CLUSTER);
  assertSuccessful(response);
  response = submitToFalcon(FEED_TEMPLATE1, overlay, EntityType.FEED);
  assertSuccessful(response);

  response =
      this.service
          .path("api/entities/definition/feed/" + overlay.get("inputFeedName"))
          .header("Remote-User", REMOTE_USER)
          .accept(MediaType.TEXT_XML)
          .get(ClientResponse.class);

  String feedXML = response.getEntity(String.class);
  try {
    Feed result = (Feed) unmarshaller.unmarshal(new StringReader(feedXML));
    Assert.assertEquals(result.getName(), overlay.get("inputFeedName"));
  } catch (JAXBException e) {
    // Fixed typo in the failure message: "Reponse" -> "Response".
    Assert.fail("Response " + feedXML + " is not valid", e);
  }
}
/**
 * Parses the entity definition from the given classpath resource, renames it, and
 * (re)publishes it into the shared configuration store. Entity types other than
 * CLUSTER/FEED/PROCESS are ignored.
 */
protected void storeEntity(EntityType type, String name, String resource) throws Exception {
  Unmarshaller parser = type.getUnmarshaller();
  ConfigurationStore configStore = ConfigurationStore.get();
  configStore.remove(type, name); // drop any stale copy from a previous run
  switch (type) {
    case CLUSTER:
      Cluster clusterEntity = (Cluster) parser.unmarshal(this.getClass().getResource(resource));
      clusterEntity.setName(name);
      configStore.publish(type, clusterEntity);
      break;
    case FEED:
      Feed feedEntity = (Feed) parser.unmarshal(this.getClass().getResource(resource));
      feedEntity.setName(name);
      configStore.publish(type, feedEntity);
      break;
    case PROCESS:
      Process processEntity = (Process) parser.unmarshal(this.getClass().getResource(resource));
      processEntity.setName(name);
      configStore.publish(type, processEntity);
      break;
    default:
      // other entity types: intentionally no-op
  }
}
@Test(dependsOnMethods = "testOnChange") public void testOnFeedEntityChange() throws Exception { Feed oldFeed = inputFeeds.get(0); Feed newFeed = EntityBuilderTestUtil.buildFeed( oldFeed.getName(), clusterEntity, "classified-as=Secured,source=data-warehouse", "reporting"); addStorage( newFeed, Storage.TYPE.FILESYSTEM, "jail://global:00/falcon/impression-feed/20140101"); try { configStore.initiateUpdate(newFeed); // add cluster org.apache.falcon.entity.v0.feed.Cluster feedCluster = new org.apache.falcon.entity.v0.feed.Cluster(); feedCluster.setName(anotherCluster.getName()); newFeed.getClusters().getClusters().add(feedCluster); configStore.update(EntityType.FEED, newFeed); } finally { configStore.cleanupUpdateInit(); } verifyUpdatedEdges(newFeed); Assert.assertEquals(getVerticesCount(service.getGraph()), 22); // +2 = 2 new tags Assert.assertEquals(getEdgesCount(service.getGraph()), 35); // +2 = 1 new cluster, 1 new tag }
/**
 * Resolves the set of feed groups this feed belongs to, based on the feed's declared groups,
 * its frequency, and the DATA-location URI template of its storage.
 */
public Set<FeedGroup> getGroups(org.apache.falcon.entity.v0.feed.Feed feed)
    throws FalconException {
  String dataPathTemplate = FeedHelper.createStorage(feed).getUriTemplate(LocationType.DATA);
  return getGroups(feed.getGroups(), feed.getFrequency(), dataPathTemplate);
}
/**
 * Verifies that adding feed entities creates the expected vertices and edges in the lineage
 * graph. Each addFeedEntity call should add one feed vertex plus vertices for any tags,
 * groups and user not already present, and edges linking the feed to its cluster, tags,
 * groups and owner; the running vertex/edge counts below track those increments.
 * Runs after the cluster entity has been added.
 */
@Test(dependsOnMethods = "testOnAddClusterEntity")
public void testOnAddFeedEntity() throws Exception {
  Feed impressionsFeed =
      addFeedEntity(
          "impression-feed",
          clusterEntity,
          "classified-as=Secure",
          "analytics",
          Storage.TYPE.FILESYSTEM,
          "/falcon/impression-feed/${YEAR}/${MONTH}/${DAY}");
  inputFeeds.add(impressionsFeed);
  verifyEntityWasAddedToGraph(impressionsFeed.getName(), RelationshipType.FEED_ENTITY);
  verifyFeedEntityEdges(impressionsFeed.getName(), "Secure", "analytics");
  Assert.assertEquals(getVerticesCount(service.getGraph()), 7); // +4 = feed, tag, group, user
  Assert.assertEquals(getEdgesCount(service.getGraph()), 6); // +4 = cluster, tag, group, user

  // Second feed reuses the existing Secure tag and analytics group; only the feed vertex
  // and the new Financial tag vertex are added.
  Feed clicksFeed =
      addFeedEntity(
          "clicks-feed",
          clusterEntity,
          "classified-as=Secure,classified-as=Financial",
          "analytics",
          Storage.TYPE.FILESYSTEM,
          "/falcon/clicks-feed/${YEAR}-${MONTH}-${DAY}");
  inputFeeds.add(clicksFeed);
  verifyEntityWasAddedToGraph(clicksFeed.getName(), RelationshipType.FEED_ENTITY);
  Assert.assertEquals(getVerticesCount(service.getGraph()), 9); // feed and financial vertex
  Assert.assertEquals(
      getEdgesCount(service.getGraph()), 11); // +5 = cluster + user + 2Group + Tag

  // Output feed introduces two new group vertices (reporting, bi).
  Feed join1Feed =
      addFeedEntity(
          "imp-click-join1",
          clusterEntity,
          "classified-as=Financial",
          "reporting,bi",
          Storage.TYPE.FILESYSTEM,
          "/falcon/imp-click-join1/${YEAR}${MONTH}${DAY}");
  outputFeeds.add(join1Feed);
  verifyEntityWasAddedToGraph(join1Feed.getName(), RelationshipType.FEED_ENTITY);
  Assert.assertEquals(getVerticesCount(service.getGraph()), 12); // + 3 = 1 feed and 2 groups
  Assert.assertEquals(getEdgesCount(service.getGraph()), 16); // +5 = cluster + user +
  // Group + 2Tags

  // Final output feed: all tags and groups already exist, so only the feed vertex is new.
  Feed join2Feed =
      addFeedEntity(
          "imp-click-join2",
          clusterEntity,
          "classified-as=Secure,classified-as=Financial",
          "reporting,bi",
          Storage.TYPE.FILESYSTEM,
          "/falcon/imp-click-join2/${YEAR}${MONTH}${DAY}");
  outputFeeds.add(join2Feed);
  verifyEntityWasAddedToGraph(join2Feed.getName(), RelationshipType.FEED_ENTITY);

  Assert.assertEquals(getVerticesCount(service.getGraph()), 13); // +1 feed
  // +6 = user + 2tags + 2Groups + Cluster
  Assert.assertEquals(getEdgesCount(service.getGraph()), 22);
}
/**
 * On entity submission, registers a feed with each of its declared groups.
 * Non-feed entities and feeds without any groups are ignored.
 *
 * @param entity the entity being added to the config store
 * @throws FalconException if group resolution fails
 */
@Override
public void onAdd(Entity entity) throws FalconException {
  if (!entity.getEntityType().equals(EntityType.FEED)) {
    return;
  }
  Feed feed = (Feed) entity;
  // isEmpty() instead of equals("") — same check, clearer intent, and mirrors the
  // empty-groups guard used by onRemove.
  if (feed.getGroups() == null || feed.getGroups().isEmpty()) {
    return;
  }
  Set<FeedGroup> groupSet = getGroups(feed);
  addGroups(feed.getName(), groupSet);
}
/**
 * Parses an entity from the given classpath resource, optionally renames and republishes it,
 * and returns the published entity. For clusters, a non-null {@code writeEndpoint} also
 * rewrites the WRITE interface endpoint and pre-creates the libext retention/replication
 * jars that feed workflows expect to find.
 *
 * @throws IllegalArgumentException for entity types other than CLUSTER/FEED/PROCESS
 */
protected Entity storeEntity(EntityType type, String name, String resource, String writeEndpoint)
    throws Exception {
  Unmarshaller parser = type.getUnmarshaller();
  ConfigurationStore configStore = ConfigurationStore.get();
  switch (type) {
    case CLUSTER:
      Cluster clusterEntity = (Cluster) parser.unmarshal(this.getClass().getResource(resource));
      if (name != null) {
        configStore.remove(type, name);
        clusterEntity.setName(name);
      }
      configStore.publish(type, clusterEntity);
      if (writeEndpoint != null) {
        ClusterHelper.getInterface(clusterEntity, Interfacetype.WRITE).setEndpoint(writeEndpoint);
        FileSystem fs = new Path(writeEndpoint).getFileSystem(EmbeddedCluster.newConfiguration());
        String workingDir =
            ClusterHelper.getLocation(clusterEntity, ClusterLocationType.WORKING).getPath();
        // Touch the extension jars so retention/replication workflows can resolve them.
        fs.create(new Path(workingDir, "libext/FEED/retention/ext.jar")).close();
        fs.create(new Path(workingDir, "libext/FEED/replication/ext.jar")).close();
      }
      return clusterEntity;
    case FEED:
      Feed feedEntity = (Feed) parser.unmarshal(this.getClass().getResource(resource));
      if (name != null) {
        configStore.remove(type, name);
        feedEntity.setName(name);
      }
      configStore.publish(type, feedEntity);
      return feedEntity;
    case PROCESS:
      Process processEntity = (Process) parser.unmarshal(this.getClass().getResource(resource));
      if (name != null) {
        configStore.remove(type, name);
        processEntity.setName(name);
      }
      configStore.publish(type, processEntity);
      return processEntity;
    default:
  }
  throw new IllegalArgumentException("Unhandled type: " + type);
}
/**
 * Applies the delta between {@code oldFeed} and {@code newFeed} to the feed's entity vertex:
 * tag, group and cluster relations are reconciled in place.
 *
 * @throws IllegalStateException if no entity vertex exists for the old feed name
 */
public void updateFeedEntity(Feed oldFeed, Feed newFeed) {
  LOG.info("Updating feed entity: {}", newFeed.getName());
  Vertex feedVertex = findVertex(oldFeed.getName(), RelationshipType.FEED_ENTITY);
  if (feedVertex == null) {
    // The vertex is created when the feed is first added; its absence means the graph and
    // the config store are out of sync.
    LOG.error("Illegal State: Feed entity vertex must exist for {}", oldFeed.getName());
    throw new IllegalStateException(oldFeed.getName() + " entity vertex must exist.");
  }
  updateDataClassification(oldFeed.getTags(), newFeed.getTags(), feedVertex);
  updateGroups(oldFeed.getGroups(), newFeed.getGroups(), feedVertex);
  updateFeedClusters(
      oldFeed.getClusters().getClusters(), newFeed.getClusters().getClusters(), feedVertex);
}
/**
 * Tests should be enabled only in local environments as they need running instance of webserver.
 *
 * <p>Verifies that when a feed is updated by a different user, the resulting new bundle still
 * runs as the original submitting user rather than the updater.
 */
@Test
public void testUpdateCheckUser() throws Exception {
  Map<String, String> overlay = getUniqueOverlay();
  String tmpFileName = overlayParametersOverTemplate(PROCESS_TEMPLATE, overlay);
  Process process =
      (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
  // Push the validity end two days out so the scheduled process stays active for the test.
  Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
  processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
  File tmpFile = getTempFile();
  EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
  scheduleProcess(tmpFile.getAbsolutePath(), overlay);
  waitForBundleStart(Status.RUNNING);
  // Exactly one bundle, owned by the submitting user.
  List<BundleJob> bundles = getBundles();
  Assert.assertEquals(bundles.size(), 1);
  Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);
  // Fetch the output feed definition as the original user.
  ClientResponse response =
      this.service
          .path("api/entities/definition/feed/" + outputFeedName)
          .header("Remote-User", REMOTE_USER)
          .accept(MediaType.TEXT_XML)
          .get(ClientResponse.class);
  Feed feed =
      (Feed)
          EntityType.FEED
              .getUnmarshaller()
              .unmarshal(new StringReader(response.getEntity(String.class)));
  // change output feed path and update feed as another user
  feed.getLocations()
      .getLocations()
      .get(0)
      .setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}");
  tmpFile = getTempFile();
  EntityType.FEED.getMarshaller().marshal(feed, tmpFile);
  response =
      this.service
          .path("api/entities/update/feed/" + outputFeedName)
          .header("Remote-User", "testuser")
          .accept(MediaType.TEXT_XML)
          .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
  assertSuccessful(response);
  // The update spawns a second bundle; both must still be owned by the original user.
  bundles = getBundles();
  Assert.assertEquals(bundles.size(), 2);
  Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);
  Assert.assertEquals(bundles.get(1).getUser(), REMOTE_USER);
}
/**
 * Attaches storage to a feed: for FILESYSTEM the template becomes the feed's DATA location,
 * otherwise it is used as a catalog table URI.
 */
private static void addStorage(Feed feed, Storage.TYPE storageType, String uriTemplate) {
  if (storageType == Storage.TYPE.FILESYSTEM) {
    Location dataLocation = new Location();
    dataLocation.setType(LocationType.DATA);
    dataLocation.setPath(uriTemplate);
    Locations feedLocations = new Locations();
    feedLocations.getLocations().add(dataLocation);
    feed.setLocations(feedLocations);
  } else {
    CatalogTable catalogTable = new CatalogTable();
    catalogTable.setUri(uriTemplate);
    feed.setTable(catalogTable);
  }
}
/**
 * On feed removal, deregisters the feed from each of its groups and drops any group that
 * becomes empty as a result. Non-feed entities and feeds without groups are ignored.
 *
 * @param entity the entity being removed from the config store
 */
@Override
public void onRemove(Entity entity) throws FalconException {
  if (!entity.getEntityType().equals(EntityType.FEED)) {
    return;
  }
  Feed feed = (Feed) entity;
  if (StringUtils.isEmpty(feed.getGroups())) {
    return;
  }
  for (String group : feed.getGroups().split(",")) {
    // Single lookup instead of three; guard against a group that was never registered
    // (or already cleaned up), which previously caused an NPE.
    FeedGroup feedGroup = groupsMapping.get(group);
    if (feedGroup == null) {
      continue;
    }
    feedGroup.getFeeds().remove(entity.getName());
    if (feedGroup.getFeeds().isEmpty()) {
      groupsMapping.remove(group);
    }
  }
}
private void addClusterAndFeedForReplication() throws Exception { // Add cluster clusterEntity = addClusterEntity(CLUSTER_ENTITY_NAME, COLO_NAME, "classification=production"); // Add backup cluster Cluster bcpCluster = addClusterEntity(BCP_CLUSTER_ENTITY_NAME, "east-coast", "classification=bcp"); Cluster[] clusters = {clusterEntity, bcpCluster}; // Add feed Feed rawFeed = addFeedEntity( REPLICATED_FEED, clusters, "classified-as=Secure", "analytics", Storage.TYPE.FILESYSTEM, "/falcon/raw-click/${YEAR}/${MONTH}/${DAY}"); // Add uri template for each cluster for (org.apache.falcon.entity.v0.feed.Cluster feedCluster : rawFeed.getClusters().getClusters()) { if (feedCluster.getName().equals(CLUSTER_ENTITY_NAME)) { addStorage( feedCluster, rawFeed, Storage.TYPE.FILESYSTEM, "/falcon/raw-click/primary/${YEAR}/${MONTH}/${DAY}"); } else { addStorage( feedCluster, rawFeed, Storage.TYPE.FILESYSTEM, "/falcon/raw-click/bcp/${YEAR}/${MONTH}/${DAY}"); } } // update config store try { configStore.initiateUpdate(rawFeed); configStore.update(EntityType.FEED, rawFeed); } finally { configStore.cleanupUpdateInit(); } inputFeeds.add(rawFeed); }
/**
 * Builds a feed with the given tags/groups/storage, marks the BCP cluster (if present) as the
 * replication TARGET, publishes the feed, and returns it.
 */
private Feed addFeedEntity(
    String feedName,
    Cluster[] clusters,
    String tags,
    String groups,
    Storage.TYPE storageType,
    String uriTemplate)
    throws Exception {
  Feed feedEntity = EntityBuilderTestUtil.buildFeed(feedName, clusters, tags, groups);
  addStorage(feedEntity, storageType, uriTemplate);
  for (org.apache.falcon.entity.v0.feed.Cluster feedCluster :
      feedEntity.getClusters().getClusters()) {
    if (BCP_CLUSTER_ENTITY_NAME.equals(feedCluster.getName())) {
      feedCluster.setType(ClusterType.TARGET);
    }
  }
  configStore.publish(EntityType.FEED, feedEntity);
  return feedEntity;
}
/**
 * Adds a feed-instance vertex for the given feed/path, links it to the triggering process
 * instance via {@code edgeLabel}, and connects it to its feed entity, cluster and workflow
 * user. When history preservation is on, the instance also inherits the feed's tags and
 * groups from the stored feed definition.
 */
private void addFeedInstance(
    Vertex processInstance,
    RelationshipLabel edgeLabel,
    WorkflowExecutionContext context,
    String feedName,
    String feedInstanceDataPath)
    throws FalconException {
  String clusterName = context.getClusterName();
  // Parameterized logging (consistent with the rest of this class) instead of string
  // concatenation, so the message is only assembled when INFO is enabled.
  LOG.info(
      "Computing feed instance for : name={}, path= {}, in cluster: {}",
      feedName,
      feedInstanceDataPath,
      clusterName);
  String feedInstanceName =
      getFeedInstanceName(
          feedName, clusterName, feedInstanceDataPath, context.getNominalTimeAsISO8601());
  LOG.info("Adding feed instance: {}", feedInstanceName);
  Vertex feedInstance =
      addVertex(feedInstanceName, RelationshipType.FEED_INSTANCE, context.getTimeStampAsISO8601());
  addProcessFeedEdge(processInstance, feedInstance, edgeLabel);
  addInstanceToEntity(
      feedInstance, feedName, RelationshipType.FEED_ENTITY, RelationshipLabel.INSTANCE_ENTITY_EDGE);
  addInstanceToEntity(
      feedInstance, clusterName, RelationshipType.CLUSTER_ENTITY, RelationshipLabel.FEED_CLUSTER_EDGE);
  addInstanceToEntity(
      feedInstance, context.getWorkflowUser(), RelationshipType.USER, RelationshipLabel.USER);
  if (isPreserveHistory()) {
    Feed feed = ConfigurationStore.get().get(EntityType.FEED, feedName);
    addDataClassification(feed.getTags(), feedInstance);
    addGroups(feed.getGroups(), feedInstance);
  }
}
/**
 * Derives "feedName/instanceDate" for a filesystem feed-instance path by stripping the static
 * fragments of the feed's DATA path template, leaving only the date portion. Falls back to
 * "feedName/nominalTime" when nothing remains after stripping.
 */
private static String getFileSystemFeedInstanceName(
    String feedInstancePath, Feed feed, Cluster cluster, String nominalTime)
    throws FalconException {
  Storage rawStorage = FeedHelper.createStorage(cluster, feed);
  String feedPathTemplate = rawStorage.getUriTemplate(LocationType.DATA);
  String instance = feedInstancePath;
  String[] elements = FeedDataPath.PATTERN.split(feedPathTemplate);
  for (String element : elements) {
    // Each template fragment is a literal path piece, but replaceFirst interprets its
    // argument as a regex — quote it so metacharacters (e.g. '.') in the path cannot
    // cause a mismatched or over-eager strip.
    instance = instance.replaceFirst(java.util.regex.Pattern.quote(element), "");
  }
  return StringUtils.isEmpty(instance)
      ? feed.getName() + "/" + nominalTime
      : feed.getName() + "/" + SchemaHelper.formatDateUTCToISO8601(instance, FEED_INSTANCE_FORMAT);
}
/**
 * Attaches per-cluster storage to a feed: for FILESYSTEM the template becomes the cluster's
 * DATA location (the feed-level locations are reset to an empty container), otherwise the
 * template is used as the cluster's catalog table URI.
 */
private static void addStorage(
    org.apache.falcon.entity.v0.feed.Cluster cluster,
    Feed feed,
    Storage.TYPE storageType,
    String uriTemplate) {
  if (storageType == Storage.TYPE.FILESYSTEM) {
    feed.setLocations(new Locations());
    Location dataLocation = new Location();
    dataLocation.setType(LocationType.DATA);
    dataLocation.setPath(uriTemplate);
    Locations clusterLocations = new Locations();
    clusterLocations.getLocations().add(dataLocation);
    cluster.setLocations(clusterLocations);
  } else {
    CatalogTable catalogTable = new CatalogTable();
    catalogTable.setUri(uriTemplate);
    cluster.setTable(catalogTable);
  }
}
private void verifyUpdatedEdges(Feed newFeed) { Vertex feedVertex = getEntityVertex(newFeed.getName(), RelationshipType.FEED_ENTITY); // groups Edge edge = feedVertex.getEdges(Direction.OUT, RelationshipLabel.GROUPS.getName()).iterator().next(); Assert.assertEquals(edge.getVertex(Direction.IN).getProperty("name"), "reporting"); // tags edge = feedVertex.getEdges(Direction.OUT, "classified-as").iterator().next(); Assert.assertEquals(edge.getVertex(Direction.IN).getProperty("name"), "Secured"); edge = feedVertex.getEdges(Direction.OUT, "source").iterator().next(); Assert.assertEquals(edge.getVertex(Direction.IN).getProperty("name"), "data-warehouse"); // new cluster List<String> actual = new ArrayList<String>(); for (Edge clusterEdge : feedVertex.getEdges(Direction.OUT, RelationshipLabel.FEED_CLUSTER_EDGE.getName())) { actual.add(clusterEdge.getVertex(Direction.IN).<String>getProperty("name")); } Assert.assertTrue( actual.containsAll(Arrays.asList("primary-cluster", "another-cluster")), "Actual does not contain expected: " + actual); }
/**
 * Builds a minimal feed with a fixed ACL (user/hdfs/*), blank clusters and a single DATA
 * location pointing at the shared test input path template.
 */
private Feed buildFeed(String name) {
  org.apache.falcon.entity.v0.feed.ACL acl = new org.apache.falcon.entity.v0.feed.ACL();
  acl.setOwner("user");
  acl.setGroup("hdfs");
  acl.setPermission("*");

  Feed feed = new Feed();
  feed.setName(name);
  feed.setACL(acl);
  feed.setClusters(createBlankClusters());

  Location dataLocation =
      createLocation(LocationType.DATA, "/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}");
  Locations locations = new Locations();
  locations.getLocations().add(dataLocation);
  feed.setLocations(locations);
  return feed;
}
/**
 * Adds a feed entity vertex plus its user, tag and group relations, then links it to every
 * non-target cluster and, separately, to any datasource it imports from.
 */
public void addFeedEntity(Feed feed) {
  LOG.info("Adding feed entity: {}", feed.getName());
  Vertex feedVertex = addVertex(feed.getName(), RelationshipType.FEED_ENTITY);

  addUserRelation(feedVertex);
  addDataClassification(feed.getTags(), feedVertex);
  addGroups(feed.getGroups(), feedVertex);

  // Only non-TARGET (i.e. source) clusters get a feed-cluster edge.
  for (org.apache.falcon.entity.v0.feed.Cluster feedCluster : feed.getClusters().getClusters()) {
    if (feedCluster.getType() != ClusterType.TARGET) {
      addRelationToCluster(feedVertex, feedCluster.getName(), RelationshipLabel.FEED_CLUSTER_EDGE);
    }
  }

  // Import-enabled clusters additionally link the feed to its datasource.
  for (org.apache.falcon.entity.v0.feed.Cluster feedCluster : feed.getClusters().getClusters()) {
    if (FeedHelper.isImportEnabled(feedCluster)) {
      addRelationToDatasource(
          feedVertex,
          FeedHelper.getImportDatasourceName(feedCluster),
          RelationshipLabel.DATASOURCE_IMPORT_EDGE);
    }
  }
}
/**
 * Derives "feedName/partitionPath" for a catalog (table) feed instance: the partition portion
 * of the catalog URI becomes the instance suffix.
 */
private static String getTableFeedInstanceName(
    Feed feed, String feedInstancePath, Storage.TYPE storageType) throws URISyntaxException {
  CatalogStorage instanceStorage =
      (CatalogStorage) FeedHelper.createStorage(storageType.name(), feedInstancePath);
  String partitionPath = instanceStorage.toPartitionAsPath();
  return feed.getName() + "/" + partitionPath;
}