public Vertex addProcessInstance(WorkflowExecutionContext context) throws FalconException {
    String processInstanceName = getProcessInstanceName(context);
    LOG.info("Adding process instance: " + processInstanceName);

    Vertex processInstance =
        addVertex(
            processInstanceName,
            RelationshipType.PROCESS_INSTANCE,
            context.getTimeStampAsISO8601());
    addWorkflowInstanceProperties(processInstance, context);

    addInstanceToEntity(
        processInstance,
        context.getEntityName(),
        RelationshipType.PROCESS_ENTITY,
        RelationshipLabel.INSTANCE_ENTITY_EDGE);
    addInstanceToEntity(
        processInstance,
        context.getClusterName(),
        RelationshipType.CLUSTER_ENTITY,
        RelationshipLabel.PROCESS_CLUSTER_EDGE);
    addInstanceToEntity(
        processInstance, context.getWorkflowUser(), RelationshipType.USER, RelationshipLabel.USER);

    if (isPreserveHistory()) {
      Process process = ConfigurationStore.get().get(EntityType.PROCESS, context.getEntityName());
      addDataClassification(process.getTags(), processInstance);
      addPipelines(process.getPipelines(), processInstance);
    }

    return processInstance;
  }
  @Test
  public void testProcessEndtimeUpdate() throws Exception {
    scheduleProcess();
    waitForBundleStart(Job.Status.RUNNING);

    ClientResponse response =
        this.service
            .path("api/entities/definition/process/" + processName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .get(ClientResponse.class);
    Process process =
        (Process)
            EntityType.PROCESS
                .getUnmarshaller()
                .unmarshal(new StringReader(response.getEntity(String.class)));

    Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
    processValidity.setEnd(new Date(new Date().getTime() + 60 * 60 * 1000));
    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    response =
        this.service
            .path("api/entities/update/process/" + processName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
    assertSuccessful(response);

    // Assert that update does not create new bundle
    List<BundleJob> bundles = getBundles();
    Assert.assertEquals(bundles.size(), 1);
  }
  protected void storeEntity(EntityType type, String name, String resource) throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    store.remove(type, name);
    switch (type) {
      case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        cluster.setName(name);
        store.publish(type, cluster);
        break;

      case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        feed.setName(name);
        store.publish(type, feed);
        break;

      case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        process.setName(name);
        store.publish(type, process);
        break;

      default:
    }
  }
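A minimal sketch of how a test might seed the ConfigurationStore with this helper; the entity names and classpath resource paths below are hypothetical placeholders and would need to exist under the test resources for this to run.

  // Hypothetical per-test setup; assumes TestNG's @BeforeMethod and the same
  // imports as the helper above. Names and resource paths are placeholders.
  @BeforeMethod
  public void setUp() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster", "/config/cluster/cluster-0.1.xml");
    storeEntity(EntityType.FEED, "testFeed", "/config/feed/feed-0.1.xml");
    storeEntity(EntityType.PROCESS, "testProcess", "/config/process/process-0.1.xml");
  }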
  private void verifyUpdatedEdges(Process newProcess) {
    Vertex processVertex = getEntityVertex(newProcess.getName(), RelationshipType.PROCESS_ENTITY);

    // cluster
    Edge edge =
        processVertex
            .getEdges(Direction.OUT, RelationshipLabel.PROCESS_CLUSTER_EDGE.getName())
            .iterator()
            .next();
    Assert.assertEquals(edge.getVertex(Direction.IN).getProperty("name"), anotherCluster.getName());

    // inputs
    edge =
        processVertex
            .getEdges(Direction.IN, RelationshipLabel.FEED_PROCESS_EDGE.getName())
            .iterator()
            .next();
    Assert.assertEquals(
        edge.getVertex(Direction.OUT).getProperty("name"),
        newProcess.getInputs().getInputs().get(0).getFeed());

    // outputs
    for (Edge e :
        processVertex.getEdges(Direction.OUT, RelationshipLabel.PROCESS_FEED_EDGE.getName())) {
      Assert.fail("there should not be any edges to output feeds" + e);
    }
  }
Example #5
 @Test(dataProvider = "state_and_events")
 public void testIdempotency(EntityState.STATE state, EntityState.EVENT event) throws Exception {
   Process mockEntity = new Process();
   mockEntity.setName("test");
   storeEntity(EntityType.PROCESS, "test");
   EntityState entityState = new EntityState(mockEntity).setCurrentState(state);
   entityState.nextTransition(event);
   Assert.assertEquals(entityState.getCurrentState(), state);
 }
Example #6
  protected Entity storeEntity(EntityType type, String name, String resource, String writeEndpoint)
      throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    switch (type) {
      case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          cluster.setName(name);
        }
        store.publish(type, cluster);

        if (writeEndpoint != null) {
          ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(writeEndpoint);
          FileSystem fs = new Path(writeEndpoint).getFileSystem(EmbeddedCluster.newConfiguration());
          fs.create(
                  new Path(
                      ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                      "libext/FEED/retention/ext.jar"))
              .close();
          fs.create(
                  new Path(
                      ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                      "libext/FEED/replication/ext.jar"))
              .close();
        }

        return cluster;

      case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          feed.setName(name);
        }
        store.publish(type, feed);
        return feed;

      case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          process.setName(name);
        }
        store.publish(type, process);
        return process;

      default:
    }

    throw new IllegalArgumentException("Unhandled type: " + type);
  }
 private static void bindTagsProperties(
     final org.apache.falcon.entity.v0.process.Process process,
     final Properties extensionProperties) {
   String falconSystemTags = process.getTags();
   String tags = extensionProperties.getProperty(ExtensionProperties.JOB_TAGS.getName());
   if (StringUtils.isNotEmpty(tags)) {
     if (StringUtils.isNotEmpty(falconSystemTags)) {
       tags += ", " + falconSystemTags;
     }
     process.setTags(tags);
   }
 }
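A minimal sketch of the merge order this method produces, assuming the test lives in the same class (the method is private static) and that TestNG's Assert is available; the job tag values are placeholders, with "classified-as=Critical" reused from the snippets above as the pre-existing Falcon system tag.

 @Test
 public void testBindTagsAppendsFalconSystemTags() {
   // Process with system tags already set, as Falcon would have stamped them.
   org.apache.falcon.entity.v0.process.Process process =
       new org.apache.falcon.entity.v0.process.Process();
   process.setTags("classified-as=Critical");

   // Extension properties carrying the user-supplied job tags (values are placeholders).
   Properties extensionProperties = new Properties();
   extensionProperties.setProperty(
       ExtensionProperties.JOB_TAGS.getName(), "owner=etl, tier=gold");

   bindTagsProperties(process, extensionProperties);

   // Extension tags come first; the existing system tags are appended after them.
   Assert.assertEquals(process.getTags(), "owner=etl, tier=gold, classified-as=Critical");
 }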
  /**
   * Tests should be enabled only in local environments as they need a running instance of the webserver.
   */
  @Test
  public void testUpdateCheckUser() throws Exception {
    Map<String, String> overlay = getUniqueOverlay();
    String tmpFileName = overlayParametersOverTemplate(PROCESS_TEMPLATE, overlay);
    Process process =
        (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
    Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
    processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    scheduleProcess(tmpFile.getAbsolutePath(), overlay);
    waitForBundleStart(Status.RUNNING);

    List<BundleJob> bundles = getBundles();
    Assert.assertEquals(bundles.size(), 1);
    Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);

    ClientResponse response =
        this.service
            .path("api/entities/definition/feed/" + outputFeedName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .get(ClientResponse.class);
    Feed feed =
        (Feed)
            EntityType.FEED
                .getUnmarshaller()
                .unmarshal(new StringReader(response.getEntity(String.class)));

    // change output feed path and update feed as another user
    feed.getLocations()
        .getLocations()
        .get(0)
        .setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}");
    tmpFile = getTempFile();
    EntityType.FEED.getMarshaller().marshal(feed, tmpFile);
    response =
        this.service
            .path("api/entities/update/feed/" + outputFeedName)
            .header("Remote-User", "testuser")
            .accept(MediaType.TEXT_XML)
            .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
    assertSuccessful(response);

    bundles = getBundles();
    Assert.assertEquals(bundles.size(), 2);
    Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);
    Assert.assertEquals(bundles.get(1).getUser(), REMOTE_USER);
  }
  @Test
  public void testProcessInputUpdate() throws Exception {
    scheduleProcess();
    waitForBundleStart(Job.Status.RUNNING);

    ClientResponse response =
        this.service
            .path("api/entities/definition/process/" + processName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .get(ClientResponse.class);
    Process process =
        (Process)
            EntityType.PROCESS
                .getUnmarshaller()
                .unmarshal(new StringReader(response.getEntity(String.class)));

    String feed3 = "f3" + System.currentTimeMillis();
    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("inputFeedName", feed3);
    overlay.put("cluster", clusterName);
    response = submitToFalcon(FEED_TEMPLATE1, overlay, EntityType.FEED);
    assertSuccessful(response);

    Input input = new Input();
    input.setFeed(feed3);
    input.setName("inputData2");
    input.setStart("today(20,0)");
    input.setEnd("today(20,20)");
    process.getInputs().getInputs().add(input);

    Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
    processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    response =
        this.service
            .path("api/entities/update/process/" + processName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
    assertSuccessful(response);

    // Assert that update creates new bundle
    List<BundleJob> bundles = getBundles();
    Assert.assertEquals(bundles.size(), 2);
  }
  @Test(dependsOnMethods = "testOnFeedEntityChange")
  public void testOnProcessEntityChange() throws Exception {
    Process oldProcess = processEntity;
    Process newProcess =
        EntityBuilderTestUtil.buildProcess(oldProcess.getName(), anotherCluster, null, null);
    EntityBuilderTestUtil.addProcessWorkflow(newProcess, GENERATE_WORKFLOW_NAME, "2.0.0");
    EntityBuilderTestUtil.addInput(newProcess, inputFeeds.get(0));

    try {
      configStore.initiateUpdate(newProcess);
      configStore.update(EntityType.PROCESS, newProcess);
    } finally {
      configStore.cleanupUpdateInit();
    }

    verifyUpdatedEdges(newProcess);
    Assert.assertEquals(getVerticesCount(service.getGraph()), 22); // +0, no net new
    Assert.assertEquals(
        getEdgesCount(service.getGraph()), 29); // -6 = -2 outputs, -1 tag, -1 cluster, -2 pipelines
  }
  public static Entity createProcessFromTemplate(
      final String processTemplate,
      final String extensionName,
      final Properties extensionProperties,
      final String wfPath,
      final String wfLibPath)
      throws FalconException {
    if (StringUtils.isBlank(processTemplate)
        || StringUtils.isBlank(extensionName)
        || extensionProperties == null
        || StringUtils.isBlank(wfPath)) {
      throw new FalconException("Invalid arguments passed to extension builder");
    }
    org.apache.falcon.entity.v0.process.Process process =
        bindAttributesInTemplate(
            processTemplate, extensionProperties, extensionName, wfPath, wfLibPath);

    validateGeneratedProcess(process.toString());
    return process;
  }
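A hedged sketch of calling this builder; the property keys are the ExtensionProperties entries used elsewhere in these snippets, while the extension name, template XML string, and workflow/lib paths are placeholders, and the unqualified call assumes the sketch lives in (or statically imports from) the builder class above.

  // Hypothetical call site; processTemplateXml is the raw XML of the extension's
  // process template, and the extension name and paths are placeholders.
  private Entity buildMirroringProcess(String processTemplateXml) throws FalconException {
    Properties props = new Properties();
    props.setProperty(ExtensionProperties.JOB_NAME.getName(), "hdfs-mirroring-job");
    props.setProperty(ExtensionProperties.FREQUENCY.getName(), "minutes(5)");
    props.setProperty(ExtensionProperties.TIMEZONE.getName(), "UTC");

    return createProcessFromTemplate(
        processTemplateXml,
        "hdfs-mirroring",
        props,
        "/apps/falcon/extensions/hdfs-mirroring/workflow.xml",
        "/apps/falcon/extensions/hdfs-mirroring/lib");
  }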
  @Test
  public void testProcessDeleteAndSchedule() throws Exception {
    // Submit process with invalid property so that coord submit fails and bundle goes to failed
    // state
    Map<String, String> overlay = getUniqueOverlay();
    String tmpFileName = overlayParametersOverTemplate(PROCESS_TEMPLATE, overlay);
    Process process =
        (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
    Property prop = new Property();
    prop.setName("newProp");
    prop.setValue("${formatTim()}");
    process.getProperties().getProperties().add(prop);
    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    scheduleProcess(tmpFile.getAbsolutePath(), overlay);
    waitForBundleStart(Status.FAILED);

    // Delete and re-submit the process with correct workflow
    ClientResponse clientResponse =
        this.service
            .path("api/entities/delete/process/" + processName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .delete(ClientResponse.class);
    assertSuccessful(clientResponse);
    process.getWorkflow().setPath("/falcon/test/workflow");
    tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    clientResponse =
        this.service
            .path("api/entities/submitAndSchedule/process")
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .type(MediaType.TEXT_XML)
            .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
    assertSuccessful(clientResponse);

    // Assert that new schedule creates new bundle
    List<BundleJob> bundles = getBundles();
    Assert.assertEquals(bundles.size(), 2);
  }
  @Test(enabled = false)
  public void testOptionalInput() throws Exception {
    Map<String, String> overlay = getUniqueOverlay();
    String tmpFileName = overlayParametersOverTemplate(PROCESS_TEMPLATE, overlay);
    Process process =
        (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));

    Input in1 = process.getInputs().getInputs().get(0);
    Input in2 = new Input();
    in2.setFeed(in1.getFeed());
    in2.setName("input2");
    in2.setOptional(true);
    in2.setPartition(in1.getPartition());
    in2.setStart("now(-1,0)");
    in2.setEnd("now(0,0)");
    process.getInputs().getInputs().add(in2);

    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    scheduleProcess(tmpFile.getAbsolutePath(), overlay);
    waitForWorkflowStart(processName);
  }
Example #14
 // Tests a schedulable entity's lifecycle: submit -> schedule -> suspend -> resume
 @Test
 public void testLifeCycle() throws Exception {
   Process mockEntity = new Process();
   mockEntity.setName("test");
   storeEntity(EntityType.PROCESS, "test");
   StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUBMIT, listener);
   EntityState entityFromStore = AbstractStateStore.get().getAllEntities().iterator().next();
   Mockito.verify(listener).onSubmit(mockEntity);
   Assert.assertEquals(entityFromStore.getCurrentState(), EntityState.STATE.SUBMITTED);
   StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SCHEDULE, listener);
   Mockito.verify(listener).onSchedule(mockEntity);
   entityFromStore = AbstractStateStore.get().getAllEntities().iterator().next();
   Assert.assertEquals(entityFromStore.getCurrentState(), EntityState.STATE.SCHEDULED);
   StateService.get().handleStateChange(mockEntity, EntityState.EVENT.SUSPEND, listener);
   Mockito.verify(listener).onSuspend(mockEntity);
   entityFromStore = AbstractStateStore.get().getAllEntities().iterator().next();
   Assert.assertEquals(entityFromStore.getCurrentState(), EntityState.STATE.SUSPENDED);
   StateService.get().handleStateChange(mockEntity, EntityState.EVENT.RESUME, listener);
   Mockito.verify(listener).onResume(mockEntity);
   entityFromStore = AbstractStateStore.get().getAllEntities().iterator().next();
   Assert.assertEquals(entityFromStore.getCurrentState(), EntityState.STATE.SCHEDULED);
 }
  @Test(dependsOnMethods = "testOnAddFeedEntity")
  public void testOnAddProcessEntity() throws Exception {
    processEntity =
        addProcessEntity(
            PROCESS_ENTITY_NAME,
            clusterEntity,
            "classified-as=Critical",
            "testPipeline,dataReplication_Pipeline",
            GENERATE_WORKFLOW_NAME,
            WORKFLOW_VERSION);

    verifyEntityWasAddedToGraph(processEntity.getName(), RelationshipType.PROCESS_ENTITY);
    verifyProcessEntityEdges();

    // +4 = 1 process + 1 tag + 2 pipeline
    Assert.assertEquals(getVerticesCount(service.getGraph()), 17);
    // +9 = user, tag, cluster, 2 inputs, 2 outputs, 2 pipelines
    Assert.assertEquals(getEdgesCount(service.getGraph()), 31);
  }
  public void addProcessEntity(Process process) {
    String processName = process.getName();
    LOG.info("Adding process entity: {}", processName);
    Vertex processVertex = addVertex(processName, RelationshipType.PROCESS_ENTITY);
    addWorkflowProperties(process.getWorkflow(), processVertex, processName);

    addUserRelation(processVertex);
    addDataClassification(process.getTags(), processVertex);
    addPipelines(process.getPipelines(), processVertex);

    for (org.apache.falcon.entity.v0.process.Cluster cluster :
        process.getClusters().getClusters()) {
      addRelationToCluster(
          processVertex, cluster.getName(), RelationshipLabel.PROCESS_CLUSTER_EDGE);
    }

    addInputFeeds(process.getInputs(), processVertex);
    addOutputFeeds(process.getOutputs(), processVertex);
  }
Example #17
  private Entity buildProcess(String name, String username, String tags, String pipelines) {
    ACL acl = new ACL();
    acl.setOwner(username);
    acl.setGroup("hdfs");
    acl.setPermission("*");

    Process process = new Process();
    process.setName(name);
    process.setACL(acl);
    if (!StringUtils.isEmpty(pipelines)) {
      process.setPipelines(pipelines);
    }
    if (!StringUtils.isEmpty(tags)) {
      process.setTags(tags);
    }
    process.setClusters(buildClusters("cluster" + name));
    return process;
  }
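A minimal usage sketch for the builder above, assuming it runs in the same test class; the name, owner, tag, and pipeline values are placeholders (the pipeline string mirrors the one used in testOnAddProcessEntity), and publishing reuses the ConfigurationStore calls from the storeEntity helpers.

  @Test
  public void testBuildAndPublishProcess() throws Exception {
    // Build a process entity with placeholder ACL owner, tags, and pipelines.
    Process process =
        (Process)
            buildProcess(
                "sample-process",
                "falcon-user",
                "classified-as=Critical",
                "testPipeline,dataReplication_Pipeline");

    // Publish it to the in-memory store and read it back; listeners attached to
    // the store in a full deployment may impose additional validation.
    ConfigurationStore.get().publish(EntityType.PROCESS, process);
    Assert.assertNotNull(ConfigurationStore.get().get(EntityType.PROCESS, "sample-process"));
  }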
  private static org.apache.falcon.entity.v0.process.Process bindAttributesInTemplate(
      final String processTemplate,
      final Properties extensionProperties,
      final String extensionName,
      final String wfPath,
      final String wfLibPath)
      throws FalconException {
    if (StringUtils.isBlank(processTemplate) || extensionProperties == null) {
      throw new FalconException("Process template or properties cannot be null");
    }

    org.apache.falcon.entity.v0.process.Process process;
    try {
      Unmarshaller unmarshaller = EntityType.PROCESS.getUnmarshaller();
      // Validation can be skipped for unmarshalling as we want to bind the template with the
      // properties. Validation is handled as part of marshalling.
      unmarshaller.setSchema(null);
      unmarshaller.setEventHandler(
          new ValidationEventHandler() {
            public boolean handleEvent(ValidationEvent validationEvent) {
              return true;
            }
          });
      process =
          (org.apache.falcon.entity.v0.process.Process)
              unmarshaller.unmarshal(new StringReader(processTemplate));
    } catch (Exception e) {
      throw new FalconException(e);
    }

    /* For optional properties, the user might set them directly in the process XML rather than in the
       properties file. Before submission, validation confirms that the process XML does not contain
       EXTENSION_VAR_PATTERN.
    */

    String processName = extensionProperties.getProperty(ExtensionProperties.JOB_NAME.getName());
    if (StringUtils.isNotEmpty(processName)) {
      process.setName(processName);
    }

    // DR process template has only one cluster
    bindClusterProperties(process.getClusters().getClusters().get(0), extensionProperties);

    // bind scheduling properties
    String processFrequency =
        extensionProperties.getProperty(ExtensionProperties.FREQUENCY.getName());
    if (StringUtils.isNotEmpty(processFrequency)) {
      process.setFrequency(Frequency.fromString(processFrequency));
    }

    String zone = extensionProperties.getProperty(ExtensionProperties.TIMEZONE.getName());
    if (StringUtils.isNotBlank(zone)) {
      process.setTimezone(TimeZone.getTimeZone(zone));
    } else {
      process.setTimezone(TimeZone.getTimeZone("UTC"));
    }

    bindWorkflowProperties(process.getWorkflow(), extensionName, wfPath, wfLibPath);
    bindRetryProperties(process.getRetry(), extensionProperties);
    bindNotificationProperties(process.getNotification(), extensionProperties);
    bindACLProperties(process.getACL(), extensionProperties);
    bindTagsProperties(process, extensionProperties);
    bindCustomProperties(process.getProperties(), extensionProperties);

    return process;
  }
  public void updateProcessEntity(Process oldProcess, Process newProcess) {
    LOG.info("Updating process entity: {}", newProcess.getName());
    Vertex processEntityVertex = findVertex(oldProcess.getName(), RelationshipType.PROCESS_ENTITY);
    if (processEntityVertex == null) {
      LOG.error("Illegal State: Process entity vertex must exist for {}", oldProcess.getName());
      throw new IllegalStateException(oldProcess.getName() + " entity vertex must exist");
    }

    updateWorkflowProperties(
        oldProcess.getWorkflow(),
        newProcess.getWorkflow(),
        processEntityVertex,
        newProcess.getName());
    updateDataClassification(oldProcess.getTags(), newProcess.getTags(), processEntityVertex);
    updatePipelines(oldProcess.getPipelines(), newProcess.getPipelines(), processEntityVertex);
    updateProcessClusters(
        oldProcess.getClusters().getClusters(),
        newProcess.getClusters().getClusters(),
        processEntityVertex);
    updateProcessInputs(oldProcess.getInputs(), newProcess.getInputs(), processEntityVertex);
    updateProcessOutputs(oldProcess.getOutputs(), newProcess.getOutputs(), processEntityVertex);
  }
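Tying the pieces together, a hypothetical helper in the same graph-builder class could choose between the add and update paths above; it uses only calls already shown in these snippets (ConfigurationStore.get().get, addProcessEntity, updateProcessEntity) and is an illustration rather than Falcon's actual listener wiring.

  // Hypothetical convenience method; in Falcon proper the graph is refreshed
  // through the configuration store's change listeners rather than a call like this.
  public void onProcessDefinitionChange(Process newProcess) throws FalconException {
    Process oldProcess =
        ConfigurationStore.get().get(EntityType.PROCESS, newProcess.getName());
    if (oldProcess == null) {
      // First time we see this process: build its vertices and edges from scratch.
      addProcessEntity(newProcess);
      return;
    }
    updateProcessEntity(oldProcess, newProcess);
  }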