Example #1
  @AfterClass
  public static void teardown() throws Exception {
    // Unset FLINK_CONF_DIR, as it might change the behavior of other tests
    Map<String, String> map = new HashMap<>(System.getenv());
    map.remove(CliFrontend.ENV_CONFIG_DIRECTORY);
    TestBaseUtils.setEnv(map);

    // When we are on Travis, we copy the temporary JUnit files (containing the MiniYARNCluster
    // log files) to <flinkRoot>/target/flink-yarn-tests-*. The files are picked up from there
    // by the ./tools/travis_watchdog.sh script and uploaded to Amazon S3.
    if (isOnTravis()) {
      File target = new File("../target/" + yarnConfiguration.get(TEST_CLUSTER_NAME_KEY));
      if (!target.mkdirs()) {
        LOG.warn("Error creating dirs to {}", target);
      }
      File src = tmp.getRoot();
      LOG.info(
          "copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath());
      try {
        FileUtils.copyDirectoryToDirectory(src, target);
      } catch (IOException e) {
        LOG.warn(
            "Error copying the final files from {} to {}: msg: {}",
            src.getAbsolutePath(),
            target.getAbsolutePath(),
            e.getMessage(),
            e);
      }
    }
  }

  @Test
  public void testDynamicProperties() throws IOException {

    Map<String, String> map = new HashMap<String, String>(System.getenv());
    File tmpFolder = tmp.newFolder();
    File fakeConf = new File(tmpFolder, "flink-conf.yaml");
    fakeConf.createNewFile();
    map.put(ConfigConstants.ENV_FLINK_CONF_DIR, tmpFolder.getAbsolutePath());
    TestBaseUtils.setEnv(map);
    FlinkYarnSessionCli cli = new FlinkYarnSessionCli("", "", false);
    Options options = new Options();
    cli.addGeneralOptions(options);
    cli.addRunOptions(options);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
      cmd =
          parser.parse(
              options,
              new String[] {"run", "-j", "fake.jar", "-n", "15", "-D", "akka.ask.timeout=5 min"});
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail("Parsing failed with " + e.getMessage());
    }

    AbstractYarnClusterDescriptor flinkYarnDescriptor = cli.createDescriptor(null, cmd);

    Assert.assertNotNull(flinkYarnDescriptor);

    Map<String, String> dynProperties =
        FlinkYarnSessionCli.getDynamicProperties(flinkYarnDescriptor.getDynamicPropertiesEncoded());
    Assert.assertEquals(1, dynProperties.size());
    Assert.assertEquals("5 min", dynProperties.get("akka.ask.timeout"));
  }
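The teardown in this example assumes that an earlier setup step replaced the process environment through TestBaseUtils.setEnv and that the original environment can be reconstructed afterwards. A minimal sketch of that save-and-restore pattern is shown below; the class and member names (EnvRestoringTestBase, originalEnv) are hypothetical and only illustrate the idea.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.test.util.TestBaseUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class EnvRestoringTestBase {

  // Hypothetical field: snapshot of the JVM environment taken before any test runs.
  private static Map<String, String> originalEnv;

  @BeforeClass
  public static void snapshotEnvironment() {
    // System.getenv() is unmodifiable, so copy it into a mutable map for later restoration.
    originalEnv = new HashMap<>(System.getenv());
  }

  @AfterClass
  public static void restoreEnvironment() throws Exception {
    // TestBaseUtils.setEnv(...) swaps the process environment, as in the teardown above.
    TestBaseUtils.setEnv(originalEnv);
  }
}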
Example #3
  @Test
  public void getFrontPage() {
    try {
      String fromHTTP = TestBaseUtils.getFromHTTP("http://localhost:" + port + "/index.html");
      String text = "Apache Flink Dashboard";
      Assert.assertTrue("Startpage should contain " + text, fromHTTP.contains(text));
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
  }

  @Test
  public void testWithSimpleGraph() throws Exception {
    Graph<IntValue, NullValue, NullValue> graph =
        undirectedSimpleGraph.run(new MaximumDegree<IntValue, NullValue, NullValue>(3));

    String expectedVerticesResult =
        "(0,(null))\n" + "(1,(null))\n" + "(2,(null))\n" + "(4,(null))\n" + "(5,(null))";

    TestBaseUtils.compareResultAsText(graph.getVertices().collect(), expectedVerticesResult);

    String expectedEdgesResult =
        "(0,1,(null))\n"
            + "(0,2,(null))\n"
            + "(1,0,(null))\n"
            + "(1,2,(null))\n"
            + "(2,0,(null))\n"
            + "(2,1,(null))";

    TestBaseUtils.compareResultAsText(graph.getEdges().collect(), expectedEdgesResult);
  }
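Both tests in this example lean on TestBaseUtils helpers: getFromHTTP to fetch a page and compareResultAsText to check a collected result against a newline-separated expected string. The sketch below isolates the compareResultAsText pattern with a plain in-memory list instead of a collected DataSet; the class and method names are hypothetical, and the expected string simply mirrors the toString() form of the elements, as in the test above.

import java.util.Arrays;
import java.util.List;

import org.apache.flink.test.util.TestBaseUtils;
import org.junit.Test;

public class CompareResultAsTextSketch {

  @Test
  public void comparesResultAgainstExpectedText() throws Exception {
    // Stand-in for a collect()ed result; each line of `expected` is one element's toString().
    List<String> result = Arrays.asList("(0,(null))", "(1,(null))", "(2,(null))");
    String expected = "(0,(null))\n" + "(1,(null))\n" + "(2,(null))";

    TestBaseUtils.compareResultAsText(result, expected);
  }
}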
Example #5
  @Test
  public void getNumberOfTaskManagers() {
    try {
      String json = TestBaseUtils.getFromHTTP("http://localhost:" + port + "/taskmanagers/");
      JSONObject response = new JSONObject(json);
      JSONArray taskManagers = response.getJSONArray("taskmanagers");
      Assert.assertNotNull(taskManagers);
      Assert.assertEquals(cluster.numTaskManagers(), taskManagers.length());
    } catch (Throwable e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
  }
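The pattern here, fetching JSON from the web frontend with getFromHTTP and navigating it with org.json, recurs in the larger examples below. A small hypothetical helper that extracts just the reported TaskManager count might look like the following (the class and method names are illustrative, not part of the Flink test utilities):

import org.apache.flink.test.util.TestBaseUtils;
import org.json.JSONArray;
import org.json.JSONObject;

public final class TaskManagerCountFetcher {

  private TaskManagerCountFetcher() {}

  // Fetches <baseUrl>/taskmanagers/ and returns how many TaskManagers the JobManager reports.
  public static int fetchReportedTaskManagerCount(String baseUrl) throws Exception {
    String json = TestBaseUtils.getFromHTTP(baseUrl + "/taskmanagers/");
    JSONObject response = new JSONObject(json);
    JSONArray taskManagers = response.getJSONArray("taskmanagers");
    return taskManagers.length();
  }
}

With such a helper, the assertion in the test above reduces to comparing cluster.numTaskManagers() against the returned count.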
Example #6
  private static void start(Configuration conf, String principal, String keytab) {
    // Set the home directory to a temp directory. Flink on YARN uses the home dir to
    // distribute the files.
    File homeDir = null;
    try {
      homeDir = tmp.newFolder();
    } catch (IOException e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
    System.setProperty("user.home", homeDir.getAbsolutePath());
    String uberjarStartLoc = "..";
    LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc));
    flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter());
    Assert.assertNotNull("Flink uberjar not found", flinkUberjar);
    String flinkDistRootDir = flinkUberjar.getParentFile().getParent();
    flinkLibFolder = flinkUberjar.getParentFile(); // the uberjar is located in lib/
    Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder);
    Assert.assertTrue("lib folder not found", flinkLibFolder.exists());
    Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory());

    if (!flinkUberjar.exists()) {
      Assert.fail("Unable to locate yarn-uberjar.jar");
    }

    try {
      LOG.info("Starting up MiniYARNCluster");
      if (yarnCluster == null) {
        yarnCluster =
            new MiniYARNCluster(
                conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY), NUM_NODEMANAGERS, 1, 1);

        yarnCluster.init(conf);
        yarnCluster.start();
      }

      Map<String, String> map = new HashMap<String, String>(System.getenv());

      File flinkConfDirPath =
          findFile(flinkDistRootDir, new ContainsName(new String[] {"flink-conf.yaml"}));
      Assert.assertNotNull(flinkConfDirPath);

      if (!StringUtils.isBlank(principal) && !StringUtils.isBlank(keytab)) {
        // copy conf dir to test temporary workspace location
        tempConfPathForSecureRun = tmp.newFolder("conf");

        String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath();
        FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun);

        try (FileWriter fw =
                new FileWriter(new File(tempConfPathForSecureRun, "flink-conf.yaml"), true);
            BufferedWriter bw = new BufferedWriter(fw);
            PrintWriter out = new PrintWriter(bw)) {
          LOG.info("Writing keytab: {} and principal: {} to config file", keytab, principal);
          out.println("");
          out.println("#Security Configurations Auto Populated ");
          out.println(ConfigConstants.SECURITY_KEYTAB_KEY + ": " + keytab);
          out.println(ConfigConstants.SECURITY_PRINCIPAL_KEY + ": " + principal);
          out.println("");
        } catch (IOException e) {
          throw new RuntimeException(
              "Exception occured while trying to append the security configurations.", e);
        }

        String configDir = tempConfPathForSecureRun.getAbsolutePath();

        LOG.info(
            "Temporary Flink configuration directory to be used for secure test: {}", configDir);

        Assert.assertNotNull(configDir);

        map.put(ConfigConstants.ENV_FLINK_CONF_DIR, configDir);

      } else {
        map.put(ConfigConstants.ENV_FLINK_CONF_DIR, flinkConfDirPath.getParent());
      }

      File yarnConfFile = writeYarnSiteConfigXML(conf);
      map.put("YARN_CONF_DIR", yarnConfFile.getParentFile().getAbsolutePath());
      map.put("IN_TESTS", "yes we are in tests"); // see YarnClusterDescriptor() for more infos
      TestBaseUtils.setEnv(map);

      Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED);

      // wait for the nodeManagers to connect
      while (!yarnCluster.waitForNodeManagersToConnect(500)) {
        LOG.info("Waiting for Nodemanagers to connect");
      }
    } catch (Exception ex) {
      ex.printStackTrace();
      LOG.error("setup failure", ex);
      Assert.fail();
    }
  }

  /** Tests TaskManager failure. */
  @Test(timeout = 100000) // timeout after 100 seconds
  public void testTaskManagerFailure() {
    LOG.info("Starting testTaskManagerFailure()");
    Runner runner =
        startWithArgs(
            new String[] {
              "-j",
              flinkUberjar.getAbsolutePath(),
              "-t",
              flinkLibFolder.getAbsolutePath(),
              "-n",
              "1",
              "-jm",
              "768",
              "-tm",
              "1024",
              "-nm",
              "customName",
              "-Dfancy-configuration-value=veryFancy",
              "-Dyarn.maximum-failed-containers=3"
            },
            "Number of connected TaskManagers changed to 1. Slots available: 1",
            RunTypes.YARN_SESSION);

    Assert.assertEquals(2, getRunningContainers());

    // ------------------------ Test if JobManager web interface is accessible -------
    try {
      YarnClient yc = YarnClient.createYarnClient();
      yc.init(yarnConfiguration);
      yc.start();
      List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
      Assert.assertEquals(1, apps.size()); // Only one running
      ApplicationReport app = apps.get(0);
      Assert.assertEquals("customName", app.getName());
      String url = app.getTrackingUrl();
      if (!url.endsWith("/")) {
        url += "/";
      }
      if (!url.startsWith("http://")) {
        url = "http://" + url;
      }
      LOG.info("Got application URL from YARN {}", url);

      String response = TestBaseUtils.getFromHTTP(url + "taskmanagers/");
      JSONObject parsedTMs = new JSONObject(response);
      JSONArray taskManagers = parsedTMs.getJSONArray("taskmanagers");
      Assert.assertNotNull(taskManagers);
      Assert.assertEquals(1, taskManagers.length());
      Assert.assertEquals(1, taskManagers.getJSONObject(0).getInt("slotsNumber"));

      // Get the configuration from the web interface and check whether the dynamic
      // properties passed to YARN show up there.
      String jsonConfig = TestBaseUtils.getFromHTTP(url + "jobmanager/config");
      JSONArray parsed = new JSONArray(jsonConfig);
      Map<String, String> parsedConfig = WebMonitorUtils.fromKeyValueJsonArray(parsed);

      Assert.assertEquals("veryFancy", parsedConfig.get("fancy-configuration-value"));
      Assert.assertEquals("3", parsedConfig.get("yarn.maximum-failed-containers"));

      // -------------- FLINK-1902: check if jobmanager hostname/port are shown in web interface
      // first, get the hostname/port
      String oC = outContent.toString();
      Pattern p = Pattern.compile("Flink JobManager is now running on ([a-zA-Z0-9.-]+):([0-9]+)");
      Matcher matches = p.matcher(oC);
      String hostname = null;
      String port = null;
      while (matches.find()) {
        hostname = matches.group(1).toLowerCase();
        port = matches.group(2);
      }
      LOG.info("Extracted hostname:port: {} {}", hostname, port);

      Assert.assertEquals(
          "unable to find hostname in " + parsed,
          hostname,
          parsedConfig.get(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY));
      Assert.assertEquals(
          "unable to find port in " + parsed,
          port,
          parsedConfig.get(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY));

      // test logfile access
      String logs = TestBaseUtils.getFromHTTP(url + "jobmanager/log");
      Assert.assertTrue(logs.contains("Starting YARN ApplicationMaster/JobManager (Version"));
    } catch (Throwable e) {
      LOG.warn("Error while running test", e);
      Assert.fail(e.getMessage());
    }

    // ------------------------ Kill container with TaskManager  -------

    // find container id of taskManager:
    ContainerId taskManagerContainer = null;
    NodeManager nodeManager = null;
    UserGroupInformation remoteUgi = null;
    NMTokenIdentifier nmIdent = null;
    try {
      remoteUgi = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
      LOG.warn("Unable to get curr user", e);
      Assert.fail();
    }
    for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
      NodeManager nm = yarnCluster.getNodeManager(nmId);
      ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
      for (Map.Entry<ContainerId, Container> entry : containers.entrySet()) {
        String command = Joiner.on(" ").join(entry.getValue().getLaunchContext().getCommands());
        if (command.contains(YarnTaskManagerRunner.class.getSimpleName())) {
          taskManagerContainer = entry.getKey();
          nodeManager = nm;
          nmIdent =
              new NMTokenIdentifier(taskManagerContainer.getApplicationAttemptId(), null, "", 0);
          // allow myself to do stuff with the container
          // remoteUgi.addCredentials(entry.getValue().getCredentials());
          remoteUgi.addTokenIdentifier(nmIdent);
        }
      }
      sleep(500);
    }

    Assert.assertNotNull("Unable to find container with TaskManager", taskManagerContainer);
    Assert.assertNotNull("Illegal state", nodeManager);

    List<ContainerId> toStop = new LinkedList<ContainerId>();
    toStop.add(taskManagerContainer);
    StopContainersRequest scr = StopContainersRequest.newInstance(toStop);

    try {
      nodeManager.getNMContext().getContainerManager().stopContainers(scr);
    } catch (Throwable e) {
      LOG.warn("Error stopping container", e);
      Assert.fail("Error stopping container: " + e.getMessage());
    }

    // stateful termination check:
    // wait until we saw a container being killed and AFTERWARDS a new one launched
    boolean ok = false;
    do {
      LOG.debug("Waiting for correct order of events. Output: {}", errContent.toString());

      String o = errContent.toString();
      int killedOff = o.indexOf("Container killed by the ApplicationMaster");
      if (killedOff != -1) {
        o = o.substring(killedOff);
        ok = o.indexOf("Launching container") > 0;
      }
      sleep(1000);
    } while (!ok);

    // send "stop" command to command line interface
    runner.sendStop();
    // wait for the thread to stop
    try {
      runner.join(1000);
    } catch (InterruptedException e) {
      LOG.warn("Interrupted while stopping runner", e);
    }
    LOG.warn("stopped");

    // ----------- Send output to logger
    System.setOut(originalStdout);
    System.setErr(originalStderr);
    String oC = outContent.toString();
    String eC = errContent.toString();
    LOG.info("Sending stdout content through logger: \n\n{}\n\n", oC);
    LOG.info("Sending stderr content through logger: \n\n{}\n\n", eC);

    // ------ Check if everything happened correctly
    Assert.assertTrue(
        "Expect to see failed container", eC.contains("New messages from the YARN cluster"));
    Assert.assertTrue(
        "Expect to see failed container", eC.contains("Container killed by the ApplicationMaster"));
    Assert.assertTrue(
        "Expect to see new container started",
        eC.contains("Launching container") && eC.contains("on host"));

    // cleanup auth for the subsequent tests.
    remoteUgi.getTokenIdentifiers().remove(nmIdent);

    LOG.info("Finished testTaskManagerFailure()");
  }
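The "stateful termination check" in the middle of testTaskManagerFailure only passes once "Launching container" appears after "Container killed by the ApplicationMaster" in the captured stderr. That ordering check can be pulled out into a tiny helper; the sketch below is a hypothetical extraction of that logic, not existing Flink code.

public final class LogOrderCheck {

  private LogOrderCheck() {}

  // Returns true once `second` occurs somewhere after the first occurrence of `first`,
  // mirroring the killed-then-relaunched check in the test above.
  public static boolean sawInOrder(String log, String first, String second) {
    int firstPos = log.indexOf(first);
    if (firstPos == -1) {
      return false;
    }
    return log.indexOf(second, firstPos + first.length()) != -1;
  }
}

The polling loop in the test then only needs to call sawInOrder(errContent.toString(), "Container killed by the ApplicationMaster", "Launching container") until it returns true.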
Example #8
  public static void startYARNWithConfig(Configuration conf) {
    // Set the home directory to a temp directory. Flink on YARN uses the home dir to
    // distribute the files.
    File homeDir = null;
    try {
      homeDir = tmp.newFolder();
    } catch (IOException e) {
      e.printStackTrace();
      Assert.fail(e.getMessage());
    }
    System.setProperty("user.home", homeDir.getAbsolutePath());
    String uberjarStartLoc = "..";
    LOG.info("Trying to locate uberjar in {}", new File(uberjarStartLoc));
    flinkUberjar = findFile(uberjarStartLoc, new RootDirFilenameFilter());
    Assert.assertNotNull("Flink uberjar not found", flinkUberjar);
    String flinkDistRootDir = flinkUberjar.getParentFile().getParent();
    flinkLibFolder = flinkUberjar.getParentFile(); // the uberjar is located in lib/
    Assert.assertNotNull("Flink flinkLibFolder not found", flinkLibFolder);
    Assert.assertTrue("lib folder not found", flinkLibFolder.exists());
    Assert.assertTrue("lib folder not found", flinkLibFolder.isDirectory());

    if (!flinkUberjar.exists()) {
      Assert.fail("Unable to locate yarn-uberjar.jar");
    }

    try {
      LOG.info("Starting up MiniYARNCluster");
      if (yarnCluster == null) {
        yarnCluster =
            new MiniYARNCluster(
                conf.get(YarnTestBase.TEST_CLUSTER_NAME_KEY), NUM_NODEMANAGERS, 1, 1);

        yarnCluster.init(conf);
        yarnCluster.start();
      }

      Map<String, String> map = new HashMap<String, String>(System.getenv());

      File flinkConfDirPath =
          findFile(flinkDistRootDir, new ContainsName(new String[] {"flink-conf.yaml"}));
      Assert.assertNotNull(flinkConfDirPath);

      map.put(CliFrontend.ENV_CONFIG_DIRECTORY, flinkConfDirPath.getParent());

      File yarnConfFile = writeYarnSiteConfigXML(conf);
      map.put("YARN_CONF_DIR", yarnConfFile.getParentFile().getAbsolutePath());
      map.put("IN_TESTS", "yes we are in tests"); // see YarnClusterDescriptor() for more infos
      TestBaseUtils.setEnv(map);

      Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED);

      // wait for the nodeManagers to connect
      while (!yarnCluster.waitForNodeManagersToConnect(500)) {
        LOG.info("Waiting for Nodemanagers to connect");
      }
    } catch (Exception ex) {
      ex.printStackTrace();
      LOG.error("setup failure", ex);
      Assert.fail();
    }
  }
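Both startup helpers wait for the NodeManagers in an unbounded while loop and rely on the surrounding test timeout to abort if the cluster never comes up. A bounded variant is sketched below, assuming only the MiniYARNCluster.waitForNodeManagersToConnect(long) call already used above; the class and method names are hypothetical.

import org.apache.hadoop.yarn.server.MiniYARNCluster;

public final class YarnClusterWaits {

  private YarnClusterWaits() {}

  // Waits until all NodeManagers have registered with the ResourceManager, or until
  // `timeoutMillis` has elapsed. Returns true if they connected in time.
  public static boolean waitForNodeManagers(MiniYARNCluster cluster, long timeoutMillis)
      throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (System.currentTimeMillis() < deadline) {
      // waitForNodeManagersToConnect(500) blocks for up to 500 ms and reports whether
      // all NodeManagers are connected, exactly as in the loops above.
      if (cluster.waitForNodeManagersToConnect(500)) {
        return true;
      }
    }
    return false;
  }
}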