@RequestMapping(
      value = "",
      method = {RequestMethod.PUT})
  @ResponseBody
  /**
   * Updates an existing StreamingConfig and its companion KafkaConfig.
   *
   * @param streamingRequest carries the serialized streaming and kafka descriptors
   * @return the request, marked successful when both updates went through
   * @throws JsonProcessingException if descriptor serialization fails downstream
   */
  public StreamingRequest updateStreamingConfig(@RequestBody StreamingRequest streamingRequest)
      throws JsonProcessingException {
    StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
    KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);

    // The deserialize helpers flag the request with an error message on failure;
    // bail out early instead of handing a null config to either service below.
    if (streamingConfig == null || kafkaConfig == null) {
      return streamingRequest;
    }
    try {
      streamingConfig = streamingService.updateStreamingConfig(streamingConfig);
    } catch (AccessDeniedException accessDeniedException) {
      throw new ForbiddenException("You don't have right to update this StreamingConfig.");
    } catch (Exception e) {
      logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
      // Keep the cause so the original stack trace survives (matches the
      // convention used by the deserialize helpers in this class).
      throw new InternalErrorException(
          "Failed to deal with the request: " + e.getLocalizedMessage(), e);
    }
    try {
      kafkaConfig = kafkaConfigService.updateKafkaConfig(kafkaConfig);
    } catch (AccessDeniedException accessDeniedException) {
      throw new ForbiddenException("You don't have right to update this KafkaConfig.");
    } catch (Exception e) {
      logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
      throw new InternalErrorException(
          "Failed to deal with the request: " + e.getLocalizedMessage(), e);
    }

    streamingRequest.setSuccessful(true);

    return streamingRequest;
  }
 /**
  * Parses the kafka descriptor JSON carried on the request.
  *
  * @param streamingRequest request whose kafka config payload is deserialized
  * @return the parsed {@link KafkaConfig}, or {@code null} when the JSON is
  *     malformed (the request is then marked unsuccessful with the parse error)
  */
 private KafkaConfig deserializeKafkaSchemalDesc(StreamingRequest streamingRequest) {
   try {
     logger.debug("Saving KafkaConfig " + streamingRequest.getKafkaConfig());
     return JsonUtil.readValue(streamingRequest.getKafkaConfig(), KafkaConfig.class);
   } catch (JsonParseException e) {
     logger.error("The KafkaConfig definition is invalid.", e);
     updateRequest(streamingRequest, false, e.getMessage());
   } catch (JsonMappingException e) {
     logger.error("The data KafkaConfig definition is invalid.", e);
     updateRequest(streamingRequest, false, e.getMessage());
   } catch (IOException e) {
     logger.error("Failed to deal with the request.", e);
     throw new InternalErrorException("Failed to deal with the request:" + e.getMessage(), e);
   }
   // Reached only on a parse/mapping failure handled above.
   return null;
 }
  /**
   * Parses the table descriptor JSON carried on the request and normalizes its
   * name into separate database and table parts.
   *
   * @param streamingRequest request whose table data payload is deserialized
   * @return the parsed {@link TableDesc}, or {@code null} when the JSON is
   *     malformed (the request is then marked unsuccessful with the parse error)
   */
  private TableDesc deserializeTableDesc(StreamingRequest streamingRequest) {
    TableDesc desc = null;
    try {
      logger.debug("Saving TableDesc " + streamingRequest.getTableData());
      desc = JsonUtil.readValue(streamingRequest.getTableData(), TableDesc.class);
    } catch (JsonParseException e) {
      logger.error("The TableDesc definition is invalid.", e);
      updateRequest(streamingRequest, false, e.getMessage());
    } catch (JsonMappingException e) {
      logger.error("The data TableDesc definition is invalid.", e);
      updateRequest(streamingRequest, false, e.getMessage());
    } catch (IOException e) {
      logger.error("Failed to deal with the request.", e);
      throw new InternalErrorException("Failed to deal with the request:" + e.getMessage(), e);
    }

    // A parse/mapping failure leaves desc null (the request has already been
    // flagged above); return null instead of throwing an NPE on desc.getName().
    if (desc == null) {
      return null;
    }

    String[] dbTable = HadoopUtil.parseHiveTableName(desc.getName());
    desc.setName(dbTable[1]);
    desc.setDatabase(dbTable[0]);
    // NOTE(review): return value unused — presumably a pure getter; confirm
    // it has no side effects before removing this call.
    desc.getIdentity();
    return desc;
  }
 /**
  * Records the outcome of an operation on the request object.
  *
  * @param request the request to annotate
  * @param success whether the operation succeeded
  * @param message status or error detail to attach
  */
 private void updateRequest(StreamingRequest request, boolean success, String message) {
   request.setMessage(message);
   request.setSuccessful(success);
 }
  /**
   * Creates a streaming schema: persists the source table, then the
   * StreamingConfig and KafkaConfig, rolling back any partial save on failure.
   *
   * @param streamingRequest carries the serialized table/streaming/kafka descriptors
   * @return the request, marked successful when every artifact was saved
   * @throws java.io.IOException
   */
  @RequestMapping(
      value = "",
      method = {RequestMethod.POST})
  @ResponseBody
  public StreamingRequest saveStreamingConfig(@RequestBody StreamingRequest streamingRequest) {

    String project = streamingRequest.getProject();
    TableDesc tableDesc = deserializeTableDesc(streamingRequest);
    StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
    KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);

    // The deserialize helpers flag the request with an error message on
    // failure; stop here rather than NPE on a null descriptor below.
    if (tableDesc == null || streamingConfig == null || kafkaConfig == null) {
      return streamingRequest;
    }
    boolean saveStreamingSuccess = false, saveKafkaSuccess = false;

    try {
      tableDesc.setUuid(UUID.randomUUID().toString());
      MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
      metaMgr.saveSourceTable(tableDesc);
      cubeMgmtService.syncTableToProject(new String[] {tableDesc.getIdentity()}, project);
    } catch (IOException e) {
      // Log the cause before translating to a client-facing error.
      logger.error("Failed to add streaming table.", e);
      throw new BadRequestException("Failed to add streaming table.");
    }

    // Both configs are keyed by the table identity so the three artifacts stay linked.
    streamingConfig.setName(tableDesc.getIdentity());
    kafkaConfig.setName(tableDesc.getIdentity());
    try {
      if (StringUtils.isEmpty(streamingConfig.getName())) {
        logger.info("StreamingConfig should not be empty.");
        throw new BadRequestException("StreamingConfig name should not be empty.");
      }
      try {
        streamingConfig.setUuid(UUID.randomUUID().toString());
        streamingService.createStreamingConfig(streamingConfig);
        saveStreamingSuccess = true;
      } catch (IOException e) {
        logger.error("Failed to save StreamingConfig:" + e.getLocalizedMessage(), e);
        throw new InternalErrorException(
            "Failed to save StreamingConfig: " + e.getLocalizedMessage(), e);
      }
      try {
        kafkaConfig.setUuid(UUID.randomUUID().toString());
        kafkaConfigService.createKafkaConfig(kafkaConfig);
        saveKafkaSuccess = true;
      } catch (IOException e) {
        // Compensate immediately: the StreamingConfig was saved but the
        // KafkaConfig was not.
        try {
          streamingService.dropStreamingConfig(streamingConfig);
          // Already rolled back here — clear the flag so the finally block
          // does not attempt a second (and failing) rollback of the same config.
          saveStreamingSuccess = false;
        } catch (IOException e1) {
          throw new InternalErrorException(
              "StreamingConfig is created, but failed to create KafkaConfig: "
                  + e.getLocalizedMessage(), e1);
        }
        logger.error("Failed to save KafkaConfig:" + e.getLocalizedMessage(), e);
        throw new InternalErrorException(
            "Failed to save KafkaConfig: " + e.getLocalizedMessage(), e);
      }
    } finally {
      // Roll back any half-finished save so metadata stays consistent.
      if (!saveKafkaSuccess || !saveStreamingSuccess) {

        if (saveStreamingSuccess) {
          StreamingConfig sConfig =
              streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
          try {
            streamingService.dropStreamingConfig(sConfig);
          } catch (IOException e) {
            throw new InternalErrorException(
                "Action failed and failed to rollback the created streaming config: "
                    + e.getLocalizedMessage(), e);
          }
        }
        if (saveKafkaSuccess) {
          try {
            KafkaConfig kConfig = kafkaConfigService.getKafkaConfig(kafkaConfig.getName());
            kafkaConfigService.dropKafkaConfig(kConfig);
          } catch (IOException e) {
            throw new InternalErrorException(
                "Action failed and failed to rollback the created kafka config: "
                    + e.getLocalizedMessage(), e);
          }
        }
      }
    }
    streamingRequest.setSuccessful(true);
    return streamingRequest;
  }