@Override
public void close(TaskAttemptContext context) throws InterruptedException, IOException {
  LOG.info("SqoopOutputFormatLoadExecutor::SqoopRecordWriter is about to be closed");
  free.acquire();
  writerFinished = true;
  filled.release();
  waitForConsumer();
  LOG.info("SqoopOutputFormatLoadExecutor::SqoopRecordWriter is closed");
}
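Here close() performs the writer's half of a one-record hand-off between the record writer and the loader's consumer thread: it waits on the free semaphore until the last record has been consumed, marks writerFinished, releases filled so the blocked reader wakes up and can observe the flag, and then waits for the consumer thread to finish. The following is a minimal, self-contained sketch of that hand-off pattern; apart from free, filled and writerFinished, the class and member names are hypothetical and only illustrate the semaphore protocol, not the actual Sqoop classes:

import java.util.concurrent.Semaphore;

// Sketch of the one-slot hand-off used between the record writer and the
// loader consumer (illustrative only, not the real SqoopOutputFormatLoadExecutor).
public class HandoffSketch {
  private final Semaphore free = new Semaphore(1);   // slot is empty, writer may fill it
  private final Semaphore filled = new Semaphore(0); // slot has data, reader may take it
  private volatile Object slot;
  private volatile boolean writerFinished = false;

  // Writer side: called once per record.
  void write(Object record) throws InterruptedException {
    free.acquire();        // wait until the previous record was consumed
    slot = record;
    filled.release();      // hand the record to the consumer
  }

  // Writer side: mirrors the close() method above.
  void close() throws InterruptedException {
    free.acquire();        // wait for the last record to be consumed
    writerFinished = true; // tell the consumer no more data is coming
    filled.release();      // wake the consumer so it can observe the flag
  }

  // Consumer (loader) side: returns null once the writer is done.
  Object read() throws InterruptedException {
    filled.acquire();      // wait for the writer to hand something over
    if (writerFinished) {
      return null;         // end-of-data marker for the loader
    }
    Object record = slot;
    free.release();        // give the slot back to the writer
    return record;
  }
}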
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public void run() {
  LOG.info("SqoopOutputFormatLoadExecutor consumer thread is starting");
  try {
    DataReader reader = new SqoopOutputFormatDataReader();
    Configuration conf = context.getConfiguration();
    Loader loader = (Loader) ClassUtils.instantiate(loaderName);

    // Objects that should be passed to the Loader
    PrefixContext subContext = new PrefixContext(conf, MRJobConstants.PREFIX_CONNECTOR_TO_CONTEXT);
    Object connectorLinkConfig = MRConfigurationUtils.getConnectorLinkConfigUnsafe(Direction.TO, conf);
    Object connectorToJobConfig = MRConfigurationUtils.getConnectorJobConfigUnsafe(Direction.TO, conf);

    // Using the TO schema since the SqoopDataWriter in the SqoopMapper
    // encapsulates the toDataFormat.

    // Create the loader context
    LoaderContext loaderContext = new LoaderContext(subContext, reader, matcher.getToSchema());

    LOG.info("Running loader class " + loaderName);
    loader.load(loaderContext, connectorLinkConfig, connectorToJobConfig);
    LOG.info("Loader has finished");

    ((TaskAttemptContext) jobctx)
        .getCounter(SqoopCounters.ROWS_WRITTEN)
        .increment(loader.getRowsWritten());
  } catch (Throwable t) {
    readerFinished = true;
    LOG.error("Error while loading data out of MR job.", t);
    // Release so that the writer can tell Sqoop something went wrong.
    free.release();
    throw new SqoopException(SparkExecutionError.SPARK_EXEC_0018, t);
  }

  // If no exception has happened yet but the reader finished before the writer,
  // something went wrong: the data were not all consumed.
  if (!writerFinished) {
    readerFinished = true;
    LOG.error("Reader terminated, but writer is still running!");
    // Release so that the writer can tell Sqoop something went wrong.
    free.release();
    throw new SqoopException(SparkExecutionError.SPARK_EXEC_0019);
  }

  // Inform the writer that the reader is finished.
  readerFinished = true;
}
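The failure path in run() uses the same semaphores to signal back to the writer: when the loader throws, the consumer sets readerFinished and releases free, so a writer blocked in free.acquire() does not hang and the close()/waitForConsumer() path can surface the error. A small sketch, extending the hand-off class above with a readerFinished flag (again with hypothetical names, illustrating only the signalling rather than the real implementation):

// Extends HandoffSketch above; readerFinished mirrors the volatile flag
// set by the consumer thread when the loader fails or terminates early.
private volatile boolean readerFinished = false;

// Consumer side: called from the catch block when the loader throws.
void consumerFailed(Throwable t) {
  readerFinished = true; // record that the loader died
  free.release();        // unblock a writer stuck in free.acquire()
}

// Writer side: detects that the permit came from a failed consumer.
void writeChecked(Object record) throws InterruptedException {
  free.acquire();
  if (readerFinished) {
    // The permit came from consumerFailed(), not from a consumed record.
    throw new IllegalStateException("Loader thread terminated before consuming all records");
  }
  slot = record;
  filled.release();
}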