/**
 * Resets the local operator tree, then fetches rows for each aliased table and
 * forwards them through the corresponding operator pipeline. When the input is
 * bucket-file sensitive, the fetch context is rebound to the given big-table bucket.
 */
private void startForward(boolean inputFileChangeSenstive, String bigTableBucket)
    throws Exception {
  // reset all operators in the local work before forwarding rows
  for (Operator<?> source : work.getAliasToWork().values()) {
    source.reset();
  }
  if (inputFileChangeSenstive) {
    execContext.setCurrentBigBucketFile(bigTableBucket);
  }
  for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
    String alias = entry.getKey();
    FetchOperator fetchOp = entry.getValue();

    if (inputFileChangeSenstive) {
      fetchOp.clearFetchContext();
      setUpFetchOpContext(fetchOp, alias, bigTableBucket);
    }

    // get the root operator
    Operator<? extends OperatorDesc> forwardOp = work.getAliasToWork().get(alias);
    // walk through the operator tree, forwarding one fetched row at a time
    while (!forwardOp.getDone()) {
      InspectableObject row = fetchOp.getNextRow();
      if (row == null) {
        break;
      }
      forwardOp.process(row.o, 0);
    }
    // flush any rows still buffered in the operator tree for this alias
    forwardOp.flush();
  }
  // close all operators; 'false' means the task is not aborting
  for (Operator<?> source : work.getAliasToWork().values()) {
    source.close(false);
  }
}
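
/**
 * Runs the map-join local work in the current process: wires the execution
 * context into the fetch/forward pipeline above and reports timing and outcome.
 *
 * @return 0 on success, -1 when there is no local work to run, 2 on a generic
 *         runtime failure, and 3 when memory is exhausted.
 */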
public int executeInProcess(DriverContext driverContext) {
  // check the local work
  if (work == null) {
    return -1;
  }

  if (execContext == null) {
    execContext = new ExecMapperContext(job);
  }

  memoryMXBean = ManagementFactory.getMemoryMXBean();
  long startTime = System.currentTimeMillis();
  console.printInfo(Utilities.now()
      + "\tStarting to launch local task to process map join;\tmaximum memory = "
      + memoryMXBean.getHeapMemoryUsage().getMax());
  execContext.setJc(job);
  // set the local work, so all the operators can get this context
  execContext.setLocalWork(work);
  try {
    // no bucket-specific input here, so the forward pass is not input-file sensitive
    startForward(false, null);

    long currentTime = System.currentTimeMillis();
    long elapsed = currentTime - startTime;
    console.printInfo(Utilities.now() + "\tEnd of local task; Time Taken: "
        + Utilities.showTime(elapsed) + " sec.");
  } catch (Throwable throwable) {
    if (throwable instanceof OutOfMemoryError
        || (throwable instanceof MapJoinMemoryExhaustionException)) {
      l4j.error("Hive Runtime Error: Map local work exhausted memory", throwable);
      return 3;
    } else {
      l4j.error("Hive Runtime Error: Map local work failed", throwable);
      return 2;
    }
  }
  return 0;
}
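
// Illustrative sketch only (hypothetical caller, not part of this class): a driver
// that launches the local task in-process could branch on the return codes above.
// The names 'localTask' and 'driverContext' are assumptions for the example.
//
//   int ret = localTask.executeInProcess(driverContext);
//   if (ret == 3) {
//     // memory exhausted while running the map-join local work
//   } else if (ret != 0) {
//     // generic local-work failure (or no local work when ret == -1)
//   }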