Code example #1
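An instrumented closeOp() override from Hive's map-join path. The "ricardoj" comments are the instrumenting author's grep markers; the method samples JVM heap usage before and after the map-join hash tables are released.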
  @Override
  public void closeOp(boolean abort) throws HiveException {

    // ricardoj checking mapper hashmap usage
    memoryMXBean = ManagementFactory.getMemoryMXBean();
    // Request several GC passes so the reading reflects live objects only;
    // System.gc() is merely a hint, hence the repetition.
    for (int i = 0; i < 6; i++) {
      System.gc();
    }
    long usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed();
    LOG.info("ricardoj memory usage before deleting tables: " + usedMemory / (1024 * 1024) + "MB");

    // Close every map-join hash table, then drop the reference so the
    // backing memory becomes collectable.
    if (mapJoinTables != null) {
      for (HashMapWrapper<?, ?> hashTable : mapJoinTables.values()) {
        hashTable.close();
      }
    }
    mapJoinTables = null;

    // ricardoj
    for (int i = 0; i < 6; i++) {
      System.gc();
    }
    usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed();
    LOG.info("ricardoj memory usage after deleting tables: " + usedMemory / (1024 * 1024) + "MB");

    super.closeOp(abort);
  }
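The measurement pattern above, several System.gc() hints followed by a MemoryMXBean reading, works outside Hive as well. Below is a minimal, self-contained sketch; the class name HeapProbe and the 64 MB allocation are illustrative, not part of the original code.

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;

public class HeapProbe {

  /** Requests several GC passes, then samples used heap in megabytes. */
  static long usedHeapMb() {
    for (int i = 0; i < 6; i++) {
      System.gc(); // a hint only; the JVM is free to ignore it
    }
    MemoryMXBean bean = ManagementFactory.getMemoryMXBean();
    return bean.getHeapMemoryUsage().getUsed() / (1024 * 1024);
  }

  public static void main(String[] args) {
    long before = usedHeapMb();
    byte[] block = new byte[64 * 1024 * 1024]; // hold ~64 MB live
    long during = usedHeapMb();
    block = null; // drop the only reference so the block becomes collectable
    long after = usedHeapMb();
    System.out.printf("before=%dMB during=%dMB after=%dMB%n", before, during, after);
  }
}

Readings taken this way are approximate: System.gc() does not guarantee a full collection, which is presumably why the original code issues it repeatedly.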
Code example #2
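The loadHashTable() routine from the same operator: it resolves the directory holding the serialized hash-table dumps, either a local tmp directory or an archive shipped through the DistributedCache, then loads one dump per table alias. The ====code changed==== markers flag the one line that was modified, with the previous version kept as a comment.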
  private void loadHashTable() throws HiveException {

    // If the hash table does not depend on the current input file, it only
    // needs to be loaded once per task.
    if (!this.getExecContext().getLocalWork().getInputFileChangeSensitive()) {
      if (hashTblInitedOnce) {
        return;
      } else {
        hashTblInitedOnce = true;
      }
    }

    // Local mode keeps the dump files in the job's tmp directory instead of
    // the DistributedCache.
    boolean localMode = HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPJT).equals("local");
    String baseDir = null;

    String currentInputFile = HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME);
    LOG.info("******* Load from HashTable File: input : " + currentInputFile);

    String currentFileName;

    if (this.getExecContext().getLocalWork().getInputFileChangeSensitive()) {
      currentFileName = this.getFileName(currentInputFile);
    } else {
      currentFileName = "-";
    }

    try {
      if (localMode) {
        baseDir = this.getExecContext().getLocalWork().getTmpFileURI();
      } else {
        // Find the stage's archive among the locally cached DistributedCache
        // entries and use its local path as the base directory.
        String stageID = this.getExecContext().getLocalWork().getStageID();
        String suffix = Utilities.generateTarFileName(stageID);
        FileSystem localFs = FileSystem.getLocal(hconf);
        Path[] localArchives = DistributedCache.getLocalCacheArchives(this.hconf);
        for (Path archive : localArchives) {
          if (!archive.getName().endsWith(suffix)) {
            continue;
          }
          Path archiveLocalLink = archive.makeQualified(localFs);
          baseDir = archiveLocalLink.toUri().getPath();
        }
      }
      for (Map.Entry<Byte, HashMapWrapper<AbstractMapJoinKey, MapJoinObjectValue>> entry :
          mapJoinTables.entrySet()) {
        Byte pos = entry.getKey();
        HashMapWrapper<AbstractMapJoinKey, MapJoinObjectValue> hashtable = entry.getValue();
        // ====code changed====
        String filePath = Utilities.generatePath(baseDir, pos, currentFileName);
        // was: String filePath =
        //     Utilities.generatePath(baseDir, conf.getDumpFilePrefix(), pos, currentFileName);
        // ====code changed====
        Path path = new Path(filePath);
        LOG.info("\tLoad back 1 hashtable file from tmp file uri:" + path.toString());
        hashtable.initilizePersistentHash(path.toUri().getPath());
      }
    } catch (Exception e) {
      // Chain the original exception so its stack trace is not lost.
      LOG.error("Load Distributed Cache Error", e);
      throw new HiveException(e);
    }
  }
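The archive lookup in the non-local branch reduces to scanning the cached paths for one whose name ends with the stage-specific suffix. Here is a standalone sketch of just that step, using the same Hadoop APIs the method already calls; the helper name findArchiveBaseDir is hypothetical.

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ArchiveLookup {

  /**
   * Returns the local-filesystem path of the last cached archive whose name
   * ends with the given suffix, or null when nothing matches; this mirrors
   * the loop in loadHashTable() above.
   */
  static String findArchiveBaseDir(Path[] localArchives, String suffix, FileSystem localFs) {
    String baseDir = null;
    for (Path archive : localArchives) {
      if (!archive.getName().endsWith(suffix)) {
        continue;
      }
      // Qualify the relative cache path against the local filesystem.
      baseDir = archive.makeQualified(localFs).toUri().getPath();
    }
    return baseDir;
  }
}

Note that the loop keeps the last match rather than breaking on the first; the sketch preserves that behavior.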