/** Raise an error if the given query contains transforms. */
@Override
public void run(HookContext hookContext) throws Exception {
  HiveAuthzBinding hiveAuthzBinding = HiveAuthzBinding.get(hookContext.getConf());
  try {
    QueryPlan qPlan = hookContext.getQueryPlan();
    if ((qPlan == null) || (qPlan.getQueryProperties() == null)) {
      return;
    }
    // Validate server-level permission for transforms.
    if (qPlan.getQueryProperties().usesScript()) {
      if (hiveAuthzBinding == null) {
        LOG.warn("No authorization binding found, skipping the authorization for transform");
        return;
      }
      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
      inputHierarchy.add(serverHierarchy);
      hiveAuthzBinding.authorize(HiveOperation.QUERY,
          HiveAuthzPrivilegesMap.getHiveExtendedAuthzPrivileges(HiveExtendedOperation.TRANSFORM),
          new Subject(hookContext.getUserName()), inputHierarchy, outputHierarchy);
    }
  } finally {
    if (hiveAuthzBinding != null) {
      hiveAuthzBinding.clear(hookContext.getConf());
    }
  }
}
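A hook like this only takes effect once it is registered on Hive's hook chain. A minimal sketch of the wiring, assuming the hook implements ExecuteWithHookContext and using hive.exec.pre.hooks, Hive's standard property for pre-execution hooks (the class name com.example.SentryTransformHook is a placeholder, not the real hook's name):

import org.apache.hadoop.hive.conf.HiveConf;

public class HookRegistrationSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // hive.exec.pre.hooks takes a comma-separated list of hook classes that
    // Hive instantiates and runs before query execution; the class name
    // below is hypothetical.
    conf.set("hive.exec.pre.hooks", "com.example.SentryTransformHook");
  }
}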
private JSONObject getQueryPlan(HiveConf hiveConf, QueryPlan queryPlan) throws Exception {
  try {
    ExplainTask explain = new ExplainTask();
    explain.initialize(hiveConf, queryPlan, null);
    List<Task<?>> rootTasks = queryPlan.getRootTasks();
    return explain.getJSONPlan(null, null, rootTasks, queryPlan.getFetchTask(), true, false, false);
  } catch (Throwable e) {
    LOG.info("Failed to get query plan", e);
    return new JSONObject();
  }
}
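A short sketch of how a hook might call this helper to log the compiled plan. It is a fragment meant to live in the same class as the method above, and it assumes the hookContext and LOG fields seen in the surrounding snippets:

// Inside a hook's run(HookContext) method, after the plan null checks:
JSONObject planJson = getQueryPlan(hookContext.getConf(), hookContext.getQueryPlan());
// JSONObject.toString() renders the EXPLAIN output as a single JSON document.
LOG.info("Compiled plan for query " + hookContext.getQueryPlan().getQueryId()
    + ": " + planJson.toString());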
/** Get the FetchWork. Only SELECTs have them. */
private synchronized FetchWork getFetchWork() {
  QueryPlan plan = driver.getPlan();
  FetchTask fetchTask = null;
  if (plan != null) {
    fetchTask = plan.getFetchTask();
    if (fetchTask != null) {
      fetchTask.initialize(hiveConf, plan, null);
    }
  }
  return fetchTask == null ? null : fetchTask.getWork();
}
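Because only SELECT plans carry a FetchTask, a null return doubles as a cheap "does this statement produce a result set?" test. A hypothetical caller in the same class:

// A null FetchWork means the statement does not fetch rows back to the
// client (e.g. DDL or INSERT), so there is no result set to stream.
FetchWork fetch = getFetchWork();
boolean hasResultSet = (fetch != null);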
protected ExecutionMode getExecutionMode(QueryPlan plan) {
  int numMRJobs = Utilities.getMRTasks(plan.getRootTasks()).size();
  int numSparkJobs = Utilities.getSparkTasks(plan.getRootTasks()).size();
  int numTezJobs = Utilities.getTezTasks(plan.getRootTasks()).size();

  ExecutionMode mode = ExecutionMode.MR;
  if (0 == (numMRJobs + numSparkJobs + numTezJobs)) {
    mode = ExecutionMode.NONE;
  } else if (numSparkJobs > 0) {
    mode = ExecutionMode.SPARK;
  } else if (numTezJobs > 0) {
    mode = ExecutionMode.TEZ;
    // Need to go in and check if any of the tasks is running in LLAP mode.
    for (TezTask tezTask : Utilities.getTezTasks(plan.getRootTasks())) {
      if (tezTask.getWork().getLlapMode()) {
        mode = ExecutionMode.LLAP;
        break;
      }
    }
  }
  return mode;
}
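The precedence encoded above is: NONE when the plan has no execution tasks at all, otherwise Spark wins over Tez, Tez wins over MR, and a Tez plan is refined to LLAP if any of its tasks has LLAP mode enabled. A self-contained sketch of the same decision table using plain counts instead of Hive task lists (the enum mirrors the one above; everything else is illustrative):

enum ExecutionMode { NONE, MR, TEZ, LLAP, SPARK }

final class ExecutionModeSketch {
  // Same precedence as getExecutionMode; llapTezTasks stands in for the
  // number of Tez tasks whose work has LLAP mode enabled.
  static ExecutionMode classify(int mrTasks, int sparkTasks, int tezTasks, int llapTezTasks) {
    if (mrTasks + sparkTasks + tezTasks == 0) return ExecutionMode.NONE;
    if (sparkTasks > 0) return ExecutionMode.SPARK;
    if (tezTasks > 0) return llapTezTasks > 0 ? ExecutionMode.LLAP : ExecutionMode.TEZ;
    return ExecutionMode.MR;
  }

  public static void main(String[] args) {
    System.out.println(classify(0, 0, 0, 0)); // NONE
    System.out.println(classify(2, 0, 0, 0)); // MR
    System.out.println(classify(1, 1, 3, 0)); // SPARK (Spark wins over Tez/MR)
    System.out.println(classify(0, 0, 3, 1)); // LLAP
  }
}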
private List<HiveLockObj> getLockObjects(QueryPlan plan, Database db, Table t, Partition p,
    HiveLockMode mode) throws LockException {
  List<HiveLockObj> locks = new LinkedList<HiveLockObj>();

  HiveLockObject.HiveLockObjectData lockData =
      new HiveLockObject.HiveLockObjectData(plan.getQueryId(),
          String.valueOf(System.currentTimeMillis()),
          "IMPLICIT",
          plan.getQueryStr());

  if (db != null) {
    locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData), mode));
    return locks;
  }

  if (t != null) {
    locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
    mode = HiveLockMode.SHARED;
    locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
    return locks;
  }

  if (p != null) {
    if (!(p instanceof DummyPartition)) {
      locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
    }

    // All the parents are locked in shared mode.
    mode = HiveLockMode.SHARED;

    // For dummy partitions, only the partition name is needed.
    String name = p.getName();
    if (p instanceof DummyPartition) {
      name = p.getName().split("@")[2];
    }

    String partialName = "";
    String[] partns = name.split("/");
    int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
    Map<String, String> partialSpec = new LinkedHashMap<String, String>();
    for (int idx = 0; idx < len; idx++) {
      String partn = partns[idx];
      partialName += partn;
      String[] nameValue = partn.split("=");
      assert (nameValue.length == 2);
      partialSpec.put(nameValue[0], nameValue[1]);
      try {
        locks.add(new HiveLockObj(new HiveLockObject(
            new DummyPartition(p.getTable(),
                p.getTable().getDbName() + "/"
                    + MetaStoreUtils.encodeTableName(p.getTable().getTableName()) + "/"
                    + partialName,
                partialSpec),
            lockData), mode));
        partialName += "/";
      } catch (HiveException e) {
        throw new LockException(e.getMessage());
      }
    }

    locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
    locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
  }
  return locks;
}
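The loop above walks a partition name such as "ds=2024-01-01/hr=00" and emits one shared lock per ancestor prefix; for a real partition the last component is covered by the Partition lock itself, so only strict ancestors are expanded, while a dummy partition expands every component. A self-contained sketch of just that name expansion, independent of Hive's lock classes (all names here are illustrative):

import java.util.ArrayList;
import java.util.List;

final class PartitionAncestorsSketch {
  // Mirrors the partialName accumulation in getLockObjects: len is
  // components - 1 for real partitions, all components for dummy ones.
  static List<String> ancestors(String partName, boolean isDummy) {
    String[] parts = partName.split("/");
    int len = isDummy ? parts.length : parts.length - 1;
    List<String> out = new ArrayList<String>();
    StringBuilder partial = new StringBuilder();
    for (int i = 0; i < len; i++) {
      if (i > 0) partial.append('/');
      partial.append(parts[i]);
      out.add(partial.toString());
    }
    return out;
  }

  public static void main(String[] args) {
    // Prints [ds=2024-01-01]; hr=00 itself is locked via the Partition object.
    System.out.println(ancestors("ds=2024-01-01/hr=00", false));
    // Prints [ds=2024-01-01, ds=2024-01-01/hr=00] for a dummy partition.
    System.out.println(ancestors("ds=2024-01-01/hr=00", true));
  }
}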
@Override
public void acquireLocks(QueryPlan plan, Context ctx, String username) throws LockException {
  // Make sure we've built the lock manager.
  getLockManager();

  // If the lock manager is still null, it means we aren't using one.
  if (lockMgr == null) {
    return;
  }

  List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();

  // Sort all the inputs and outputs. If a lock needs to be acquired on any
  // partition, a read lock must also be acquired on all of its parents.
  for (ReadEntity input : plan.getInputs()) {
    if (!input.needsLock()) {
      continue;
    }
    LOG.debug("Adding " + input.getName() + " to list of lock inputs");
    if (input.getType() == ReadEntity.Type.DATABASE) {
      lockObjects.addAll(getLockObjects(plan, input.getDatabase(), null, null, HiveLockMode.SHARED));
    } else if (input.getType() == ReadEntity.Type.TABLE) {
      lockObjects.addAll(getLockObjects(plan, null, input.getTable(), null, HiveLockMode.SHARED));
    } else {
      lockObjects.addAll(getLockObjects(plan, null, null, input.getPartition(), HiveLockMode.SHARED));
    }
  }

  for (WriteEntity output : plan.getOutputs()) {
    HiveLockMode lockMode = getWriteEntityLockMode(output);
    if (lockMode == null) {
      continue;
    }
    LOG.debug("Adding " + output.getName() + " to list of lock outputs");
    List<HiveLockObj> lockObj = null;
    if (output.getType() == WriteEntity.Type.DATABASE) {
      lockObjects.addAll(getLockObjects(plan, output.getDatabase(), null, null, lockMode));
    } else if (output.getTyp() == WriteEntity.Type.TABLE) {
      lockObj = getLockObjects(plan, null, output.getTable(), null, lockMode);
    } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
      lockObj = getLockObjects(plan, null, null, output.getPartition(), lockMode);
    } else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
      // In case of dynamic queries, it is possible to have incomplete dummy partitions.
      lockObj = getLockObjects(plan, null, null, output.getPartition(), HiveLockMode.SHARED);
    }
    if (lockObj != null) {
      lockObjects.addAll(lockObj);
      ctx.getOutputLockObjects().put(output, lockObj);
    }
  }

  if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
    return;
  }

  dedupLockObjects(lockObjects);
  List<HiveLock> hiveLocks = lockMgr.lock(lockObjects, false);
  if (hiveLocks == null) {
    throw new LockException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
  }
  ctx.setHiveLocks(hiveLocks);
}
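dedupLockObjects itself is not shown in this section. A plausible sketch of its contract, under the assumption that duplicate requests are keyed by lock name and an EXCLUSIVE request should win over a SHARED one for the same object; this is an assumption for illustration, not a copy of Hive's implementation:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

final class LockDedupSketch {
  enum Mode { SHARED, EXCLUSIVE }

  static final class Lock {
    final String name;
    final Mode mode;
    Lock(String name, Mode mode) { this.name = name; this.mode = mode; }
  }

  // Assumption: when the same object is requested twice (e.g. a table locked
  // once as an input parent and once as an output), keep a single entry and
  // let EXCLUSIVE take precedence over SHARED.
  static List<Lock> dedup(List<Lock> locks) {
    Map<String, Lock> byName = new LinkedHashMap<String, Lock>();
    for (Lock l : locks) {
      Lock prev = byName.get(l.name);
      if (prev == null || l.mode == Mode.EXCLUSIVE) {
        byName.put(l.name, l);
      }
    }
    return new ArrayList<Lock>(byName.values());
  }
}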