static {
  CodeSource codeSrc = SikuliX.class.getProtectionDomain().getCodeSource();
  if (codeSrc != null && codeSrc.getLocation() != null) {
    URL jarURL = codeSrc.getLocation();
    jarPath = FileManager.slashify(new File(jarURL.getPath()).getAbsolutePath(), false);
    jarParentPath = (new File(jarPath)).getParent();
    if (jarPath.endsWith(".jar")) {
      runningFromJar = true;
    } else {
      jarPath += "/";
    }
  }
}
public MoquiStart(ClassLoader parent, boolean loadWebInf) {
  super(parent);
  this.loadWebInf = loadWebInf;

  URL wrapperWarUrl = null;
  try {
    // get outer file (the war file)
    pd = getClass().getProtectionDomain();
    CodeSource cs = pd.getCodeSource();
    wrapperWarUrl = cs.getLocation();
    outerFile = new JarFile(new File(wrapperWarUrl.toURI()));

    // allow for classes in the outerFile as well
    jarFileList.add(outerFile);

    Enumeration<JarEntry> jarEntries = outerFile.entries();
    while (jarEntries.hasMoreElements()) {
      JarEntry je = jarEntries.nextElement();
      if (je.isDirectory()) continue;
      // if we aren't loading the WEB-INF files and it is one, skip it
      if (!loadWebInf && je.getName().startsWith("WEB-INF")) continue;
      // get jars, can be anywhere in the file
      String jeName = je.getName().toLowerCase();
      if (jeName.lastIndexOf(".jar") == jeName.length() - 4) {
        File file = createTempFile(je);
        jarFileList.add(new JarFile(file));
      }
    }
  } catch (Exception e) {
    System.out.println("Error loading jars in war file [" + wrapperWarUrl + "]: " + e.toString());
  }
}
@Override
public void init(SpoutApplication args) {
  boolean inJar = false;
  try {
    CodeSource cs = SpoutClient.class.getProtectionDomain().getCodeSource();
    inJar = cs.getLocation().toURI().getPath().endsWith(".jar");
  } catch (URISyntaxException e) {
    e.printStackTrace();
  }
  if (inJar) {
    unpackLwjgl();
  }
  ExecutorService executorBoss =
      Executors.newCachedThreadPool(new NamedThreadFactory("SpoutServer - Boss", true));
  ExecutorService executorWorker =
      Executors.newCachedThreadPool(new NamedThreadFactory("SpoutServer - Worker", true));
  ChannelFactory factory = new NioClientSocketChannelFactory(executorBoss, executorWorker);
  bootstrap.setFactory(factory);
  ChannelPipelineFactory pipelineFactory = new CommonPipelineFactory(this, true);
  bootstrap.setPipelineFactory(pipelineFactory);
  super.init(args);
  getScheduler().startRenderThread();
}
/**
 * Returns the URL of the location of the given class's .class file.
 *
 * @param cls the class to locate
 * @return the URL of the class file location
 */
private static URL getClassLocationURL(final Class cls) {
  if (cls == null) throw new IllegalArgumentException("null input: cls");
  URL result = null;
  final String clsAsResource = cls.getName().replace('.', '/').concat(".class");
  final ProtectionDomain pd = cls.getProtectionDomain();
  if (pd != null) {
    final CodeSource cs = pd.getCodeSource();
    if (cs != null) result = cs.getLocation();
    if (result != null) {
      if ("file".equals(result.getProtocol())) {
        try {
          if (result.toExternalForm().endsWith(".jar") || result.toExternalForm().endsWith(".zip"))
            result =
                new URL("jar:".concat(result.toExternalForm()).concat("!/").concat(clsAsResource));
          else if (new File(result.getFile()).isDirectory())
            result = new URL(result, clsAsResource);
        } catch (MalformedURLException ignore) {
        }
      }
    }
  }
  if (result == null) {
    final ClassLoader clsLoader = cls.getClassLoader();
    result =
        clsLoader != null
            ? clsLoader.getResource(clsAsResource)
            : ClassLoader.getSystemResource(clsAsResource);
  }
  return result;
}
public StoredDataManager() {
  // obtain the location of the executable file (the code source)
  src = this.getClass().getProtectionDomain().getCodeSource();
  if (src != null) {
    try {
      url = new URL(src.getLocation(), PropFileName);
    } catch (MalformedURLException e) {
      // With this way of building the URL, this error should not occur.
      e.printStackTrace();
    }
    // URL of the file
    String path = url.getPath();
    try {
      // decode the URL to work around problems with spaces in the path
      decodedPath = URLDecoder.decode(path, "UTF-8");
      // finally obtain the settings file
      propFile = new File(decodedPath);
    } catch (UnsupportedEncodingException e) {
      // No reason for this error to occur here.
      e.printStackTrace();
    }
  }
  // load saved settings (if possible)
  if (propFile.canRead()) {
    try {
      props.load(new FileInputStream(propFile));
      loaded = true;
    } catch (IOException e) {
      // Nothing to do here. If there is no settings file, that is fine;
      // the defaults will be used.
      e.printStackTrace();
    }
  }
}
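// Illustrative sketch (not from the original sources): why the URLDecoder step above matters.
// Code-source paths containing spaces arrive percent-encoded; decoding restores the real path.
// Assumes Java 10+ for the Charset overload; the path literal is purely hypothetical.
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;

class DecodeDemo {
  public static void main(String[] args) {
    String raw = "/C:/Program%20Files/MyApp/app.properties";
    String decoded = URLDecoder.decode(raw, StandardCharsets.UTF_8);
    System.out.println(decoded); // prints: /C:/Program Files/MyApp/app.properties
  }
}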
private static List<File> createPossibleHomeDirList() {
  List<File> homeDirCheckList = new ArrayList<File>(4);

  // include codeSource dir in check list
  CodeSource lib = OCSSWRuntimeConfig.class.getProtectionDomain().getCodeSource();
  if (lib != null) {
    URL libUrl = lib.getLocation();
    if (libUrl.getProtocol().equals("file")) {
      String libPath = libUrl.getPath();
      File libParentDir = new File(libPath).getParentFile();
      if (libParentDir != null) {
        // include one above libParentDir
        if (libParentDir.getParentFile() != null) {
          homeDirCheckList.add(libParentDir.getParentFile());
        }
        // include libParentDir
        homeDirCheckList.add(libParentDir);
      }
    }
  }

  // include CWD in check list
  homeDirCheckList.add(new File(".").getAbsoluteFile());
  // include the src/test dir in check list
  homeDirCheckList.add(new File("src/test").getAbsoluteFile());

  return homeDirCheckList;
}
private static PrintWriter getPrintWriter(String saveLocation, String fileName) {
  CodeSource codeSource = GrepolisBot.class.getProtectionDomain().getCodeSource();
  File jarFile = null;
  try {
    jarFile = new File(codeSource.getLocation().toURI().getPath());
  } catch (URISyntaxException e) {
    e.printStackTrace();
  }
  String jarDir;
  if (jarFile != null) {
    jarDir = jarFile.getParentFile().getPath();
    File directory = new File(jarDir + File.separator + saveLocation + File.separator);
    // System.out.println("Directory: " + jarDir + File.separator + saveLocation + File.separator);
    if (!directory.exists()) {
      directory.mkdirs();
    }
    try {
      return new PrintWriter(jarDir + File.separator + saveLocation + File.separator + fileName);
    } catch (FileNotFoundException e) {
      System.out.println("Error saving " + fileName);
      return null;
    }
  }
  return null;
}
boolean isSelected(String className, ProtectionDomain protectionDomain) {
  CodeSource codeSource = protectionDomain.getCodeSource();

  if (codeSource == null
      || className.charAt(0) == '['
      || className.startsWith("mockit.")
      || className.startsWith("org.junit.")
      || className.startsWith("junit.")
      || className.startsWith("org.testng.")) {
    return false;
  }

  if (classesToExclude != null && classesToExclude.reset(className).matches()) {
    return false;
  } else if (classesToInclude != null && classesToInclude.reset(className).matches()) {
    return true;
  } else if (testCode != null && testCode.reset(className).matches()) {
    return false;
  }

  String location = codeSource.getLocation().getPath();

  return !location.endsWith(".jar")
      && !location.endsWith("/.cp/")
      && (testCode == null
          || !location.endsWith("/test-classes/") && !location.endsWith("/jmockit/main/classes/"));
}
public static ClassLoader newClassLoader(final Class... userClasses) throws Exception {
  Set<URL> userClassUrls = new HashSet<>();
  for (Class anyUserClass : userClasses) {
    ProtectionDomain protectionDomain = anyUserClass.getProtectionDomain();
    CodeSource codeSource = protectionDomain.getCodeSource();
    URL classLocation = codeSource.getLocation();
    userClassUrls.add(classLocation);
  }
  StringTokenizer tokenString =
      new StringTokenizer(System.getProperty("java.class.path"), File.pathSeparator);
  String pathIgnore = System.getProperty("java.home");
  if (pathIgnore == null) {
    pathIgnore = userClassUrls.iterator().next().toString();
  }
  List<URL> urls = new ArrayList<>();
  while (tokenString.hasMoreElements()) {
    String value = tokenString.nextToken();
    URL itemLocation = new File(value).toURI().toURL();
    if (!userClassUrls.contains(itemLocation) && itemLocation.toString().indexOf(pathIgnore) >= 0) {
      urls.add(itemLocation);
    }
  }
  URL[] urlArray = urls.toArray(new URL[urls.size()]);
  ClassLoader masterClassLoader = URLClassLoader.newInstance(urlArray, null);
  ClassLoader appClassLoader =
      URLClassLoader.newInstance(userClassUrls.toArray(new URL[0]), masterClassLoader);
  return appClassLoader;
}
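// Hypothetical usage sketch for newClassLoader above (not part of the original source);
// FooTest is a placeholder class assumed to be on the application class path.
// Because the user classes are reloaded by a fresh child loader, the resulting Class
// object is expected to differ from the one loaded by the original class loader.
ClassLoader isolated = newClassLoader(FooTest.class);
Class<?> reloaded = Class.forName(FooTest.class.getName(), true, isolated);
System.out.println(reloaded != FooTest.class); // expected: true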
public static String getSourceLocation(final Class<?> cls) {
  String exMsg = null;
  java.security.CodeSource codeSource = null;
  try {
    codeSource = cls.getProtectionDomain().getCodeSource();
  } catch (final Exception e) {
    exMsg = e.toString();
  }
  final URL classURL = codeSource == null ? getSourceURL(cls) : codeSource.getLocation();
  if (classURL == null) {
    return cls.getName() + ": (missing codeSource and classLoader)";
  }
  final String path = classURL.getPath();
  final File file = new File(path);
  if (!file.isFile()) {
    return cls.getName() + ": " + path + " (not a file)" + (exMsg == null ? "" : "; " + exMsg);
  }
  return String.format(
      "%s (SN: %s): %s (MD5: %s)%s",
      cls.getName(),
      getSerialVersionUID(cls),
      path,
      MD5Kit.md5sum(file),
      exMsg == null ? "" : "; " + exMsg);
}
/*
 * Match CodeSource to a CodeSigner[] in the signer cache.
 */
private CodeSigner[] findMatchingSigners(CodeSource cs) {
  if (cs instanceof VerifierCodeSource) {
    VerifierCodeSource vcs = (VerifierCodeSource) cs;
    if (vcs.isSameDomain(csdomain)) {
      return ((VerifierCodeSource) cs).getPrivateSigners();
    }
  }

  /*
   * In practice signers should always be optimized above
   * but this handles a CodeSource of any type, just in case.
   */
  CodeSource[] sources = mapSignersToCodeSources(cs.getLocation(), getJarCodeSigners(), true);
  List sourceList = new ArrayList();
  for (int i = 0; i < sources.length; i++) {
    sourceList.add(sources[i]);
  }
  int j = sourceList.indexOf(cs);
  if (j != -1) {
    CodeSigner[] match;
    match = ((VerifierCodeSource) sourceList.get(j)).getPrivateSigners();
    if (match == null) {
      match = emptySigner;
    }
    return match;
  }
  return null;
}
public void addClassDependency(Class<?> clazz) {
  CodeSource source = clazz.getProtectionDomain().getCodeSource();
  if (source == null) return;
  Path absolutePath = null;
  try {
    absolutePath = Paths.get(source.getLocation().toURI()).toAbsolutePath();
  } catch (URISyntaxException e) {
    e.printStackTrace();
  }
  globalDependencies.add(absolutePath);
}
/*
 * this code is workaround for subtle bug/feature in JDK1.3.1 and 1.4,
 * related to loading applets behind proxy
 */
protected PermissionCollection getPermissions(CodeSource codesource) {
  PermissionCollection sysPerms = null;
  Policy policy =
      (Policy)
          AccessController.doPrivileged(
              new PrivilegedAction() {
                public Object run() {
                  return Policy.getPolicy();
                }
              });
  if (policy != null) sysPerms = policy.getPermissions(new CodeSource(null, null));
  else sysPerms = new Permissions();

  final PermissionCollection perms = sysPerms;

  if (base != null && base.getHost() != null)
    perms.add(new SocketPermission(base.getHost() + ":1-", "accept,connect,resolve"));

  URL url = codesource.getLocation();
  if (url.getProtocol().equals("file")) {
    String path = url.getFile().replace('/', File.separatorChar);
    if (!path.endsWith(File.separator)) {
      int endIndex = path.lastIndexOf(File.separatorChar);
      if (endIndex != -1) {
        path = path.substring(0, endIndex + 1) + "-";
        perms.add(new FilePermission(path, "read"));
      }
    }
    perms.add(new SocketPermission("localhost", "connect,accept"));
    AccessController.doPrivileged(
        new PrivilegedAction() {
          public Object run() {
            try {
              String host = InetAddress.getLocalHost().getHostName();
              perms.add(new SocketPermission(host, "connect,accept"));
            } catch (UnknownHostException uhe) {
            }
            return null;
          }
        });

    if (base.getProtocol().equals("file")) {
      String bpath = base.getFile().replace('/', File.separatorChar);
      if (bpath.endsWith(File.separator)) {
        bpath += "-";
      }
      perms.add(new FilePermission(bpath, "read"));
    }
  }
  // for (Enumeration e = perms.elements(); e.hasMoreElements(); )
  //   System.err.println("p=" + e.nextElement());
  return perms;
}
private String getPathOfJar() {
  String jarDir = null;
  try {
    CodeSource codeSource = ClientController.class.getProtectionDomain().getCodeSource();
    File jarFile = new File(codeSource.getLocation().toURI().getPath());
    jarDir = jarFile.getParentFile().getPath();
  } catch (URISyntaxException e1) {
    e1.printStackTrace();
  }
  return jarDir;
}
private static URI getJarURI() throws URISyntaxException {
  final ProtectionDomain domain;
  final CodeSource source;
  final URL url;
  final URI uri;

  domain = ExtractExeToLocalMachine.class.getProtectionDomain();
  source = domain.getCodeSource();
  url = source.getLocation();
  uri = url.toURI();

  return (uri);
}
String getJarFromClass(Class<?> clz) {
  CodeSource source = clz.getProtectionDomain().getCodeSource();
  if (null == source) {
    throw new RuntimeException("Could not get CodeSource for class");
  }
  URL jarUrl = source.getLocation();
  String jar = jarUrl.getPath();
  if (!jar.endsWith(".jar")) {
    throw new RuntimeException("Need to have a jar to run mapreduce: " + jar);
  }
  return jar;
}
/**
 * Returns JAR archive structure.
 *
 * @param jarClass any class within the JAR
 * @param allowedExtensions list of extension filters
 * @param allowedPackgages list of allowed packages
 * @param listener jar download listener
 * @return JAR archive structure
 */
public static JarStructure getJarStructure(
    final Class jarClass,
    final List<String> allowedExtensions,
    final List<String> allowedPackgages,
    final FileDownloadListener listener) {
  try {
    final CodeSource src = jarClass.getProtectionDomain().getCodeSource();
    if (src != null) {
      // Creating structure

      // Source url
      final URL jarUrl = src.getLocation();
      final URI uri = jarUrl.toURI();

      // Source file
      final File jarFile;
      final String scheme = uri.getScheme();
      if (scheme != null && scheme.equalsIgnoreCase("file")) {
        // Local jar-file
        jarFile = new File(uri);
      } else {
        // Remote jar-file
        jarFile =
            FileUtils.downloadFile(
                jarUrl.toString(), File.createTempFile("jar_file", ".tmp"), listener);
      }

      // Creating
      final JarEntry jarEntry = new JarEntry(JarEntryType.jarEntry, jarFile.getName());
      final JarStructure jarStructure = new JarStructure(jarEntry);
      jarStructure.setJarLocation(jarFile.getAbsolutePath());

      // Reading all entries and parsing them into structure
      final ZipInputStream zip = new ZipInputStream(jarUrl.openStream());
      ZipEntry zipEntry;
      while ((zipEntry = zip.getNextEntry()) != null) {
        final String entryName = zipEntry.getName();
        if (isAllowedPackage(entryName, allowedPackgages)
            && (zipEntry.isDirectory() || isAllowedExtension(entryName, allowedExtensions))) {
          parseElement(jarEntry, entryName, zipEntry);
        }
      }
      zip.close();

      return jarStructure;
    }
  } catch (final IOException e) {
    FlatLafLogger.error(ReflectUtils.class, e);
  } catch (final URISyntaxException e) {
    FlatLafLogger.error(ReflectUtils.class, e);
  }
  return null;
}
/*--------------------------------------------------------------------------*/
private String getNativePath(String name, NativeLibraryClient client) {
  ProtectionDomain domain = client.getClass().getProtectionDomain();
  CodeSource codeSource = domain.getCodeSource();
  URL url = codeSource.getLocation();

  String path = url.getPath();
  path = path + nativeDirectory + '/' + name + extension;
  path = path.replace('/', File.separatorChar);

  // Revise the URI-path to a file path; needed in uninstaller because it
  // writes the jar contents into a sandbox; may be with blanks in the path.
  path = revisePath(path);

  return (path);
}
public static String getVersion(Class<?> cls, String defaultVersion) {
  try {
    // First, look for the version declared in the MANIFEST.MF
    String version = cls.getPackage().getImplementationVersion();
    if (version == null || version.length() == 0) {
      version = cls.getPackage().getSpecificationVersion();
    }
    if (version == null || version.length() == 0) {
      // If the manifest carries no version, derive it from the jar file name
      CodeSource codeSource = cls.getProtectionDomain().getCodeSource();
      if (codeSource == null) {
        LOGGER.info(
            "No codeSource for class "
                + cls.getName()
                + " when getVersion, use default version "
                + defaultVersion);
      } else {
        String file = codeSource.getLocation().getFile();
        if (file != null && file.length() > 0 && file.endsWith(".jar")) {
          file = file.substring(0, file.length() - 4);
          int i = file.lastIndexOf('/');
          if (i >= 0) {
            file = file.substring(i + 1);
          }
          i = file.indexOf("-");
          if (i >= 0) {
            file = file.substring(i + 1);
          }
          while (file.length() > 0 && !Character.isDigit(file.charAt(0))) {
            i = file.indexOf("-");
            if (i >= 0) {
              file = file.substring(i + 1);
            } else {
              break;
            }
          }
          version = file;
        }
      }
    }
    // Return the version, or the default version if none was found
    return version == null || version.length() == 0 ? defaultVersion : version;
  } catch (Throwable e) {
    // defensive: ignore any exception and return the default version
    LOGGER.error("return default version, ignore exception " + e.getMessage(), e);
    return defaultVersion;
  }
}
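// Hypothetical usage sketch for getVersion above (not part of the original source);
// SomeLibraryClass and the jar name are placeholders.
String version = getVersion(SomeLibraryClass.class, "unknown");
// For a code source such as ".../some-library-2.5.3.jar" the jar-name fallback strips the
// path and the non-numeric prefix, yielding "2.5.3"; with no manifest version and no
// parsable jar name, the default "unknown" is returned.
System.out.println(version);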
/**
 * Returns the URL of the location of the given class's .class file. This is the most basic method
 * in this class; the other methods build on it.
 */
private static URL getClassLocationURL(final Class cls) {
  if (cls == null) throw new IllegalArgumentException("null input: cls");
  URL result = null;
  final String clsAsResource = cls.getName().replace('.', '/').concat(".class");

  final ProtectionDomain pd = cls.getProtectionDomain();
  // java.lang.Class contract does not specify if 'pd' can ever be null;
  // it is not the case for Sun's implementations, but guard against null just in case:
  if (pd != null) {
    final CodeSource cs = pd.getCodeSource();
    // 'cs' can be null depending on the classloader behavior:
    if (cs != null) result = cs.getLocation();

    if (result != null) {
      // Convert a code source location into a full class file location
      // for some common cases:
      if ("file".equals(result.getProtocol())) {
        try {
          if (result.toExternalForm().endsWith(".jar") || result.toExternalForm().endsWith(".zip"))
            result =
                new URL("jar:".concat(result.toExternalForm()).concat("!/").concat(clsAsResource));
          else if (new File(result.getFile()).isDirectory())
            result = new URL(result, clsAsResource);
        } catch (MalformedURLException ignore) {
        }
      }
    }
  }

  if (result == null) {
    // Try to find 'cls' definition as a resource; this is not documented
    // to be legal, but Sun's implementations seem to allow this:
    final ClassLoader clsLoader = cls.getClassLoader();
    result =
        clsLoader != null
            ? clsLoader.getResource(clsAsResource)
            : ClassLoader.getSystemResource(clsAsResource);
  }
  return result;
}
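// Hypothetical usage sketch for getClassLocationURL above (not part of the original source);
// com.acme.Tool and both example paths are placeholders.
URL where = getClassLocationURL(com.acme.Tool.class);
// When the class is loaded from a jar:      "jar:file:/opt/app/lib/tool.jar!/com/acme/Tool.class"
// When loaded from an exploded directory:   "file:/opt/app/classes/com/acme/Tool.class"
System.out.println(where);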
public ChunkerFeatureExtractor() throws ClassNotFoundException, IOException {
  @SuppressWarnings("unchecked") // req for deserialize
  CodeSource src = this.getClass().getProtectionDomain().getCodeSource();
  String loc = src.getLocation().toString();
  File hmmFile =
      new File(
          loc.substring(5, loc.length() - 10) + "/objects/pos-en-general-brown.HiddenMarkovModel");
  // File hmmFile = new
  //     File("C:/Users/D059348/dev/HU/MaschinelleSprachverarbeitung/objects/pos-en-general-brown.HiddenMarkovModel");
  HiddenMarkovModel posHmm = (HiddenMarkovModel) AbstractExternalizable.readObject(hmmFile);
  FastCache<String, double[]> emissionCache = new FastCache<String, double[]>(100000);
  mPosTagger = new HmmDecoder(posHmm, null, emissionCache);
}
/**
 * Returns JAR location File for the specified class.
 *
 * @param jarClass any class from that JAR
 * @return JAR location File
 */
public static File getJarLocationFile(final Class jarClass) {
  try {
    final CodeSource src = jarClass.getProtectionDomain().getCodeSource();
    if (src != null) {
      final URL jarUrl = src.getLocation();
      final URI uri = jarUrl.toURI();
      final String scheme = uri.getScheme();
      if (scheme != null && scheme.equalsIgnoreCase("file")) {
        return new File(uri);
      }
    }
  } catch (final URISyntaxException e) {
    FlatLafLogger.error(ReflectUtils.class, e);
  }
  return null;
}
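// Hypothetical caller of getJarLocationFile above (not part of the original source);
// MyApp is a placeholder class. The null check matters because the method returns null both
// when there is no code source and when the location is not a local "file:" URI.
File jar = getJarLocationFile(MyApp.class);
if (jar != null) {
  System.out.println("Running from: " + jar.getAbsolutePath());
} else {
  System.out.println("Code source unavailable or not a local file.");
}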
private static List<TFM_CommandInfo> getCommands() {
  List<TFM_CommandInfo> commandList = new ArrayList<TFM_CommandInfo>();

  try {
    CodeSource codeSource = TotalFreedomMod.class.getProtectionDomain().getCodeSource();
    if (codeSource != null) {
      ZipInputStream zip = new ZipInputStream(codeSource.getLocation().openStream());
      ZipEntry zipEntry;
      while ((zipEntry = zip.getNextEntry()) != null) {
        String entryName = zipEntry.getName();
        Matcher matcher = COMMAND_PATTERN.matcher(entryName);
        if (matcher.find()) {
          try {
            Class<?> commandClass =
                Class.forName(TFM_CommandHandler.COMMAND_PATH + "." + matcher.group(1));
            CommandPermissions commandPermissions =
                commandClass.getAnnotation(CommandPermissions.class);
            CommandParameters commandParameters =
                commandClass.getAnnotation(CommandParameters.class);
            if (commandPermissions != null && commandParameters != null) {
              TFM_CommandInfo commandInfo =
                  new TFM_CommandInfo(
                      commandClass,
                      matcher.group(1).split("_")[1],
                      commandPermissions.level(),
                      commandPermissions.source(),
                      commandPermissions.blockHostConsole(),
                      commandParameters.description(),
                      commandParameters.usage(),
                      commandParameters.aliases());
              commandList.add(commandInfo);
            }
          } catch (ClassNotFoundException ex) {
            TFM_Log.severe(ex);
          }
        }
      }
    }
  } catch (IOException ex) {
    TFM_Log.severe(ex);
  }

  return commandList;
}
private void findAgent() {
  try {
    if (this.agent == null || this.agent.length == 0) {
      Class<?> loaded = Class.forName(SPRING_LOADED_AGENT_CLASSNAME);
      if (loaded != null) {
        if (this.noverify == null) {
          this.noverify = true;
        }
        CodeSource source = loaded.getProtectionDomain().getCodeSource();
        if (source != null) {
          this.agent = new File[] {new File(source.getLocation().getFile())};
        }
      }
    }
  } catch (ClassNotFoundException ex) {
    // ignore
  }
  if (this.noverify == null) {
    this.noverify = false;
  }
}
private URL getClassLocation(final Class<XmlTransformServer> classToFind) {
  URL result = null;
  if (classToFind == null) {
    throw new IllegalArgumentException("Class is null");
  }
  final String classAsResource = classToFind.getName().replace('.', '/').concat(".class");
  final ProtectionDomain pd = classToFind.getProtectionDomain();
  if (pd != null) {
    final CodeSource cs = pd.getCodeSource();
    if (cs != null) {
      result = cs.getLocation();
    }
    if (result != null) {
      // Convert a code source location into a full class file location
      if (result.getProtocol().equals("file")) {
        try {
          if (result.toExternalForm().endsWith(".jar")
              || result.toExternalForm().endsWith(".zip")) {
            result =
                new URL(
                    "jar:".concat(result.toExternalForm()).concat("!/").concat(classAsResource));
          } else if (new File(result.getFile()).isDirectory()) {
            result = new URL(result, classAsResource);
          }
        } catch (MalformedURLException ignore) {
          // do nothing
        }
      }
    }
  }
  if (result == null) {
    // Try to find class definition as a resource
    final ClassLoader classLoader = classToFind.getClassLoader();
    result =
        classLoader != null
            ? classLoader.getResource(classAsResource)
            : ClassLoader.getSystemResource(classAsResource);
  }
  return result;
}
public void addJarDependency(BOperatorInvocation op, Class<?> clazz) {
  CodeSource thisCodeSource = this.getClass().getProtectionDomain().getCodeSource();
  CodeSource source = clazz.getProtectionDomain().getCodeSource();
  if (null == source || thisCodeSource.equals(source)) {
    return;
  }
  Path absolutePath = null;
  try {
    absolutePath = Paths.get(source.getLocation().toURI()).toAbsolutePath();
  } catch (URISyntaxException e) {
    e.printStackTrace();
  }
  if (operatorToJarDependencies.containsKey(op)) {
    operatorToJarDependencies.get(op).add(absolutePath);
  } else {
    operatorToJarDependencies.put(op, new HashSet<Path>());
    operatorToJarDependencies.get(op).add(absolutePath);
  }
}
/**
 * Constructor.
 *
 * @param parent class loader parent.
 */
public JarClassLoader(ClassLoader parent) {
  super(parent);
  initLogger();

  hmClass = new HashMap<String, Class<?>>();
  lstJarFile = new ArrayList<JarFileInfo>();
  hsDeleteOnExit = new HashSet<File>();

  // Prepare common for all protocols
  String sUrlTopJar = null;
  pd = getClass().getProtectionDomain();
  CodeSource cs = pd.getCodeSource();
  URL urlTopJar = cs.getLocation();
  String protocol = urlTopJar.getProtocol();

  // Work with different cases:
  JarFileInfo jarFileInfo = null;
  if ("http".equals(protocol) || "https".equals(protocol)) {
    // Protocol 'http' - application launched from WebStart / JNLP or as Java applet
    try {
      // Convert:
      //   urlTopJar = "http://.../MyApp.jar" --> connection sun.net.www.protocol.http.HttpURLConnection
      // to
      //   urlTopJar = "jar:http://.../MyApp.jar!/" --> connection java.net.JarURLConnection
      urlTopJar = new URL("jar:" + urlTopJar + "!/");
      JarURLConnection jarCon = (JarURLConnection) urlTopJar.openConnection();
      JarFile jarFile = jarCon.getJarFile();
      jarFileInfo = new JarFileInfo(jarFile, jarFile.getName(), null, null);
      logInfo(LogArea.JAR, "Loading from top JAR: '%s' PROTOCOL: '%s'", urlTopJar, protocol);
    } catch (Exception e) { // ClassCastException, IOException
      logError(LogArea.JAR, "Failure to load HTTP JAR: %s %s", urlTopJar, e.toString());
      return;
    }
  }
  if ("file".equals(protocol)) {
    // Protocol 'file' - application launched from exploded dir or JAR
    // Decoding required for 'space char' in URL:
    //   URL.getFile() returns "/C:/my%20dir/MyApp.jar" for "/C:/my dir/MyApp.jar"
    try {
      sUrlTopJar = URLDecoder.decode(urlTopJar.getFile(), "UTF-8");
    } catch (UnsupportedEncodingException e) {
      logError(LogArea.JAR, "Failure to decode URL: %s %s", urlTopJar, e.toString());
      return;
    }
    File fileJar = new File(sUrlTopJar);

    // Application is loaded from directory:
    if (fileJar.isDirectory()) {
      logInfo(LogArea.JAR, "Loading from exploded directory: %s", sUrlTopJar);
      return; // JarClassLoader completed its job
    }

    // Application is loaded from a JAR:
    try {
      // The call "new JarFile(fileJar)" might throw IOException
      jarFileInfo = new JarFileInfo(new JarFile(fileJar), fileJar.getName(), null, null);
      logInfo(LogArea.JAR, "Loading from top JAR: '%s' PROTOCOL: '%s'", sUrlTopJar, protocol);
    } catch (IOException e) {
      logError(LogArea.JAR, "Not a JAR: %s %s", sUrlTopJar, e.toString());
      return;
    }
  }

  // FINALLY LOAD TOP JAR:
  try {
    if (jarFileInfo == null) {
      throw new IOException(String.format("Unknown protocol %s", protocol));
    }
    loadJar(jarFileInfo);
  } catch (IOException e) {
    logError(LogArea.JAR, "Not valid URL: %s %s", urlTopJar, e.toString());
    return;
  }

  checkShading();

  Runtime.getRuntime()
      .addShutdownHook(
          new Thread() {
            public void run() {
              shutdown();
            }
          });
} // JarClassLoader()
public PermissionCollection getPermissions(CodeSource codesource) {
  trace("Granting AllPermission to %s", codesource.getLocation());
  return all;
}
/**
 * Pre-analyze hook called after compilation and before semantic analysis. We extract information
 * for database- and metadata-level operations which is not captured in the input/output entities
 * during semantic analysis. Ideally this should be handled in Hive. We need to move most of this
 * into the Hive semantic analyzer and then remove it from the access hook.
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
    throws SemanticException {
  switch (ast.getToken().getType()) {
      // Hive parser doesn't capture the database name in output entity, so we store it here for now
    case HiveParser.TOK_CREATEDATABASE:
    case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
    case HiveParser.TOK_DROPDATABASE:
    case HiveParser.TOK_SWITCHDATABASE:
    case HiveParser.TOK_DESCDATABASE:
      currDB = new Database(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()));
      break;
    case HiveParser.TOK_CREATETABLE:
    case HiveParser.TOK_CREATEVIEW:
      /*
       * Compiler doesn't create read/write entities for create table.
       * Hence we need extract dbname from db.tab format, if applicable
       */
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      break;
    case HiveParser.TOK_DROPTABLE:
    case HiveParser.TOK_DROPVIEW:
    case HiveParser.TOK_SHOW_CREATETABLE:
    case HiveParser.TOK_ALTERTABLE_SERIALIZER:
    case HiveParser.TOK_ALTERVIEW_ADDPARTS:
    case HiveParser.TOK_ALTERVIEW_DROPPARTS:
    case HiveParser.TOK_ALTERVIEW_PROPERTIES:
    case HiveParser.TOK_ALTERVIEW_RENAME:
    case HiveParser.TOK_CREATEINDEX:
    case HiveParser.TOK_DROPINDEX:
    case HiveParser.TOK_LOCKTABLE:
    case HiveParser.TOK_UNLOCKTABLE:
      currTab = extractTable((ASTNode) ast.getFirstChildWithType(HiveParser.TOK_TABNAME));
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      break;
    case HiveParser.TOK_ALTERINDEX_REBUILD:
      currTab = extractTable((ASTNode) ast.getChild(0)); // type is not TOK_TABNAME
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      break;
    case HiveParser.TOK_SHOW_TABLESTATUS:
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      int children = ast.getChildCount();
      for (int i = 1; i < children; i++) {
        ASTNode child = (ASTNode) ast.getChild(i);
        if (child.getToken().getType() == HiveParser.Identifier) {
          currDB = new Database(child.getText());
          break;
        }
      }
      // loosening the requested privileges for possible wildcard tables, since
      // further authorization will be done at the filter step and those unwanted will
      // eventually be filtered out from the output
      currTab = Table.ALL;
      break;
    case HiveParser.TOK_ALTERTABLE_RENAME:
    case HiveParser.TOK_ALTERTABLE_PROPERTIES:
    case HiveParser.TOK_ALTERTABLE_DROPPARTS:
    case HiveParser.TOK_ALTERTABLE_RENAMECOL:
    case HiveParser.TOK_ALTERTABLE_ADDCOLS:
    case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
    case HiveParser.TOK_SHOW_TBLPROPERTIES:
    case HiveParser.TOK_SHOWINDEXES:
    case HiveParser.TOK_SHOWPARTITIONS:
      // token name TOK_TABNAME is not properly set in this case
      currTab = extractTable((ASTNode) ast.getChild(0));
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      break;
    case HiveParser.TOK_MSCK:
      // token name TOK_TABNAME is not properly set in this case and child(0) does
      // not contain the table name.
      // TODO: Fix Hive to capture the table and DB name
      currOutTab = extractTable((ASTNode) ast.getChild(1));
      currOutDB = extractDatabase((ASTNode) ast.getChild(0));
      break;
    case HiveParser.TOK_ALTERTABLE_ADDPARTS:
      /*
       * Compiler doesn't create read/write entities for create table.
       * Hence we need extract dbname from db.tab format, if applicable
       */
      currTab = extractTable((ASTNode) ast.getChild(0));
      currDB = extractDatabase((ASTNode) ast.getChild(0));
      partitionURI = extractPartition(ast);
      break;
    case HiveParser.TOK_CREATEFUNCTION:
      String udfClassName = BaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
      try {
        CodeSource udfSrc =
            Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader())
                .getProtectionDomain()
                .getCodeSource();
        if (udfSrc == null) {
          throw new SemanticException("Could not resolve the jar for UDF class " + udfClassName);
        }
        String udfJar = udfSrc.getLocation().getPath();
        if (udfJar == null || udfJar.isEmpty()) {
          throw new SemanticException(
              "Could not find the jar for UDF class " + udfClassName + " to validate privileges");
        }
        udfURI = parseURI(udfSrc.getLocation().toString(), true);
      } catch (ClassNotFoundException e) {
        throw new SemanticException("Error retrieving udf class:" + e.getMessage(), e);
      }
      // create/drop function is allowed with any database
      currDB = Database.ALL;
      break;
    case HiveParser.TOK_DROPFUNCTION:
      // create/drop function is allowed with any database
      currDB = Database.ALL;
      break;
    case HiveParser.TOK_LOAD:
      String dbName =
          BaseSemanticAnalyzer.unescapeIdentifier(
              ast.getChild(1).getChild(0).getChild(0).getText());
      currDB = new Database(dbName);
      break;
    case HiveParser.TOK_DESCTABLE:
      currDB = getCanonicalDb();
      // For DESCRIBE FORMATTED/EXTENDED ast will have an additional child node with value
      // "FORMATTED/EXTENDED".
      isDescTableBasic = (ast.getChildCount() == 1);
      break;
    case HiveParser.TOK_TRUNCATETABLE:
      // SENTRY-826:
      // Truncate empty partitioned table should throw SemanticException only if the
      // user does not have permission.
      // In postAnalyze, currOutDB and currOutTbl will be added into outputHierarchy
      // which will be validated in the hiveAuthzBinding.authorize method.
      Preconditions.checkArgument(ast.getChildCount() == 1);
      // childcount is 1 for table without partition, 2 for table with partitions
      Preconditions.checkArgument(ast.getChild(0).getChildCount() >= 1);
      Preconditions.checkArgument(ast.getChild(0).getChild(0).getChildCount() == 1);
      currOutDB = extractDatabase((ASTNode) ast.getChild(0));
      currOutTab = extractTable((ASTNode) ast.getChild(0).getChild(0).getChild(0));
      break;
    default:
      currDB = getCanonicalDb();
      break;
  }
  return ast;
}
private void addExtendHierarchy(
    HiveOperation hiveOp,
    HiveAuthzPrivileges stmtAuthPrivileges,
    List<List<DBModelAuthorizable>> inputHierarchyList,
    List<List<DBModelAuthorizable>> outputHierarchyList,
    String command,
    HiveAuthzBinding hiveAuthzBinding)
    throws HiveAuthzPluginException, HiveAccessControlException {
  String currDatabase = null;
  switch (stmtAuthPrivileges.getOperationScope()) {
    case SERVER:
      // validate server level privileges if applicable, e.g. create UDF, register jar, etc.
      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
      inputHierarchyList.add(serverHierarchy);
      break;
    case DATABASE:
      // workaround for metadata queries.
      if (EX_DB_ALL.contains(hiveOp)) {
        SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
        currDatabase = analyzer.getCurrentDb();
        List<DBModelAuthorizable> externalAuthorizableHierarchy =
            new ArrayList<DBModelAuthorizable>();
        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
        externalAuthorizableHierarchy.add(new Database(currDatabase));
        if (EX_DB_INPUT.contains(hiveOp)) {
          inputHierarchyList.add(externalAuthorizableHierarchy);
        } else {
          outputHierarchyList.add(externalAuthorizableHierarchy);
        }
      }
      break;
    case TABLE:
    case COLUMN:
      // workaround for drop table/view.
      if (EX_TB_ALL.contains(hiveOp)) {
        SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
        currDatabase = analyzer.getCurrentDb();
        String currTable = analyzer.getCurrentTb();
        List<DBModelAuthorizable> externalAuthorizableHierarchy =
            new ArrayList<DBModelAuthorizable>();
        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
        externalAuthorizableHierarchy.add(new Database(currDatabase));
        externalAuthorizableHierarchy.add(new Table(currTable));
        if (EX_TB_INPUT.contains(hiveOp)) {
          inputHierarchyList.add(externalAuthorizableHierarchy);
        } else if (META_TB_INPUT.contains(hiveOp)) {
          externalAuthorizableHierarchy.add(Column.SOME);
          inputHierarchyList.add(externalAuthorizableHierarchy);
        } else {
          outputHierarchyList.add(externalAuthorizableHierarchy);
        }
      }
      break;
    case FUNCTION:
      if (hiveOp.equals(HiveOperation.CREATEFUNCTION)) {
        SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
        currDatabase = analyzer.getCurrentDb();
        String udfClassName = analyzer.getCurrentTb();
        try {
          CodeSource udfSrc = Class.forName(udfClassName).getProtectionDomain().getCodeSource();
          if (udfSrc == null) {
            throw new HiveAuthzPluginException(
                "Could not resolve the jar for UDF class " + udfClassName);
          }
          String udfJar = udfSrc.getLocation().getPath();
          if (udfJar == null || udfJar.isEmpty()) {
            throw new HiveAuthzPluginException(
                "Could not find the jar for UDF class " + udfClassName + " to validate privileges");
          }
          AccessURI udfURI = SentryAuthorizerUtil.parseURI(udfSrc.getLocation().toString(), true);

          List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
          udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
          udfUriHierarchy.add(udfURI);
          inputHierarchyList.add(udfUriHierarchy);
        } catch (Exception e) {
          throw new HiveAuthzPluginException("Error retrieving udf class", e);
        }
      }
      break;
    case CONNECT:
      /*
       * The 'CONNECT' is an implicit privilege scope currently used for USE <db>.
       * It's allowed when the user has any privilege on the current database. For
       * application backward compatibility, we allow (optional) implicit connect
       * permission on the 'default' db.
       */
      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
      if (hiveOp.equals(HiveOperation.SWITCHDATABASE)) {
        currDatabase = command.split(" ")[1];
      }
      // by default allow connect access to default db
      Table currTbl = Table.ALL;
      Database currDB = new Database(currDatabase);
      Column currCol = Column.ALL;
      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDatabase)
          && "false"
              .equalsIgnoreCase(
                  authzConf.get(
                      HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
        currDB = Database.ALL;
        currTbl = Table.SOME;
      }
      connectHierarchy.add(currDB);
      connectHierarchy.add(currTbl);
      connectHierarchy.add(currCol);
      inputHierarchyList.add(connectHierarchy);
      break;
  }
}