/**
 * Handles activation of the currently selected table row: descends into the
 * entry when it is a folder, otherwise opens it for editing.
 */
public void enterPath() {
  // Resolve the FileObject previously attached to the selected table row.
  FileObject selected = (FileObject) table.getItem(currentRow).getData("fileObject");
  try {
    if (selected.getType().equals(FileType.FOLDER)) {
      // Folder: remember the selection for the current path, then descend into it.
      String parentPath = currentFileObject.getName().getPath();
      String childName = selected.getName().getBaseName();
      historyManager.setSelectedItem(parentPath, childName);
      currentFileObject = currentFileObject.getChild(childName);
      changeCurrentNode();
      textLocation.setText(currentFileObject.getName().getPath());
    } else {
      // Anything that is not a folder is opened for editing instead.
      editFile(selected);
    }
  } catch (Exception e) {
    // NOTE(review): failures are only dumped to stderr here — consider routing
    // them through the application's logging/error-dialog mechanism instead.
    e.printStackTrace();
  }
}
/** * Attempt to discover a valid Hadoop configuration from the provided folder. * * @param folder Folder that may represent a Hadoop configuration * @return A Hadoop configuration for the folder provided or null if none is found. * @throws ConfigurationException Error when loading the Hadoop configuration. */ protected HadoopConfiguration loadHadoopConfiguration(FileObject folder) throws ConfigurationException { ShimProperties configurationProperties = new ShimProperties(); try { FileObject configFile = folder.getChild(CONFIG_PROPERTIES_FILE); if (configFile != null) { configurationProperties.putAll(loadProperties(configFile)); } } catch (Exception ex) { throw new ConfigurationException( BaseMessages.getString( PKG, "Error.UnableToLoadConfigurationProperties", CONFIG_PROPERTIES_FILE)); } for (Entry<String, String> entry : configurationProperties.getPrefixedProperties("java.system").entrySet()) { System.setProperty(entry.getKey(), entry.getValue()); } try { // Parse all URLs from an optional classpath from the configuration file List<URL> classpathElements = parseURLs(folder, configurationProperties.getProperty(CONFIG_PROPERTY_CLASSPATH)); // Allow external configuration of classes to ignore String ignoredClassesProperty = configurationProperties.getProperty(CONFIG_PROPERTY_IGNORE_CLASSES); String[] ignoredClasses = null; if (ignoredClassesProperty != null) { ignoredClasses = ignoredClassesProperty.split(","); } // Pass our class loader in to the configurations' CL as its parent so it // can find the same // API classes we're using ClassLoader cl = createConfigurationLoader( folder, getClass().getClassLoader(), classpathElements, configurationProperties, ignoredClasses); verifyClasses( cl, configurationProperties.getProperty("required.classes"), configurationProperties.getProperty("name")); // Treat the Hadoop shim special. It is absolutely required for a Hadoop configuration. 
HadoopShim hadoopShim = null; List<PentahoHadoopShim> shims = new ArrayList<PentahoHadoopShim>(); // Attempt to locate a shim within this folder for (Class<? extends PentahoHadoopShim> shimType : SHIM_TYPES) { PentahoHadoopShim s = locateServiceImpl(cl, shimType); if (s == null && shimType.getAnnotation(Required.class) != null) { logger.warn( BaseMessages.getString(PKG, "Error.MissingRequiredShim", shimType.getSimpleName())); // Do not continue to load the configuration if we are missing a required shim return null; } if (HadoopShim.class.isAssignableFrom(shimType)) { hadoopShim = (HadoopShim) s; } else { shims.add(s); } } String id = folder.getName().getBaseName(); String name = configurationProperties.getProperty(CONFIG_PROPERTY_NAME, id); HadoopConfiguration config = new HadoopConfiguration( configurationProperties, folder, id, name, hadoopShim, shims.toArray(EMPTY_SHIM_ARRAY)); // Register native libraries after everything else has been loaded successfully registerNativeLibraryPaths(configurationProperties.getProperty(CONFIG_PROPERTY_LIBRARY_PATH)); hadoopShim.onLoad(config, fsm); return config; } catch (Throwable t) { throw new ConfigurationException( BaseMessages.getString(PKG, "Error.LoadingConfiguration"), t); } }