/** Initialize SecondaryNameNode. */
  private void initialize(Configuration conf) throws IOException {
    // initiate Java VM metrics
    JvmMetrics.init("SecondaryNameNode", conf.get("session.id"));

    // Create connection to the namenode.
    shouldRun = true;
    nameNodeAddr = NameNode.getAddress(conf);

    this.conf = conf;
    this.namenode =
        (NamenodeProtocol)
            RPC.waitForProxy(
                NamenodeProtocol.class, NamenodeProtocol.versionID, nameNodeAddr, conf);

    // initialize checkpoint directories
    fsName = getInfoServer();
    checkpointDirs = FSImage.getCheckpointDirs(conf, "/tmp/hadoop/dfs/namesecondary");
    checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, "/tmp/hadoop/dfs/namesecondary");
    checkpointImage = new CheckpointStorage(conf);
    checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs);

    // Initialize other scheduling parameters from the configuration
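    // Defaults below: checkpoint every 3600 seconds (one hour), or once the edit log reaches 4 MB.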
    checkpointPeriod = conf.getLong("fs.checkpoint.period", 3600);
    checkpointSize = conf.getLong("fs.checkpoint.size", 4194304);

    // initialize the webserver for uploading files.
    String infoAddr =
        NetUtils.getServerAddress(
            conf,
            "dfs.secondary.info.bindAddress",
            "dfs.secondary.info.port",
            "dfs.secondary.http.address");
    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
    infoBindAddress = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
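    // A configured port of 0 asks the HttpServer below to search for a free (ephemeral) port.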
    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf);
    infoServer.setAttribute("name.system.image", checkpointImage);
    this.infoServer.setAttribute("name.conf", conf);
    infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class);
    infoServer.start();

    // The web-server port can be ephemeral... ensure we have the correct info
    infoPort = infoServer.getPort();
    conf.set("dfs.secondary.http.address", infoBindAddress + ":" + infoPort);
    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);
    LOG.warn(
        "Checkpoint Period   :"
            + checkpointPeriod
            + " secs "
            + "("
            + checkpointPeriod / 60
            + " min)");
    LOG.warn(
        "Log Size Trigger    :"
            + checkpointSize
            + " bytes "
            + "("
            + checkpointSize / 1024
            + " KB)");
  }
  /**
   * Initialize the backend systems, the log handler and the restrictor. A subclass can tune this
   * step by overriding {@link #createRestrictor(String)} and {@link
   * #createLogHandler(ServletConfig, boolean)}
   *
   * @param pServletConfig servlet configuration
   */
  @Override
  public void init(ServletConfig pServletConfig) throws ServletException {
    super.init(pServletConfig);

    Configuration config = initConfig(pServletConfig);

    // Create a log handler early in the lifecycle, but not too early
    String logHandlerClass = config.get(ConfigKey.LOGHANDLER_CLASS);
    logHandler =
        logHandlerClass != null
            ? (LogHandler) ClassUtil.newInstance(logHandlerClass)
            : createLogHandler(pServletConfig, Boolean.valueOf(config.get(ConfigKey.DEBUG)));

    // Different HTTP request handlers
    httpGetHandler = newGetHttpRequestHandler();
    httpPostHandler = newPostHttpRequestHandler();

    if (restrictor == null) {
      restrictor =
          createRestrictor(NetworkUtil.replaceExpression(config.get(ConfigKey.POLICY_LOCATION)));
    } else {
      logHandler.info("Using custom access restriction provided by " + restrictor);
    }
    configMimeType = config.get(ConfigKey.MIME_TYPE);
    backendManager = new BackendManager(config, logHandler, restrictor);
    requestHandler = new HttpRequestHandler(config, backendManager, logHandler);

    initDiscoveryMulticast(config);
  }
 /**
  * Gets the URLClassLoader that gives access to the objects' jar files.
  *
  * @return a URLClassLoader
  */
 public URLClassLoader getObjectsClassLoader() {
   if (objectsClassLoader == null) {
     try {
       if (isExecutionMode()) {
         URL[] listUrl = new URL[1];
         listUrl[0] = instance.getTangaraPath().toURI().toURL();
         objectsClassLoader = new URLClassLoader(listUrl);
       } else {
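          // Not in execution mode: collect every jar under objects/ and objects/lib.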
          File f = new File(instance.getTangaraPath().getParentFile(), "objects");
          File[] list = f.listFiles();
          Vector<URL> vector = new Vector<URL>();
          if (list != null) {
            for (int i = 0; i < list.length; i++) {
              if (list[i].getName().endsWith(".jar")) vector.add(list[i].toURI().toURL());
            }
          }
          File flib =
              new File(
                  instance.getTangaraPath().getParentFile().getAbsolutePath().replace("\\", "/")
                      + "/objects/lib");
          File[] listflib = flib.listFiles();
          if (listflib != null) {
            for (int j = 0; j < listflib.length; j++) {
              if (listflib[j].getName().endsWith(".jar")) vector.add(listflib[j].toURI().toURL());
            }
          }
         URL[] listUrl = new URL[vector.size()];
         for (int j = 0; j < vector.size(); j++) listUrl[j] = vector.get(j);
         objectsClassLoader = new URLClassLoader(listUrl);
       }
     } catch (Exception e1) {
       displayError("URL MAL FORMED " + e1);
       return null;
     }
   }
   return objectsClassLoader;
 }
 void setConfig(Configuration config) {
   log.debug("config: " + config);
   proxyHost = config.get(PARAM_PROXY_HOST);
   proxyPort = config.getInt(PARAM_PROXY_PORT, DEFAULT_PROXY_PORT);
   if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
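      // No usable proxy configured; fall back to the http_proxy environment variable.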
     String http_proxy = System.getenv("http_proxy");
     if (!StringUtil.isNullString(http_proxy)) {
       try {
         HostPortParser hpp = new HostPortParser(http_proxy);
         proxyHost = hpp.getHost();
         proxyPort = hpp.getPort();
       } catch (HostPortParser.InvalidSpec e) {
         log.warning("Can't parse http_proxy environment var, ignoring: " + http_proxy + ": " + e);
       }
     }
   }
   if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
     proxyHost = null;
   } else {
     log.info("Proxying through " + proxyHost + ":" + proxyPort);
   }
   userAgent = config.get(PARAM_USER_AGENT);
   if (StringUtil.isNullString(userAgent)) {
     userAgent = null;
   } else {
     log.debug("Setting User-Agent to " + userAgent);
   }
 }
 // Called by RegistryPlugin iff any config below RegistryPlugin.PREFIX
 // has changed
 protected void setConfig(
     Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) {
   m_maxRefetchDepth =
       config.getInt(
           NewContentCrawler.PARAM_MAX_CRAWL_DEPTH, NewContentCrawler.DEFAULT_MAX_CRAWL_DEPTH);
   fetchRateLimiter = recomputeFetchRateLimiter(fetchRateLimiter);
   enablePolls = config.getBoolean(PARAM_ENABLE_REGISTRY_POLLS, DEFAULT_ENABLE_REGISTRY_POLLS);
 }
 public static String getRepositorySpec(ArchivalUnit au) {
   Configuration auConfig = au.getConfiguration();
   if (auConfig != null) { // can be null in unit tests
     String repoSpec = auConfig.get(PluginManager.AU_PARAM_REPOSITORY);
     if (repoSpec != null && repoSpec.startsWith("local:")) {
       return repoSpec;
     }
   }
   return "local:" + CurrentConfig.getParam(PARAM_CACHE_LOCATION);
 }
  // Examines servlet config and servlet context for configuration parameters.
  // Configuration from the servlet context overrides servlet parameters defined in web.xml
  Configuration initConfig(ServletConfig pConfig) {
    Configuration config =
        new Configuration(ConfigKey.AGENT_ID, NetworkUtil.getAgentId(hashCode(), "servlet"));
     // From the ServletConfig ...
     config.updateGlobalConfiguration(new ServletConfigFacade(pConfig));
     // ... and the ServletContext
     config.updateGlobalConfiguration(new ServletContextFacade(getServletContext()));

     // Set the agent type last so it overrides anything set previously
    config.updateGlobalConfiguration(
        Collections.singletonMap(ConfigKey.AGENT_TYPE.getKeyValue(), "servlet"));
    return config;
  }
 /*
  * Iterate over the sites in the given configuration and remove the one which
  * has a url matching the given location.
  */
 public boolean removeSite(Configuration configuration, String location)
     throws IOException, URISyntaxException {
   File left = new File(new URI(location)).getCanonicalFile();
   List sites = configuration.getSites();
   for (Iterator iter = sites.iterator(); iter.hasNext(); ) {
     Site tempSite = (Site) iter.next();
     String siteURL = tempSite.getUrl();
     File right = new File(new URI(siteURL)).getCanonicalFile();
     if (left.equals(right)) {
       return configuration.removeSite(tempSite);
     }
   }
   return false;
 }
 /** Loads the configuration for log4j from the file log4j.properties */
 public void configureLogging() {
    // BasicConfigurator is used to quickly configure log4j: it adds a ConsoleAppender
    // that uses PatternLayout with PatternLayout.TTCC_CONVERSION_PATTERN
    // and prints to System.out, to the root category.
    BasicConfigurator.configure();
    // Build the name of the level-specific log4j properties file to load.
   String fileName = "log4j_" + Configuration.instance().getLogLevel() + ".properties";
   System.out.println("Loading logging configuration file: " + fileName);
   URL url = Main.class.getResource(fileName);
   if (url == null) {
     System.out.println(
         "Logging configuration file not found - loading file: log4j_off.properties");
     url = Main.class.getResource("log4j_off.properties"); // $NON-NLS-1$
     if (url == null) {
       System.err.println("No logging configuration found");
       return;
     }
   }
   // PropertyConfigurator allows the configuration of log4j from an
   // external file
   // It will read configuration options from URL url.
   PropertyConfigurator.configure(url);
   LOG = Logger.getLogger(Configuration.class);
 }
  // For the war agent, discovery needs to be switched on explicitly
 private boolean listenForDiscoveryMcRequests(Configuration pConfig) {
   // Check for system props, system env and agent config
   boolean sysProp =
       System.getProperty("jolokia." + ConfigKey.DISCOVERY_ENABLED.getKeyValue()) != null;
   boolean env = System.getenv("JOLOKIA_DISCOVERY") != null;
   boolean config = pConfig.getAsBoolean(ConfigKey.DISCOVERY_ENABLED);
   return sysProp || env || config;
 }
  /**
   * Creates a new help window associated with the JMenuItem passed as parameter.
   *
   * @param help the menu item to which the help window is attached
   */
  public HelpWindow(JMenuItem help) {
    File path = new File(Configuration.instance().getTangaraPath().getParentFile(), "Help");

    File log = new File(path, LOG_FILE);

    if (!log.exists()) {
      // Log file does not exist: create the help set
      createJavaHelp(path, Configuration.instance().getLanguage());
    } else {
      // Log file exists: check that the help set corresponds to the current language
      try {
        BufferedReader reader = new BufferedReader(new FileReader(log));
        String ligne = reader.readLine();
        if (ligne != null) {
          StringTokenizer st = new StringTokenizer(ligne);
          if (st.nextToken().equals("Language")
              && !st.nextToken().equals(Configuration.instance().getLanguage()))
            // Language has changed: we re-create the help set
            createJavaHelp(path, Configuration.instance().getLanguage());
        }
        reader.close();
      } catch (Exception e) {
        LOG.error("Error while reading log file " + e);
      }
    }

    // Set up the help viewer
    // FIXME help deactivated
    //		try {
    //			URL [] list = new URL[1];
    //			list[0] = path.toURI().toURL();
    //
    //			ClassLoader cl = new URLClassLoader(list);
    //			URL url = HelpSet.findHelpSet(cl, HELPSET_NAME, new
    // Locale(Configuration.instance().getLanguage()));
    //			HelpSet hs = new HelpSet(cl, url);
    //
    //			HelpBroker hb = hs.createHelpBroker();
    //
    //			CSH.setHelpIDString(help, FIRST_PAGE);
    //
    //			help.addActionListener(new CSH.DisplayHelpFromSource(hb));
    //		} catch (Exception e1) {
    //			LOG.error("Error1 " + e1);
    //		}
  }
 public static InetSocketAddress getAddress(Configuration conf) {
   String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
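    // Expected form is "host:port", e.g. the default "localhost:8012".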
   int colon = jobTrackerStr.indexOf(":");
   if (colon < 0) {
     throw new RuntimeException("Bad mapred.job.tracker: " + jobTrackerStr);
   }
   String jobTrackerName = jobTrackerStr.substring(0, colon);
   int jobTrackerPort = Integer.parseInt(jobTrackerStr.substring(colon + 1));
   return new InetSocketAddress(jobTrackerName, jobTrackerPort);
 }
  /**
    * Gets the name of an object in the spoken language from its jar file
   *
   * @param jarName the file that contains the object classes
   * @return the object name in the spoken language
   */
  private String getLangName(File jarName) {
    String name = null;

    try {
      URL url = jarName.toURI().toURL();
      JarInputStream jarFile = new JarInputStream(url.openStream());
      JarEntry jarEntry = jarFile.getNextJarEntry();
      while (jarEntry != null) {
        if (!jarEntry.isDirectory()
            && jarEntry.getName().contains(Configuration.instance().getLanguage())) {
          int lang_index = jarEntry.getName().lastIndexOf(Configuration.instance().getLanguage());
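          // Assumes entries are named like "..._<lang>Name.class": +3 skips the two-letter
          // language code plus separator, -6 drops the ".class" suffix.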
          name = jarEntry.getName().substring(lang_index + 3, jarEntry.getName().length() - 6);
        }
        jarEntry = jarFile.getNextJarEntry();
      }
    } catch (Exception e) {
      LOG.error("Error getLangName " + jarName + " " + e);
    }
    return name;
  }
  // Try to find a URL from system props, environment, or config
 private String findAgentUrl(Configuration pConfig) {
   // System property has precedence
   String url = System.getProperty("jolokia." + ConfigKey.DISCOVERY_AGENT_URL.getKeyValue());
   if (url == null) {
     url = System.getenv("JOLOKIA_DISCOVERY_AGENT_URL");
     if (url == null) {
       url = pConfig.get(ConfigKey.DISCOVERY_AGENT_URL);
     }
   }
   return NetworkUtil.replaceExpression(url);
 }
 /*
  * Save the given configuration to disk.
  */
 public void save(String message, Configuration configuration) {
   File configLocation =
       new File(output, getRootFolder() + "configuration/org.eclipse.update/platform.xml");
   File installLocation = new File(output, getRootFolder());
   try {
     configuration.save(configLocation, installLocation.toURL());
   } catch (ProvisionException e) {
     fail(message, e);
   } catch (MalformedURLException e) {
     fail(message, e);
   }
 }
 public Configuration loadConfiguration(File configLocation, File installLocation) {
   try {
     return Configuration.load(configLocation, installLocation.toURL());
   } catch (ProvisionException e) {
     fail("Error while reading configuration from " + configLocation);
   } catch (MalformedURLException e) {
     fail("Unable to convert install location to URL " + installLocation);
   }
   assertTrue("Unable to read configuration from " + configLocation, false);
   // avoid compiler error
   return null;
 }
 /*
  * Assert that a feature with the given id exists in the configuration. If
  * a version is specified then match the version, otherwise any version will
  * do.
  */
 public void assertFeatureExists(
     String message, Configuration configuration, String id, String version) {
   List sites = configuration.getSites();
   assertNotNull(message, sites);
   boolean found = false;
   for (Iterator iter = sites.iterator(); iter.hasNext(); ) {
     Site site = (Site) iter.next();
     Feature[] features = site.getFeatures();
     for (int i = 0; features != null && i < features.length; i++) {
       if (id.equals(features[i].getId())) {
         if (version == null) found = true;
         else if (version.equals(features[i].getVersion())) found = true;
       }
     }
   }
   assertTrue(message, found);
 }
  public void loadAuConfigDescrs(Configuration config) throws ConfigurationException {
    super.loadAuConfigDescrs(config);
    this.m_registryUrl = config.get(ConfigParamDescr.BASE_URL.getKey());
    // Now we can construct a valid CC permission checker.
    m_permissionCheckers =
        //       ListUtil.list(new CreativeCommonsPermissionChecker(m_registryUrl));
        ListUtil.list(new CreativeCommonsPermissionChecker());

    paramMap.putLong(
        KEY_AU_NEW_CONTENT_CRAWL_INTERVAL,
        CurrentConfig.getTimeIntervalParam(
            PARAM_REGISTRY_CRAWL_INTERVAL, DEFAULT_REGISTRY_CRAWL_INTERVAL));
    if (log.isDebug2()) {
      log.debug2(
          "Setting Registry AU recrawl interval to "
              + StringUtil.timeIntervalToString(
                  paramMap.getLong(KEY_AU_NEW_CONTENT_CRAWL_INTERVAL)));
    }
  }
  public void start(String appConfigurationLocation, Dimension appSize) throws Exception {
    this.appTitle = Configuration.getStringValue(Constants.APPLICATION_DISPLAY_NAME);
    this.appSize = appSize;

    this.unitsFormat = new WWOUnitsFormat();
    this.unitsFormat.setShowUTM(true);
    this.unitsFormat.setShowWGS84(true);

    this.appConfigurationLocation = appConfigurationLocation;
    final AppConfiguration appConfig = new AppConfiguration();
    appConfig.initialize(this);

    appConfig.configure(this.appConfigurationLocation);

    SwingUtilities.invokeLater(
        new Runnable() {
          public void run() {
            redraw();
          }
        });
  }
  /** Start the JobTracker process, listen on the indicated port */
  JobTracker(Configuration conf) throws IOException {
    //
    // Grab some static constants
    //
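    // Job retirement defaults: retire completed jobs after 24 hours, checking once a minute.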
    maxCurrentTasks = conf.getInt("mapred.tasktracker.tasks.maximum", 2);
    RETIRE_JOB_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.interval", 24 * 60 * 60 * 1000);
    RETIRE_JOB_CHECK_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.check", 60 * 1000);
    TASK_ALLOC_EPSILON = conf.getFloat("mapred.jobtracker.taskalloc.loadbalance.epsilon", 0.2f);
    PAD_FRACTION = conf.getFloat("mapred.jobtracker.taskalloc.capacitypad", 0.1f);
    MIN_SLOTS_FOR_PADDING = 3 * maxCurrentTasks;

    // This is a directory of temporary submission files.  We delete it
    // on startup, and can delete any files that we're done with
    this.conf = conf;
    JobConf jobConf = new JobConf(conf);
    this.systemDir = jobConf.getSystemDir();
    this.fs = FileSystem.get(conf);
    FileUtil.fullyDelete(fs, systemDir);
    fs.mkdirs(systemDir);

    // Same with 'localDir' except it's always on the local disk.
    jobConf.deleteLocalFiles(SUBDIR);

    // Set ports, start RPC servers, etc.
    InetSocketAddress addr = getAddress(conf);
    this.localMachine = addr.getHostName();
    this.port = addr.getPort();
    this.interTrackerServer = RPC.getServer(this, addr.getPort(), 10, false, conf);
    this.interTrackerServer.start();
    Properties p = System.getProperties();
    for (Iterator it = p.keySet().iterator(); it.hasNext(); ) {
      String key = (String) it.next();
      String val = (String) p.getProperty(key);
      LOG.info("Property '" + key + "' is " + val);
    }

    this.infoPort = conf.getInt("mapred.job.tracker.info.port", 50030);
    this.infoServer = new JobTrackerInfoServer(this, infoPort);
    this.infoServer.start();

    this.startTime = System.currentTimeMillis();

    new Thread(this.expireTrackers).start();
    new Thread(this.retireJobs).start();
    new Thread(this.initJobs).start();
  }
  /**
   * Check how prefetch override works.
   *
    * @throws Exception If failed.
   */
  public void testOpenPrefetchOverride() throws Exception {
    create(igfsSecondary, paths(DIR, SUBDIR), paths(FILE));

    // Write enough data to the secondary file system.
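    // (a bit more than two full blocks, so a third block exists to potentially prefetch)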
    final int blockSize = IGFS_BLOCK_SIZE;

    IgfsOutputStream out = igfsSecondary.append(FILE, false);

    int totalWritten = 0;

    while (totalWritten < blockSize * 2 + chunk.length) {
      out.write(chunk);

      totalWritten += chunk.length;
    }

    out.close();

    awaitFileClose(igfsSecondary.asSecondary(), FILE);

    // Instantiate file system with overridden "seq reads before prefetch" property.
    Configuration cfg = new Configuration();

    cfg.addResource(U.resolveIgniteUrl(PRIMARY_CFG));

    int seqReads = SEQ_READS_BEFORE_PREFETCH + 1;
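    // Require one extra sequential read (compared to the default) before prefetch starts.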

    cfg.setInt(String.format(PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH, "igfs:grid@"), seqReads);

    FileSystem fs = FileSystem.get(new URI(PRIMARY_URI), cfg);

    // Read the first two blocks.
    Path fsHome = new Path(PRIMARY_URI);
    Path dir = new Path(fsHome, DIR.name());
    Path subdir = new Path(dir, SUBDIR.name());
    Path file = new Path(subdir, FILE.name());

    FSDataInputStream fsIn = fs.open(file);

    final byte[] readBuf = new byte[blockSize * 2];

    fsIn.readFully(0, readBuf, 0, readBuf.length);

    // Wait for a while for prefetch to finish (if any).
    IgfsMetaManager meta = igfs.context().meta();

    IgfsFileInfo info = meta.info(meta.fileId(FILE));

    IgfsBlockKey key = new IgfsBlockKey(info.id(), info.affinityKey(), info.evictExclude(), 2);

    IgniteCache<IgfsBlockKey, byte[]> dataCache =
        igfs.context().kernalContext().cache().jcache(igfs.configuration().getDataCacheName());

    for (int i = 0; i < 10; i++) {
      if (dataCache.containsKey(key)) break;
      else U.sleep(100);
    }

    fsIn.close();

    // Remove the file from the secondary file system.
    igfsSecondary.delete(FILE, false);

    // Try reading the third block. Should fail.
    GridTestUtils.assertThrows(
        log,
        new Callable<Object>() {
          @Override
          public Object call() throws Exception {
            IgfsInputStream in0 = igfs.open(FILE);

            in0.seek(blockSize * 2);

            try {
              in0.read(readBuf);
            } finally {
              U.closeQuiet(in0);
            }

            return null;
          }
        },
        IOException.class,
        "Failed to read data due to secondary file system exception: /dir/subdir/file");
  }
  private void restLdapHealth(final PwmRequest pwmRequest, final ConfigGuideBean configGuideBean)
      throws IOException, PwmUnrecoverableException {
    final Configuration tempConfiguration =
        new Configuration(configGuideBean.getStoredConfiguration());
    final PwmApplication tempApplication =
        new PwmApplication.PwmEnvironment(
                tempConfiguration, pwmRequest.getPwmApplication().getApplicationPath())
            .setApplicationMode(PwmApplication.MODE.NEW)
            .setInternalRuntimeInstance(true)
            .setWebInfPath(pwmRequest.getPwmApplication().getWebInfPath())
            .createPwmApplication();
    final LDAPStatusChecker ldapStatusChecker = new LDAPStatusChecker();
    final List<HealthRecord> records = new ArrayList<>();
    final LdapProfile ldapProfile = tempConfiguration.getDefaultLdapProfile();
    switch (configGuideBean.getStep()) {
      case LDAP_SERVER:
        {
          try {
            checkLdapServer(configGuideBean);
            records.add(password.pwm.health.HealthRecord.forMessage(HealthMessage.LDAP_OK));
          } catch (Exception e) {
            records.add(
                new HealthRecord(
                    HealthStatus.WARN,
                    HealthTopic.LDAP,
                    "Can not connect to remote server: " + e.getMessage()));
          }
        }
        break;

      case LDAP_ADMIN:
        {
          records.addAll(
              ldapStatusChecker.checkBasicLdapConnectivity(
                  tempApplication, tempConfiguration, ldapProfile, false));
          if (records.isEmpty()) {
            records.add(password.pwm.health.HealthRecord.forMessage(HealthMessage.LDAP_OK));
          }
        }
        break;

      case LDAP_CONTEXT:
        {
          records.addAll(
              ldapStatusChecker.checkBasicLdapConnectivity(
                  tempApplication, tempConfiguration, ldapProfile, true));
          if (records.isEmpty()) {
            records.add(
                new HealthRecord(
                    HealthStatus.GOOD, HealthTopic.LDAP, "LDAP Contextless Login Root validated"));
          }
          try {
            final UserMatchViewerFunction userMatchViewerFunction = new UserMatchViewerFunction();
            final Collection<UserIdentity> results =
                userMatchViewerFunction.discoverMatchingUsers(
                    pwmRequest.getPwmApplication(),
                    2,
                    configGuideBean.getStoredConfiguration(),
                    PwmSetting.QUERY_MATCH_PWM_ADMIN,
                    null);

            if (results.isEmpty()) {
              records.add(
                  new HealthRecord(HealthStatus.WARN, HealthTopic.LDAP, "No matching admin users"));
            } else {
              records.add(
                  new HealthRecord(HealthStatus.GOOD, HealthTopic.LDAP, "Admin group validated"));
            }
          } catch (PwmException e) {
            records.add(
                new HealthRecord(
                    HealthStatus.WARN,
                    HealthTopic.LDAP,
                    "Error during admin group validation: "
                        + e.getErrorInformation().toDebugStr()));
          } catch (Exception e) {
            records.add(
                new HealthRecord(
                    HealthStatus.WARN,
                    HealthTopic.LDAP,
                    "Error during admin group validation: " + e.getMessage()));
          }
        }
        break;

      case LDAP_TESTUSER:
        {
          final String testUserValue = configGuideBean.getFormData().get(PARAM_LDAP_TEST_USER);
          if (testUserValue != null && !testUserValue.isEmpty()) {
            records.addAll(
                ldapStatusChecker.checkBasicLdapConnectivity(
                    tempApplication, tempConfiguration, ldapProfile, false));
            records.addAll(
                ldapStatusChecker.doLdapTestUserCheck(
                    tempConfiguration, ldapProfile, tempApplication));
          } else {
            records.add(
                new HealthRecord(HealthStatus.CAUTION, HealthTopic.LDAP, "No test user specified"));
          }
        }
        break;
    }

    HealthData jsonOutput = new HealthData();
    jsonOutput.records =
        password.pwm.ws.server.rest.bean.HealthRecord.fromHealthRecords(
            records, pwmRequest.getLocale(), tempConfiguration);
    jsonOutput.timestamp = new Date();
    jsonOutput.overall = HealthMonitor.getMostSevereHealthStatus(records).toString();
    final RestResultBean restResultBean = new RestResultBean();
    restResultBean.setData(jsonOutput);
    pwmRequest.outputJsonResult(restResultBean);
  }
  private void mapAndTocForFile(File f, File path, String underscore) {
    // Get the file's simple name and its url name relative to the Main_pages/fr/ directory
    int under_index2 = f.getName().indexOf("_");
    int point_index2 = f.getName().indexOf(".");
    String named = f.getName().substring(under_index2 + 1, point_index2);
    String tmp = path.getAbsolutePath() + "Main_pages/fr/";
    String url_name = f.getPath().replace("\\", "/").substring(tmp.length());
    String targetName = url_name.substring(0, url_name.lastIndexOf(".html"));

    // now we will add into the map and the toc
    print.println("<mapID target=\"" + targetName + "\" url=\"pages/" + url_name + "\"/>");
    File dir_associated = new File(f.getParent(), named);

    if ((dir_associated.exists() && dir_associated.isDirectory()) || named.equals("objects")) {
      print2.println(
          "<tocitem text=\"" + getTitle(f) + "\" target=\"" + targetName + "\" image=\"tamicon\">");
      if (dir_associated.exists()) {
        // Then do the same for all the sub-files
        File[] sub_files = dir_associated.listFiles();
        for (int i = 0; i < sub_files.length; i++) {
          if (!sub_files[i].getName().equals("CVS") && sub_files[i].isFile()) {
            if (sub_files[i].getName().endsWith(".html"))
              mapAndTocForFile(sub_files[i], path, underscore);
            else {
              try {
                copyFile(
                    sub_files[i], new File(path, "pages/" + named + "/" + sub_files[i].getName()));
              } catch (IOException e) {
                LOG.error("Error while copying normal files in help " + e);
              }
            }
          }
        }
      }
      if (named.equals("objects")) {
        // For the objects specifically, add all of them
        File objects_dir =
            new File(
                Configuration.instance()
                        .getTangaraPath()
                        .getParentFile()
                        .getAbsolutePath()
                        .replace("\\", "/")
                    + "/objects/");
        File[] listfiles = objects_dir.listFiles();
        Vector<String> list_names = new Vector<String>();
        HashMap<String, String> map = new HashMap<String, String>();
        for (int i = 0; i < listfiles.length; i++) {
          try {
            if (listfiles[i].getName().endsWith(".jar")) {
              int point_index = listfiles[i].getName().lastIndexOf(".");
              String name = listfiles[i].getName().substring(0, point_index);

              // Copy the pages in the right directory
              File object_dir = new File(path, "pages/" + name);
              object_dir.mkdir();
              File object_ressource =
                  new File(
                      Configuration.instance()
                              .getTangaraPath()
                              .getParentFile()
                              .getAbsolutePath()
                              .replace("\\", "/")
                          + "/objects/resources/"
                          + name
                          + "/Help");
              if (object_ressource.exists()) {
                File[] list_html_object = object_ressource.listFiles();
                for (int e = 0; e < list_html_object.length; e++) {
                  if (list_html_object[e].getName().endsWith(".html")) {
                    int under_index = list_html_object[e].getName().lastIndexOf("_");
                    if (underscore.equals("") && under_index == -1)
                      copyFile(
                          list_html_object[e],
                          new File(path, "pages/" + name + "/" + list_html_object[e].getName()));
                    else if (!underscore.equals("")) {
                      if (list_html_object[e].getName().contains(underscore))
                        copyFile(
                            list_html_object[e],
                            new File(path, "pages/" + name + "/" + list_html_object[e].getName()));
                    }
                  } else
                    copyFile(
                        list_html_object[e],
                        new File(path, "pages/" + name + "/" + list_html_object[e].getName()));
                }
                // Gets the name of the object in the selected language
                String name_lang = null;
                if (underscore.equals("")) name_lang = name;
                else {
                  name_lang = getLangName(listfiles[i]);
                }
                if (name_lang != null) {
                  list_names.add(name_lang);
                  map.put(name_lang, name);
                  // Add to the map file
                  print.println(
                      "<mapID target=\""
                          + name
                          + "\" url=\"pages/"
                          + name
                          + "/index"
                          + underscore
                          + ".html\" />");
                }
              }
            }
          } catch (Exception e2) {
            LOG.error("Error2 getHelp " + e2);
          }
        }
        // Add to the tam file
        Collections.sort(list_names);
        for (String s : list_names) {
          print2.println(
              "<tocitem text=\"" + s + "\" target=\"" + map.get(s) + "\" image=\"fileicon\" />");
        }
      }
      print2.println("</tocitem>");
    } else {
      // no sub-files
      print2.println(
          "<tocitem text=\""
              + getTitle(f)
              + "\" target=\""
              + targetName
              + "\" image=\"fileicon\"/>");
    }

    File parent = new File(path, "pages/" + url_name.substring(0, url_name.lastIndexOf(named) - 3));
    if (!parent.exists()) parent.mkdirs();
    File in_pages = new File(path, "pages/" + url_name);
    try {
      in_pages.createNewFile();
      copyFile(f, in_pages);
    } catch (IOException e3) {
      LOG.error("Error 3 getHelp " + e3 + " " + f.getName());
    }
  }
  /**
   * Test how IPC cache map works.
   *
   * @throws Exception If failed.
   */
  @SuppressWarnings("unchecked")
  public void testIpcCache() throws Exception {
    Field cacheField = GridGgfsHadoopIpcIo.class.getDeclaredField("ipcCache");

    cacheField.setAccessible(true);

    Field activeCntField = GridGgfsHadoopIpcIo.class.getDeclaredField("activeCnt");

    activeCntField.setAccessible(true);

    Map<String, GridGgfsHadoopIpcIo> cache =
        (Map<String, GridGgfsHadoopIpcIo>) cacheField.get(null);

    String name = "ggfs:" + getTestGridName(0) + "@";

    Configuration cfg = new Configuration();

    cfg.addResource(U.resolveGridGainUrl(HADOOP_FS_CFG));
    cfg.setBoolean("fs.ggfs.impl.disable.cache", true);
    cfg.setBoolean(String.format(GridGgfsHadoopUtils.PARAM_GGFS_ENDPOINT_NO_EMBED, name), true);

    // Ensure that existing IO is reused.
    FileSystem fs1 = FileSystem.get(new URI("ggfs://" + name + "/"), cfg);

    assertEquals(1, cache.size());

    GridGgfsHadoopIpcIo io = null;

    System.out.println("CACHE: " + cache);

    for (String key : cache.keySet()) {
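      // Assumption: 10500 is the default IPC endpoint port, so this picks the test grid's IO.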
      if (key.contains("10500")) {
        io = cache.get(key);

        break;
      }
    }

    assert io != null;

    assertEquals(1, ((AtomicInteger) activeCntField.get(io)).get());

    // Ensure that when IO is used by multiple file systems and one of them is closed, IO is not
    // stopped.
    FileSystem fs2 = FileSystem.get(new URI("ggfs://" + name + "/abc"), cfg);

    assertEquals(1, cache.size());
    assertEquals(2, ((AtomicInteger) activeCntField.get(io)).get());

    fs2.close();

    assertEquals(1, cache.size());
    assertEquals(1, ((AtomicInteger) activeCntField.get(io)).get());

    Field stopField = GridGgfsHadoopIpcIo.class.getDeclaredField("stopping");

    stopField.setAccessible(true);

    assert !(Boolean) stopField.get(io);

    // Ensure that IO is stopped when nobody else needs it.
    fs1.close();

    assert cache.isEmpty();

    assert (Boolean) stopField.get(io);
  }
  public void generateFileChunks(JspWriter out, HttpServletRequest req, Configuration conf)
      throws IOException, InterruptedException {
    long startOffset = 0;
    int datanodePort = 0;
    int chunkSizeToView = 0;

    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
    int namenodeInfoPort = -1;
    if (namenodeInfoPortStr != null) namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);

    String filename = HtmlQuoting.unquoteHtmlChars(req.getParameter("filename"));
    if (filename == null) {
      out.print("Invalid input (filename absent)");
      return;
    }

    String blockIdStr = null;
    long blockId = 0;
    blockIdStr = req.getParameter("blockId");
    if (blockIdStr == null) {
      out.print("Invalid input (blockId absent)");
      return;
    }
    blockId = Long.parseLong(blockIdStr);

    String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
    UserGroupInformation ugi = JspHelper.getUGI(req, conf);
    final DFSClient dfs = JspHelper.getDFSClient(ugi, jspHelper.nameNodeAddr, conf);

    Token<BlockTokenIdentifier> accessToken = BlockTokenSecretManager.DUMMY_TOKEN;
    if (conf.getBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, false)) {
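      // Block access tokens are enabled: look up the token for the requested block.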
      List<LocatedBlock> blks =
          dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
      if (blks == null || blks.size() == 0) {
        out.print("Can't locate file blocks");
        dfs.close();
        return;
      }
      for (int i = 0; i < blks.size(); i++) {
        if (blks.get(i).getBlock().getBlockId() == blockId) {
          accessToken = blks.get(i).getBlockToken();
          break;
        }
      }
    }

    String blockGenStamp = null;
    long genStamp = 0;
    blockGenStamp = req.getParameter("genstamp");
    if (blockGenStamp == null) {
      out.print("Invalid input (genstamp absent)");
      return;
    }
    genStamp = Long.parseLong(blockGenStamp);

    String blockSizeStr;
    long blockSize = 0;
    blockSizeStr = req.getParameter("blockSize");
    if (blockSizeStr == null) {
      out.print("Invalid input (blockSize absent)");
      return;
    }
    blockSize = Long.parseLong(blockSizeStr);

    String chunkSizeToViewStr = req.getParameter("chunkSizeToView");
    if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0)
      chunkSizeToView = Integer.parseInt(chunkSizeToViewStr);
    else chunkSizeToView = JspHelper.getDefaultChunkSize(conf);

    String startOffsetStr = req.getParameter("startOffset");
    if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) startOffset = 0;
    else startOffset = Long.parseLong(startOffsetStr);

    String datanodePortStr = req.getParameter("datanodePort");
    if (datanodePortStr == null) {
      out.print("Invalid input (datanodePort absent)");
      return;
    }
    datanodePort = Integer.parseInt(datanodePortStr);
    out.print("<h3>File: ");
    JspHelper.printPathWithLinks(
        HtmlQuoting.quoteHtmlChars(filename), out, namenodeInfoPort, tokenString);
    out.print("</h3><hr>");
    String parent = new File(filename).getParent();
    JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, HtmlQuoting.quoteHtmlChars(parent));
    out.print("<hr>");
    out.print(
        "<a href=\"http://"
            + req.getServerName()
            + ":"
            + req.getServerPort()
            + "/browseDirectory.jsp?dir="
            + URLEncoder.encode(parent, "UTF-8")
            + "&namenodeInfoPort="
            + namenodeInfoPort
            + "\"><i>Go back to dir listing</i></a><br>");
    out.print("<a href=\"#viewOptions\">Advanced view/download options</a><br>");
    out.print("<hr>");

    // Determine the prev & next blocks
    long nextStartOffset = 0;
    long nextBlockSize = 0;
    String nextBlockIdStr = null;
    String nextGenStamp = null;
    String nextHost = req.getServerName();
    int nextPort = req.getServerPort();
    int nextDatanodePort = datanodePort;
    // determine data for the next link
    if (startOffset + chunkSizeToView >= blockSize) {
      // we have to go to the next block from this point onwards
      List<LocatedBlock> blocks =
          dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
      for (int i = 0; i < blocks.size(); i++) {
        if (blocks.get(i).getBlock().getBlockId() == blockId) {
          if (i != blocks.size() - 1) {
            LocatedBlock nextBlock = blocks.get(i + 1);
            nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId());
            nextGenStamp = Long.toString(nextBlock.getBlock().getGenerationStamp());
            nextStartOffset = 0;
            nextBlockSize = nextBlock.getBlock().getNumBytes();
            DatanodeInfo d = jspHelper.bestNode(nextBlock);
            String datanodeAddr = d.getName();
            nextDatanodePort =
                Integer.parseInt(
                    datanodeAddr.substring(datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
            nextHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
            nextPort = d.getInfoPort();
          }
        }
      }
    } else {
      // we are in the same block
      nextBlockIdStr = blockIdStr;
      nextStartOffset = startOffset + chunkSizeToView;
      nextBlockSize = blockSize;
      nextGenStamp = blockGenStamp;
    }
    String nextUrl = null;
    if (nextBlockIdStr != null) {
      nextUrl =
          "http://"
              + nextHost
              + ":"
              + nextPort
              + "/browseBlock.jsp?blockId="
              + nextBlockIdStr
              + "&blockSize="
              + nextBlockSize
              + "&startOffset="
              + nextStartOffset
              + "&genstamp="
              + nextGenStamp
              + "&filename="
              + URLEncoder.encode(filename, "UTF-8")
              + "&chunkSizeToView="
              + chunkSizeToView
              + "&datanodePort="
              + nextDatanodePort
              + "&namenodeInfoPort="
              + namenodeInfoPort
              + JspHelper.getDelegationTokenUrlParam(tokenString);
      out.print("<a href=\"" + nextUrl + "\">View Next chunk</a>&nbsp;&nbsp;");
    }
    // determine data for the prev link
    String prevBlockIdStr = null;
    String prevGenStamp = null;
    long prevStartOffset = 0;
    long prevBlockSize = 0;
    String prevHost = req.getServerName();
    int prevPort = req.getServerPort();
    int prevDatanodePort = datanodePort;
    if (startOffset == 0) {
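      // At the start of this block, so the previous chunk (if any) lives in the preceding block.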
      List<LocatedBlock> blocks =
          dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
      for (int i = 0; i < blocks.size(); i++) {
        if (blocks.get(i).getBlock().getBlockId() == blockId) {
          if (i != 0) {
            LocatedBlock prevBlock = blocks.get(i - 1);
            prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId());
            prevGenStamp = Long.toString(prevBlock.getBlock().getGenerationStamp());
            prevStartOffset = prevBlock.getBlock().getNumBytes() - chunkSizeToView;
            if (prevStartOffset < 0) prevStartOffset = 0;
            prevBlockSize = prevBlock.getBlock().getNumBytes();
            DatanodeInfo d = jspHelper.bestNode(prevBlock);
            String datanodeAddr = d.getName();
            prevDatanodePort =
                Integer.parseInt(
                    datanodeAddr.substring(datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
            prevHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
            prevPort = d.getInfoPort();
          }
        }
      }
    } else {
      // we are in the same block
      prevBlockIdStr = blockIdStr;
      prevStartOffset = startOffset - chunkSizeToView;
      if (prevStartOffset < 0) prevStartOffset = 0;
      prevBlockSize = blockSize;
      prevGenStamp = blockGenStamp;
    }

    String prevUrl = null;
    if (prevBlockIdStr != null) {
      prevUrl =
          "http://"
              + prevHost
              + ":"
              + prevPort
              + "/browseBlock.jsp?blockId="
              + prevBlockIdStr
              + "&blockSize="
              + prevBlockSize
              + "&startOffset="
              + prevStartOffset
              + "&filename="
              + URLEncoder.encode(filename, "UTF-8")
              + "&chunkSizeToView="
              + chunkSizeToView
              + "&genstamp="
              + prevGenStamp
              + "&datanodePort="
              + prevDatanodePort
              + "&namenodeInfoPort="
              + namenodeInfoPort
              + JspHelper.getDelegationTokenUrlParam(tokenString);
      out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a>&nbsp;&nbsp;");
    }
    out.print("<hr>");
    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
    try {
      jspHelper.streamBlockInAscii(
          new InetSocketAddress(req.getServerName(), datanodePort),
          blockId,
          accessToken,
          genStamp,
          blockSize,
          startOffset,
          chunkSizeToView,
          out,
          conf);
    } catch (Exception e) {
      out.print(e);
    }
    out.print("</textarea>");
    dfs.close();
  }
 /** Set the session timeout to the configured value */
 protected void setSessionTimeout(HttpSession session) {
   Configuration config = CurrentConfig.getCurrentConfig();
   setSessionTimeout(
       session, config.getTimeInterval(PARAM_UI_SESSION_TIMEOUT, DEFAULT_UI_SESSION_TIMEOUT));
 }
 public void setConfig(
     Configuration config, Configuration oldConfig, Configuration.Differences changedKeys) {
    //  Build list of repositories from list of disk (fs) paths.  Needs to
    //  be generalized if another repository implementation is ever added.
   if (changedKeys.contains(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST)) {
     List lst = new ArrayList();
     String dspace = config.get(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, "");
     List paths = StringUtil.breakAt(dspace, ';');
     if (paths != null) {
       for (Iterator iter = paths.iterator(); iter.hasNext(); ) {
         lst.add("local:" + (String) iter.next());
       }
     }
     repoList = lst;
   }
   if (changedKeys.contains(PARAM_MAX_PER_AU_CACHE_SIZE)) {
     paramNodeCacheSize =
         config.getInt(PARAM_MAX_PER_AU_CACHE_SIZE, DEFAULT_MAX_PER_AU_CACHE_SIZE);
     for (Iterator iter = getDaemon().getAllLockssRepositories().iterator(); iter.hasNext(); ) {
       LockssRepository repo = (LockssRepository) iter.next();
       if (repo instanceof LockssRepositoryImpl) {
         LockssRepositoryImpl repoImpl = (LockssRepositoryImpl) repo;
         repoImpl.setNodeCacheSize(paramNodeCacheSize);
       }
     }
   }
   if (changedKeys.contains(PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE)) {
     paramSuspectVersionsCacheSize =
         config.getInt(
             PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE, DEFAULT_MAX_SUSPECT_VERSIONS_CACHE_SIZE);
     suspectVersionsCache.setMaxSize(paramSuspectVersionsCacheSize);
   }
   if (changedKeys.contains(GLOBAL_CACHE_PREFIX)) {
     paramIsGlobalNodeCache =
         config.getBoolean(PARAM_GLOBAL_CACHE_ENABLED, DEFAULT_GLOBAL_CACHE_ENABLED);
     if (paramIsGlobalNodeCache) {
       paramGlobalNodeCacheSize =
           config.getInt(PARAM_MAX_GLOBAL_CACHE_SIZE, DEFAULT_MAX_GLOBAL_CACHE_SIZE);
       log.debug("global node cache size: " + paramGlobalNodeCacheSize);
       globalNodeCache.setMaxSize(paramGlobalNodeCacheSize);
     }
   }
   if (changedKeys.contains(DISK_PREFIX)) {
     int minMB = config.getInt(PARAM_DISK_WARN_FRRE_MB, DEFAULT_DISK_WARN_FRRE_MB);
     double minPer =
         config.getPercentage(PARAM_DISK_WARN_FRRE_PERCENT, DEFAULT_DISK_WARN_FRRE_PERCENT);
     paramDFWarn = PlatformUtil.DF.makeThreshold(minMB, minPer);
     minMB = config.getInt(PARAM_DISK_FULL_FRRE_MB, DEFAULT_DISK_FULL_FRRE_MB);
     minPer = config.getPercentage(PARAM_DISK_FULL_FRRE_PERCENT, DEFAULT_DISK_FULL_FRRE_PERCENT);
     paramDFFull = PlatformUtil.DF.makeThreshold(minMB, minPer);
   }
   if (changedKeys.contains(PARAM_SIZE_CALC_MAX_LOAD)) {
     sizeCalcMaxLoad = config.getPercentage(PARAM_SIZE_CALC_MAX_LOAD, DEFAULT_SIZE_CALC_MAX_LOAD);
   }
   if (changedKeys.contains(PREFIX)) {
     maxUnusedDirSearch =
         config.getInt(PARAM_MAX_UNUSED_DIR_SEARCH, DEFAULT_MAX_UNUSED_DIR_SEARCH);
     isStatefulUnusedDirSearch =
         config.getBoolean(
             PARAM_IS_STATEFUL_UNUSED_DIR_SEARCH, DEFAULT_IS_STATEFUL_UNUSED_DIR_SEARCH);
     enableLongComponents =
         config.getBoolean(PARAM_ENABLE_LONG_COMPONENTS, DEFAULT_ENABLE_LONG_COMPONENTS);
     enableLongComponentsCompatibility =
         config.getBoolean(
             PARAM_ENABLE_LONG_COMPONENTS_COMPATIBILITY,
             DEFAULT_ENABLE_LONG_COMPONENTS_COMPATIBILITY);
     maxComponentLength = config.getInt(PARAM_MAX_COMPONENT_LENGTH, DEFAULT_MAX_COMPONENT_LENGTH);
     checkUnnormalized =
         (CheckUnnormalizedMode)
             config.getEnum(
                 CheckUnnormalizedMode.class,
                 PARAM_CHECK_UNNORMALIZED,
                 DEFAULT_CHECK_UNNORMALIZED);
   }
 }
  /**
   * Initializes store.
   *
   * @throws GridException If failed to initialize.
   */
  private void init() throws GridException {
    if (initGuard.compareAndSet(false, true)) {
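      // Only the first caller performs initialization; other threads wait on initLatch below.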
      if (log.isDebugEnabled()) log.debug("Initializing cache store.");

      try {
        if (sesFactory != null)
          // Session factory has been provided - nothing to do.
          return;

        if (!F.isEmpty(hibernateCfgPath)) {
          try {
            URL url = new URL(hibernateCfgPath);

            sesFactory = new Configuration().configure(url).buildSessionFactory();

            if (log.isDebugEnabled()) log.debug("Configured session factory using URL: " + url);

            // Session factory has been successfully initialized.
            return;
          } catch (MalformedURLException e) {
            if (log.isDebugEnabled())
              log.debug("Caught malformed URL exception: " + e.getMessage());
          }

          // Provided path is not a valid URL. File?
          File cfgFile = new File(hibernateCfgPath);

          if (cfgFile.exists()) {
            sesFactory = new Configuration().configure(cfgFile).buildSessionFactory();

            if (log.isDebugEnabled())
              log.debug("Configured session factory using file: " + hibernateCfgPath);

            // Session factory has been successfully initialized.
            return;
          }

          // Provided path is not a file. Classpath resource?
          sesFactory = new Configuration().configure(hibernateCfgPath).buildSessionFactory();

          if (log.isDebugEnabled())
            log.debug("Configured session factory using classpath resource: " + hibernateCfgPath);
        } else {
          if (hibernateProps == null) {
            U.warn(
                log, "No Hibernate configuration has been provided for store (will use default).");

            hibernateProps = new Properties();

            hibernateProps.setProperty("hibernate.connection.url", DFLT_CONN_URL);
            hibernateProps.setProperty("hibernate.show_sql", DFLT_SHOW_SQL);
            hibernateProps.setProperty("hibernate.hbm2ddl.auto", DFLT_HBM2DDL_AUTO);
          }

          Configuration cfg = new Configuration();

          cfg.setProperties(hibernateProps);

          assert resourceAvailable(MAPPING_RESOURCE);

          cfg.addResource(MAPPING_RESOURCE);

          sesFactory = cfg.buildSessionFactory();

          if (log.isDebugEnabled())
            log.debug("Configured session factory using properties: " + hibernateProps);
        }
      } catch (HibernateException e) {
        throw new GridException("Failed to initialize store.", e);
      } finally {
        initLatch.countDown();
      }
    } else if (initLatch.getCount() > 0) U.await(initLatch);

    if (sesFactory == null) throw new GridException("Cache store was not properly initialized.");
  }
 /** Called by org.lockss.config.MiscConfig */
 public static void setConfig(
     Configuration config, Configuration oldConfig, Configuration.Differences diffs) {
   if (diffs.contains(PREFIX)) {
     maxErrors = config.getInt(PARAM_MAX_ERRORS, DEFAULT_MAX_ERRORS);
   }
 }