/**
 * Returns the path of the instance that corresponds to the given installation path of the
 * directory server. Note that this method assumes that this code is being run locally.
 *
 * @param installPath The installation path.
 * @return The path of the instance that corresponds to the installation path.
 */
static String getInstancePathFromInstallPath(final String installPath) {
  String instancePathFileName = Installation.INSTANCE_LOCATION_PATH;
  final File _svcScriptPath = new File(installPath + File.separator + SVC_SCRIPT_FILE_NAME);

  // Look for /etc/opt/opendj/instance.loc
  File f = new File(instancePathFileName);
  if (!_svcScriptPath.exists() || !f.exists()) {
    // Look for <installPath>/instance.loc
    instancePathFileName =
        installPath + File.separator + Installation.INSTANCE_LOCATION_PATH_RELATIVE;
    f = new File(instancePathFileName);
    if (!f.exists()) {
      return installPath;
    }
  }

  BufferedReader reader;
  try {
    reader = new BufferedReader(new FileReader(instancePathFileName));
  } catch (Exception e) {
    return installPath;
  }

  // Read the first line and close the file.
  String line;
  try {
    line = reader.readLine();
    final File instanceLoc = new File(line.trim());
    if (instanceLoc.isAbsolute()) {
      return instanceLoc.getAbsolutePath();
    }
    return new File(installPath + File.separator + instanceLoc.getPath()).getAbsolutePath();
  } catch (Exception e) {
    return installPath;
  } finally {
    StaticUtils.close(reader);
  }
}
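/**
 * Illustrative usage sketch only, not part of the upgrade tooling: the install path below is a
 * hypothetical example. It shows the resolution order implemented above: the system-wide
 * instance.loc is consulted when the service script exists, then instance.loc under the install
 * path, and finally the install path itself is returned when no instance.loc can be read.
 */
private static void logResolvedInstancePathExample() {
  // Hypothetical install path used purely for illustration.
  final String installPath = "/opt/opendj";
  // Resolves the instance path, falling back to the install path itself.
  final String instancePath = getInstancePathFromInstallPath(installPath);
  logger.debug(
      LocalizableMessage.raw(String.format("Resolved instance path: %s", instancePath)));
}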
/**
 * Creates a new file in the config/upgrade folder. The new file is a concatenation of the entries
 * of all files contained in the config/schema folder.
 *
 * @param folder The folder containing the schema files.
 * @param revision The revision number of the current binary version.
 * @throws Exception If we cannot read the files contained in the folder where the schema files
 *     are supposed to be, or if one of those files contains errors.
 */
static void updateConfigUpgradeSchemaFile(final File folder, final String revision)
    throws Exception {
  // We need to upgrade the schema.ldif.<rev> file contained in the
  // config/upgrade folder; otherwise we cannot enable the backend at
  // the server's start. We need to read all files contained in config/schema
  // and add all attribute / object classes in this new super entry which
  // will be read at start-up.
  Entry theNewSchemaEntry = new LinkedHashMapEntry();
  LDIFEntryReader reader = null;
  LDIFEntryWriter writer = null;
  try {
    if (folder.isDirectory()) {
      final FilenameFilter filter = new SchemaConfigManager.SchemaFileFilter();
      for (final File f : folder.listFiles(filter)) {
        logger.debug(LocalizableMessage.raw(String.format("Processing %s", f.getAbsolutePath())));
        reader = new LDIFEntryReader(new FileInputStream(f));
        try {
          while (reader.hasNext()) {
            final Entry entry = reader.readEntry();
            theNewSchemaEntry.setName(entry.getName());
            for (final Attribute at : entry.getAllAttributes()) {
              theNewSchemaEntry.addAttribute(at);
            }
          }
        } catch (Exception ex) {
          throw new Exception(
              "Error parsing existing schema file " + f.getName() + " - " + ex.getMessage(), ex);
        }
      }

      // Creates a File object representing config/upgrade/schema.ldif.<revision>,
      // which the server creates the first time it starts if there are schema
      // customizations.
      final File destination =
          new File(
              configDirectory,
              Installation.UPGRADE_PATH + File.separator + "schema.ldif." + revision);

      // Checks if the parent exists (e.g. the embedded server
      // doesn't seem to provide that folder).
      File parentDirectory = destination.getParentFile();
      if (!parentDirectory.exists()) {
        logger.debug(
            LocalizableMessage.raw(
                String.format("Parent file of %s doesn't exist", destination.getPath())));
        parentDirectory.mkdirs();
        logger.debug(
            LocalizableMessage.raw(
                String.format("Parent directory %s created.", parentDirectory.getPath())));
      }
      if (!destination.exists()) {
        destination.createNewFile();
      }

      logger.debug(
          LocalizableMessage.raw(
              String.format("Writing entries in %s.", destination.getAbsolutePath())));

      writer = new LDIFEntryWriter(new FileOutputStream(destination));
      writer.writeEntry(theNewSchemaEntry);

      logger.debug(
          LocalizableMessage.raw(
              String.format(
                  "%s created and completed successfully.", destination.getAbsolutePath())));
    }
  } finally {
    StaticUtils.close(reader, writer);
  }
}
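/**
 * Illustrative sketch only: how the concatenated upgrade schema file might be regenerated. The
 * folder name and revision value below are hypothetical examples; the real caller derives both
 * from the server installation being upgraded.
 */
private static void rebuildUpgradeSchemaExample() throws Exception {
  // Hypothetical schema folder located under the configuration directory.
  final File schemaFolder = new File(configDirectory, "schema");
  // Hypothetical revision number of the current binary version.
  final String revision = "1234567";
  // Produces config/upgrade/schema.ldif.<revision> containing one entry that
  // concatenates the attribute and object class definitions of every schema file.
  updateConfigUpgradeSchemaFile(schemaFolder, revision);
}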
/**
 * This task adds new attributes / object classes to the specified destination file. The new
 * attributes and object classes must be originally defined in the template file.
 *
 * @param templateFile The file from which the new attribute / object class definitions can be
 *     read.
 * @param destination The file where we want to add the new definitions.
 * @param attributes The attributes which need to be inserted into the destination file.
 * @param objectClasses The object classes which need to be inserted into the destination file.
 * @return The number of attributes / object classes successfully inserted into the destination
 *     file.
 * @throws IOException If an unexpected IO error occurred while reading an entry.
 * @throws IllegalStateException Failure to find an attribute in the template schema indicates
 *     either a programming error (e.g. typo in the attribute name) or template corruption.
 *     Upgrade should stop.
 */
static int updateSchemaFile(
    final File templateFile,
    final File destination,
    final String[] attributes,
    final String[] objectClasses)
    throws IOException, IllegalStateException {
  int changeCount = 0;
  LDIFEntryReader templateReader = null;
  LDIFEntryReader destinationReader = null;
  LDIFEntryWriter destinationWriter = null;
  File copy = null;
  try {
    templateReader = new LDIFEntryReader(new FileInputStream(templateFile));
    if (!templateReader.hasNext()) {
      // Unless the template file is corrupted, this should not happen.
      throw new IOException(ERR_UPGRADE_CORRUPTED_TEMPLATE.get(templateFile.getPath()).toString());
    }
    final Entry templateSchemaEntry = templateReader.readEntry();

    destinationReader = new LDIFEntryReader(new FileInputStream(destination));
    if (!destinationReader.hasNext()) {
      // Unless the destination file is corrupted, this should not happen.
      throw new IOException(ERR_UPGRADE_CORRUPTED_TEMPLATE.get(destination.getPath()).toString());
    }
    final Entry destinationSchemaEntry = destinationReader.readEntry();

    if (attributes != null) {
      for (final String att : attributes) {
        final ByteString attributeType =
            getSchemaElement(templateSchemaEntry, "attributeTypes", att);
        destinationSchemaEntry.getAttribute("attributeTypes").add(attributeType);
        changeCount++;
        logger.debug(LocalizableMessage.raw(String.format("Added %s", attributeType)));
      }
    }

    if (objectClasses != null) {
      for (final String oc : objectClasses) {
        final ByteString objectClass = getSchemaElement(templateSchemaEntry, "objectClasses", oc);
        destinationSchemaEntry.getAttribute("objectClasses").add(objectClass);
        changeCount++;
        logger.trace("Added %s", objectClass);
      }
    }

    // Then writes the new schema entry.
    copy = File.createTempFile("copySchema", ".tmp", destination.getParentFile());
    final FileOutputStream fos = new FileOutputStream(copy);
    destinationWriter = new LDIFEntryWriter(fos);
    destinationWriter.setWrapColumn(79);
    // Copy the header comments to fos (license and first comments only).
    writeFileHeaderComments(templateFile, destinationWriter);
    // Then write the entry itself.
    destinationWriter.writeEntry(destinationSchemaEntry);
  } finally {
    // The readers and the writer must be closed before the file is renamed;
    // otherwise this causes exceptions under the Windows OS.
    StaticUtils.close(templateReader, destinationReader, destinationWriter);
  }

  // Renames the copy to make it the new schema file.
  try {
    rename(copy, destination);
  } catch (IOException e) {
    logger.error(LocalizableMessage.raw(e.getMessage()));
    deleteRecursively(copy);
    throw e;
  }
  return changeCount;
}
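/**
 * Illustrative sketch only: adding schema definitions to an existing schema file. The file names
 * and element names below are hypothetical examples rather than definitions shipped with any
 * particular release; the named elements must already be defined in the template file.
 */
private static void addSchemaElementsExample() throws IOException {
  // Hypothetical template and destination schema files.
  final File template = new File(configDirectory, "00-core.ldif.template");
  final File schemaFile = new File(configDirectory, "schema" + File.separator + "00-core.ldif");
  // Hypothetical attribute and object class names to copy from the template.
  final int added =
      updateSchemaFile(
          template,
          schemaFile,
          new String[] {"exampleAttribute"},
          new String[] {"exampleObjectClass"});
  logger.debug(LocalizableMessage.raw(String.format("Added %d schema definitions", added)));
}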