/**
 * Walks the recorded entries and, for each one, reads the variable-length part of its
 * local file header: resolves the offset at which the entry data starts, attaches the
 * local extra field data, fixes up names/comments for entries without the UTF-8 flag,
 * and indexes the entry by name.
 */
private void resolveLocalFileHeaderData(
    final Map<ZipEntry, NameAndComment> entriesWithoutUTF8Flag) throws IOException {
  for (final Entry ze : this.entries) {
    final OffsetEntry offsetEntry = ze.getOffsetEntry();
    final long offset = offsetEntry.headerOffset;
    // The file-name length field sits 26 bytes into the local file header.
    this.archive.seek(offset + 26L);
    this.archive.readFully(this.SHORT_BUF);
    final int fileNameLen = ZipShort.getValue(this.SHORT_BUF);
    this.archive.readFully(this.SHORT_BUF);
    final int extraFieldLen = ZipShort.getValue(this.SHORT_BUF);
    // Skip over the file name; skipBytes may skip fewer bytes than requested.
    int skipped;
    for (int lenToSkip = fileNameLen; lenToSkip > 0; lenToSkip -= skipped) {
      skipped = this.archive.skipBytes(lenToSkip);
      if (skipped <= 0) {
        throw new IOException("failed to skip file name in local file header");
      }
    }
    final byte[] localExtraData = new byte[extraFieldLen];
    this.archive.readFully(localExtraData);
    ze.setExtra(localExtraData);
    // Data starts after the 30-byte fixed header plus the two variable-length fields.
    offsetEntry.dataOffset = offset + 26L + 2L + 2L + fileNameLen + extraFieldLen;
    if (entriesWithoutUTF8Flag.containsKey(ze)) {
      final NameAndComment nc = entriesWithoutUTF8Flag.get(ze);
      ZipUtil.setNameAndCommentFromExtraFields(ze, nc.name, nc.comment);
    }
    final String name = ze.getName();
    LinkedList<ZipEntry> entriesOfThatName = this.nameMap.get(name);
    if (entriesOfThatName == null) {
      entriesOfThatName = new LinkedList<ZipEntry>();
      this.nameMap.put(name, entriesOfThatName);
    }
    entriesOfThatName.addLast(ze);
  }
}
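Since the data-offset calculation relies on the fixed layout of the ZIP local file header, a standalone sketch of the same arithmetic may help; the method name, the RandomAccessFile parameter, and the buffer are illustrative assumptions rather than part of the original class.

// Illustrative sketch only: compute where an entry's data starts, given the offset of
// its local file header. Per the ZIP format, 26 bytes of fixed fields precede the
// 2-byte file-name length, which is followed by the 2-byte extra-field length.
static long computeDataOffset(final java.io.RandomAccessFile raf, final long headerOffset)
    throws java.io.IOException {
  final byte[] buf = new byte[2];
  raf.seek(headerOffset + 26L);
  raf.readFully(buf);
  final int fileNameLen = (buf[0] & 0xFF) | ((buf[1] & 0xFF) << 8); // little-endian short
  raf.readFully(buf);
  final int extraFieldLen = (buf[0] & 0xFF) | ((buf[1] & 0xFF) << 8);
  // 30-byte fixed header + variable-length file name and extra field.
  return headerOffset + 30L + fileNameLen + extraFieldLen;
}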
/**
 * Creates a new file in the config/upgrade folder. The new file is a concatenation of
 * the entries of all files contained in the config/schema folder.
 *
 * @param folder The folder containing the schema files.
 * @param revision The revision number of the current binary version.
 * @throws Exception If the files contained in the schema folder cannot be read, or if a
 *     file contains errors.
 */
static void updateConfigUpgradeSchemaFile(final File folder, final String revision)
    throws Exception {
  // We need to upgrade the schema.ldif.<rev> file contained in the
  // config/upgrade folder, otherwise we cannot enable the backend at the
  // server's start. We need to read all files contained in config/schema
  // and add all attribute/object classes to this new super entry, which
  // will be read at start-up.
  Entry theNewSchemaEntry = new LinkedHashMapEntry();
  LDIFEntryReader reader = null;
  LDIFEntryWriter writer = null;
  try {
    if (folder.isDirectory()) {
      final FilenameFilter filter = new SchemaConfigManager.SchemaFileFilter();
      for (final File f : folder.listFiles(filter)) {
        logger.debug(LocalizableMessage.raw(String.format("Processing %s", f.getAbsolutePath())));
        reader = new LDIFEntryReader(new FileInputStream(f));
        try {
          while (reader.hasNext()) {
            final Entry entry = reader.readEntry();
            theNewSchemaEntry.setName(entry.getName());
            for (final Attribute at : entry.getAllAttributes()) {
              theNewSchemaEntry.addAttribute(at);
            }
          }
        } catch (Exception ex) {
          throw new Exception(
              "Error parsing existing schema file " + f.getName() + " - " + ex.getMessage(), ex);
        } finally {
          // Close the per-file reader here, otherwise only the last reader created in
          // this loop would be closed by the outer finally block.
          StaticUtils.close(reader);
        }
      }

      // Creates a File object representing config/upgrade/schema.ldif.<revision>, which
      // the server creates the first time it starts if there are schema customizations.
      final File destination = new File(
          configDirectory, Installation.UPGRADE_PATH + File.separator + "schema.ldif." + revision);

      // Checks whether the parent exists (e.g. the embedded server doesn't seem to
      // provide that folder).
      File parentDirectory = destination.getParentFile();
      if (!parentDirectory.exists()) {
        logger.debug(
            LocalizableMessage.raw(
                String.format("Parent file of %s doesn't exist", destination.getPath())));

        parentDirectory.mkdirs();

        logger.debug(
            LocalizableMessage.raw(
                String.format("Parent directory %s created.", parentDirectory.getPath())));
      }
      if (!destination.exists()) {
        destination.createNewFile();
      }

      logger.debug(
          LocalizableMessage.raw(
              String.format("Writing entries in %s.", destination.getAbsolutePath())));

      writer = new LDIFEntryWriter(new FileOutputStream(destination));
      writer.writeEntry(theNewSchemaEntry);

      logger.debug(
          LocalizableMessage.raw(
              String.format(
                  "%s created and completed successfully.", destination.getAbsolutePath())));
    }
  } finally {
    StaticUtils.close(reader, writer);
  }
}
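A minimal sketch of how this method might be driven during an upgrade, under the assumption that the caller lives in the same class; the helper name and the use of the configDirectory field are illustrative, not taken from the original code.

// Hypothetical caller, assuming the configDirectory field and imports used by
// updateConfigUpgradeSchemaFile; "schema" is the config sub-folder named in its
// comments, and the revision string comes from the upgrade context.
static void rebuildUpgradeSchemaFile(final String revision) throws Exception {
  final File schemaFolder = new File(configDirectory, "schema");
  if (schemaFolder.isDirectory()) {
    // Concatenates every config/schema file into config/upgrade/schema.ldif.<revision>.
    updateConfigUpgradeSchemaFile(schemaFolder, revision);
  }
}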
/**
 * Updates the config file during the upgrade process.
 *
 * @param configPath The original path to the file.
 * @param filter The filter used to select entries. Only useful for the modify change type.
 * @param changeType The change type which must be applied to the LDIF lines.
 * @param ldifLines The change record LDIF lines. For the ADD change type, the first line
 *     must be the dn. For the DELETE change type, the first and only line must be the dn.
 * @throws IOException If an exception occurs during the input/output operations.
 * @return The number of changes that were applied.
 */
static int updateConfigFile(
    final String configPath,
    final Filter filter,
    final ChangeOperationType changeType,
    final String... ldifLines)
    throws IOException {
  final File original = new File(configPath);
  final File copyConfig = File.createTempFile("copyConfig", ".tmp", original.getParentFile());

  int changeCount = 0;
  LDIFEntryReader entryReader = null;
  LDIFEntryWriter writer = null;
  try {
    final Schema schema = getUpgradeSchema();
    entryReader = new LDIFEntryReader(new FileInputStream(configPath)).setSchema(schema);

    writer = new LDIFEntryWriter(new FileOutputStream(copyConfig));
    writer.setWrapColumn(80);

    // Writes the header on the new file.
    writer.writeComment(INFO_CONFIG_FILE_HEADER.get());
    writer.setWrapColumn(0);

    boolean entryAlreadyExist = false;
    DN ldifDN = null;
    if (filter == null && (changeType == ADD || changeType == DELETE)) {
      // The first line should start with dn:
      ldifDN = DN.valueOf(ldifLines[0].replaceFirst("dn: ", ""));
    }
    final Filter f = filter != null ? filter : Filter.alwaysFalse();
    final Matcher matcher = f.matcher(schema);
    while (entryReader.hasNext()) {
      Entry entry = entryReader.readEntry();
      final DN entryDN = entry.getName();
      // Searching for the related entries.
      if (changeType == MODIFY && matcher.matches(entry) == ConditionResult.TRUE) {
        try {
          final ModifyRequest mr =
              Requests.newModifyRequest(readLDIFLines(entryDN, changeType, ldifLines));
          entry = Entries.modifyEntryPermissive(entry, mr.getModifications());
          changeCount++;
          logger.debug(
              LocalizableMessage.raw("The following entry has been modified : %s", entryDN));
        } catch (Exception ex) {
          logger.error(LocalizableMessage.raw(ex.getMessage()));
        }
      }

      if (entryDN.equals(ldifDN)) {
        logger.debug(LocalizableMessage.raw("Entry %s found", entryDN));
        entryAlreadyExist = true;

        if (changeType == DELETE) {
          entry = null;
          changeCount++;
          logger.debug(
              LocalizableMessage.raw("The following entry has been deleted : %s", entryDN));
        }
      }

      if (entry != null) {
        writer.writeEntry(entry);
      }
    }

    if (changeType == ADD && !entryAlreadyExist) {
      final AddRequest ar = Requests.newAddRequest(ldifLines);
      writer.writeEntry(ar);
      logger.debug(
          LocalizableMessage.raw(
              "Entry successfully added %s in %s", ldifDN, original.getAbsolutePath()));
      changeCount++;
    }
  } catch (Exception ex) {
    throw new IOException(ex.getMessage(), ex);
  } finally {
    // The reader and writer must be closed before renaming the files;
    // otherwise renaming causes exceptions under Windows.
    StaticUtils.close(entryReader, writer);
  }

  try {
    // Renames the temporary copy, overwriting the previous file.
    rename(copyConfig, new File(configPath));
  } catch (IOException e) {
    logger.error(LocalizableMessage.raw(e.getMessage()));
    deleteRecursively(original);
    throw e;
  }

  return changeCount;
}
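A hedged usage sketch of the calling convention documented above (dn first for ADD, a lone dn for DELETE); the helper name, the config path, and the example entry are invented for illustration.

// Hypothetical helper in the same class; the example entry is invented, and ADD/DELETE
// are assumed to be the statically imported ChangeOperationType constants used by
// updateConfigFile.
static int applyExampleConfigChanges(final String configPath) throws IOException {
  // ADD: no filter is needed and the first LDIF line must be the dn.
  int changes = updateConfigFile(
      configPath,
      null,
      ADD,
      "dn: cn=Example Task,cn=config",
      "objectClass: top",
      "cn: Example Task");

  // DELETE: the first and only line must be the dn of the entry to remove.
  changes += updateConfigFile(
      configPath,
      null,
      DELETE,
      "dn: cn=Example Task,cn=config");

  return changes;
}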