Example #1
  /** Test for escaped characters in templates, check LDIF output. */
  @Test(
      dataProvider = "templatesToTestLDIFOutput",
      dependsOnMethods = {"testParsingEscapeCharInTemplate"})
  public void testLDIFOutputFromTemplate(
      String testName, String[] lines, String attrName, String expectedValue) throws Exception {
    File tmpFile = File.createTempFile(testName, "out.ldif");
    tmpFile.deleteOnExit();
    String outLdifFilePath = tmpFile.getAbsolutePath();

    LdifFileWriter.makeLdif(outLdifFilePath, resourcePath, lines);

    LDIFImportConfig ldifConfig = new LDIFImportConfig(outLdifFilePath);
    ldifConfig.setValidateSchema(false);
    LDIFReader reader = new LDIFReader(ldifConfig);
    Entry top = reader.readEntry();
    Entry e = reader.readEntry();
    reader.close();

    assertNotNull(top);
    assertNotNull(e);
    List<Attribute> attrs = e.getAttribute(attrName);
    assertFalse(attrs.isEmpty());
    Attribute a = attrs.get(0);
    Attribute expectedRes = Attributes.create(attrName, expectedValue);
    assertEquals(a, expectedRes);
  }
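
The "templatesToTestLDIFOutput" data provider referenced above is not shown on this page. As a rough sketch, one hypothetical row illustrating the (testName, lines, attrName, expectedValue) shape it must supply could look like the following; the template body simply mirrors the escape syntax used in example #3.

  // Hypothetical data-provider row: escaped braces in the template must end up
  // as literal characters in the generated entry's cn value.
  @DataProvider(name = "templatesToTestLDIFOutput")
  public Object[][] templatesToTestLDIFOutput() {
    return new Object[][] {
      {
        "escapedBraces",
        new String[] {
          "branch: dc=test",
          "subordinateTemplate: templateWithEscape:1",
          "",
          "template: templateWithEscape",
          "rdnAttr: uid",
          "objectclass: inetOrgPerson",
          "uid: testEntry",
          "sn: Bar",
          "cn: Foo \\{1\\}{sn}",
          "",
        },
        "cn",
        "Foo {1}Bar"
      },
    };
  }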
Example #2
 /**
  * Retrieves the backends from the current configuration file. Only enabled backends are listed,
  * since no operation should be performed on a disabled backend.
  *
  * @return A list containing the base DNs of the enabled pluggable backends.
  */
 static List<String> getIndexedBackendsFromConfig() {
   final SearchRequest sr =
       Requests.newSearchRequest(
           "",
           SearchScope.WHOLE_SUBTREE,
           "(&(objectclass=ds-cfg-pluggable-backend)(ds-cfg-enabled=true))",
           "ds-cfg-base-dn");
   final List<String> listBackends = new LinkedList<>();
   try (final EntryReader entryReader = searchConfigFile(sr)) {
     while (entryReader.hasNext()) {
       final Entry entry = entryReader.readEntry();
       listBackends.addAll(entry.parseAttribute("ds-cfg-base-dn").asSetOfString());
     }
   } catch (Exception ex) {
     logger.error(LocalizableMessage.raw(ex.getMessage()));
   }
   return listBackends;
 }
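
A minimal usage sketch for the method above; the logging loop is hypothetical and only shows that the returned list holds one base-DN string per enabled pluggable backend.

 // Hypothetical caller: log every base DN belonging to an enabled pluggable
 // backend, as returned by getIndexedBackendsFromConfig().
 for (final String baseDN : getIndexedBackendsFromConfig()) {
   logger.debug(LocalizableMessage.raw(
       String.format("Enabled pluggable backend found for base DN %s", baseDN)));
 }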
Example #3
  /**
   * Test for escaped characters in templates, check the LDIF output when the template combines
   * escaped characters and variables.
   */
  @Test(dependsOnMethods = {"testParsingEscapeCharInTemplate"})
  public void testOutputCombineEscapeCharInTemplate() throws Exception {
    String[] lines = {
      "branch: dc=test",
      "subordinateTemplate: templateWithEscape:1",
      "",
      "template: templateWithEscape",
      "rdnAttr: uid",
      "objectclass: inetOrgPerson",
      "uid: testEntry",
      "sn: Bar",
      // The value below combines a variable, a random tag and escaped characters.
      // The resulting value is "Foo <?>{1}Bar", where ? is a letter from [A-Z].
      "cn: Foo \\<<random:chars:ABCDEFGHIJKLMNOPQRSTUVWXYZ:1>\\>\\{1\\}{sn}",
      "",
    };

    File tmpFile = File.createTempFile("combineEscapeChar", "out.ldif");
    tmpFile.deleteOnExit();
    String outLdifFilePath = tmpFile.getAbsolutePath();

    LdifFileWriter.makeLdif(outLdifFilePath, resourcePath, lines);

    LDIFImportConfig ldifConfig = new LDIFImportConfig(outLdifFilePath);
    ldifConfig.setValidateSchema(false);
    LDIFReader reader = new LDIFReader(ldifConfig);
    Entry top = reader.readEntry();
    Entry e = reader.readEntry();
    reader.close();

    assertNotNull(top);
    assertNotNull(e);
    List<Attribute> attrs = e.getAttribute("cn");
    assertFalse(attrs.isEmpty());
    Attribute a = attrs.get(0);
    assertTrue(
        a.iterator().next().toString().matches("Foo <[A-Z]>\\{1\\}Bar"),
        "cn value doesn't match the expected value");
  }
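
To make the escape handling above easier to follow, here is a small, purely illustrative check of the same pattern against a fixed sample value, assuming the random tag happened to produce the letter "K".

    // Illustration only: "\<", "\>", "\{" and "\}" are emitted literally, the
    // <random:chars:...> tag becomes one upper-case letter, and {sn} expands
    // to the entry's sn value ("Bar"), so a generated cn looks like this sample.
    String sampleGeneratedCn = "Foo <K>{1}Bar";
    assertTrue(sampleGeneratedCn.matches("Foo <[A-Z]>\\{1\\}Bar"));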
Example #4
 /**
  * Returns the definition of the selected attribute / object class OID.
  *
  * @param schemaEntry The selected schema entry to search on.
  * @param type The type of element to search for ("objectClasses" or "attributeTypes").
  * @param oid The OID of the element to search for.
  * @return The byte string definition of the element.
  */
 private static ByteString getSchemaElement(
     final Entry schemaEntry, final String type, final String oid) {
   final Attribute attribute = schemaEntry.getAttribute(type);
   final MatchingRule mrule = CoreSchema.getObjectIdentifierFirstComponentMatchingRule();
   Assertion assertion;
   try {
     assertion = mrule.getAssertion(ByteString.valueOfUtf8(oid));
     for (final ByteString value : attribute) {
       final ByteString nvalue = mrule.normalizeAttributeValue(value);
       if (assertion.matches(nvalue).toBoolean()) {
         return value;
       }
     }
   } catch (DecodeException e) {
     throw new IllegalStateException(e);
   }
   throw new IllegalStateException(ERR_UPGRADE_UNKNOWN_OC_ATT.get(type, oid).toString());
 }
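
A hypothetical call to the method above, mirroring how updateSchemaFile() (example #6) uses it on a schema entry read from a template file; the numeric OID below is illustrative only.

  // Hypothetical usage: retrieve the raw definition of the attribute type
  // whose first OID component is "1.2.3.4.5.6.7" (illustrative value) from a
  // previously read template schema entry.
  final ByteString definition =
      getSchemaElement(templateSchemaEntry, "attributeTypes", "1.2.3.4.5.6.7");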
Example #5
  /**
   * Creates a new file in the config/upgrade folder. The new file is a concatenation of entries of
   * all files contained in the config/schema folder.
   *
   * @param folder The folder containing the schema files.
   * @param revision The revision number of the current binary version.
    * @throws Exception If the files contained in the schema folder cannot be read, or if a file
    *     contains errors.
   */
  static void updateConfigUpgradeSchemaFile(final File folder, final String revision)
      throws Exception {
    // We need to upgrade the schema.ldif.<rev> file contained in the
    // config/upgrade folder; otherwise, we cannot enable the backend at the
    // server's start. We need to read all the files contained in config/schema
    // and add all attribute / object classes to this new super entry, which
    // will be read at start-up.
    Entry theNewSchemaEntry = new LinkedHashMapEntry();
    LDIFEntryReader reader = null;
    LDIFEntryWriter writer = null;
    try {
      if (folder.isDirectory()) {
        final FilenameFilter filter = new SchemaConfigManager.SchemaFileFilter();
        for (final File f : folder.listFiles(filter)) {
          logger.debug(LocalizableMessage.raw(String.format("Processing %s", f.getAbsolutePath())));
          reader = new LDIFEntryReader(new FileInputStream(f));
          try {
            while (reader.hasNext()) {
              final Entry entry = reader.readEntry();
              theNewSchemaEntry.setName(entry.getName());
              for (final Attribute at : entry.getAllAttributes()) {
                theNewSchemaEntry.addAttribute(at);
              }
            }
          } catch (Exception ex) {
            throw new Exception(
                "Error parsing existing schema file " + f.getName() + " - " + ex.getMessage(), ex);
          }
        }

        // Creates a File object representing
        // config/upgrade/schema.ldif.revision which the server creates
        // the first time it starts if there are schema customizations.
        final File destination =
            new File(
                configDirectory,
                Installation.UPGRADE_PATH + File.separator + "schema.ldif." + revision);

        // Checks if the parent directory exists (e.g. the embedded
        // server doesn't seem to provide that folder).
        File parentDirectory = destination.getParentFile();
        if (!parentDirectory.exists()) {
          logger.debug(
              LocalizableMessage.raw(
                  String.format("Parent file of %s doesn't exist", destination.getPath())));

          parentDirectory.mkdirs();

          logger.debug(
              LocalizableMessage.raw(
                  String.format("Parent directory %s created.", parentDirectory.getPath())));
        }
        if (!destination.exists()) {
          destination.createNewFile();
        }

        logger.debug(
            LocalizableMessage.raw(
                String.format("Writing entries in %s.", destination.getAbsolutePath())));

        writer = new LDIFEntryWriter(new FileOutputStream(destination));
        writer.writeEntry(theNewSchemaEntry);

        logger.debug(
            LocalizableMessage.raw(
                String.format(
                    "%s created and completed successfully.", destination.getAbsolutePath())));
      }
    } finally {
      StaticUtils.close(reader, writer);
    }
  }
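
A hypothetical invocation of the method above; the schema folder and the revision string are illustrative, and the call is assumed to run inside a method that declares "throws Exception".

    // Hypothetical call: rebuild config/upgrade/schema.ldif.<revision> from
    // the schema files found under config/schema (revision value illustrative).
    final File schemaFolder = new File(configDirectory, "schema");
    updateConfigUpgradeSchemaFile(schemaFolder, "1234567");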
Example #6
  /**
   * This task adds new attributes / object classes to the specified destination file. The new
   * attributes and object classes must be originally defined in the template file.
   *
   * @param templateFile The file in which the new attribute/object definition can be read.
   * @param destination The file where we want to add the new definitions.
   * @param attributes The attributes that need to be inserted into the destination file.
   * @param objectClasses The object classes that need to be inserted into the destination file.
   * @return The number of attributes / object classes successfully inserted into the destination
   *     file.
   * @throws IOException If an unexpected IO error occurred while reading the entry.
   * @throws IllegalStateException Failure to find an attribute in the template schema indicates
   *     either a programming error (e.g. typo in the attribute name) or template corruption.
   *     Upgrade should stop.
   */
  static int updateSchemaFile(
      final File templateFile,
      final File destination,
      final String[] attributes,
      final String[] objectClasses)
      throws IOException, IllegalStateException {
    int changeCount = 0;
    LDIFEntryReader templateReader = null;
    LDIFEntryReader destinationReader = null;
    LDIFEntryWriter destinationWriter = null;
    File copy = null;
    try {
      templateReader = new LDIFEntryReader(new FileInputStream(templateFile));
      if (!templateReader.hasNext()) {
        // Unless the template is corrupted, this should not happen.
        throw new IOException(
            ERR_UPGRADE_CORRUPTED_TEMPLATE.get(templateFile.getPath()).toString());
      }
      final Entry templateSchemaEntry = templateReader.readEntry();

      destinationReader = new LDIFEntryReader(new FileInputStream(destination));
      if (!destinationReader.hasNext()) {
        // Unless the destination file is corrupted, this should not happen.
        throw new IOException(ERR_UPGRADE_CORRUPTED_TEMPLATE.get(destination.getPath()).toString());
      }
      final Entry destinationSchemaEntry = destinationReader.readEntry();

      if (attributes != null) {
        for (final String att : attributes) {
          final ByteString attributeType =
              getSchemaElement(templateSchemaEntry, "attributeTypes", att);
          destinationSchemaEntry.getAttribute("attributeTypes").add(attributeType);
          changeCount++;
          logger.debug(LocalizableMessage.raw(String.format("Added %s", attributeType)));
        }
      }

      if (objectClasses != null) {
        for (final String oc : objectClasses) {
          final ByteString objectClass = getSchemaElement(templateSchemaEntry, "objectClasses", oc);
          destinationSchemaEntry.getAttribute("objectClasses").add(objectClass);
          changeCount++;
          logger.trace("Added %s", objectClass);
        }
      }

      // Then writes the new schema entry.
      copy = File.createTempFile("copySchema", ".tmp", destination.getParentFile());
      final FileOutputStream fos = new FileOutputStream(copy);
      destinationWriter = new LDIFEntryWriter(fos);
      destinationWriter.setWrapColumn(79);
      // Copy comments to fos (get License and first comments only).
      writeFileHeaderComments(templateFile, destinationWriter);
      // Writes the entry after.
      destinationWriter.writeEntry(destinationSchemaEntry);
    } finally {
      // The readers and writer must be closed before renaming the files.
      // Otherwise it causes exceptions under Windows.
      StaticUtils.close(templateReader, destinationReader, destinationWriter);
    }

    // Renames the copy to make it the new schema file.
    try {
      rename(copy, destination);
    } catch (IOException e) {
      logger.error(LocalizableMessage.raw(e.getMessage()));
      deleteRecursively(copy);
      throw e;
    }

    return changeCount;
  }
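
A hypothetical call to updateSchemaFile(); the file paths and element OIDs are illustrative, and the call is assumed to run in a context that handles the declared IOException.

    // Hypothetical usage: copy one attribute type and one object class from a
    // template schema file into the server's schema file (paths and OIDs are
    // illustrative only).
    final File template = new File("template/config/schema/02-config.ldif");
    final File liveSchema = new File(configDirectory, "schema/02-config.ldif");
    final int added =
        updateSchemaFile(
            template,
            liveSchema,
            new String[] {"1.2.3.4.5.6.7"},
            new String[] {"1.2.3.4.5.6.8"});
    logger.debug(LocalizableMessage.raw(String.format("%d schema element(s) added", added)));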
Example #7
  /**
   * Updates the config file during the upgrade process.
   *
   * @param configPath The original path to the file.
   * @param filter The filter to select entries. Only useful for modify change type.
   * @param changeType The change type which must be applied to ldif lines.
   * @param ldifLines The change record ldif lines. For ADD change type, the first line must be the
   *     dn. For DELETE change type, the first and only line must be the dn.
   * @throws IOException If an I/O error occurs while reading or writing the configuration file.
   * @return The number of changes that have been applied.
   */
  static int updateConfigFile(
      final String configPath,
      final Filter filter,
      final ChangeOperationType changeType,
      final String... ldifLines)
      throws IOException {
    final File original = new File(configPath);
    final File copyConfig = File.createTempFile("copyConfig", ".tmp", original.getParentFile());

    int changeCount = 0;
    LDIFEntryReader entryReader = null;
    LDIFEntryWriter writer = null;
    try {
      final Schema schema = getUpgradeSchema();
      entryReader = new LDIFEntryReader(new FileInputStream(configPath)).setSchema(schema);

      writer = new LDIFEntryWriter(new FileOutputStream(copyConfig));
      writer.setWrapColumn(80);

      // Writes the header on the new file.
      writer.writeComment(INFO_CONFIG_FILE_HEADER.get());
      writer.setWrapColumn(0);

      boolean entryAlreadyExist = false;
      DN ldifDN = null;
      if (filter == null && (changeType == ADD || changeType == DELETE)) {
        // The first line should start with dn:
        ldifDN = DN.valueOf(ldifLines[0].replaceFirst("dn: ", ""));
      }
      final Filter f = filter != null ? filter : Filter.alwaysFalse();
      final Matcher matcher = f.matcher(schema);
      while (entryReader.hasNext()) {
        Entry entry = entryReader.readEntry();
        final DN entryDN = entry.getName();
        // Searching for the related entries
        if (changeType == MODIFY && matcher.matches(entry) == ConditionResult.TRUE) {
          try {
            final ModifyRequest mr =
                Requests.newModifyRequest(readLDIFLines(entryDN, changeType, ldifLines));
            entry = Entries.modifyEntryPermissive(entry, mr.getModifications());
            changeCount++;
            logger.debug(
                LocalizableMessage.raw("The following entry has been modified : %s", entryDN));
          } catch (Exception ex) {
            logger.error(LocalizableMessage.raw(ex.getMessage()));
          }
        }

        if (entryDN.equals(ldifDN)) {
          logger.debug(LocalizableMessage.raw("Entry %s found", entryDN));
          entryAlreadyExist = true;

          if (changeType == DELETE) {
            entry = null;
            changeCount++;
            logger.debug(
                LocalizableMessage.raw("The following entry has been deleted : %s", entryDN));
          }
        }

        if (entry != null) {
          writer.writeEntry(entry);
        }
      }

      if (changeType == ADD && !entryAlreadyExist) {
        final AddRequest ar = Requests.newAddRequest(ldifLines);
        writer.writeEntry(ar);
        logger.debug(
            LocalizableMessage.raw(
                "Entry successfully added %s in %s", ldifDN, original.getAbsolutePath()));
        changeCount++;
      }
    } catch (Exception ex) {
      throw new IOException(ex.getMessage());
    } finally {
      // The reader and writer must be closed before renaming the files.
      // Otherwise it causes exceptions under Windows.
      StaticUtils.close(entryReader, writer);
    }

    try {
      // Renaming the file, overwriting previous one.
      rename(copyConfig, new File(configPath));
    } catch (IOException e) {
      logger.error(LocalizableMessage.raw(e.getMessage()));
      deleteRecursively(original);
      throw e;
    }

    return changeCount;
  }
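
A hypothetical ADD usage of updateConfigFile(); the entry below is invented and only illustrates the documented contract that, for the ADD change type, the first ldif line must be the dn and the filter is unused (null). The call is assumed to run in a context that handles the declared IOException.

    // Hypothetical usage: add a made-up configuration entry to config.ldif.
    // For ADD the filter is not used, so null is passed, and the first ldif
    // line must be the dn of the entry to create.
    final int changes =
        updateConfigFile(
            new File(configDirectory, "config.ldif").getAbsolutePath(),
            null,
            ADD,
            "dn: cn=Example Plugin,cn=Plugins,cn=config",
            "objectClass: top",
            "objectClass: ds-cfg-plugin",
            "cn: Example Plugin",
            "ds-cfg-enabled: true");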