Example #1
 /**
  * Updates the file in the working tree with content and mode from an entry in the index. The new
  * content is first written to a new temporary file in the same directory as the real file. Then
  * that new file is renamed to the final filename.
  *
  * <p>TODO: this method works directly on File IO, we may need another abstraction (like
  * WorkingTreeIterator). This way we could tell e.g. Eclipse that Files in the workspace got
  * changed
  *
  * @param repo the repository the file belongs to; used to read working-tree options and the file system
  * @param f the file to be modified. The parent directory for this file has to exist already
  * @param entry the entry containing new mode and content
  * @param or object reader to use for checkout
  * @throws IOException if an I/O error occurs while writing the file content
  */
 public static void checkoutEntry(
     final Repository repo, File f, DirCacheEntry entry, ObjectReader or) throws IOException {
   ObjectLoader ol = or.open(entry.getObjectId());
   File parentDir = f.getParentFile();
   parentDir.mkdirs();
   File tmpFile = File.createTempFile("._" + f.getName(), null, parentDir); // $NON-NLS-1$
   WorkingTreeOptions opt = repo.getConfig().get(WorkingTreeOptions.KEY);
   FileOutputStream rawChannel = new FileOutputStream(tmpFile);
   OutputStream channel;
   if (opt.getAutoCRLF() == AutoCRLF.TRUE) channel = new AutoCRLFOutputStream(rawChannel);
   else channel = rawChannel;
   try {
     ol.copyTo(channel);
   } finally {
     channel.close();
   }
   FS fs = repo.getFS();
   if (opt.isFileMode() && fs.supportsExecute()) {
     if (FileMode.EXECUTABLE_FILE.equals(entry.getRawMode())) {
       if (!fs.canExecute(tmpFile)) fs.setExecute(tmpFile, true);
     } else {
       if (fs.canExecute(tmpFile)) fs.setExecute(tmpFile, false);
     }
   }
   try {
     FileUtils.rename(tmpFile, f);
   } catch (IOException e) {
     throw new IOException(
         MessageFormat.format(JGitText.get().couldNotWriteFile, tmpFile.getPath(), f.getPath()));
   }
   entry.setLastModified(f.lastModified());
   if (opt.getAutoCRLF() != AutoCRLF.FALSE)
     entry.setLength(f.length()); // AutoCRLF wants on-disk-size
   else entry.setLength((int) ol.getSize());
 }
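A minimal caller sketch (hypothetical setup: an existing Repository named repo whose index contains the path "README.md") showing how checkoutEntry might be driven for a single index entry; the reader is released explicitly, matching the older JGit API used throughout these examples:

  DirCache dc = repo.readDirCache();
  DirCacheEntry entry = dc.getEntry("README.md"); // hypothetical path
  if (entry != null) {
    ObjectReader or = repo.newObjectReader();
    try {
      File f = new File(repo.getWorkTree(), entry.getPathString());
      checkoutEntry(repo, f, entry, or);
    } finally {
      or.release();
    }
  }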
Example #2
File: RevObject.java Project: saces/jgit
 final byte[] loadCanonical(final RevWalk walk)
     throws IOException, MissingObjectException, IncorrectObjectTypeException,
         CorruptObjectException {
   final ObjectLoader ldr = walk.db.openObject(walk.curs, this);
   if (ldr == null) throw new MissingObjectException(this, getType());
   final byte[] data = ldr.getCachedBytes();
   if (getType() != ldr.getType()) throw new IncorrectObjectTypeException(this, getType());
   return data;
 }
Example #3
  /**
   * Gets the type of the object with the specified ID (either a tree, tag, blob, or commit).
   *
   * @param id the object ID
   * @return the object type, one of the {@code OBJ_*} constants in the {@link Constants} class.
   */
  public int typeOfObject(ObjectId id) {
    ObjectReader reader = repository.newObjectReader();

    try {
      ObjectLoader loader = reader.open(id);
      return loader.getType();
    } catch (Exception e) {
      return Constants.OBJ_BAD;
    } finally {
      reader.release();
    }
  }
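A short usage sketch (hypothetical caller; assumes the surrounding class exposing typeOfObject as above) that branches on the reported type constant:

  int type = typeOfObject(id);
  switch (type) {
    case Constants.OBJ_COMMIT:
      System.out.println(id.name() + " is a commit");
      break;
    case Constants.OBJ_BLOB:
      System.out.println(id.name() + " is a blob");
      break;
    case Constants.OBJ_BAD:
      System.out.println(id.name() + " could not be read");
      break;
    default:
      System.out.println(id.name() + " is a tree or a tag (type " + type + ")");
  }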
Example #4
  /**
   * Writes the contents of the specified blob to an output stream.
   *
   * @param objectId the id of the blob
   * @param stream the stream to write the blob to
   */
  public void writeBlobToStream(ObjectId objectId, OutputStream stream) {
    ObjectReader reader = repository.newObjectReader();

    try {
      ObjectLoader loader = reader.open(objectId);
      loader.copyTo(stream);
    } catch (IOException e) {
      log.error("An exception occurred while getting the blob " + "contents: ", e);
    } finally {
      reader.release();
    }
  }
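A possible call site (hypothetical target file) that streams a blob straight to disk without holding it in memory:

  try (OutputStream out = new FileOutputStream("blob.bin")) { // hypothetical file name
    writeBlobToStream(objectId, out);
  } catch (IOException e) {
    log.error("Could not open the target file", e);
  }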
Example #5
  protected byte[] readFile(String fileName) throws IOException {
    if (revision == null) {
      return new byte[] {};
    }

    TreeWalk tw = TreeWalk.forPath(reader, fileName, revision.getTree());
    if (tw != null) {
      ObjectLoader obj = reader.open(tw.getObjectId(0), Constants.OBJ_BLOB);
      return obj.getCachedBytes(Integer.MAX_VALUE);

    } else {
      return new byte[] {};
    }
  }
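A small usage sketch (hypothetical path; assumes the reader and revision fields used above) that turns the returned bytes into text:

  byte[] raw = readFile("docs/README.md"); // hypothetical path
  String text = new String(raw, StandardCharsets.UTF_8);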
Example #6
 @NotNull
 private String createLocalFile(@NotNull ObjectId id, @NotNull ObjectLoader loader)
     throws IOException {
   // Create LFS stream.
   final File tmpFile = new File(tempPath, UUID.randomUUID().toString());
   final MessageDigest md = createSha256();
   try (InputStream istream = loader.openStream();
       OutputStream ostream = new FileOutputStream(tmpFile)) {
     byte[] buffer = new byte[0x10000];
     while (true) {
       int size = istream.read(buffer);
       if (size <= 0) break;
       ostream.write(buffer, 0, size);
       md.update(buffer, 0, size);
     }
   }
   final String hash = new String(Hex.encodeHex(md.digest(), true));
   cacheSha256.putIfAbsent(id.name(), hash);
   cache.commit();
   // Rename file.
   final File lfsFile =
       new File(
           basePath,
           "lfs/objects/" + hash.substring(0, 2) + "/" + hash.substring(2, 4) + "/" + hash);
   makeParentDirs(lfsFile.getParentFile());
   if (lfsFile.exists()) {
     if (!tmpFile.delete()) {
       log.warn("Can't delete temporary file: {}", lfsFile.getAbsolutePath());
     }
   } else if (!tmpFile.renameTo(lfsFile)) {
     throw new IOException("Can't rename file: " + tmpFile + " -> " + lfsFile);
   }
   return hash;
 }
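The createSha256() helper called above is not part of the snippet; a plausible sketch of it (SHA-256 is a mandatory JCA algorithm, so the checked exception can be rethrown unchecked):

  private static MessageDigest createSha256() {
    try {
      return MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException("SHA-256 must be available in every JRE", e);
    }
  }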
Example #7
 @NotNull
 private String createRemoteFile(
     @NotNull ObjectId id, @NotNull ObjectLoader loader, @NotNull Uploader uploader)
     throws IOException {
   // Create LFS stream.
   final String hash;
   final String cached = cacheSha256.get(id.name());
   long size = 0;
   if (cached == null) {
     final MessageDigest md = createSha256();
     try (InputStream istream = loader.openStream()) {
       byte[] buffer = new byte[0x10000];
       while (true) {
         int read = istream.read(buffer);
         if (read <= 0) break;
         md.update(buffer, 0, read);
         size += read;
       }
     }
     hash = new String(Hex.encodeHex(md.digest(), true));
     cacheSha256.put(id.name(), hash);
     cache.commit();
   } else {
     hash = cached;
   }
   uploader.upload(id, new Meta(hash, size));
   return hash;
 }
Example #8
 private static void copy(TemporaryBuffer.Heap tinyPack, ObjectLoader ldr) throws IOException {
   final byte[] buf = new byte[64];
   final byte[] content = ldr.getCachedBytes();
   int dataLength = content.length;
   int nextLength = dataLength >>> 4;
   int size = 0;
   buf[size++] =
       (byte) ((nextLength > 0 ? 0x80 : 0x00) | (ldr.getType() << 4) | (dataLength & 0x0F));
   dataLength = nextLength;
   while (dataLength > 0) {
     nextLength >>>= 7;
     buf[size++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (dataLength & 0x7F));
     dataLength = nextLength;
   }
   tinyPack.write(buf, 0, size);
   deflate(tinyPack, content);
 }
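The header written above follows the pack object header layout: the first byte carries the object type in bits 4-6 and the low four bits of the inflated size, and every byte whose high bit is set signals that another size byte with seven more bits follows. A hypothetical decoding sketch, assuming hdr holds the bytes that were written into tinyPack:

  int c = hdr[0] & 0xff;
  int type = (c >> 4) & 7;
  long inflatedSize = c & 0x0f;
  int shift = 4;
  int p = 1;
  while ((c & 0x80) != 0) {
    c = hdr[p++] & 0xff;
    inflatedSize += ((long) (c & 0x7f)) << shift;
    shift += 7;
  }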
Example #9
  /**
   * Gets the contents of the specified blob as raw data.
   *
   * @param objectId the id of the blob
   * @return an {@code NSData} object containing the raw data from the blob
   */
  public NSData contentForBlob(ObjectId objectId) {
    ObjectReader reader = repository.newObjectReader();

    try {
      ObjectLoader loader = reader.open(objectId);

      NSMutableDataOutputStream output = new NSMutableDataOutputStream();
      loader.copyTo(output);
      output.close();

      return output.data();
    } catch (IOException e) {
      log.error("An exception occurred while getting the blob " + "contents: ", e);
      return null;
    } finally {
      reader.release();
    }
  }
Example #10
  private byte[] open(DiffEntry.Side side, DiffEntry entry) throws IOException {
    if (entry.getMode(side) == FileMode.MISSING) return EMPTY;

    if (entry.getMode(side).getObjectType() != Constants.OBJ_BLOB) return EMPTY;

    if (isBinary()) return BINARY;

    AbbreviatedObjectId id = entry.getId(side);
    if (!id.isComplete()) {
      Collection<ObjectId> ids = reader.resolve(id);
      if (ids.size() == 1) {
        id = AbbreviatedObjectId.fromObjectId(ids.iterator().next());
        switch (side) {
          case OLD:
            entry.oldId = id;
            break;
          case NEW:
            entry.newId = id;
            break;
        }
      } else if (ids.size() == 0) throw new MissingObjectException(id, Constants.OBJ_BLOB);
      else throw new AmbiguousObjectException(id, ids);
    }

    try {
      ObjectLoader ldr = source.open(side, entry);
      return ldr.getBytes(binaryFileThreshold);

    } catch (LargeObjectException.ExceedsLimit overLimit) {
      return BINARY;

    } catch (LargeObjectException.ExceedsByteArrayLimit overLimit) {
      return BINARY;

    } catch (LargeObjectException.OutOfMemory tooBig) {
      return BINARY;

    } catch (LargeObjectException tooBig) {
      tooBig.setObjectId(id.toObjectId());
      throw tooBig;
    }
  }
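A related pattern: instead of catching LargeObjectException, a caller can probe ObjectLoader.isLarge() up front and fall back to streaming. A hedged sketch (hypothetical blobId; assumes an open ObjectReader named reader):

  ObjectLoader ldr = reader.open(blobId, Constants.OBJ_BLOB);
  if (ldr.isLarge()) {
    ObjectStream in = ldr.openStream();
    try {
      // consume the blob incrementally ...
    } finally {
      in.close();
    }
  } else {
    byte[] bytes = ldr.getCachedBytes();
    // small enough to keep fully in memory ...
  }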
Example #11
 public static JSONArray getListEntries(
     TreeWalk treeWalk, Repository repo, Git git, Ref head, String filePath, String projectName)
     throws MissingObjectException, IncorrectObjectTypeException, CorruptObjectException,
         IOException {
   JSONArray contents = new JSONArray();
   do {
     if (treeWalk.isSubtree()) {
       String test = new String(treeWalk.getRawPath());
       if (test.length() /*treeWalk.getPathLength()*/ > filePath.length()) {
         listEntry(
             treeWalk.getNameString(),
             "dir",
             "0",
             treeWalk.getPathString(),
             projectName,
             head.getName(),
             git,
             contents);
       }
       if (test.length() /*treeWalk.getPathLength()*/ <= filePath.length()) {
         treeWalk.enterSubtree();
       }
     } else {
       ObjectId objId = treeWalk.getObjectId(0);
       ObjectLoader loader = repo.open(objId);
       long size = loader.getSize();
       listEntry(
           treeWalk.getNameString(),
           "file",
           Long.toString(size),
           treeWalk.getPathString(),
           projectName,
           head.getName(),
           git,
           contents);
     }
   } while (treeWalk.next());
   return contents;
 }
Example #12
  /**
   * Push a candidate object onto the generator's traversal stack.
   *
   * <p>Candidates should be pushed in history order from oldest-to-newest. Applications should push
   * the starting commit first, then the index revision (if the index is interesting), and finally
   * the working tree copy (if the working tree is interesting).
   *
   * @param description description of the blob revision, such as "Working Tree".
   * @param id may be a commit or a blob.
   * @return {@code this}
   * @throws IOException the repository cannot be read.
   */
  public BlameGenerator push(String description, AnyObjectId id) throws IOException {
    ObjectLoader ldr = reader.open(id);
    if (ldr.getType() == OBJ_BLOB) {
      if (description == null) description = JGitText.get().blameNotCommittedYet;
      BlobCandidate c = new BlobCandidate(description, resultPath);
      c.sourceBlob = id.toObjectId();
      c.sourceText = new RawText(ldr.getCachedBytes(Integer.MAX_VALUE));
      c.regionList = new Region(0, 0, c.sourceText.size());
      remaining = c.sourceText.size();
      push(c);
      return this;
    }

    RevCommit commit = revPool.parseCommit(id);
    if (!find(commit, resultPath)) return this;

    Candidate c = new Candidate(commit, resultPath);
    c.sourceBlob = idBuf.toObjectId();
    c.loadText(reader);
    c.regionList = new Region(0, 0, c.sourceText.size());
    remaining = c.sourceText.size();
    push(c);
    return this;
  }
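For context, a hypothetical driver for this generator (repository and path are placeholders; release() matches the older JGit API used in these examples, newer versions use close()):

  BlameGenerator gen = new BlameGenerator(repo, "src/Main.java"); // hypothetical repo and path
  gen.push(null, repo.resolve(Constants.HEAD));
  while (gen.next()) {
    System.out.println("lines " + gen.getResultStart() + "-" + gen.getResultEnd()
        + " blamed on " + gen.getSourceCommit().name());
  }
  gen.release();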
Example #13
 public static GitObject createGitObject(RepositoryData repositoryData, String hash)
     throws MissingObjectException, IOException {
   Repository repository = repositoryData.getRepository();
   ObjectId id = repository.resolve(hash);
   ObjectLoader loader = repository.open(id);
   GitObject object = null;
   switch (loader.getType()) {
     case Constants.OBJ_COMMIT:
       object = new Commit(hash, new String(loader.getCachedBytes()));
       break;
     case Constants.OBJ_TREE:
       object = new Tree(hash, loader.getBytes());
       break;
     case Constants.OBJ_BLOB:
       object = new Blob(hash, new String(loader.getCachedBytes()));
       break;
     case Constants.OBJ_TAG:
       object = new Tag(hash, new String(loader.getCachedBytes()));
       break;
   }
   object.setRepositoryData(repositoryData);
   return object;
 }
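A brief caller sketch (the Commit, Tree, Blob and Tag wrappers are this project's own classes; the hash is a placeholder):

  GitObject obj = createGitObject(repositoryData, "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3");
  if (obj instanceof Commit) {
    // inspect commit metadata ...
  } else if (obj instanceof Blob) {
    // read the file content ...
  }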
Example #14
  @Override
  protected void doGet(final HttpServletRequest req, final HttpServletResponse rsp)
      throws IOException {
    String keyStr = req.getPathInfo();

    // We shouldn't have to do this extra decode pass, but somehow we
    // are now receiving our "^1" suffix as "%5E1", which confuses us
    // downstream. Other times we get our embedded "," as "%2C", which
    // is equally bad. And yet when these happen a "%2F" is left as-is,
    // rather than escaped as "%252F", which makes me feel really really
    // uncomfortable with a blind decode right here.
    //
    keyStr = URLDecoder.decode(keyStr, "UTF-8");

    if (!keyStr.startsWith("/")) {
      rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
      return;
    }
    keyStr = keyStr.substring(1);

    final Patch.Key patchKey;
    final int side;
    {
      final int c = keyStr.lastIndexOf('^');
      if (c == 0) {
        rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
      }

      if (c < 0) {
        side = 0;

      } else {
        try {
          side = Integer.parseInt(keyStr.substring(c + 1));
          keyStr = keyStr.substring(0, c);
        } catch (NumberFormatException e) {
          rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
          return;
        }
      }

      try {
        patchKey = Patch.Key.parse(keyStr);
      } catch (NumberFormatException e) {
        rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
      }
    }

    final Change.Id changeId = patchKey.getParentKey().getParentKey();
    final Project project;
    final PatchSet patchSet;
    try {
      final ReviewDb db = requestDb.get();
      final ChangeControl control = changeControl.validateFor(changeId);

      project = control.getProject();
      patchSet = db.patchSets().get(patchKey.getParentKey());
      if (patchSet == null) {
        rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
      }
    } catch (NoSuchChangeException e) {
      rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
      return;
    } catch (OrmException e) {
      getServletContext().log("Cannot query database", e);
      rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
      return;
    }

    final Repository repo;
    try {
      repo = repoManager.openRepository(project.getNameKey());
    } catch (RepositoryNotFoundException e) {
      getServletContext().log("Cannot open repository", e);
      rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
      return;
    }

    final ObjectLoader blobLoader;
    final RevCommit fromCommit;
    final String suffix;
    final String path = patchKey.getFileName();
    try {
      final ObjectReader reader = repo.newObjectReader();
      try {
        final RevWalk rw = new RevWalk(reader);
        final RevCommit c;
        final TreeWalk tw;

        c = rw.parseCommit(ObjectId.fromString(patchSet.getRevision().get()));
        if (side == 0) {
          fromCommit = c;
          suffix = "new";

        } else if (1 <= side && side - 1 < c.getParentCount()) {
          fromCommit = rw.parseCommit(c.getParent(side - 1));
          if (c.getParentCount() == 1) {
            suffix = "old";
          } else {
            suffix = "old" + side;
          }

        } else {
          rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
          return;
        }

        tw = TreeWalk.forPath(reader, path, fromCommit.getTree());
        if (tw == null) {
          rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
          return;
        }

        if (tw.getFileMode(0).getObjectType() == Constants.OBJ_BLOB) {
          blobLoader = reader.open(tw.getObjectId(0), Constants.OBJ_BLOB);

        } else {
          rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
          return;
        }
      } finally {
        reader.release();
      }
    } catch (IOException e) {
      getServletContext().log("Cannot read repository", e);
      rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
      return;
    } catch (RuntimeException e) {
      getServletContext().log("Cannot read repository", e);
      rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
      return;
    } finally {
      repo.close();
    }

    final byte[] raw = blobLoader.isLarge() ? null : blobLoader.getCachedBytes();
    final long when = fromCommit.getCommitTime() * 1000L;

    rsp.setDateHeader("Last-Modified", when);
    rsp.setDateHeader("Expires", 0L);
    rsp.setHeader("Pragma", "no-cache");
    rsp.setHeader("Cache-Control", "no-cache, must-revalidate");

    OutputStream out;
    ZipOutputStream zo;

    final MimeType contentType = registry.getMimeType(path, raw);
    if (!registry.isSafeInline(contentType)) {
      // The content may not be safe to transmit inline, as a browser might
      // interpret it as HTML or JavaScript hosted by this site. Such code
      // might then run in the site's security domain, and may be able to use
      // the user's cookies to perform unauthorized actions.
      //
      // Usually, wrapping the content into a ZIP file forces the browser to
      // save the content to the local system instead.
      //

      rsp.setContentType(ZIP.toString());
      rsp.setHeader(
          "Content-Disposition",
          "attachment; filename=\"" + safeFileName(path, suffix) + ".zip" + "\"");

      zo = new ZipOutputStream(rsp.getOutputStream());

      final ZipEntry e = new ZipEntry(safeFileName(path, rand(req, suffix)));
      e.setComment(fromCommit.name() + ":" + path);
      e.setSize(blobLoader.getSize());
      e.setTime(when);
      zo.putNextEntry(e);
      out = zo;

    } else {
      rsp.setContentType(contentType.toString());
      rsp.setHeader("Content-Length", "" + blobLoader.getSize());

      out = rsp.getOutputStream();
      zo = null;
    }

    if (raw != null) {
      out.write(raw);
    } else {
      blobLoader.copyTo(out);
    }

    if (zo != null) {
      zo.closeEntry();
    }
    out.close();
  }
Example #15
 private boolean isLfsPointer(@NotNull ObjectLoader loader) {
   return loader.getSize() <= ru.bozaro.gitlfs.pointer.Constants.POINTER_MAX_SIZE
       && Pointer.parsePointer(loader.getBytes()) != null;
 }
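A plausible call site (hypothetical; assumes an ObjectReader named reader and a blob id) that decides whether a blob needs LFS resolution:

  ObjectLoader loader = reader.open(blobId, Constants.OBJ_BLOB);
  if (isLfsPointer(loader)) {
    // resolve the pointer through the LFS store ...
  } else {
    // treat the blob as ordinary content ...
  }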
Example #16
  protected void addInformationForPath(
      Repository repository,
      Git git,
      DocumentWriter writer,
      RevCommit commit,
      String path,
      CallSpecification spec,
      Values values)
      throws GitAPIException, IOException {
    // Make sure the path is in the canonical form we need ...
    if (path.startsWith("/")) {
      if (path.length() == 1) path = "";
      else path = path.substring(1);
    }

    // Now see if we're actually referring to the "jcr:content" node ...
    boolean isContentNode = false;
    if (path.endsWith(JCR_CONTENT_SUFFIX)) {
      isContentNode = true;
      path = path.substring(0, path.length() - JCR_CONTENT_SUFFIX.length());
    }

    // Create the TreeWalk that we'll use to navigate the files/directories ...
    final TreeWalk tw = new TreeWalk(repository);
    tw.addTree(commit.getTree());
    if ("".equals(path)) {
      // This is the top-level directory, so we don't need to pre-walk to find anything ...
      tw.setRecursive(false);
      while (tw.next()) {
        String childName = tw.getNameString();
        String childId = spec.childId(childName);
        writer.addChild(childId, childName);
      }
    } else {
      // We need to first find our path *before* we can walk the children ...
      PathFilter filter = PathFilter.create(path);
      tw.setFilter(filter);
      while (tw.next()) {
        if (filter.isDone(tw)) {
          break;
        } else if (tw.isSubtree()) {
          tw.enterSubtree();
        }
      }
      // Now that the TreeWalk is in the right location given by the 'path', we can examine the node at that path ...
      if (tw.isSubtree()) {
        // The object at the 'path' is a directory, so go into it ...
        tw.enterSubtree();

        // Find the commit in which this folder was last modified ...
        // This may not be terribly efficient, but it seems to work faster on subsequent runs ...
        RevCommit folderCommit = git.log().addPath(path).call().iterator().next();

        // Add folder-related properties ...
        String committer = folderCommit.getCommitterIdent().getName();
        String author = folderCommit.getAuthorIdent().getName();
        DateTime committed = values.dateFrom(folderCommit.getCommitTime());
        writer.setPrimaryType(GitLexicon.FOLDER);
        writer.addProperty(JcrLexicon.CREATED, committed);
        writer.addProperty(JcrLexicon.CREATED_BY, committer);
        writer.addProperty(GitLexicon.OBJECT_ID, folderCommit.getId().name());
        writer.addProperty(GitLexicon.AUTHOR, author);
        writer.addProperty(GitLexicon.COMMITTER, committer);
        writer.addProperty(GitLexicon.COMMITTED, committed);
        writer.addProperty(GitLexicon.TITLE, folderCommit.getShortMessage());

        // And now walk the contents of the directory ...
        while (tw.next()) {
          String childName = tw.getNameString();
          String childId = spec.childId(childName);
          writer.addChild(childId, childName);
        }
      } else {
        // The path specifies a file (or a content node) ...

        // Find the commit in which this folder was last modified ...
        // This may not be terribly efficient, but it seems to work faster on subsequent runs ...
        RevCommit fileCommit = git.log().addPath(path).call().iterator().next();

        // Add file-related properties ...
        String committer = fileCommit.getCommitterIdent().getName();
        String author = fileCommit.getAuthorIdent().getName();
        DateTime committed = values.dateFrom(fileCommit.getCommitTime());
        if (isContentNode) {
          writer.setPrimaryType(GitLexicon.RESOURCE);
          writer.addProperty(JcrLexicon.LAST_MODIFIED, committed);
          writer.addProperty(JcrLexicon.LAST_MODIFIED_BY, committer);
          writer.addProperty(GitLexicon.OBJECT_ID, fileCommit.getId().name());
          writer.addProperty(GitLexicon.AUTHOR, author);
          writer.addProperty(GitLexicon.COMMITTER, committer);
          writer.addProperty(GitLexicon.COMMITTED, committed);
          writer.addProperty(GitLexicon.TITLE, fileCommit.getShortMessage());
          // Create the BinaryValue ...
          ObjectId fileObjectId = tw.getObjectId(0);
          ObjectLoader fileLoader = repository.open(fileObjectId);
          BinaryKey key = new BinaryKey(fileObjectId.getName());
          BinaryValue value = values.binaryFor(key, fileLoader.getSize());
          if (value == null) {
            // It wasn't found in the binary store ...
            if (fileLoader.isLarge()) {
              // Too large to hold in memory, so use the binary store (which reads the file
              // immediately) ...
              value = values.binaryFrom(fileLoader.openStream());
            } else {
              // This is small enough to fit into a byte[], but it still may be pretty big ...
              value =
                  new GitBinaryValue(
                      fileObjectId,
                      fileLoader,
                      connector.getSourceName(),
                      name,
                      connector.getMimeTypeDetector());
            }
          }
          writer.addProperty(JcrLexicon.DATA, value);
          if (connector.includeMimeType()) {
            try {
              String filename =
                  spec.parameter(spec.parameterCount() - 1); // the last is 'jcr:content'
              String mimeType = value.getMimeType(filename);
              if (mimeType != null) writer.addProperty(JcrLexicon.MIMETYPE, mimeType);
            } catch (RepositoryException e) {
              // do nothing
            } catch (IOException e) {
              // do nothing
            }
          }
        } else {
          writer.setPrimaryType(GitLexicon.FILE);
          writer.addProperty(JcrLexicon.CREATED, committed);
          writer.addProperty(JcrLexicon.CREATED_BY, committer);
          writer.addProperty(GitLexicon.OBJECT_ID, fileCommit.getId().name());
          writer.addProperty(GitLexicon.AUTHOR, author);
          writer.addProperty(GitLexicon.COMMITTER, committer);
          writer.addProperty(GitLexicon.COMMITTED, committed);
          writer.addProperty(GitLexicon.TITLE, fileCommit.getShortMessage());

          // Add the "jcr:content" child node ...
          String childId = spec.childId(JCR_CONTENT);
          writer.addChild(childId, JCR_CONTENT);
        }
      }
    }
  }