final byte[] loadCanonical(final RevWalk walk) throws IOException,
        MissingObjectException, IncorrectObjectTypeException,
        CorruptObjectException {
    final ObjectLoader ldr = walk.db.openObject(walk.curs, this);
    if (ldr == null)
        throw new MissingObjectException(this, getType());
    final byte[] data = ldr.getCachedBytes();
    if (getType() != ldr.getType())
        throw new IncorrectObjectTypeException(this, getType());
    return data;
}
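Application code outside the rev-walk internals usually obtains the same canonical bytes through an ObjectReader rather than calling loadCanonical() directly. The sketch below is a minimal illustration of that pattern; the repository handle, the objectId variable, and the pre-4.0 release() call are assumptions of the sketch, not part of the snippet above.

// Minimal sketch (assumed context): read an object's canonical bytes
// through an ObjectReader, the public counterpart of loadCanonical().
ObjectReader reader = repository.newObjectReader();
try {
    ObjectLoader ldr = reader.open(objectId);   // throws MissingObjectException if absent
    byte[] data = ldr.getCachedBytes();         // whole object, inflated in memory
    System.out.println(ldr.getType() + " " + data.length);
} finally {
    reader.release();                           // release() on the older JGit used in these snippets
}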
public static GitObject createGitObject(RepositoryData repositoryData, String hash)
        throws MissingObjectException, IOException {
    Repository repository = repositoryData.getRepository();
    ObjectId id = repository.resolve(hash);
    ObjectLoader loader = repository.open(id);
    GitObject object;
    switch (loader.getType()) {
    case Constants.OBJ_COMMIT:
        object = new Commit(hash, new String(loader.getCachedBytes()));
        break;
    case Constants.OBJ_TREE:
        object = new Tree(hash, loader.getBytes());
        break;
    case Constants.OBJ_BLOB:
        object = new Blob(hash, new String(loader.getCachedBytes()));
        break;
    case Constants.OBJ_TAG:
        object = new Tag(hash, new String(loader.getCachedBytes()));
        break;
    default:
        // Guard against the NullPointerException the original would hit below
        // if an unexpected object type were ever returned.
        throw new IOException("Unexpected object type " + loader.getType() + " for " + hash);
    }
    object.setRepositoryData(repositoryData);
    return object;
}
protected byte[] readFile(String fileName) throws IOException {
    if (revision == null) {
        return new byte[] {};
    }
    TreeWalk tw = TreeWalk.forPath(reader, fileName, revision.getTree());
    if (tw != null) {
        ObjectLoader obj = reader.open(tw.getObjectId(0), Constants.OBJ_BLOB);
        return obj.getCachedBytes(Integer.MAX_VALUE);
    } else {
        return new byte[] {};
    }
}
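readFile() relies on two fields that the snippet does not show: an ObjectReader and the RevCommit whose tree is searched. The sketch below is one plausible way those fields get populated; the repository variable and the "HEAD" revstring are illustrative assumptions only.

// Assumed setup for the fields used by readFile() above (illustrative only).
ObjectReader reader = repository.newObjectReader();
RevWalk walk = new RevWalk(reader);
RevCommit revision = walk.parseCommit(repository.resolve("HEAD"));
// TreeWalk.forPath() returns null when the path does not exist in that tree,
// which is why readFile() falls back to an empty byte array.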
private static void copy(TemporaryBuffer.Heap tinyPack, ObjectLoader ldr)
        throws IOException {
    final byte[] buf = new byte[64];
    final byte[] content = ldr.getCachedBytes();
    int dataLength = content.length;
    int nextLength = dataLength >>> 4;
    int size = 0;
    buf[size++] = (byte) ((nextLength > 0 ? 0x80 : 0x00)
            | (ldr.getType() << 4)
            | (dataLength & 0x0F));
    dataLength = nextLength;
    while (dataLength > 0) {
        nextLength >>>= 7;
        buf[size++] = (byte) ((nextLength > 0 ? 0x80 : 0x00)
                | (dataLength & 0x7F));
        dataLength = nextLength;
    }
    tinyPack.write(buf, 0, size);
    deflate(tinyPack, content);
}
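The header written by copy() is the standard pack object header: the first byte carries a continuation bit, the 3-bit object type, and the low 4 bits of the inflated size; each following byte carries 7 more size bits. As a worked check of that arithmetic (an illustrative example, not taken from the snippet), a 300-byte blob encodes to the two bytes 0xBC 0x12:

// Worked example (illustrative): header for a 300-byte OBJ_BLOB (type 3).
// 300 = 0b1_0010_1100, low 4 bits = 0xC, remaining 300 >>> 4 = 18 = 0x12.
int length = 300;
int next = length >>> 4;
byte first = (byte) ((next > 0 ? 0x80 : 0x00) | (Constants.OBJ_BLOB << 4) | (length & 0x0F));
byte second = (byte) ((next >>> 7 > 0 ? 0x80 : 0x00) | (next & 0x7F));
// first  == (byte) 0xBC   (continuation bit set, type 3, size bits 0xC)
// second == (byte) 0x12   (no continuation bit, remaining 18)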
/**
 * Push a candidate object onto the generator's traversal stack.
 * <p>
 * Candidates should be pushed in history order from oldest-to-newest.
 * Applications should push the starting commit first, then the index
 * revision (if the index is interesting), and finally the working tree
 * copy (if the working tree is interesting).
 *
 * @param description
 *            description of the blob revision, such as "Working Tree".
 * @param id
 *            may be a commit or a blob.
 * @return {@code this}
 * @throws IOException
 *             the repository cannot be read.
 */
public BlameGenerator push(String description, AnyObjectId id)
        throws IOException {
    ObjectLoader ldr = reader.open(id);
    if (ldr.getType() == OBJ_BLOB) {
        if (description == null)
            description = JGitText.get().blameNotCommittedYet;
        BlobCandidate c = new BlobCandidate(description, resultPath);
        c.sourceBlob = id.toObjectId();
        c.sourceText = new RawText(ldr.getCachedBytes(Integer.MAX_VALUE));
        c.regionList = new Region(0, 0, c.sourceText.size());
        remaining = c.sourceText.size();
        push(c);
        return this;
    }

    RevCommit commit = revPool.parseCommit(id);
    if (!find(commit, resultPath))
        return this;

    Candidate c = new Candidate(commit, resultPath);
    c.sourceBlob = idBuf.toObjectId();
    c.loadText(reader);
    c.regionList = new Region(0, 0, c.sourceText.size());
    remaining = c.sourceText.size();
    push(c);
    return this;
}
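A caller would typically construct the generator for one path, push revisions oldest-to-newest as the Javadoc describes, and then pull attributed regions off with next(). The sketch below is a rough illustration under those assumptions; the repository, the starting commit id, and the dirty-blob id are placeholders, and release() reflects the older JGit API used elsewhere in these snippets.

// Illustrative only: blame "src/Foo.java", letting an uncommitted copy win last.
BlameGenerator gen = new BlameGenerator(repository, "src/Foo.java");
try {
    gen.push(null, startCommitId);            // oldest: the starting commit
    gen.push("Working Tree", dirtyBlobId);    // newest: blob id of the edited file
    while (gen.next()) {
        System.out.println(gen.getSourceCommit() + " lines "
                + gen.getResultStart() + ".." + gen.getResultEnd());
    }
} finally {
    gen.release();                            // close() on JGit 4.0 and later
}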
@Override
protected void doGet(final HttpServletRequest req,
        final HttpServletResponse rsp) throws IOException {
    String keyStr = req.getPathInfo();

    // We shouldn't have to do this extra decode pass, but somehow we
    // are now receiving our "^1" suffix as "%5E1", which confuses us
    // downstream. Other times we get our embedded "," as "%2C", which
    // is equally bad. And yet when these happen a "%2F" is left as-is,
    // rather than escaped as "%252F", which makes me feel really really
    // uncomfortable with a blind decode right here.
    //
    keyStr = URLDecoder.decode(keyStr, "UTF-8");

    if (!keyStr.startsWith("/")) {
        rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }
    keyStr = keyStr.substring(1);

    final Patch.Key patchKey;
    final int side;
    {
        final int c = keyStr.lastIndexOf('^');
        if (c == 0) {
            rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        if (c < 0) {
            side = 0;

        } else {
            try {
                side = Integer.parseInt(keyStr.substring(c + 1));
                keyStr = keyStr.substring(0, c);
            } catch (NumberFormatException e) {
                rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            }
        }

        try {
            patchKey = Patch.Key.parse(keyStr);
        } catch (NumberFormatException e) {
            rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
    }

    final Change.Id changeId = patchKey.getParentKey().getParentKey();
    final Project project;
    final PatchSet patchSet;
    try {
        final ReviewDb db = requestDb.get();
        final ChangeControl control = changeControl.validateFor(changeId);

        project = control.getProject();
        patchSet = db.patchSets().get(patchKey.getParentKey());
        if (patchSet == null) {
            rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
    } catch (NoSuchChangeException e) {
        rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    } catch (OrmException e) {
        getServletContext().log("Cannot query database", e);
        rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }

    final Repository repo;
    try {
        repo = repoManager.openRepository(project.getNameKey());
    } catch (RepositoryNotFoundException e) {
        getServletContext().log("Cannot open repository", e);
        rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }

    final ObjectLoader blobLoader;
    final RevCommit fromCommit;
    final String suffix;
    final String path = patchKey.getFileName();
    try {
        final ObjectReader reader = repo.newObjectReader();
        try {
            final RevWalk rw = new RevWalk(reader);
            final RevCommit c;
            final TreeWalk tw;

            c = rw.parseCommit(ObjectId.fromString(patchSet.getRevision().get()));
            if (side == 0) {
                fromCommit = c;
                suffix = "new";

            } else if (1 <= side && side - 1 < c.getParentCount()) {
                fromCommit = rw.parseCommit(c.getParent(side - 1));
                if (c.getParentCount() == 1) {
                    suffix = "old";
                } else {
                    suffix = "old" + side;
                }

            } else {
                rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            }

            tw = TreeWalk.forPath(reader, path, fromCommit.getTree());
            if (tw == null) {
                rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            }

            if (tw.getFileMode(0).getObjectType() == Constants.OBJ_BLOB) {
                blobLoader = reader.open(tw.getObjectId(0), Constants.OBJ_BLOB);

            } else {
                rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                return;
            }
        } finally {
            reader.release();
        }
    } catch (IOException e) {
        getServletContext().log("Cannot read repository", e);
        rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    } catch (RuntimeException e) {
        getServletContext().log("Cannot read repository", e);
        rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    } finally {
        repo.close();
    }

    final byte[] raw = blobLoader.isLarge() ? null : blobLoader.getCachedBytes();
    final long when = fromCommit.getCommitTime() * 1000L;

    rsp.setDateHeader("Last-Modified", when);
    rsp.setDateHeader("Expires", 0L);
    rsp.setHeader("Pragma", "no-cache");
    rsp.setHeader("Cache-Control", "no-cache, must-revalidate");

    OutputStream out;
    ZipOutputStream zo;

    final MimeType contentType = registry.getMimeType(path, raw);
    if (!registry.isSafeInline(contentType)) {
        // The content may not be safe to transmit inline, as a browser might
        // interpret it as HTML or JavaScript hosted by this site. Such code
        // might then run in the site's security domain, and may be able to use
        // the user's cookies to perform unauthorized actions.
        //
        // Usually, wrapping the content into a ZIP file forces the browser to
        // save the content to the local system instead.
        //
        rsp.setContentType(ZIP.toString());
        rsp.setHeader("Content-Disposition", "attachment; filename=\""
                + safeFileName(path, suffix) + ".zip" + "\"");

        zo = new ZipOutputStream(rsp.getOutputStream());

        final ZipEntry e = new ZipEntry(safeFileName(path, rand(req, suffix)));
        e.setComment(fromCommit.name() + ":" + path);
        e.setSize(blobLoader.getSize());
        e.setTime(when);
        zo.putNextEntry(e);
        out = zo;

    } else {
        rsp.setContentType(contentType.toString());
        rsp.setHeader("Content-Length", "" + blobLoader.getSize());
        out = rsp.getOutputStream();
        zo = null;
    }

    if (raw != null) {
        out.write(raw);
    } else {
        blobLoader.copyTo(out);
    }

    if (zo != null) {
        zo.closeEntry();
    }
    out.close();
}
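The isLarge()/getCachedBytes()/copyTo() split above is the usual JGit idiom for serving blobs of unknown size: buffer small objects in memory, stream large ones. A condensed sketch of just that pattern, with the loader and output stream taken as assumed inputs:

// Condensed sketch of the small-vs-large handling used above
// (assumed inputs: an ObjectLoader 'ldr' and an OutputStream 'out').
if (ldr.isLarge()) {
    // Too big to inflate into a single byte[]; stream it instead.
    ldr.copyTo(out);
} else {
    byte[] bytes = ldr.getCachedBytes();   // fully inflated, cached in memory
    out.write(bytes);
}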