/**
 * Load in the filesystem image.  It's a big list of filenames and blocks.
 * Return whether we should "re-save" and consolidate the edit-logs.
 */
boolean loadFSImage(File fsdir, File edits) throws IOException {
  //
  // Atomic move sequence, to recover from interrupted save
  //
  File curFile = new File(fsdir, FS_IMAGE);
  File newFile = new File(fsdir, NEW_FS_IMAGE);
  File oldFile = new File(fsdir, OLD_FS_IMAGE);

  // Maybe we were interrupted between 2 and 4
  if (oldFile.exists() && curFile.exists()) {
    oldFile.delete();
    if (edits.exists()) {
      edits.delete();
    }
  } else if (oldFile.exists() && newFile.exists()) {
    // Or maybe between 1 and 2
    newFile.renameTo(curFile);
    oldFile.delete();
  } else if (curFile.exists() && newFile.exists()) {
    // Or else before stage 1, in which case we lose the edits
    newFile.delete();
  }

  //
  // Load in bits
  //
  if (curFile.exists()) {
    DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(curFile)));
    try {
      int numFiles = in.readInt();
      for (int i = 0; i < numFiles; i++) {
        UTF8 name = new UTF8();
        name.readFields(in);
        int numBlocks = in.readInt();
        if (numBlocks == 0) {
          unprotectedAddFile(name, null);
        } else {
          Block blocks[] = new Block[numBlocks];
          for (int j = 0; j < numBlocks; j++) {
            blocks[j] = new Block();
            blocks[j].readFields(in);
          }
          unprotectedAddFile(name, blocks);
        }
      }
    } finally {
      in.close();
    }
  }

  if (edits.exists() && loadFSEdits(edits) > 0) {
    return true;
  } else {
    return false;
  }
}
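The three recovery branches encode a specific save-time commit order. The following is a minimal sketch of that order, reconstructed purely from the branch conditions above; the class and parameter names are illustrative, not taken from the original source.

import java.io.File;

// Sketch only: the four-stage commit that loadFSImage's recovery branches imply.
// Stage numbers match the comments in loadFSImage; the real save-side code may differ.
class FsImageCommitSketch {
  static void commit(File fsdir, File edits,
                     String fsImage, String newFsImage, String oldFsImage) {
    File curFile = new File(fsdir, fsImage);
    File newFile = new File(fsdir, newFsImage);
    File oldFile = new File(fsdir, oldFsImage);
    // Precondition: the new image has already been written to newFile and synced.
    curFile.renameTo(oldFile);  // 1. retire the current image
    newFile.renameTo(curFile);  // 2. promote the new image
    edits.delete();             // 3. the edits are now folded into the promoted image
    oldFile.delete();           // 4. discard the retired image
  }
}

A crash between stages 2 and 4 leaves old+cur on disk (first branch), between 1 and 2 leaves old+new (second branch), and before stage 1 leaves cur+new (third branch).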
@SuppressWarnings("deprecation") public void write(DataOutput out) throws IOException { out.writeLong(rpcVersion); UTF8.writeString(out, declaringClassProtocolName); UTF8.writeString(out, methodName); out.writeLong(clientVersion); out.writeInt(clientMethodsHash); out.writeInt(parameterClasses.length); for (int i = 0; i < parameterClasses.length; i++) { ObjectWritable.writeObject(out, parameters[i], parameterClasses[i], conf, true); } }
public void write(DataOutput out) throws IOException {
  out.write(BASE_TYPE);
  out.writeUTF(name);
  out.writeInt(sampleStrs.size());
  for (int i = 0; i < sampleStrs.size(); i++) {
    UTF8.writeString(out, sampleStrs.get(i));
  }
  out.writeInt(tokenClassIdentifier);
  out.writeBoolean(tokenParameter != null);
  if (tokenParameter != null) {
    UTF8.writeString(out, tokenParameter);
  }
}
@SuppressWarnings("deprecation") public void readFields(DataInput in) throws IOException { rpcVersion = in.readLong(); declaringClassProtocolName = UTF8.readString(in); methodName = UTF8.readString(in); clientVersion = in.readLong(); clientMethodsHash = in.readInt(); parameters = new Object[in.readInt()]; parameterClasses = new Class[parameters.length]; ObjectWritable objectWritable = new ObjectWritable(); for (int i = 0; i < parameters.length; i++) { parameters[i] = ObjectWritable.readObject(in, objectWritable, this.conf); parameterClasses[i] = objectWritable.getDeclaredClass(); } }
public void readFields(DataInput in) throws IOException {
  // instance-specific
  this.sampleStrs = new ArrayList<String>();
  int numSamples = in.readInt();
  for (int i = 0; i < numSamples; i++) {
    sampleStrs.add(UTF8.readString(in).toString());
  }
  this.tokenClassIdentifier = in.readInt();
  if (in.readBoolean()) {
    this.tokenParameter = UTF8.readString(in);
  } else {
    this.tokenParameter = null;
  }
  this.schema = computeAvroSchema();
}
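write() and readFields() pairs like the two above must mirror each other field for field: a length prefix before each list, and a boolean null-flag before each optional field. The snippet below is a self-contained illustration of that round-trip pattern; SampleRecord and its fields are hypothetical, and it uses plain writeUTF/readUTF instead of Hadoop's UTF8 helpers so it runs without any dependencies.

import java.io.*;
import java.util.*;

// Hypothetical stand-in that copies the conventions used above: a length-prefixed
// string list and a null-flag before an optional field.
class SampleRecord {
  List<String> samples = new ArrayList<String>();
  String optionalParam;                        // may be null

  void write(DataOutput out) throws IOException {
    out.writeInt(samples.size());              // length prefix, as in write() above
    for (String s : samples) {
      out.writeUTF(s);
    }
    out.writeBoolean(optionalParam != null);   // null-flag before the optional field
    if (optionalParam != null) {
      out.writeUTF(optionalParam);
    }
  }

  void readFields(DataInput in) throws IOException {
    samples = new ArrayList<String>();
    int n = in.readInt();
    for (int i = 0; i < n; i++) {
      samples.add(in.readUTF());
    }
    optionalParam = in.readBoolean() ? in.readUTF() : null;
  }

  public static void main(String[] args) throws IOException {
    SampleRecord a = new SampleRecord();
    a.samples.add("alpha");

    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    a.write(new DataOutputStream(buf));

    SampleRecord b = new SampleRecord();
    b.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
    System.out.println(b.samples + " / " + b.optionalParam);   // [alpha] / null
  }
}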
/** Strip a single trailing "/" from a path; the root path "/" is left untouched. */
String normalizePath(UTF8 src) {
  String srcs = src.toString();
  if (srcs.length() > 1 && srcs.endsWith("/")) {
    srcs = srcs.substring(0, srcs.length() - 1);
  }
  return srcs;
}
public void readFields(DataInput in) throws IOException {
  methodName = UTF8.readString(in);
  parameters = new Object[in.readInt()];
  parameterClasses = new Class[parameters.length];
  ObjectWritable objectWritable = new ObjectWritable();
  for (int i = 0; i < parameters.length; i++) {
    parameters[i] = ObjectWritable.readObject(in, objectWritable, this.conf);
    parameterClasses[i] = objectWritable.getDeclaredClass();
  }
}
/** Get the blocks associated with the file. */
public Block[] getFile(UTF8 src) {
  waitForReady();
  synchronized (rootDir) {
    INode targetNode = rootDir.getNode(src.toString());
    if (targetNode == null) {
      return null;
    } else {
      return targetNode.blocks;
    }
  }
}
boolean unprotectedAddFile(UTF8 name, Block blocks[]) {
  synchronized (rootDir) {
    if (blocks != null) {
      // Add file->block mapping
      for (int i = 0; i < blocks.length; i++) {
        activeBlocks.add(blocks[i]);
      }
    }
    return (rootDir.addNode(name.toString(), blocks) != null);
  }
}
/** Add the given filename to the fs. */
public boolean addFile(UTF8 src, Block blocks[]) {
  waitForReady();

  // Always do an implicit mkdirs for parent directory tree
  mkdirs(DFSFile.getDFSParent(src.toString()));
  if (unprotectedAddFile(src, blocks)) {
    logEdit(OP_ADD, src, new ArrayWritable(Block.class, blocks));
    return true;
  } else {
    return false;
  }
}
protected void writeCorruptedData(RandomAccessFile file) throws IOException {
  final String messageForPreUpgradeVersion =
      "\nThis file is INTENTIONALLY CORRUPTED so that versions\n"
      + "of Hadoop prior to 0.13 (which are incompatible\n"
      + "with this directory layout) will fail to start.\n";

  file.seek(0);
  file.writeInt(FSConstants.LAYOUT_VERSION);
  org.apache.hadoop.io.UTF8.writeString(file, "");
  file.writeBytes(messageForPreUpgradeVersion);
  file.getFD().sync();
}
private final void readFieldsCompressed(DataInput in) throws IOException {
  byte oldVersion = in.readByte();
  switch (oldVersion) {
  case 0:
  case 1:
    url = UTF8.readString(in);           // read url
    base = UTF8.readString(in);          // read base
    content = new byte[in.readInt()];    // read content
    in.readFully(content);
    contentType = UTF8.readString(in);   // read contentType
    // reconstruct metadata
    int keySize = in.readInt();
    String key;
    for (int i = 0; i < keySize; i++) {
      key = UTF8.readString(in);
      int valueSize = in.readInt();
      for (int j = 0; j < valueSize; j++) {
        metadata.add(key, UTF8.readString(in));
      }
    }
    break;
  case 2:
    url = Text.readString(in);           // read url
    base = Text.readString(in);          // read base
    content = new byte[in.readInt()];    // read content
    in.readFully(content);
    contentType = Text.readString(in);   // read contentType
    metadata.readFields(in);             // read meta data
    break;
  default:
    throw new VersionMismatchException((byte) 2, oldVersion);
  }
}
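For reference, a writer matching the newest (case 2) layout can be read straight off that branch: a version byte, the Text-encoded strings, a length-prefixed byte array, and the metadata Writable. The method below is a sketch inferred from the reader alone and meant to live in the same class (it uses the same fields); it is not necessarily the class's actual write() method.

// Sketch: produce the case-2 layout that readFieldsCompressed accepts.
private void writeCurrentLayoutSketch(DataOutput out) throws IOException {
  out.writeByte(2);                  // version tag consumed by the switch above
  Text.writeString(out, url);
  Text.writeString(out, base);
  out.writeInt(content.length);      // length prefix, then the raw bytes
  out.write(content);
  Text.writeString(out, contentType);
  metadata.write(out);               // metadata is itself a Writable
}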
boolean unprotectedRenameTo(UTF8 src, UTF8 dst) {
  synchronized (rootDir) {
    INode removedNode = rootDir.getNode(src.toString());
    if (removedNode == null) {
      return false;
    }
    removedNode.removeNode();

    // Renaming onto an existing directory means moving src *under* dst
    if (isDir(dst)) {
      dst = new UTF8(dst.toString() + "/" + new File(src.toString()).getName());
    }
    INode newNode = rootDir.addNode(dst.toString(), removedNode.blocks);
    if (newNode != null) {
      // Re-parent the children of the renamed node
      newNode.children = removedNode.children;
      for (Iterator it = newNode.children.values().iterator(); it.hasNext(); ) {
        INode child = (INode) it.next();
        child.parent = newNode;
      }
      return true;
    } else {
      // Could not create dst; restore src to its old location
      rootDir.addNode(src.toString(), removedNode.blocks);
      return false;
    }
  }
}
Block[] unprotectedDelete(UTF8 src) {
  synchronized (rootDir) {
    INode targetNode = rootDir.getNode(src.toString());
    if (targetNode == null) {
      return null;
    } else {
      //
      // Remove the node from the namespace and GC all
      // the blocks underneath the node.
      //
      if (!targetNode.removeNode()) {
        return null;
      } else {
        Vector v = new Vector();
        targetNode.collectSubtreeBlocks(v);
        for (Iterator it = v.iterator(); it.hasNext(); ) {
          Block b = (Block) it.next();
          activeBlocks.remove(b);
        }
        return (Block[]) v.toArray(new Block[v.size()]);
      }
    }
  }
}
/** Create the given directory and all its parent dirs. */
public boolean mkdirs(UTF8 src) {
  return mkdirs(src.toString());
}
/**
 * Load an edit log, and apply the changes to the in-memory structure.
 *
 * <p>This is where we apply edits that we've been writing to disk all along.
 */
int loadFSEdits(File edits) throws IOException {
  int numEdits = 0;

  if (edits.exists()) {
    DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(edits)));
    try {
      while (in.available() > 0) {
        byte opcode = in.readByte();
        numEdits++;
        switch (opcode) {
        case OP_ADD: {
          UTF8 name = new UTF8();
          name.readFields(in);
          ArrayWritable aw = new ArrayWritable(Block.class);
          aw.readFields(in);
          Writable writables[] = (Writable[]) aw.get();
          Block blocks[] = new Block[writables.length];
          System.arraycopy(writables, 0, blocks, 0, blocks.length);
          unprotectedAddFile(name, blocks);
          break;
        }
        case OP_RENAME: {
          UTF8 src = new UTF8();
          UTF8 dst = new UTF8();
          src.readFields(in);
          dst.readFields(in);
          unprotectedRenameTo(src, dst);
          break;
        }
        case OP_DELETE: {
          UTF8 src = new UTF8();
          src.readFields(in);
          unprotectedDelete(src);
          break;
        }
        case OP_MKDIR: {
          UTF8 src = new UTF8();
          src.readFields(in);
          unprotectedMkdir(src.toString());
          break;
        }
        default: {
          throw new IOException("Never seen opcode " + opcode);
        }
        }
      }
    } finally {
      in.close();
    }
  }
  return numEdits;
}
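Each edit record therefore consists of an opcode byte followed by whatever operands the matching case consumes. The sketch below spells that layout out; logEditSketch is a hypothetical writer inferred from this reader and from the logEdit(OP_ADD, ...) call in addFile, not the actual logEdit implementation.

// Hypothetical writer for the record layout loadFSEdits expects.
void logEditSketch(DataOutput out, byte opcode, UTF8 src, UTF8 dst, ArrayWritable blocks)
    throws IOException {
  out.writeByte(opcode);     // OP_ADD, OP_RENAME, OP_DELETE, or OP_MKDIR
  src.write(out);            // every record starts with a UTF8 path
  if (opcode == OP_ADD) {
    blocks.write(out);       // OP_ADD additionally carries the file's block list
  } else if (opcode == OP_RENAME) {
    dst.write(out);          // OP_RENAME additionally carries the destination path
  }
  // OP_DELETE and OP_MKDIR carry only the single path.
}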