/**
 * Returns true if this category contains any bases visible by the given {@link
 * arlut.csd.ganymede.server.GanymedeSession session}. If session is null, we're supergash, so of
 * course this category is going to contain a visible base.
 */
private boolean containsVisibleBase(GanymedeSession session) {
  Vector<CategoryNode> contents;

  /* -- */

  if (session == null) {
    return true; // we're not filtering, return true immediately
  }

  contents = getNodes();

  for (CategoryNode node : contents) {
    if (node instanceof DBObjectBase) {
      DBObjectBase base = (DBObjectBase) node;

      if (session.getPermManager().getPerm(base.getTypeID(), true).isVisible()) {
        return true;
      }
    } else if (node instanceof DBBaseCategory) {
      DBBaseCategory subCategory = (DBBaseCategory) node;

      // return as soon as any sub-category contains a visible base, rather
      // than letting a later, fully hidden sub-category overwrite the result

      if (subCategory.containsVisibleBase(session)) {
        return true;
      }
    }
  }

  return false;
}
/**
 * This method takes all the children of the passed-in category (both {@link
 * arlut.csd.ganymede.server.DBObjectBase DBObjectBase} objects and contained {@link
 * arlut.csd.ganymede.server.DBBaseCategory DBBaseCategory} objects) and makes copies under this.
 */
private void recurseDown(DBBaseCategory category, Hashtable baseHash, DBSchemaEdit editor)
    throws RemoteException {
  Vector<CategoryNode> children = category.getNodes();
  DBObjectBase oldBase, newBase;
  DBBaseCategory oldCategory, newCategory;

  /* -- */

  if (debug) {
    Ganymede.debug("** recurseDown");

    if (editor == null) {
      Ganymede.debug("**#?!?!!! DBBaseCategory.recurseDown(): editor == null!!!");
    }
  }

  for (CategoryNode node : children) {
    if (node instanceof DBObjectBase) {
      oldBase = (DBObjectBase) node;

      // a new copy, with the same objects under it
      newBase = new DBObjectBase(oldBase, editor);

      baseHash.put(newBase.getKey(), newBase);

      if (debug) {
        Ganymede.debug(
            "Created newBase " + newBase.getName() + " in recursive category tree duplication");
      }

      // we want this base to be added to the current end of this category
      addNodeAfter(newBase, null);

      if (debug) {
        Ganymede.debug("Added " + newBase.getName() + " to new category tree");
      }
    } else if (node instanceof DBBaseCategory) {
      oldCategory = (DBBaseCategory) node;
      newCategory = (DBBaseCategory) newSubCategory(oldCategory.getName());
      newCategory.editor = editor;

      if (debug) {
        Ganymede.debug(
            "Created newCategory "
                + newCategory.getName()
                + " in recursive category tree duplication");
      }

      newCategory.recurseDown(oldCategory, baseHash, editor);
    }
  }
}
/**
 * This method is used to concatenate this DBBaseCategory's information to the passed-in {@link
 * arlut.csd.ganymede.common.CategoryTransport CategoryTransport} object for serialization to the
 * client.
 *
 * <p>This is kind of icky code, really.. {@link arlut.csd.ganymede.server.DBBaseCategory
 * DBBaseCategory}, {@link arlut.csd.ganymede.server.DBObjectBase DBObjectBase}, {@link
 * arlut.csd.ganymede.server.GanymedeSession GanymedeSession} and {@link
 * arlut.csd.ganymede.common.CategoryTransport CategoryTransport} classes are all getting
 * involved, here.
 *
 * <p>The reason for that is that we want to provide a CategoryTransport object to the client that
 * makes no reference to the server-side GanymedeSession class, and proper generation of a
 * CategoryTransport requires making method calls to a GanymedeSession.
 *
 * <p>The down side of that is that the actual structure of the CategoryTransport is generated
 * across two places.. here and in {@link
 * arlut.csd.ganymede.server.DBObjectBase#addBaseToTransport(arlut.csd.ganymede.common.CategoryTransport,
 * arlut.csd.ganymede.server.GanymedeSession)}.
 */
private void addCategoryToTransport(
    CategoryTransport transport, GanymedeSession session, boolean hideNonEditables) {
  Vector<CategoryNode> contents;

  /* -- */

  transport.addChunk("cat");
  transport.addChunk(getName());

  contents = getNodes();

  if (contents.size() > 0) {
    transport.addChunk("<");

    for (CategoryNode node : contents) {
      if (node instanceof DBObjectBase) {
        DBObjectBase base = (DBObjectBase) node;

        if (session == null
            || (hideNonEditables
                && session.getPermManager().getPerm(base.getTypeID(), true).isEditable())
            || (!hideNonEditables
                && session.getPermManager().getPerm(base.getTypeID(), true).isVisible())) {
          base.addBaseToTransport(transport, session);
        }
      } else if (node instanceof DBBaseCategory) {
        DBBaseCategory subCategory = (DBBaseCategory) node;

        if (session == null
            || (hideNonEditables && subCategory.containsEditableBase(session))
            || (!hideNonEditables && subCategory.containsVisibleBase(session))) {
          subCategory.addCategoryToTransport(transport, session, hideNonEditables);
        }
      }
    }
  }

  // terminate this category record

  transport.addChunk(">");
}
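// A rough sketch of the chunk sequence addCategoryToTransport() produces, derived
// from the addChunk() calls above (illustrative only, not a normative format spec):
//
//   "cat", "<category name>",
//   "<"                      -- emitted only if this category has any children
//     ...one base record per qualifying DBObjectBase, written by
//        DBObjectBase.addBaseToTransport()...
//     ...one nested "cat" ... ">" group per qualifying sub-category...
//   ">"                      -- always emitted, terminates this category record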
/** Emits this category and its contents to &lt;out&gt;, in ganymede.db form. */
synchronized void emit(DataOutput out) throws IOException {
  out.writeUTF(this.getPath());
  out.writeInt(contents.size());

  for (CategoryNode node : contents) {
    // in DBStore 2.0 and later, we emit all bases during our
    // DBBaseCategory dump.

    if (node instanceof DBBaseCategory) {
      out.writeBoolean(false); // it's a category
      ((DBBaseCategory) node).emit(out);
    } else if (node instanceof DBObjectBase) {
      out.writeBoolean(true); // it's a base
      ((DBObjectBase) node).emit(out, true);
    }
  }
}
/** Emits this category and its contents to &lt;out&gt;, in XML form. */
synchronized void emitXML(XMLDumpContext xmlOut) throws IOException {
  xmlOut.startElementIndent("category");
  xmlOut.attribute("name", getName());
  xmlOut.skipLine(); // skip line after category start

  xmlOut.indentOut();

  for (CategoryNode node : contents) {
    if (node instanceof DBBaseCategory) {
      ((DBBaseCategory) node).emitXML(xmlOut);
      xmlOut.skipLine();
    } else if (node instanceof DBObjectBase) {
      ((DBObjectBase) node).emitXML(xmlOut);
    }
  }

  xmlOut.indentIn();
  xmlOut.endElementIndent("category");
}
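// For reference, the XML produced by emitXML() above looks roughly like the
// following (a sketch based on the calls above; the contents of each object
// base element are written by DBObjectBase.emitXML() and are not shown here):
//
//   <category name="...">
//
//     <category name="...">
//       ...nested categories and object base definitions...
//     </category>
//
//     ...object base definitions emitted by DBObjectBase.emitXML()...
//   </category>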
/**
 * This DBObjectDeltaRec constructor is used to generate a delta record that records the
 * difference between two objects for the Ganymede journal.
 */
public DBObjectDeltaRec(DBObject oldObj, DBObject newObj) {
  if (oldObj == null || newObj == null) {
    throw new IllegalArgumentException(
        "Got a null object parameter"
            + ((oldObj == null) ? " old " : "")
            + ((newObj == null) ? " new " : ""));
  }

  if (!oldObj.getInvid().equals(newObj.getInvid())) {
    throw new IllegalArgumentException("Old and New object id's don't match");
  }

  /* - */

  this.invid = oldObj.getInvid();

  DBObjectBase objectBase = oldObj.getBase();
  DBField origField, currentField;

  /* -- */

  // algorithm: iterate over base.getFieldsInFieldOrder() to find
  // all fields possibly contained in the object.. for each field,
  // check to see if the value has changed.  if so, create a
  // fieldDeltaRec for it.

  // note that we're counting on objectBase.sortedFields not being
  // changed while we're iterating here.. this is an ok assumption,
  // since only the loader and the schema editor will trigger changes
  // in sortedFields.

  if (debug) {
    System.err.println(
        "Entering deltarec creation for objects "
            + oldObj.getLabel()
            + " and "
            + newObj.getLabel());
  }

  for (DBObjectBaseField fieldDef : objectBase.getFieldsInFieldOrder()) {
    if (debug) {
      System.err.println("Comparing field " + fieldDef.getName());
    }

    origField = (DBField) oldObj.getField(fieldDef.getID());
    currentField = (DBField) newObj.getField(fieldDef.getID());

    if ((origField == null || !origField.isDefined())
        && (currentField == null || !currentField.isDefined())) {
      // no change.. was null/undefined, still is.
      continue;
    }

    if (currentField == null || !currentField.isDefined()) {
      // lost this field
      fieldRecs.addElement(new fieldDeltaRec(fieldDef.getID(), null));
      continue;
    }

    if (origField == null || !origField.isDefined()) {
      // we gained this field
      fieldRecs.addElement(new fieldDeltaRec(fieldDef.getID(), currentField));
      continue;
    }

    if (currentField.equals(origField)) {
      // no changes, we don't need to write this one out.
      continue;
    }

    // at this point, we know we need to write out a change
    // record.. the only question now is whether it is for a
    // scalar or a vector.

    if (!fieldDef.isArray()) {
      // got a scalar.. save this field entire to write out
      // when we emit to the journal

      fieldRecs.addElement(new fieldDeltaRec(fieldDef.getID(), currentField));
      continue;
    }

    // it's a vector.. use the DBField.getVectorDiff() method
    // to generate a vector diff.

    fieldRecs.addElement(currentField.getVectorDiff(origField));
  }
}
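// Typical usage of this constructor, sketched with hypothetical variable names:
// when journaling a transaction, the difference between the checked-in version
// of an object and its edited replacement can be captured and written out.
//
//   DBObjectDeltaRec delta = new DBObjectDeltaRec(originalObj, editedObj);
//   delta.emit(journalOut);   // journalOut is a DataOutput aimed at the journal file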
/** This method emits this delta rec to a file. */
public void emit(DataOutput out) throws IOException {
  DBObjectBase baseDef;
  DBObjectBaseField fieldDef;

  /* -- */

  // write the object id

  baseDef = Ganymede.db.getObjectBase(invid.getType());

  if (debug) {
    System.err.println("Emitting delta rec for invid " + invid.toString());
  }

  invid.emit(out);

  // write the fieldrec count

  out.writeInt(fieldRecs.size());

  if (debug) {
    System.err.println(
        "Emitting " + fieldRecs.size() + " field records for invid " + invid.toString());
  }

  // now let's write out the fields

  fieldDeltaRec fdRec;
  Object value;

  for (int i = 0; i < fieldRecs.size(); i++) {
    fdRec = (fieldDeltaRec) fieldRecs.elementAt(i);

    // write out our field code.. this will be used by the loader
    // code to determine what kind of field this is, and what
    // kind of data types need to be loaded.

    if (debug) {
      System.err.println("Emitting fieldDeltaRec:\n\t" + fdRec.toString());
    }

    out.writeShort(fdRec.fieldcode);

    // are we deleting?

    if (!fdRec.vector && fdRec.scalarValue == null) {
      // yes, we're deleting this field

      out.writeBoolean(true);
      continue;
    }

    // no, we're redefining this field

    out.writeBoolean(false);

    // write out our field type code.. this will be used by the loader
    // to verify that the schema hasn't undergone an incompatible
    // change since the journal was written.

    fieldDef = (DBObjectBaseField) baseDef.getField(fdRec.fieldcode);

    out.writeShort(fieldDef.getType());

    if (fdRec.scalarValue != null) {
      out.writeBoolean(true); // scalar redefinition
      fdRec.scalarValue.emit(out);
      continue;
    }

    out.writeBoolean(false); // vector mod

    // write out what is being added to this vector

    if (fdRec.addValues == null) {
      out.writeInt(0);
    } else {
      // keep the debug output behind the null check so we don't NPE on
      // fdRec.addValues.size() when there are no additions

      if (debug) {
        System.err.println("====== Emitting " + fdRec.addValues.size() + " addition elements");
      }

      out.writeInt(fdRec.addValues.size());

      for (int j = 0; j < fdRec.addValues.size(); j++) {
        value = fdRec.addValues.elementAt(j);

        // we only support 3 vector field types

        if (value instanceof String) {
          out.writeUTF((String) value);
        } else if (value instanceof Invid) {
          ((Invid) value).emit(out);
        } else if (value instanceof IPwrap) {
          Byte[] bytes = ((IPwrap) value).address;

          out.writeByte(bytes.length);

          for (int k = 0; k < bytes.length; k++) {
            out.writeByte(bytes[k].byteValue());
          }
        } else {
          Ganymede.debug("DBObjectDeltaRec.emit(): Error! Unrecognized element in vector!");
        }
      }
    }

    // write out what is being removed from this vector

    if (fdRec.delValues == null) {
      out.writeInt(0);
    } else {
      out.writeInt(fdRec.delValues.size());

      for (int j = 0; j < fdRec.delValues.size(); j++) {
        value = fdRec.delValues.elementAt(j);

        // we only support 3 vector field types

        if (value instanceof String) {
          out.writeUTF((String) value);
        } else if (value instanceof Invid) {
          Invid invid = (Invid) value;

          out.writeShort(invid.getType());
          out.writeInt(invid.getNum());
        } else if (value instanceof IPwrap) {
          Byte[] bytes = ((IPwrap) value).address;

          out.writeByte(bytes.length);

          for (int k = 0; k < bytes.length; k++) {
            out.writeByte(bytes[k].byteValue());
          }
        }
      }
    }
  }
}
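// Rough on-disk layout written by emit() above and read back by the
// DBObjectDeltaRec(DataInput) constructor below (a sketch derived from the
// code, not an independent format specification):
//
//   invid                        (via Invid.emit())
//   int      field record count
//   per field record:
//     short    field code
//     boolean  deleting?         (true == field deleted; record ends here)
//     short    field type code   (schema sanity check on load)
//     boolean  scalar?           (true == full scalar value follows via DBField emit)
//     if vector:
//       int    addition count, then each added String / Invid / IP value
//       int    deletion count, then each removed String / Invid / IP value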
/** This DBObjectDeltaRec constructor is used to load a delta record from a Journal stream. */
public DBObjectDeltaRec(DataInput in) throws IOException {
  short fieldcode;
  short typecode;
  boolean scalar;
  DBObjectBase baseDef;
  DBObjectBaseField fieldDef;
  String fieldName = null;
  String status = null;
  DBObject obj = null;

  /* -- */

  status = "Reading invid";

  boolean debug = true;

  try {
    invid = Invid.createInvid(in);
    baseDef = Ganymede.db.getObjectBase(invid.getType());
    obj = Ganymede.db.viewDBObject(invid);

    if (debug) {
      System.err.println(
          "\n>*> Reading delta rec for " + baseDef.getName() + " <" + invid.getNum() + ">");
    }

    status = "Reading field count";

    int fieldcount = in.readInt();

    if (debug) {
      System.err.println(
          ">> DBObjectDeltaRec(): " + fieldcount + " fields in on-disk delta rec.");
    }

    for (int i = 0; i < fieldcount; i++) {
      status = "\nReading field code for field " + i;

      if (debug) {
        System.err.println(status);
      }

      fieldcode = in.readShort();
      fieldDef = (DBObjectBaseField) baseDef.getField(fieldcode);
      typecode = fieldDef.getType();
      fieldName = fieldDef.getName();

      status = "Reading deletion boolean for field " + i;

      if (in.readBoolean()) {
        // we're deleting this field

        if (debug) {
          System.err.println(
              "Reading field deletion record field ("
                  + fieldName
                  + ":"
                  + fieldcode
                  + ") for field "
                  + i);
        }

        fieldRecs.addElement(new fieldDeltaRec(fieldcode, null));
        continue;
      }

      // okay, we've got a field redefinition.. check the type
      // code to make sure we don't have an incompatible journal
      // entry

      status = "Reading type code for field " + i;

      if (in.readShort() != typecode) {
        throw new RuntimeException("Error, field type mismatch in journal file");
      }

      // ok.. now, is it a total redefinition, or a vector delta record?

      status = "Reading scalar/vector delta boolean for field " + i;

      scalar = in.readBoolean();

      if (scalar) {
        fieldDeltaRec f_r = null;

        status = "Reading field (" + fieldName + ":" + fieldcode + ") for field " + i;

        if (debug) {
          System.err.println(status);
        }

        f_r = new fieldDeltaRec(fieldcode, DBField.readField(obj, in, fieldDef));
        fieldRecs.addElement(f_r);

        if (debug) {
          System.err.println("Value: " + f_r.toString());
        }
      } else {
        // ok, we have a vector delta chunk

        fieldDeltaRec fieldRec = new fieldDeltaRec(fieldcode);
        Object value = null;

        // read in the additions

        status = "Reading vector addition count for field " + fieldName + "(" + i + ")";

        if (debug) {
          System.err.println(status);
        }

        int size = in.readInt();

        if (debug) {
          System.err.println(">> DBObjectDeltaRec(): reading " + size + " additions.");
        }

        for (int j = 0; j < size; j++) {
          // we only support 3 vector field types

          switch (typecode) {
            case STRING:
              status = "Reading string addition " + j + " for field " + i + ":" + fieldName;

              if (debug) {
                System.err.println(status);
              }

              value = in.readUTF();
              break;

            case INVID:
              status = "Reading invid addition " + j + " for field " + i + ":" + fieldName;

              if (debug) {
                System.err.println(status);
              }

              value = Invid.createInvid(in);
              break;

            case IP:
              status = "Reading ip addition " + j + " for field " + i + ":" + fieldName;

              if (debug) {
                System.err.println(status);
              }

              byte bytelength = in.readByte();
              Byte[] bytes = new Byte[bytelength];

              for (int k = 0; k < bytelength; k++) {
                bytes[k] = Byte.valueOf(in.readByte());
              }

              value = bytes;
          }

          fieldRec.addValue(value);
        }

        // read in the deletions

        status = "Reading vector deletion count for field " + i;

        if (debug) {
          System.err.println(status);
        }

        size = in.readInt();

        if (debug) {
          System.err.println(">> DBObjectDeltaRec(): reading " + size + " deletions.");
        }

        for (int j = 0; j < size; j++) {
          // we only support 3 vector field types

          switch (typecode) {
            case STRING:
              status = "Reading string deletion " + j + " for field " + i + ":" + fieldName;
              value = in.readUTF();
              break;

            case INVID:
              status = "Reading invid deletion " + j + " for field " + i + ":" + fieldName;
              value = Invid.createInvid(in);
              break;

            case IP:
              status = "Reading IP deletion " + j + " for field " + i + ":" + fieldName;

              byte bytelength = in.readByte();
              Byte[] bytes = new Byte[bytelength];

              for (int k = 0; k < bytelength; k++) {
                bytes[k] = Byte.valueOf(in.readByte());
              }

              value = bytes;
          }

          fieldRec.delValue(value);
        }

        // and save this field

        fieldRecs.addElement(fieldRec);
      }
    }
  } catch (IOException ex) {
    System.err.println("DBObjectDeltaRec constructor: IOException in state " + status);
    Ganymede.logError(ex);
    throw ex;
  }
}
/**
 * This method is used to remove a Category Node from under us.
 *
 * @see arlut.csd.ganymede.rmi.Category
 */
public synchronized void removeNode(CategoryNode node) throws RemoteException {
  int i, index = -1;

  /* -- */

  if (node == null) {
    throw new IllegalArgumentException("Can't remove a null node");
  }

  // find our deletion point

  if (debug) {
    try {
      Ganymede.debug("DBBaseCategory (" + getName() + ").removeNode(" + node.getPath() + ")");
    } catch (RemoteException ex) {
      Ganymede.logError(ex);
      throw new RuntimeException("rmi local failure?" + ex.getMessage());
    }
  }

  for (i = 0; i < contents.size(); i++) {
    if (debug) {
      try {
        Ganymede.debug(" examining: " + ((CategoryNode) contents.elementAt(i)).getPath());
      } catch (RemoteException ex) {
        Ganymede.logError(ex);
        throw new RuntimeException("rmi local failure?" + ex.getMessage());
      }
    }

    if (contents.elementAt(i).equals(node)) {
      index = i;
    }
  }

  if (index == -1) {
    throw new IllegalArgumentException("can't delete a node that's not in the category");
  }

  // remove our node from our content list

  contents.removeElementAt(index);

  if (debug) {
    if (node instanceof DBObjectBase) {
      DBObjectBase base = (DBObjectBase) node;

      if (!base.isEditing()) {
        System.err.println(
            "DBBaseCategory.removeNode(): " + base.getName() + " has a null editor!");
      } else {
        System.err.println(
            "DBBaseCategory.removeNode(): " + base.getName() + " has a non-null editor!");
      }
    }
  }

  // Sorry, kid, yer on your own now!

  node.setCategory(null);
}
/**
 * This method is used to remove a Category Node from under us.
 *
 * @see arlut.csd.ganymede.rmi.Category
 */
public synchronized void removeNode(String name) throws RemoteException {
  int i, index = -1;
  CategoryNode node = null;

  /* -- */

  if (name == null) {
    throw new IllegalArgumentException("Can't remove a null name");
  }

  // find our deletion point

  if (debug) {
    Ganymede.debug("DBBaseCategory (" + getName() + ").removeNode(" + name + ")");
  }

  for (i = 0; i < contents.size() && (index == -1); i++) {
    if (debug) {
      Ganymede.debug(" examining: " + contents.elementAt(i));
    }

    node = (CategoryNode) contents.elementAt(i);

    try {
      if (node.getName().equals(name)) {
        index = i;
      }
    } catch (RemoteException ex) {
      throw new RuntimeException("caught remote: " + ex);
    }
  }

  if (index == -1) {
    throw new IllegalArgumentException("can't delete a name that's not in the category");
  } else if (debug) {
    System.err.println("DBBaseCategory.removeNode(): found node " + node);

    if (node instanceof DBObjectBase) {
      System.err.println("DBBaseCategory.removeNode(): node is DBObjectBase");
    } else if (node instanceof Base) {
      System.err.println("DBBaseCategory.removeNode(): node is Base");
    } else if (node instanceof DBBaseCategory) {
      System.err.println("DBBaseCategory.removeNode(): node is DBBaseCategory");
    } else if (node instanceof Category) {
      System.err.println("DBBaseCategory.removeNode(): node is Category");
    } else {
      System.err.println("DBBaseCategory.removeNode(): node is <unrecognized>");
    }
  }

  // remove our node from our content list

  contents.removeElementAt(index);

  if (debug) {
    if (node instanceof DBObjectBase) {
      DBObjectBase base = (DBObjectBase) node;

      if (!base.isEditing()) {
        System.err.println(
            "DBBaseCategory.removeNode(2): " + base.getName() + " has a null editor!");
      } else {
        System.err.println(
            "DBBaseCategory.removeNode(2): " + base.getName() + " has a non-null editor!");
      }
    }
  }

  // Sorry, kid, yer on your own now!

  node.setCategory(null);
}