public Enum<?> putConfirm(PublicKey publicKey, Number640 key, Data newData) {
    RangeLock<Number640>.Range lock = lock(key);
    try {
        if (!securityEntryCheck(key.locationAndDomainAndContentKey(), publicKey,
                newData.publicKey(), newData.isProtectedEntry())) {
            return PutStatus.FAILED_SECURITY;
        }

        final Data data = backend.get(key);
        if (data != null) {
            // remove prepare flag
            data.prepareFlag(false);
            data.validFromMillis(newData.validFromMillis());
            data.ttlSeconds(newData.ttlSeconds());

            long expiration = data.expirationMillis();
            // handle timeout
            backend.addTimeout(key, expiration);
            backend.put(key, data);
            // don't release the data, as we just update it
            return PutStatus.OK;
        } else {
            return PutStatus.NOT_FOUND;
        }
    } finally {
        newData.release();
        lock.unlock();
    }
    // TODO: check for FORKS!
}
/** {@inheritDoc} */
public void save(AuthzGroup azGroup) throws GroupNotDefinedException, AuthzPermissionException {
    if (azGroup.getId() == null) throw new GroupNotDefinedException("<null>");

    Reference ref = entityManager().newReference(azGroup.getId());
    if (!SiteService.allowUpdateSiteMembership(ref.getId())) {
        // check security (throws if not permitted)
        unlock(SECURE_UPDATE_AUTHZ_GROUP, authzGroupReference(azGroup.getId()));
    }

    // make sure it's in storage
    if (!m_storage.check(azGroup.getId())) {
        // if this was new, create it in storage
        if (((BaseAuthzGroup) azGroup).m_isNew) {
            // reserve an AuthzGroup with this id from the info store - if it's in use, this will return null
            AuthzGroup newAzg = m_storage.put(azGroup.getId());
            if (newAzg == null) {
                M_log.warn("saveUsingSecurity, storage.put for a new returns null");
            }
        } else {
            throw new GroupNotDefinedException(azGroup.getId());
        }
    }

    // complete the save
    completeSave(azGroup);
}
@Override
public void bestLocationChanged(Location oldLocation, Location newLocation) {
    long currentTime = System.currentTimeMillis();
    LocationController cont = LocationController.getInstance(this.context);

    Log.d("HistoryController", "Best location updated! (hasGPSLock: " + cont.hasGPSLock() + ")");

    if (cont.hasGPSLock()) {
        if (stationaryLocationStart == null) {
            // first fix: start a new stationary period at this location
            stationaryLocationStart = newLocation;
            stationaryLocationEnd = newLocation;
            stationaryLocationStartTime = currentTime;
        } else {
            Log.d("HistoryController",
                    "Distance from stationary point: " + stationaryLocationStart.distanceTo(newLocation));
            if (stationaryLocationStart.distanceTo(newLocation) < STATIONARY_RADIUS) {
                Log.d("HistoryController", "New location was within radius");
                stationaryLocationEnd = newLocation;
            } else {
                Log.d("HistoryController",
                        "New location out of radius (time delta: "
                                + (currentTime - stationaryLocationStartTime) + ")");
                if (currentTime - stationaryLocationStartTime > STATIONARY_TIME) {
                    Log.d("HistoryController",
                            "Stayed in previous location long enough, recording to history");
                    HistoryItem item = new HistoryItem();
                    item.date = stationaryLocationStartTime;
                    item.elapsed_time = currentTime - stationaryLocationStartTime;
                    item.latitude = stationaryLocationStart.getLatitude();
                    item.longitude = stationaryLocationStart.getLongitude();
                    historyStorage.put(item);

                    // Notify listeners
                    synchronized (this) {
                        for (HistoryListener l : this.listeners) l.onNewHistoryItem(item);
                    }
                }
                // start a new stationary period at the new location
                stationaryLocationStart = newLocation;
                stationaryLocationEnd = newLocation;
                stationaryLocationStartTime = currentTime;
            }
        }
    }
}
void store(Address address, Content content) throws Exception {
    if (isResponsible(address)) {
        // store locally (even if this should be stored elsewhere, keep a copy for caching purposes)
        // JLG.debug("local store");
        storage.put(address, content);
    } else {
        // transfer the order to another agent
        LOG.info("transfer the order");
        Id nodeId = getNodeId(address);
        Queue<Contact> contactQueue = makeContactQueue(nodeId);
        getClient().store(contactQueue, address, content);
    }
}
/** {@inheritDoc} */
public DigestEdit add(String id) throws IdUsedException {
    // check security (throws if not permitted)
    // unlock(SECURE_ADD_DIGEST, digestReference(id));

    // one add/edit at a time, please, to keep them in sync - only one digest per user
    synchronized (m_storage) {
        // reserve a user with this id from the info store - if it's in use, this will return null
        DigestEdit edit = m_storage.put(id);
        if (edit == null) {
            throw new IdUsedException(id);
        }
        return edit;
    }
}
public Enum<?> updateMeta(PublicKey publicKey, Number640 key, Data newData) {
    RangeLock<Number640>.Range lock = lock(key);
    try {
        if (!securityEntryCheck(key.locationAndDomainAndContentKey(), publicKey,
                newData.publicKey(), newData.isProtectedEntry())) {
            return PutStatus.FAILED_SECURITY;
        }

        final Data data = backend.get(key);
        boolean changed = false;
        if (data != null && newData.publicKey() != null) {
            data.publicKey(newData.publicKey());
            changed = true;
        }
        if (data != null && newData.isSigned()) {
            data.signature(newData.signature());
            changed = true;
        }
        if (data != null) {
            data.validFromMillis(newData.validFromMillis());
            data.ttlSeconds(newData.ttlSeconds());
            changed = true;
        }
        if (changed) {
            long expiration = data.expirationMillis();
            // handle timeout
            backend.addTimeout(key, expiration);
            // no release of the old data, as we just update it
            backend.put(key, data);
            return PutStatus.OK;
        } else {
            return PutStatus.NOT_FOUND;
        }
    } finally {
        newData.release();
        lock.unlock();
    }
}
/** {@inheritDoc} */
public AuthzGroup addAuthzGroup(String id)
        throws GroupIdInvalidException, GroupAlreadyDefinedException, AuthzPermissionException {
    // check security (throws if not permitted)
    unlock(SECURE_ADD_AUTHZ_GROUP, authzGroupReference(id));

    // reserve an AuthzGroup with this id from the info store - if it's in use, this will return null
    AuthzGroup azGroup = m_storage.put(id);
    if (azGroup == null) {
        throw new GroupAlreadyDefinedException(id);
    }

    ((BaseAuthzGroup) azGroup).setEvent(SECURE_ADD_AUTHZ_GROUP);

    // update the properties
    addLiveProperties((BaseAuthzGroup) azGroup);

    // save
    completeSave(azGroup);

    return azGroup;
}
public Map<Number640, Enum<?>> putAll(final NavigableMap<Number640, Data> dataMap, PublicKey publicKey,
        boolean putIfAbsent, boolean domainProtection, boolean sendSelf) {
    if (dataMap.isEmpty()) {
        return Collections.emptyMap();
    }
    final Number640 min = dataMap.firstKey();
    final Number640 max = dataMap.lastKey();
    final Map<Number640, Enum<?>> retVal = new HashMap<Number640, Enum<?>>();
    final HashSet<Number480> keysToCheck = new HashSet<Number480>();
    final RangeLock<Number640>.Range lock = lock(min, max);
    try {
        for (Map.Entry<Number640, Data> entry : dataMap.entrySet()) {
            Number640 key = entry.getKey();
            keysToCheck.add(key.locationAndDomainAndContentKey());
            Data newData = entry.getValue();

            if (!securityDomainCheck(key.locationAndDomainKey(), publicKey, publicKey, domainProtection)) {
                retVal.put(key, PutStatus.FAILED_SECURITY);
                newData.release();
                continue;
            }

            // We need this check in case we did not use the encoder/decoder,
            // which is the case if we send the message to ourselves. In that
            // case, the public key of the data is never set to the message
            // public key if the public key of the data was null.
            final PublicKey dataKey;
            if (sendSelf && newData.publicKey() == null) {
                dataKey = publicKey;
            } else {
                dataKey = newData.publicKey();
            }

            if (!securityEntryCheck(key.locationAndDomainAndContentKey(), publicKey, dataKey,
                    newData.isProtectedEntry())) {
                retVal.put(key, PutStatus.FAILED_SECURITY);
                newData.release();
                continue;
            }

            final Data oldDataGet = backend.get(key);
            if (oldDataGet != null) {
                if (putIfAbsent) {
                    retVal.put(key, PutStatus.FAILED_NOT_ABSENT);
                    newData.release();
                    continue;
                }
                if (oldDataGet.isDeleted()) {
                    retVal.put(key, PutStatus.DELETED);
                    newData.release();
                    continue;
                }
                if (!oldDataGet.basedOnSet().equals(newData.basedOnSet())) {
                    retVal.put(key, PutStatus.VERSION_FORK);
                    newData.release();
                    continue;
                }
            }

            final Data oldDataPut = backend.put(key, newData);
            long expiration = newData.expirationMillis();
            // handle timeout
            backend.addTimeout(key, expiration);
            if (newData.hasPrepareFlag()) {
                retVal.put(key, PutStatus.OK_PREPARED);
            } else {
                retVal.put(key, PutStatus.OK);
            }
            if (oldDataPut != null && oldDataPut != newData) {
                oldDataPut.release();
            }
        }

        for (Number480 key : keysToCheck) {
            // now check for forks
            Number640 minVersion = new Number640(key, Number160.ZERO);
            Number640 maxVersion = new Number640(key, Number160.MAX_VALUE);
            NavigableMap<Number640, Data> tmp = backend.subMap(minVersion, maxVersion);
            tmp = filterCopyOrig(tmp, -1, true);
            NavigableMap<Number640, Data> heads = getLatestInternalOrig(tmp);

            final boolean forked = heads.size() > 1;
            for (final Map.Entry<Number640, Data> entry : heads.entrySet()) {
                if (forked && retVal.containsKey(entry.getKey())) {
                    retVal.put(entry.getKey(), PutStatus.VERSION_FORK);
                }
            }

            // now remove old versions
            if (maxVersions > 0) {
                NavigableMap<Number640, Data> versions = backend.subMap(minVersion, maxVersion);
                while (!versions.isEmpty()
                        && versions.firstKey().versionKey().timestamp() + maxVersions
                                <= versions.lastKey().versionKey().timestamp()) {
                    Map.Entry<Number640, Data> entry = versions.pollFirstEntry();
                    Data removed = backend.remove(entry.getKey(), true);
                    if (removed != null) {
                        removed.release();
                    }
                    backend.removeTimeout(entry.getKey());
                }
            }
        }
        return retVal;
    } finally {
        lock.unlock();
    }
}
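A minimal usage sketch for the bulk put above; the surrounding setup (a storage layer instance named "storageLayer", the sample keys, and the payload) is assumed for illustration and is not taken from the snippets themselves.

// Hypothetical caller: build one entry and store it without put-if-absent or protection.
NavigableMap<Number640, Data> dataMap = new TreeMap<Number640, Data>();
Number640 key = new Number640(Number160.createHash("location"), Number160.createHash("domain"),
        Number160.createHash("content"), Number160.ZERO);
dataMap.put(key, new Data("hello".getBytes()));
// publicKey may be null when neither domain nor entry protection is used
Map<Number640, Enum<?>> status = storageLayer.putAll(dataMap, null, false, false, false);
// Each entry in the result maps the key to a PutStatus such as OK, FAILED_SECURITY, or VERSION_FORK.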