@Override protected boolean isOld(StorageItem item) { if (M1ArtifactRecognizer.isMetadata(item.getPath())) { return isOld(getMetadataMaxAge(), item); } if (M1ArtifactRecognizer.isSnapshot(item.getPath())) { return isOld(getArtifactMaxAge(), item); } // we are using Gav to test the path Gav gav = null; try { gav = gavCalculator.pathToGav(item.getPath()); } catch (IllegalArtifactCoordinateException e) { getLogger().info("Illegal artifact path: '" + item.getPath() + "'" + e.getMessage()); } if (gav == null) { // this is not an artifact, it is just any "file" return super.isOld(item); } // it is a release return isOld(getArtifactMaxAge(), item); }
@Override protected StorageItem doRetrieveItem(ResourceStoreRequest request) throws IllegalOperationException, ItemNotFoundException, StorageException { StorageItem result = super.doRetrieveItem(request); List<String> wf = getWelcomeFiles(); boolean useWelcomeFiles = !request.getRequestContext().containsKey(WebSiteRepository.USE_WELCOME_FILES_KEY) || Boolean.TRUE.equals( request.getRequestContext().get(WebSiteRepository.USE_WELCOME_FILES_KEY)); if (useWelcomeFiles && result instanceof StorageCollectionItem && wf.size() > 0) { // it is a collection, check for one of the "welcome" files Collection<StorageItem> collItems = list(false, (StorageCollectionItem) result); for (StorageItem item : collItems) { if (item instanceof StorageFileItem && wf.contains(item.getName())) { // it is a file, it's name is in welcomeFiles list, so return it instead parent collection return item; } } } return result; }
/**
 * Appends every item from {@code listItems} to {@code result} whose path has not been seen
 * before, using {@code names} as the registry of already-seen paths (deduplication by path).
 *
 * @param names mutable set of paths already added; updated as a side effect
 * @param result mutable list receiving the deduplicated items
 * @param listItems the candidate items to add
 */
private static void addItems(
    HashSet<String> names, ArrayList<StorageItem> result, Collection<StorageItem> listItems) {
  for (final StorageItem candidate : listItems) {
    // Set.add returns true only the first time a path is encountered.
    final boolean firstOccurrence = names.add(candidate.getPath());
    if (firstOccurrence) {
      result.add(candidate);
    }
  }
}
/**
 * Copies an item from the {@code from} path to the {@code to} path, routing both requests to
 * their target repositories first.
 *
 * <p>If both routes hit the same repository the copy is delegated to that repository; otherwise
 * the item is retrieved from the source and re-stored (file) or re-created (collection) in the
 * destination. Links and other item kinds cannot be copied across repositories.
 *
 * @param from request naming the source path
 * @param to request naming the destination path
 * @throws IllegalRequestException (as IllegalOperationException) when a path hits no repository
 *     or the item kind cannot be copied across repositories
 */
public void copyItem(ResourceStoreRequest from, ResourceStoreRequest to)
    throws UnsupportedStorageOperationException, ItemNotFoundException, IllegalOperationException,
        StorageException, AccessDeniedException {
  RequestRoute fromRoute = getRequestRouteForRequest(from);
  RequestRoute toRoute = getRequestRouteForRequest(to);
  if (fromRoute.isRepositoryHit() && toRoute.isRepositoryHit()) {
    // it hits a repository, mangle path and call it
    try {
      // Rewrite both requests to repository-relative paths; popped in the finally below so the
      // caller's requests are left unchanged even on failure.
      from.pushRequestPath(fromRoute.getRepositoryPath());
      to.pushRequestPath(toRoute.getRepositoryPath());
      if (fromRoute.getTargetedRepository() == toRoute.getTargetedRepository()) {
        // Same repository on both ends: let it do a native (cheap) copy.
        fromRoute.getTargetedRepository().copyItem(from, to);
      } else {
        // Cross-repository copy: fetch from source, then store/create in destination.
        StorageItem item = fromRoute.getTargetedRepository().retrieveItem(from);
        if (item instanceof StorageFileItem) {
          try {
            toRoute
                .getTargetedRepository()
                .storeItem(
                    to,
                    ((StorageFileItem) item).getInputStream(),
                    item.getRepositoryItemAttributes().asMap());
          } catch (IOException e) {
            // XXX: this is nonsense, to box IOException into subclass of IOException!
            throw new LocalStorageException(e);
          }
        } else if (item instanceof StorageCollectionItem) {
          toRoute
              .getTargetedRepository()
              .createCollection(to, item.getRepositoryItemAttributes().asMap());
        } else {
          // e.g. a link item — no cross-repository semantics defined for it.
          throw new IllegalRequestException(
              from,
              "Cannot copy item of class='"
                  + item.getClass().getName()
                  + "' over multiple repositories.");
        }
      }
    } finally {
      from.popRequestPath();
      to.popRequestPath();
    }
  } else {
    // this is "above" repositories: at least one path did not resolve to any repository
    if (!fromRoute.isRepositoryHit()) {
      throw new IllegalRequestException(
          from, "The path '" + from.getRequestPath() + "' does not points to any repository!");
    } else {
      throw new IllegalRequestException(
          to, "The path '" + to.getRequestPath() + "' does not points to any repository!");
    }
  }
}
/**
 * Walker callback: feeds every file item's path to the metadata helper.
 *
 * <p>Failures are logged and swallowed on purpose — one bad item must not abort the walk.
 *
 * @param context the active walker context (unused here)
 * @param item the item being visited; only {@link StorageFileItem}s are processed
 */
@Override
public void processItem(WalkerContext context, StorageItem item) {
  if (item instanceof StorageFileItem) {
    try {
      mdHelper.processFile(item.getPath());
    } catch (Exception e) {
      // BUGFIX: corrected the misspelled "occured" in the log message.
      logger.warn("Error occurred while processing item '" + item.getPath() + "'.", e);
    }
  }
}
@Override public void processItem(WalkerContext context, StorageItem item) { if (StorageCollectionItem.class.isAssignableFrom(item.getClass())) { colls++; } else if (StorageFileItem.class.isAssignableFrom(item.getClass())) { files++; } else if (StorageLinkItem.class.isAssignableFrom(item.getClass())) { links++; } }
@Override public void processItem(final WalkerContext context, final StorageItem item) throws Exception { // cancelation CancelableUtil.checkInterruption(); if (item instanceof StorageFileItem) { parentOMatic.addPath(item.getPath()); } }
/**
 * Decides staleness for P2 repository content: metadata items age by the metadata max-age,
 * everything else by the artifact max-age.
 *
 * @param item the stored item to check
 * @return {@code true} if the item is older than the applicable max-age
 */
@Override
public boolean isOld(StorageItem item) {
  if (!AbstractP2MetadataSource.isP2MetadataItem(item.getPath())) {
    return super.isOld(getArtifactMaxAge(), item);
  }
  return super.isOld(getMetadataMaxAge(), item);
}
/**
 * Publishes a synthetic checksum entry (artifact path + suffix) into {@code checksums} when the
 * artifact carries the named checksum attribute; does nothing otherwise.
 *
 * @param checksums map of path to item receiving the synthetic checksum entry
 * @param request the current request; its path is temporarily pushed while creating the item
 * @param artifact the artifact whose attributes may hold the checksum
 * @param attrname attribute key holding the checksum value
 * @param suffix file-name suffix for the checksum entry (e.g. ".sha1")
 */
private void putChecksumItem(
    Map<String, StorageItem> checksums,
    ResourceStoreRequest request,
    StorageItem artifact,
    String attrname,
    String suffix) {
  final String hash = artifact.getRepositoryItemAttributes().get(attrname);
  if (hash == null) {
    // No such checksum recorded for this artifact — nothing to publish.
    return;
  }
  final String hashPath = artifact.getPath() + suffix;
  request.pushRequestPath(hashPath);
  try {
    checksums.put(hashPath, newHashItem(this, request, artifact, hash));
  } finally {
    // Always restore the request path, even if item creation fails.
    request.popRequestPath();
  }
}
/**
 * Lists items, and — for proxy repositories — augments the listing with synthetic .sha1/.md5
 * entries derived from remotely-fetched checksum attributes.
 *
 * <p>Checksum entries are inserted first; real items are only added for paths not already
 * claimed, so a synthetic checksum shadows a same-named stored file.
 *
 * @param request the listing request
 * @return the (possibly augmented) item collection, sorted by path for proxies
 */
@Override
protected Collection<StorageItem> doListItems(ResourceStoreRequest request)
    throws ItemNotFoundException, StorageException {
  final Collection<StorageItem> listed = super.doListItems(request);
  if (!getRepositoryKind().isFacetAvailable(ProxyRepository.class)) {
    return listed;
  }
  final Map<String, StorageItem> merged = new TreeMap<String, StorageItem>();
  // Pass 1: synthesize checksum entries from remote checksum attributes.
  for (final StorageItem entry : listed) {
    putChecksumItem(merged, request, entry, ATTR_REMOTE_SHA1, SUFFIX_SHA1);
    putChecksumItem(merged, request, entry, ATTR_REMOTE_MD5, SUFFIX_MD5);
  }
  // Pass 2: add the real items wherever no checksum entry took the path already.
  for (final StorageItem entry : listed) {
    if (!merged.containsKey(entry.getPath())) {
      merged.put(entry.getPath(), entry);
    }
  }
  return merged.values();
}
@Override public void storeItem(boolean fromTask, StorageItem item) throws UnsupportedStorageOperationException, IllegalOperationException, StorageException { final ResourceStoreRequest request = new ResourceStoreRequest(item); // this is local only request if (shouldServeByPolicies(request)) { if (getRepositoryKind().isFacetAvailable(ProxyRepository.class) && item instanceof StorageFileItem && !item.getPath().startsWith("/.")) { try { if (item.getPath().endsWith(SUFFIX_SHA1)) { doStoreSHA1(this, doRetrieveArtifactItem(request, SUFFIX_SHA1), (StorageFileItem) item); } else if (item.getPath().endsWith(SUFFIX_MD5)) { doStoreMD5(this, doRetrieveArtifactItem(request, SUFFIX_MD5), (StorageFileItem) item); } else { super.storeItem(fromTask, item); } } catch (ItemNotFoundException e) { // ignore storeItem request // this is a maven2 proxy repository, it is requested to store .sha1/.md5 file // and not there is not corresponding artifact } } else { super.storeItem(fromTask, item); } } else { String msg = "Storing of item " + item.getRepositoryItemUid().toString() + " is forbidden by Maven Repository policy. Because " + getId() + " is a " + getRepositoryPolicy().name() + " repository"; getLogger().info(msg); throw new UnsupportedStorageOperationException(msg); } }
protected void gatherArtifactNodeInfoIfAvailable( final String path, final DefaultMergedTreeNode mnode) { if (!CHECK_LOCAL_AVAILABILITY) { return; } final ResourceStoreRequest request = getResourceStoreRequest(path); // default it to not available mnode.setLocallyAvailable(false); try { final StorageItem item = getRepository().retrieveItem(request); if (item instanceof StorageFileItem) { mnode.setLocallyAvailable(true); mnode.setArtifactTimestamp(item.getModified()); mnode.setArtifactMd5Checksum( item.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_MD5_KEY)); mnode.setArtifactSha1Checksum( item.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_SHA1_KEY)); mnode.setInitiatorUserId( item.getRepositoryItemAttributes().get(AccessManager.REQUEST_USER)); mnode.setInitiatorIpAddress( item.getRepositoryItemAttributes().get(AccessManager.REQUEST_REMOTE_ADDRESS)); mnode.setArtifactOriginUrl(item.getRemoteUrl()); if (!StringUtils.isEmpty(mnode.getArtifactOriginUrl())) { mnode.setArtifactOriginReason("cached"); } else { mnode.setArtifactOriginReason("deployed"); } } } catch (ItemNotFoundException e) { // mute it, probably not available locally } catch (AccessDeniedException e) { // mute it, probably user does not have authz to access this part of repo } catch (IllegalOperationException e) { // like "repo is out of service", but why is then tree view accessed at all? In that case it's // a bug logger.warn( "Illegal operation tried against repository {}", RepositoryStringUtils.getHumanizedNameString(getRepository()), e); } catch (StorageException e) { // this is lethal, some "io related" problem. Is basically IOException and is a problem on // your instance or HW/net logger.warn( "IO related problem in repository {}", RepositoryStringUtils.getHumanizedNameString(getRepository()), e); } }
/**
 * Rewrites a routed item's path back into the caller's original (pre-routing) namespace and,
 * when link-following is enabled, dereferences link items.
 *
 * @param isList whether the item came from a listing (child name is appended to the path)
 * @param request the original request (unused here, kept for subclass contracts)
 * @param route the route that produced the item; supplies the original request path
 * @param item the item to mangle; mutated in place via its AbstractStorageItem path
 * @return the mangled item, or its dereferenced target when it is a followed link
 */
protected StorageItem mangle(
    boolean isList, ResourceStoreRequest request, RequestRoute route, StorageItem item)
    throws AccessDeniedException, ItemNotFoundException, IllegalOperationException,
        StorageException {
  final String externalPath =
      isList
          ? ItemPathUtils.concatPaths(route.getOriginalRequestPath(), item.getName())
          : route.getOriginalRequestPath();
  ((AbstractStorageItem) item).setPath(externalPath);
  if (isFollowLinks() && item instanceof StorageLinkItem) {
    return dereferenceLink((StorageLinkItem) item);
  }
  return item;
}
/**
 * Extracts the path under which this item is exposed.
 *
 * @param item the item to inspect
 * @return the item's own path
 */
@Override
public String extractItemPath(StorageItem item) {
  return item.getPath();
}
/**
 * Tells whether the given item is Maven repository metadata, judged purely by its path.
 *
 * @param item the item to inspect
 * @return {@code true} if the item's path matches the Maven metadata naming rules
 */
@Override
public boolean isMavenMetadata(StorageItem item) {
  return isMavenMetadataPath(item.getPath());
}
/**
 * Tells whether the given item is a Maven artifact, judged purely by its path.
 *
 * @param item the item to inspect
 * @return {@code true} if the item's path matches the Maven artifact layout
 */
@Override
public boolean isMavenArtifact(StorageItem item) {
  return isMavenArtifactPath(item.getPath());
}
/**
 * Walker filter: skips hidden paths (leading dot) and P2 metadata items; everything else is
 * walked.
 *
 * @param context the active walker context (unused here)
 * @param item the candidate item
 * @return {@code true} if the item should be processed
 */
public boolean shouldProcess(WalkerContext context, StorageItem item) {
  final String itemPath = item.getPath();
  if (itemPath.startsWith(".")) {
    return false;
  }
  return !AbstractP2MetadataSource.isP2MetadataItem(itemPath);
}
/**
 * Snapshot-remover hook invoked when the walker leaves a collection (a GAV version directory).
 *
 * <p>For "-SNAPSHOT" directories it gathers which timestamped snapshot files are deletable
 * (older than the date threshold, or whose POM has a corresponding release when
 * remove-if-release-exists is requested), then enforces the minimum snapshot retention count
 * before actually deleting files and, finally, cleaning up the directory and metadata.
 *
 * @param context the active walker context
 * @param coll the collection just exited
 * @throws Exception on unrecoverable walk/delete failures
 */
public void doOnCollectionExit(WalkerContext context, StorageCollectionItem coll)
    throws Exception {
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("onCollectionExit() :: " + coll.getRepositoryItemUid().toString());
  }
  // Only version directories named "...SNAPSHOT" are candidates for snapshot removal.
  shouldProcessCollection = coll.getPath().endsWith("SNAPSHOT");
  if (!shouldProcessCollection) {
    return;
  }
  // Reset per-collection state (these maps/flags are instance fields reused per directory).
  deletableSnapshotsAndFiles.clear();
  remainingSnapshotsAndFiles.clear();
  removeWholeGAV = false;
  Gav gav = null;
  Collection<StorageItem> items;
  items = repository.list(false, coll);
  // Snapshot timestamps already chosen for deletion; any further file with the same timestamp
  // is deleted as part of that snapshot.
  HashSet<Long> versionsToRemove = new HashSet<Long>();
  // gathering the facts
  for (StorageItem item : items) {
    if (!item.isVirtual() && !StorageCollectionItem.class.isAssignableFrom(item.getClass())) {
      gav =
          ((MavenRepository) coll.getRepositoryItemUid().getRepository())
              .getGavCalculator()
              .pathToGav(item.getPath());
      if (gav != null) {
        // if we find a pom, check for delete on release
        if (!gav.isHash() && !gav.isSignature() && gav.getExtension().equals("pom")) {
          if (request.isRemoveIfReleaseExists()
              && releaseExistsForSnapshot(gav, item.getItemContext())) {
            getLogger().debug("Found POM and release exists, removing whole gav.");
            removeWholeGAV = true;
            // Will break out and junk whole gav
            break;
          }
        }
        item.getItemContext().put(Gav.class.getName(), gav);
        if (gav.getSnapshotTimeStamp() != null) {
          getLogger().debug("Using GAV snapshot timestamp");
          long itemTimestamp = gav.getSnapshotTimeStamp().longValue();
          getLogger().debug("NOW is " + itemTimestamp);
          // If this timestamp is already marked to be removed, junk it
          if (versionsToRemove.contains(new Long(itemTimestamp))) {
            addStorageFileItemToMap(deletableSnapshotsAndFiles, gav, (StorageFileItem) item);
          } else {
            getLogger()
                .debug("itemTimestamp=" + itemTimestamp + ", dateTreshold=" + dateThreshold);
            // Delete when the date threshold is unused (-1) OR the item's timestamp is below
            // the threshold (both values are positive). The retention count enforced further
            // below may still move files back to the "remaining" set.
            if (-1 == dateThreshold || itemTimestamp < dateThreshold) {
              versionsToRemove.add(new Long(itemTimestamp));
              addStorageFileItemToMap(deletableSnapshotsAndFiles, gav, (StorageFileItem) item);
            } else {
              addStorageFileItemToMap(remainingSnapshotsAndFiles, gav, (StorageFileItem) item);
            }
          }
        } else {
          // If no timestamp on gav, then it is a non-unique snapshot
          // and should _not_ be removed
          getLogger()
              .debug("GAV Snapshot timestamp not available, skipping non-unique snapshot");
          addStorageFileItemToMap(remainingSnapshotsAndFiles, gav, (StorageFileItem) item);
        }
      }
    }
  }
  // and doing the work here
  if (removeWholeGAV) {
    try {
      for (StorageItem item : items) {
        try {
          // preserve possible subdirs
          if (!(item instanceof StorageCollectionItem)) {
            repository.deleteItem(false, new ResourceStoreRequest(item));
          }
        } catch (ItemNotFoundException e) {
          if (getLogger().isDebugEnabled()) {
            getLogger()
                .debug("Could not delete whole GAV " + coll.getRepositoryItemUid().toString(), e);
          }
        }
      }
    } catch (Exception e) {
      getLogger()
          .warn("Could not delete whole GAV " + coll.getRepositoryItemUid().toString(), e);
    }
  } else {
    // Enforce the minimum retention count before deleting anything.
    if (remainingSnapshotsAndFiles.size() < request.getMinCountOfSnapshotsToKeep()) {
      if (remainingSnapshotsAndFiles.size() + deletableSnapshotsAndFiles.size()
          < request.getMinCountOfSnapshotsToKeep()) {
        // delete nothing, since there is less snapshots in total as allowed
        deletableSnapshotsAndFiles.clear();
      } else {
        // Move the newest deletable snapshots back into "remaining" until the minimum count is
        // satisfied.
        TreeSet<Version> keys = new TreeSet<Version>(deletableSnapshotsAndFiles.keySet());
        while (!keys.isEmpty()
            && remainingSnapshotsAndFiles.size() < request.getMinCountOfSnapshotsToKeep()) {
          Version keyToMove = keys.last();
          if (remainingSnapshotsAndFiles.containsKey(keyToMove)) {
            remainingSnapshotsAndFiles
                .get(keyToMove)
                .addAll(deletableSnapshotsAndFiles.get(keyToMove));
          } else {
            remainingSnapshotsAndFiles.put(
                keyToMove, deletableSnapshotsAndFiles.get(keyToMove));
          }
          deletableSnapshotsAndFiles.remove(keyToMove);
          keys.remove(keyToMove);
        }
      }
    }
    // NEXUS-814: is this GAV have remaining artifacts?
    boolean gavHasMoreTimestampedSnapshots = remainingSnapshotsAndFiles.size() > 0;
    for (Version key : deletableSnapshotsAndFiles.keySet()) {
      List<StorageFileItem> files = deletableSnapshotsAndFiles.get(key);
      deletedSnapshots++;
      for (StorageFileItem file : files) {
        try {
          // NEXUS-814: mark that we are deleting a TS snapshot, but there are still remaining
          // ones in repository.
          if (gavHasMoreTimestampedSnapshots) {
            file.getItemContext().put(MORE_TS_SNAPSHOTS_EXISTS_FOR_GAV, Boolean.TRUE);
          }
          gav = (Gav) file.getItemContext().get(Gav.class.getName());
          repository.deleteItem(false, new ResourceStoreRequest(file));
          deletedFiles++;
        } catch (ItemNotFoundException e) {
          if (getLogger().isDebugEnabled()) {
            getLogger().debug("Could not delete file:", e);
          }
        } catch (Exception e) {
          getLogger().info("Could not delete file:", e);
        }
      }
    }
  }
  removeDirectoryIfEmpty(coll);
  updateMetadataIfNecessary(context, coll);
}
/**
 * Verifies that retrieving maven-metadata.xml through a group ("gidr") router merges the plugin
 * metadata of the member repositories, and that the freshly computed .md5/.sha1 hashes served by
 * the router match the merged content.
 */
@Test
public void testMergingPlugins() throws Exception {
  String spoofedPath = "/merge-plugins/maven-metadata.xml";
  File mdmFile = createTempFile("mdm", "tmp");
  try {
    Metadata mdm;
    // get metadata from a gidr router, merging should happen
    StorageItem item =
        getRootRouter()
            .retrieveItem(new ResourceStoreRequest("/groups/test" + spoofedPath, false));
    // it should be a file
    assertTrue(StorageFileItem.class.isAssignableFrom(item.getClass()));
    // save it
    saveItemToFile(((StorageFileItem) item), mdmFile);
    // Merged output must differ from each member repository's raw metadata file.
    assertFalse(
        contentEquals(
            new File(getBasedir(), "target/test-classes/repo1" + spoofedPath), mdmFile));
    assertFalse(
        contentEquals(
            new File(getBasedir(), "target/test-classes/repo2" + spoofedPath), mdmFile));
    assertFalse(
        contentEquals(
            new File(getBasedir(), "target/test-classes/repo3" + spoofedPath), mdmFile));
    mdm = readMetadata(mdmFile);
    // The three member repos together contribute 4 distinct plugins.
    assertTrue(mdm.getPlugins() != null);
    assertEquals(4, mdm.getPlugins().size());
    // heh? why?
    // assertEquals( "20020202020202", mdm.getVersioning().getLastUpdated() );
    // get hash from a gidr router, merging should happen and newly calced hash should come down
    item =
        getRootRouter()
            .retrieveItem(new ResourceStoreRequest("/groups/test" + spoofedPath + ".md5", false));
    // it should be a file
    assertTrue(StorageFileItem.class.isAssignableFrom(item.getClass()));
    // save it
    String md5hash = contentAsString(item);
    // get hash from a gidr router, merging should happen and newly calced hash should come down
    item =
        getRootRouter()
            .retrieveItem(
                new ResourceStoreRequest("/groups/test" + spoofedPath + ".sha1", false));
    // it should be a file
    assertTrue(StorageFileItem.class.isAssignableFrom(item.getClass()));
    // save it
    String sha1hash = contentAsString(item);
    // Digester.verify throws when the hash does not match the merged file's content.
    Md5Digester md5Digester = new Md5Digester();
    md5Digester.verify(mdmFile, md5hash);
    Sha1Digester sha1Digester = new Sha1Digester();
    sha1Digester.verify(mdmFile, sha1hash);
  } finally {
    mdmFile.delete();
  }
}
/**
 * Resolves an item UID (repository + repository path) for the requested content and delegates to
 * the UID-based view renderer.
 *
 * <p>When {@code item} is {@code null} (not present locally, or a remote-index entry) the UID is
 * derived from the request routing alone; when an item is given, its own UID is used and link
 * items may be dereferenced first.
 *
 * @param store the store the request was made against (router or repository)
 * @param request the original request
 * @param item the already-retrieved item, or {@code null} if none was found
 * @param req the Restlet request
 * @return the rendered view object, or {@code null} when no view is applicable
 * @throws IOException on I/O failure while rendering
 */
public Object retrieveView(
    ResourceStore store, ResourceStoreRequest request, StorageItem item, Request req)
    throws IOException {
  RepositoryItemUid itemUid = null;
  if (item == null) {
    if (store instanceof RepositoryRouter) {
      RepositoryRouter repositoryRouter = (RepositoryRouter) store;
      // item is either not present or is not here yet (remote index)
      // the we can "simulate" what route would be used to get it, and just get info from the
      // route
      RequestRoute route;
      try {
        route = repositoryRouter.getRequestRouteForRequest(request);
      } catch (ItemNotFoundException e) {
        // this is thrown while getting routes for any path "outside" of legal ones is given
        // like /content/foo/bar, since 2nd pathelem may be "repositories", "groups", "shadows",
        // etc (depends on type of registered reposes)
        return null;
      }
      // request would be processed by targeted repository
      Repository itemRepository = route.getTargetedRepository();
      // create an UID against that repository
      itemUid = itemRepository.createUid(route.getRepositoryPath());
    } else if (store instanceof Repository) {
      itemUid = ((Repository) store).createUid(request.getRequestPath());
    } else {
      // this is highly unbelievable, unless Core gets extended by 3rd party
      return null;
    }
  } else {
    itemUid = item.getRepositoryItemUid();
    if ((item instanceof StorageLinkItem) && dereferenceLinks()) {
      // TODO: we may have "deeper" links too! Implement this properly!
      // NOTE(review): `repositoryRouter` here appears to be an instance field (the local of the
      // same name above is out of scope in this branch) — confirm against the full class.
      try {
        item =
            repositoryRouter.dereferenceLink(
                (StorageLinkItem) item, request.isRequestLocalOnly(), request.isRequestRemoteOnly());
      } catch (Exception e) {
        getLogger()
            .warn("Failed to dereference the storagelink " + item.getRepositoryItemUid(), e);
        // leave item unchanged
      }
    }
  }
  // so, we ended with:
  // itemUid is always populated, hence we have Repository and repository path
  // so, item may be null or non-null, if non-null, it may be link
  // check for item not found finally. Those may be allowed in proxy repositories only.
  if (item == null && !processNotFoundItems(itemUid.getRepository())) {
    // return not-applicable. This is not a proxy repository, and the item is not found. Since it
    // is not proxy repo, it will be never cached from remote too, simply, it is not here.
    return null;
  }
  return retrieveView(request, itemUid, item, req);
}
/**
 * Extracts the item's canonical repository path from its UID (rather than the possibly mangled
 * item path).
 *
 * @param item the item to inspect
 * @return the path recorded in the item's repository UID
 */
@Override
public String extractItemPath(StorageItem item) {
  return item.getRepositoryItemUid().getPath();
}
/**
 * Stores an item's content on the file system: content is first written to a "hidden" temp file
 * and then atomically moved into place under an exclusive UID lock (NEXUS-4550); with no content
 * locator the target is created as a directory.
 *
 * @param repository the owning repository
 * @param repositoryBaseDir the repository's base directory on disk
 * @param item the item being stored (supplies UID, lock, and modification time)
 * @param target the final file/directory location
 * @param cl content to write, or {@code null} to create a directory
 * @throws LocalStorageException on any I/O failure (EOF during write raises the
 *     {@code LocalStorageEofException} subtype)
 */
@Override
public void storeItem(
    final Repository repository,
    final File repositoryBaseDir,
    final StorageItem item,
    final File target,
    final ContentLocator cl)
    throws UnsupportedStorageOperationException, LocalStorageException {
  // create parents down to the file itself (this will make those if needed, otherwise return
  // silently)
  mkParentDirs(repository, target);
  if (cl != null) {
    // we have _content_ (content or link), hence we store a file
    final File hiddenTarget = getHiddenTarget(repository, repositoryBaseDir, target, item);
    // NEXUS-4550: Part One, saving to "hidden" (temp) file
    // In case of error cleaning up only what needed
    // No locking needed, AbstractRepository took care of that
    FileOutputStream os = null;
    InputStream is = null;
    try {
      os = new FileOutputStream(hiddenTarget);
      is = cl.getContent();
      IOUtil.copy(is, os, getCopyStreamBufferSize());
      os.flush();
    } catch (EOFException e) {
      // Truncated upload: remove the partial temp file and surface the EOF distinctly.
      if (hiddenTarget != null) {
        hiddenTarget.delete();
      }
      throw new LocalStorageEofException(
          String.format(
              "EOF during storing on path \"%s\" (while writing to hiddenTarget: \"%s\")",
              item.getRepositoryItemUid().toString(), hiddenTarget.getAbsolutePath()),
          e);
    } catch (IOException e) {
      if (hiddenTarget != null) {
        hiddenTarget.delete();
      }
      throw new LocalStorageException(
          String.format(
              "Got exception during storing on path \"%s\" (while writing to hiddenTarget: \"%s\")",
              item.getRepositoryItemUid().toString(), hiddenTarget.getAbsolutePath()),
          e);
    } finally {
      IOUtil.close(is);
      IOUtil.close(os);
    }
    // NEXUS-4550: Part Two, moving the "hidden" (temp) file to final location
    // In case of error cleaning up both files
    // Locking is needed, AbstractRepository got shared lock only for destination
    // NEXUS-4550: FSPeer is the one that handles the rename in case of FS LS,
    // so we need here to claim exclusive lock on actual UID to perform the rename
    final RepositoryItemUidLock uidLock = item.getRepositoryItemUid().getLock();
    uidLock.lock(Action.create);
    try {
      handleRenameOperation(hiddenTarget, target);
      target.setLastModified(item.getModified());
    } catch (IOException e) {
      // if we ARE NOT handling attributes, do proper cleanup in case of IOEx
      // if we ARE handling attributes, leave backups in case of IOEx
      final boolean isCleanupNeeded =
          !item.getRepositoryItemUid()
              .getBooleanAttributeValue(IsItemAttributeMetacontentAttribute.class);
      if (target != null
          && (isCleanupNeeded
              ||
              // NEXUS-4871 prevent zero length/corrupt files
              target.length() == 0)) {
        target.delete();
      }
      if (hiddenTarget != null
          && (isCleanupNeeded
              ||
              // NEXUS-4871 prevent zero length/corrupt files
              hiddenTarget.length() == 0)) {
        hiddenTarget.delete();
      }
      if (!isCleanupNeeded) {
        getLogger()
            .warn(
                "No cleanup done for error that happened while trying to save attibutes of item {}, the backup is left as {}!",
                item.getRepositoryItemUid().toString(),
                hiddenTarget.getAbsolutePath());
      }
      throw new LocalStorageException(
          String.format(
              "Got exception during storing on path \"%s\" (while moving to final destination)",
              item.getRepositoryItemUid().toString()),
          e);
    } finally {
      uidLock.unlock();
    }
  } else {
    // we have no content, we talk about directory
    target.mkdir();
    target.setLastModified(item.getModified());
  }
}