/**
 * Ensures the given repository appears exactly once in the response's repository-detail
 * list: if an entry with the same repository ID already exists nothing is done, otherwise
 * a new {@code NexusNGRepositoryDetail} is built from the repository and appended.
 *
 * @param request the REST request, used to build the repository reference URL
 * @param response the search response whose detail list is updated in place
 * @param repository the repository to describe
 */
protected void addRepositoryDetails(
    Request request, SearchNGResponse response, Repository repository) {
  final String repositoryId = repository.getId();
  for (NexusNGRepositoryDetail existing : response.getRepoDetails()) {
    if (existing.getRepositoryId().equals(repositoryId)) {
      // already listed: nothing to add
      return;
    }
  }
  final NexusNGRepositoryDetail detail = new NexusNGRepositoryDetail();
  detail.setRepositoryId(repositoryId);
  detail.setRepositoryName(repository.getName());
  detail.setRepositoryURL(
      createRepositoryReference(request, repositoryId).getTargetRef().toString());
  detail.setRepositoryContentClass(repository.getRepositoryContentClass().getId());
  detail.setRepositoryKind(extractRepositoryKind(repository));
  // repository policy is only meaningful for Maven repositories
  final MavenRepository mavenRepository = repository.adaptToFacet(MavenRepository.class);
  if (mavenRepository != null) {
    detail.setRepositoryPolicy(mavenRepository.getRepositoryPolicy().name());
  }
  response.addRepoDetail(detail);
}
// clean protected RepositoryListResourceResponse listRepositories( Request request, boolean allReposes, boolean includeGroups) throws ResourceException { RepositoryListResourceResponse result = new RepositoryListResourceResponse(); RepositoryListResource repoRes; Collection<Repository> repositories = getRepositoryRegistry().getRepositories(); for (Repository repository : repositories) { // To save UI changes at the moment, not including groups in repo call if ((allReposes || repository.isUserManaged()) && (includeGroups || !repository.getRepositoryKind().isFacetAvailable(GroupRepository.class))) { repoRes = new RepositoryListResource(); repoRes.setResourceURI(createRepositoryReference(request, repository.getId()).toString()); repoRes.setContentResourceURI( repositoryURLBuilder.getExposedRepositoryContentUrl(repository)); repoRes.setRepoType(getRestRepoType(repository)); repoRes.setProvider(NexusCompat.getRepositoryProviderHint(repository)); repoRes.setProviderRole(NexusCompat.getRepositoryProviderRole(repository)); repoRes.setFormat(repository.getRepositoryContentClass().getId()); repoRes.setId(repository.getId()); repoRes.setName(repository.getName()); repoRes.setUserManaged(repository.isUserManaged()); repoRes.setExposed(repository.isExposed()); repoRes.setEffectiveLocalStorageUrl(repository.getLocalUrl()); if (repository.getRepositoryKind().isFacetAvailable(MavenRepository.class)) { repoRes.setRepoPolicy( repository.adaptToFacet(MavenRepository.class).getRepositoryPolicy().toString()); } if (repository.getRepositoryKind().isFacetAvailable(ProxyRepository.class)) { repoRes.setRemoteUri(repository.adaptToFacet(ProxyRepository.class).getRemoteUrl()); } result.addData(repoRes); } } return result; }
protected Repository getRepositoryForPathPrefixOrId( String pathPrefixOrId, Class<? extends Repository> kind) throws NoSuchRepositoryException { List<? extends Repository> repositories = repositoryRegistry.getRepositoriesWithFacet(kind); Repository idMatched = null; Repository pathPrefixMatched = null; for (Repository repository : repositories) { if (StringUtils.equals(repository.getId(), pathPrefixOrId)) { idMatched = repository; } if (StringUtils.equals(repository.getPathPrefix(), pathPrefixOrId)) { pathPrefixMatched = repository; } } if (idMatched != null) { // id wins return idMatched; } if (pathPrefixMatched != null) { // if no id found, prefix wins return pathPrefixMatched; } // nothing found throw new NoSuchRepositoryException("pathPrefixOrId: '" + pathPrefixOrId + "'"); }
/**
 * Partitions all registered repositories into managed and unmanaged buckets, based on the
 * plugin configuration's list of managed repository IDs. IDs are removed from a working set
 * as they are matched, so whatever remains afterwards refers to configured repositories that
 * no longer exist in Nexus — those are logged as a warning.
 *
 * @return the populated {@code ManagedRepositories} DTO
 */
@Override
public ManagedRepositories getManagedRepositories() {
  final ManagedRepositories result = new ManagedRepositories();
  // working copy: entries are removed as they are matched against live repositories
  final Set<String> remainingManagedIds =
      new HashSet<String>(this.groupManagementPluginConfiguration.getManagedRepositories());
  for (final Repository repository : this.repositoryRegistry.getRepositories()) {
    final ManagedRepository entry = createManagedRepository(repository);
    final boolean isManaged = remainingManagedIds.remove(repository.getId());
    if (isManaged) {
      result.addManagedRepository(entry);
    } else {
      result.addUnmanagedRepository(entry);
    }
  }
  if (!remainingManagedIds.isEmpty()) {
    // configured IDs left over: they point at repositories that were deleted
    getLogger()
        .warn(
            "The following managed repository IDs no longer exist as repositories in Nexus: "
                + remainingManagedIds);
  }
  return result;
}
protected void dropRepositories() { for (Repository repository : repositoryRegistry.getRepositories()) { try { repositoryRegistry.removeRepositorySilently(repository.getId()); } catch (NoSuchRepositoryException e) { // will not happen } } }
private void inspectForTimeline(Event<?> evt, Repository repository) { // we do not want RSS entries about boot and repo additions during boot StringBuilder sb = new StringBuilder(); if (repository.getRepositoryKind().isFacetAvailable(GroupRepository.class)) { sb.append(" repository group "); } else { sb.append(" repository "); } sb.append(repository.getName()); sb.append(" (ID="); sb.append(repository.getId()); sb.append(") "); if (repository.getRepositoryKind().isFacetAvailable(ProxyRepository.class)) { sb.append(" as proxy repository for URL "); sb.append(repository.adaptToFacet(ProxyRepository.class).getRemoteUrl()); } else if (repository.getRepositoryKind().isFacetAvailable(HostedRepository.class)) { sb.append(" as hosted repository"); } else if (repository.getRepositoryKind().isFacetAvailable(ShadowRepository.class)) { sb.append(" as "); sb.append(repository.getClass().getName()); sb.append(" virtual repository for "); sb.append(repository.adaptToFacet(ShadowRepository.class).getMasterRepository().getName()); sb.append(" (ID="); sb.append(repository.adaptToFacet(ShadowRepository.class).getMasterRepository().getId()); sb.append(") "); } sb.append("."); if (evt instanceof RepositoryRegistryEventAdd) { sb.insert(0, "Registered"); } else if (evt instanceof RepositoryRegistryEventRemove) { sb.insert(0, "Unregistered"); } else if (evt instanceof RepositoryConfigurationUpdatedEvent) { sb.insert(0, "Updated"); } getFeedRecorder().addSystemEvent(FeedRecorder.SYSTEM_CONFIG_ACTION, sb.toString()); }
private boolean process( SnapshotRemovalRequest request, SnapshotRemovalResult result, Repository repository) { // only from maven repositories, stay silent for others and simply skip if (!repository.getRepositoryContentClass().isCompatible(contentClass)) { getLogger().debug("Skipping '" + repository.getId() + "' is not a maven 2 repository"); return false; } if (!repository.getLocalStatus().shouldServiceRequest()) { getLogger().debug("Skipping '" + repository.getId() + "' the repository is out of service"); return false; } if (repository.getRepositoryKind().isFacetAvailable(GroupRepository.class)) { process(request, result, repository.adaptToFacet(GroupRepository.class)); } else if (repository.getRepositoryKind().isFacetAvailable(MavenRepository.class)) { result.addResult( removeSnapshotsFromMavenRepository( repository.adaptToFacet(MavenRepository.class), request)); } return true; }
@Override public ContentLocator generateContent(Repository repository, String path, StorageFileItem item) throws IllegalOperationException, ItemNotFoundException, LocalStorageException { // make length unknown (since it will be known only in the moment of actual content pull) item.setLength(-1); return new ArchetypeContentLocator( repository.getId(), ((DefaultIndexerManager) indexerManager).getRepositoryIndexContext(repository), macPlugin, new ArtifactInfoFilter() { public boolean accepts(IndexingContext ctx, ArtifactInfo ai) { return indexArtifactFilter.filterArtifactInfo(ai); } }); }
/**
 * Keeps only the repositories the current subject is allowed to view.
 *
 * @param repositories candidate repositories; may be null
 * @return a new list of viewable repositories, or null when the input is null
 */
private List<Repository> filterAccessToRepositories(
    Collection<? extends Repository> repositories) {
  if (repositories == null) {
    return null;
  }
  final List<Repository> viewable = new ArrayList<Repository>();
  for (Repository candidate : repositories) {
    final boolean allowed =
        this.itemAuthorizer.isViewable(
            NexusItemAuthorizer.VIEW_REPOSITORY_KEY, candidate.getId());
    if (allowed) {
      viewable.add(candidate);
    }
  }
  return viewable;
}
/**
 * When no specific version is set, schedules a metadata-merge task for every group that
 * contains this task's repository — provided the repository is registered with the yum
 * registry.
 *
 * <p>Fix: {@code yumRegistry.isRegistered(repository.getId())} does not depend on the loop
 * variable, so the check is hoisted out of the group loop instead of being re-evaluated per
 * group. NOTE(review): the original checked the member repository's ID (not the group's)
 * inside the loop — that invariant behavior is preserved here; confirm it is intended.
 */
private void regenerateMetadataForGroups() {
  if (StringUtils.isBlank(getVersion())) {
    try {
      final Repository repository = repositoryRegistry.getRepository(getRepositoryId());
      // loop-invariant: registration is a property of the member repository itself
      if (yumRegistry.isRegistered(repository.getId())) {
        for (GroupRepository groupRepository :
            repositoryRegistry.getGroupsOfRepository(repository)) {
          MergeMetadataTask.createTaskFor(nexusScheduler, groupRepository);
        }
      }
    } catch (NoSuchRepositoryException e) {
      logger.warn(
          "Repository '{}' does not exist anymore. Backing out from triggering group merge for it.",
          getRepositoryId());
    }
  }
}
private String generateCatalogPayload(IndexingContext context) throws IOException { final MacRequest req = new MacRequest(repository.getId(), repositoryContentUrl, artifactInfoFilter); // NEXUS-5216: Warn if indexing context is null (indexable=false) for given repository but // continue // to return the correct empty catalog if (context == null) { logger.info( "Archetype Catalog for repository {} is not buildable as it lacks IndexingContext (indexable=false?).", RepositoryStringUtils.getHumanizedNameString(repository)); } // get the catalog final ArchetypeCatalog catalog = macPlugin.listArcherypesAsCatalog(req, context); // serialize it to XML final StringWriter sw = new StringWriter(); final ArchetypeCatalogXpp3Writer writer = new ArchetypeCatalogXpp3Writer(); writer.write(sw, catalog); return sw.toString(); }
/**
 * Fills in unset task parameters from the target repository: the RPM directory defaults to
 * the repository's base dir, the RPM URL to its exposed content URL, and the repo dir/url
 * fall back to the RPM dir/url respectively.
 *
 * @throws IllegalStateException when the content URL cannot be built (baseUrl not set)
 */
protected void setDefaults() throws MalformedURLException, URISyntaxException {
  final Repository repository = findRepository();
  if (repository != null && isBlank(getRpmDir())) {
    setRpmDir(RepositoryUtils.getBaseDir(repository).getAbsolutePath());
  }
  if (repository != null && isBlank(getRpmUrl())) {
    final String contentUrl = repositoryURLBuilder.getExposedRepositoryContentUrl(repository, true);
    if (StringUtils.isBlank(contentUrl)) {
      throw new IllegalStateException(
          SimpleFormat.format(
              "Not able to build content URL of the repository \"%s\" [id=%s], baseUrl not set!",
              repository.getName(), repository.getId()));
    }
    setRpmUrl(contentUrl);
  }
  // the repo dir/url default to the rpm dir/url when not explicitly configured
  if (isBlank(getParameter(PARAM_REPO_DIR)) && isNotBlank(getRpmDir())) {
    setRepoDir(new File(getRpmDir()));
  }
  if (isBlank(getRepoUrl()) && isNotBlank(getRpmUrl())) {
    setRepoUrl(getRpmUrl());
  }
}
protected void inspect(Event<?> evt) { if (!isNexusStarted()) { return; } Repository repository = null; if (evt instanceof RepositoryRegistryRepositoryEvent) { repository = ((RepositoryRegistryRepositoryEvent) evt).getRepository(); } else { repository = ((RepositoryConfigurationUpdatedEvent) evt).getRepository(); } try { // check registry for existence, wont be able to do much // if doesn't exist yet repoRegistry.getRepository(repository.getId()); inspectForTimeline(evt, repository); } catch (NoSuchRepositoryException e) { log.debug("Attempted to handle repository that isn't yet in registry"); } }
/**
 * Runs snapshot removal either against the single repository named in the request, or —
 * when no repository ID is given — against every registered repository.
 *
 * @throws NoSuchRepositoryException when the named repository does not exist
 * @throws IllegalArgumentException when the named repository is not eligible for snapshot
 *     removal (not Maven-compatible or out of service)
 */
public SnapshotRemovalResult removeSnapshots(SnapshotRemovalRequest request)
    throws NoSuchRepositoryException, IllegalArgumentException {
  final SnapshotRemovalResult result = new SnapshotRemovalResult();
  logDetails(request);
  final String repositoryId = request.getRepositoryId();
  if (repositoryId == null) {
    // no target given: best-effort walk over every registered repository
    for (Repository repository : getRepositoryRegistry().getRepositories()) {
      process(request, result, repository);
    }
  } else {
    final Repository repository = getRepositoryRegistry().getRepository(repositoryId);
    final boolean applicable = process(request, result, repository);
    if (!applicable) {
      throw new IllegalArgumentException(
          "The repository with ID="
              + repository.getId()
              + " is not valid for Snapshot Removal Task!");
    }
  }
  return result;
}
/**
 * Repacks the raw Lucene search results ({@code iterator}) into the REST-facing
 * {@code SearchNGResponse} DTO.
 *
 * <p>Two passes are made: the first consumes the iterator and builds the per-GAV artifact
 * entries plus per-repository hits, bailing out early on several relevance/size heuristics;
 * the second fills in latest-version information from the two collectors and — in collapsed
 * mode — creates the artifact links. The iterator is always closed.
 *
 * @param request the REST request, used to build links and repository references
 * @param terms the original search terms, used only for diagnostic logging
 * @param response the DTO being filled in place
 * @param collapsed whether results are collapsed to one entry per GA
 * @param from paging offset; null maps to -1 ("not set") in the DTO
 * @param count page size; LUCENE_HIT_LIMIT maps to -1 ("unlimited") in the DTO
 * @param iterator the raw search result to consume
 * @param systemWideCollector latest-version info across all repositories (populated as a
 *     side effect of consuming the iterator)
 * @param repositoryWideCollector latest-version info per repository (likewise)
 * @throws NoSuchRepositoryException when a hit references an unknown repository
 * @throws IOException on iterator close failure
 */
protected void repackIteratorSearchResponse(
    Request request,
    Map<String, String> terms,
    SearchNGResponse response,
    boolean collapsed,
    Integer from,
    int count,
    IteratorSearchResponse iterator,
    SystemWideLatestVersionCollector systemWideCollector,
    RepositoryWideLatestVersionCollector repositoryWideCollector)
    throws NoSuchRepositoryException, IOException {
  response.setCollapsed(collapsed);
  response.setTotalCount(iterator.getTotalHitsCount());
  // -1 signals "not set" for both paging fields
  response.setFrom(from == null ? -1 : from.intValue());
  response.setCount(count == LUCENE_HIT_LIMIT ? -1 : count);
  try {
    // NOTE(review): isTooManyResults() is read here but only assigned at the very end of
    // this method — presumably a caller pre-sets it; confirm.
    if (!response.isTooManyResults()) {
      // 1st pass, collect results: build the first two levels (no links), consuming the
      // iterator; the collectors get populated as a side effect of the iteration
      LinkedHashMap<String, NexusNGArtifact> hits = new LinkedHashMap<String, NexusNGArtifact>();
      NexusNGArtifact artifact;
      float firstDocumentScore = -1f;
      float lastDocumentScore = -1f;
      final long startedAtMillis = System.currentTimeMillis();
      for (ArtifactInfo ai : iterator) {
        // one DTO entry per groupId:artifactId:version
        final String key = ai.groupId + ":" + ai.artifactId + ":" + ai.version;
        artifact = hits.get(key);
        if (artifact == null) {
          // a new GAV entry would be created — apply the early-exit heuristics first.
          // heuristic 1: hard time cap on this loop
          if (System.currentTimeMillis() - startedAtMillis > FIRST_LOOP_EXECUTION_TIME_LIMIT) {
            getSearchDiagnosticLogger()
                .debug(
                    "Stopping delivering search results since we spent more than "
                        + FIRST_LOOP_EXECUTION_TIME_LIMIT
                        + " millis in 1st loop processing results.");
            break;
          }
          // heuristic 2: we stop if we delivered "most important" hits (change of relevance
          // from 1st document we got)
          if (hits.size() > 10
              && (firstDocumentScore - ai.getLuceneScore())
                  > DOCUMENT_TOP_RELEVANCE_HIT_CHANGE_THRESHOLD) {
            getSearchDiagnosticLogger()
                .debug(
                    "Stopping delivering search results since we span "
                        + DOCUMENT_TOP_RELEVANCE_HIT_CHANGE_THRESHOLD
                        + " of score change (firstDocScore="
                        + firstDocumentScore
                        + ", currentDocScore="
                        + ai.getLuceneScore()
                        + ").");
            break;
          }
          // heuristic 3: we stop if we detect a "big drop" in relevance in relation to the
          // previous document's score
          if (hits.size() > 10 && lastDocumentScore > 0) {
            if ((lastDocumentScore - ai.getLuceneScore())
                > DOCUMENT_RELEVANCE_HIT_CHANGE_THRESHOLD) {
              getSearchDiagnosticLogger()
                  .debug(
                      "Stopping delivering search results since we hit a relevance drop bigger than "
                          + DOCUMENT_RELEVANCE_HIT_CHANGE_THRESHOLD
                          + " (lastDocScore="
                          + lastDocumentScore
                          + ", currentDocScore="
                          + ai.getLuceneScore()
                          + ").");
              // the relevance change was big, so we stepped over "trash" results that are
              // probably not relevant at all, just stop here then
              break;
            }
          }
          // heuristic 4: we stop if we hit the GA limit
          if ((hits.size() + 1) > GA_HIT_LIMIT) {
            getSearchDiagnosticLogger()
                .debug(
                    "Stopping delivering search results since we hit a GA hit limit of "
                        + GA_HIT_LIMIT
                        + ".");
            // check for HIT_LIMIT: if we are stepping it over, stop here
            break;
          } else {
            artifact = new NexusNGArtifact();
            artifact.setGroupId(ai.groupId);
            artifact.setArtifactId(ai.artifactId);
            artifact.setVersion(ai.version);
            artifact.setHighlightedFragment(getMatchHighlightHtmlSnippet(ai));
            hits.put(key, artifact);
          }
        }
        Repository repository = getUnprotectedRepositoryRegistry().getRepository(ai.repository);
        addRepositoryDetails(request, response, repository);
        // find the per-repository hit entry under this artifact, or create it
        NexusNGArtifactHit hit = null;
        for (NexusNGArtifactHit artifactHit : artifact.getArtifactHits()) {
          if (repository.getId().equals(artifactHit.getRepositoryId())) {
            hit = artifactHit;
            break;
          }
        }
        if (hit == null) {
          hit = new NexusNGArtifactHit();
          hit.setRepositoryId(repository.getId());
          // if collapsed, we add links in 2nd pass, otherwise here
          if (!collapsed) {
            // we are adding the POM link "blindly", unless packaging is POM,
            // since it will be added below the "usual" way
            if (!"pom".equals(ai.packaging)) {
              NexusNGArtifactLink link =
                  createNexusNGArtifactLink(
                      request, ai.repository, ai.groupId, ai.artifactId, ai.version, "pom", null);
              // add the POM link
              hit.addArtifactLink(link);
            }
          }
          // we just created it, add it
          artifact.addArtifactHit(hit);
        }
        if (!collapsed) {
          // add a link for this classifier/extension pair unless one already exists
          boolean needsToBeAdded = true;
          for (NexusNGArtifactLink link : hit.getArtifactLinks()) {
            if (StringUtils.equals(link.getClassifier(), ai.classifier)
                && StringUtils.equals(link.getExtension(), ai.fextension)) {
              needsToBeAdded = false;
              break;
            }
          }
          if (needsToBeAdded) {
            NexusNGArtifactLink link =
                createNexusNGArtifactLink(
                    request,
                    ai.repository,
                    ai.groupId,
                    ai.artifactId,
                    ai.version,
                    ai.fextension,
                    ai.classifier);
            hit.addArtifactLink(link);
          }
        }
        // track first/last Lucene scores for the heuristics above
        if (firstDocumentScore < 0) {
          firstDocumentScore = ai.getLuceneScore();
        }
        lastDocumentScore = ai.getLuceneScore();
      }
      // summary:
      getSearchDiagnosticLogger()
          .debug(
              "Query terms \""
                  + terms
                  + "\" (LQL \""
                  + iterator.getQuery()
                  + "\") matched total of "
                  + iterator.getTotalHitsCount()
                  + " records, "
                  + iterator.getTotalProcessedArtifactInfoCount()
                  + " records were processed out of those, resulting in "
                  + hits.size()
                  + " unique GA records. Lucene scored documents first="
                  + firstDocumentScore
                  + ", last="
                  + lastDocumentScore
                  + ". Main processing loop took "
                  + (System.currentTimeMillis() - startedAtMillis)
                  + " ms.");
      // 2nd pass, set versions
      for (NexusNGArtifact artifactNg : hits.values()) {
        final String systemWideCollectorKey =
            systemWideCollector.getKey(artifactNg.getGroupId(), artifactNg.getArtifactId());
        LatestVersionHolder systemWideHolder =
            systemWideCollector.getLVHForKey(systemWideCollectorKey);
        if (systemWideHolder != null) {
          if (systemWideHolder.getLatestSnapshot() != null) {
            artifactNg.setLatestSnapshot(systemWideHolder.getLatestSnapshot().toString());
            artifactNg.setLatestSnapshotRepositoryId(
                systemWideHolder.getLatestSnapshotRepositoryId());
          }
          if (systemWideHolder.getLatestRelease() != null) {
            artifactNg.setLatestRelease(systemWideHolder.getLatestRelease().toString());
            artifactNg.setLatestReleaseRepositoryId(
                systemWideHolder.getLatestReleaseRepositoryId());
          }
        }
        // add some "touche" on 1st level
        if (collapsed) {
          // set the top level version to one of the latest ones
          if (artifactNg.getLatestRelease() != null) {
            artifactNg.setVersion(artifactNg.getLatestRelease());
          } else {
            artifactNg.setVersion(artifactNg.getLatestSnapshot());
          }
          // "create" the links now
          for (NexusNGArtifactHit hit : artifactNg.getArtifactHits()) {
            final String repositoryWideCollectorKey =
                repositoryWideCollector.getKey(
                    hit.getRepositoryId(), artifactNg.getGroupId(), artifactNg.getArtifactId());
            LatestECVersionHolder repositoryWideHolder =
                repositoryWideCollector.getLVHForKey(repositoryWideCollectorKey);
            if (repositoryWideHolder != null) {
              String versionToSet = null;
              // do we have a "latest release" version?
              if (repositoryWideHolder.getLatestRelease() != null) {
                versionToSet = repositoryWideHolder.getLatestRelease().toString();
              } else {
                versionToSet = repositoryWideHolder.getLatestSnapshot().toString();
              }
              // add POM link
              NexusNGArtifactLink pomLink =
                  createNexusNGArtifactLink(
                      request,
                      hit.getRepositoryId(),
                      artifactNg.getGroupId(),
                      artifactNg.getArtifactId(),
                      versionToSet,
                      "pom",
                      null);
              hit.addArtifactLink(pomLink);
              // TODO: order!
              // add main artifact link
              // add everything else
              // make the list by joining two collections
              // rationale: in case of reposes, only one of these will be populated, other will
              // be empty
              // but in case of mixed policy (like group), probably both will exist
              // TODO: this will not work like it in groups, since then the versions will
              // mismatch!
              ArrayList<ECHolder> ecHolders =
                  new ArrayList<ECHolder>(repositoryWideHolder.getReleaseECHolders());
              ecHolders.addAll(repositoryWideHolder.getSnapshotECHolders());
              for (ECHolder holder : ecHolders) {
                // add non-poms only, since we added POMs above
                if (!"pom".equals(holder.getExtension())) {
                  NexusNGArtifactLink link =
                      createNexusNGArtifactLink(
                          request,
                          hit.getRepositoryId(),
                          artifactNg.getGroupId(),
                          artifactNg.getArtifactId(),
                          versionToSet,
                          holder.getExtension(),
                          holder.getClassifier());
                  hit.addArtifactLink(link);
                }
              }
            }
          }
        }
      }
      response.setData(new ArrayList<NexusNGArtifact>(hits.values()));
    }
  } finally {
    // always release the underlying Lucene resources
    iterator.close();
  }
  response.setTooManyResults(iterator.getTotalHitsCount() > count);
}
/**
 * Maps a core {@code Repository} onto the plugin's {@code ManagedRepository} DTO, copying
 * only the ID and display name.
 */
protected ManagedRepository createManagedRepository(final Repository repository) {
  final ManagedRepository dto = new ManagedRepository();
  dto.setId(repository.getId());
  dto.setName(repository.getName());
  return dto;
}
/**
 * Tests listing a directory when a contained file does NOT exist.
 *
 * <p>The FSPeer mock reports "missing.txt" as a child of the "invalid" directory even though
 * that file is never written to disk; listing must silently skip the missing entry and
 * return only the files that really exist.
 *
 * @throws Exception on any unexpected test failure
 */
@SuppressWarnings({"unchecked"})
@Test
public void testListFilesThrowsItemNotFoundException() throws Exception {
  File repoLocation = new File(getBasedir(), "target/" + getClass().getSimpleName() + "/repo/");
  // the contents of the "valid" directory, only contains a "valid.txt" file
  File validDir = new File(repoLocation, "valid/");
  validDir.mkdirs();
  FileUtils.fileWrite(new File(validDir, "valid.txt"), "UTF-8", "something valid");
  Collection<File> validFileCollection = Arrays.asList(validDir.listFiles());
  // the contents of the "invalid" directory, this dir contains a missing file
  File invalidDir = new File(repoLocation, "invalid/");
  invalidDir.mkdirs();
  FileUtils.fileWrite(new File(invalidDir, "invalid.txt"), "UTF-8", "something valid");
  List<File> invalidFileCollection = new ArrayList<File>(Arrays.asList(invalidDir.listFiles()));
  // deliberately listed but never created on disk
  invalidFileCollection.add(new File(invalidDir, "missing.txt"));
  // Mocks for collaborators not exercised by this test
  Wastebasket wastebasket = mock(Wastebasket.class);
  LinkPersister linkPersister = mock(LinkPersister.class);
  MimeSupport mimeUtil = mock(MimeSupport.class);
  // Mock FSPeer to return the results created above
  FSPeer fsPeer = mock(FSPeer.class);
  when(fsPeer.listItems(
          Mockito.any(Repository.class), Mockito.any(ResourceStoreRequest.class), eq(validDir)))
      .thenReturn(validFileCollection);
  when(fsPeer.listItems(
          Mockito.any(Repository.class),
          Mockito.any(ResourceStoreRequest.class),
          eq(new File(repoLocation, "invalid/"))))
      .thenReturn(invalidFileCollection);
  // create Repository Mock
  Repository repository = mock(Repository.class);
  when(repository.getId()).thenReturn("mock");
  when(repository.getRepositoryKind())
      .thenReturn(new DefaultRepositoryKind(HostedRepository.class, null));
  when(repository.getLocalUrl()).thenReturn(repoLocation.toURI().toURL().toString());
  AttributesHandler attributesHandler = mock(AttributesHandler.class);
  when(repository.getAttributesHandler()).thenReturn(attributesHandler);
  DefaultFSLocalRepositoryStorage localRepositoryStorageUnderTest =
      new DefaultFSLocalRepositoryStorage(wastebasket, linkPersister, mimeUtil, fsPeer);
  ResourceStoreRequest validRequest = new ResourceStoreRequest("valid");
  // positive test, valid.txt should be found
  Collection<StorageItem> items =
      localRepositoryStorageUnderTest.listItems(repository, validRequest);
  assertThat(items.iterator().next().getName(), equalTo("valid.txt"));
  assertThat(items, hasSize(1));
  // missing.txt was listed in this directory, but it does NOT exist, only invalid.txt should
  // be found
  ResourceStoreRequest invalidRequest = new ResourceStoreRequest("invalid");
  items = localRepositoryStorageUnderTest.listItems(repository, invalidRequest);
  assertThat(items.iterator().next().getName(), equalTo("invalid.txt"));
  assertThat(items, hasSize(1));
}
/** Converting App model to REST DTO. */ public RepositoryBaseResource getRepositoryRestModel(Request request, Repository repository) { RepositoryResource resource = null; if (repository.getRepositoryKind().isFacetAvailable(ProxyRepository.class)) { resource = getRepositoryProxyRestModel(repository.adaptToFacet(ProxyRepository.class)); } else if (repository.getRepositoryKind().isFacetAvailable(ShadowRepository.class)) { return getRepositoryShadowRestModel(request, repository.adaptToFacet(ShadowRepository.class)); } else { resource = new RepositoryResource(); } resource.setContentResourceURI(repositoryURLBuilder.getExposedRepositoryContentUrl(repository)); resource.setProvider(NexusCompat.getRepositoryProviderHint(repository)); resource.setProviderRole(NexusCompat.getRepositoryProviderRole(repository)); resource.setFormat(repository.getRepositoryContentClass().getId()); resource.setRepoType(getRestRepoType(repository)); resource.setId(repository.getId()); resource.setName(repository.getName()); resource.setWritePolicy(repository.getWritePolicy().name()); resource.setBrowseable(repository.isBrowseable()); resource.setIndexable(repository.isSearchable()); resource.setExposed(repository.isExposed()); resource.setNotFoundCacheTTL(repository.getNotFoundCacheTimeToLive()); // TODO: remove the default local storage, this is a work around for NEXUS-1994 // the new 1.4 API doesn't store the default URL, well, it is part of the CRepo, but it is not // exposed. // so we can figure it out again, I think the default local Storage should be removed from the // REST message // which is part of the reason for not exposing it. The other part is it is not used anywhere // except to set // the localUrl if not already set. // apples to apples here, man i hate this section of code!!!! 
// always set to default (see AbstractRepositoryConfigurator) String defaultLocalStorageUrl = ((CRepositoryCoreConfiguration) repository.getCurrentCoreConfiguration()) .getConfiguration(false) .defaultLocalStorageUrl; resource.setDefaultLocalStorageUrl(defaultLocalStorageUrl); // if not user set (but using default), this is null, otherwise it contains user-set value String overrideLocalStorageUrl = ((CRepositoryCoreConfiguration) repository.getCurrentCoreConfiguration()) .getConfiguration(false) .getLocalStorage() .getUrl(); if (StringUtils.isNotBlank(overrideLocalStorageUrl)) { resource.setOverrideLocalStorageUrl(overrideLocalStorageUrl); } if (repository.getRepositoryKind().isFacetAvailable(MavenRepository.class)) { resource.setRepoPolicy( repository.adaptToFacet(MavenRepository.class).getRepositoryPolicy().toString()); if (repository.getRepositoryKind().isFacetAvailable(MavenProxyRepository.class)) { resource.setChecksumPolicy( repository.adaptToFacet(MavenProxyRepository.class).getChecksumPolicy().toString()); resource.setDownloadRemoteIndexes( repository.adaptToFacet(MavenProxyRepository.class).isDownloadRemoteIndexes()); } } // as this is a required field on ui, we need this to be set for non-maven type repos else { resource.setRepoPolicy(RepositoryPolicy.MIXED.name()); resource.setChecksumPolicy(ChecksumPolicy.IGNORE.name()); resource.setDownloadRemoteIndexes(false); } return resource; }
/**
 * Creates a factory bound to the given repository: its ID is passed to the superclass
 * constructor and the repository itself is stored in the {@code repository} field.
 */
public DefaultMergedTreeNodeFactory(Repository repository) {
  super(repository.getId());
  this.repository = repository;
}
/**
 * Lists the two synthetic levels of the virtual repository tree.
 *
 * <p>Depth 0 yields one collection item per registered repository-type prefix that has at
 * least one repository; depth 1 yields the exposed and browseable repositories of the type
 * whose prefix matches the request (addressed by ID for plain repositories, by path prefix
 * for specialized kinds). Any deeper request is not part of this virtual tree.
 *
 * @param request the incoming store request, whose path/context seed the child items
 * @param route the parsed route carrying depth and stripped prefix
 * @return the child collection items for the requested level
 * @throws ItemNotFoundException when the depth is not 0 or 1, or no repository type matches
 *     the requested prefix
 */
protected Collection<StorageItem> listVirtualPath(
    ResourceStoreRequest request, RequestRoute route) throws ItemNotFoundException {
  if (route.getRequestDepth() == 0) {
    // 1st level: one entry per repository type that has registered repositories
    ArrayList<StorageItem> result = new ArrayList<StorageItem>();
    for (RepositoryTypeDescriptor rtd :
        repositoryTypeRegistry.getRegisteredRepositoryTypeDescriptors()) {
      // check is there any repo registered
      if (!repositoryRegistry.getRepositoriesWithFacet(rtd.getRole()).isEmpty()) {
        ResourceStoreRequest req =
            new ResourceStoreRequest(
                ItemPathUtils.concatPaths(request.getRequestPath(), rtd.getPrefix()));
        DefaultStorageCollectionItem repositories =
            new DefaultStorageCollectionItem(this, req, true, false);
        repositories.getItemContext().putAll(request.getRequestContext());
        result.add(repositories);
      }
    }
    return result;
  } else if (route.getRequestDepth() == 1) {
    // 2nd level: resolve the repository type from the path prefix
    List<? extends Repository> repositories = null;
    Class<? extends Repository> kind = null;
    for (RepositoryTypeDescriptor rtd :
        repositoryTypeRegistry.getRegisteredRepositoryTypeDescriptors()) {
      if (route.getStrippedPrefix().startsWith("/" + rtd.getPrefix())) {
        kind = rtd.getRole();
        repositories = repositoryRegistry.getRepositoriesWithFacet(kind);
        break;
      }
    }
    // if no prefix matched, Item not found
    if (repositories == null || repositories.isEmpty()) {
      throw new ItemNotFoundException(request);
    }
    // filter access to the repositories
    // NOTE: do this AFTER the null/empty check so we return an empty list vs. an ItemNotFound
    repositories = filterAccessToRepositories(repositories);
    ArrayList<StorageItem> result = new ArrayList<StorageItem>(repositories.size());
    for (Repository repository : repositories) {
      // hide repositories that are not exposed or not browseable
      if (repository.isExposed() && repository.isBrowseable()) {
        DefaultStorageCollectionItem repoItem = null;
        ResourceStoreRequest req = null;
        if (Repository.class.equals(kind)) {
          // plain repositories are addressed by their ID ...
          req =
              new ResourceStoreRequest(
                  ItemPathUtils.concatPaths(request.getRequestPath(), repository.getId()));
        } else {
          // ... specialized kinds by their path prefix
          req =
              new ResourceStoreRequest(
                  ItemPathUtils.concatPaths(
                      request.getRequestPath(), repository.getPathPrefix()));
        }
        repoItem = new DefaultStorageCollectionItem(this, req, true, false);
        repoItem.getItemContext().putAll(request.getRequestContext());
        result.add(repoItem);
      }
    }
    return result;
  } else {
    // deeper paths are handled elsewhere, not part of this virtual listing
    throw new ItemNotFoundException(request);
  }
}
/**
 * Computes the member repositories a request should be dispatched to, applying the compiled
 * routing rules (blocking, inclusive and exclusive path mappings) to the initially resolved
 * member list.
 *
 * <p>Rule semantics as implemented below: any matching blocking rule returns an empty list
 * immediately. The first matching inclusion replaces the initial set; a mapping consisting
 * of the single entry "*" selects all resolved members for inclusions, and clears the set
 * for exclusions. Inclusions always retain non-user-managed repositories, and exclusions
 * never remove them. Applied mappings are recorded on the request for diagnostics.
 *
 * @param repository the repository (group) being addressed
 * @param request the incoming request, matched against the mappings
 * @param resolvedRepositories the members resolved before mapping, in their original order
 * @return the mapped members, order preserved via the LinkedHashSet of IDs
 * @throws NoSuchRepositoryException when an applied mapping references a repository that no
 *     longer exists
 */
public List<Repository> getMappedRepositories(
    Repository repository, ResourceStoreRequest request, List<Repository> resolvedRepositories)
    throws NoSuchRepositoryException {
  if (!compiled) {
    compile();
  }
  // NEXUS-2852: to make our life easier, we will work with repository IDs,
  // and will fill the result with Repositories at the end
  LinkedHashSet<String> reposIdSet = new LinkedHashSet<String>(resolvedRepositories.size());
  for (Repository resolvedRepositorty : resolvedRepositories) {
    reposIdSet.add(resolvedRepositorty.getId());
  }
  // for tracking what is applied
  ArrayList<RepositoryPathMapping> appliedMappings = new ArrayList<RepositoryPathMapping>();
  // if include found, add it to the list.
  boolean firstAdd = true;
  // blockings win over everything else: first match short-circuits to "no repositories"
  for (RepositoryPathMapping mapping : blockings) {
    if (mapping.matches(repository, request)) {
      if (getLogger().isDebugEnabled()) {
        getLogger()
            .debug(
                "The request path ["
                    + request.toString()
                    + "] is blocked by rule "
                    + mapping.toString());
      }
      return Collections.emptyList();
    }
  }
  // include, if found a match
  // NEXUS-2852: watch to not add multiple times same repository
  // ie. you have different inclusive rules that are triggered by same request
  // and contains some repositories. This is now solved using LinkedHashSet and using repo IDs.
  for (RepositoryPathMapping mapping : inclusions) {
    if (mapping.matches(repository, request)) {
      appliedMappings.add(mapping);
      if (firstAdd) {
        // the first matching inclusion replaces the initially resolved set
        reposIdSet.clear();
        firstAdd = false;
      }
      // add only those that are in initial resolvedRepositories list and that are non-user
      // managed
      // (preserve ordering)
      if (mapping.getMappedRepositories().size() == 1
          && "*".equals(mapping.getMappedRepositories().get(0))) {
        for (Repository repo : resolvedRepositories) {
          reposIdSet.add(repo.getId());
        }
      } else {
        for (Repository repo : resolvedRepositories) {
          if (mapping.getMappedRepositories().contains(repo.getId()) || !repo.isUserManaged()) {
            reposIdSet.add(repo.getId());
          }
        }
      }
    }
  }
  // then, if exlude found, remove those
  for (RepositoryPathMapping mapping : exclusions) {
    if (mapping.matches(repository, request)) {
      appliedMappings.add(mapping);
      if (mapping.getMappedRepositories().size() == 1
          && "*".equals(mapping.getMappedRepositories().get(0))) {
        // "*" exclusion empties the set; no further exclusions can matter
        reposIdSet.clear();
        break;
      }
      for (String repositoryId : mapping.getMappedRepositories()) {
        Repository mappedRepository = repositoryRegistry.getRepository(repositoryId);
        // but only if is user managed
        if (mappedRepository.isUserManaged()) {
          reposIdSet.remove(mappedRepository.getId());
        }
      }
    }
  }
  // store the applied mappings to request context
  ArrayList<String> appliedMappingsList = new ArrayList<String>(appliedMappings.size());
  for (RepositoryPathMapping mapping : appliedMappings) {
    appliedMappingsList.add(mapping.toString());
  }
  request.addAppliedMappingsList(repository, appliedMappingsList);
  // log it if needed
  if (getLogger().isDebugEnabled()) {
    if (appliedMappings.isEmpty()) {
      getLogger().debug("No mapping exists for request path [" + request.toString() + "]");
    } else {
      StringBuilder sb =
          new StringBuilder(
              "Request for path \""
                  + request.toString()
                  + "\" with the initial list of processable repositories of \""
                  + ResourceStoreUtils.getResourceStoreListAsString(resolvedRepositories)
                  + "\" got these mappings applied:\n");
      for (RepositoryPathMapping mapping : appliedMappings) {
        sb.append(" * ").append(mapping.toString()).append("\n");
      }
      getLogger().debug(sb.toString());
      if (reposIdSet.size() == 0) {
        getLogger()
            .debug(
                "Mapping for path ["
                    + request.toString()
                    + "] excluded all storages from servicing the request.");
      } else {
        getLogger()
            .debug(
                "Request path for ["
                    + request.toString()
                    + "] is MAPPED to reposes: "
                    + reposIdSet);
      }
    }
  }
  // materialize the surviving IDs back into Repository instances, preserving order
  ArrayList<Repository> result = new ArrayList<Repository>(reposIdSet.size());
  try {
    for (String repoId : reposIdSet) {
      result.add(repositoryRegistry.getRepository(repoId));
    }
  } catch (NoSuchRepositoryException e) {
    getLogger()
        .error(
            "Some of the Routes contains references to non-existant repositories! Please check the following mappings: \""
                + appliedMappingsList.toString()
                + "\".");
    throw e;
  }
  return result;
}
/**
 * Deletes the repository with the given ID, cleaning up everything that references it.
 *
 * <p>Order of operations: the repository is first taken out of service and de-indexed; the
 * method then fails fast if any shadow (virtual) repository still uses it as master; path
 * mappings are purged of the ID; the repository is removed from the registry; finally its
 * {@code CRepository} model entry is dropped, the configuration saved, and the repository
 * instance released.
 *
 * @param id the ID of the repository to delete
 * @throws NoSuchRepositoryException when no repository with the ID is registered, or its
 *     model entry is missing from the persisted configuration
 * @throws IOException when saving the configuration fails
 *     (NOTE(review): {@code RepositoryDependentException} is thrown but not declared —
 *     presumably unchecked or a subtype of a declared type; confirm)
 */
public synchronized void deleteRepository(String id)
    throws NoSuchRepositoryException, IOException, ConfigurationException {
  Repository repository = repositoryRegistry.getRepository(id);
  // put out of service so wont be accessed any longer
  repository.setLocalStatus(LocalStatus.OUT_OF_SERVICE);
  // disable indexing for same purpose
  repository.setIndexable(false);
  repository.setSearchable(false);
  // remove dependants too
  // =======
  // shadows
  // (fail if any repo references the currently processing one)
  List<ShadowRepository> shadows =
      repositoryRegistry.getRepositoriesWithFacet(ShadowRepository.class);
  for (Iterator<ShadowRepository> i = shadows.iterator(); i.hasNext(); ) {
    ShadowRepository shadow = i.next();
    if (repository.getId().equals(shadow.getMasterRepository().getId())) {
      throw new RepositoryDependentException(repository, shadow);
    }
  }
  // ======
  // groups
  // (correction in config only, registry DOES handle it)
  // since NEXUS-1770, groups are "self maintaining"
  // ===========
  // pathMappings
  // (correction, since registry is completely unaware of this component)
  List<CPathMappingItem> pathMappings =
      getConfigurationModel().getRepositoryGrouping().getPathMappings();
  for (Iterator<CPathMappingItem> i = pathMappings.iterator(); i.hasNext(); ) {
    CPathMappingItem item = i.next();
    item.removeRepository(id);
  }
  // ===========
  // and finally
  // this cleans it properly from the registry (from reposes and repo groups)
  repositoryRegistry.removeRepository(id);
  List<CRepository> reposes = getConfigurationModel().getRepositories();
  for (Iterator<CRepository> i = reposes.iterator(); i.hasNext(); ) {
    CRepository repo = i.next();
    if (repo.getId().equals(id)) {
      i.remove();
      saveConfiguration();
      releaseRepository(repository, getConfigurationModel(), repo);
      return;
    }
  }
  // registry knew the repository but the persisted model does not: inconsistent state
  throw new NoSuchRepositoryException(id);
}