/**
 * Returns PackageOverviews from a search.
 *
 * @param pids List of package ids returned from the search server.
 * @param archLabels List of channel arch labels.
 * @param relevantUserId user id to filter by when doing a relevant or architecture
 *        search; limits results to channels that servers visible to the user are
 *        subscribed to
 * @param filterChannelId channel id to filter by when doing a channel search
 * @param searchType type of search to do, one of "relevant", "channel",
 *        "architecture", or "all"
 * @return PackageOverviews from a search.
 */
public static List<PackageOverview> packageSearch(
        List<Long> pids,
        List<String> archLabels,
        Long relevantUserId,
        Long filterChannelId,
        String searchType) {
    Map<String, Object> params = new HashMap<String, Object>();
    SelectMode m = null;

    if (searchType.equals(PackageSearchAction.ARCHITECTURE)) {
        if (!(archLabels != null && archLabels.size() > 0)) {
            throw new MissingArchitectureException(
                    "archLabels must not be null for architecture search!");
        }

        // This makes me very sad. PreparedStatement.setObject does not allow
        // you to pass in Lists or Arrays. We can't manually convert archLabels
        // to a string and use the regular infrastructure because it will
        // escape the quotes between architectures. The only thing we can do
        // is to get the SelectMode and manually insert the architecture types
        // before we continue. If we can get PreparedStatement to accept Lists
        // then all this hackishness can go away. NOTE: we know that we have to
        // guard against SQL injection in this case. Notice that the archLabels
        // will all be enclosed in single quotes. Valid archLabels will only
        // contain alphanumeric, '-', and '_' characters. We simply check and
        // enforce that constraint; then, even if someone injected something,
        // we would either end up throwing an error or it would land inside a
        // string literal, and therefore not be dangerous.
        m = ModeFactory.getMode("Package_queries", "searchByIdAndArches");
        CachedStatement cs = m.getQuery();
        String query = cs.getOrigQuery();

        String archString = "'" + sanitizeArchLabel(archLabels.get(0)) + "'";
        for (int i = 1; i < archLabels.size(); i++) {
            archString += ", '" + sanitizeArchLabel(archLabels.get(i)) + "'";
        }
        query = query.replace(":channel_arch_labels", archString);
        cs.setQuery(query);
        m.setQuery(cs);
    }
    else if (searchType.equals(PackageSearchAction.RELEVANT)) {
        if (relevantUserId == null) {
            throw new IllegalArgumentException(
                    "relevantUserId must not be null for relevant search!");
        }
        params.put("uid", relevantUserId);
        m = ModeFactory.getMode("Package_queries", "relevantSearchById");
    }
    else if (searchType.equals(PackageSearchAction.CHANNEL)) {
        if (filterChannelId == null) {
            throw new IllegalArgumentException(
                    "filterChannelId must not be null for channel search!");
        }
        params.put("cid", filterChannelId);
        m = ModeFactory.getMode("Package_queries", "searchByIdInChannel");
    }
    else {
        m = ModeFactory.getMode("Package_queries", "searchById");
    }

    // SelectMode.execute will batch the pids properly and CachedStatement.execute
    // will create a comma-separated string representation of the list of pids.
    DataResult result = m.execute(params, pids);
    result.elaborate();
    return result;
}
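// The sanitizeArchLabel helper referenced above is not shown in this section.
// A minimal sketch of what it might look like, assuming it only needs to enforce
// the constraint described in the comment (alphanumeric, '-', and '_' characters
// only) before the label is spliced into the query string. The method name and
// the choice of exception are illustrative, not the actual implementation.
private static String sanitizeArchLabel(String archLabel) {
    // Reject anything outside the documented character set so a malicious label
    // can never break out of its single-quoted position in the generated query.
    if (archLabel == null || !archLabel.matches("[A-Za-z0-9_-]+")) {
        throw new IllegalArgumentException("Invalid channel arch label: " + archLabel);
    }
    return archLabel;
}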
/**
 * Primarily a convenience method to make testing easier.
 *
 * @param ctx Quartz job runtime environment
 * @throws JobExecutionException Indicates some sort of fatal error
 */
public void execute(JobExecutionContext ctx) throws JobExecutionException {
    try {
        SelectMode select = ModeFactory.getMode(
                TaskConstants.MODE_NAME,
                TaskConstants.TASK_QUERY_KSCLEANUP_FIND_CANDIDATES);
        DataResult dr = select.execute(Collections.EMPTY_MAP);
        if (log.isDebugEnabled()) {
            log.debug("Found " + dr.size() + " entries to process");
        }

        // Bail early if no candidates
        if (dr.size() == 0) {
            return;
        }

        Long failedStateId = findFailedStateId();
        if (failedStateId == null) {
            log.warn("Failed kickstart state id not found");
            return;
        }

        for (Iterator iter = dr.iterator(); iter.hasNext();) {
            Map row = (Map) iter.next();
            processRow(failedStateId, row);
        }
    }
    catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new JobExecutionException(e);
    }
}
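// Since the Quartz context is never dereferenced in the body above, a test can
// drive the cleanup logic directly. A minimal test sketch, assuming the enclosing
// task class is named KickstartCleanup (the real class name may differ) and that
// the test database already contains candidate rows:
public void testExecuteProcessesCandidates() throws Exception {
    KickstartCleanup task = new KickstartCleanup();
    // Passing null is safe here because execute() does not touch ctx.
    task.execute(null);
}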
private Long findTopmostParentAction(Long startingAction) {
    SelectMode select = ModeFactory.getMode(
            TaskConstants.MODE_NAME,
            TaskConstants.TASK_QUERY_KSCLEANUP_FIND_PREREQ_ACTION);
    Map params = new HashMap();
    params.put("action_id", startingAction);
    if (log.isDebugEnabled()) {
        log.debug("StartingAction: " + startingAction);
    }

    Long retval = startingAction;
    Long preqid = startingAction;
    DataResult dr = select.execute(params);
    if (log.isDebugEnabled()) {
        log.debug("dr: " + dr);
    }

    // Follow the prerequisite chain upwards until an action with no prerequisite
    // is reached; that action is the topmost parent.
    while (dr.size() > 0 && preqid != null) {
        preqid = (Long) ((Map) dr.get(0)).get("prerequisite");
        if (preqid != null) {
            retval = preqid;
            params.put("action_id", retval);
            dr = select.execute(params);
        }
    }

    if (log.isDebugEnabled()) {
        log.debug("preqid: " + preqid);
        log.debug("Returning: " + retval);
    }
    return retval;
}
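// The loop above is a parent-pointer walk expressed through repeated queries.
// A minimal in-memory sketch of the same idea, assuming a hypothetical map of
// action id -> prerequisite action id (null when an action has no parent):
private static Long findTopmostParent(Long startingAction, Map<Long, Long> prerequisiteOf) {
    Long topmost = startingAction;
    Long parent = prerequisiteOf.get(startingAction);
    while (parent != null) {
        topmost = parent;                  // climb one level up the chain
        parent = prerequisiteOf.get(parent);
    }
    return topmost;
}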
/**
 * Returns information about whether each package in the list is compatible with the
 * channel architecture and whether the org has access to it.
 *
 * @param orgId organization id
 * @param channelId channel id
 * @param packageIds list of package ids
 * @return dataresult(id, package_arch_id, org_package, org_access, shared_access)
 */
public static DataResult getPackagesChannelArchCompatAndOrgAccess(
        Long orgId, Long channelId, List<Long> packageIds) {
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("org_id", orgId);
    params.put("channel_id", channelId);
    SelectMode m = ModeFactory.getMode("Package_queries", "channel_arch_and_org_access");
    return m.execute(params, packageIds);
}
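// A minimal usage sketch, assuming each returned row is a Map keyed by the column
// names listed in the @return tag (as with the other raw DataResult queries in this
// section); the wrapping method and variable names are illustrative only:
private static void inspectPackageAccess(Long orgId, Long channelId, List<Long> packageIds) {
    DataResult compat = getPackagesChannelArchCompatAndOrgAccess(orgId, channelId, packageIds);
    for (Object rowObj : compat) {
        Map row = (Map) rowObj;
        Long packageId = (Long) row.get("id");
        // org_access / shared_access indicate whether the org may use this package
        Object orgAccess = row.get("org_access");
        Object sharedAccess = row.get("shared_access");
    }
}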
/**
 * Gets the list of active org admins (com.redhat.rhn.domain.user.User objects) in this org.
 *
 * @return Returns the set of active org admins in this org.
 */
public List<User> getActiveOrgAdmins() {
    SelectMode m = ModeFactory.getMode("User_queries", "active_org_admins");
    Map<String, Object> params = new HashMap<String, Object>();
    params.put(ORG_ID_KEY, this.getId());
    DataResult dr = m.execute(params);
    if (dr == null) {
        return null;
    }
    return getUsers(dr);
}
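// A minimal caller sketch; note that the method can return null when the query
// yields no DataResult, so callers should guard for that. The helper name is
// hypothetical and java.util.ArrayList is assumed to be imported:
private static List<String> activeAdminLogins(Org org) {
    List<String> logins = new ArrayList<String>();
    List<User> admins = org.getActiveOrgAdmins();
    if (admins == null) {
        return logins; // query returned nothing
    }
    for (User admin : admins) {
        logins.add(admin.getLogin());
    }
    return logins;
}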
private Long findFailedStateId() {
    Long retval = null;
    SelectMode select = ModeFactory.getMode(
            TaskConstants.MODE_NAME,
            TaskConstants.TASK_QUERY_KSCLEANUP_FIND_FAILED_STATE_ID);
    DataResult dr = select.execute(Collections.EMPTY_MAP);
    if (dr.size() > 0) {
        retval = (Long) ((Map) dr.get(0)).get("id");
    }
    return retval;
}
/**
 * Returns true if the Package with the given name and evr ids exists in the Channel whose
 * id is cid.
 *
 * @param cid Channel id to look in
 * @param nameId Package name id
 * @param evrId Package evr id
 * @return true if the Package with the given name and evr ids exists in the Channel whose
 *         id is cid.
 */
public static boolean isPackageInChannel(Long cid, Long nameId, Long evrId) {
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("cid", cid);
    params.put("name_id", nameId);
    params.put("evr_id", evrId);
    SelectMode m = ModeFactory.getMode("Channel_queries", "is_package_in_channel");
    DataResult dr = m.execute(params);
    if (dr.isEmpty()) {
        return false;
    }
    BooleanWrapper bw = (BooleanWrapper) dr.get(0);
    return bw.booleanValue();
}
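// A minimal caller sketch; the wrapping method name and the follow-up step are
// hypothetical, and the three ids are assumed to be already known to the caller:
private static void ensurePackageInChannel(Long cid, Long nameId, Long evrId) {
    if (isPackageInChannel(cid, nameId, evrId)) {
        return; // already present, nothing to do
    }
    // hypothetical follow-up: associate the package with the channel here
}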
/**
 * Looks up a list of applicable kickstart profiles. The list is generated based on matches
 * between the host server's base channel arch and the profile's channel arch.
 *
 * @return DataResult of applicable kickstart profiles; if the host server does not exist
 *         or does not have a base channel assigned, only Cobbler profiles are returned
 */
public DataResult<KickstartDto> getKickstartProfiles() {
    log.debug("getKickstartProfiles()");
    DataResult<KickstartDto> retval = new DataResult<KickstartDto>(Collections.EMPTY_LIST);

    // Profiles are associated with the host; the target system might not be created
    // yet. Also, the host will be the one performing the kickstart, so the profile
    // is relative to that system.
    Server hostServer = getHostServer();
    if (hostServer != null) {
        log.debug("getKickstartProfiles(): hostServer isn't null");
        Channel baseChannel = hostServer.getBaseChannel();
        if (baseChannel != null) {
            log.debug("getKickstartProfiles(): hostServer.baseChannel isn't null");
            ChannelArch arch = baseChannel.getChannelArch();
            SelectMode mode = getMode();
            Map<String, Object> params = new HashMap<String, Object>();
            params.put("org_id", this.user.getOrg().getId());
            params.put("prim_arch_id", arch.getId());
            if (arch.getName().equals("x86_64")) {
                log.debug(" Adding IA-32 to search list.");
                ChannelArch ia32arch = ChannelFactory.lookupArchByName("IA-32");
                params.put("sec_arch_id", ia32arch.getId());
            }
            else if (arch.getName().equals("IA-32") &&
                    (hostServer.getServerArch().getName().equals(
                            ServerConstants.getArchI686().getName()) ||
                     hostServer.getServerArch().getName().equals(
                            ServerConstants.getArchATHLON().getName()))) {
                log.debug(" Adding x86_64 to search list.");
                ChannelArch x86Arch = ChannelFactory.lookupArchByName("x86_64");
                params.put("sec_arch_id", x86Arch.getId());
            }
            else if (arch.getName().equals("PPC")) {
                log.debug(" Adding ppc64le to search list.");
                ChannelArch ppc64le = ChannelFactory.lookupArchByName("PPC64LE");
                params.put("sec_arch_id", ppc64le.getId());
            }
            else if (arch.getName().equals("PPC64LE")) {
                log.debug(" Adding ppc to search list.");
                ChannelArch ppc = ChannelFactory.lookupArchByName("PPC");
                params.put("sec_arch_id", ppc.getId());
            }
            else {
                params.put("sec_arch_id", arch.getId());
            }

            retval = mode.execute(params);
            if (log.isDebugEnabled()) {
                log.debug("got back from DB: " + retval);
            }
            KickstartLister.getInstance().setKickstartUrls(retval, user);
            KickstartLister.getInstance().pruneInvalid(user, retval);
            retval.setTotalSize(retval.size());
        }
    }

    List<CobblerProfileDto> dtos = KickstartLister.getInstance().listCobblerProfiles(user);
    if (log.isDebugEnabled()) {
        log.debug("got back from cobbler: " + dtos);
    }
    retval.setTotalSize(retval.getTotalSize() + dtos.size());
    retval.addAll(dtos);

    return retval;
}
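// The if/else chain above encodes a fixed "compatible secondary arch" mapping for
// the profile search. A minimal sketch that extracts it into a pure helper for
// clarity, assuming the same channel arch names used above; the method name is
// hypothetical and the i686/athlon special case for IA-32 hosts is noted in a
// comment rather than reproduced:
private static String secondarySearchArch(String primaryArchName) {
    if ("x86_64".equals(primaryArchName)) {
        return "IA-32";      // x86_64 hosts can also use 32-bit profiles
    }
    if ("IA-32".equals(primaryArchName)) {
        return "x86_64";     // only when the host server arch is i686 or athlon
    }
    if ("PPC".equals(primaryArchName)) {
        return "PPC64LE";
    }
    if ("PPC64LE".equals(primaryArchName)) {
        return "PPC";
    }
    return primaryArchName;  // no distinct secondary arch; search the same arch twice
}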