/**
 * Builds the list of teaching-assistant records for the given section.
 *
 * @param sectionUuid uuid of the section (also the site group id)
 * @return a list of TeachingAssistantRecordImpl; empty when the section does not exist or the
 *     site defines no TA role
 */
private List getSectionTeachingAssistants(String sectionUuid) {
  Group group = siteService.findGroup(sectionUuid);
  CourseSection section = getSection(sectionUuid);
  if (section == null) {
    // Unknown section: nothing to list.
    return new ArrayList();
  }
  if (log.isDebugEnabled()) log.debug("Getting section enrollments in " + sectionUuid);
  Set taRoles = getSectionTaRoles(group);
  if (taRoles == null || taRoles.isEmpty()) {
    if (log.isDebugEnabled())
      log.debug("There is no role for TAs in this site... returning an empty list");
    return new ArrayList();
  }
  // Collect the Sakai user uids for every TA role in the group.
  // (Idiom fix: enhanced-for instead of raw Iterator loops.)
  Set sakaiUserUids = new HashSet();
  for (Object roleObj : taRoles) {
    sakaiUserUids.addAll(group.getUsersHasRole((String) roleObj));
  }
  List sakaiUsers = userDirectoryService.getUsers(sakaiUserUids);
  // Convert each Sakai user into a TA record for this section, skipping
  // users that fail conversion.
  List<TeachingAssistantRecordImpl> membersList = new ArrayList<TeachingAssistantRecordImpl>();
  for (Object userObj : sakaiUsers) {
    User user = SakaiUtil.convertUser((org.sakaiproject.user.api.User) userObj);
    if (user != null) {
      membersList.add(new TeachingAssistantRecordImpl(section, user));
    }
  }
  return membersList;
}
@Override public void close() throws IOException { if (log.isDebugEnabled()) log.debug("close(targetId=" + targetId); boolean alreadyClosed = closed.getAndSet(true); if (log.isDebugEnabled()) log.debug("alreadyClosed=" + alreadyClosed); if (!alreadyClosed) { if (log.isDebugEnabled()) log.debug( "complete AsyncContext of targetId=" + targetId + " with status=" + HttpServletResponse.SC_OK); // The stream data must be completely read. // Otherwise the data remains in the socket and // disturbs the next request. while (is.read() != -1) { // if (log.isDebugEnabled()) log.debug("read before close, " + (char)c); } is.close(); HttpServletResponse response = (HttpServletResponse) rctxt.getResponse(); response.getOutputStream().close(); response.setStatus(HttpServletResponse.SC_OK); rctxt.complete(); super.close(); } if (log.isDebugEnabled()) log.debug(")close"); }
/**
 * Selects the view to render from the resolved candidates. Redirect views win outright;
 * otherwise the first candidate whose declared content type is included by a requested media
 * type (in the client's preference order) is chosen.
 *
 * @param candidateViews views resolved for the request, in resolver order
 * @param requestedMediaTypes acceptable media types, most preferred first
 * @return the selected view, or {@code null} when no candidate is acceptable
 */
private View getBestView(List<View> candidateViews, List<MediaType> requestedMediaTypes) {
  // A redirect view short-circuits content negotiation entirely.
  for (View view : candidateViews) {
    if (view instanceof SmartView && ((SmartView) view).isRedirectView()) {
      if (logger.isDebugEnabled()) {
        logger.debug("Returning redirect view [" + view + "]");
      }
      return view;
    }
  }
  // Honor the client's media-type preference order.
  for (MediaType requested : requestedMediaTypes) {
    for (View view : candidateViews) {
      String declaredType = view.getContentType();
      if (!StringUtils.hasText(declaredType)) {
        continue;
      }
      if (requested.includes(MediaType.parseMediaType(declaredType))) {
        if (logger.isDebugEnabled()) {
          logger.debug(
              "Returning [" + view + "] based on requested media type '" + requested + "'");
        }
        return view;
      }
    }
  }
  return null;
}
/** * Reads and stores the mime type setting corresponding to a file extension, by reading text from * an InputStream. If a mime type setting already exists when this method is run, the mime type * value is replaced with the newer one. * * @param is * @throws IOException */ public void loadAndReplaceMimetypes(InputStream is) throws IOException { BufferedReader br = new BufferedReader(new InputStreamReader(is)); String line = null; while ((line = br.readLine()) != null) { line = line.trim(); if (line.startsWith("#") || line.length() == 0) { // Ignore comments and empty lines. } else { StringTokenizer st = new StringTokenizer(line, " \t"); if (st.countTokens() > 1) { String mimetype = st.nextToken(); while (st.hasMoreTokens()) { String extension = st.nextToken(); extensionToMimetypeMap.put(extension, mimetype); if (log.isDebugEnabled()) { log.debug( "Setting mime type for extension '" + extension + "' to '" + mimetype + "'"); } } } else { if (log.isDebugEnabled()) { log.debug("Ignoring mimetype with no associated file extensions: '" + line + "'"); } } } } }
@Override public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable { RedisCommand commandToExecute = RedisCommand.failsafeCommandLookup(method.getName()); if (isPotentiallyThreadBoundCommand(commandToExecute)) { if (log.isDebugEnabled()) { log.debug(String.format("Invoke '%s' on bound conneciton", method.getName())); } return invoke(method, obj, args); } if (log.isDebugEnabled()) { log.debug(String.format("Invoke '%s' on unbound conneciton", method.getName())); } RedisConnection connection = factory.getConnection(); try { return invoke(method, connection, args); } finally { // properly close the unbound connection after executing command if (!connection.isClosed()) { connection.close(); } } }
@Override public void processChildNode(String appDirName, String childNodeName, byte[] childData) throws com.google.protobuf.InvalidProtocolBufferException { if (childNodeName.startsWith(ApplicationId.appIdStrPrefix)) { // application if (LOG.isDebugEnabled()) { LOG.debug("Loading application from node: " + childNodeName); } ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl(ApplicationStateDataProto.parseFrom(childData)); ApplicationId appId = appState.getApplicationSubmissionContext().getApplicationId(); rmState.appState.put(appId, appState); } else if (childNodeName.startsWith(ApplicationAttemptId.appAttemptIdStrPrefix)) { // attempt if (LOG.isDebugEnabled()) { LOG.debug("Loading application attempt from node: " + childNodeName); } ApplicationAttemptStateDataPBImpl attemptState = new ApplicationAttemptStateDataPBImpl( ApplicationAttemptStateDataProto.parseFrom(childData)); attempts.add(attemptState); } else { LOG.info("Unknown child node with name: " + childNodeName); } }
/**
 * Rewrites root catalog metadata after a region merge: clears every catalog column of both
 * source regions and writes the merged region's info row, marked offline.
 *
 * @param oldRegion1 row key of the first merged-away region
 * @param oldRegion2 row key of the second merged-away region
 * @param newRegion the region produced by the merge
 * @throws IOException if a catalog mutation fails
 */
@Override
protected void updateMeta(final byte[] oldRegion1, final byte[] oldRegion2, HRegion newRegion)
    throws IOException {
  // Purge all catalog columns of both merged-away regions.
  for (byte[] regionName : new byte[][] {oldRegion1, oldRegion2}) {
    Delete delete = new Delete(regionName);
    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER);
    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
    root.delete(delete, null, true);
    if (LOG.isDebugEnabled()) {
      LOG.debug("updated columns in row: " + Bytes.toStringBinary(regionName));
    }
  }
  // Record the merged region, offline until it is reopened.
  HRegionInfo newInfo = newRegion.getRegionInfo();
  newInfo.setOffline(true);
  Put put = new Put(newRegion.getRegionName());
  put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, Writables.getBytes(newInfo));
  root.put(put);
  if (LOG.isDebugEnabled()) {
    LOG.debug("updated columns in row: " + Bytes.toStringBinary(newRegion.getRegionName()));
  }
}
/**
 * Evaluates an if/then/else construct: argument 0 is the condition, argument 1 the mandatory
 * then-statement, argument 2 the optional else-statement.
 *
 * @param executionContext the script execution context
 * @return the value of the selected branch
 * @throws RuntimeException if the then-statement is missing
 * @throws IllegalArgumentException if the condition is not Boolean/String/Integer
 * @throws Throwable propagated from evaluating a condition function call
 */
public Object invoke(JDinkExecutionContext executionContext) throws Throwable {
  Object value = getArgument(0);
  Object thenStatement = getArgument(1);
  Object elseStatement = getArgument(2);
  if (log.isDebugEnabled()) {
    log.debug("if " + value + " then " + thenStatement + " else " + elseStatement);
  }
  // The condition may itself be a function call; evaluate it first.
  if (value instanceof JDinkScriptFunctionCall) {
    value = ((JDinkScriptFunctionCall) value).invoke(executionContext);
  }
  if (thenStatement == null) {
    throw new RuntimeException("then-statement missing");
  }
  boolean b;
  if (value instanceof Boolean) {
    b = ((Boolean) value).booleanValue();
  } else if (value instanceof String) {
    // Fix: parse directly instead of allocating via the deprecated
    // Boolean(String) constructor; parseBoolean is behaviorally identical.
    b = Boolean.parseBoolean((String) value);
  } else if (value instanceof Integer) {
    // Any non-zero integer counts as true.
    b = ((Integer) value).intValue() != 0;
  } else {
    throw new IllegalArgumentException("if: boolean value expected");
  }
  if (log.isDebugEnabled()) {
    log.debug("b=" + b);
  }
  if (b) {
    return asValue(executionContext, thenStatement);
  } else {
    return asValue(executionContext, elseStatement);
  }
}
/** * Is this a fault that should put the endpoint on SUSPEND? or is this a fault to ignore? * * @param synCtx the current fault message * @return true if this fault should suspend the endpoint */ protected boolean isSuspendFault(MessageContext synCtx) { Integer errorCode = (Integer) synCtx.getProperty(SynapseConstants.ERROR_CODE); if (errorCode != null) { if (definition.getSuspendErrorCodes().isEmpty()) { // if suspend codes are not defined, any error will be fatal for the endpoint if (log.isDebugEnabled()) { log.debug(this.toString() + " encountered a fatal error : " + errorCode); } return true; } else { if (definition.getSuspendErrorCodes().contains(errorCode)) { if (log.isDebugEnabled()) { log.debug( "Encountered a suspend error : " + errorCode + " defined suspend codes are : " + definition.getSuspendErrorCodes()); } return true; } } } if (log.isDebugEnabled()) { log.debug( "Encountered a non-fatal error sending to " + this.toString() + ", error code : " + errorCode + ". Error will be handled, but endpoint will not fail"); } return false; }
/**
 * Undeploys the named endpoint: removes it from the Synapse configuration and destroys it.
 * When no endpoint with that name exists the call is a no-op (already undeployed).
 *
 * @param artifactName name of the endpoint artifact to undeploy
 */
@Override
public void undeploySynapseArtifact(String artifactName) {
  if (log.isDebugEnabled()) {
    log.debug("Endpoint Undeployment of the endpoint named : " + artifactName + " : Started");
  }
  try {
    Endpoint ep = getSynapseConfiguration().getDefinedEndpoints().get(artifactName);
    if (ep != null) {
      // Fix: ep is already known non-null here, so the former
      // "(ep != null) ? ep.getArtifactContainerName() : \"\"" ternary was dead code.
      CustomLogSetter.getInstance().setLogAppender(ep.getArtifactContainerName());
      getSynapseConfiguration().removeEndpoint(artifactName);
      if (log.isDebugEnabled()) {
        log.debug("Destroying the endpoint named : " + artifactName);
      }
      ep.destroy();
      if (log.isDebugEnabled()) {
        log.debug(
            "Endpoint Undeployment of the endpoint named : " + artifactName + " : Completed");
      }
      log.info("Endpoint named '" + ep.getName() + "' has been undeployed");
    } else if (log.isDebugEnabled()) {
      log.debug("Endpoint " + artifactName + " has already been undeployed");
    }
  } catch (Exception e) {
    handleSynapseArtifactDeploymentError(
        "Endpoint Undeployement of endpoint named : " + artifactName + " : Failed", e);
  }
}
/**
 * Restores the named endpoint by re-serializing it into its file under the endpoints
 * directory. Reports a deployment error when the endpoint is unknown or has no file name.
 *
 * @param artifactName name of the endpoint artifact to restore
 */
@Override
public void restoreSynapseArtifact(String artifactName) {
  if (log.isDebugEnabled()) {
    log.debug("Restoring the Endpoint with name : " + artifactName + " : Started");
  }
  try {
    Endpoint ep = getSynapseConfiguration().getDefinedEndpoints().get(artifactName);
    // Fix: the original dereferenced ep (EndpointSerializer / ep.getFileName()) right after a
    // null-aware ternary, throwing an opaque NPE when the endpoint is unknown. Report a proper
    // deployment error instead.
    if (ep == null) {
      handleSynapseArtifactDeploymentError(
          "Couldn't restore the endpoint named '" + artifactName + "', endpoint cannot be found");
      return;
    }
    CustomLogSetter.getInstance().setLogAppender(ep.getArtifactContainerName());
    OMElement epElem = EndpointSerializer.getElementFromEndpoint(ep);
    if (ep.getFileName() != null) {
      String fileName =
          getServerConfigurationInformation().getSynapseXMLLocation()
              + File.separator
              + MultiXMLConfigurationBuilder.ENDPOINTS_DIR
              + File.separator
              + ep.getFileName();
      writeToFile(epElem, fileName);
      if (log.isDebugEnabled()) {
        log.debug("Restoring the Endpoint with name : " + artifactName + " : Completed");
      }
      log.info("Endpoint named '" + artifactName + "' has been restored");
    } else {
      handleSynapseArtifactDeploymentError(
          "Couldn't restore the endpoint named '" + artifactName + "', filename cannot be found");
    }
  } catch (Exception e) {
    handleSynapseArtifactDeploymentError(
        "Restoring of the endpoint named '" + artifactName + "' has failed", e);
  }
}
/**
 * Bumps the delivery-attempt counter and, once the given attempt count reaches the maximum,
 * terminates this task and deactivates the message processor — either dropping the failed
 * message and continuing (when max-delivery-drop is enabled) or simply stopping.
 *
 * @param attemptCount the attempt number being checked against the limit
 * @param maxAttempts the configured maximum; values <= 0 disable the check entirely
 */
private void checkAndDeactivateProcessor(int attemptCount, int maxAttempts) {
  if (maxAttempts > 0) {
    this.attemptCount++;
    if (attemptCount >= maxAttempts) {
      // Fix: terminate() and deactivate() were duplicated in the else branch below,
      // running twice on the non-drop path. They are now invoked exactly once.
      terminate();
      this.messageProcessor.deactivate();
      if (this.isMaxDeliveryAttemptDropEnabled) {
        dropMessageAndContinueMessageProcessor();
        if (log.isDebugEnabled()) {
          log.debug(
              "Message processor ["
                  + messageProcessor.getName()
                  + "] Dropped the failed message and continue due to reach of max attempts");
        }
      } else {
        if (log.isDebugEnabled()) {
          log.debug(
              "Message processor ["
                  + messageProcessor.getName()
                  + "] stopped due to reach of max attempts");
        }
      }
    }
  }
}
/**
 * Struts action that loads the top menu entries and exposes them to the view under the
 * "smirtMenus" request attribute. Failures are logged and the "top" forward is returned
 * regardless.
 *
 * @param mapping the action mapping used to locate the "top" forward
 * @param form unused
 * @param request request the menus are attached to
 * @param response unused
 * @return the "top" forward
 * @throws Exception declared by the Struts contract (nothing is thrown here)
 */
public ActionForward topMenuList(
    ActionMapping mapping,
    ActionForm form,
    HttpServletRequest request,
    HttpServletResponse response)
    throws Exception {
  if (logger.isDebugEnabled()) {
    logger.debug(
        "topMenuList(ActionMapping, ActionForm, HttpServletRequest, HttpServletResponse) - start");
  }

  // Fix: removed the unused ActionErrors local and the dead null-initialization
  // of the MenuBean reference.
  ActionForward _forward = mapping.findForward("top");
  try {
    MenuBean _mb = new MenuBean();
    MenuVO[] _menuVOs = _mb.leftMenuList("top");
    if (_menuVOs != null && _menuVOs.length > 0) {
      request.setAttribute("smirtMenus", _menuVOs);
    }
  } catch (BusinessServiceException e) {
    logger.error("topMenuList() - BusinesServiceException", e);
  } catch (Exception e) {
    logger.error("topMenuList() - uncatched exception", e);
  }
  if (logger.isDebugEnabled()) {
    logger.debug(
        "topMenuList(ActionMapping, ActionForm, HttpServletRequest, HttpServletResponse) - end");
  }
  return _forward;
}
/**
 * Persists (or updates) an RM delegation token in leveldb. A brand-new token also advances the
 * persisted token sequence number; both writes go into a single atomic batch.
 *
 * @param tokenId the delegation token identifier
 * @param renewDate the token's renewal timestamp
 * @param isUpdate true when refreshing an existing token (sequence number untouched)
 * @throws IOException wrapping any underlying DBException
 */
private void storeOrUpdateRMDT(
    RMDelegationTokenIdentifier tokenId, Long renewDate, boolean isUpdate) throws IOException {

  String tokenKey = getRMDTTokenNodeKey(tokenId);
  RMDelegationTokenIdentifierData tokenData =
      new RMDelegationTokenIdentifierData(tokenId, renewDate);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Storing token to " + tokenKey);
  }
  // Idiom fix: try-with-resources replaces the manual try/finally { batch.close(); }.
  try (WriteBatch batch = db.createWriteBatch()) {
    batch.put(bytes(tokenKey), tokenData.toByteArray());
    if (!isUpdate) {
      ByteArrayOutputStream bs = new ByteArrayOutputStream();
      try (DataOutputStream ds = new DataOutputStream(bs)) {
        ds.writeInt(tokenId.getSequenceNumber());
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("Storing " + tokenId.getSequenceNumber() + " to " + RM_DT_SEQUENCE_NUMBER_KEY);
      }
      batch.put(bytes(RM_DT_SEQUENCE_NUMBER_KEY), bs.toByteArray());
    }
    db.write(batch);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Applies the XSLT transformation registered under {@code type} to {@code input}, passing the
 * optional {@code transformModel} entries as stylesheet parameters.
 *
 * @param type key of the registered Templates to use
 * @param input the XML to transform
 * @param transformModel optional stylesheet parameters; may be null
 * @return the trimmed transformation output
 * @throws TransformException if the transformation type is unknown or the transform fails
 */
public String transform(String type, String input, Map<String, String> transformModel)
    throws TransformException {
  if (log.isDebugEnabled()) {
    log.debug("transform(input=" + input + ")");
  }
  Templates template = templates.get(type);
  if (log.isDebugEnabled()) {
    log.debug("template=" + template);
  }
  if (template == null) {
    // Fix: the original threw a RuntimeException from inside its own try/catch,
    // which re-logged it ("Error during transform") and re-wrapped it. Throw the
    // final exception directly, preserving the wrapped-RuntimeException shape,
    // to avoid the double logging.
    String errMsg = "Transformation '" + type + "' not available";
    log.error(errMsg);
    throw new TransformException(new RuntimeException(errMsg));
  }
  try {
    Transformer transformer = template.newTransformer();
    StringWriter writer = new StringWriter(INITIAL_BUFFER_SIZE);
    transformer.setErrorListener(new DefaultErrorListener());
    if (transformModel != null) {
      for (Map.Entry<String, String> entry : transformModel.entrySet()) {
        transformer.setParameter(entry.getKey(), entry.getValue());
      }
    }
    transformer.transform(new StreamSource(new StringReader(input)), new StreamResult(writer));
    String result = writer.toString().trim();
    if (log.isDebugEnabled()) {
      log.debug("result=" + result);
    }
    return result;
  } catch (Exception e) {
    log.error("Error during transform", e);
    throw new TransformException(e);
  }
}
/**
 * Fetches a Twitter followers list for either a screen name or a user id (optionally limited by
 * a count template parameter) and attaches the result to the message payload.
 *
 * <p>NOTE(review): the debug banner says "get user time line" but the calls fetch a followers
 * list — confirm which is intended before renaming anything.
 *
 * @param messageContext the message context carrying the template parameters
 * @throws ConnectException declared by the connector contract
 */
@Override
public void connect(MessageContext messageContext) throws ConnectException {
  if (log.isDebugEnabled()) {
    // Fix: was log.info() inside a debug-level guard.
    log.debug("executing twitter get user time line");
  }
  try {
    String screenName =
        (TwitterUtils.lookupTemplateParamater(messageContext, SCREEN_NAME) != null
                && !TwitterUtils.lookupTemplateParamater(messageContext, SCREEN_NAME).isEmpty())
            ? TwitterUtils.lookupTemplateParamater(messageContext, SCREEN_NAME)
            : null;
    String userID =
        (TwitterUtils.lookupTemplateParamater(messageContext, USER_ID) != null
                && !TwitterUtils.lookupTemplateParamater(messageContext, USER_ID).isEmpty())
            ? TwitterUtils.lookupTemplateParamater(messageContext, USER_ID)
            : null;
    String count =
        (TwitterUtils.lookupTemplateParamater(messageContext, COUNT) != null
                && !TwitterUtils.lookupTemplateParamater(messageContext, COUNT).isEmpty())
            ? TwitterUtils.lookupTemplateParamater(messageContext, COUNT)
            : null;
    Twitter twitter = new TwitterClientLoader(messageContext).loadApiClient();
    List<User> results = null;
    if (screenName != null && !screenName.isEmpty()) {
      if (count != null && !count.isEmpty()) {
        results = twitter.getFollowersList(screenName, Long.parseLong(count));
      } else {
        results = twitter.getFollowersList(screenName, -1);
      }
    } else if (userID != null && !userID.isEmpty()) {
      if (count != null && !count.isEmpty()) {
        results = twitter.getFollowersList(Long.parseLong(userID), Long.parseLong(count));
      } else {
        results = twitter.getFollowersList(Long.parseLong(userID), -1);
      }
    }
    if (log.isDebugEnabled() && results != null) {
      // Fix: was log.error() inside a debug-level guard, and would NPE here when
      // neither screenName nor userID was supplied (results still null).
      log.debug("Retrived results : " + results.toString());
    }
    OMElement element = this.performSearchMessages(results);
    super.preparePayload(messageContext, element);
  } catch (TwitterException te) {
    log.error("Failed to search twitter : " + te.getMessage(), te);
    TwitterUtils.storeErrorResponseStatus(messageContext, te);
  } catch (Exception te) {
    log.error("Failed to search generic: " + te.getMessage(), te);
    TwitterUtils.storeErrorResponseStatus(messageContext, te);
  }
}
/** * SOQL クエリを実行する。 * * @param service {@link SalesforceService}。null 不可。 * @param query SOQL クエリ。 * @param force ユーザに確認せずに処理を実行する場合は true、それ以外は false。 * @throws CommandException * @throws SalesforceServiceException */ public void update(final SalesforceService service, final String query, final boolean force) throws CommandException, SalesforceServiceException { if (service == null) { throw new NullPointerException("service"); } if (LOG.isDebugEnabled()) { LOG.debug("in Update#update() : service=" + service); LOG.debug("in Update#update() : query=" + query); LOG.debug("in Update#update() : force=" + force); } // ユーザに確認。 if (!force && !ConsoleUtil.confirmToContinue( SalesforceResource.getInstance() .getResourceString(this.getClass(), "message.confirm.update"))) { if (LOG.isDebugEnabled()) { LOG.debug("User cancelled."); } return; } UpdateMonitor monitor = new MyUpdateMonitor(); service.update(query, monitor); if (LOG.isDebugEnabled()) { LOG.debug("out Update#update()"); } }
/**
 * Waits (polling) for all tracked works to finish, up to the configured timeout, then disposes
 * the work tracker. An interrupt re-asserts the thread's interrupt flag; the wait then keeps
 * cycling until the works finish or the timeout expires (same as the original behavior).
 */
@Override
public void dispose() {
  if (logger.isDebugEnabled()) {
    logger.debug("Waiting for works to finish execution");
  }
  long initialMillis = System.currentTimeMillis();
  // Idiom fix: isEmpty() instead of size() != 0.
  while (!workTracker.pendingWorks().isEmpty() && !isTimeoutExpired(initialMillis)) {
    try {
      Thread.sleep(waitMillis);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
  if (logger.isDebugEnabled()) {
    // Fix: message previously read "There are %s works unfinished works".
    logger.debug(
        String.format(
            "Stop waiting for works completion. There are %s unfinished works",
            workTracker.pendingWorks().size()));
  }
  workTracker.dispose();
}
/**
 * Updates catalog metadata after a merge: deletes both source regions' rows (resetting the
 * cached {@code latestRegion} if it was one of them) and writes the merged region's info row,
 * marked offline.
 *
 * @param oldRegion1 row key of the first merged-away region
 * @param oldRegion2 row key of the second merged-away region
 * @param newRegion the region produced by the merge
 * @throws IOException if a catalog mutation fails
 */
@Override
protected void updateMeta(final byte[] oldRegion1, final byte[] oldRegion2, HRegion newRegion)
    throws IOException {
  byte[][] regionsToDelete = {oldRegion1, oldRegion2};
  for (int r = 0; r < regionsToDelete.length; r++) {
    // Fix: guard against latestRegion already being null — the original dereferenced
    // it unconditionally, so clearing it on the first iteration caused an NPE on the
    // second.
    if (latestRegion != null
        && Bytes.equals(regionsToDelete[r], latestRegion.getRegionName())) {
      latestRegion = null;
    }
    Delete delete = new Delete(regionsToDelete[r]);
    table.delete(delete);
    if (LOG.isDebugEnabled()) {
      LOG.debug("updated columns in row: " + Bytes.toStringBinary(regionsToDelete[r]));
    }
  }
  newRegion.getRegionInfo().setOffline(true);
  Put put = new Put(newRegion.getRegionName());
  put.add(
      HConstants.CATALOG_FAMILY,
      HConstants.REGIONINFO_QUALIFIER,
      Writables.getBytes(newRegion.getRegionInfo()));
  table.put(put);
  if (LOG.isDebugEnabled()) {
    LOG.debug("updated columns in row: " + Bytes.toStringBinary(newRegion.getRegionName()));
  }
}
/**
 * Initializes the JXPathContext based on any relevant properties set for the filter.
 *
 * @param context the JXPathContext to initialize
 */
protected void initialise(JXPathContext context) {
  if (namespaces != null) {
    if (logger.isDebugEnabled()) {
      logger.debug("Initializing JXPathContext with namespaces: " + namespaces);
    }
    // Idiom fix: enhanced-for instead of a raw Iterator sharing one
    // method-scoped Map.Entry variable across both loops.
    for (Object element : namespaces.entrySet()) {
      Map.Entry entry = (Map.Entry) element;
      context.registerNamespace(entry.getKey().toString(), entry.getValue().toString());
    }
  }
  if (contextProperties != null) {
    if (logger.isDebugEnabled()) {
      logger.debug("Initializing JXPathContext with properties: " + contextProperties);
    }
    for (Object element : contextProperties.entrySet()) {
      Map.Entry entry = (Map.Entry) element;
      // Note: property values are set as-is (not toString'd), unlike namespace URIs.
      context.setValue(entry.getKey().toString(), entry.getValue());
    }
  }
  if (factory != null) {
    context.setFactory(factory);
  }
  context.setLenient(lenient);
}
/**
 * Determines the mimetype of a file by looking up the file's extension in an internal listing to
 * find the corresponding mime type. If the file has no extension, or the extension is not
 * available in the listing contained in this class, the default mimetype <code>
 * application/octet-stream</code> is returned.
 *
 * <p>A file extension is one or more characters that occur after the last period (.) in the
 * file's name.
 *
 * @param fileName The name of the file whose extension may match a known mimetype.
 * @return The file's mimetype based on its extension, or a default value of <code>
 *     application/octet-stream</code> if a mime type value cannot be found.
 */
public String getMimetype(String fileName) {
  int lastPeriodIndex = fileName.lastIndexOf(".");
  if (lastPeriodIndex > 0 && lastPeriodIndex + 1 < fileName.length()) {
    String ext = fileName.substring(lastPeriodIndex + 1);
    // Fix: single map lookup instead of keySet().contains() followed by get().
    // (The load path never stores null values, so a null result means "absent".)
    String mimetype = (String) extensionToMimetypeMap.get(ext);
    if (mimetype != null) {
      if (log.isDebugEnabled()) {
        log.debug("Recognised extension '" + ext + "', mimetype is: '" + mimetype + "'");
      }
      return mimetype;
    } else {
      if (log.isDebugEnabled()) {
        log.debug(
            "Extension '"
                + ext
                + "' is unrecognized in mime type listing"
                + ", using default mime type: '"
                + MIMETYPE_OCTET_STREAM
                + "'");
      }
    }
  } else {
    if (log.isDebugEnabled()) {
      log.debug("File name has no extension, mime type cannot be recognised for: " + fileName);
    }
  }
  return MIMETYPE_OCTET_STREAM;
}
/** * Performs a number of checks to ensure response saneness according to the rules of RFC2616: * * <ol> * <li>If the response code is {@link javax.servlet.http.HttpServletResponse#SC_NO_CONTENT} then * it is illegal for the body to contain anything. See * http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.2.5 * <li>If the response code is {@link javax.servlet.http.HttpServletResponse#SC_NOT_MODIFIED} * then it is illegal for the body to contain anything. See * http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5 * </ol> * * @param request the client HTTP request * @param responseStatus the responseStatus * @return true if the response should be 0, even if it is isn't. */ public static boolean shouldBodyBeZero(HttpServletRequest request, int responseStatus) { // Check for NO_CONTENT if (responseStatus == HttpServletResponse.SC_NO_CONTENT) { if (LOG.isDebugEnabled()) { LOG.debug( request.getRequestURL() + " resulted in a " + HttpServletResponse.SC_NO_CONTENT + " response. Removing message body in accordance with RFC2616."); } return true; } // Check for NOT_MODIFIED if (responseStatus == HttpServletResponse.SC_NOT_MODIFIED) { if (LOG.isDebugEnabled()) { LOG.debug( request.getRequestURL() + " resulted in a " + HttpServletResponse.SC_NOT_MODIFIED + " response. Removing message body in accordance with RFC2616."); } return true; } return false; }
/**
 * Main dispatch method called from the LatherBoss. Rejects requests while alert triggers are
 * still initializing or when this server is not the HA primary, authenticates secure commands,
 * then runs the command while tracking the agent connection and timing statistics.
 *
 * @param ctx lather call context (caller IP, etc.)
 * @param method the lather method name being invoked
 * @param arg the method argument; secure commands must pass a SecureAgentLatherValue
 * @return the command result
 * @throws LatherRemoteException on initialization, HA, authentication, or command failure
 */
public LatherValue dispatch(LatherContext ctx, String method, LatherValue arg)
    throws LatherRemoteException {
  Integer agentId = null;
  if (!haService.alertTriggersHaveInitialized()) {
    if (log.isDebugEnabled()) {
      log.debug("Not ready - received request for " + method + " from " + ctx.getCallerIP());
    }
    throw new LatherRemoteException("Server still initializing");
  }
  if (log.isDebugEnabled()) {
    log.debug("Request for " + method + "() from " + ctx.getCallerIP());
  }
  if (!HAUtil.isMasterNode()) {
    log.warn("Non-primary server received communication from an agent. Request will be denied.");
    throw new LatherRemoteException(
        "This server is not the primary node in the HA configuration. Agent request denied.");
  }
  if (secureCommands.contains(method)) {
    if (!(arg instanceof SecureAgentLatherValue)) {
      log.warn(
          "Authenticated call made from "
              + ctx.getCallerIP()
              + " which did not subclass the correct authentication class");
      throw new LatherRemoteException("Unauthorized agent denied");
    }
    String agentToken = ((SecureAgentLatherValue) arg).getAgentToken();
    validateAgent(ctx, agentToken);
    // Record the last-seen time for this token.
    synchronized (tokensToTime) {
      tokensToTime.put(agentToken, System.currentTimeMillis());
    }
    try {
      Agent a = agentManager.getAgent(agentToken);
      agentId = a.getId();
    } catch (AgentNotFoundException e) {
      log.debug(e, e);
    }
  }
  AgentConnection conn = null;
  long start = 0;
  try {
    conn = agentManager.getAgentConnection(method, ctx.getCallerIP(), agentId);
    start = now();
    return runCommand(ctx, method, arg);
  } catch (LatherRemoteException e) {
    concurrentStatsCollector.addStat(1, LATHER_REMOTE_EXCEPTION);
    throw e;
  } finally {
    if (conn != null) {
      agentManager.disconnectAgent(conn);
    }
    // Fix: only record a duration once 'start' has been set. Previously a failure
    // inside getAgentConnection() left start == 0 and recorded (now() - 0) as the
    // command time, corrupting the statistic.
    if (start != 0) {
      long duration = now() - start;
      concurrentStatsCollector.addStat(duration, LATHER_RUN_COMMAND_TIME);
    }
  }
}
/** Initialize the MessageSource. Use parent's if none defined in this context. */ protected void initMessageSource() { ConfigurableListableBeanFactory beanFactory = getBeanFactory(); if (beanFactory.containsLocalBean(MESSAGE_SOURCE_BEAN_NAME)) { this.messageSource = beanFactory.getBean(MESSAGE_SOURCE_BEAN_NAME, MessageSource.class); // Make MessageSource aware of parent MessageSource. if (this.parent != null && this.messageSource instanceof HierarchicalMessageSource) { HierarchicalMessageSource hms = (HierarchicalMessageSource) this.messageSource; if (hms.getParentMessageSource() == null) { // Only set parent context as parent MessageSource if no parent MessageSource // registered already. hms.setParentMessageSource(getInternalParentMessageSource()); } } if (logger.isDebugEnabled()) { logger.debug("Using MessageSource [" + this.messageSource + "]"); } } else { // Use empty MessageSource to be able to accept getMessage calls. DelegatingMessageSource dms = new DelegatingMessageSource(); dms.setParentMessageSource(getInternalParentMessageSource()); this.messageSource = dms; beanFactory.registerSingleton(MESSAGE_SOURCE_BEAN_NAME, this.messageSource); if (logger.isDebugEnabled()) { logger.debug( "Unable to locate MessageSource with name '" + MESSAGE_SOURCE_BEAN_NAME + "': using default [" + this.messageSource + "]"); } } }
// Returns getUser and getUserByEid on the input string // @return Either the id of the user, or the same string if not defined private String getUserDefined(String usr) { // Set the original user id String userId = usr; User userinfo; try { userinfo = UserDirectoryService.getUser(usr); userId = userinfo.getId(); if (LOG.isDebugEnabled()) { LOG.debug("getUserDefined: username for " + usr + " is " + userId); } return userId; } catch (UserNotDefinedException e) { try { // try with the user eid userinfo = UserDirectoryService.getUserByEid(usr); userId = userinfo.getId(); } catch (UserNotDefinedException ee) { // This is mostly expected behavior, don't need to notify about it, the UI can handle it if (LOG.isDebugEnabled()) { LOG.debug("getUserDefined: User Not Defined" + userId); } } } return userId; }
/** * Convert a public key to the SSH format used in the <code>authorized_keys</code> files. * * <p>Note that only RSA keys are supported at the moment. * * @param key the public key to convert * @param alias the alias to be appended at the end of the line, if it is <code>null</code> * nothing will be appended * @return an string that can be directly written to the <code>authorized_keys</code> file or * <code>null</code> if the conversion can't be performed for whatever the reason */ public static String getKeyString(final PublicKey key, String alias) { // Get the serialized version of the key: final byte[] keyBytes = getKeyBytes(key); if (keyBytes == null) { log.error("Can't get key bytes, will return null."); return null; } // Encode it using BASE64: final Base64 encoder = new Base64(0); final String encoding = encoder.encodeToString(keyBytes); if (log.isDebugEnabled()) { log.debug("Key encoding is \"" + encoding + "\"."); } // Return the generated SSH public key: final StringBuilder buffer = new StringBuilder( SSH_RSA.length() + 1 + encoding.length() + (alias != null ? 1 + alias.length() : 0)); buffer.append(SSH_RSA); buffer.append(" "); buffer.append(encoding); if (alias != null) { buffer.append(" "); buffer.append(alias); } final String keyString = buffer.toString(); if (log.isDebugEnabled()) { log.debug("Key string is \"" + keyString + "\"."); } return keyString; }
/**
 * Consumes configuration parameters understood by this subclass (currently only the file-path
 * key, which is stored in FILEPATH); any other key is delegated to the superclass.
 *
 * @param key the parameter name
 * @param value the parameter value
 */
@Override
protected void initThisSubclass(String key, String value) {
  String method = "initThisSubclass()";
  if (log.isDebugEnabled()) {
    log.debug(enter(method));
  }
  boolean handledHere = FILEPATH_KEY.equals(key);
  if (handledHere) {
    FILEPATH = value;
  } else {
    if (log.isDebugEnabled()) {
      log.debug(format(method, "deferring to super"));
    }
    super.initThisSubclass(key, value);
  }
  if (handledHere && log.isInfoEnabled()) {
    log.info(method + "known parameter " + key + "==" + value);
  }
  if (log.isDebugEnabled()) {
    log.debug(exit(method));
  }
}
/**
 * Creates the subcontext identified by the given <code>path</code>
 *
 * @param context
 * @param path slash separated hierarchy of sub contexts (e.g. /mdsp/brodcasterlistener/
 * @throws NamingException
 */
public static Context createSubcontext(Context context, String path) throws NamingException {
  Validate.notNull(context, "context");
  Validate.notNull(path, "path");
  if (log.isDebugEnabled()) {
    log.debug(
        "> createSubcontext(context=" + context.getNameInNamespace() + ", path=" + path + ")");
  }
  Context cursor = context;
  // Walk the path one segment at a time, reusing existing contexts and creating
  // the missing ones.
  for (String segment : StringUtils.split(path, "/")) {
    try {
      cursor = (Context) cursor.lookup(segment);
      if (log.isDebugEnabled()) {
        log.debug("Context '" + segment + "' already exist");
      }
    } catch (NameNotFoundException e) {
      cursor = cursor.createSubcontext(segment);
      if (log.isDebugEnabled()) {
        log.debug("Context '" + segment + "' created");
      }
    }
  }
  if (log.isDebugEnabled()) {
    log.debug("< createSubcontext() : " + cursor.getNameInNamespace());
  }
  return cursor;
}
/**
 * Reads characters, emitting a synthetic header before the wrapped reader's content and a
 * footer once the wrapped reader is exhausted.
 *
 * <p>NOTE(review): this still assumes {@code length >= header.length()} on the first call (and
 * {@code length >= footer.length()} at EOF) — confirm callers guarantee buffers that large.
 *
 * @param theChars destination buffer
 * @param offset index at which to start storing characters
 * @param length maximum number of characters to read
 * @return the number of characters stored, or -1 once header, content, and footer are consumed
 * @throws IOException if the underlying reader fails
 */
public int read(char[] theChars, int offset, int length) throws IOException {
  if (log.isDebugEnabled()) {
    log.debug("read called: offset = " + offset + ", length = " + length);
  }
  if (first) {
    if (log.isDebugEnabled()) {
      log.debug(header);
    }
    first = false;
    // Fix: the header was previously copied starting at index 0, ignoring the
    // caller-supplied offset and clobbering theChars[0 .. header.length()).
    for (int i = 0; i < header.length(); i++) {
      theChars[offset + i] = header.charAt(i);
    }
    return header.length();
  } else {
    if (last) return -1;
    int cnt = internal.read(theChars, offset, length);
    if (log.isDebugEnabled()) {
      log.debug(theChars);
    }
    if (cnt == -1) {
      // Underlying reader exhausted: emit the footer exactly once.
      for (int i = offset; i < footer.length() + offset; i++) {
        theChars[i] = footer.charAt(i - offset);
      }
      cnt = footer.length();
      last = true;
    }
    return cnt;
  }
}
/**
 * Locates the Hadoop configuration directory implied by the installation found on the path:
 * first via a path relative to the hadoop command, then by executing the command itself.
 *
 * @param envp environment variables used to locate and run the hadoop command
 * @return the detected configuration directory, or {@code null} when no hadoop command (or no
 *     confdir) can be found
 */
private static File getImplicitConfigurationDirectory(Map<String, String> envp) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Detecting default Hadoop configuration dir from Hadoop installation path");
  }
  File command = findHadoopCommand(envp);
  if (command == null) {
    return null;
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug(MessageFormat.format("Hadoop command: {0}", command));
  }
  // Preferred: derive the confdir from the command's location on disk.
  File byRelative = getHadoopConfigurationDirectoryByRelative(command);
  if (byRelative != null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          MessageFormat.format(
              "Found implicit Hadoop confdir (from hadoop command path): {0}", byRelative));
    }
    return byRelative;
  }
  // Fallback: ask the hadoop command itself.
  File byExecution = getHadoopConfigurationDirectoryByCommand(command, envp);
  if (byExecution != null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(
          MessageFormat.format(
              "Found implicit Hadoop confdir (from hadoop command execution): {0}", byExecution));
    }
    return byExecution;
  }
  return null;
}