@Parameters({"target"})
 @BeforeClass
 public void setUp(String target) throws Exception {
   // One-time TestNG fixture: builds two import pipelines against a test
   // service factory -- a full-metadata pipeline (store/wrapper) and a
   // minimal-metadata one (minimalStore/minimalWrapper).
   log.info("METADATA VALIDATOR TARGET: " + target);
   sf = new TestServiceFactory().proxy();
   config = new ImportConfig();
   // Let the user know at what level we're logging
   ch.qos.logback.classic.Logger lociLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("loci");
   ch.qos.logback.classic.Logger omeLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("ome.formats");
   log.info(
       String.format(
           "Log levels -- Bio-Formats: %s OMERO.importer: %s",
           lociLogger.getLevel(), omeLogger.getLevel()));
   // Full-metadata store, backed by the test enumeration provider so no real
   // server round-trips are needed for enumeration lookups.
   store = new OMEROMetadataStoreClient();
   store.initialize(sf);
   store.setEnumerationProvider(new TestEnumerationProvider());
   store.setInstanceProvider(new BlitzInstanceProvider(store.getEnumerationProvider()));
   // Second store configured identically; paired below with the
   // MetadataLevel.MINIMUM reader.
   minimalStore = new OMEROMetadataStoreClient();
   minimalStore.initialize(sf);
   minimalStore.setEnumerationProvider(new TestEnumerationProvider());
   minimalStore.setInstanceProvider(
       new BlitzInstanceProvider(minimalStore.getEnumerationProvider()));
   // Readers: one parses ALL metadata, the other only the MINIMUM level.
   wrapper = new OMEROWrapper(config);
   wrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.ALL));
   minimalWrapper = new OMEROWrapper(config);
   minimalWrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.MINIMUM));
   // Cross-wire stores and readers. NOTE(review): only `wrapper` is given a
   // metadata store here; minimalWrapper never gets minimalStore -- confirm
   // whether that asymmetry is intentional.
   wrapper.setMetadataStore(store);
   store.setReader(wrapper.getImageReader());
   minimalStore.setReader(minimalWrapper.getImageReader());
 }
  // A few things that need to get set up before regular init().
  @Override
  protected void internalInit() {
    // Bootstraps the application: loads properties, optionally re-configures
    // logback from a user-supplied file, loads services, and registers
    // Guice-based component injection before delegating to the framework init.
    log.debug("Starting CWM Application Internal Init");
    log.debug("Application Class is " + getClass().getName());

    loadAppProperties();

    // If using Logback as the logger, and we have a logConfig property,
    // then read that configuration.
    File logConfig = configuration.getOptionalFile("cwm.logConfig");
    if (logConfig != null && LoggerFactory.getILoggerFactory() instanceof LoggerContext) {
      log.info("Log Configuration: {}", logConfig);
      LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();

      try {
        JoranConfigurator configurator = new JoranConfigurator();
        configurator.setContext(lc);
        // the context was probably already configured by default configuration rules
        lc.reset();
        configurator.doConfigure(logConfig);
      } catch (JoranException je) {
        // Printed to stderr (not logged) because the logging context may be
        // mid-reset at this point; StatusPrinter below also surfaces problems.
        je.printStackTrace();
      }
      StatusPrinter.printInCaseOfErrorsOrWarnings(lc);
    }

    loadServices();

    // Registers Guice injection for newly instantiated components.
    getComponentInstantiationListeners()
        .add(new GuiceComponentInjector(this, getInjectionModuleArray()));

    super.internalInit();
  }
  /**
   * Launches the monitored process and blocks until it is down.
   *
   * <p>Moves the lifecycle to STARTING, starts the process, polls until it reports readiness,
   * signals the monitor, then waits for the process to stop. {@code stop()} always runs on exit.
   *
   * @param mp the process to launch and supervise
   * @throws IllegalStateException if a launch was already started
   */
  public void launch(Monitored mp) {
    if (!lifecycle.tryToMoveTo(Lifecycle.State.STARTING)) {
      throw new IllegalStateException("Already started");
    }
    monitored = mp;

    try {
      LoggerFactory.getLogger(getClass()).info("Starting " + getKey());
      Runtime.getRuntime().addShutdownHook(shutdownHook);
      stopWatcher.start();

      monitored.start();
      // Poll until the process reports readiness (20 ms between checks).
      while (!monitored.isReady()) {
        Thread.sleep(20L);
      }

      // notify monitor that process is ready
      commands.setReady();

      if (lifecycle.tryToMoveTo(Lifecycle.State.STARTED)) {
        monitored.awaitStop();
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers up the stack can observe it
      // (the original broad catch silently discarded the interrupted status).
      Thread.currentThread().interrupt();
      LoggerFactory.getLogger(getClass()).warn("Interrupted while starting " + getKey(), e);
    } catch (Exception e) {
      LoggerFactory.getLogger(getClass()).warn("Fail to start " + getKey(), e);
    } finally {
      stop();
    }
  }
 /**
  * Creates the filter with a bounded log queue and one child logger per traffic category.
  *
  * @param logname root logger name; category loggers hang off it (".Recv", ".Sent", ...)
  * @param capacity maximum number of pending log entries held in the queue
  */
 public ReadWriteLoggingFilter(String logname, int capacity) {
   // Bounded queue so pending entries cannot grow without limit under bursts.
   this.logQueue = new LinkedBlockingQueue<Log>(capacity);
   // Separate child loggers so each direction/event type is configurable on its own.
   this.RECEIVE_LOGGER = LoggerFactory.getLogger(logname + ".Recv");
   this.SENT_LOGGER = LoggerFactory.getLogger(logname + ".Sent");
   this.EXCEPTION_LOGGER = LoggerFactory.getLogger(logname + ".Exception");
   this.EVENT_LOGGER = LoggerFactory.getLogger(logname + ".Event");
 }
Пример #5
0
 /**
  * Fixes the Chameleon logging configuration to write the logs in the logs/wisdom.log file instead
  * of chameleon.log file.
  *
  * <p>No-op unless the bound SLF4J implementation is logback and the root logger has a
  * {@code RollingFileAppender} named "FILE".
  *
  * @param basedir the base directory of the chameleon
  */
 private static void fixLoggingSystem(File basedir) {
   ILoggerFactory factory = LoggerFactory.getILoggerFactory();
   if (factory instanceof LoggerContext) {
     // We know that we are using logback from here.
     // Reuse the factory we already fetched instead of calling
     // getILoggerFactory() a second time.
     LoggerContext lc = (LoggerContext) factory;
     ch.qos.logback.classic.Logger logbackLogger = lc.getLogger(Logger.ROOT_LOGGER_NAME);
     if (logbackLogger == null) {
       return;
     }
     Appender<ILoggingEvent> appender = logbackLogger.getAppender("FILE");
     if (appender instanceof RollingFileAppender) {
       @SuppressWarnings("unchecked") // the FILE appender is declared for ILoggingEvent
       RollingFileAppender<ILoggingEvent> fileAppender =
           (RollingFileAppender<ILoggingEvent>) appender;
       String file = new File(basedir, "logs/wisdom.log").getAbsolutePath();
       fileAppender.stop();
       // Remove the created log directory.
       // We do that afterwards because on Windows the file cannot be deleted
       // while we still have a logger using it.
       // NOTE(review): this deletes "logs" relative to the working directory,
       // not relative to basedir -- confirm that is intentional.
       FileUtils.deleteQuietly(new File("logs"));
       fileAppender.setFile(file);
       fileAppender.setContext(lc);
       fileAppender.start();
     }
   }
 }
/** JMS Service. This service implements the sending of a message to a JMS queue. */
@Stateless
public class JMSHelperService {
  private static final Logger LOGGER = LoggerFactory.getLogger(JMSHelperService.class);
  private static final Logger LOGGER_CONFIDENTIAL =
      LoggerFactory.getLogger(USEFLogCategory.CONFIDENTIAL);

  @Resource(mappedName = USEFConstants.OUT_QUEUE_NAME)
  private Queue outQueue;

  @Resource(mappedName = USEFConstants.IN_QUEUE_NAME)
  private Queue inQueue;

  @Resource(mappedName = USEFConstants.NOT_SENT_QUEUE_NAME)
  private Queue notSentQueue;

  @Inject private JMSContext context;

  /**
   * Sends the message to the given queue.
   *
   * @param queue destination queue
   * @param message payload to send
   * @throws TechnicalException wrapping any failure from the JMS producer
   */
  private void sendMessage(Queue queue, String message) {
    try {
      context.createProducer().send(queue, message);
    } catch (Exception e) {
      LOGGER.error("Error sending the message: ", e);
      // The payload may contain sensitive data, so it only goes to the confidential log.
      LOGGER_CONFIDENTIAL.debug("Error sending the message: '{}' to the queue", message, e);
      throw new TechnicalException(e);
    }
  }

  /**
   * Sends a message to the out queue.
   *
   * @param message message
   */
  public void sendMessageToOutQueue(String message) {
    // Fixed: the original debug call ended with a dangling "{}" placeholder
    // that had no corresponding argument.
    LOGGER.debug("Started sending msg to the out queue");
    sendMessage(outQueue, message);
    LOGGER.debug("Msg is successfully sent to the out queue");
  }

  /**
   * Sends a message to the in queue.
   *
   * @param message message
   */
  public void sendMessageToInQueue(String message) {
    LOGGER.debug("Started sending msg to the in queue");
    sendMessage(inQueue, message);
    LOGGER.debug("Msg is successfully sent to the in queue");
  }

  /**
   * Sends a message to the not sent queue.
   *
   * @param message message
   */
  public void sendMessageToNotSentQueue(String message) {
    LOGGER.debug("Started sending msg to the not sent queue");
    sendMessage(notSentQueue, message);
    LOGGER.debug("Msg is successfully sent to the not sent queue");
  }
}
Пример #7
0
 private void importActiveRules(RulesDao rulesDao, RulesProfile profile) {
   // Re-links each unmarshalled active rule (and its parameters) to the
   // persistent Rule/RuleParam rows found via rulesDao; entries with no
   // matching DB row are logged and removed from the profile.
   for (Iterator<ActiveRule> iar = profile.getActiveRules(true).iterator(); iar.hasNext(); ) {
     ActiveRule activeRule = iar.next();
     Rule unMarshalledRule = activeRule.getRule();
     Rule matchingRuleInDb =
         rulesDao.getRuleByKey(unMarshalledRule.getRepositoryKey(), unMarshalledRule.getKey());
     if (matchingRuleInDb == null) {
       LoggerFactory.getLogger(getClass())
           .error(
               "Unable to find active rule "
                   + unMarshalledRule.getRepositoryKey()
                   + ":"
                   + unMarshalledRule.getKey());
       // Iterator.remove is the safe way to delete during iteration.
       iar.remove();
       continue;
     }
     activeRule.setRule(matchingRuleInDb);
     activeRule.setRulesProfile(profile);
     // NOTE(review): result discarded -- presumably this forces lazy
     // initialization of the params collection; confirm before removing.
     activeRule.getActiveRuleParams();
     for (Iterator<ActiveRuleParam> irp = activeRule.getActiveRuleParams().iterator();
         irp.hasNext(); ) {
       ActiveRuleParam activeRuleParam = irp.next();
       RuleParam unMarshalledRP = activeRuleParam.getRuleParam();
       RuleParam matchingRPInDb = rulesDao.getRuleParam(matchingRuleInDb, unMarshalledRP.getKey());
       if (matchingRPInDb == null) {
         LoggerFactory.getLogger(getClass())
             .error("Unable to find active rule parameter " + unMarshalledRP.getKey());
         irp.remove();
         continue;
       }
       activeRuleParam.setActiveRule(activeRule);
       activeRuleParam.setRuleParam(matchingRPInDb);
     }
   }
 }
  public static void main(String[] args) throws Exception {
    // Entry point: wires up the example's loggers, applies the Hystrix
    // settings, then runs the collapser loop test.
    // NOTE(review): LOG/CLOG appear to be static fields assigned here rather
    // than at declaration -- confirm nothing reads them before main() runs.
    LOG = LoggerFactory.getLogger(ExampleCollapserMain.class);
    CLOG = LoggerFactory.getLogger("console");
    initializeHystrixSettings();

    runLoopTest();
  }
  /**
   * Creates the destination dataset using an evolved copy of the source schema.
   *
   * <p>Reads the original Avro schema, evolves it, writes the evolved schema to {@code
   * evolvedAvsc}, deletes any leftover dataset from a previous run, recreates it, and prepares a
   * mocked console plus a fresh {@code CopyCommand} for the test.
   */
  @Override
  public void createDestination() throws Exception {
    // Parse the original schema; try-with-resources guarantees the stream is
    // closed even when parsing throws (the original leaked it on failure).
    final Schema original;
    try (FileInputStream schemaIn = new FileInputStream(avsc)) {
      original = new Schema.Parser().parse(schemaIn);
    }

    Schema evolved = getEvolvedSchema(original);

    // NOTE(review): getBytes() uses the platform default charset -- consider
    // StandardCharsets.UTF_8 if the schema may contain non-ASCII characters.
    try (FileOutputStream schemaOut = new FileOutputStream(evolvedAvsc)) {
      schemaOut.write(evolved.toString(true).getBytes());
    }

    List<String> createArgs =
        Lists.newArrayList("create", dest, "-s", evolvedAvsc, "-r", repoUri, "-d", "target/data");
    createArgs.addAll(getExtraCreateArgs());

    // Drop any leftover dataset from a previous run, then (re)create it with
    // the evolved schema.
    TestUtil.run(
        LoggerFactory.getLogger(this.getClass()),
        "delete",
        dest,
        "-r",
        repoUri,
        "-d",
        "target/data");
    TestUtil.run(
        LoggerFactory.getLogger(this.getClass()),
        createArgs.toArray(new String[createArgs.size()]));
    this.console = mock(Logger.class);
    this.command = new CopyCommand(console);
    command.setConf(new Configuration());
  }
Пример #10
0
  @Override
  public void run() {
    // Periodic sweep: tracks running compactions by (id, entriesRead,
    // entriesWritten) so a key that stops changing means no progress; warns
    // once per stuck compaction and logs recovery when it moves again.
    List<CompactionInfo> runningCompactions = Compactor.getRunningCompactions();

    Set<List<Long>> newKeys = new HashSet<>();

    long time = System.currentTimeMillis();

    for (CompactionInfo ci : runningCompactions) {
      // A progress "fingerprint": any read/write advance produces a new key.
      List<Long> compactionKey =
          Arrays.asList(ci.getID(), ci.getEntriesRead(), ci.getEntriesWritten());
      newKeys.add(compactionKey);

      if (!observedCompactions.containsKey(compactionKey)) {
        observedCompactions.put(compactionKey, new ObservedCompactionInfo(ci, time));
      }
    }

    // look for compactions that finished or made progress and logged a warning
    HashMap<List<Long>, ObservedCompactionInfo> copy = new HashMap<>(observedCompactions);
    copy.keySet().removeAll(newKeys);

    for (ObservedCompactionInfo oci : copy.values()) {
      if (oci.loggedWarning) {
        LoggerFactory.getLogger(CompactionWatcher.class)
            .info("Compaction of " + oci.compactionInfo.getExtent() + " is no longer stuck");
      }
    }

    // remove any compaction that completed or made progress
    observedCompactions.keySet().retainAll(newKeys);

    long warnTime = config.getTimeInMillis(Property.TSERV_COMPACTION_WARN_TIME);

    // check for stuck compactions
    for (ObservedCompactionInfo oci : observedCompactions.values()) {
      if (time - oci.firstSeen > warnTime && !oci.loggedWarning) {
        Thread compactionThread = oci.compactionInfo.getThread();
        if (compactionThread != null) {
          // Attach the compaction thread's current stack as a synthetic
          // exception so the log shows where it is stuck.
          StackTraceElement[] trace = compactionThread.getStackTrace();
          Exception e =
              new Exception(
                  "Possible stack trace of compaction stuck on " + oci.compactionInfo.getExtent());
          e.setStackTrace(trace);
          LoggerFactory.getLogger(CompactionWatcher.class)
              .warn(
                  "Compaction of "
                      + oci.compactionInfo.getExtent()
                      + " to "
                      + oci.compactionInfo.getOutputFile()
                      + " has not made progress for at least "
                      + (time - oci.firstSeen)
                      + "ms",
                  e);
          // Warn only once per observed stuck compaction.
          oci.loggedWarning = true;
        }
      }
    }
  }
Пример #11
0
  /**
   * Creates an operator that logs to the named category, falling back to this
   * class's own logger when no category is supplied.
   *
   * @param category logger name; may be null or empty
   * @param options behaviour flags stored for later use
   */
  public LogOperator(final String category, int options) {
    if (category != null && !category.isEmpty()) {
      this.log = LoggerFactory.getLogger(category);
    } else {
      this.log = LoggerFactory.getLogger(LogOperator.class);
    }
    this.options = options;
  }
Пример #12
0
 /**
  * Supplies a logger for the injection site: named after the receiving class
  * when the dependency is targeted, otherwise the shared "General" logger.
  */
 @Override
 public Logger supply(Dependency<? super Logger> dependency, Injector injector) {
   if (!dependency.isUntargeted()) {
     return LoggerFactory.getLogger(dependency.target().getType().getRawType());
   }
   return LoggerFactory.getLogger("General");
 }
Пример #13
0
  public static void main(String[] args) {
    // Emit one debug-level greeting through SLF4J...
    final Logger logger = LoggerFactory.getLogger("chapters.introduction.HelloWorld2");
    logger.debug("Hello world.");

    // ...then dump logback's internal status for diagnostics.
    final LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
    StatusPrinter.print(context);
  }
Пример #14
0
  public static void main(String[] args) {
    // Emit one debug-level greeting through SLF4J.
    final Logger logger = LoggerFactory.getLogger(HelloWorld2.class);
    logger.debug("Hello world.");

    // print internal state
    final LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
    StatusPrinter.print(context);
  }
Пример #15
0
  /**
   * Programmatically (re)configures logback for this app: a time-based rolling
   * file appender writing under the user-chosen GPSLogger folder, plus a
   * logcat appender, both attached to the root logger.
   *
   * @param context Android context used to read shared preferences
   */
  public static void ConfigureLogbackDirectly(Context context) {
    // reset the default context (which may already have been initialized)
    // since we want to reconfigure it
    LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
    lc.reset();

    // final String LOG_DIR = "/sdcard/GPSLogger";
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
    final String LOG_DIR =
        prefs.getString(
            "gpslogger_folder", Environment.getExternalStorageDirectory() + "/GPSLogger");

    GpsRollingFileAppender<ILoggingEvent> rollingFileAppender =
        new GpsRollingFileAppender<ILoggingEvent>();
    rollingFileAppender.setAppend(true);
    rollingFileAppender.setContext(lc);

    // OPTIONAL: Set an active log file (separate from the rollover files).
    // If rollingPolicy.fileNamePattern already set, you don't need this.
    rollingFileAppender.setFile(LOG_DIR + "/debuglog.txt");
    rollingFileAppender.setLazy(true);

    TimeBasedRollingPolicy<ILoggingEvent> rollingPolicy =
        new TimeBasedRollingPolicy<ILoggingEvent>();
    rollingPolicy.setFileNamePattern(LOG_DIR + "/debuglog.%d.txt");
    rollingPolicy.setMaxHistory(3);
    rollingPolicy.setParent(rollingFileAppender); // parent and context required!
    rollingPolicy.setContext(lc);
    // The policy is started before the appender that uses it.
    rollingPolicy.start();

    rollingFileAppender.setRollingPolicy(rollingPolicy);

    PatternLayoutEncoder encoder = new PatternLayoutEncoder();
    encoder.setPattern("%d{HH:mm:ss} %-5p %class{0}.%method:%L - %m%n");
    encoder.setContext(lc);
    encoder.start();

    // Encoder must be set and started before the appender starts.
    rollingFileAppender.setEncoder(encoder);
    rollingFileAppender.start();

    // setup LogcatAppender
    PatternLayoutEncoder encoder2 = new PatternLayoutEncoder();
    encoder2.setContext(lc);
    encoder2.setPattern("%method:%L - %m%n");
    encoder2.start();

    LogcatAppender logcatAppender = new LogcatAppender();
    logcatAppender.setContext(lc);
    logcatAppender.setEncoder(encoder2);
    logcatAppender.start();

    // add the newly created appenders to the root logger;
    // qualify Logger to disambiguate from org.slf4j.Logger
    ch.qos.logback.classic.Logger root =
        (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
    root.addAppender(rollingFileAppender);
    root.addAppender(logcatAppender);
  }
/**
 * The Netty handler responsible for decrypting/encrypting RLPx frames with the FrameCodec created
 * during HandshakeHandler initial work
 *
 * <p>Created by Anton Nashatyrev on 15.10.2015.
 */
public class FrameCodecHandler extends NettyByteToMessageCodec<FrameCodec.Frame> {
  private static final Logger loggerWire = LoggerFactory.getLogger("wire");
  private static final Logger loggerNet = LoggerFactory.getLogger("net");

  public FrameCodec frameCodec;
  public Channel channel;

  public FrameCodecHandler(FrameCodec frameCodec, Channel channel) {
    this.frameCodec = frameCodec;
    this.channel = channel;
  }

  /**
   * Decodes as many complete RLPx frames as are available in {@code in} and forwards them
   * downstream; returns silently when no full frame has arrived yet.
   */
  protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out)
      throws IOException {
    if (in.readableBytes() == 0) {
      loggerWire.trace("in.readableBytes() == 0");
      return;
    }

    loggerWire.trace("Decoding frame (" + in.readableBytes() + " bytes)");
    List<FrameCodec.Frame> frames = frameCodec.readFrames(in);

    // Check if a full frame was available.  If not, we'll try later when more bytes come in.
    if (frames == null || frames.isEmpty()) return;

    // Count one inbound RLPx message per decoded frame. (The original fetched
    // each frame into an unused local; only the counter tick is needed.)
    for (int i = 0; i < frames.size(); i++) {
      channel.getNodeStatistics().rlpxInMessages.add();
    }

    out.addAll(frames);
  }

  /** Encrypts and writes a single frame, counting one outbound RLPx message. */
  @Override
  protected void encode(ChannelHandlerContext ctx, FrameCodec.Frame frame, ByteBuf out)
      throws Exception {

    frameCodec.writeFrame(frame, out);

    channel.getNodeStatistics().rlpxOutMessages.add();
  }

  /**
   * Logs codec failures at a severity appropriate to the situation (quietly in discovery mode,
   * DEBUG for I/O errors, WARN otherwise) and closes the channel.
   */
  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
    if (channel.isDiscoveryMode()) {
      loggerNet.trace("FrameCodec failed: ", cause);
    } else {
      if (cause instanceof IOException) {
        loggerNet.debug("FrameCodec failed: " + ctx.channel().remoteAddress(), cause);
      } else {
        loggerNet.warn("FrameCodec failed: ", cause);
      }
    }
    ctx.close();
  }
}
 /**
  * Binds {@code value} to the given JDBC parameter as an INTEGER, writing SQL NULL when the value
  * is null or equals the configured default value.
  *
  * @param st the statement to bind into
  * @param value the value to bind; expected to be an Integer when non-null
  * @param index 1-based JDBC parameter index
  * @param session current Hibernate session (unused here)
  */
 public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
     throws HibernateException, SQLException {
   if (value == null || defaultValue.equals(value)) {
     // Parameterized logging: no string concatenation unless TRACE is enabled
     // (the original built the message eagerly on every call).
     LoggerFactory.getLogger(getClass()).trace("binding null to parameter: {}", index);
     st.setNull(index, Types.INTEGER);
   } else {
     LoggerFactory.getLogger(getClass()).trace("binding {} to parameter: {}", value, index);
     st.setInt(index, ((Integer) value).intValue());
   }
 }
Пример #18
0
  /**
   * Scheduled-job entry point: runs the sale-constraint posting step, logging
   * start and finish at DEBUG level.
   */
  @Override
  public void execute(JobExecutionContext context) {
    // Resolve the logger once (the original looked it up twice) and let SLF4J
    // format the message only when DEBUG is enabled.
    final org.slf4j.Logger log = LoggerFactory.getLogger(Loggers.SCHEDULER);
    log.debug("Scheduler {} is started", PostingConstraintJob.class.getName());

    SaleConstraintFacade.getInstance().performPosting();

    log.debug("Scheduler {} is finished", PostingConstraintJob.class.getName());
  }
 /**
  * Scheduled-job entry point: looks up the {@code MetricsUpdater} EJB through
  * JNDI and triggers a metrics refresh. JNDI failures are logged, not rethrown.
  */
 @Override
 public void execute(JobExecutionContext context) throws JobExecutionException {
   try {
     Context initContext = new InitialContext();
     try {
       MetricsUpdater bean = (MetricsUpdater) initContext.lookup(EJB_JNDI_NAME);
       bean.updateMetrics();
       LoggerFactory.getLogger(JobExample.class).info("Job executed");
     } finally {
       // JNDI contexts hold resources and should be closed (the original leaked it).
       initContext.close();
     }
   } catch (NamingException e) {
     // The original logged an empty message; include the failing JNDI name instead.
     LoggerFactory.getLogger(JobExample.class)
         .error("Failed to execute job via JNDI lookup of " + EJB_JNDI_NAME, e);
   }
 }
Пример #20
0
  /**
   * Reproduces LBCLASSIC-50: configures logback from the syslog test config
   * and emits a single INFO event through the freshly configured context.
   */
  @Test
  public void LBCLASSIC_50() throws JoranException {

    LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();

    // Wipe whatever default configuration is in place, then apply the test one.
    JoranConfigurator joran = new JoranConfigurator();
    joran.setContext(context);
    context.reset();
    joran.doConfigure(ClassicTestConstants.JORAN_INPUT_PREFIX + "/syslog_LBCLASSIC_50.xml");

    org.slf4j.Logger log = LoggerFactory.getLogger(this.getClass());
    log.info("hello");
  }
Пример #21
0
/**
 * Created by zhujiajun 16/6/27 20:20
 *
 * <p>Logging utility class.
 */
public final class LoggerTool {

  /** General-purpose application logger. */
  public static final Logger LOGGER = LoggerFactory.getLogger(LoggerTool.class);

  /** Dedicated error-channel logger. */
  public static final Logger ERROR = LoggerFactory.getLogger("adult.error");

  private LoggerTool() {}

  /**
   * Renders a throwable's full stack trace as a string.
   *
   * @param t the throwable to render
   * @return the stack trace text, exactly as printStackTrace would emit it
   */
  public static String getTrace(Throwable t) {
    final StringWriter buffer = new StringWriter();
    final PrintWriter sink = new PrintWriter(buffer);
    t.printStackTrace(sink);
    return buffer.toString();
  }
}
Пример #22
0
 /**
  * Produces an instance of a slf4j logger for the given injection point.
  *
  * <p>The logger is named after the class receiving the injection when that
  * class is known; otherwise a shared default logger is returned.
  *
  * @param injectionPoint to use
  * @return a logger
  */
 @Produces
 public Logger getLogger(final InjectionPoint injectionPoint) {
   final Bean<?> bean = injectionPoint.getBean();
   if (bean == null) {
     return LoggerFactory.getLogger("Default logger");
   }
   return LoggerFactory.getLogger(bean.getBeanClass());
 }
 /**
  * Runs a full propagation of {@code toPropagate}: a stateless propagator is
  * first activated, an already-active one is simply re-propagated; in either
  * case the engine is notified afterwards. Inactive non-stateless propagators
  * are skipped.
  *
  * @throws ContradictionException if propagation detects an inconsistency
  */
 public static void execute(Propagator toPropagate, IPropagationEngine engine)
     throws ContradictionException {
   // Resolve the logger once; the original looked it up twice per call.
   final org.slf4j.Logger logger = LoggerFactory.getLogger(IPropagationEngine.class);
   if (logger.isDebugEnabled()) {
     logger.debug("[A] {}", toPropagate);
   }
   if (toPropagate.isStateLess()) {
     toPropagate.setActive();
     toPropagate.propagate(PropagatorEventType.FULL_PROPAGATION.getStrengthenedMask());
     engine.onPropagatorExecution(toPropagate);
   } else if (toPropagate.isActive()) { // deal with updated propagator
     toPropagate.propagate(PropagatorEventType.FULL_PROPAGATION.getStrengthenedMask());
     engine.onPropagatorExecution(toPropagate);
   }
 }
Пример #24
0
  /**
   * Adds this object to the SMTP logs appender observable, to intercept logs.
   *
   * <p>The goal is to be informed when the log appender receives some debug SMTP logs.<br>
   * When a log is written, the appender will notify this class which will display it in the text
   * area.
   */
  private void addObserverToSmtpLogAppender() {
    // The SMTP session logger is configured (in logback.xml, presumably) with
    // a custom appender whose name comes from application configuration.
    Logger smtpLogger = LoggerFactory.getLogger(org.subethamail.smtp.server.Session.class);
    String appenderName = Configuration.INSTANCE.get("logback.appender.name");

    // Double cast: slf4j Logger -> logback AppenderAttachable, then the named
    // appender down to the project's SMTPLogsAppender.
    @SuppressWarnings("unchecked")
    SMTPLogsAppender<ILoggingEvent> appender =
        (SMTPLogsAppender<ILoggingEvent>)
            ((AppenderAttachable<ILoggingEvent>) smtpLogger).getAppender(appenderName);
    if (appender == null) {
      LoggerFactory.getLogger(LogsPane.class).error("Can't find logger: {}", appenderName);
    } else {
      appender.getObservable().addObserver(this);
    }
  }
  /**
   * Bridges a Redwood log record onto slf4j: chooses a logger from the
   * record's channels (a Class, or any other object's toString, falling back
   * to "CoreNLP") and maps the Redwood flag onto the matching slf4j level.
   */
  @Override
  public void print(Object[] channel, String line) {
    // Parse the channels
    Class<?> source = null; // The class the message is coming from (fixed: raw type)
    Object backupSource = null; // Another identifier for the message
    Redwood.Flag flag = Redwood.Flag.STDOUT;
    for (Object c : channel) {
      if (c instanceof Class) {
        source = (Class<?>) c; // This is a class the message is coming from
      } else if (c instanceof Redwood.Flag) {
        if (c != Redwood.Flag.FORCE) { // This is a Redwood flag
          flag = (Redwood.Flag) c;
        }
      } else {
        backupSource = c; // This is another "source" for the log message
      }
    }

    // Get the logger
    Logger impl;
    if (source != null) {
      impl = LoggerFactory.getLogger(source);
    } else if (backupSource != null) {
      impl = LoggerFactory.getLogger(backupSource.toString());
    } else {
      impl = LoggerFactory.getLogger("CoreNLP");
    }

    // Route the signal
    switch (flag) {
      case ERROR:
        impl.error(line);
        break;
      case WARN:
        impl.warn(line);
        break;
      case DEBUG:
        impl.debug(line);
        break;
      case STDOUT:
      case STDERR:
        impl.info(line);
        break;
      case FORCE:
        // Unreachable: FORCE is filtered out while parsing channels above.
        throw new IllegalStateException("Should not reach this switch case");
      default:
        throw new IllegalStateException("Unknown Redwood flag for slf4j integration: " + flag);
    }
  }
public abstract class CoreUtils {

  public static Logger LOGGER = LoggerFactory.getLogger("StaticLogger");
  protected Logger classlogger = LoggerFactory.getLogger(getClass());
  public static String separator = System.getProperty("file.separator");

  /**
   * This can load a gradle resource, such as a .properties file.
   *
   * @param fileName path of the file to load
   * @return the File handle (which may point at a non-existent file)
   */
  public static File loadGradleResource(String fileName) {
    File junitFile = new File(fileName);
    if (junitFile.exists()) {
      LOGGER.info("The file '" + junitFile.getAbsolutePath() + "' exists.");
    } else {
      LOGGER.info("Problem loading Gradle resource: " + junitFile.getAbsolutePath());
    }
    return junitFile;
  }

  /**
   * Pauses the current thread for {@code units} intervals of {@code mills}
   * milliseconds each, logging a dot after every interval.
   *
   * @param units number of sleep intervals
   * @param mills length of each interval in milliseconds
   */
  public static void waitTimer(int units, int mills) {
    DecimalFormat df = new DecimalFormat("###.##");
    double totalSeconds = ((double) units * mills) / 1000;
    LOGGER.info(
        "Explicit pause for "
            + df.format(totalSeconds)
            + " seconds divided by "
            + units
            + " units of time: ");
    try {
      // Removed a stray no-op Thread.currentThread() call from the original.
      for (int x = 0; x < units; x++) {
        Thread.sleep(mills);
        LOGGER.info(".");
      }
    } catch (InterruptedException ex) {
      // Restore the interrupt flag (the original swallowed it and only
      // printed the stack trace to stderr).
      Thread.currentThread().interrupt();
      LOGGER.warn("waitTimer interrupted", ex);
    }
  }

  protected CoreUtils() {
    // do nothing
  }
}
Пример #27
0
@Configuration
@EnableAsync
@EnableScheduling
public class AsyncConfiguration implements AsyncConfigurer {

  private final Logger log = LoggerFactory.getLogger(AsyncConfiguration.class);

  @Inject private JHipsterProperties jHipsterProperties;

  /**
   * Builds the executor used for {@code @Async} methods; pool sizes and queue
   * capacity come from the external JHipster configuration.
   */
  @Override
  @Bean(name = "taskExecutor")
  public Executor getAsyncExecutor() {
    log.debug("Creating Async Task Executor");
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(jHipsterProperties.getAsync().getCorePoolSize());
    executor.setMaxPoolSize(jHipsterProperties.getAsync().getMaxPoolSize());
    executor.setQueueCapacity(jHipsterProperties.getAsync().getQueueCapacity());
    executor.setThreadNamePrefix("nailit-Executor-");
    // Wrapped so exceptions thrown by async tasks are handled rather than
    // silently dropped -- presumably logged by ExceptionHandlingAsyncTaskExecutor.
    return new ExceptionHandlingAsyncTaskExecutor(executor);
  }

  /** Handler for exceptions escaping void-returning {@code @Async} methods. */
  @Override
  public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
    return new SimpleAsyncUncaughtExceptionHandler();
  }
}
/** @author Łukasz Dumiszewski */
@Service("duplicateWorkDetectMapService")
public class DuplicateWorkDetectMapService
    implements DiMapService<Writable, BytesWritable, Text, BytesWritable> {

  @SuppressWarnings("unused")
  private static Logger log = LoggerFactory.getLogger(DuplicateWorkDetectMapService.class);

  @Autowired private WorkKeyGenerator keyGen;

  /**
   * Map step: deserializes the DocumentWrapper from the input bytes, derives a
   * key from its metadata, and -- when the key is non-empty -- emits the key
   * with a metadata-only (thin) copy of the document.
   */
  @Override
  public void map(
      Writable key,
      BytesWritable value,
      Mapper<Writable, BytesWritable, Text, BytesWritable>.Context context)
      throws IOException, InterruptedException {

    DocumentWrapper docWrapper = DocumentProtos.DocumentWrapper.parseFrom(value.copyBytes());

    // Level 0 -- presumably the coarsest key granularity; confirm the
    // semantics against WorkKeyGenerator.
    String docKey = keyGen.generateKey(docWrapper.getDocumentMetadata(), 0);

    if (!docKey.isEmpty()) {
      // Strip document content so only metadata travels through the shuffle.
      DocumentWrapper thinDocWrapper = DocumentWrapperUtils.cloneDocumentMetadata(docWrapper);
      context.write(new Text(docKey), new BytesWritable(thinDocWrapper.toByteArray()));
    }
  }
  // ******************** PRIVATE ********************
}
Пример #29
0
/**
 * ComponentA is composed of ComponentB and some other fields. Tests the interactions between two
 * loaders, where the first component's loader loads component B by using getWithLoader, which in
 * turn invokes component B's loader.
 *
 * @author <a href="mailto:[email protected]">Greg Luck</a>
 * @version $Id$
 */
public class CompositeLoaderTest {

  // Pass the Class itself rather than Class.getName(): same logger name,
  // idiomatic slf4j usage.
  private static final Logger LOG = LoggerFactory.getLogger(CompositeLoaderTest.class);

  /**
   * Sets up the fixture, for example, open a network connection. This method is called before a
   * test is executed.
   */
  @Before
  public void setUp() throws Exception {
    CacheHelper.init();
  }

  /**
   * Tears down the fixture, for example, close a network connection. This method is called after a
   * test is executed.
   */
  @After
  public void tearDown() throws Exception {
    CacheHelper.shutdown();
  }

  /**
   * This test reproduces a deadlock found in 1.4-beta1 around loading interactions and
   * getWithLoader. Now fixed.
   */
  @Test
  public void testCompositeLoad() {
    LOG.info("Getting from cache");
    ComponentA compA =
        (ComponentA) CacheHelper.get("ehcache-loaderinteractions.xml", "ACache", "key1");
    LOG.info(compA.toString());
  }
}
/**
 * PrimeFaces selectable data model for schedule rows, keyed by each row's
 * unique identifier.
 *
 * <p>Created with IntelliJ IDEA. User: Alex Date: 24.03.13 Time: 21:18
 */
public class SheduleDataModel extends ListDataModel<SheduleModel>
    implements SelectableDataModel<SheduleModel> {

  private static final org.slf4j.Logger log = LoggerFactory.getLogger(SheduleDataModel.class);

  public SheduleDataModel() {}

  public SheduleDataModel(List<SheduleModel> list) {
    super(list);
  }

  /** Returns the stable row key for a schedule row (its unique identifier). */
  @Override
  public Object getRowKey(SheduleModel sheduleModel) {
    return sheduleModel.getUnique();
  }

  /**
   * Resolves a row by its unique key.
   *
   * @param s the unique key produced by {@link #getRowKey}
   * @return the matching row, or null when no row matches
   */
  @Override
  public SheduleModel getRowData(String s) {
    // Wrapped data is always the List<SheduleModel> this model was built with.
    @SuppressWarnings("unchecked")
    List<SheduleModel> shedules = (List<SheduleModel>) getWrappedData();
    for (SheduleModel sheduleModel : shedules) {
      if (sheduleModel.getUnique().equals(s)) {
        return sheduleModel;
      }
    }
    return null;
  }
}