/** Launches the process and blocks until it stops. */
  public void launch(Monitored mp) {
    if (!lifecycle.tryToMoveTo(Lifecycle.State.STARTING)) {
      throw new IllegalStateException("Already started");
    }
    monitored = mp;

    try {
      LoggerFactory.getLogger(getClass()).info("Starting " + getKey());
      Runtime.getRuntime().addShutdownHook(shutdownHook);
      stopWatcher.start();

      monitored.start();
      boolean ready = false;
      while (!ready) {
        ready = monitored.isReady();
        Thread.sleep(20L);
      }

      // notify monitor that process is ready
      commands.setReady();

      if (lifecycle.tryToMoveTo(Lifecycle.State.STARTED)) {
        monitored.awaitStop();
      }
    } catch (Exception e) {
      LoggerFactory.getLogger(getClass()).warn("Fail to start " + getKey(), e);

    } finally {
      stop();
    }
  }
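For reference, a minimal sketch of what a Monitored implementation could look like, assuming only the start(), isReady() and awaitStop() methods invoked by launch above; the class name, the latch-based body and the stop() method are illustrative.

import java.util.concurrent.CountDownLatch;

// Hypothetical implementation; only the Monitored method names come from the snippet above.
public class EmbeddedServerProcess implements Monitored {
  private final CountDownLatch stopLatch = new CountDownLatch(1);
  private volatile boolean ready = false;

  @Override
  public void start() {
    // start the embedded server here, then flag readiness so launch() can proceed
    ready = true;
  }

  @Override
  public boolean isReady() {
    return ready;
  }

  @Override
  public void awaitStop() {
    try {
      // block the launcher thread until stop() releases the latch
      stopLatch.await();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }

  // illustrative shutdown target, not part of the snippet above
  public void stop() {
    stopLatch.countDown();
  }
}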
 private void importActiveRules(RulesDao rulesDao, RulesProfile profile) {
   for (Iterator<ActiveRule> iar = profile.getActiveRules(true).iterator(); iar.hasNext(); ) {
     ActiveRule activeRule = iar.next();
     Rule unMarshalledRule = activeRule.getRule();
     Rule matchingRuleInDb =
         rulesDao.getRuleByKey(unMarshalledRule.getRepositoryKey(), unMarshalledRule.getKey());
     if (matchingRuleInDb == null) {
       LoggerFactory.getLogger(getClass())
           .error(
               "Unable to find active rule "
                   + unMarshalledRule.getRepositoryKey()
                   + ":"
                   + unMarshalledRule.getKey());
       iar.remove();
       continue;
     }
     activeRule.setRule(matchingRuleInDb);
     activeRule.setRulesProfile(profile);
     for (Iterator<ActiveRuleParam> irp = activeRule.getActiveRuleParams().iterator();
         irp.hasNext(); ) {
       ActiveRuleParam activeRuleParam = irp.next();
       RuleParam unMarshalledRP = activeRuleParam.getRuleParam();
       RuleParam matchingRPInDb = rulesDao.getRuleParam(matchingRuleInDb, unMarshalledRP.getKey());
       if (matchingRPInDb == null) {
         LoggerFactory.getLogger(getClass())
             .error("Unable to find active rule parameter " + unMarshalledRP.getKey());
         irp.remove();
         continue;
       }
       activeRuleParam.setActiveRule(activeRule);
       activeRuleParam.setRuleParam(matchingRPInDb);
     }
   }
 }
/** JMS Service. This service implements sending messages to a JMS queue. */
@Stateless
public class JMSHelperService {
  private static final Logger LOGGER = LoggerFactory.getLogger(JMSHelperService.class);
  private static final Logger LOGGER_CONFIDENTIAL =
      LoggerFactory.getLogger(USEFLogCategory.CONFIDENTIAL);

  @Resource(mappedName = USEFConstants.OUT_QUEUE_NAME)
  private Queue outQueue;

  @Resource(mappedName = USEFConstants.IN_QUEUE_NAME)
  private Queue inQueue;

  @Resource(mappedName = USEFConstants.NOT_SENT_QUEUE_NAME)
  private Queue notSentQueue;

  @Inject private JMSContext context;

  private void sendMessage(Queue queue, String message) {
    try {
      context.createProducer().send(queue, message);
    } catch (Exception e) {
      LOGGER.error("Error sending the message: ", e);
      LOGGER_CONFIDENTIAL.debug("Error sending the message: '{}' to the queue", message, e);
      throw new TechnicalException(e);
    }
  }

  /**
   * Sends a message to the out queue.
   *
   * @param message message
   */
  public void sendMessageToOutQueue(String message) {
    LOGGER.debug("Started sending msg to the out queue");
    sendMessage(outQueue, message);
    LOGGER.debug("Msg is successfully sent to the out queue");
  }

  /**
   * Sends a message to the in queue.
   *
   * @param message message
   */
  public void sendMessageToInQueue(String message) {
    LOGGER.debug("Started sending msg to the in queue");
    sendMessage(inQueue, message);
    LOGGER.debug("Msg is successfully sent to the in queue");
  }

  /**
   * Sends a message to the not sent queue.
   *
   * @param message message
   */
  public void sendMessageToNotSentQueue(String message) {
    LOGGER.debug("Started sending msg to the not sent queue");
    sendMessage(notSentQueue, message);
    LOGGER.debug("Msg is successfully sent to the not sent queue");
  }
}
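A rough usage sketch: injecting the helper into another stateless bean and delegating to it; the calling class and the payload are made up for illustration.

@Stateless
public class OutgoingMessageSender {

  @Inject private JMSHelperService jmsHelperService;

  // hypothetical caller; the payload is whatever XML the application produces
  public void publish(String xmlPayload) {
    jmsHelperService.sendMessageToOutQueue(xmlPayload);
  }
}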
  public static void main(String[] args) throws Exception {
    LOG = LoggerFactory.getLogger(ExampleCollapserMain.class);
    CLOG = LoggerFactory.getLogger("console");
    initializeHystrixSettings();

    runLoopTest();
  }
 public ReadWriteLoggingFilter(String logname, int capacity) {
   logQueue = new LinkedBlockingQueue<Log>(capacity);
   this.RECEIVE_LOGGER = LoggerFactory.getLogger(logname + ".Recv");
   this.SENT_LOGGER = LoggerFactory.getLogger(logname + ".Sent");
   this.EXCEPTION_LOGGER = LoggerFactory.getLogger(logname + ".Exception");
   this.EVENT_LOGGER = LoggerFactory.getLogger(logname + ".Event");
 }
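For clarity, a brief usage sketch showing the logger names this constructor derives; the base name and capacity are illustrative values.

// Hypothetical usage: derives the loggers myapp.network.Recv, myapp.network.Sent,
// myapp.network.Exception and myapp.network.Event, with a 512-entry log queue.
ReadWriteLoggingFilter filter = new ReadWriteLoggingFilter("myapp.network", 512);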
 @Parameters({"target"})
 @BeforeClass
 public void setUp(String target) throws Exception {
   log.info("METADATA VALIDATOR TARGET: " + target);
   sf = new TestServiceFactory().proxy();
   config = new ImportConfig();
   // Let the user know at what level we're logging
   ch.qos.logback.classic.Logger lociLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("loci");
   ch.qos.logback.classic.Logger omeLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("ome.formats");
   log.info(
       String.format(
           "Log levels -- Bio-Formats: %s OMERO.importer: %s",
           lociLogger.getLevel(), omeLogger.getLevel()));
   store = new OMEROMetadataStoreClient();
   store.initialize(sf);
   store.setEnumerationProvider(new TestEnumerationProvider());
   store.setInstanceProvider(new BlitzInstanceProvider(store.getEnumerationProvider()));
   minimalStore = new OMEROMetadataStoreClient();
   minimalStore.initialize(sf);
   minimalStore.setEnumerationProvider(new TestEnumerationProvider());
   minimalStore.setInstanceProvider(
       new BlitzInstanceProvider(minimalStore.getEnumerationProvider()));
   wrapper = new OMEROWrapper(config);
   wrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.ALL));
   minimalWrapper = new OMEROWrapper(config);
   minimalWrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.MINIMUM));
   wrapper.setMetadataStore(store);
   store.setReader(wrapper.getImageReader());
   minimalStore.setReader(minimalWrapper.getImageReader());
 }
  @Override
  public void createDestination() throws Exception {
    FileInputStream schemaIn = new FileInputStream(avsc);
    Schema original = new Schema.Parser().parse(schemaIn);
    schemaIn.close();

    Schema evolved = getEvolvedSchema(original);

    FileOutputStream schemaOut = new FileOutputStream(evolvedAvsc);
    schemaOut.write(evolved.toString(true).getBytes());
    schemaOut.close();

    List<String> createArgs =
        Lists.newArrayList("create", dest, "-s", evolvedAvsc, "-r", repoUri, "-d", "target/data");
    createArgs.addAll(getExtraCreateArgs());

    TestUtil.run(
        LoggerFactory.getLogger(this.getClass()),
        "delete",
        dest,
        "-r",
        repoUri,
        "-d",
        "target/data");
    TestUtil.run(
        LoggerFactory.getLogger(this.getClass()),
        createArgs.toArray(new String[createArgs.size()]));
    this.console = mock(Logger.class);
    this.command = new CopyCommand(console);
    command.setConf(new Configuration());
  }
  @Override
  public void run() {
    List<CompactionInfo> runningCompactions = Compactor.getRunningCompactions();

    Set<List<Long>> newKeys = new HashSet<>();

    long time = System.currentTimeMillis();

    for (CompactionInfo ci : runningCompactions) {
      List<Long> compactionKey =
          Arrays.asList(ci.getID(), ci.getEntriesRead(), ci.getEntriesWritten());
      newKeys.add(compactionKey);

      if (!observedCompactions.containsKey(compactionKey)) {
        observedCompactions.put(compactionKey, new ObservedCompactionInfo(ci, time));
      }
    }

    // look for compactions that finished or made progress after a warning was logged
    HashMap<List<Long>, ObservedCompactionInfo> copy = new HashMap<>(observedCompactions);
    copy.keySet().removeAll(newKeys);

    for (ObservedCompactionInfo oci : copy.values()) {
      if (oci.loggedWarning) {
        LoggerFactory.getLogger(CompactionWatcher.class)
            .info("Compaction of " + oci.compactionInfo.getExtent() + " is no longer stuck");
      }
    }

    // remove any compaction that completed or made progress
    observedCompactions.keySet().retainAll(newKeys);

    long warnTime = config.getTimeInMillis(Property.TSERV_COMPACTION_WARN_TIME);

    // check for stuck compactions
    for (ObservedCompactionInfo oci : observedCompactions.values()) {
      if (time - oci.firstSeen > warnTime && !oci.loggedWarning) {
        Thread compactionThread = oci.compactionInfo.getThread();
        if (compactionThread != null) {
          StackTraceElement[] trace = compactionThread.getStackTrace();
          Exception e =
              new Exception(
                  "Possible stack trace of compaction stuck on " + oci.compactionInfo.getExtent());
          e.setStackTrace(trace);
          LoggerFactory.getLogger(CompactionWatcher.class)
              .warn(
                  "Compaction of "
                      + oci.compactionInfo.getExtent()
                      + " to "
                      + oci.compactionInfo.getOutputFile()
                      + " has not made progress for at least "
                      + (time - oci.firstSeen)
                      + "ms",
                  e);
          oci.loggedWarning = true;
        }
      }
    }
  }
Example #9
 @Override
 public Logger supply(Dependency<? super Logger> dependency, Injector injector) {
   if (dependency.isUntargeted()) {
     return LoggerFactory.getLogger("General");
   } else {
     return LoggerFactory.getLogger(dependency.target().getType().getRawType());
   }
 }
Example #10
  public LogOperator(final String category, int options) {

    this.log =
        category != null && !category.isEmpty()
            ? LoggerFactory.getLogger(category)
            : LoggerFactory.getLogger(LogOperator.class);
    this.options = options;
  }
/**
 * The Netty handler responsible for decrypting/encrypting RLPx frames with the FrameCodec created
 * during the HandshakeHandler's initial work.
 *
 * <p>Created by Anton Nashatyrev on 15.10.2015.
 */
public class FrameCodecHandler extends NettyByteToMessageCodec<FrameCodec.Frame> {
  private static final Logger loggerWire = LoggerFactory.getLogger("wire");
  private static final Logger loggerNet = LoggerFactory.getLogger("net");

  public FrameCodec frameCodec;
  public Channel channel;

  public FrameCodecHandler(FrameCodec frameCodec, Channel channel) {
    this.frameCodec = frameCodec;
    this.channel = channel;
  }

  protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out)
      throws IOException {
    if (in.readableBytes() == 0) {
      loggerWire.trace("in.readableBytes() == 0");
      return;
    }

    loggerWire.trace("Decoding frame (" + in.readableBytes() + " bytes)");
    List<FrameCodec.Frame> frames = frameCodec.readFrames(in);

    // Check if a full frame was available.  If not, we'll try later when more bytes come in.
    if (frames == null || frames.isEmpty()) return;

    // count one inbound RLPx message per decoded frame
    for (int i = 0; i < frames.size(); i++) {
      channel.getNodeStatistics().rlpxInMessages.add();
    }

    out.addAll(frames);
  }

  @Override
  protected void encode(ChannelHandlerContext ctx, FrameCodec.Frame frame, ByteBuf out)
      throws Exception {

    frameCodec.writeFrame(frame, out);

    channel.getNodeStatistics().rlpxOutMessages.add();
  }

  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
    if (channel.isDiscoveryMode()) {
      loggerNet.trace("FrameCodec failed: ", cause);
    } else {
      if (cause instanceof IOException) {
        loggerNet.debug("FrameCodec failed: " + ctx.channel().remoteAddress(), cause);
      } else {
        loggerNet.warn("FrameCodec failed: ", cause);
      }
    }
    ctx.close();
  }
}
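A hedged sketch of how a codec handler like this is typically registered in a Netty pipeline during channel initialization; only FrameCodecHandler and its constructor come from the snippet above, the initializer class and the handler name are assumptions.

import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;

// Hypothetical initializer; wires the frame codec in before any message handlers.
public class RlpxChannelInitializer extends ChannelInitializer<SocketChannel> {

  private final FrameCodec frameCodec;
  private final Channel peerChannel; // the application-level Channel the handler updates

  public RlpxChannelInitializer(FrameCodec frameCodec, Channel peerChannel) {
    this.frameCodec = frameCodec;
    this.peerChannel = peerChannel;
  }

  @Override
  protected void initChannel(SocketChannel ch) {
    // frame encryption/decryption sits early in the pipeline
    ch.pipeline().addLast("frameCodec", new FrameCodecHandler(frameCodec, peerChannel));
  }
}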
 public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session)
     throws HibernateException, SQLException {
   if (value == null || defaultValue.equals(value)) {
     LoggerFactory.getLogger(getClass()).trace("binding null to parameter: " + index);
     st.setNull(index, Types.INTEGER);
   } else {
     LoggerFactory.getLogger(getClass()).trace("binding " + value + " to parameter: " + index);
     st.setInt(index, ((Integer) value).intValue());
   }
 }
  @Override
  public void execute(JobExecutionContext context) {
    LoggerFactory.getLogger(Loggers.SCHEDULER)
        .debug("Scheduler " + PostingConstraintJob.class.getName() + " is started");

    SaleConstraintFacade.getInstance().performPosting();

    LoggerFactory.getLogger(Loggers.SCHEDULER)
        .debug("Scheduler " + PostingConstraintJob.class.getName() + " is finished");
  }
 @Override
 public void execute(JobExecutionContext context) throws JobExecutionException {
   Context initContext;
   try {
     initContext = new InitialContext();
     MetricsUpdater bean = (MetricsUpdater) initContext.lookup(EJB_JNDI_NAME);
     bean.updateMetrics();
     LoggerFactory.getLogger(JobExample.class).info("Job executed");
   } catch (NamingException e) {
     LoggerFactory.getLogger(JobExample.class).error("Failed to look up MetricsUpdater EJB", e);
   }
 }
 /**
  * Produces an instance of a slf4j logger for the given injection point.
  *
  * @param injectionPoint to use
  * @return a logger
  */
 @Produces
 public Logger getLogger(final InjectionPoint injectionPoint) {
   // The injection point is used to instantiate the correct logger for the
   // caller class.
   Bean<?> bean = injectionPoint.getBean();
   Logger l = null;
   if (bean != null) {
     Class<?> beanClass = bean.getBeanClass();
     l = LoggerFactory.getLogger(beanClass);
   } else {
     l = LoggerFactory.getLogger("Default logger");
   }
   return l;
 }
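With that producer in place, beans can simply inject the logger and it is named after the injecting class; a minimal sketch, with an illustrative bean and method.

@ApplicationScoped
public class OrderProcessor {

  // resolved by the @Produces method above, so the logger is named after OrderProcessor
  @Inject private Logger log;

  // hypothetical business method, only here to show the injected logger in use
  public void process(String orderId) {
    log.info("Processing order {}", orderId);
  }
}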
Example #16
  /**
   * Adds this object to the SMTP logs appender observable, to intercept logs.
   *
   * <p>The goal is to be informed when the log appender receives some debug SMTP logs.<br>
   * When a log is written, the appender will notify this class which will display it in the text
   * area.
   */
  private void addObserverToSmtpLogAppender() {
    Logger smtpLogger = LoggerFactory.getLogger(org.subethamail.smtp.server.Session.class);
    String appenderName = Configuration.INSTANCE.get("logback.appender.name");

    @SuppressWarnings("unchecked")
    SMTPLogsAppender<ILoggingEvent> appender =
        (SMTPLogsAppender<ILoggingEvent>)
            ((AppenderAttachable<ILoggingEvent>) smtpLogger).getAppender(appenderName);
    if (appender == null) {
      LoggerFactory.getLogger(LogsPane.class).error("Can't find logger: {}", appenderName);
    } else {
      appender.getObservable().addObserver(this);
    }
  }
 public static void execute(Propagator toPropagate, IPropagationEngine engine)
     throws ContradictionException {
   if (LoggerFactory.getLogger(IPropagationEngine.class).isDebugEnabled()) {
     LoggerFactory.getLogger(IPropagationEngine.class).debug("[A] {}", toPropagate);
   }
   if (toPropagate.isStateLess()) {
     toPropagate.setActive();
     toPropagate.propagate(PropagatorEventType.FULL_PROPAGATION.getStrengthenedMask());
     engine.onPropagatorExecution(toPropagate);
   } else if (toPropagate.isActive()) { // deal with updated propagator
     toPropagate.propagate(PropagatorEventType.FULL_PROPAGATION.getStrengthenedMask());
     engine.onPropagatorExecution(toPropagate);
   }
 }
/**
 * Created by zhujiajun 16/6/27 20:20
 *
 * <p>Logging utility class.
 */
public final class LoggerTool {

  public static final Logger LOGGER = LoggerFactory.getLogger(LoggerTool.class);
  public static final Logger ERROR = LoggerFactory.getLogger("adult.error");

  private LoggerTool() {}

  public static String getTrace(Throwable t) {
    StringWriter stringWriter = new StringWriter();
    PrintWriter printWriter = new PrintWriter(stringWriter);
    t.printStackTrace(printWriter);
    return stringWriter.toString();
  }
}
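A quick usage sketch, for instance routing a full stack trace to the dedicated error logger; the try/catch and the riskyOperation call are illustrative.

try {
  riskyOperation(); // hypothetical method that may throw
} catch (Exception e) {
  // log the complete stack trace as a single string on the "adult.error" logger
  LoggerTool.ERROR.error(LoggerTool.getTrace(e));
}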
  @Override
  public void print(Object[] channel, String line) {
    // Parse the channels
    Class<?> source = null; // The class the message is coming from
    Object backupSource = null; // Another identifier for the message
    Redwood.Flag flag = Redwood.Flag.STDOUT;
    for (Object c : channel) {
      if (c instanceof Class) {
        source = (Class<?>) c; // This is a class the message is coming from
      } else if (c instanceof Redwood.Flag) {
        if (c != Redwood.Flag.FORCE) { // This is a Redwood flag
          flag = (Redwood.Flag) c;
        }
      } else {
        backupSource = c; // This is another "source" for the log message
      }
    }

    // Get the logger
    Logger impl = null;
    if (source != null) {
      impl = LoggerFactory.getLogger(source);
    } else if (backupSource != null) {
      impl = LoggerFactory.getLogger(backupSource.toString());
    } else {
      impl = LoggerFactory.getLogger("CoreNLP");
    }

    // Route the signal
    switch (flag) {
      case ERROR:
        impl.error(line);
        break;
      case WARN:
        impl.warn(line);
        break;
      case DEBUG:
        impl.debug(line);
        break;
      case STDOUT:
      case STDERR:
        impl.info(line);
        break;
      case FORCE:
        throw new IllegalStateException("Should not reach this switch case");
      default:
        throw new IllegalStateException("Unknown Redwood flag for slf4j integration: " + flag);
    }
  }
public abstract class CoreUtils {

  public static Logger LOGGER = LoggerFactory.getLogger("StaticLogger");
  protected Logger classlogger = LoggerFactory.getLogger(getClass());
  public static String separator = System.getProperty("file.separator");

  /**
   * This can load a Gradle resource, such as a .properties file.
   *
   * @param fileName the name of the resource file to load
   * @return the resource as a File
   */
  public static File loadGradleResource(String fileName) {
    File junitFile = new File(fileName);
    if (junitFile.exists()) {
      LOGGER.info("The file '" + junitFile.getAbsolutePath() + "' exists.");
    } else {
      LOGGER.info("Problem loading Gradle resource: " + junitFile.getAbsolutePath());
    }
    return junitFile;
  }

  public static void waitTimer(int units, int mills) {
    DecimalFormat df = new DecimalFormat("###.##");
    double totalSeconds = ((double) units * mills) / 1000;
    LOGGER.info(
        "Explicit pause for "
            + df.format(totalSeconds)
            + " seconds divided by "
            + units
            + " units of time: ");
    try {
      int x = 0;
      while (x < units) {
        Thread.sleep(mills);
        LOGGER.info(".");
        x = x + 1;
      }
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
      LOGGER.warn("waitTimer interrupted", ex);
    }
  }

  protected CoreUtils() {
    // do nothing
  }
}
Example #21
@Configuration
@EnableAsync
@EnableScheduling
public class AsyncConfiguration implements AsyncConfigurer {

  private final Logger log = LoggerFactory.getLogger(AsyncConfiguration.class);

  @Inject private JHipsterProperties jHipsterProperties;

  @Override
  @Bean(name = "taskExecutor")
  public Executor getAsyncExecutor() {
    log.debug("Creating Async Task Executor");
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(jHipsterProperties.getAsync().getCorePoolSize());
    executor.setMaxPoolSize(jHipsterProperties.getAsync().getMaxPoolSize());
    executor.setQueueCapacity(jHipsterProperties.getAsync().getQueueCapacity());
    executor.setThreadNamePrefix("nailit-Executor-");
    return new ExceptionHandlingAsyncTaskExecutor(executor);
  }

  @Override
  public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
    return new SimpleAsyncUncaughtExceptionHandler();
  }
}
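Once this configuration is active, any bean method annotated with @Async runs on the executor defined above; a small illustrative sketch, the service and its method are made up.

@Service
public class MailService {

  private final Logger log = LoggerFactory.getLogger(MailService.class);

  // runs on the "taskExecutor" thread pool configured in AsyncConfiguration
  @Async
  public void sendActivationEmail(String email) {
    log.debug("Sending activation e-mail to '{}'", email);
    // actual mail sending would go here
  }
}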
Example #22
/**
 * ComponentA is composed of ComponentB and some other fields. Tests the interactions between two
 * loaders, where the first component's loader loads component B by using getWithLoader, which in
 * turn invokes component B's loader.
 *
 * @author <a href="mailto:[email protected]">Greg Luck</a>
 * @version $Id$
 */
public class CompositeLoaderTest {

  private static final Logger LOG = LoggerFactory.getLogger(CompositeLoaderTest.class.getName());

  /**
   * Sets up the fixture, for example, open a network connection. This method is called before a
   * test is executed.
   */
  @Before
  public void setUp() throws Exception {
    CacheHelper.init();
  }

  /**
   * Tears down the fixture, for example, close a network connection. This method is called after a
   * test is executed.
   */
  @After
  public void tearDown() throws Exception {
    CacheHelper.shutdown();
  }

  /**
   * This test reproduces a deadlock found in 1.4-beta1 around loading interactions and
   * getWithLoader. Now fixed.
   */
  @Test
  public void testCompositeLoad() {
    LOG.info("Getting from cache");
    ComponentA compA =
        (ComponentA) CacheHelper.get("ehcache-loaderinteractions.xml", "ACache", "key1");
    LOG.info(compA.toString());
  }
}
/** @author Łukasz Dumiszewski */
@Service("duplicateWorkDetectMapService")
public class DuplicateWorkDetectMapService
    implements DiMapService<Writable, BytesWritable, Text, BytesWritable> {

  @SuppressWarnings("unused")
  private static Logger log = LoggerFactory.getLogger(DuplicateWorkDetectMapService.class);

  @Autowired private WorkKeyGenerator keyGen;

  @Override
  public void map(
      Writable key,
      BytesWritable value,
      Mapper<Writable, BytesWritable, Text, BytesWritable>.Context context)
      throws IOException, InterruptedException {

    DocumentWrapper docWrapper = DocumentProtos.DocumentWrapper.parseFrom(value.copyBytes());

    String docKey = keyGen.generateKey(docWrapper.getDocumentMetadata(), 0);

    if (!docKey.isEmpty()) {
      DocumentWrapper thinDocWrapper = DocumentWrapperUtils.cloneDocumentMetadata(docWrapper);
      context.write(new Text(docKey), new BytesWritable(thinDocWrapper.toByteArray()));
    }
  }
  // ******************** PRIVATE ********************
}
/**
 * Created with IntelliJ IDEA. User: Alex Date: 24.03.13 Time: 21:18 To change this template use
 * File | Settings | File Templates.
 */
public class SheduleDataModel extends ListDataModel<SheduleModel>
    implements SelectableDataModel<SheduleModel> {

  private static final org.slf4j.Logger log = LoggerFactory.getLogger(SheduleDataModel.class);

  public SheduleDataModel() {}

  public SheduleDataModel(List<SheduleModel> list) {
    super(list);
  }

  @Override
  public Object getRowKey(SheduleModel sheduleModel) {
    //        log.info("onRowSelect " + sheduleModel.getUnique() + " "
    // +sheduleModel.getTrainNumber());
    return sheduleModel.getUnique();
  }

  @Override
  public SheduleModel getRowData(String s) {
    //        log.debug("getRowData " + s);
    List<SheduleModel> shedules = (List<SheduleModel>) getWrappedData();
    //        log.info("getRowData " + shedules);
    for (SheduleModel sheduleModel : shedules) {
      //            log.info("rows " + sheduleModel.getUnique() + " " + s + " " +
      // sheduleModel.getUnique().equals(s));
      if (sheduleModel.getUnique().equals(s)) return sheduleModel;
    }
    return null;
  }
}
/** @author bonisv */
@Component
public class MetricsKeywordRepositoryImpl implements MetricsKeywordRepository {

  private static final Logger log = LoggerFactory.getLogger(MetricsKeywordRepositoryImpl.class);

  @Autowired private MetricsKeywordCrudRepository metricsKeywordCrudRepository;

  @Override
  public void delete(MetricsKeyword entity) {
    metricsKeywordCrudRepository.delete(entity);
  }

  @Override
  public void delete(Iterable<MetricsKeyword> entities) {
    metricsKeywordCrudRepository.delete(entities);
  }

  @Override
  public Iterable<MetricsKeyword> save(Iterable<MetricsKeyword> entities) {
    return metricsKeywordCrudRepository.save(entities);
  }

  @Override
  public MetricsKeyword save(MetricsKeyword entity) {
    return metricsKeywordCrudRepository.save(entity);
  }

  @Override
  public List<MetricsKeyword> findByMetrics(Metrics metrics) {
    return metricsKeywordCrudRepository.findByMetrics(metrics);
  }
}
public class CatLogDaoImpl extends CommonDaoSupporter implements CatLogDao {

  private static final long serialVersionUID = -8140396918843969392L;

  private static final transient Logger LOGGER = LoggerFactory.getLogger(CatLogDaoImpl.class);

  /** Cat insert log */
  private static final String CAT_INSERT_LOG_PO_NAME = CatInsertLogImpl.class.getName();

  public CatLogDaoImpl() {}

  /**
   * Deletes the cat insert log.
   *
   * @param catId the cat id
   * @return the number of deleted rows
   */
  public int deleteCatInsertLog(String catId) {
    Map<String, Object> params = new LinkedHashMap<String, Object>();
    //
    StringBuilder hql = new StringBuilder();
    hql.append("delete from ");
    hql.append(CAT_INSERT_LOG_PO_NAME + " ");
    hql.append("where 1=1 ");

    // catId
    hql.append("and catId = :catId ");
    params.put("catId", catId);
    //
    return executeByHql(hql, params);
  }
}
Example #27
/**
 * {@link HelloWorld}
 *
 * @author Jay
 */
@Path("/")
@Singleton
@Produces({APPLICATION_JSON, APPLICATION_XML})
@Consumes({APPLICATION_XML, APPLICATION_JSON})
@Api(value = "/", description = "Some Sample Resource")
public class HelloWorld {
  private static Logger LOG = LoggerFactory.getLogger(HelloWorld.class);

  @GET
  @ApiOperation(value = "Hello Resource.", response = UIMUserBasicInfo.class)
  @ApiResponses({@ApiResponse(code = 200, message = "If Some Test Hello. ")})
  public UIMUserBasicInfo sayHello() {
    UIMUserBasicInfo info = new UIMUserBasicInfo();
    info.setProfileId("1234");
    info.setFirstName("Jayaram");
    return info;
  }

  @POST
  @ApiOperation(value = "User Registration.")
  @ApiResponses({
    @ApiResponse(code = 201, message = "When user registration is completed. "),
    @ApiResponse(code = 400, message = "If So. "),
  })
  public void register(@NotNull(message = "{some.test}") UIMUserBasicInfo info) {
    return;
  }
}
// Kill Bill generic cache dispatcher
public class CacheControllerDispatcher {

  private static final Logger logger = LoggerFactory.getLogger(CacheControllerDispatcher.class);

  public static final String CACHE_KEY_SEPARATOR = "::";

  private final Map<CacheType, CacheController<Object, Object>> caches;

  @Inject
  public CacheControllerDispatcher(final Map<CacheType, CacheController<Object, Object>> caches) {
    this.caches = caches;
  }

  // Test only
  public CacheControllerDispatcher() {
    caches = new HashMap<CacheType, CacheController<Object, Object>>();
  }

  public CacheController<Object, Object> getCacheController(final CacheType cacheType) {
    return caches.get(cacheType);
  }

  public void clearAll() {
    for (final CacheController<Object, Object> cacheController : caches.values()) {
      cacheController.removeAll();
    }
  }
}
/** Service for accessing PointOfSale related info. */
@Service
public class PointOfSaleService extends BaseEntityService<PointOfSale>
    implements IPointOfSaleService {
  private final Logger logger = LoggerFactory.getLogger(PointOfSaleService.class);

  @Autowired private ICategoryDao categoryDao;

  /**
   * Returns the list of Category for the given POS id.
   *
   * @param posId the point of sale id
   * @return a list of Category
   * @throws BusinessException if the categories cannot be loaded
   */
  public List<Category> getCategoryByPosId(Long posId) throws BusinessException {
    List<Category> categoryList = new ArrayList<Category>();
    Set<Category> categorySet = null;
    try {
      categorySet = categoryDao.getCategorySetByPOS(posId);
    } catch (DAOException e) {
      logger.error(e.getMessage(), e);
      throw new BusinessException(e.getMessage(), e);
    }
    if (categorySet != null) {
      categoryList.addAll(categorySet);
    } else {
      throw new BusinessException(Constant.ERROR_CAUSE_CATEGORY_NOT_EXIST);
    }
    return categoryList;
  }
}
public final class OCLintProfile extends ProfileDefinition {
  private static final Logger LOGGER = LoggerFactory.getLogger(OCLintProfile.class);

  private final OCLintProfileImporter importer;

  public OCLintProfile(final OCLintProfileImporter importer) {
    this.importer = importer;
  }

  @Override
  public RulesProfile createProfile(final ValidationMessages messages) {
    LOGGER.info("Creating OCLint Profile");
    Reader profileXmlReader = null;

    try {
      profileXmlReader =
          new InputStreamReader(
              OCLintProfile.class.getResourceAsStream(
                  "/org/sonar/plugins/objectivec/profile-oclint.xml"));

      RulesProfile profile = importer.importProfile(profileXmlReader, messages);
      profile.setLanguage(ObjectiveC.KEY);
      profile.setName(OCLintRulesDefinition.REPOSITORY_NAME);
      profile.setDefaultProfile(true);

      return profile;
    } finally {
      Closeables.closeQuietly(profileXmlReader);
    }
  }
}