Example #1
  @Override
  public MorphiaIterator<T, T> fetch(final FindOptions options) {
    final DBCursor cursor = prepareCursor(options);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Getting cursor(" + dbColl.getName() + ")  for query:" + cursor.getQuery());
    }

    return new MorphiaIterator<T, T>(ds, cursor, ds.getMapper(), clazz, dbColl.getName(), cache);
  }
Example #2
 @Test
 public void testDropDatabaseFromFongoWithMultipleCollectionsDropsBothCollections()
     throws Exception {
   Fongo fongo = newFongo();
   DB db = fongo.getDB("db");
   DBCollection collection1 = db.getCollection("coll1");
   DBCollection collection2 = db.getCollection("coll2");
   db.dropDatabase();
   assertFalse("Collection 1 shouldn't exist in DB", db.collectionExists(collection1.getName()));
   assertFalse("Collection 2 shouldn't exist in DB", db.collectionExists(collection2.getName()));
   assertFalse("DB shouldn't exist in fongo", fongo.getDatabaseNames().contains("db"));
 }
Example #3
  /**
   * Initialises the MongoDB connection using the Mongo object provided to the endpoint
   *
   * @throws CamelMongoDbException
   */
  public void initializeConnection() throws CamelMongoDbException {
    LOG.info("Initialising MongoDb endpoint: {}", this.toString());
    if (database == null || collection == null) {
      throw new CamelMongoDbException(
          "Missing required endpoint configuration: database and/or collection");
    }
    db = mongoConnection.getDB(database);
    if (db == null) {
      throw new CamelMongoDbException(
          "Could not initialise MongoDbComponent. Database " + database + " does not exist.");
    }
    if (!createCollection && !db.collectionExists(collection)) {
      throw new CamelMongoDbException(
          "Could not initialise MongoDbComponent. Collection "
              + collection
              + " and createCollection is false.");
    }
    dbCollection = db.getCollection(collection);

    LOG.info(
        "MongoDb component initialised and endpoint bound to MongoDB collection with the following paramters. Address list: {}, Db: {}, Collection: {}",
        new Object[] {
          mongoConnection.getAllAddress().toString(), db.getName(), dbCollection.getName()
        });
  }
Example #4
  @Override
  public List<T> asList(final FindOptions options) {
    final List<T> results = new ArrayList<T>();
    final MorphiaIterator<T, T> iter = fetch(options);
    try {
      for (final T ent : iter) {
        results.add(ent);
      }
    } finally {
      iter.close();
    }

    if (LOG.isTraceEnabled()) {
      LOG.trace(
          format(
              "asList: %s \t %d entities, iterator time: driver %d ms, mapper %d ms %n\t cache: %s %n\t for %s",
              dbColl.getName(),
              results.size(),
              iter.getDriverTime(),
              iter.getMapperTime(),
              cache.stats(),
              getQueryObject()));
    }

    return results;
  }
Example #5
  public <T> T findAndDelete(Query<T> query) {
    QueryImpl<T> qi = (QueryImpl<T>) query;
    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null) dbColl = getCollection(qi.getEntityClass());

    EntityCache cache = createCache();

    if (log.isTraceEnabled())
      log.trace("Executing findAndModify(" + dbColl.getName() + ") with delete ...");

    DBObject result =
        dbColl.findAndModify(
            qi.getQueryObject(),
            qi.getFieldsObject(),
            qi.getSortObject(),
            true,
            null,
            false,
            false);

    if (result != null) {
      T entity = (T) mapr.fromDBObject(qi.getEntityClass(), result, cache);
      return entity;
    }

    return null;
  }
Example #6
  protected void createIndex(DBCollection dbCollection, MongoIndex index) {
    BasicDBObject fields = new BasicDBObject();
    for (String f : index.fields()) {
      fields.put(f, 1);
    }

    boolean unique = index.unique();
    boolean sparse = index.sparse();

    BasicDBObject options = new BasicDBObject();
    if (unique) {
      options.put("unique", unique);
    }
    if (sparse) {
      options.put("sparse", sparse);
    }

    dbCollection.ensureIndex(fields, options);

    logger.debug(
        "Created index "
            + fields
            + "(options: "
            + options
            + ") on "
            + dbCollection.getName()
            + " in "
            + this.database.getName());
  }
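
A side note on this example: ensureIndex was deprecated in later releases of the legacy MongoDB Java driver in favour of createIndex. Below is a minimal sketch of the same index creation using the createIndex(DBObject, DBObject) overload, assuming a driver version (roughly 2.12 or newer) where it is available; the class and method names other than the driver API are hypothetical.

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;

public final class IndexCreationSketch {

  // Builds the same key/option documents as the example above, but calls the
  // non-deprecated createIndex(DBObject, DBObject) overload instead of ensureIndex.
  static void createAscendingIndex(
      DBCollection dbCollection, String[] fields, boolean unique, boolean sparse) {
    BasicDBObject keys = new BasicDBObject();
    for (String f : fields) {
      keys.put(f, 1); // 1 = ascending order for each indexed field
    }

    BasicDBObject options = new BasicDBObject();
    if (unique) {
      options.put("unique", true);
    }
    if (sparse) {
      options.put("sparse", true);
    }

    dbCollection.createIndex(keys, options);
  }
}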
Example #7
 @Override
 @Deprecated
 public long countAll() {
   final DBObject query = getQueryObject();
   if (LOG.isTraceEnabled()) {
     LOG.trace("Executing count(" + dbColl.getName() + ") for query: " + query);
   }
   return dbColl.getCount(query);
 }
Example #8
  @Override
  public MorphiaKeyIterator<T> fetchKeys(final FindOptions options) {
    QueryImpl<T> cloned = cloneQuery();
    cloned.getOptions().projection(new BasicDBObject(Mapper.ID_KEY, 1));
    cloned.includeFields = true;

    return new MorphiaKeyIterator<T>(
        ds, cloned.prepareCursor(options), ds.getMapper(), clazz, dbColl.getName());
  }
Example #9
  protected <T> void ensureIndex(
      Class<T> clazz,
      String name,
      BasicDBObject fields,
      boolean unique,
      boolean dropDupsOnCreate,
      boolean background,
      boolean sparse) {
    // validate field names and translate them to the stored values
    BasicDBObject keys = new BasicDBObject();
    for (Entry<String, Object> entry : fields.entrySet()) {
      StringBuffer sb = new StringBuffer(entry.getKey());
      Mapper.validate(clazz, mapr, sb, FilterOperator.IN, "", true, false);
      keys.put(sb.toString(), entry.getValue());
    }

    BasicDBObjectBuilder keyOpts = new BasicDBObjectBuilder();
    if (name != null && name.length() > 0) {
      keyOpts.add("name", name);
    }
    if (unique) {
      keyOpts.add("unique", true);
      if (dropDupsOnCreate) keyOpts.add("dropDups", true);
    }

    if (background) keyOpts.add("background", true);
    if (sparse) keyOpts.add("sparse", true);

    DBCollection dbColl = getCollection(clazz);

    BasicDBObject opts = (BasicDBObject) keyOpts.get();
    if (opts.isEmpty()) {
      log.debug("Ensuring index for " + dbColl.getName() + " with keys:" + keys);
      dbColl.ensureIndex(keys);
    } else {
      log.debug(
          "Ensuring index for " + dbColl.getName() + " with keys:" + keys + " and opts:" + opts);
      dbColl.ensureIndex(keys, opts);
    }

    // TODO: remove this once using 2.4 driver does this in ensureIndex
    CommandResult cr = dbColl.getDB().getLastError();
    cr.throwOnError();
  }
Example #10
 @Test
 public void testDropCollectionAlsoDropsFromDB() throws Exception {
   DBCollection collection = newCollection();
   collection.insert(new BasicDBObject());
   collection.drop();
   assertEquals("Collection should have no data", 0, collection.count());
   assertFalse(
       "Collection shouldn't exist in DB",
       collection.getDB().getCollectionNames().contains(collection.getName()));
 }
Example #11
  /** call postSaveOperations and returns Key for entity */
  protected <T> Key<T> postSaveGetKey(
      T entity, DBObject dbObj, DBCollection dbColl, Map<Object, DBObject> involvedObjects) {
    if (dbObj.get(Mapper.ID_KEY) == null) throw new MappingException("Missing _id after save!");

    postSaveOperations(entity, dbObj, involvedObjects);
    Key<T> key = new Key<T>(dbColl.getName(), getId(entity));
    key.setKindClass((Class<? extends T>) entity.getClass());

    return key;
  }
Example #12
 // Don't drop database, but just clear all data in managed collections (useful for export/import
 // or during development)
 protected void clearManagedCollections(Class<?>[] managedEntityTypes) {
   for (Class<?> clazz : managedEntityTypes) {
     DBCollection dbCollection = getDBCollectionForType(clazz);
     if (dbCollection != null) {
       dbCollection.remove(new BasicDBObject());
       logger.debug(
           "Collection " + dbCollection.getName() + " cleared from " + this.database.getName());
     }
   }
 }
Example #13
 @Test
 public void testDropDatabaseFromFongoDropsAllData() throws Exception {
   Fongo fongo = newFongo();
   DBCollection collection = fongo.getDB("db").getCollection("coll");
   collection.insert(new BasicDBObject());
   fongo.dropDatabase("db");
   assertEquals("Collection should have no data", 0, collection.count());
   assertFalse(
       "Collection shouldn't exist in DB",
       collection.getDB().getCollectionNames().contains(collection.getName()));
   assertFalse("DB shouldn't exist in fongo", fongo.getDatabaseNames().contains("db"));
 }
Example #14
  @Test
  public void getCollection() {
    DBCollection collection = template.getCollection(false);
    assertEquals("simple", collection.getName());
    assertEquals(ReadPreference.PRIMARY, collection.getReadPreference());

    collection = template.getCollection(true);
    assertEquals(ReadPreference.SECONDARY, collection.getReadPreference());

    collection = template.getCollection();
    assertEquals(mds.getDb().getReadPreference(), collection.getReadPreference());
  }
Example #15
  @Override
  public MorphiaIterator<U, U> aggregate(
      final String collectionName,
      final Class<U> target,
      final AggregationOptions options,
      final ReadPreference readPreference) {
    LOG.debug("stages = " + stages);

    Cursor cursor = collection.aggregate(stages, options, readPreference);
    return new MorphiaIterator<U, U>(
        cursor, mapper, target, collection.getName(), mapper.createEntityCache());
  }
Example #16
  private DBCollection calculateCollection(Exchange exchange) throws Exception {
    // Dynamic calculation is an option. In most cases it won't be used, and we should not penalise
    // all users by running this resolution logic on every Exchange if they aren't using the
    // functionality at all.
    if (!endpoint.isDynamicity()) {
      return endpoint.getDbCollection();
    }

    String dynamicDB = exchange.getIn().getHeader(MongoDbConstants.DATABASE, String.class);
    String dynamicCollection =
        exchange.getIn().getHeader(MongoDbConstants.COLLECTION, String.class);

    @SuppressWarnings("unchecked")
    List<DBObject> dynamicIndex =
        exchange.getIn().getHeader(MongoDbConstants.COLLECTION_INDEX, List.class);

    DBCollection dbCol = null;

    if (dynamicDB == null && dynamicCollection == null) {
      dbCol = endpoint.getDbCollection();
    } else {
      DB db = null;

      if (dynamicDB == null) {
        db = endpoint.getDb();
      } else {
        db = endpoint.getMongoConnection().getDB(dynamicDB);
      }

      if (dynamicCollection == null) {
        dbCol = db.getCollection(endpoint.getCollection());
      } else {
        dbCol = db.getCollection(dynamicCollection);

        // on the fly add index
        if (dynamicIndex == null) {
          endpoint.ensureIndex(dbCol, endpoint.createIndex());
        } else {
          endpoint.ensureIndex(dbCol, dynamicIndex);
        }
      }
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug(
          "Dynamic database and/or collection selected: {}->{}",
          dbCol.getDB().getName(),
          dbCol.getName());
    }
    return dbCol;
  }
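
For context, the lookup above only kicks in when the endpoint has dynamicity enabled and the message carries the database/collection headers. A hypothetical route that exercises that path could look like the sketch below; the endpoint URI, connection bean name and header values are illustrative only, not taken from the example.

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mongodb.MongoDbConstants;

public class DynamicMongoRouteSketch extends RouteBuilder {

  @Override
  public void configure() {
    from("direct:dynamicInsert")
        // These headers are the ones read back by calculateCollection(Exchange) above.
        .setHeader(MongoDbConstants.DATABASE, constant("otherDb"))
        .setHeader(MongoDbConstants.COLLECTION, constant("otherColl"))
        // dynamicity=true makes the component honour the headers instead of the static settings.
        .to("mongodb:myMongoBean?database=defaultDb&collection=defaultColl&operation=insert&dynamicity=true");
  }
}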
Example #17
  private <T> UpdateResults<T> update(
      Query<T> query, DBObject u, boolean createIfMissing, boolean multi, WriteConcern wc) {
    QueryImpl<T> qi = (QueryImpl<T>) query;

    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null) dbColl = getCollection(qi.getEntityClass());

    if (qi.getSortObject() != null
        && qi.getSortObject().keySet() != null
        && !qi.getSortObject().keySet().isEmpty())
      throw new QueryException("sorting is not allowed for updates.");
    if (qi.getOffset() > 0) throw new QueryException("a query offset is not allowed for updates.");
    if (qi.getLimit() > 0) throw new QueryException("a query limit is not allowed for updates.");

    DBObject q = qi.getQueryObject();
    if (q == null) q = new BasicDBObject();

    if (log.isTraceEnabled())
      log.trace(
          "Executing update("
              + dbColl.getName()
              + ") for query: "
              + q
              + ", ops: "
              + u
              + ", multi: "
              + multi
              + ", upsert: "
              + createIfMissing);

    WriteResult wr;
    if (wc == null) wr = dbColl.update(q, u, createIfMissing, multi);
    else wr = dbColl.update(q, u, createIfMissing, multi, wc);

    throwOnError(wc, wr);

    return new UpdateResults<T>(wr);
  }
Example #18
  private DBCursor prepareCursor(final FindOptions findOptions) {
    final DBObject query = getQueryObject();

    if (LOG.isTraceEnabled()) {
      LOG.trace(
          String.format(
              "Running query(%s) : %s, options: %s,", dbColl.getName(), query, findOptions));
    }

    if (findOptions.isSnapshot()
        && (findOptions.getSortDBObject() != null || findOptions.hasHint())) {
      LOG.warning("Snapshotted query should not have hint/sort.");
    }

    if (findOptions.getCursorType() != NonTailable && (findOptions.getSortDBObject() != null)) {
      LOG.warning("Sorting on tail is not allowed.");
    }

    return dbColl
        .find(
            query,
            findOptions.getOptions().copy().sort(getSortObject()).projection(getFieldsObject()))
        .setDecoderFactory(ds.getDecoderFact());
  }
Example #19
  protected void initManagedCollections(Class<?>[] managedEntityTypes) {
    for (Class<?> clazz : managedEntityTypes) {
      EntityInfo entityInfo = getEntityInfo(clazz);
      String dbCollectionName = entityInfo.getDbCollectionName();
      if (dbCollectionName != null && !database.collectionExists(dbCollectionName)) {
        DBCollection dbCollection = database.getCollection(dbCollectionName);

        logger.debug(
            "Created collection " + dbCollection.getName() + " in " + this.database.getName());

        MongoIndex index = clazz.getAnnotation(MongoIndex.class);
        if (index != null) {
          createIndex(dbCollection, index);
        }

        MongoIndexes indexes = clazz.getAnnotation(MongoIndexes.class);
        if (indexes != null) {
          for (MongoIndex i : indexes.value()) {
            createIndex(dbCollection, i);
          }
        }
      }
    }
  }
Example #20
  public <T> T findAndModify(
      Query<T> query, UpdateOperations<T> ops, boolean oldVersion, boolean createIfMissing) {
    QueryImpl<T> qi = (QueryImpl<T>) query;

    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null) dbColl = getCollection(qi.getEntityClass());

    if (log.isTraceEnabled())
      log.trace("Executing findAndModify(" + dbColl.getName() + ") with update");

    DBObject res =
        dbColl.findAndModify(
            qi.getQueryObject(),
            qi.getFieldsObject(),
            qi.getSortObject(),
            false,
            ((UpdateOpsImpl<T>) ops).getOps(),
            !oldVersion,
            createIfMissing);

    if (res == null) return null;
    else return (T) mapr.fromDBObject(qi.getEntityClass(), res, createCache());
  }
Example #21
  /**
   * Does an initial sync the same way MongoDB does. See
   * https://groups.google.com/forum/?fromgroups=#!topic/mongodb-user/sOKlhD_E2ns
   *
   * @return the last oplog timestamp before the import began
   * @throws InterruptedException if the blocking queue stream is interrupted while waiting
   */
  protected Timestamp<?> doInitialImport(DBCollection collection) throws InterruptedException {
    // TODO: ensure the index type is empty
    // DBCollection slurpedCollection =
    // slurpedDb.getCollection(definition.getMongoCollection());

    logger.info("MongoDBRiver is beginning initial import of " + collection.getFullName());
    Timestamp<?> startTimestamp = getCurrentOplogTimestamp();
    boolean inProgress = true;
    String lastId = null;
    while (inProgress) {
      DBCursor cursor = null;
      try {
        if (definition.isDisableIndexRefresh()) {
          updateIndexRefresh(definition.getIndexName(), -1L);
        }
        if (!definition.isMongoGridFS()) {
          logger.info("Collection {} - count: {}", collection.getName(), collection.count());
          long count = 0;
          cursor =
              collection.find(
                  getFilterForInitialImport(definition.getMongoCollectionFilter(), lastId));
          while (cursor.hasNext()) {
            DBObject object = cursor.next();
            count++;
            if (cursor.hasNext()) {
              lastId = addInsertToStream(null, applyFieldFilter(object), collection.getName());
            } else {
              logger.debug("Last entry for initial import - add timestamp: {}", startTimestamp);
              lastId =
                  addInsertToStream(startTimestamp, applyFieldFilter(object), collection.getName());
            }
          }
          inProgress = false;
          logger.info("Number documents indexed: {}", count);
        } else {
          // TODO: To be optimized.
          // https://github.com/mongodb/mongo-java-driver/pull/48#issuecomment-25241988
          // possible option: Get the object id list from .fs
          // collection
          // then call GriDFS.findOne
          GridFS grid =
              new GridFS(mongo.getDB(definition.getMongoDb()), definition.getMongoCollection());

          cursor = grid.getFileList();
          while (cursor.hasNext()) {
            DBObject object = cursor.next();
            if (object instanceof GridFSDBFile) {
              GridFSDBFile file =
                  grid.findOne(new ObjectId(object.get(MongoDBRiver.MONGODB_ID_FIELD).toString()));
              if (cursor.hasNext()) {
                lastId = addInsertToStream(null, file);
              } else {
                logger.debug("Last entry for initial import - add timestamp: {}", startTimestamp);
                lastId = addInsertToStream(startTimestamp, file);
              }
            }
          }
          inProgress = false;
        }
      } catch (MongoException.CursorNotFound e) {
        logger.info(
            "Initial import - Cursor {} has been closed. About to open a new cusor.",
            cursor.getCursorId());
        logger.debug("Total document inserted [{}]", totalDocuments.get());
      } finally {
        if (cursor != null) {
          logger.trace("Closing initial import cursor");
          cursor.close();
        }
        if (definition.isDisableIndexRefresh()) {
          updateIndexRefresh(definition.getIndexName(), TimeValue.timeValueSeconds(1));
        }
      }
    }
    return startTimestamp;
  }
Example #22
  @SuppressWarnings("rawtypes")
  public <T> MapreduceResults<T> mapReduce(
      MapreduceType type,
      Query query,
      String map,
      String reduce,
      String finalize,
      Map<String, Object> scopeFields,
      Class<T> outputType) {
    Assert.parametersNotNull("map", map);
    Assert.parameterNotEmpty(map, "map");
    Assert.parametersNotNull("reduce", reduce);
    Assert.parameterNotEmpty(reduce, "reduce");

    QueryImpl<T> qi = (QueryImpl<T>) query;

    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null) dbColl = getCollection(qi.getEntityClass());

    if (log.isTraceEnabled())
      log.trace(
          "Executing mapReduce("
              + dbColl.getName()
              + ") with query("
              + qi.toString()
              + ") map("
              + map
              + ") reduce("
              + reduce
              + ") finalize("
              + finalize
              + ") scope("
              + scopeFields
              + ")");

    // TODO replace this with the 2.4 driver impl.
    String outColl = mapr.getCollectionName(outputType);
    BasicDBObjectBuilder bldr =
        BasicDBObjectBuilder.start("mapreduce", mapr.getCollectionName(qi.getEntityClass()));

    switch (type) {
      case REDUCE:
        bldr.push("out").add("reduce", outColl).pop();
        break;
      case MERGE:
        bldr.push("out").add("merge", outColl).pop();
        break;
      case INLINE:
        bldr.push("out").add("inline", 1).pop();
        break;
      default:
        bldr.add("out", outColl);
        break;
    }

    if (qi.getOffset() != 0 || qi.getFieldsObject() != null)
      throw new QueryException(
          "mapReduce does not allow the offset/retrievedFields query options.");

    if (qi.getQueryObject() != null) bldr.add("query", qi.getQueryObject());
    if (qi.getLimit() > 0) bldr.add("limit", qi.getLimit());
    if (qi.getSortObject() != null) bldr.add("sort", qi.getSortObject());

    bldr.add("map", map);
    bldr.add("reduce", reduce);

    if (finalize != null && finalize.length() > 0) bldr.add("finalize", finalize);

    if (scopeFields != null && scopeFields.size() > 0)
      bldr.add("scope", mapr.toMongoObject(null, null, scopeFields));

    DBObject dbObj = bldr.get();
    CommandResult cr = dbColl.getDB().command(dbObj);
    cr.throwOnError();
    MapreduceResults mrRes =
        (MapreduceResults) mapr.fromDBObject(MapreduceResults.class, cr, createCache());

    QueryImpl baseQ = null;
    if (!MapreduceType.INLINE.equals(type))
      baseQ = new QueryImpl(outputType, db.getCollection(mrRes.getOutputCollectionName()), this);
    // TODO Handle inline case and create an iterator/able.

    mrRes.setBits(type, baseQ);
    return mrRes;
  }
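
The TODO in this example ("replace this with the 2.4 driver impl") points at the driver's own MapReduceCommand helper, which builds the same command document. A minimal sketch of that approach follows; the input collection, JavaScript functions, output collection and query are placeholders, and the wrapper class is hypothetical rather than part of Morphia's API.

import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MapReduceCommand;
import com.mongodb.MapReduceOutput;

public final class MapReduceSketch {

  // Runs a map/reduce through the driver helper instead of issuing the raw command.
  static MapReduceOutput runMapReduce(
      DBCollection input, String map, String reduce, String outputCollection, DBObject query) {
    MapReduceCommand command =
        new MapReduceCommand(
            input, map, reduce, outputCollection, MapReduceCommand.OutputType.REPLACE, query);
    return input.mapReduce(command);
  }
}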
Example #23
  public void execute(JobExecutionContext context) throws JobExecutionException {

    // System.out.println("AVG  called");

    // String currentUser = LoginServlet.getCurrentUser();
    System.out.println("AVG Response called");

    long now = System.currentTimeMillis();
    // System.out.println("current time 11" + now);
    System.out.println(Convertor.timeInDefaultFormat(now));

    // System.out.println("current time 11" + now);
    DBCursor alertData1 = null;
    // -1Hr
    // (60*60*1000)=3600000

    DBCollection coll2 = m.getCollection("DurationDB");
    // System.out.println("collection name:" + coll2.getName());

    BasicDBObject findObj1 = new BasicDBObject();
    alertData1 = coll2.find(findObj1);
    alertData1.sort(new BasicDBObject("_id", -1));
    alertData1.limit(1);
    List<DBObject> dbObjs2 = alertData1.toArray();
    // System.out.println(dbObjs2);

    for (int j = dbObjs2.size() - 1; j >= 0; j--) {

      DBObject txnDataObject2 = dbObjs2.get(j);
      int averagealerts = (Integer) txnDataObject2.get("Average_Alerts");
      System.out.println("DURATION OF LIVE:" + averagealerts);

      long beforetime = System.currentTimeMillis() - averagealerts * 1000;

      DBCollection coll = m.getCollection("CISResponse");

      BasicDBObject gtQuery = new BasicDBObject();

      // gtQuery.put("UAID", new BasicDBObject("$eq","app123"));

      gtQuery.put("exectime", new BasicDBObject("$gt", beforetime).append("$lt", now)); // within

      BasicDBObject gtQuery1 = new BasicDBObject("response_time", 1).append("_id", 0);

      ArrayList<Integer> list11 = new ArrayList<Integer>();

      DBCursor cursor = coll.find(gtQuery, gtQuery1);
      List<DBObject> list = cursor.toArray();
      // System.out.println("Query sss   " + cursor);
      // System.out.println("AVG LIST"+list);

      for (int i = 0; i < list.size(); i++) { // iterate over every returned document
        DBObject txnDataObject = list.get(i);
        double responseTi = Double.parseDouble(txnDataObject.get("response_time").toString());
        Integer resp = (int) responseTi;

        // System.out.println("Response time:" + resp);
        // //System.out.println("----inside loop");
        list11.add(resp);
      }
      cursor.close();
      // System.out.println("response times:" + list11);

      // find the sum of responses
      int sum = 0;

      for (int i : list11) {
        sum += i;
      }
      System.out.println("sum of responses:" + sum);

      double x; // total count
      x =
          coll.count(
              new BasicDBObject(
                  "exectime", new BasicDBObject("$gt", beforetime).append("$lt", now)));
      System.out.println("Total responses Count:" + x);

      // rounded average response time; note that x == 0 would give NaN here
      double avg = Math.round(sum / x);
      System.out.println("Average:" + avg);

      // ------------------------------------<<--FETCH--->>------------------------------------------------------------
      DBCursor alertData = null;

      try {

        // System.out.println("DB Name:" + m.getName());

        // 3.selcet the collection
        DBCollection coll1 = m.getCollection("ThresholdDB");
        System.out.println("collection name avg:" + coll1.getName());

        BasicDBObject findObj = new BasicDBObject();
        alertData = coll1.find(findObj);
        alertData.sort(new BasicDBObject("_id", -1));
        alertData.limit(1); // LIMIT-LAST 1 DATA
        List<DBObject> dbObjs = alertData.toArray();

        for (int i = dbObjs.size() - 1; i > dbObjs.size() - 2; i--) {

          DBObject txnDataObject = dbObjs.get(i);
          int alertthreshold = 1000; /*(Integer) txnDataObject.get("Web_threshold");*/
          // System.out.println("THRESHOLD-->" + alertthreshold);
          // System.out.println(avg+"_____"+alertthreshold);

          /*
           *
           * String msg =
           * "Threshold exceeded in LIVE RESPONSE between " +
           * beforetime + " and " + now + "and Average is:" + avg;
           * String email = "*****@*****.**";
           *
           * Mail mail = new Mail(); mail.mailer(msg, email);
           *
           * //System.out.println("mail sent from LIVE RESPONSE");
           */

          // 3000>2--->true
          if (avg > alertthreshold) {

            String msg = "Dear customer <br><br><br><h2 style='color:red'>Alert Message </h2>";
            msg =
                msg
                    + "Threshold exceeded in WEB LIVE RESPONSE between <b> "
                    + Convertor.timeInDefaultFormat(beforetime)
                    + "</b> and  <b> "
                    + Convertor.timeInDefaultFormat(now)
                    + " </b> and Average is: <b>"
                    + avg
                    + "  </b>Check your CIS Incident";
            msg = msg + "<br><br> <a href='http://cis.avekshaa.com/'>Click here to check</a>'";
            msg = msg + "<br><br>Thanks With Regard<br>  Team , Avekshaa Technology Pvt. Ltd";
            // String email = "*****@*****.**";
            // String email = userMail.get(currentUser);
            /*
             * String sms
             * ="Threshold exceeded in LIVE RESPONSE between " +
             * Convertor.timeInDefaultFormat(beforetime)+ " and " +
             * Convertor.timeInDefaultFormat(now) +
             * "and Average is:" + avg+"  Check your CIS Incident";
             */
            Mail mail = new Mail();
            mail.mailer(msg, email);
            // SmsAlerts.sendIncidentText(sms);
            System.out.println("MAIL Sent from Live Response");
          }
        }

      } catch (Exception e) {
        e.printStackTrace();
        // logger.error("Unexpected error",e);
      } finally {
        if (alertData != null) {
          alertData.close();
        }
      }

      // create a document and store in AVG colln

      DBCollection coll3 = m.getCollection("AVG");
      BasicDBObject document = new BasicDBObject();
      document.put("avg_response", avg);
      document.put("system_current_time", now);
      // coll3.insert(document);
      // System.out.println("stored..");

    }
    alertData1.close();
  }