Example #1
  @SuppressWarnings("unchecked")
  private Map<BigInteger, Order> openMap(DB database) {
    // open (or create) the primary orders map
    BTreeMap<BigInteger, Order> map =
        database.createTreeMap("orders").valueSerializer(new OrderSerializer()).makeOrGet();

    // secondary index map keyed by (have, want, price, primary key)
    this.haveWantKeyMap =
        database.createTreeMap("orders_key_have_want").comparator(Fun.COMPARATOR).makeOrGet();

    // keep the secondary index in sync with the primary map via Bind.secondaryKey
    Bind.secondaryKey(
        map,
        this.haveWantKeyMap,
        new Fun.Function2<Tuple4<Long, Long, BigDecimal, BigInteger>, BigInteger, Order>() {
          @Override
          public Tuple4<Long, Long, BigDecimal, BigInteger> run(BigInteger key, Order value) {
            return new Tuple4<Long, Long, BigDecimal, BigInteger>(
                value.getHave(), value.getWant(), value.getPrice(), key);
          }
        });

    // RETURN
    return map;
  }
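A hedged usage sketch of the index built above, as it might appear elsewhere in the same class (assumptions: database is the DB instance passed to openMap, and the haveWantKeyMap field is declared as BTreeMap<Tuple4<Long, Long, BigDecimal, BigInteger>, BigInteger>):

  // Bind.secondaryKey keeps haveWantKeyMap in sync with the primary map, so iterating it
  // yields primary keys sorted by (have, want, price, primary key).
  Map<BigInteger, Order> orders = openMap(database);
  for (Map.Entry<Tuple4<Long, Long, BigDecimal, BigInteger>, BigInteger> entry :
      haveWantKeyMap.entrySet()) {
    Order order = orders.get(entry.getValue());
    // process orders grouped by have/want and ordered by price ...
  }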
  private SensorMeta getSensorMeta(
      StationMeta stationMeta, String sensorName, boolean createIfNotExists) {
    throwNull(stationMeta);
    throwNull(sensorName);
    BTreeMap<String, SensorMeta> sensorMap = db.getTreeMap(stationMeta.db_name_sensor_map);
    SensorMeta sensorMeta = sensorMap.get(sensorName);
    if (sensorMeta == null && createIfNotExists) {
      sensorMeta = new SensorMeta(stationMeta.stationName, sensorName);
      db.checkNameNotExists(sensorMeta.db_name_sensor_chunk_map);
      db.createTreeMap(sensorMeta.db_name_sensor_chunk_map)
          .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_INT)
          // .valueSerializer(Chunk.DELTA_TIME_DELTA_DELTA_VALUE_INT_QUANTIZED_SERIALIZER)
          // .valueSerializer(Chunk.SNAPPY_DELTA_TIME_DELTA_DELTA_VALUE_INT_QUANTIZED_SERIALIZER)
          .valueSerializer(ChunkSerializer.DEFAULT)
          // .valuesOutsideNodesEnable() // !!! does not work: growing database
          .makeOrGet();
      db.checkNameNotExists(sensorMeta.db_name_sensor_chunkmeta_map);
      db.createTreeMap(sensorMeta.db_name_sensor_chunkmeta_map)
          .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_INT)
          .valueSerializer(ChunkMeta.SERIALIZER)
          .makeOrGet();
      sensorMap.put(sensorName, sensorMeta);
    }
    if (sensorMeta == null) {
      // new Throwable().printStackTrace();
      log.warn("no sensor: " + sensorName + "  in station: " + stationMeta.stationName);
    }
    return sensorMeta;
  }
  private StationMeta getStationMeta(String stationName, boolean createIfNotExists) {
    throwNull(stationName);
    StationMeta stationMeta = stationMetaMap.get(stationName);
    if (stationMeta == null && createIfNotExists) {
      stationMeta = new StationMeta(stationName);

      db.checkNameNotExists(stationMeta.db_name_sensor_map);
      db.createTreeMap(stationMeta.db_name_sensor_map)
          .keySerializer(BTreeKeySerializer.STRING)
          .valueSerializer(SensorMeta.SERIALIZER)
          .makeOrGet();

      db.checkNameNotExists(stationMeta.db_name_sensor_time_series_mask_map);
      db.createTreeMap(stationMeta.db_name_sensor_time_series_mask_map)
          .keySerializer(BTreeKeySerializer.STRING)
          .valueSerializer(TimeSeriesMask.SERIALIZER)
          .makeOrGet();

      stationMetaMap.put(stationName, stationMeta);
    }
    if (stationMeta == null) {
      // new Throwable().printStackTrace();
      log.warn("no station: " + stationName);
    }
    return stationMeta;
  }
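A hedged sketch of how the two lookups above are typically combined to reach a sensor's chunk map (the station and sensor names are hypothetical, and the Chunk value type is inferred from the commented-out Chunk serializers):

  // Resolve (or create) the per-sensor chunk map; keys are chunk indices (ints).
  StationMeta stationMeta = getStationMeta("station1", true); // hypothetical station name
  SensorMeta sensorMeta = getSensorMeta(stationMeta, "Ta_200", true); // hypothetical sensor name
  BTreeMap<Integer, Chunk> chunkMap = db.getTreeMap(sensorMeta.db_name_sensor_chunk_map);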
Example #4
  @Test
  public void test() {
    DB db =
        DBMaker.newTempFileDB()
            .mmapFileEnableIfSupported()
            .compressionEnable()
            .transactionDisable()
            .checksumEnable()
            .commitFileSyncDisable()
            .make();
    Iterator<Fun.Pair<Long, String>> newIterator =
        new Iterator<Fun.Pair<Long, String>>() {
          private AtomicLong value = new AtomicLong(10000000);

          @Override
          public boolean hasNext() {
            return value.get() > 0;
          }

          @Override
          public Fun.Pair<Long, String> next() {
            Long v = value.decrementAndGet();
            return new Fun.Pair<Long, String>(v, v.toString());
          }

          @Override
          public void remove() {}
        };
    BTreeMap<Long, String> cubeData = db.createTreeMap("data").pumpSource(newIterator).make();
  }
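The pump requires its source to supply keys in descending order, which is why the iterator counts the AtomicLong down. A few hedged sanity checks that could be appended before the test returns (the assertions are assumptions, not part of the original test, and assume JUnit's assertEquals):

    // One entry per generated pair, keys 0 .. 9_999_999 (values are the keys as strings).
    assertEquals(10_000_000L, cubeData.sizeLong());
    assertEquals("0", cubeData.firstEntry().getValue());
    assertEquals("9999999", cubeData.lastEntry().getValue());
    db.close(); // release the temporary file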
  public StreamDB(String streamdbPathPrefix) {
    throwNull(streamdbPathPrefix);
    String pathName = streamdbPathPrefix;

    try {
      File dir = new File(streamdbPathPrefix);
      dir.getParentFile().mkdirs();
    } catch (Exception e) {
      log.error(e);
    }

    db =
        DBMaker.newFileDB(new File(pathName))
            // .checksumEnable()
            // .compressionEnable() //in new db disabled!
            // .transactionDisable()
            // .mmapFileEnable() //slow commit and close!!!
            .mmapFileEnablePartial()
            .asyncWriteEnable()
            .asyncWriteFlushDelay(500)
            .cacheWeakRefEnable()
            .cacheSize(1000000)
            .closeOnJvmShutdown()
            .make();

    stationMetaMap =
        db.createTreeMap(DB_NAME_STATION_MAP)
            .keySerializer(BTreeKeySerializer.STRING)
            .valueSerializer(StationMeta.SERIALIZER)
            .makeOrGet();
  }
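Because asyncWriteEnable() buffers writes (flushed every 500 ms here) and transactions are left enabled, callers would normally commit and close the store explicitly when done; a minimal sketch of such a method (close() is an assumed addition, not shown in the snippet):

  // Hedged sketch: flush pending writes and shut the store down cleanly.
  public void close() {
    db.commit(); // persist buffered/async writes
    db.close();
  }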
Example #6
  public static void main(String[] args) throws IOException {

    DB db = DBMaker.newMemoryDB().make();
    //
    // TreeMap has built-in support for lazily loaded values.
    // In that case each value is not stored inside the node,
    // but in a separate record.
    //
    // use DB.createTreeMap to create a TreeMap with non-default parameters

    Map map = db.createTreeMap("name").valuesOutsideNodesEnable().make();
    map.put("key", "this string is loaded lazily with 'map.get(key)' ");

    //
    // Another option for a lazily loaded record is to use Atomic.Var.
    // In this case you have a named singleton record.
    // As a bonus, you can update the reference in a thread-safe, atomic manner.
    //
    Atomic.Var<String> record = db.createAtomicVar("lazyRecord", "aaa", db.getDefaultSerializer());

    record.set("some value");
    System.out.println(record.get());

    // The last option is to use the low-level Engine storage directly.
    // Each stored record is assigned a unique recid (record id),
    // which is later used to get or update the record.
    // Your code should store only the recid as a reference to the object.
    // All MapDB collections are written this way.

    // insert new record
    long recid = db.getEngine().put("something", SerializerBase.STRING_NOSIZE);

    // load record
    String lazyString = db.getEngine().get(recid, SerializerBase.STRING_NOSIZE);

    // update record
    db.getEngine().update(recid, "new value", SerializerBase.STRING_NOSIZE);

    // I hope this example helped!
    db.close();
  }
Example #7
  public static void main(String[] args) throws IOException {

    /** max number of elements to import */
    final long max = (int) 1e6;

    /** Open database in a temporary file */
    File dbFile = File.createTempFile("mapdb", "temp");
    DB db =
        DBMaker.newFileDB(dbFile)
            /** disabling Write Ahead Log makes import much faster */
            .transactionDisable()
            .make();

    long time = System.currentTimeMillis();

    /**
     * A source of data which randomly generates strings. In the real world this would return data
     * from a file.
     */
    Iterator<String> source =
        new Iterator<String>() {

          long counter = 0;

          @Override
          public boolean hasNext() {
            return counter < max;
          }

          @Override
          public String next() {
            counter++;
            return randomString(10);
          }

          @Override
          public void remove() {}
        };

    /**
     * The BTreeMap Data Pump requires the data source to be presorted in reverse order (highest to
     * lowest). There is a method in the Data Pump we can use to sort the data. It uses temporary
     * files and can handle fairly large data sets.
     */
    source =
        Pump.sort(
            source,
            true,
            100000,
            Collections.reverseOrder(BTreeMap.COMPARABLE_COMPARATOR), // reverse-order comparator
            db.getDefaultSerializer());

    /**
     * Disk space used by serialized keys should be minimised. Keys are sorted, so only the
     * difference between consecutive keys is stored. This method is called delta-packing and
     * typically saves 60% of disk space.
     */
    BTreeKeySerializer<String> keySerializer = BTreeKeySerializer.STRING;

    /** Translates Map Key into Map Value. */
    Fun.Function1<Integer, String> valueExtractor =
        new Fun.Function1<Integer, String>() {
          @Override
          public Integer run(String s) {
            return s.hashCode();
          }
        };

    /** Create BTreeMap and fill it with data */
    Map<String, Integer> map =
        db.createTreeMap("map")
            .pumpSource(source, valueExtractor)
            // .pumpPresort(100000) // for presorting data we could also use this method
            .keySerializer(keySerializer)
            .make();

    System.out.println(
        "Finished; total time: "
            + (System.currentTimeMillis() - time) / 1000
            + "s; there are "
            + map.size()
            + " items in map");
    db.close();
  }
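The example calls a randomString helper that is not included in the snippet; a minimal stand-in could look like this (any random string generator will do; assumes java.util.Random is imported):

  private static final Random RANDOM = new Random();

  /** Minimal stand-in: returns a random lower-case string of the given length. */
  public static String randomString(int length) {
    StringBuilder sb = new StringBuilder(length);
    for (int i = 0; i < length; i++) {
      sb.append((char) ('a' + RANDOM.nextInt(26)));
    }
    return sb.toString();
  }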