@Override
public int delete(String table, String key) {
  try {
    as.delete(writePolicy, new Key(NAMESPACE, SET, key));
    return OK;
  } catch (AerospikeException e) {
    return RESULT_CODE_MAPPER.get(e.getResultCode());
  }
}
@Override
public int read(
    String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) {
  try {
    Record record;
    if (fields != null) {
      record = as.get(policy, new Key(NAMESPACE, SET, key),
          fields.toArray(new String[fields.size()]));
    } else {
      record = as.get(policy, new Key(NAMESPACE, SET, key));
    }
    // Copy the returned bins into the result map. Bins are written as byte[] (see update()),
    // so they are wrapped in ByteArrayByteIterator here; a missing record still returns OK,
    // preserving the original behavior.
    if (record != null && result != null) {
      for (Map.Entry<String, Object> entry : record.bins.entrySet()) {
        result.put(entry.getKey(), new ByteArrayByteIterator((byte[]) entry.getValue()));
      }
    }
    return OK;
  } catch (AerospikeException e) {
    return RESULT_CODE_MAPPER.get(e.getResultCode());
  }
}
@Override
public int update(String table, String key, HashMap<String, ByteIterator> values) {
  Bin[] bins = new Bin[values.size()];
  int index = 0;
  for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
    bins[index] = new Bin(entry.getKey(), entry.getValue().toArray());
    index++;
  }
  try {
    as.put(writePolicy, new Key(NAMESPACE, SET, key), bins);
    return OK;
  } catch (AerospikeException e) {
    // Map the Aerospike result code to this binding's return code,
    // consistent with read() and delete().
    return RESULT_CODE_MAPPER.get(e.getResultCode());
  }
}
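/*
 * RESULT_CODE_MAPPER is referenced above but not shown. A minimal sketch of how such a
 * mapping might be initialized, assuming a HashMap<Integer, Integer> that translates
 * Aerospike ResultCode values into this binding's return codes; the ERROR constant and the
 * chosen entries are assumptions for illustration only. Note that get() returns null for an
 * unmapped code, so every code that can surface here needs an entry (or a default lookup).
 */
private static final Map<Integer, Integer> RESULT_CODE_MAPPER = new HashMap<Integer, Integer>();

static {
  RESULT_CODE_MAPPER.put(ResultCode.OK, OK);
  RESULT_CODE_MAPPER.put(ResultCode.TIMEOUT, ERROR);
  RESULT_CODE_MAPPER.put(ResultCode.KEY_NOT_FOUND_ERROR, ERROR);
  RESULT_CODE_MAPPER.put(ResultCode.KEY_EXISTS_ERROR, ERROR);
  RESULT_CODE_MAPPER.put(ResultCode.SERVER_ERROR, ERROR);
}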
private boolean processLine() {
  log.debug("processing File:line " + Utils.getFileName(fileName) + ":" + this.lineNumber);
  bins = new ArrayList<Bin>();
  boolean lineProcessed = false;
  long errorTotal = 0;
  try {
    if (columns.size() != counters.write.colTotal) {
      if (columns.size() < counters.write.colTotal) {
        log.error(
            "File:"
                + Utils.getFileName(this.fileName)
                + " Line:"
                + lineNumber
                + " Column count mismatch: the data file has fewer columns than the config file.");
      } else {
        throw new ParseException(lineNumber);
      }
    }

    // Retrieve the set name first.
    for (ColumnDefinition metadataColumn : this.metadataMapping) {
      if (metadataColumn.staticValue
          && metadataColumn.getBinNameHeader().equalsIgnoreCase(Constants.SET)) {
        this.set = metadataColumn.binValueHeader;
      } else {
        String metadataRawText = this.columns.get(metadataColumn.getBinValuePos());
        if (metadataColumn.getBinNameHeader().equalsIgnoreCase(Constants.SET)) {
          if (this.set == null) {
            this.set = metadataRawText;
          }
        }
      }
    }

    // Use the set name to create the key.
    for (ColumnDefinition metadataColumn : this.metadataMapping) {
      if (metadataColumn.getBinNameHeader().equalsIgnoreCase(Constants.KEY)) {
        String metadataRawText = this.columns.get(metadataColumn.getBinValuePos());
        if (metadataColumn.getSrcType() == SrcColumnType.INTEGER) {
          Long integer = Long.parseLong(metadataRawText);
          this.key = new Key(this.nameSpace, this.set, integer);
        } else {
          this.key = new Key(this.nameSpace, this.set, metadataRawText);
        }
      }
    }

    for (ColumnDefinition binColumn : this.binMapping) {
      Bin bin = null;

      if (!binColumn.staticName) {
        binColumn.binNameHeader = this.columns.get(binColumn.binNamePos);
      }

      if (!binColumn.staticValue) {
        String binRawText = null;
        if (binColumn.binValueHeader != null
            && binColumn.binValueHeader.toLowerCase().equals(Constants.SYSTEM_TIME)) {
          SimpleDateFormat sdf = new SimpleDateFormat(binColumn.getEncoding()); // e.g. dd/MM/yyyy
          Date now = new Date();
          binRawText = sdf.format(now);
        } else {
          binRawText = this.columns.get(binColumn.getBinValuePos());
        }

        if (binRawText.equals("")) {
          continue;
        }

        switch (binColumn.getSrcType()) {
          case INTEGER:
            // The server stores all integer data as 64-bit, so parse it as a long.
            Long integer;
            try {
              integer = Long.parseLong(binRawText);
              bin = new Bin(binColumn.getBinNameHeader(), integer);
            } catch (Exception pi) {
              log.error(
                  "File:"
                      + Utils.getFileName(this.fileName)
                      + " Line:"
                      + lineNumber
                      + " Integer/Long Parse Error:"
                      + pi);
            }
            break;
          case FLOAT:
            // Floating-point data is stored as an 8-byte byte array.
            try {
              float binfloat = Float.parseFloat(binRawText);
              byte[] byteFloat = ByteBuffer.allocate(8).putFloat(binfloat).array();
              bin = new Bin(binColumn.getBinNameHeader(), byteFloat);
            } catch (Exception e) {
              log.error(
                  "File:"
                      + Utils.getFileName(this.fileName)
                      + " Line:"
                      + lineNumber
                      + " Floating number Parse Error:"
                      + e);
            }
            break;
          case STRING:
            bin = new Bin(binColumn.getBinNameHeader(), binRawText);
            break;
          case BLOB:
            if (binColumn.getDstType().equals(DstColumnType.BLOB)) {
              if (binColumn.encoding.equalsIgnoreCase(Constants.HEX_ENCODING)) {
                bin = new Bin(binColumn.getBinNameHeader(), this.toByteArray(binRawText)); // TODO
              }
            }
            break;
          case LIST:
            /*
             * Assumptions:
             * 1. Items are separated by a comma ','
             * 2. Each item value is a string
             * 3. The list is enclosed in double quotes
             *
             * No support for nested maps or nested lists.
             */
            List<String> list = new ArrayList<String>();
            String[] listValues = binRawText.split(Constants.LIST_DELEMITER, -1);
            if (listValues.length > 0) {
              for (String value : listValues) {
                list.add(value.trim());
              }
              bin = Bin.asList(binColumn.getBinNameHeader(), list);
            } else {
              bin = null;
              log.error("Error: Cannot parse to a list: " + binRawText);
            }
            break;
          case MAP:
            /*
             * Assumptions:
             * 1. Entries are separated by a comma ','
             * 2. Name/value pairs are separated by a colon ':'
             * 3. Map keys are strings
             * 4. Map values are strings
             * 5. The map is enclosed in double quotes
             *
             * No support for nested maps or nested lists.
             */
            Map<String, Object> map = new HashMap<String, Object>();
            String[] mapValues = binRawText.split(Constants.MAP_DELEMITER, -1);
            if (mapValues.length > 0) {
              for (String value : mapValues) {
                String[] kv = value.split(Constants.MAPKEY_DELEMITER);
                if (kv.length != 2) {
                  log.error("Error: Cannot parse map <k,v> from: " + value);
                } else {
                  map.put(kv[0].trim(), kv[1].trim());
                }
              }
              log.debug(map.toString());
              bin = Bin.asMap(binColumn.getBinNameHeader(), map);
            } else {
              bin = null;
              log.error("Error: Cannot parse to a map: " + binRawText);
            }
            break;
          case JSON:
            try {
              log.debug(binRawText);
              if (jsonParser == null) {
                jsonParser = new JSONParser();
              }
              Object obj = jsonParser.parse(binRawText);
              if (obj instanceof JSONArray) {
                JSONArray jsonArray = (JSONArray) obj;
                bin = Bin.asList(binColumn.getBinNameHeader(), jsonArray);
              } else {
                JSONObject jsonObj = (JSONObject) obj;
                bin = Bin.asMap(binColumn.getBinNameHeader(), jsonObj);
              }
            } catch (ParseException e) {
              log.error("Failed to parse JSON", e);
            }
            break;
          case TIMESTAMP:
            if (binColumn.getDstType().equals(DstColumnType.INTEGER)) {
              DateFormat format = new SimpleDateFormat(binColumn.getEncoding());
              try {
                Date formatDate = format.parse(binRawText);
                long miliSecondForDate = formatDate.getTime() - timeZoneOffset;
                if (binColumn.getEncoding().contains(".SSS")
                    && binColumn.binValueHeader.toLowerCase().equals(Constants.SYSTEM_TIME)) {
                  // Keep the value in milliseconds; no conversion to seconds is needed.
                } else {
                  miliSecondForDate = miliSecondForDate / 1000;
                }
                bin = new Bin(binColumn.getBinNameHeader(), miliSecondForDate);
                log.trace("Date format:" + binRawText + " in seconds:" + miliSecondForDate);
              } catch (java.text.ParseException e) {
                e.printStackTrace();
              }
            } else if (binColumn.getDstType().equals(DstColumnType.STRING)) {
              bin = new Bin(binColumn.getBinNameHeader(), binRawText);
            }
            break;
          default:
        }
      } else {
        bin = new Bin(binColumn.getBinNameHeader(), binColumn.getBinValueHeader());
      }

      if (bin != null) {
        bins.add(bin);
      }
    }

    lineProcessed = true;
    log.trace(
        "Formed key and bins for line "
            + lineNumber
            + " Key: "
            + this.key.userKey
            + " Bins:"
            + this.bins.toString());
  } catch (AerospikeException ae) {
    log.error(
        "File:"
            + Utils.getFileName(this.fileName)
            + " Line:"
            + lineNumber
            + " Aerospike Bin processing Error:"
            + ae.getResultCode());
    if (log.isDebugEnabled()) {
      ae.printStackTrace();
    }
    counters.write.processingErrors.getAndIncrement();
    counters.write.recordProcessed.addAndGet(this.lineSize);
    errorTotal =
        (counters.write.readErrors.get()
            + counters.write.writeErrors.get()
            + counters.write.processingErrors.get());
    if (this.abortErrorCount != 0 && this.abortErrorCount < errorTotal) {
      System.exit(-1);
    }
  } catch (ParseException pe) {
    log.error(
        "File:"
            + Utils.getFileName(this.fileName)
            + " Line:"
            + lineNumber
            + " Parsing Error:"
            + pe);
    if (log.isDebugEnabled()) {
      pe.printStackTrace();
    }
    counters.write.processingErrors.getAndIncrement();
    counters.write.recordProcessed.addAndGet(this.lineSize);
    errorTotal =
        (counters.write.readErrors.get()
            + counters.write.writeErrors.get()
            + counters.write.processingErrors.get());
    if (this.abortErrorCount != 0 && this.abortErrorCount < errorTotal) {
      System.exit(-1);
    }
  } catch (Exception e) {
    log.error(
        "File:"
            + Utils.getFileName(this.fileName)
            + " Line:"
            + lineNumber
            + " Unknown Error:"
            + e);
    if (log.isDebugEnabled()) {
      e.printStackTrace();
    }
    counters.write.processingErrors.getAndIncrement();
    counters.write.recordProcessed.addAndGet(this.lineSize);
    errorTotal =
        (counters.write.readErrors.get()
            + counters.write.writeErrors.get()
            + counters.write.processingErrors.get());
    if (this.abortErrorCount != 0 && this.abortErrorCount < errorTotal) {
      System.exit(-1);
    }
  }
  return lineProcessed;
}
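/*
 * A small, self-contained illustration of the LIST/MAP input convention described in the
 * comments inside processLine(), assuming "," separates items/entries and ":" separates a
 * map key from its value. The sample input strings and delimiter literals here are
 * assumptions for illustration; the loader itself reads the delimiters from
 * Constants.LIST_DELEMITER, Constants.MAP_DELEMITER and Constants.MAPKEY_DELEMITER.
 */
String listRawText = "red,green,blue";
List<String> sampleList = new ArrayList<String>();
for (String item : listRawText.split(",", -1)) {
  sampleList.add(item.trim()); // -> [red, green, blue]
}

String mapRawText = "city:Austin,state:TX";
Map<String, Object> sampleMap = new HashMap<String, Object>();
for (String entry : mapRawText.split(",", -1)) {
  String[] kv = entry.split(":");
  if (kv.length == 2) {
    sampleMap.put(kv[0].trim(), kv[1].trim()); // -> {city=Austin, state=TX}
  }
}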
/**
 * Writes a record to the Aerospike cluster.
 */
private int writeToAs() {
  int value = 0;
  long errorTotal = 0;
  if (this.client != null) {
    try {
      if (!bins.isEmpty()) {
        this.client.put(this.writePolicy, this.key, this.bins.toArray(new Bin[bins.size()]));
        counters.write.recordProcessed.addAndGet(this.lineSize);
        counters.write.writeCount.getAndIncrement();
        log.trace("Wrote line " + lineNumber + " Key: " + this.key.userKey + " to Aerospike");
        value = 1;
      } else {
        log.trace("No bins to insert");
      }
    } catch (AerospikeException ae) {
      log.error(
          "File:"
              + Utils.getFileName(this.fileName)
              + " Line:"
              + lineNumber
              + " Aerospike Write Error:"
              + ae.getResultCode());
      if (log.isDebugEnabled()) {
        ae.printStackTrace();
      }
      counters.write.recordProcessed.addAndGet(this.lineSize);
      counters.write.writeErrors.getAndIncrement();
      errorTotal =
          (counters.write.readErrors.get()
              + counters.write.writeErrors.get()
              + counters.write.processingErrors.get());
      switch (ae.getResultCode()) {
        case ResultCode.TIMEOUT:
          counters.write.writeTimeouts.getAndIncrement();
          break;
        case ResultCode.KEY_EXISTS_ERROR:
          counters.write.writeKeyExists.getAndIncrement();
          break;
        default:
      }
      if (this.abortErrorCount != 0 && this.abortErrorCount < errorTotal) {
        System.exit(-1);
      }
    } catch (Exception e) {
      log.error(
          "File:"
              + Utils.getFileName(this.fileName)
              + " Line:"
              + lineNumber
              + " Write Error:"
              + e);
      if (log.isDebugEnabled()) {
        e.printStackTrace();
      }
      counters.write.recordProcessed.addAndGet(this.lineSize);
      counters.write.writeErrors.getAndIncrement();
      errorTotal =
          (counters.write.readErrors.get()
              + counters.write.writeErrors.get()
              + counters.write.processingErrors.get());
      if (this.abortErrorCount != 0 && this.abortErrorCount < errorTotal) {
        System.exit(-1);
      }
    }
  }
  return value;
}
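/*
 * The writePolicy used by writeToAs() is configured elsewhere. A minimal sketch of a typical
 * setup, assuming the 3.x-era Aerospike Java client that this code appears to target
 * (Bin.asList/Bin.asMap above come from that client generation). The specific values are
 * assumptions for illustration, and newer clients rename timeout to socketTimeout/totalTimeout.
 */
WritePolicy writePolicy = new WritePolicy();
writePolicy.recordExistsAction = RecordExistsAction.UPDATE; // upsert: create or update the record
writePolicy.timeout = 50;      // transaction timeout in milliseconds (0 = no timeout)
writePolicy.maxRetries = 2;    // retry transient failures
writePolicy.expiration = 0;    // record TTL; 0 = use the namespace default
writePolicy.sendKey = true;    // store the user key so key.userKey round-trips on reads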
/**
 * Do the main operation in emulate mode:
 * (1) Write a new URL Site Visit Record to the base DB.
 * (2) Check to see if the corresponding User Record is in the Segmented Cache.
 *     -- If not, write a new User Record with the CACHE TTL.
 *     -- Update the User Record with a new (full) LDT load (scan the existing LDT in the
 *        base DB and write it to the cache LDT).
 * (3) Probe the base DB to see if the LDT data is consistent.
 *
 * @param opNum
 */
private void doOperation(int opNum, ILdtOperations ldtOps) {
  final String meth = "doOperation()";
  boolean recordPresent = false;
  String baseSet;
  String cacheSet;
  try {
    int customerSeed = random.nextInt(this.customerMax);
    CustomerRecord custRec = new CustomerRecord(console, customerSeed);

    long userSeed = getUserRecordSeed(this.userMax);
    UserRecord userRec = new UserRecord(console, dbOps, custRec.getCustomerID(), (int) userSeed);

    SiteVisitEntry sve =
        new SiteVisitEntry(
            console, custRec.getCustomerID(), userRec.getUserID(), opNum, LDT_BIN, this.timeToLive);

    baseSet = userRec.getCustomerBaseSet();
    cacheSet = userRec.getCustomerCacheSet();

    // Write the Site Visit to storage -- which is hidden behind this interface because
    // there can be multiple implementations of the LDT.
    sve.toStorage(client, baseNamespace, baseSet, ldtOps);

    // Check to see if the UserRecord is in the Segment Cache. If it is, then add to the
    // cache LDT. If it is not, then create a new User Record in the segment and populate
    // the LDT info (the Site Visit data) with the LDT data from the DB User Record.
    recordPresent = userRec.updateCache(client, cacheNamespace);
    if (recordPresent) {
      sve.toStorage(client, cacheNamespace, cacheSet, ldtOps);
    } else {
      sve.reloadCache(client, baseNamespace, cacheNamespace, ldtOps);
    }

    String keyStr = userRec.getUserID();

    // At predetermined milestones, perform various actions.
    // Show simple status at regular intervals. For the regular large-scale tests with
    // 100 threads, we won't hit this very often.
    if ((opNum + threadNumber) % 1000 == 0) {
      console.debug(
          "<%s:%s> Thread(%d) Cust#(%d) BaseSet(%s) User#(%d) UserID(%s) Iteration(%d)",
          CLASSNAME, meth, threadNumber, customerSeed, baseSet, userSeed, keyStr, opNum);
    }

    // Do a heavy-duty scan less frequently.
    if ((opNum + threadNumber) % 2000 == 0) {
      Key baseKey = new Key(baseNamespace, baseSet, keyStr);
      Key cacheKey = new Key(cacheNamespace, cacheSet, keyStr);
      console.debug(
          "<%s:%s> <<SCAN TEST>> Thread(%d) Cust#(%d) BaseSet(%s) CacheSet(%s) User#(%d) UserID(%s) Iteration(%d)",
          CLASSNAME, meth, threadNumber, customerSeed, baseSet, cacheSet, userSeed, keyStr, opNum);

      List<Map<String, Object>> baseResult = null;
      List<Map<String, Object>> cacheResult = null;
      int baseResultSize = 0;
      int baseCheckSize = 0;
      int cacheResultSize = 0;
      int cacheCheckSize = 0;
      try {
        baseResult = ldtOps.scanLDT(baseKey);
        cacheResult = ldtOps.scanLDT(cacheKey);
        cacheCheckSize = ldtOps.ldtSize(cacheKey, LDT_BIN);
        if (baseResult != null) {
          baseResultSize = baseResult.size();
          baseCheckSize = ldtOps.ldtSize(baseKey, LDT_BIN);
          if (baseResultSize != baseCheckSize) {
            console.error(
                "<%s:%s> <<BASE SCAN Size Error>> Thread(%d) Cust#(%d) BaseSet(%s) UserID(%s) ScanSize(%d) LDT Size(%d)",
                CLASSNAME, meth, threadNumber, customerSeed, baseSet, keyStr,
                baseResultSize, baseCheckSize);
          }
        }
        if (cacheResult != null) {
          cacheResultSize = cacheResult.size();
          cacheCheckSize = ldtOps.ldtSize(cacheKey, LDT_BIN);
          if (cacheResultSize != cacheCheckSize) {
            console.error(
                "<%s:%s> <<CACHE SCAN Size Error>> Thread(%d) Cust#(%d) CacheSet(%s) UserID(%s) ScanSize(%d) LDT Size(%d)",
                CLASSNAME, meth, threadNumber, customerSeed, cacheSet, keyStr,
                cacheResultSize, cacheCheckSize);
          }
        }
      } catch (AerospikeException ae) {
        console.error(
            "Aerospike Error Code(%d) Error Message(%s)", ae.getResultCode(), ae.getMessage());
        console.info("Keep on Truckin");
      }
      console.debug(
          "<%s:%s> <<SCAN RESULT>> Thread(%d) Cust#(%d) BaseSet(%s) CacheSet(%s) UserID(%s) BaseLDT(%d) CacheLDT(%d)",
          CLASSNAME, meth, threadNumber, customerSeed, baseSet, cacheSet, keyStr,
          baseResultSize, cacheResultSize);
    }
  } catch (AerospikeException ae) {
    console.error(
        "Aerospike Error Code(%d) Error Message(%s)", ae.getResultCode(), ae.getMessage());
  } catch (Exception e) {
    e.printStackTrace();
    console.error(
        "[%s] Problem with Thread(%d) Customer Record: Seed(%d)",
        "Emulate: doOperation(): ", threadNumber, opNum);
  }
}
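/*
 * ILdtOperations is referenced throughout doOperation() but not defined in this snippet.
 * A minimal sketch of the contract implied by the calls above -- scanLDT() and ldtSize()
 * are the names used in this code, while the javadoc text and everything omitted are
 * assumptions; real implementations (one per LDT flavor) may declare more operations.
 * Callers wrap these calls in catch (AerospikeException), which is unchecked, so no
 * throws clause is required.
 */
public interface ILdtOperations {

  /** Scan the full contents of the LDT bin on the record identified by key. */
  List<Map<String, Object>> scanLDT(Key key);

  /** Return the element count of the named LDT bin on the record identified by key. */
  int ldtSize(Key key, String ldtBinName);
}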