/** * Before downloading a file for map icons (see "retrieveMapImageForIcon" below), first remove any * existing .img and .img.* files * * <p>e.g. delete all that start with (DataFile name) + ".img" * * @param mapLayerMetadata * @return * @throws IOException */ private boolean deleteOlderMapThumbnails(MapLayerMetadata mapLayerMetadata) { if (mapLayerMetadata == null) { logger.warning("mapLayerMetadata is null"); return false; } // Retrieve the data file // DataFile df = mapLayerMetadata.getDataFile(); try { DataFileIO dataAccess = df.getAccessObject(); if (dataAccess == null || !dataAccess.isLocalFile()) { return false; } // Get the parent directory // Path fileDirname = dataAccess.getFileSystemPath().getParent(); if (fileDirname == null) { logger.warning( "DataFile directory has null path. Directory path: " + dataAccess.getFileSystemPath().toString()); return false; } // Verify that the directory exists // File fileDirectory = new File(fileDirname.normalize().toString()); if (!(fileDirectory.isDirectory())) { logger.warning( "DataFile directory is not actuall a directory. Directory path: " + fileDirectory.toString()); return false; } /* Iterate through directory and delete any ".img" files for this DataFile Example: Datafile name: 14a5e4abf7d-e7eebfb6474d Types of files that would be deleted (if they exist): 14a5e4abf7d-e7eebfb6474d.img 14a5e4abf7d-e7eebfb6474d.img.thumb64 14a5e4abf7d-e7eebfb6474d.img.thumb400 */ String iconBaseFilename = dataAccess.getFileSystemPath().toString() + ".img"; for (File singleFile : fileDirectory.listFiles()) { if (singleFile.toString().startsWith(iconBaseFilename)) { // logger.info("file found: " + singleFile.toString()); singleFile.delete(); // results.add(file.getName()); } } } catch (IOException ioEx) { return false; } return true; }
/**
 * Hand-writes a two-key file image to disk (each key's bytes followed by a '\0' terminator and a
 * 4-byte value offset, then the value bytes) and checks that a DataFile constructed over it lists
 * exactly those two keys.
 */
@Test
public void testReadingExistentFileByCallingList() throws IOException, DatabaseCorruptedException {
  filePath.getParent().toFile().mkdir();
  try (DataOutputStream file = new DataOutputStream(new FileOutputStream(filePath.toString()))) {
    file.write(correctKey1.getBytes(encoding));
    file.write('\0');
    // Offset of the first value: size of both key records (keys + 2 terminators + 2 offset ints).
    file.writeInt(correctKey1.length() + correctKey2.length() + 2 + offsetLength * 2);
    file.write(correctKey2.getBytes(encoding));
    file.write('\0');
    // NOTE(review): 'correctKey2.length() + correctKey2.length()' looks like a typo for
    // correctKey1.length() + correctKey2.length(); it only yields the right offset if both
    // keys have the same length -- confirm against the fixture values.
    file.writeInt(
        correctKey2.length() + correctKey2.length() + 2 + offsetLength * 2 + testStringValue.length());
    file.write(testStringValue.getBytes(encoding));
    file.write(testStringValue.getBytes(encoding));
  }
  DataFile test = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  Set<String> expectedKeySet = new HashSet<>();
  expectedKeySet.add(correctKey1);
  expectedKeySet.add(correctKey2);
  assertEquals(expectedKeySet, test.list());
}
/** Removing a key that was just stored must hand back the stored value. */
@Test
public void testRemoveCalledForCorrectExistentKey() throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  dataFile.put(correctKey1, testStoreableValue);
  assertEquals(testStoreableValue, dataFile.remove(correctKey1));
}
/** size() must track puts and removes: one entry after a put, zero after removing it. */
@Test
public void testSizeMethod() throws IOException, DatabaseCorruptedException {
  DataFile test = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  test.put(correctKey1, testStoreableValue);
  // JUnit convention puts the expected value first; the original had the arguments
  // reversed, which produces misleading failure messages.
  assertEquals(1, test.size());
  test.remove(correctKey1);
  assertEquals(0, test.size());
}
/**
 * Appends a follow-up write to an already-open batch: stamps the record's location with the
 * batch's data file id and the current end-of-batch offset, then grows the batch, file and
 * journal length counters before queueing the record.
 */
public void doAppendBatch(WriteCommand writeRecord) throws IOException {
  // The record lands in this batch's data file, right after everything queued so far.
  writeRecord.location.setDataFileId(dataFile.getDataFileId());
  writeRecord.location.setOffset(offset + size);
  // Grow the accumulated batch size and the bookkeeping lengths by this record's size.
  size += writeRecord.location.getSize();
  dataFile.incrementLength(writeRecord.location.getSize());
  journal.addToTotalLength(writeRecord.location.getSize());
  writes.offer(writeRecord);
}
/** Committing a file whose only entry was removed must persist an empty data file. */
@Test
public void testSavingEmptyDataFileToDisk() throws IOException, DatabaseCorruptedException {
  DataFile written = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  written.put(correctKey1, testStoreableValue);
  written.remove(correctKey1);
  written.commit();
  // Re-open from disk and confirm nothing survived the put/remove pair.
  DataFile reloaded = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  assertTrue(reloaded.list().isEmpty());
}
/**
 * Adds a new DataFile object to this document.
 *
 * @param df DataFile object to add
 * @throws SignedDocException if the document already contains signatures (data files cannot be
 *     modified once signed)
 */
public void addDataFile(DataFile df) throws SignedDocException {
  if (countSignatures() > 0) {
    throw new SignedDocException(
        SignedDocException.ERR_SIGATURES_EXIST, "Cannot add DataFiles when signatures exist!", null);
  }
  // Lazily create the backing list on first use.
  if (m_dataFiles == null) {
    m_dataFiles = new ArrayList<DataFile>();
  }
  // Assign the next free id ("D0", "D1", ...) when the caller did not set one.
  if (df.getId() == null) {
    df.setId(getNewDataFileId());
  }
  m_dataFiles.add(df);
}
/**
 * After writing two keys and committing, a re-read via list() must report exactly those keys.
 *
 * <p>NOTE(review): the original compared {@code list().toArray()} against a fixed array, which
 * also pins the iteration order even though sibling tests (e.g. testPutKeysAndThenCallList)
 * treat the order of list() as unspecified. Compare as sets instead.
 */
@Test
public void testWritingNewFileToDiskAndThenReadingIt()
    throws IOException, DatabaseCorruptedException {
  DataFile test = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  test.put(correctKey1, testStoreableValue);
  test.put(correctKey2, testStoreableValue);
  test.commit();
  Set<String> expectedKeySet = new HashSet<>();
  expectedKeySet.add(correctKey1);
  expectedKeySet.add(correctKey2);
  assertEquals(expectedKeySet, new HashSet<>(test.list()));
}
/** Keys stored with put() must all be reported by list(). */
@Test
public void testPutKeysAndThenCallList() throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  dataFile.put(correctKey1, testStoreableValue);
  dataFile.put(correctKey2, testStoreableValue);
  // Order of list() is unspecified, so compare as sets.
  Set<String> expectedKeySet = new HashSet<>();
  expectedKeySet.add(correctKey1);
  expectedKeySet.add(correctKey2);
  Set<String> actualKeySet = new HashSet<>(dataFile.list());
  assertEquals(expectedKeySet, actualKeySet);
}
/**
 * Starts a new write batch: lays out the fixed-size batch control record at the batch's base
 * offset, places the first data record immediately after it, updates the length counters for
 * the combined size, and queues both records.
 */
public void doFirstBatch(WriteCommand controlRecord, WriteCommand writeRecord) throws IOException {
  // The control record is a fixed-size header at the very start of the batch.
  controlRecord.location.setType(Journal.BATCH_CONTROL_RECORD_TYPE);
  controlRecord.location.setSize(Journal.BATCH_CONTROL_RECORD_SIZE);
  controlRecord.location.setDataFileId(dataFile.getDataFileId());
  controlRecord.location.setOffset(offset);
  // The first payload record follows the control record directly.
  writeRecord.location.setDataFileId(dataFile.getDataFileId());
  writeRecord.location.setOffset(offset + Journal.BATCH_CONTROL_RECORD_SIZE);
  // Batch size starts as header + first record; file and journal lengths grow by the same.
  size = Journal.BATCH_CONTROL_RECORD_SIZE + writeRecord.location.getSize();
  dataFile.incrementLength(size);
  journal.addToTotalLength(size);
  writes.offer(controlRecord);
  writes.offer(writeRecord);
}
/**
 * Persists every DataFile still reachable through the given weak references; entries whose
 * reference or referent has gone away are pruned from the list.
 */
private void save(List<WeakReference<DataFile>> toSave) throws DataFileException {
  Iterator<WeakReference<DataFile>> iterator = toSave.iterator();
  while (iterator.hasNext()) {
    WeakReference<DataFile> reference = iterator.next();
    DataFile dataFile = (reference == null) ? null : reference.get();
    if (dataFile == null) {
      // Stale slot: either a null entry or a garbage-collected referent.
      iterator.remove();
    } else {
      dataFile.save();
    }
  }
}
/**
 * Helper method to validate the whole SignedDoc object.
 *
 * @param bStrong flag that specifies if Id atribute value is to be rigorously checked (according
 *     to digidoc format) or only as required by XML-DSIG
 * @return a possibly empty list of SignedDocException objects
 */
public ArrayList<SignedDocException> validate(boolean bStrong) {
  ArrayList<SignedDocException> errs = new ArrayList<SignedDocException>();
  // Container-level checks first: format, then version.
  SignedDocException ex = validateFormat(m_format);
  if (ex != null) {
    errs.add(ex);
  }
  ex = validateVersion(m_version);
  if (ex != null) {
    errs.add(ex);
  }
  // Then collect errors from every embedded data file ...
  for (int i = 0; i < countDataFiles(); i++) {
    ArrayList<SignedDocException> dfErrs = getDataFile(i).validate(bStrong);
    if (!dfErrs.isEmpty()) {
      errs.addAll(dfErrs);
    }
  }
  // ... and from every signature.
  for (int i = 0; i < countSignatures(); i++) {
    ArrayList<SignedDocException> sigErrs = getSignature(i).validate();
    if (!sigErrs.isEmpty()) {
      errs.addAll(sigErrs);
    }
  }
  return errs;
}
/**
 * Drops every scheduled auto-save entry for the given DataFile, pruning dead or null weak
 * references along the way.
 */
protected void unscheduleAutoSaving(DataFile df) {
  Iterator<WeakReference<DataFile>> iterator = scheduled.iterator();
  while (iterator.hasNext()) {
    WeakReference<DataFile> reference = iterator.next();
    DataFile referent = (reference == null) ? null : reference.get();
    // Remove stale slots (null entry / collected referent) and the file being unscheduled.
    if (referent == null || referent.equals(df)) {
      iterator.remove();
    }
  }
}
/**
 * Returns a new available DataFile id of the form "D&lt;n&gt;".
 *
 * @return the first unused id, trying D0, D1, ... in order against the existing data files
 */
public String getNewDataFileId() {
  // Collect the ids already in use so each candidate is checked in O(1).
  // (The original re-scanned every data file for every candidate, and carried a
  // misleading no-op 'continue'.)
  java.util.Set<String> usedIds = new java.util.HashSet<String>();
  for (int d = 0; d < countDataFiles(); d++) {
    DataFile df = getDataFile(d);
    if (df.getId() != null) {
      usedIds.add(df.getId());
    }
  }
  int nDf = 0;
  String id = "D" + nDf;
  while (usedIds.contains(id)) {
    nDf++;
    id = "D" + nDf;
  }
  return id;
}
/**
 * Writes the SignedDoc to the given output stream and automatically calculates DataFile sizes
 * and digests.
 *
 * @param os output stream the XML representation is written to
 * @throws SignedDocException for all errors
 */
public void writeToStream(OutputStream os) throws SignedDocException {
  // TODO read DataFile elements from old file
  try {
    // NOTE(review): getBytes() here uses the platform default charset -- confirm UTF-8
    // is guaranteed (e.g. via JVM config), otherwise output differs across platforms.
    os.write(xmlHeader().getBytes());
    // Each data file serializes itself directly onto the stream.
    for (int i = 0; i < countDataFiles(); i++) {
      DataFile df = getDataFile(i);
      df.writeToFile(os);
      os.write("\n".getBytes());
    }
    // Signatures are emitted as pre-rendered XML byte arrays.
    for (int i = 0; i < countSignatures(); i++) {
      Signature sig = getSignature(i);
      os.write(sig.toXML());
      os.write("\n".getBytes());
    }
    os.write(xmlTrailer().getBytes());
  } catch (SignedDocException ex) {
    throw ex; // already handled
  } catch (Exception ex) {
    SignedDocException.handleException(ex, SignedDocException.ERR_WRITE_FILE);
  }
}
/**
 * Entry point: loads professor and student CSV data into the model, randomizes the student
 * order, runs a large number of swap iterations printing the section score after each one, and
 * finally writes the resulting sections out as CSV.
 */
public static void main(String[] args) {
  Model model = new Model();
  File studentFile = new File("StudentInfo.csv");
  File professorFile = new File("ProfessorInfo.csv");
  // NOTE(review): the meaning of the constant 19 is not visible here -- presumably a section
  // count or capacity; confirm against Model.runProgram and name it.
  model.runProgram(professorFile, studentFile, 19);
  shuffleArray(model.getAllStudents());
  System.out.println(scoreSections(model));
  // Iterative improvement loop: swap() mutates the model; the score is printed every pass.
  // NOTE(review): swap() semantics (random swap? accepted only if better?) not visible here.
  for (int i = 0; i < 1000000; i++) {
    model.swap();
    System.out.println(scoreSections(model));
  }
  DataFile.writeCSVFile(model.getAllSections(), "");
}
/**
 * Hand-writes a two-key file image to disk (same layout as testReadingExistentFileByCallingList)
 * and checks that get() resolves both keys to the stored value.
 */
@Test
public void testDataFileGetCommand() throws IOException, DatabaseCorruptedException {
  filePath.getParent().toFile().mkdir();
  try (DataOutputStream file = new DataOutputStream(new FileOutputStream(filePath.toString()))) {
    file.write(correctKey1.getBytes(encoding));
    file.write('\0');
    // Offset of the first value: size of both key records (keys + 2 terminators + 2 offset ints).
    file.writeInt(correctKey1.length() + correctKey2.length() + 2 + offsetLength * 2);
    file.write(correctKey2.getBytes(encoding));
    file.write('\0');
    // NOTE(review): 'correctKey2.length() + correctKey2.length()' looks like a typo for
    // correctKey1.length() + correctKey2.length(); it only works if both keys have the same
    // length -- confirm against the fixture values.
    file.writeInt(
        correctKey2.length() + correctKey2.length() + 2 + offsetLength * 2 + testStringValue.length());
    file.write(testStringValue.getBytes(encoding));
    file.write(testStringValue.getBytes(encoding));
  }
  DataFile test = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  assertEquals(testStoreableValue, test.get(correctKey1));
  assertEquals(testStoreableValue, test.get(correctKey2));
}
/** * Converts the SignedDoc to XML form * * @return XML representation of SignedDoc */ public String toXML() throws SignedDocException { // System.out.println("TO-XML:"); StringBuffer sb = new StringBuffer(xmlHeader()); // System.out.println("DFS: " + countDataFiles()); for (int i = 0; i < countDataFiles(); i++) { DataFile df = getDataFile(i); String str = df.toString(); // System.out.println("DF: " + df.getId() + " size: " + str.length()); sb.append(str); sb.append("\n"); } // System.out.println("SIGS: " + countSignatures()); for (int i = 0; i < countSignatures(); i++) { Signature sig = getSignature(i); String str = sig.toString(); // System.out.println("SIG: " + sig.getId() + " size: " + str.length()); sb.append(str); sb.append("\n"); } sb.append(xmlTrailer()); // System.out.println("Doc size: " + sb.toString().length()); return sb.toString(); }
/**
 * Adds a new incomplete signature to signed doc.
 *
 * @param sdoc SignedDoc object
 * @param profile new signature profile. Use NULL for default
 * @param cert signers certificate
 * @param claimedRoles signers claimed roles
 * @param adr signers address
 * @param sId new signature id, Use NULL for default value
 * @param sSigMethod signature method uri - ddoc: SignedDoc.RSA_SHA1_SIGNATURE_METHOD, bdoc:
 *     depends on card type. Use null for default value
 * @param sDigType digest type (all other hashes but SignedInfo). Use null for default type
 * @return new Signature object
 * @throws DigiDocException on weak existing signatures, unsupported role counts, missing or
 *     unsuitable signer certificate, or invalid digest type configuration
 */
public static Signature prepareXadesBES(
    SignedDoc sdoc,
    String profile,
    X509Certificate cert,
    String[] claimedRoles,
    SignatureProductionPlace adr,
    String sId,
    String sSigMethod,
    String sDigType)
    throws DigiDocException {
  if (m_logger.isDebugEnabled())
    m_logger.debug(
        "Prepare signature in sdoc: "
            + sdoc.getFormat()
            + "/"
            + sdoc.getVersion()
            + "/"
            + sdoc.getProfile()
            + " profile: "
            + profile
            + " signer: "
            + ((cert != null) ? SignedDoc.getCommonName(cert.getSubjectDN().getName()) : "unknown")
            + " id "
            + sId);
  // Refuse to add a signature when any existing signature only matched via the
  // alternative digest (indicates a wrong DataFile hash).
  boolean bWeakSig = false;
  for (int i = 0; i < sdoc.countSignatures(); i++) {
    Signature sig = sdoc.getSignature(i);
    if (sig.getAltDigestMatch()) bWeakSig = true;
  }
  if (bWeakSig) {
    m_logger.error(
        "One or more signatures has wrong DataFile hash even if alternative hash matches!");
    throw new DigiDocException(
        DigiDocException.ERR_VERIFY,
        "One or more signatures has wrong DataFile hash even if alternative hash matches!",
        null);
  }
  // count roles
  if (claimedRoles != null && claimedRoles.length > 1) {
    m_logger.error("Currently supports no more than 1 ClaimedRole");
    throw new DigiDocException(
        DigiDocException.ERR_UNSUPPORTED, "Currently supports no more than 1 ClaimedRole", null);
  }
  // cannot proceed if cert has not been read
  if (cert == null) {
    m_logger.error("Signers certificate missing during signature preparation!");
    throw new DigiDocException(
        DigiDocException.ERR_SIGNERS_CERT,
        "Signers certificate missing during signature preparation!",
        null);
  }
  // Key-usage check is configurable; when enabled the cert must carry non-repudiation.
  boolean bCheckNonRepu = ConfigManager.instance().getBooleanProperty("KEY_USAGE_CHECK", true);
  if (bCheckNonRepu && !ConfigManager.isSignatureKey(cert)) {
    if (m_logger.isDebugEnabled())
      m_logger.debug("Signers cert does not have non-repudiation bit set!");
    throw new DigiDocException(
        DigiDocException.ERR_SIGNERS_CERT_NONREPUD,
        "Signers cert does not have non-repudiation bit set!",
        null);
  }
  Signature sig = new Signature(sdoc);
  sig.setId(sId != null ? sId : sdoc.getNewSignatureId());
  if (profile != null) { // use new profile for this signature
    sig.setProfile(profile);
    if (sdoc.getProfile() == null || sdoc.getProfile().equals(SignedDoc.BDOC_PROFILE_BES))
      sdoc.setProfile(profile); // change also container to new profile
  } else // use default profile
    sig.setProfile(sdoc.getProfile());
  // create SignedInfo block
  SignedInfo si =
      new SignedInfo(
          sig,
          ((sSigMethod != null) ? sSigMethod : SignedDoc.RSA_SHA1_SIGNATURE_METHOD),
          SignedDoc.CANONICALIZATION_METHOD_20010315);
  // BDOC 2.1 uses a different canonicalization method and namespace prefix scheme.
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)
      && sdoc.getVersion().equals(SignedDoc.BDOC_VERSION_2_1)) {
    si.setCanonicalizationMethod(SignedDoc.CANONICALIZATION_METHOD_1_1);
    sdoc.setDefaultNsPref(SignedDoc.FORMAT_BDOC);
  }
  if (m_logger.isDebugEnabled())
    m_logger.debug(
        "Signer: "
            + cert.getSubjectDN().getName()
            + " EC key: "
            + isEcPubKey(cert)
            + " pre-2011: "
            + isPre2011IdCard(cert)
            + " digi-id: "
            + isDigiIdCard(cert)
            + " 2011: "
            + is2011Card(cert));
  if (sSigMethod == null) { // default values
    // Pick the signature method from card generation and configured digest type.
    if (sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) {
      if (isPre2011IdCard(cert)) {
        if (m_logger.isDebugEnabled())
          m_logger.debug("Generating rsa-sha224 signature for pre-2011 card");
        si.setSignatureMethod(SignedDoc.RSA_SHA224_SIGNATURE_METHOD);
      } else {
        String dType =
            ConfigManager.instance().getStringProperty("DIGIDOC_DIGEST_TYPE", "SHA-256");
        String sSigMeth = ConfigManager.digType2SigMeth(dType, isEcPubKey(cert));
        if (m_logger.isDebugEnabled())
          m_logger.debug("Generating digest: " + dType + " and signature: " + sSigMeth);
        if (sSigMeth != null) si.setSignatureMethod(sSigMeth);
        else
          throw new DigiDocException(
              DigiDocException.ERR_DIGEST_ALGORITHM, "Invalid digest type: " + dType, null);
      }
    }
  }
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_XADES)
      || sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) si.setId(sig.getId() + "-SignedInfo");
  // SignedDataObjectProperties
  SignedDataObjectProperties sdop = new SignedDataObjectProperties();
  // add DataFile references
  for (int i = 0; i < sdoc.countDataFiles(); i++) {
    DataFile df = sdoc.getDataFile(i);
    if (sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) {
      if (!df.isDigestsCalculated()) {
        try {
          // Prefer the cached body stream, then the container entry, then the raw file.
          InputStream is = null;
          if (df.getDfCacheFile() != null) is = df.getBodyAsStream();
          if (is == null) is = sdoc.findDataFileAsStream(df.getFileName());
          if (is == null) is = new java.io.FileInputStream(df.getFileName());
          df.calcHashes(is);
        } catch (java.io.FileNotFoundException ex) {
          throw new DigiDocException(
              DigiDocException.ERR_READ_FILE, "Cannot read file: " + df.getFileName(), null);
        }
      }
    } else {
      if (!df.isDigestsCalculated()) df.calculateFileSizeAndDigest(null);
    }
    if (m_logger.isDebugEnabled()) m_logger.debug("Add ref for df: " + df.getId());
    Reference ref = new Reference(si, df, sDigType);
    if (sdoc.getFormat().equals(SignedDoc.FORMAT_XADES)
        || sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) ref.setId(sig.getId() + "-ref-" + i);
    si.addReference(ref);
    if (sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)
        && sdoc.getVersion().equals(SignedDoc.BDOC_VERSION_2_1)) {
      DataObjectFormat dof = new DataObjectFormat("#" + ref.getId());
      dof.setMimeType(df.getMimeType());
      sdop.addDataObjectFormat(dof);
    }
  }
  // manifest.xml reference - bdoc 2.1 does not sign manifest.xml
  // create key info
  KeyInfo ki = new KeyInfo(cert);
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_XADES)
      || sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) ki.setId(sig.getId() + "-KeyInfo");
  sig.setKeyInfo(ki);
  ki.setSignature(sig);
  registerCert(cert, CertValue.CERTVAL_TYPE_SIGNER, null, sig);
  if (m_logger.isDebugEnabled()) m_logger.debug("Signer cert: " + cert.getSubjectDN().getName());
  boolean bUseLocal = ConfigManager.instance().getBooleanProperty("DIGIDOC_USE_LOCAL_TSL", false);
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_XADES)
      || sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) {
    TrustServiceFactory tslFac = ConfigManager.instance().getTslFactory();
    // first lookup in TSL-s
    X509Certificate ca = tslFac.findCaForCert(cert, bUseLocal, null);
    if (ca != null) {
      String caId = sig.getId() + "-" + ConvertUtils.getCommonName(ca.getSubjectDN().getName());
      registerCert(ca, CertValue.CERTVAL_TYPE_CA, caId, sig);
    }
    // TODO: maybe copy local CA certs to signature until the first ca that is in TSL?
  }
  // create signed properties
  SignedProperties sp = new SignedProperties(sig, cert, claimedRoles, adr);
  sig.setSignedProperties(sp);
  // bdoc 2.0 nonce policy
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)
      && sdoc.getVersion().equals(SignedDoc.BDOC_VERSION_2_1)
      && (sig.getProfile().equals(SignedDoc.BDOC_PROFILE_TM)
          || sig.getProfile().equals(SignedDoc.BDOC_PROFILE_BES)
          || sig.getProfile().equals(SignedDoc.BDOC_PROFILE_CL)
          || sig.getProfile().equals(SignedDoc.BDOC_PROFILE_TMA))) {
    sp.setSignedDataObjectProperties(sdop);
    // Attach the fixed BDOC 2.1 signature policy (OID, digest and SpUri constants).
    Identifier id1 = new Identifier(Identifier.OIDAsURN);
    id1.setUri(BDOC_210_OID);
    ObjectIdentifier oid1 = new ObjectIdentifier(id1);
    SignaturePolicyId spi1 = new SignaturePolicyId(oid1);
    spi1.setDigestAlgorithm(BDOC_210_DIGEST_METHOD);
    spi1.setDigestValue(ConvertUtils.hex2bin(BDOC_210_DIGEST_HEX));
    spi1.addSigPolicyQualifier(new SpUri(BDOC_210_SPURI));
    SignaturePolicyIdentifier spid1 = new SignaturePolicyIdentifier(spi1);
    sp.setSignaturePolicyIdentifier(spid1);
  } else {
    SignaturePolicyIdentifier spid1 = new SignaturePolicyIdentifier(null);
    sp.setSignaturePolicyIdentifier(spid1);
  }
  // Reference covering the SignedProperties themselves.
  Reference ref = new Reference(si, sp, sDigType);
  if (sdoc.getFormat().equals(SignedDoc.FORMAT_XADES)
      || sdoc.getFormat().equals(SignedDoc.FORMAT_BDOC)) ref.setId(sig.getId() + "-ref-sp");
  ref.setType(SignedDoc.SIGNEDPROPERTIES_TYPE);
  si.addReference(ref);
  sig.setSignedInfo(si);
  sdoc.addSignature(sig);
  if (m_logger.isDebugEnabled())
    m_logger.debug("Prepared signature: " + sig.getId() + "/" + sig.getProfile());
  return sig;
}
/**
 * The async processing loop that writes to the data files and does the force calls. Since the
 * file sync() call is the slowest of all the operations, this algorithm tries to 'batch' or group
 * together several file sync() requests into a single file sync() call. The batching is
 * accomplished attaching the same CountDownLatch instance to every force request in a group.
 */
private void processQueue() {
  DataFile dataFile = null;
  RandomAccessFile file = null;
  try {
    DataByteArrayOutputStream buff =
        new DataByteArrayOutputStream(journal.getMaxWriteBatchSize());
    boolean last = false;
    while (true) {
      // Blocks until a batch is available; on shutdown the current batch is the final one.
      WriteBatch wb = batchQueue.take();
      if (shutdown) {
        last = true;
      }
      if (!wb.writes.isEmpty()) {
        // Switch target files when the journal has rotated to a new data file.
        boolean newOrRotated = dataFile != wb.dataFile;
        if (newOrRotated) {
          if (file != null) {
            dataFile.closeRandomAccessFile(file);
          }
          dataFile = wb.dataFile;
          file = dataFile.openRandomAccessFile();
        }
        // Write an empty batch control record.
        buff.reset();
        buff.writeInt(Journal.BATCH_CONTROL_RECORD_SIZE);
        buff.writeByte(Journal.BATCH_CONTROL_RECORD_TYPE);
        buff.write(Journal.BATCH_CONTROL_RECORD_MAGIC);
        buff.writeInt(0);
        buff.writeLong(0);
        boolean forceToDisk = false;
        // First queued command is the control record; the rest are payload records.
        WriteCommand control = wb.writes.poll();
        // NOTE(review): 'first' is never read after this point.
        WriteCommand first = wb.writes.peek();
        WriteCommand latest = null;
        for (WriteCommand current : wb.writes) {
          // One sync request anywhere in the batch forces the whole batch to disk.
          forceToDisk |= current.sync;
          buff.writeInt(current.location.getSize());
          buff.writeByte(current.location.getType());
          buff.write(current.data.getData(), current.data.getOffset(), current.data.getLength());
          latest = current;
        }
        Buffer sequence = buff.toBuffer();
        // Now we can fill in the batch control record properly.
        buff.reset();
        buff.skip(Journal.HEADER_SIZE + Journal.BATCH_CONTROL_RECORD_MAGIC.length);
        buff.writeInt(sequence.getLength() - Journal.BATCH_CONTROL_RECORD_SIZE);
        if (journal.isChecksum()) {
          // Adler32 over the payload only (everything after the control record).
          Checksum checksum = new Adler32();
          checksum.update(
              sequence.getData(),
              sequence.getOffset() + Journal.BATCH_CONTROL_RECORD_SIZE,
              sequence.getLength() - Journal.BATCH_CONTROL_RECORD_SIZE);
          buff.writeLong(checksum.getValue());
        }
        // Now do the 1 big write.
        file.seek(wb.offset);
        file.write(sequence.getData(), sequence.getOffset(), sequence.getLength());
        ReplicationTarget replicationTarget = journal.getReplicationTarget();
        if (replicationTarget != null) {
          replicationTarget.replicate(control.location, sequence, forceToDisk);
        }
        if (forceToDisk) {
          IOHelper.sync(file.getFD());
        }
        journal.setLastAppendLocation(latest.location);
        // Now that the data is on disk, remove the writes from the in flight cache.
        inflightWrites.remove(control.location);
        for (WriteCommand current : wb.writes) {
          if (!current.sync) {
            inflightWrites.remove(current.location);
          }
        }
        if (journal.getListener() != null) {
          try {
            journal.getListener().synced(wb.writes.toArray(new WriteCommand[wb.writes.size()]));
          } catch (Throwable ex) {
            // Listener failures must not kill the writer thread.
            warn(ex, ex.getMessage());
          }
        }
        // Clear unused data:
        wb.writes.clear();
        // Signal any waiting threads that the write is on disk.
        wb.latch.countDown();
      }
      if (last) {
        break;
      }
    }
  } catch (Exception e) {
    // Record only the first failure; enqueue() rethrows it to callers.
    firstAsyncException.compareAndSet(null, e);
  } finally {
    try {
      if (file != null) {
        dataFile.closeRandomAccessFile(file);
      }
    } catch (Throwable ignore) {
    }
    shutdownDone.countDown();
  }
}
/**
 * Adds a write record to the in-flight batch, creating, rotating or flushing batches as needed,
 * and returns the batch the record ended up in. Uses the 'batching' CAS flag as a lightweight
 * mutex; spins (then sleeps) while another thread holds it.
 *
 * @param writeRecord record to append; its sync flag decides whether the batch is flushed now
 * @return the WriteBatch that accepted the record
 * @throws IOException if the appender is shut down or the writer thread already failed
 */
private WriteBatch enqueue(WriteCommand writeRecord) throws IOException {
  WriteBatch currentBatch = null;
  int spinnings = 0;
  int limit = 100;
  while (true) {
    if (shutdown) {
      throw new IOException("DataFileAppender Writer Thread Shutdown!");
    }
    // Surface the writer thread's first failure to the caller.
    if (firstAsyncException.get() != null) {
      throw new IOException(firstAsyncException.get());
    }
    try {
      if (batching.compareAndSet(false, true) && !shutdown) {
        if (nextWriteBatch == null) {
          // No open batch: start one on the current write file, rotating if the record
          // does not fit.
          DataFile file = journal.getCurrentWriteFile();
          boolean canBatch = false;
          currentBatch = new WriteBatch(file, file.getLength(), writeRecord);
          canBatch = currentBatch.canBatch(writeRecord);
          if (!canBatch) {
            file = journal.rotateWriteFile();
            currentBatch = new WriteBatch(file, file.getLength(), writeRecord);
          }
          WriteCommand controlRecord = new WriteCommand(new Location(), null, false);
          currentBatch.doFirstBatch(controlRecord, writeRecord);
          if (!writeRecord.sync) {
            // Async write: keep the batch open for more records; track in-flight entries.
            inflightWrites.put(controlRecord.location, controlRecord);
            inflightWrites.put(writeRecord.location, writeRecord);
            nextWriteBatch = currentBatch;
            batching.set(false);
          } else {
            // Sync write: hand the batch to the writer thread immediately.
            batchQueue.put(currentBatch);
            batching.set(false);
          }
          break;
        } else {
          // An open batch exists: append if it fits, otherwise flush it and retry.
          boolean canBatch = nextWriteBatch.canBatch(writeRecord);
          if (canBatch && !writeRecord.sync) {
            nextWriteBatch.doAppendBatch(writeRecord);
            inflightWrites.put(writeRecord.location, writeRecord);
            currentBatch = nextWriteBatch;
            batching.set(false);
            break;
          } else if (canBatch && writeRecord.sync) {
            nextWriteBatch.doAppendBatch(writeRecord);
            batchQueue.put(nextWriteBatch);
            currentBatch = nextWriteBatch;
            nextWriteBatch = null;
            batching.set(false);
            break;
          } else {
            // Record does not fit: flush the open batch and loop to start a fresh one.
            batchQueue.put(nextWriteBatch);
            nextWriteBatch = null;
            batching.set(false);
          }
        }
      } else {
        // Spin waiting for new batch ...
        if (spinnings <= limit) {
          spinnings++;
          continue;
        } else {
          Thread.sleep(250);
          continue;
        }
      }
    } catch (InterruptedException ex) {
      throw new IllegalStateException(ex.getMessage(), ex);
    }
  }
  return currentBatch;
}
/** put() must reject a null value with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void testPutThrowsExceptionForCorrectKeyAndNullValue()
    throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  dataFile.put(correctKey1, null);
}
/** Looking up a well-formed key that was never stored must yield null. */
@Test
public void testGetCalledForNonAssociatedCorrectKey()
    throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  assertEquals(null, dataFile.get(correctKey1));
}
/** Removes temporary DataFile cache files. */
public void cleanupDfCache() {
  // Guard once up front instead of re-testing m_dataFiles on every loop iteration,
  // as the original loop condition did.
  if (m_dataFiles == null) {
    return;
  }
  // Index loop kept (rather than for-each) in case cleanup interacts with the list.
  for (int i = 0; i < m_dataFiles.size(); i++) {
    m_dataFiles.get(i).cleanupDfCache();
  }
}
/** put() must return null on first insertion and the previous value on overwrite. */
@Test
public void testPutCalledTwiceForSameKey() throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  // First insertion: no previous mapping exists.
  assertEquals(null, dataFile.put(correctKey1, testStoreableValue));
  // Second insertion with the same key: the previous value comes back.
  assertEquals(testStoreableValue, dataFile.put(correctKey1, testStoreableValue));
}
/**
 * Creates a statistic row bound to the given data file: copies the file's id and its retailer
 * site's id, and records the row number.
 */
public DataFileStatistic(DataFile pDataFile, int row) {
  this.setDataFile_id(pDataFile.getId());
  // NOTE(review): throws NPE if pDataFile is null or getRetailerSite() returns null --
  // confirm callers guarantee both.
  this.setRetailerSite_id(pDataFile.getRetailerSite().getId());
  this.setRow(row);
}
/** remove() must reject a key that does not belong to this DataFile. */
@Test(expected = IllegalArgumentException.class)
public void testRemoveThrowsExceptionForKeyNotFromThisDataFile()
    throws IOException, DatabaseCorruptedException {
  DataFile dataFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  dataFile.remove(wrongKey);
}
/** A freshly created DataFile must report no keys. */
@Test
public void testListCalledForEmptyDataFile() throws IOException, DatabaseCorruptedException {
  DataFile emptyFile = new DataFile(testDir, new Coordinates(folderIndex, fileIndex), table, provider);
  assertTrue(emptyFile.list().isEmpty());
}