/**
 * Returns the contents of the data file specified by the ImportDataRecord as a DataMatrix.
 * May return null if there is a problem reading the file.
 *
 * @param importDataRecord ImportDataRecord
 * @return DataMatrix
 * @throws Exception
 */
@Override
public DataMatrix getFileContents(ImportDataRecord importDataRecord) throws Exception {
    if (LOG.isInfoEnabled()) {
        LOG.info("getFileContents(): " + importDataRecord);
    }
    // determine path to file (does override file exist?)
    String fileCanonicalPath = importDataRecord.getCanonicalPathToData();
    // get file data input stream
    InputStream fileContents;
    // data can be compressed
    if (GzipUtils.isCompressedFilename(fileCanonicalPath.toLowerCase())) {
        if (LOG.isInfoEnabled()) {
            LOG.info("getFileContents(): processing file: " + fileCanonicalPath);
        }
        fileContents = readContent(
            importDataRecord,
            org.apache.commons.io.FileUtils.openInputStream(new File(fileCanonicalPath)));
    } else {
        if (LOG.isInfoEnabled()) {
            LOG.info("getFileContents(): processing file: " + fileCanonicalPath);
        }
        fileContents = org.apache.commons.io.FileUtils.openInputStream(new File(fileCanonicalPath));
    }
    // outta here
    return getDataMatrix(fileContents);
}
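// The readContent(...) helper used above is not shown in this snippet. A minimal sketch of
// what such a decompression step might look like, assuming the file is gzip-compressed and
// using Commons Compress (the library that provides GzipUtils). The method body here is an
// assumption for illustration, not the original implementation:
private InputStream readContent(ImportDataRecord importDataRecord, InputStream rawStream)
        throws IOException {
    // Wrap the raw file stream in a gzip decompressor so callers see uncompressed bytes.
    return new org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream(rawStream);
}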
/*
 * (non-Javadoc)
 *
 * @see org.jboss.arquillian.container.sramp.SrampService#deployArchive(java.lang.String,
 * java.lang.String, java.io.InputStream)
 */
public BaseArtifactType deployArchive(
        String archiveId, String archiveName, String artifactTypeArg, InputStream content) {
    assert content != null;
    ZipToSrampArchive expander = null;
    SrampArchive archive = null;
    BaseArtifactType artifact = null;
    File tempResourceFile = null;
    try {
        // internal integrity check
        artifactCounter = client.query("/s-ramp").getTotalResults();

        // First, stash the content in a temp file - we may need it multiple times.
        tempResourceFile = stashResourceContent(content);
        content = FileUtils.openInputStream(tempResourceFile);

        ArtifactType artifactType = ArtifactType.valueOf(artifactTypeArg);
        if (artifactType.isExtendedType()) {
            artifactType = ArtifactType.ExtendedDocument(artifactType.getExtendedType());
        }
        artifact = client.uploadArtifact(artifactType, content, archiveName);
        IOUtils.closeQuietly(content);

        // for all uploaded files add a custom property
        SrampModelUtils.setCustomProperty(artifact, "arquillian-archive-id", archiveId);
        client.updateArtifactMetaData(artifact);

        content = FileUtils.openInputStream(tempResourceFile);
        // Now also add "expanded" content to the s-ramp repository
        expander = ZipToSrampArchiveRegistry.createExpander(artifactType, content);
        if (expander != null) {
            expander.setContextParam(DefaultMetaDataFactory.PARENT_UUID, artifact.getUuid());
            archive = expander.createSrampArchive();
            client.uploadBatch(archive);
        }
    } catch (Exception e) {
        log.error("Upload failure:", e);
        IOUtils.closeQuietly(content);
    } finally {
        SrampArchive.closeQuietly(archive);
        ZipToSrampArchive.closeQuietly(expander);
        FileUtils.deleteQuietly(tempResourceFile);
    }
    return artifact;
}
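// stashResourceContent(...) is referenced above but not shown. A minimal sketch of the
// stash-to-temp-file pattern it implies, so the same content can be re-opened several times;
// this body is an assumption built from standard Commons IO calls, not the original code:
private File stashResourceContent(InputStream content) throws IOException {
    File tempFile = File.createTempFile("arquillian-sramp", ".bin");
    // copyInputStreamToFile consumes and closes the source stream.
    FileUtils.copyInputStreamToFile(content, tempFile);
    return tempFile;
}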
@Test
public void testSonyDSCHXV5()
        throws IOException, CatalogTransformerException, UnsupportedQueryException,
            SourceUnavailableException, FederationException, ParseException {
    File file = new File(TEST_DATA_PATH + "Sony DSC-HX5V.jpg");
    FileInputStream fis = FileUtils.openInputStream(file);

    Metacard metacard = createTransformer().transform(fis);

    assertNotNull(metacard);

    assertNotNull(metacard.getCreatedDate());
    assertThat(metacard.getCreatedDate().getYear() + 1900, is(2010));
    assertThat(metacard.getCreatedDate().getMonth() + 1, is(7));
    assertThat(metacard.getCreatedDate().getDate(), is(14));
    assertThat(metacard.getCreatedDate().getHours(), is(11));
    assertThat(metacard.getCreatedDate().getMinutes(), is(0));
    assertThat(metacard.getCreatedDate().getSeconds(), is(23));

    assertNotNull(metacard.getModifiedDate());
    assertThat(metacard.getModifiedDate().getYear() + 1900, is(2010));
    assertThat(metacard.getModifiedDate().getMonth() + 1, is(7));
    assertThat(metacard.getModifiedDate().getDate(), is(14));
    assertThat(metacard.getModifiedDate().getHours(), is(11));
    assertThat(metacard.getModifiedDate().getMinutes(), is(0));
    assertThat(metacard.getModifiedDate().getSeconds(), is(23));

    WKTReader reader = new WKTReader();
    Geometry geometry = reader.read(metacard.getLocation());
    assertEquals(-104.303846389, geometry.getCoordinate().x, 0.00001);
    assertEquals(39.5698783333, geometry.getCoordinate().y, 0.00001);

    byte[] thumbnail = metacard.getThumbnail();
    assertNotNull(thumbnail);
    assertThat(thumbnail.length, is(11490));
}
public List<BBIOutcomeDTO> parseAndSaveArchiveOfBBIfiles(
        File archive, String originalFileName, User calibratorEmployee)
        throws IOException, ZipException, SQLException, ClassNotFoundException, ParseException {
    try (InputStream inputStream = FileUtils.openInputStream(archive)) {
        return parseAndSaveArchiveOfBBIfiles(inputStream, originalFileName, calibratorEmployee);
    }
}
private void processCommentLines(File file) throws SAXException, IOException {
    SAXParser parser = newSaxParser(false);
    XMLReader xmlReader = parser.getXMLReader();
    commentHandler = new CommentHandler();
    xmlReader.setProperty("http://xml.org/sax/properties/lexical-handler", commentHandler);
    parser.parse(FileUtils.openInputStream(file), commentHandler);
}
public static FileInputStream openInputStream(File file) {
    try {
        return FileUtils.openInputStream(file);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
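// Usage sketch (an assumption, not from the original project): because the wrapper above
// throws an unchecked exception, it can be used directly in a Stream pipeline, where the
// checked IOException of FileUtils.openInputStream would otherwise force a try/catch inside
// the lambda. "FileStreams" is a hypothetical name for the class holding the wrapper.
import java.io.File;
import java.io.FileInputStream;
import java.util.List;
import java.util.stream.Collectors;

public class FileStreamsDemo {
    public static List<FileInputStream> openAll(List<File> files) {
        return files.stream()
                .map(FileStreams::openInputStream) // no checked exception to handle here
                .collect(Collectors.toList());
    }
}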
@Test
public void testIPhone()
        throws IOException, CatalogTransformerException, UnsupportedQueryException,
            SourceUnavailableException, FederationException, ParseException {
    File file = new File(TEST_DATA_PATH + "Apple iPhone 4.jpg");
    FileInputStream fis = FileUtils.openInputStream(file);

    Metacard metacard = createTransformer().transform(fis);

    assertNotNull(metacard);

    assertNotNull(metacard.getCreatedDate());
    assertThat(metacard.getCreatedDate().getYear() + 1900, is(2011));
    assertThat(metacard.getCreatedDate().getMonth() + 1, is(1));
    assertThat(metacard.getCreatedDate().getDate(), is(13));
    assertThat(metacard.getCreatedDate().getHours(), is(14));
    assertThat(metacard.getCreatedDate().getMinutes(), is(33));
    assertThat(metacard.getCreatedDate().getSeconds(), is(39));
    assertEquals(metacard.getCreatedDate(), metacard.getModifiedDate());

    WKTReader reader = new WKTReader();
    Geometry geometry = reader.read(metacard.getLocation());
    assertEquals(12.488833, geometry.getCoordinate().x, 0.00001);
    assertEquals(41.853, geometry.getCoordinate().y, 0.00001);
}
@GET
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response downloadFile(
        @PathParam("platform") String platform,
        @PathParam("major") Long major,
        @PathParam("minor") Long minor,
        @PathParam("build") Long build,
        @PathParam("filename") String filename) {
    SoftwareRelease sr = this.softwareManager.getRelease(platform, major, minor, build, null);
    logger.debug("Got software release: " + sr.getPlatform());
    File file = new File(sr.getPath());
    // new File(...) never returns null, so only the existence check is needed
    if (file.exists()) {
        InputStream stream;
        try {
            stream = FileUtils.openInputStream(file);
        } catch (Exception ex) {
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                    .entity(ex.getMessage())
                    .build();
        }
        return Response.ok().entity(stream).build();
    }
    return Response.status(Response.Status.NOT_FOUND).entity("No Content").build();
}
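// Design note with a sketch (an assumed alternative, not the original endpoint): returning a
// raw InputStream entity relies on the JAX-RS runtime to close the stream after writing. A
// StreamingOutput makes the copy and the close explicit via try-with-resources:
private static Response streamFile(final File file) {
    StreamingOutput body = output -> {
        try (InputStream in = FileUtils.openInputStream(file)) {
            IOUtils.copy(in, output); // Commons IO: copy file bytes into the HTTP response
        }
    };
    return Response.ok(body).build();
}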
protected void processFile(File file) {
    System.out.println(
        "processing file " + file.getName() + " at time "
            + sdf.format(Calendar.getInstance().getTime()));
    FileInputStream openInputStream = null;
    try {
        openInputStream = FileUtils.openInputStream(file);
        List<String> lines = IOUtils.readLines(openInputStream);
        VrpProblem problem = VrpUtils.createProblemFromStringList(lines);
        try {
            Solution solution = createInitialSolution(problem);
            createLeiEtAlSolution(solution);
        } catch (Exception e) {
            System.err.println(
                "An error occurred while calculating the solution with the Lei et al. heuristic.");
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeQuietly(openInputStream);
    }
}
@Override
public String execute() {
    // Get the path for the uploaded file; it should be defined in a configuration file
    /*String path = ServletActionContext.getServletContext().getRealPath("/appends");
    File file = new File(path);
    if (!file.exists()) {
        file.mkdir();
    }*/
    Json j = new Json();
    try {
        FileInputStream fis = FileUtils.openInputStream(upload);
        _importExcel.ImportExcelToDB("t_lw", fis);
        j.setSuccess(true);
        j.setMsg("导入成功!"); // "Import successful!"
        // super.writeJson(j);
        // FileUtils.copyFile(upload, new File(file, uploadFileName));
    } catch (Exception e) {
        e.printStackTrace();
    }
    result = "上传成功"; // "Upload successful"
    return SUCCESS;
}
private Bundle installBundleFromFile(
        File savedBundleFile, boolean startBundle, boolean updateExistingBundle)
        throws IOException, BundleException {
    InputStream bundleInputStream = null;
    try {
        bundleInputStream = FileUtils.openInputStream(savedBundleFile);
        JarInformation jarInformation = new JarInformation(savedBundleFile);
        if (!isValidPluginBundle(jarInformation)) {
            LOG.warn(jarInformation.getFilename() + " is not allowed to be installed as an add-on");
            return null;
        }
        Bundle bundle = findMatchingBundle(jarInformation);
        if (bundle == null) {
            final String bundleFileLocationAsURL = savedBundleFile.toURI().toURL().toExternalForm();
            bundle = bundleContext.installBundle(bundleFileLocationAsURL, bundleInputStream);
        } else if (updateExistingBundle) {
            LOG.info("Updating bundle " + bundle.getSymbolicName() + "|" + bundle.getVersion());
            bundle.update(bundleInputStream);
        }
        if (!isFragmentBundle(bundle) && startBundle) {
            bundle.start();
        }
        return bundle;
    } finally {
        IOUtils.closeQuietly(bundleInputStream);
    }
}
private void teardown(String hadoopHost, String hadoopPort, File hdfsPropertiesFile)
        throws IOException {
    createArchiverMbeanWithArchiverRootURI(createHdfsArchiverRootUri(hadoopHost, hadoopPort));
    Properties hdfsProperties = new Properties();
    hdfsProperties.load(FileUtils.openInputStream(hdfsPropertiesFile));
    assertEquals(hadoopHost, hdfsProperties.getProperty("hadoop.host"));
    assertEquals(hadoopPort, hdfsProperties.getProperty("hadoop.port"));
}
/**
 * Archives the given file into a zip backup instead of simply deleting it.
 *
 * @param file file to truncate
 */
protected void truncate(File file) {
    File backupRoot = new File(backupDir);
    if (!backupRoot.exists() && !backupRoot.mkdirs()) {
        log.warn("Can't create backup dir for backup storage");
        return;
    }
    String date = "";
    try {
        // the first line of the file carries the date after a form-feed separator
        BufferedReader reader = new BufferedReader(new FileReader(file));
        date = reader.readLine().split("\f")[1];
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    File zipFile = new File(backupRoot, file.getName() + "." + date + ".zip");
    ZipOutputStream zos = null;
    FileInputStream fis = null;
    try {
        zos = new ZipOutputStream(new FileOutputStream(zipFile));
        ZipEntry entry = new ZipEntry(file.getName());
        entry.setMethod(ZipEntry.DEFLATED);
        entry.setCrc(FileUtils.checksumCRC32(file));
        zos.putNextEntry(entry);
        fis = FileUtils.openInputStream(file);
        byte[] buffer = new byte[1024];
        int bytesRead;
        while ((bytesRead = fis.read(buffer)) != -1) {
            zos.write(buffer, 0, bytesRead);
        }
    } catch (Exception e) {
        log.warn("Can't create zip file", e);
    } finally {
        if (zos != null) {
            try {
                zos.close();
            } catch (IOException e) {
                log.warn("Can't close zip file", e);
            }
        }
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                log.warn("Can't close zipped file", e);
            }
        }
    }
    if (!file.delete()) {
        log.warn("Can't delete old log file " + file.getAbsolutePath());
    }
}
public void uploadAttachments(JiraTickets tickets)
        throws ExecutionException, InterruptedException, IOException {
    for (JiraTicket t : tickets) {
        Promise<Issue> issuePromise = issueRestClient.getIssue(t.getId());
        Issue i = issuePromise.get();
        File rollback = t.getRollback();
        File hotfix = t.getHotfix();
        if (rollback != null && rollback.canRead()) {
            issueRestClient.addAttachment(
                i.getAttachmentsUri(), FileUtils.openInputStream(rollback), rollback.getName());
        }
        if (hotfix != null && hotfix.canRead()) {
            issueRestClient.addAttachment(
                i.getAttachmentsUri(), FileUtils.openInputStream(hotfix), hotfix.getName());
        }
    }
}
@Test
public final void givenUsingCommonsIO_whenConvertingAnInputStreamToAFile_thenCorrect()
        throws IOException {
    final InputStream initialStream =
        FileUtils.openInputStream(new File("src/main/resources/sample.txt"));
    final File targetFile = new File("src/main/resources/targetFile.tmp");

    FileUtils.copyInputStreamToFile(initialStream, targetFile);
}
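// For comparison, a plain-JDK sketch of the same InputStream-to-File conversion (an assumed
// alternative using java.nio.file only; note that Commons IO's copyInputStreamToFile also
// closes the source stream, which this version does via try-with-resources):
@Test
public final void givenUsingPlainJava_whenConvertingAnInputStreamToAFile_thenCorrect()
        throws IOException {
    try (InputStream initialStream =
            Files.newInputStream(Paths.get("src/main/resources/sample.txt"))) {
        Files.copy(initialStream, Paths.get("src/main/resources/targetFile.tmp"),
            StandardCopyOption.REPLACE_EXISTING);
    }
}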
public InputStream verifyDatatype2(Profile p, Profile baseP, String id, String type) {
    String result = "";
    // Datatype dt = p.getDatatypeLibrary().findOne(id);
    try {
        // Create a temporary file
        File tmpJsonFile = File.createTempFile("resultTmp", ".json");
        // Generate the json file
        JsonFactory factory = new JsonFactory();
        JsonGenerator generator = factory.createGenerator(new FileWriter(tmpJsonFile));
        generator.setPrettyPrinter(new DefaultPrettyPrinter());
        generator.writeStartObject();
        generator.writeStringField("id", id);
        generator.writeStringField("type", type);
        generator.writeArrayFieldStart("eltVerification");
        // for (Component c : dt.getComponents()) {
        //     generator.writeStartObject();
        //     generator.writeStringField("eltName", "usage");
        //     generator.writeStringField("eltAtt", c.getUsage().value());
        //     result = this.validateChangeUsage(p.getMetaData().getHl7Version(),
        //         baseP.getDatatypeLibrary().findOneComponent(c.getId()).getUsage(),
        //         p.getDatatypeLibrary().findOneComponent(c.getId()).getUsage());
        //     generator.writeStringField("result", result);
        //     generator.writeEndObject();
        //
        //     generator.writeStartObject();
        //     generator.writeStringField("eltName", "minLength");
        //     generator.writeStringField("eltAtt", String.valueOf(c.getMinLength()));
        //     result = this.validateChangeLength(String.valueOf(c.getMinLength()), c.getMaxLength());
        //     generator.writeStringField("result", result);
        //     generator.writeEndObject();
        //
        //     generator.writeStartObject();
        //     generator.writeStringField("eltName", "maxLength");
        //     generator.writeStringField("eltAtt", String.valueOf(c.getMaxLength()));
        //     result = this.validateChangeLength(String.valueOf(c.getMinLength()), c.getMaxLength());
        //     generator.writeStringField("result", result);
        //     generator.writeEndObject();
        // }
        generator.writeEndArray();
        generator.writeEndObject();
        generator.close();
        return FileUtils.openInputStream(tmpJsonFile);
    } catch (IOException e) {
        return new NullInputStream(1L);
    }
}
/** @see org.overlord.sramp.shell.api.shell.ShellCommand#execute() */
@Override
public boolean execute() throws Exception {
    String filePathArg =
        this.requiredArgument(0, Messages.i18n.format("Upload.InvalidArgMsg.LocalFile")); //$NON-NLS-1$
    String artifactTypeArg = this.optionalArgument(1);

    QName clientVarName = new QName("s-ramp", "client"); //$NON-NLS-1$ //$NON-NLS-2$
    SrampAtomApiClient client = (SrampAtomApiClient) getContext().getVariable(clientVarName);
    if (client == null) {
        print(Messages.i18n.format("MissingSRAMPConnection")); //$NON-NLS-1$
        return false;
    }
    InputStream content = null;
    ZipToSrampArchive expander = null;
    SrampArchive archive = null;
    try {
        File file = new File(filePathArg);
        ArtifactType artifactType = null;
        if (artifactTypeArg != null) {
            artifactType = ArtifactType.valueOf(artifactTypeArg);
            if (artifactType.isExtendedType()) {
                artifactType = ArtifactType.ExtendedDocument(artifactType.getExtendedType());
            }
        } else {
            artifactType = determineArtifactType(file);
        }
        content = FileUtils.openInputStream(file);
        BaseArtifactType artifact = client.uploadArtifact(artifactType, content, file.getName());
        IOUtils.closeQuietly(content);

        // Now also add "expanded" content to the s-ramp repository
        expander = ZipToSrampArchiveRegistry.createExpander(artifactType, file);
        if (expander != null) {
            expander.setContextParam(DefaultMetaDataFactory.PARENT_UUID, artifact.getUuid());
            archive = expander.createSrampArchive();
            client.uploadBatch(archive);
        }

        // Put the artifact in the session as the active artifact
        QName artifactVarName = new QName("s-ramp", "artifact"); //$NON-NLS-1$ //$NON-NLS-2$
        getContext().setVariable(artifactVarName, artifact);
        print(Messages.i18n.format("Upload.Success")); //$NON-NLS-1$
        PrintArtifactMetaDataVisitor visitor = new PrintArtifactMetaDataVisitor();
        ArtifactVisitorHelper.visitArtifact(visitor, artifact);
    } catch (Exception e) {
        print(Messages.i18n.format("Upload.Failure")); //$NON-NLS-1$
        print("\t" + e.getMessage()); //$NON-NLS-1$
        IOUtils.closeQuietly(content);
        return false;
    }
    return true;
}
public List<Rule> parse(File file) {
    Reader reader = null;
    try {
        reader = new InputStreamReader(FileUtils.openInputStream(file), CharEncoding.UTF_8);
        return parse(reader);
    } catch (IOException e) {
        throw new SonarException("Fail to load the file: " + file, e);
    } finally {
        Closeables.closeQuietly(reader);
    }
}
static {
    InputStream inputStream = null;
    try {
        inputStream = FileUtils.openInputStream(new File(Database.DICTIONARY));
        for (String word : IOUtils.readLines(inputStream, "UTF-8")) {
            WORDS.add(org.apache.commons.lang3.StringUtils.capitalize(word));
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        IOUtils.closeQuietly(inputStream);
    }
}
public boolean responseMatchesFile() throws Throwable {
    File outFile = sendMessage();
    if (IOUtils.contentEquals(
            new BufferedInputStream(FileUtils.openInputStream(outFile)),
            getClass().getClassLoader().getResourceAsStream(this.expectedResponseFile))) {
        System.out.println("Response matches");
        return true;
    } else {
        System.out.println("Response does not match");
        return false;
    }
}
@Override
public DeviceTestData parseAndSaveBBIFile(
        File BBIfile, String verificationID, String originalFileName)
        throws IOException, DecoderException {
    DeviceTestData deviceTestData;
    try (InputStream inputStream = FileUtils.openInputStream(BBIfile)) {
        deviceTestData = parseAndSaveBBIFile(inputStream, verificationID, originalFileName);
        calibrationTestService.createNewTest(deviceTestData, verificationID);
    } catch (DecoderException e) {
        throw e;
    }
    return deviceTestData;
}
public InputStream getResourceStream(String resource) throws IOException {
    InputStream stream = null;
    if (resource.startsWith("file:/")) {
        try {
            stream = FileUtils.openInputStream(new File(new URI(resource)));
        } catch (URISyntaxException e) {
            throw new IOException(e);
        }
    } else {
        stream = ClassLoader.getSystemResourceAsStream(getResourceName(resource));
    }
    assertThat("Failed to find test resource " + resource, stream, Matchers.notNullValue());
    return stream;
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("hash/")
public Response hashing() throws IOException {
    VolumeDAO volumeDAO = DAOFactory.getInstance().getVolumeDAO();
    List<Volume> findAll = volumeDAO.findAll();
    Transaction beginTransaction = volumeDAO.getSession().beginTransaction();
    for (Volume volume : findAll) {
        // close each stream after hashing to avoid leaking file handles across the loop
        try (InputStream in = FileUtils.openInputStream(new File(volume.getPath()))) {
            volume.setHashSH1(DigestUtils.shaHex(in));
        }
    }
    beginTransaction.commit();
    return Response.ok("hashed").build();
}
@Override
public CloseableIterator<String> readScannerLogs() {
    File file = delegate.getFileStructure().analysisLog();
    if (!file.exists()) {
        return CloseableIterator.emptyCloseableIterator();
    }
    try {
        InputStreamReader reader =
            new InputStreamReader(FileUtils.openInputStream(file), StandardCharsets.UTF_8);
        return new LineReaderIterator(reader);
    } catch (IOException e) {
        throw new IllegalStateException("Fail to open file " + file, e);
    }
}
private void runHdfsTestCase(File hdfsPropertiesFile) throws IOException {
    String host = "thehost";
    String port = "9876";
    String archiverRootURI = createHdfsArchiverRootUri(host, port);

    createArchiverMbeanWithArchiverRootURI(archiverRootURI);

    assertEquals("hdfs", archiverMBean.getBackendName());
    assertEquals("/archiver_root", archiverMBean.getArchivePath());

    Properties hdfsProperties = new Properties();
    hdfsProperties.load(FileUtils.openInputStream(hdfsPropertiesFile));
    assertEquals(host, hdfsProperties.getProperty("hadoop.host"));
    assertEquals(port, hdfsProperties.getProperty("hadoop.port"));
}
@Override
public Optional<CloseableIterator<String>> readFileSource(int fileRef) {
    File file = delegate.readFileSource(fileRef);
    if (file == null) {
        return Optional.absent();
    }
    try {
        return Optional.<CloseableIterator<String>>of(
            new CloseableLineIterator(
                IOUtils.lineIterator(FileUtils.openInputStream(file), StandardCharsets.UTF_8)));
    } catch (IOException e) {
        throw new IllegalStateException("Fail to traverse file: " + file, e);
    }
}
public ExceptionBuilder(Logger logger) {
    try {
        this.logger = logger;
        messages = new Properties();
        logger.info(
            "loading system messages repository: "
                + System.getProperty("touresbalon.system.messages"));
        File file = new File(System.getProperty("touresbalon.system.messages"));
        messages.load(FileUtils.openInputStream(file));
    } catch (IOException io) {
        logger.error("error loading system messages repository: " + io.getMessage(), io);
    }
}
@Override
public CloseableIterator<BatchReport.CoverageDetail> readCoverageDetails(int testFileRef) {
    File file = delegate.readCoverageDetails(testFileRef);
    if (file == null) {
        return CloseableIterator.emptyCloseableIterator();
    }
    try {
        return new ParserCloseableIterator<>(
            BatchReport.CoverageDetail.PARSER, FileUtils.openInputStream(file));
    } catch (IOException e) {
        Throwables.propagate(e);
        // actually never reached
        return CloseableIterator.emptyCloseableIterator();
    }
}
public static void readConfigFile() {
    InputStream in;
    try {
        String configFilePath =
            CarbonUtils.getCarbonConfigDirPath() + File.separator + "advanced"
                + File.separator + STREAMDEFN_XML;
        in = FileUtils.openInputStream(new File(configFilePath));
    } catch (Exception e) {
        in = Utils.class.getClassLoader().getResourceAsStream(STREAMDEFN_XML);
    }
    OMXMLParserWrapper builder = OMXMLBuilderFactory.createOMBuilder(in);
    OMElement documentElement = builder.getDocumentElement();

    OMElement replicationFactorEl =
        documentElement.getFirstChildWithName(new QName("ReplicationFactor"));
    if (replicationFactorEl != null) {
        replicationFactor = Integer.parseInt(replicationFactorEl.getText());
    }

    OMElement readLevelEl =
        documentElement.getFirstChildWithName(new QName("ReadConsistencyLevel"));
    // fixed: the original checked replicationFactorEl here, a copy-paste slip that could
    // throw a NullPointerException when ReadConsistencyLevel is absent
    if (readLevelEl != null) {
        readConsistencyLevel = readLevelEl.getText();
    }

    OMElement writeLevelEl =
        documentElement.getFirstChildWithName(new QName("WriteConsistencyLevel"));
    if (writeLevelEl != null) {
        writeConsistencyLevel = writeLevelEl.getText();
    }

    globalConsistencyLevelPolicy =
        new StreamDefnConsistencyLevelPolicy(readConsistencyLevel, writeConsistencyLevel);

    OMElement strategyEl = documentElement.getFirstChildWithName(new QName("StrategyClass"));
    if (strategyEl != null) {
        strategyClass = strategyEl.getText();
    }
}
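// A sample of the configuration shape this parser expects (an assumed illustration derived
// only from the element names read above; the root element name and values are hypothetical,
// not a file from the original project):
//
// <StreamDefinitionConfiguration>
//     <ReplicationFactor>3</ReplicationFactor>
//     <ReadConsistencyLevel>QUORUM</ReadConsistencyLevel>
//     <WriteConsistencyLevel>QUORUM</WriteConsistencyLevel>
//     <StrategyClass>org.apache.cassandra.locator.SimpleStrategy</StrategyClass>
// </StreamDefinitionConfiguration>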
private void set_Backend_archivePath_andAmazonProperties() throws IOException {
    String id = "theId";
    String secret = "theSecret";
    String bucket = "theBucket";
    String archiverRootURI = "s3n://" + id + ":" + secret + "@" + bucket + "/archiver_root";

    createArchiverMbeanWithArchiverRootURI(archiverRootURI);

    assertEquals("s3n", archiverMBean.getBackendName());
    assertEquals("/archiver_root", archiverMBean.getArchivePath());

    Properties amazonProperties = new Properties();
    File amazonPropertiesFile =
        BackendConfigurationFiles.create().getByName(AWSCredentialsImpl.AMAZON_PROPERTIES_FILENAME);
    amazonProperties.load(FileUtils.openInputStream(amazonPropertiesFile));
    assertEquals(id, amazonProperties.getProperty("aws.id"));
    assertEquals(secret, amazonProperties.getProperty("aws.secret"));
    assertEquals(bucket, amazonProperties.getProperty("s3.bucket"));
}