/**
 * Returns the list of mapped files that should be transformed. Files can be specified via
 * attributes (srcFile, srcDir) or resources (FileSet, FileList, DirSet, etc.). A mapped file
 * represents the input and output file for a transformation.
 *
 * @return list of mapped files
 */
public List<MappedFile> getMappedFiles() {
    mappedFiles.clear();

    // single src file / src dir
    if (getSrcFile() != null) {
        addMappedFile(getSrcFile());
    }
    if (getSrcDir() != null) {
        addMappedFile(getSrcDir());
    }

    Iterator element = resources.iterator();
    while (element.hasNext()) {
        ResourceCollection rc = (ResourceCollection) element.next();
        if (rc instanceof FileSet && rc.isFilesystemOnly()) {
            FileSet fs = (FileSet) rc;
            File fromDir = fs.getDir(getProject());

            DirectoryScanner ds;
            try {
                ds = fs.getDirectoryScanner(getProject());
            } catch (BuildException ex) {
                log("Could not scan directory " + fromDir, ex, Project.MSG_ERR);
                continue;
            }

            for (String f : ds.getIncludedFiles()) {
                addMappedFile(new File(fromDir, f), fromDir);
            }
        } else {
            if (!rc.isFilesystemOnly()) {
                log("Only filesystem resources are supported", Project.MSG_WARN);
                continue;
            }

            Iterator rcIt = rc.iterator();
            while (rcIt.hasNext()) {
                Resource r = (Resource) rcIt.next();
                if (!r.isExists()) {
                    log("Could not find resource " + r.toLongString(), Project.MSG_VERBOSE);
                    continue;
                }

                if (r instanceof FileResource) {
                    FileResource fr = (FileResource) r;
                    addMappedFile(fr.getFile(), fr.getBaseDir());
                } else {
                    log("Only file resources are supported ("
                        + r.getClass().getSimpleName() + " found)", Project.MSG_WARN);
                }
            }
        }
    }
    return mappedFiles;
}
/**
 * Compare the content of two Resources. A nonexistent Resource's content is "less than" that of
 * an existing Resource; a directory-type Resource's content is "less than" that of a file-type
 * Resource.
 *
 * @param r1 the Resource whose content is to be compared.
 * @param r2 the other Resource whose content is to be compared.
 * @param text true if the content is to be treated as text and differences in kind of line break
 *     are to be ignored.
 * @return a negative integer, zero, or a positive integer as the first argument is less than,
 *     equal to, or greater than the second.
 * @throws IOException if the Resources cannot be read.
 * @since Ant 1.7
 */
public static int compareContent(Resource r1, Resource r2, boolean text) throws IOException {
    if (r1.equals(r2)) {
        return 0;
    }
    boolean e1 = r1.isExists();
    boolean e2 = r2.isExists();
    if (!(e1 || e2)) {
        return 0;
    }
    if (e1 != e2) {
        return e1 ? 1 : -1;
    }
    boolean d1 = r1.isDirectory();
    boolean d2 = r2.isDirectory();
    if (d1 && d2) {
        return 0;
    }
    if (d1 || d2) {
        return d1 ? -1 : 1;
    }
    return text ? textCompare(r1, r2) : binaryCompare(r1, r2);
}
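A minimal usage sketch for the comparator above (assuming it is the static ResourceUtils.compareContent in org.apache.tools.ant.util; the two file names are hypothetical):

import java.io.File;
import java.io.IOException;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.util.ResourceUtils;

public class CompareContentExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical inputs; any two existing files will do.
        FileResource left = new FileResource(new File("a.txt"));
        FileResource right = new FileResource(new File("b.txt"));
        // text=true: treat the content as text, so CRLF vs. LF differences are ignored.
        int cmp = ResourceUtils.compareContent(left, right, true);
        System.out.println(cmp == 0 ? "identical content"
            : (cmp < 0 ? "left sorts before right" : "right sorts before left"));
    }
}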
/**
 * Compares the contents of two Resources.
 *
 * @param r1 the Resource whose content is to be compared.
 * @param r2 the other Resource whose content is to be compared.
 * @param text true if the content is to be treated as text and differences in kind of line break
 *     are to be ignored.
 * @return true if the content of the Resources is the same.
 * @throws IOException if the Resources cannot be read.
 * @since Ant 1.7
 */
public static boolean contentEquals(Resource r1, Resource r2, boolean text) throws IOException {
    if (r1.isExists() != r2.isExists()) {
        return false;
    }
    if (!r1.isExists()) {
        // two nonexistent resources are considered equal
        return true;
    }
    // should the following two checks be switched? If r1 and r2 refer to the same file,
    // isn't their content equal regardless of whether that file is a directory?
    if (r1.isDirectory() || r2.isDirectory()) {
        // don't want to compare directory contents for now
        return false;
    }
    if (r1.equals(r2)) {
        return true;
    }
    if (!text && r1.getSize() != r2.getSize()) {
        return false;
    }
    return compareContent(r1, r2, text) == 0;
}
/**
 * Load Ant properties from the source file or resource.
 *
 * @exception BuildException if something goes wrong with the build
 */
public final void execute() throws BuildException {
    // validation
    if (src == null) {
        throw new BuildException("A source resource is required.");
    }
    if (!src.isExists()) {
        if (src instanceof JavaResource) {
            // dreaded backwards compatibility
            log("Unable to find resource " + src, Project.MSG_WARN);
            return;
        }
        throw new BuildException("Source resource does not exist: " + src);
    }

    BufferedInputStream bis = null;
    Reader instream = null;
    ByteArrayInputStream tis = null;
    try {
        bis = new BufferedInputStream(src.getInputStream());
        if (encoding == null) {
            instream = new InputStreamReader(bis);
        } else {
            instream = new InputStreamReader(bis, encoding);
        }

        ChainReaderHelper crh = new ChainReaderHelper();
        crh.setPrimaryReader(instream);
        crh.setFilterChains(filterChains);
        crh.setProject(getProject());
        instream = crh.getAssembledReader();

        String text = crh.readFully(instream);
        if (text != null && text.length() != 0) {
            if (!text.endsWith("\n")) {
                text = text + "\n";
            }
            tis = new ByteArrayInputStream(text.getBytes("ISO8859_1"));
            final Properties props = new Properties();
            props.load(tis);

            Property propertyTask = new Property();
            propertyTask.bindToOwner(this);
            propertyTask.addProperties(props);
        }
    } catch (final IOException ioe) {
        throw new BuildException("Unable to load file: " + ioe, ioe, getLocation());
    } finally {
        FileUtils.close(bis);
        FileUtils.close(tis);
    }
}
/**
 * Index the fileset.
 *
 * @exception IOException if Lucene I/O exception
 *     TODO: refactor!!!!!
 */
private void indexDocs() throws IOException {
    Date start = new Date();

    boolean create = overwrite;
    // If the index directory doesn't exist,
    // create it and force create mode
    if (indexDir.mkdirs() && !overwrite) {
        create = true;
    }

    FSDirectory dir = FSDirectory.open(indexDir);
    try {
        Searcher searcher = null;
        boolean checkLastModified = false;
        if (!create) {
            try {
                searcher = new IndexSearcher(dir, true);
                checkLastModified = true;
            } catch (IOException ioe) {
                log("IOException: " + ioe.getMessage());
                // Empty - ignore, which indicates to index all documents
            }
        }

        log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE);

        IndexWriterConfig conf =
            new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer)
                .setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND);
        LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
        lmp.setUseCompoundFile(useCompoundIndex);
        lmp.setMergeFactor(mergeFactor);
        IndexWriter writer = new IndexWriter(dir, conf);

        int totalFiles = 0;
        int totalIndexed = 0;
        int totalIgnored = 0;
        try {
            for (int i = 0; i < rcs.size(); i++) {
                ResourceCollection rc = rcs.elementAt(i);
                if (rc.isFilesystemOnly()) {
                    Iterator resources = rc.iterator();
                    while (resources.hasNext()) {
                        Resource r = (Resource) resources.next();
                        if (!r.isExists() || !(r instanceof FileResource)) {
                            continue;
                        }
                        totalFiles++;

                        File file = ((FileResource) r).getFile();
                        if (!file.exists() || !file.canRead()) {
                            throw new BuildException(
                                "File \"" + file.getAbsolutePath()
                                    + "\" does not exist or is not readable.");
                        }

                        boolean indexIt = true;

                        if (checkLastModified) {
                            Term pathTerm = new Term("path", file.getPath());
                            TermQuery query = new TermQuery(pathTerm);
                            ScoreDoc[] hits = searcher.search(query, null, 1).scoreDocs;

                            // if the document is found, compare the indexed last modified
                            // time with the current file - don't index if up to date
                            if (hits.length > 0) {
                                Document doc = searcher.doc(hits[0].doc);
                                String indexModified = doc.get("modified");
                                if (indexModified != null) {
                                    long lastModified = 0;
                                    try {
                                        lastModified = DateTools.stringToTime(indexModified.trim());
                                    } catch (ParseException e) {
                                        // if modified time is not parsable, skip
                                    }
                                    if (lastModified == file.lastModified()) {
                                        // TODO: remove existing document
                                        indexIt = false;
                                    }
                                }
                            }
                        }

                        if (indexIt) {
                            try {
                                log("Indexing " + file.getPath(), Project.MSG_VERBOSE);
                                Document doc = handler.getDocument(file);

                                if (doc == null) {
                                    totalIgnored++;
                                } else {
                                    // Add the path of the file as a field named "path". Use a
                                    // Keyword field, so that the index stores the path, and so
                                    // that the path is searchable.
                                    doc.add(new Field("path", file.getPath(),
                                        Field.Store.YES, Field.Index.NOT_ANALYZED));

                                    // Add the last modified date of the file as a field named
                                    // "modified". Use a Keyword field, so that it's searchable,
                                    // but so that no attempt is made to tokenize the field into
                                    // words.
                                    doc.add(new Field("modified",
                                        DateTools.timeToString(file.lastModified(),
                                            DateTools.Resolution.MILLISECOND),
                                        Field.Store.YES, Field.Index.NOT_ANALYZED));

                                    writer.addDocument(doc);
                                    totalIndexed++;
                                }
                            } catch (DocumentHandlerException e) {
                                throw new BuildException(e);
                            }
                        }
                    } // while resources
                } // if (rc.isFilesystemOnly())
            } // for i

            writer.optimize();
        } finally {
            // always make sure everything gets closed,
            // no matter how we exit.
            writer.close();
            if (searcher != null) {
                searcher.close();
            }
        }

        Date end = new Date();
        log(totalIndexed + " out of " + totalFiles + " indexed ("
            + totalIgnored + " ignored) in "
            + (end.getTime() - start.getTime()) + " milliseconds");
    } finally {
        dir.close();
    }
}
/**
 * Convenience method to copy content from one Resource to another specifying whether token
 * filtering must be used, whether filter chains must be used, whether newer destination files may
 * be overwritten and whether the last modified time of the <code>dest</code> file should be made
 * equal to the last modified time of <code>source</code>.
 *
 * @param source the Resource to copy from. Must not be <code>null</code>.
 * @param dest the Resource to copy to. Must not be <code>null</code>.
 * @param filters the collection of filters to apply to this copy.
 * @param filterChains filterChains to apply during the copy.
 * @param overwrite Whether or not the destination Resource should be overwritten if it already
 *     exists.
 * @param preserveLastModified Whether or not the last modified time of the destination Resource
 *     should be set to that of the source.
 * @param inputEncoding the encoding used to read the files.
 * @param outputEncoding the encoding used to write the files.
 * @param project the project instance.
 * @throws IOException if the copying fails.
 * @since Ant 1.7
 */
public static void copyResource(Resource source, Resource dest,
    FilterSetCollection filters, Vector filterChains,
    boolean overwrite, boolean preserveLastModified,
    String inputEncoding, String outputEncoding,
    Project project) throws IOException {

    if (!overwrite) {
        long slm = source.getLastModified();
        if (dest.isExists() && slm != 0 && dest.getLastModified() > slm) {
            return;
        }
    }
    final boolean filterSetsAvailable = (filters != null && filters.hasFilters());
    final boolean filterChainsAvailable = (filterChains != null && filterChains.size() > 0);

    if (filterSetsAvailable) {
        BufferedReader in = null;
        BufferedWriter out = null;
        try {
            InputStreamReader isr = null;
            if (inputEncoding == null) {
                isr = new InputStreamReader(source.getInputStream());
            } else {
                isr = new InputStreamReader(source.getInputStream(), inputEncoding);
            }
            in = new BufferedReader(isr);

            OutputStreamWriter osw = null;
            if (outputEncoding == null) {
                osw = new OutputStreamWriter(dest.getOutputStream());
            } else {
                osw = new OutputStreamWriter(dest.getOutputStream(), outputEncoding);
            }
            out = new BufferedWriter(osw);

            if (filterChainsAvailable) {
                ChainReaderHelper crh = new ChainReaderHelper();
                crh.setBufferSize(FileUtils.BUF_SIZE);
                crh.setPrimaryReader(in);
                crh.setFilterChains(filterChains);
                crh.setProject(project);
                Reader rdr = crh.getAssembledReader();
                in = new BufferedReader(rdr);
            }

            LineTokenizer lineTokenizer = new LineTokenizer();
            lineTokenizer.setIncludeDelims(true);
            String newline = null;
            String line = lineTokenizer.getToken(in);
            while (line != null) {
                if (line.length() == 0) {
                    // this should not happen, because the lines are
                    // returned with the end of line delimiter
                    out.newLine();
                } else {
                    newline = filters.replaceTokens(line);
                    out.write(newline);
                }
                line = lineTokenizer.getToken(in);
            }
        } finally {
            FileUtils.close(out);
            FileUtils.close(in);
        }
    } else if (filterChainsAvailable
        || (inputEncoding != null && !inputEncoding.equals(outputEncoding))
        || (inputEncoding == null && outputEncoding != null)) {
        BufferedReader in = null;
        BufferedWriter out = null;
        try {
            InputStreamReader isr = null;
            if (inputEncoding == null) {
                isr = new InputStreamReader(source.getInputStream());
            } else {
                isr = new InputStreamReader(source.getInputStream(), inputEncoding);
            }
            in = new BufferedReader(isr);

            OutputStreamWriter osw = null;
            if (outputEncoding == null) {
                osw = new OutputStreamWriter(dest.getOutputStream());
            } else {
                osw = new OutputStreamWriter(dest.getOutputStream(), outputEncoding);
            }
            out = new BufferedWriter(osw);

            if (filterChainsAvailable) {
                ChainReaderHelper crh = new ChainReaderHelper();
                crh.setBufferSize(FileUtils.BUF_SIZE);
                crh.setPrimaryReader(in);
                crh.setFilterChains(filterChains);
                crh.setProject(project);
                Reader rdr = crh.getAssembledReader();
                in = new BufferedReader(rdr);
            }

            char[] buffer = new char[FileUtils.BUF_SIZE];
            while (true) {
                int nRead = in.read(buffer, 0, buffer.length);
                if (nRead == -1) {
                    break;
                }
                out.write(buffer, 0, nRead);
            }
        } finally {
            FileUtils.close(out);
            FileUtils.close(in);
        }
    } else {
        InputStream in = null;
        OutputStream out = null;
        try {
            in = source.getInputStream();
            out = dest.getOutputStream();

            byte[] buffer = new byte[FileUtils.BUF_SIZE];
            int count = 0;
            do {
                out.write(buffer, 0, count);
                count = in.read(buffer, 0, buffer.length);
            } while (count != -1);
        } finally {
            FileUtils.close(out);
            FileUtils.close(in);
        }
    }
    if (preserveLastModified && dest instanceof Touchable) {
        setLastModified((Touchable) dest, source.getLastModified());
    }
}
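A rough usage sketch of the unfiltered branch (assuming this is the static ResourceUtils.copyResource from org.apache.tools.ant.util; the file names and the bare Project are placeholders). Passing no FilterSets, no FilterChains and default encodings drives the plain byte-copy path:

import java.io.File;
import java.io.IOException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.util.ResourceUtils;

public class CopyResourceExample {
    public static void main(String[] args) throws IOException {
        Project project = new Project();   // a bare project is enough for an unfiltered copy
        FileResource src = new FileResource(new File("input.txt"));    // hypothetical source
        FileResource dst = new FileResource(new File("output.txt"));   // hypothetical destination
        // No FilterSets, no FilterChains, default encodings: the binary copy branch is taken.
        ResourceUtils.copyResource(src, dst, null, null,
            /* overwrite */ true, /* preserveLastModified */ true,
            /* inputEncoding */ null, /* outputEncoding */ null, project);
    }
}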
/**
 * Tells which sources should be reprocessed based on the last modification date of targets.
 *
 * @param logTo where to send (more or less) interesting output.
 * @param source ResourceCollection.
 * @param mapper filename mapper indicating how to find the target Resources.
 * @param targets object able to map a relative path as a Resource.
 * @param granularity The number of milliseconds leeway to give before deciding a target is out
 *     of date.
 * @return ResourceCollection.
 * @since Ant 1.7
 */
public static ResourceCollection selectOutOfDateSources(ProjectComponent logTo,
    ResourceCollection source, FileNameMapper mapper,
    ResourceFactory targets, long granularity) {
    if (source.size() == 0) {
        logTo.log("No sources found.", Project.MSG_VERBOSE);
        return Resources.NONE;
    }
    source = Union.getInstance(source);
    logFuture(logTo, source, granularity);

    Union result = new Union();
    for (Iterator iter = source.iterator(); iter.hasNext(); ) {
        Resource sr = (Resource) iter.next();
        String srName = sr.getName();
        srName = srName == null ? srName : srName.replace('/', File.separatorChar);

        String[] targetnames = null;
        try {
            targetnames = mapper.mapFileName(srName);
        } catch (Exception e) {
            logTo.log("Caught " + e + " mapping resource " + sr, Project.MSG_VERBOSE);
        }
        if (targetnames == null || targetnames.length == 0) {
            logTo.log(sr + " skipped - don't know how to handle it", Project.MSG_VERBOSE);
            continue;
        }

        Union targetColl = new Union();
        for (int i = 0; i < targetnames.length; i++) {
            targetColl.add(targets.getResource(
                targetnames[i].replace(File.separatorChar, '/')));
        }

        // find the out-of-date targets:
        Restrict r = new Restrict();
        r.add(new And(new ResourceSelector[] {Type.FILE, new Or(
            new ResourceSelector[] {NOT_EXISTS, new Outdated(sr, granularity)})}));
        r.add(targetColl);
        if (r.size() > 0) {
            result.add(sr);
            Resource t = (Resource) (r.iterator().next());
            logTo.log(sr.getName() + " added as " + t.getName()
                + (t.isExists() ? " is outdated." : " doesn't exist."),
                Project.MSG_VERBOSE);
            continue;
        }

        // log the up-to-dateness of all targets:
        logTo.log(sr.getName() + " omitted as " + targetColl.toString()
            + (targetColl.size() == 1 ? " is" : " are")
            + " up to date.", Project.MSG_VERBOSE);
    }
    return result;
}
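A hedged driver sketch for the selector above (assuming the method is ResourceUtils.selectOutOfDateSources and is called from a custom Task; the src/ and build/classes directories and the *.java to *.class mapping are illustrative only):

import java.io.File;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.ResourceFactory;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.util.FileUtils;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.ant.util.ResourceUtils;

public class OutOfDateExample extends Task {
    public void execute() {
        Project p = getProject();

        FileSet sources = new FileSet();                      // hypothetical *.java sources
        sources.setProject(p);
        sources.setDir(new File("src"));
        sources.setIncludes("**/*.java");

        GlobPatternMapper mapper = new GlobPatternMapper();   // maps source name -> target name
        mapper.setFrom("*.java");
        mapper.setTo("*.class");

        final File targetDir = new File("build/classes");     // hypothetical output directory
        ResourceFactory targets = new ResourceFactory() {
            public Resource getResource(String name) {
                return new FileResource(targetDir, name);
            }
        };

        ResourceCollection stale = ResourceUtils.selectOutOfDateSources(
            this, sources, mapper, targets,
            FileUtils.getFileUtils().getFileTimestampGranularity());
        log(stale.size() + " source(s) need reprocessing");
    }
}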