/** * Loads the enum array data for the provided pv. * * @param pvname the name of the PV to load data from * @param optimized true if optimized data should be loaded or false otherwise * @param count number of points to load in case optimized retrieval is selected * @param start the start Instant of the data * @param end the end Instant of the data * @return the array of data * @throws Exception in case of an error */ protected ArchiveVType[] getValuesEnumArray( String pvname, boolean optimized, int count, Instant start, Instant end) throws Exception { ValueIterator iterator = getIterator(pvname, optimized, count, start, end); // exception should occur in the next line iterator.next(); return null; }
/**
 * Loads the numerical data for the provided pv.
 *
 * @param pvname the name of the PV to load data from
 * @param optimized true if optimized data should be loaded or false otherwise
 * @param count number of points to load in case optimized retrieval is selected
 * @param start the start Instant of the data
 * @param end the end Instant of the data
 * @return the array of data
 * @throws Exception in case of an error
 */
protected ArchiveVNumberArray[] getValuesNumberArray(
    String pvname, boolean optimized, int count, Instant start, Instant end) throws Exception {
  ValueIterator iterator = getIterator(pvname, optimized, count, start, end);
  ArrayList<ArchiveVNumberArray> vals = new ArrayList<ArchiveVNumberArray>();
  try {
    while (iterator.hasNext()) {
      vals.add((ArchiveVNumberArray) iterator.next());
    }
  } finally {
    // Close in finally so the iterator is released even if next()/the cast throws
    // (the original leaked it on any exception during iteration).
    iterator.close();
  }
  return vals.toArray(new ArchiveVNumberArray[vals.size()]);
}
/**
 * Loads the data as statistics if applicable for the given pv name. This method will always
 * retrieve optimized data.
 *
 * @param pvname the name of the provided PV
 * @param count the number of requested points
 * @param start the start Instant of the data
 * @param end the end Instant of the data
 * @return the array of statistical data
 * @throws Exception in case of an error
 */
protected ArchiveVType[] getValuesStatistics(String pvname, int count, Instant start, Instant end)
    throws Exception {
  ValueIterator iterator = getIterator(pvname, true, count, start, end);
  ArrayList<ArchiveVType> vals = new ArrayList<ArchiveVType>();
  try {
    while (iterator.hasNext()) {
      vals.add((ArchiveVType) iterator.next());
    }
  } finally {
    // Close in finally so the iterator is released even if next()/the cast throws
    // (the original leaked it on any exception during iteration).
    iterator.close();
  }
  return vals.toArray(new ArchiveVType[vals.size()]);
}
/** {@inheritDoc} */ @Override protected void performExport(final IProgressMonitor monitor, final PrintStream out) throws Exception { final DateFormat date_format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS"); int count = 0; for (ModelItem item : model.getItems()) { // Item header if (count > 0) out.println(); printItemInfo(out, item); // Get data monitor.subTask(NLS.bind("Fetching data for {0}", item.getName())); final ValueIterator values = createValueIterator(item); // Dump all values MatlabQualityHelper qualities = new MatlabQualityHelper(); long line_count = 0; out.println("clear t;"); out.println("clear v;"); out.println("clear q;"); while (values.hasNext() && !monitor.isCanceled()) { final VType value = values.next(); ++line_count; // t(1)='2010/03/15 13:30:10.123'; out.println( "t{" + line_count + "}='" + date_format.format(VTypeHelper.getTimestamp(value).toDate()) + "';"); // v(1)=4.125; final double num = VTypeHelper.toDouble(value); if (Double.isNaN(num) || Double.isInfinite(num)) out.println("v(" + line_count + ")=NaN;"); else out.println("v(" + line_count + ")=" + num + ";"); // q(1)=0; out.println( "q(" + line_count + ")=" + qualities.getQualityCode( VTypeHelper.getSeverity(value), VTypeHelper.getMessage(value)) + ";"); if (line_count % PROGRESS_UPDATE_LINES == 0) monitor.subTask(NLS.bind("{0}: Wrote {1} samples", item.getName(), line_count)); } out.println(comment + "Convert time stamps into 'date numbers'"); out.println("tn=datenum(t, 'yyyy/mm/dd HH:MM:SS.FFF');"); out.println(comment + "Prepare patched data because"); out.println(comment + "timeseries() cannot handle duplicate time stamps"); out.println("[xx, idx]=unique(tn, 'last');"); out.println("pt=tn(idx);"); out.println("pv=v(idx);"); out.println("pq=q(idx);"); out.println("clear xx idx"); out.println(comment + "Convert into time series and plot"); // Patch "_" in name because Matlab plot will interprete it as LaTeX sub-script final String channel_name = item.getDisplayName().replace("_", 
"\\_"); out.println( "channel" + count + "=timeseries(pv', pt', pq', 'IsDatenum', true, 'Name', '" + channel_name + "');"); out.print("channel" + count + ".QualityInfo.Code=["); for (int q = 0; q < qualities.getNumCodes(); ++q) out.print(" " + q); out.println(" ];"); out.print("channel" + count + ".QualityInfo.Description={"); for (int q = 0; q < qualities.getNumCodes(); ++q) out.print(" '" + qualities.getQuality(q) + "'"); out.println(" };"); out.println(); ++count; } out.println(comment + "Example for plotting the data"); for (int i = 0; i < count; ++i) { out.println("subplot(1, " + count + ", " + (i + 1) + ");"); out.println("plot(channel" + i + ");"); } }
/** {@inheritDoc} */ @Override public void run() { Activator.getLogger().log(Level.FINE, "Starting {0}", ArchiveFetchJob.this); // $NON-NLS-1$ final BenchmarkTimer timer = new BenchmarkTimer(); long samples = 0; final int bins = Preferences.getPlotBins(); final ArchiveDataSource archives[] = item.getArchiveDataSources(); List<ArchiveDataSource> sourcesWhereChannelDoesntExist = new ArrayList<>(); for (int i = 0; i < archives.length && !cancelled; ++i) { final ArchiveDataSource archive = archives[i]; final String url = archive.getUrl(); // Display "N/total", using '1' for the first sub-archive. synchronized (this) { message = NLS.bind( Messages.ArchiveFetchDetailFmt, new Object[] {archive.getName(), (i + 1), archives.length}); } try { final ArchiveReader the_reader; synchronized (this) { the_reader = reader = ArchiveRepository.getInstance().getArchiveReader(url); } the_reader.enableConcurrency(concurrency); final ValueIterator value_iter; try { if (item.getRequestType() == RequestType.RAW) value_iter = the_reader.getRawValues(archive.getKey(), item.getResolvedName(), start, end); else value_iter = the_reader.getOptimizedValues( archive.getKey(), item.getResolvedName(), start, end, bins); } catch (UnknownChannelException e) { // Do not immediately notify about unknown channels. First search for the data in all // archive // sources and only report this kind of errors at the end sourcesWhereChannelDoesntExist.add(archives[i]); continue; } // Get samples into array final List<VType> result = new ArrayList<VType>(); while (value_iter.hasNext()) result.add(value_iter.next()); samples += result.size(); item.mergeArchivedSamples(the_reader.getServerName(), result); if (cancelled) break; value_iter.close(); } catch (Exception ex) { // Tell listener unless it's the result of a 'cancel'? 
if (!cancelled) listener.archiveFetchFailed(ArchiveFetchJob.this, archive, ex); // Continue with the next data source } finally { synchronized (this) { if (reader != null) reader.close(); reader = null; } } } if (!sourcesWhereChannelDoesntExist.isEmpty() && !cancelled) { listener.channelNotFound( ArchiveFetchJob.this, sourcesWhereChannelDoesntExist.size() < archives.length, sourcesWhereChannelDoesntExist.toArray( new ArchiveDataSource[sourcesWhereChannelDoesntExist.size()])); } timer.stop(); if (!cancelled) listener.fetchCompleted(ArchiveFetchJob.this); Activator.getLogger() .log( Level.FINE, "Ended {0} with {1} samples in {2}", //$NON-NLS-1$ new Object[] {ArchiveFetchJob.this, samples, timer}); }