/** * The results of the {@link SR_EELS_CharacterisationPlugin} plugin are parsed. * * <p>This function extracts the values that describe the pathway of the borders of a spectrum. * * @return a polynomial that fits the given data points */ private SR_EELS_Polynomial_2D getFunctionBorders() { final double[][] vals = new double[3 * importer.size()][3]; int i = 0; for (final float[] point : importer) { for (int j = 0; j < 3; j++) { // y coordinate of the fit function at the centre of the // image/camera -> // z value vals[i + j][0] = point[2 + 2 * j] - CameraSetup.getFullWidth() / 2; // Coordinate on the energy dispersive axis -> x value // The same value for top, centre and bottom. vals[i + j][1] = point[0] - CameraSetup.getFullHeight() / 2; // coordinate on the lateral axis -> y value // The indices for centre, top and bottom are 2, 4 and 6. vals[i + j][2] = importer.getYInterceptPoint(i / 3)[11 + j] - CameraSetup.getFullHeight() / 2; } i += 3; } /* * Define the orders of the 2D polynomial. */ final int m = 3; final int n = 2; final SR_EELS_Polynomial_2D func = new SR_EELS_Polynomial_2D(m, n); final double[] a_fit = new double[(m + 1) * (n + 1)]; Arrays.fill(a_fit, 1.); final LMA lma = new LMA(func, a_fit, vals); lma.fit(); /* * TODO: Output information about the fit using IJ.log */ return new SR_EELS_Polynomial_2D(m, n, a_fit); }
@SuppressWarnings("unchecked") private void addFields( DataConfig.Entity entity, DocWrapper doc, Map<String, Object> arow, VariableResolver vr) { for (Map.Entry<String, Object> entry : arow.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (value == null) continue; if (key.startsWith("$")) continue; List<DataConfig.Field> field = entity.colNameVsField.get(key); if (field == null && dataImporter.getSchema() != null) { // This can be a dynamic field or a field which does not have an entry in data-config ( an // implicit field) SchemaField sf = dataImporter.getSchema().getFieldOrNull(key); if (sf == null) { sf = dataImporter.getConfig().lowerNameVsSchemaField.get(key.toLowerCase(Locale.ENGLISH)); } if (sf != null) { addFieldToDoc(entry.getValue(), sf.getName(), 1.0f, sf.multiValued(), doc); } // else do nothing. if we add it it may fail } else { if (field != null) { for (DataConfig.Field f : field) { String name = f.getName(); if (f.dynamicName) { name = vr.replaceTokens(name); } if (f.toWrite) addFieldToDoc(entry.getValue(), name, f.boost, f.multiValued, doc); } } } } }
/** * The results of the {@link SR_EELS_CharacterisationPlugin} plugin are parsed. * * <p>This function extracts the values that describe the width of a spectrum depending on its * position on the camera. * * @return a polynomial that fits the given data points */ private SR_EELS_Polynomial_2D getFunctionWidth() { final double[][] vals = new double[importer.size()][3]; int i = 0; for (final float[] point : importer) { // The width of the spectrum -> z value vals[i][0] = point[8]; // Coordinate on the energy dispersive axis -> x value // The same value for top, centre and bottom. vals[i][1] = point[0] - CameraSetup.getFullWidth() / 2; // coordinate on the lateral axis -> y value // The indices for centre, top and bottom are 2, 4 and 6. vals[i][2] = point[2] - CameraSetup.getFullHeight() / 2; i++; } /* * Define the orders of the 2D polynomial. */ final int m = 2; final int n = 2; final SR_EELS_Polynomial_2D func = new SR_EELS_Polynomial_2D(m, n); final double[] b_fit = new double[(m + 1) * (n + 1)]; Arrays.fill(b_fit, 1.); final LMA lma = new LMA(func, b_fit, vals); lma.fit(); /* * TODO: Output information about the fit using IJ.log */ return new SR_EELS_Polynomial_2D(m, n, b_fit); }
private void invokeEventListener(String className) {
  // Instantiate the configured listener class reflectively and fire the event on it.
  try {
    final Object instance = loadClass(className, dataImporter.getCore()).newInstance();
    notifyListener((EventListener) instance);
  } catch (Exception e) {
    // Any failure (class not found, cast, instantiation) aborts the import.
    wrapAndThrow(SEVERE, e, "Unable to load class : " + className);
  }
}
private void notifyListener(EventListener listener) {
  // Tell the listener whether we are running a delta or a full dump.
  final String currentProcess =
      dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP
          ? Context.DELTA_DUMP
          : Context.FULL_DUMP;
  listener.onEvent(
      new ContextImpl(null, getVariableResolver(), null, currentProcess, session, null, this));
}
@SuppressWarnings("unchecked")
private void doFullDump() {
  addStatusMessage("Full Dump Started");
  final boolean runMultiThreaded = dataImporter.getConfig().isMultiThreaded && !verboseDebug;
  if (!runMultiThreaded) {
    // Single-threaded path (also used when verbose debugging is on).
    buildDocument(getVariableResolver(), null, null, root, true, null);
    return;
  }
  try {
    LOG.info("running multithreaded full-import");
    new EntityRunner(root, null).run(null, Context.FULL_DUMP, null);
  } catch (Exception e) {
    // Log and fall through; status handling happens in the caller.
    LOG.error("error in import", e);
  }
}
public VariableResolverImpl getVariableResolver() { try { VariableResolverImpl resolver = null; if (dataImporter != null && dataImporter.getCore() != null) { resolver = new VariableResolverImpl( dataImporter.getCore().getResourceLoader().getCoreProperties()); } else resolver = new VariableResolverImpl(); Map<String, Object> indexerNamespace = new HashMap<String, Object>(); if (persistedProperties.getProperty(LAST_INDEX_TIME) != null) { indexerNamespace.put(LAST_INDEX_TIME, persistedProperties.getProperty(LAST_INDEX_TIME)); } else { // set epoch indexerNamespace.put(LAST_INDEX_TIME, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH)); } indexerNamespace.put(INDEX_START_TIME, dataImporter.getIndexStartTime()); indexerNamespace.put("request", requestParameters.requestParams); indexerNamespace.put("functions", functionsNamespace); for (DataConfig.Entity entity : dataImporter.getConfig().document.entities) { String key = entity.name + "." + SolrWriter.LAST_INDEX_KEY; String lastIndex = persistedProperties.getProperty(key); if (lastIndex != null) { indexerNamespace.put(key, lastIndex); } else { indexerNamespace.put(key, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH)); } } resolver.addNamespace(DataConfig.IMPORTER_NS_SHORT, indexerNamespace); resolver.addNamespace(DataConfig.IMPORTER_NS, indexerNamespace); return resolver; } catch (Exception e) { wrapAndThrow(SEVERE, e); // unreachable statement return null; } }
private EntityProcessorWrapper getEntityProcessor(DataConfig.Entity entity) {
  // Reuse the wrapper if this entity's processor was already resolved.
  if (entity.processor != null) {
    return entity.processor;
  }
  EntityProcessor delegate = null;
  if (entity.proc == null) {
    // No processor configured: default to the SQL entity processor.
    delegate = new SqlEntityProcessor();
  } else {
    try {
      delegate = (EntityProcessor) loadClass(entity.proc, dataImporter.getCore()).newInstance();
    } catch (Exception e) {
      wrapAndThrow(
          SEVERE, e, "Unable to load EntityProcessor implementation for entity:" + entity.name);
    }
  }
  // Cache the wrapper on the entity so subsequent calls return the same instance.
  return entity.processor = new EntityProcessorWrapper(delegate, this);
}
/**
 * Runs the configured import — delta or full, depending on the importer status — over every
 * root entity of the data-config document, firing the onImportStart/onImportEnd listeners and
 * keeping the client-visible status-message map up to date. Commits via finish() on success,
 * rolls back on abort.
 */
@SuppressWarnings("unchecked")
public void execute() {
  dataImporter.store(DataImporter.STATUS_MSGS, statusMessages);
  document = dataImporter.getConfig().document;
  final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());
  // TIME_ELAPSED is a live entry: its toString() recomputes the elapsed time each time
  // the status map is rendered.
  statusMessages.put(
      TIME_ELAPSED,
      new Object() {
        public String toString() {
          return getTimeElapsedSince(startTime.get());
        }
      });
  statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, importStatistics.queryCount);
  statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, importStatistics.rowsCount);
  statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, importStatistics.docCount);
  statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, importStatistics.skipDocCount);
  // Optional request parameter restricting which root entities are run; null means all.
  List<String> entities = requestParameters.entities;
  // Trigger onImportStart
  if (document.onImportStart != null) {
    invokeEventListener(document.onImportStart);
  }
  AtomicBoolean fullCleanDone = new AtomicBoolean(false);
  // we must not do a delete of *:* multiple times if there are multiple root entities to be run
  Properties lastIndexTimeProps = new Properties();
  lastIndexTimeProps.setProperty(
      LAST_INDEX_KEY,
      DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime()));
  for (DataConfig.Entity e : document.entities) {
    // Skip entities not selected by the request.
    if (entities != null && !entities.contains(e.name)) continue;
    // Record the per-entity last-index time before this entity runs.
    lastIndexTimeProps.setProperty(
        e.name + "." + LAST_INDEX_KEY, DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
    root = e;
    String delQuery = e.allAttributes.get("preImportDeleteQuery");
    if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) {
      // Delta import: pre-clean, run the delta, then optionally post-clean.
      cleanByQuery(delQuery, fullCleanDone);
      doDelta();
      delQuery = e.allAttributes.get("postImportDeleteQuery");
      if (delQuery != null) {
        // The post-import query must always run, so reset the once-only guard.
        fullCleanDone.set(false);
        cleanByQuery(delQuery, fullCleanDone);
      }
    } else {
      // Full import: same clean/dump/clean sequence with a full dump.
      cleanByQuery(delQuery, fullCleanDone);
      doFullDump();
      delQuery = e.allAttributes.get("postImportDeleteQuery");
      if (delQuery != null) {
        fullCleanDone.set(false);
        cleanByQuery(delQuery, fullCleanDone);
      }
    }
    statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED);
  }
  if (stop.get()) {
    // Dont commit if aborted using command=abort
    statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
    rollback();
  } else {
    // Do not commit unnecessarily if this is a delta-import and no documents were created or
    // deleted
    if (!requestParameters.clean) {
      if (importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) {
        finish(lastIndexTimeProps);
      }
    } else {
      // Finished operation normally, commit now
      finish(lastIndexTimeProps);
    }
    if (writer != null) {
      writer.finish();
    }
    if (document.onImportEnd != null) {
      invokeEventListener(document.onImportEnd);
    }
  }
  // Replace the live elapsed-time entry with the final counters.
  statusMessages.remove(TIME_ELAPSED);
  statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, "" + importStatistics.docCount.get());
  if (importStatistics.failedDocCount.get() > 0)
    statusMessages.put(
        DataImporter.MSG.TOTAL_FAILED_DOCS, "" + importStatistics.failedDocCount.get());
  statusMessages.put("Time taken ", getTimeElapsedSince(startTime.get()));
  LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
}