Code example #1
File: DocBuilder.java  Project: ashishlal/echidna_2.0
 @SuppressWarnings("unchecked")
 private void finish(Properties lastIndexTimeProps) {
   LOG.info("Import completed successfully");
   statusMessages.put(
       "",
       "Indexing completed. Added/Updated: "
           + importStatistics.docCount
           + " documents. Deleted "
           + importStatistics.deletedDocCount
           + " documents.");
   if (requestParameters.commit) {
     writer.commit(requestParameters.optimize);
     addStatusMessage("Committed");
     if (requestParameters.optimize) addStatusMessage("Optimized");
   }
   try {
     writer.persist(lastIndexTimeProps);
   } catch (Exception e) {
     LOG.error("Could not write property file", e);
     statusMessages.put(
         "error",
         "Could not write property file. Delta imports will not work. "
             + "Make sure your conf directory is writable");
   }
 }
Code example #2
File: DocBuilder.java  Project: ashishlal/echidna_2.0
 private void cleanByQuery(String delQuery, AtomicBoolean completeCleanDone) {
   delQuery = getVariableResolver().replaceTokens(delQuery);
   if (requestParameters.clean) {
     if (delQuery == null && !completeCleanDone.get()) {
       writer.doDeleteAll();
       completeCleanDone.set(true);
     } else if (delQuery != null) {
       writer.deleteByQuery(delQuery);
     }
   }
 }
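
For reference, here is a standalone sketch (no Solr dependencies; the class name CleanOnceSketch and the print statements are illustrative, not part of DocBuilder) of how the completeCleanDone flag keeps the full *:* delete from running more than once when several root entities are imported in the same request:

import java.util.concurrent.atomic.AtomicBoolean;

public class CleanOnceSketch {
  // Simplified stand-in for cleanByQuery(): a null delete query means "clean everything",
  // but the full clean is allowed at most once per import run.
  static void cleanByQuery(String delQuery, AtomicBoolean completeCleanDone) {
    if (delQuery == null && !completeCleanDone.get()) {
      System.out.println("delete *:* (full clean)"); // runs at most once
      completeCleanDone.set(true);
    } else if (delQuery != null) {
      System.out.println("delete by query: " + delQuery);
    }
  }

  public static void main(String[] args) {
    AtomicBoolean fullCleanDone = new AtomicBoolean(false);
    cleanByQuery(null, fullCleanDone);          // full clean
    cleanByQuery(null, fullCleanDone);          // skipped: already done
    cleanByQuery("type:stale", fullCleanDone);  // targeted delete still runs
  }
}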
Code example #3
File: DocBuilder.java  Project: ashishlal/echidna_2.0
  private void handleSpecialCommands(Map<String, Object> arow, DocWrapper doc) {
    Object value = arow.get("$deleteDocById");
    if (value != null) {
      if (value instanceof Collection) {
        Collection collection = (Collection) value;
        for (Object o : collection) {
          writer.deleteDoc(o.toString());
        }
      } else {
        writer.deleteDoc(value);
      }
    }
    value = arow.get("$deleteDocByQuery");
    if (value != null) {
      if (value instanceof Collection) {
        Collection collection = (Collection) value;
        for (Object o : collection) {
          writer.deleteByQuery(o.toString());
        }
      } else {
        writer.deleteByQuery(value.toString());
      }
    }
    value = arow.get("$docBoost");
    if (value != null) {
      float value1 = 1.0f;
      if (value instanceof Number) {
        value1 = ((Number) value).floatValue();
      } else {
        value1 = Float.parseFloat(value.toString());
      }
      doc.setDocumentBoost(value1);
    }

    value = arow.get("$skipDoc");
    if (value != null) {
      if (Boolean.parseBoolean(value.toString())) {
        throw new DataImportHandlerException(
            DataImportHandlerException.SKIP, "Document skipped :" + arow);
      }
    }

    value = arow.get("$skipRow");
    if (value != null) {
      if (Boolean.parseBoolean(value.toString())) {
        throw new DataImportHandlerException(DataImportHandlerException.SKIP_ROW);
      }
    }
  }
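
The $-prefixed keys handled above are ordinary entries in the row map returned by an entity processor. The sketch below mirrors just the $docBoost and $skipDoc handling with plain java.util collections; the class SpecialCommandSketch and the helper parseBoost are illustrative names, not part of DocBuilder:

import java.util.HashMap;
import java.util.Map;

public class SpecialCommandSketch {
  // Same rule as the $docBoost branch: a Number is used directly,
  // anything else is parsed from its string form.
  static float parseBoost(Object value) {
    if (value instanceof Number) {
      return ((Number) value).floatValue();
    }
    return Float.parseFloat(value.toString());
  }

  public static void main(String[] args) {
    Map<String, Object> arow = new HashMap<>();
    arow.put("id", "42");
    arow.put("$docBoost", "2.5");  // boost may arrive as a String or a Number
    arow.put("$skipDoc", "false"); // "true" would abort indexing of this document

    Object boost = arow.get("$docBoost");
    if (boost != null) {
      System.out.println("boost = " + parseBoost(boost)); // boost = 2.5
    }
    boolean skip = Boolean.parseBoolean(String.valueOf(arow.get("$skipDoc")));
    System.out.println("skip = " + skip); // skip = false
  }
}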
Code example #4
File: DocBuilder.java  Project: ashishlal/echidna_2.0
 public DocBuilder(
     DataImporter dataImporter, SolrWriter writer, DataImporter.RequestParams reqParams) {
   INSTANCE.set(this);
   this.dataImporter = dataImporter;
   this.writer = writer;
   DataImporter.QUERY_COUNT.set(importStatistics.queryCount);
   requestParameters = reqParams;
   verboseDebug = requestParameters.debug && requestParameters.verbose;
   functionsNamespace =
       EvaluatorBag.getFunctionsNamespace(this.dataImporter.getConfig().functions, this);
   persistedProperties = writer.readIndexerProperties();
 }
Code example #5
File: DocBuilder.java  Project: ashishlal/echidna_2.0
 private void deleteAll(Set<Map<String, Object>> deletedKeys) {
   LOG.info("Deleting stale documents ");
   Iterator<Map<String, Object>> iter = deletedKeys.iterator();
   while (iter.hasNext()) {
     Map<String, Object> map = iter.next();
     String keyName = root.isDocRoot ? root.getPk() : root.getSchemaPk();
     Object key = map.get(keyName);
     if (key == null) {
       LOG.warn("no key was available for deleteted pk query. keyName = " + keyName);
       continue;
     }
     writer.deleteDoc(key);
     importStatistics.deletedDocCount.incrementAndGet();
     iter.remove();
   }
 }
Code example #6
File: DocBuilder.java  Project: ashishlal/echidna_2.0
  @SuppressWarnings("unchecked")
  private void doDelta() {
    addStatusMessage("Delta Dump started");
    VariableResolverImpl resolver = getVariableResolver();

    if (document.deleteQuery != null) {
      writer.deleteByQuery(document.deleteQuery);
    }

    addStatusMessage("Identifying Delta");
    LOG.info("Starting delta collection.");
    Set<Map<String, Object>> deletedKeys = new HashSet<Map<String, Object>>();
    Set<Map<String, Object>> allPks = collectDelta(root, resolver, deletedKeys);
    if (stop.get()) return;
    addStatusMessage("Deltas Obtained");
    addStatusMessage("Building documents");
    if (!deletedKeys.isEmpty()) {
      allPks.removeAll(deletedKeys);
      deleteAll(deletedKeys);
      // Make sure that documents are not re-created
    }
    deletedKeys = null;

    statusMessages.put("Total Changed Documents", allPks.size());
    VariableResolverImpl vri = getVariableResolver();
    Iterator<Map<String, Object>> pkIter = allPks.iterator();
    while (pkIter.hasNext()) {
      Map<String, Object> map = pkIter.next();
      vri.addNamespace(DataConfig.IMPORTER_NS_SHORT + ".delta", map);
      buildDocument(vri, null, map, root, true, null);
      pkIter.remove();
      // check for abort
      if (stop.get()) break;
    }

    if (!stop.get()) {
      LOG.info("Delta Import completed successfully");
    }
  }
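
A minimal, Solr-free illustration of the set bookkeeping in doDelta(): keys reported as deleted are removed from the set of changed primary keys before documents are rebuilt, so a document that was both updated and deleted at the source is not re-created. The key maps and the class DeltaKeySketch are illustrative only:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class DeltaKeySketch {
  public static void main(String[] args) {
    Set<Map<String, Object>> allPks = new HashSet<>();
    Set<Map<String, Object>> deletedKeys = new HashSet<>();

    Map<String, Object> changed = new HashMap<>();
    changed.put("id", 1);
    Map<String, Object> removed = new HashMap<>();
    removed.put("id", 2);

    allPks.add(changed);
    allPks.add(removed);
    deletedKeys.add(removed);      // key 2 was deleted at the source

    // Mirror of doDelta(): drop the deleted keys first, then rebuild only what remains.
    allPks.removeAll(deletedKeys); // key 2 will not be re-indexed
    System.out.println(allPks);    // [{id=1}]
  }
}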
Code example #7
File: DocBuilder.java  Project: ashishlal/echidna_2.0
  @SuppressWarnings("unchecked")
  private void buildDocument(
      VariableResolverImpl vr,
      DocWrapper doc,
      Map<String, Object> pk,
      DataConfig.Entity entity,
      boolean isRoot,
      ContextImpl parentCtx) {

    EntityProcessorWrapper entityProcessor = getEntityProcessor(entity);

    ContextImpl ctx =
        new ContextImpl(
            entity,
            vr,
            null,
            pk == null ? Context.FULL_DUMP : Context.DELTA_DUMP,
            session,
            parentCtx,
            this);
    entityProcessor.init(ctx);
    Context.CURRENT_CONTEXT.set(ctx);

    if (requestParameters.start > 0) {
      writer.log(SolrWriter.DISABLE_LOGGING, null, null);
    }

    if (verboseDebug) {
      writer.log(SolrWriter.START_ENTITY, entity.name, null);
    }

    int seenDocCount = 0;

    try {
      while (true) {
        if (stop.get()) return;
        if (importStatistics.docCount.get() > (requestParameters.start + requestParameters.rows))
          break;
        try {
          seenDocCount++;

          if (seenDocCount > requestParameters.start) {
            writer.log(SolrWriter.ENABLE_LOGGING, null, null);
          }

          if (verboseDebug && entity.isDocRoot) {
            writer.log(SolrWriter.START_DOC, entity.name, null);
          }
          if (doc == null && entity.isDocRoot) {
            doc = new DocWrapper();
            ctx.setDoc(doc);
            DataConfig.Entity e = entity;
            while (e.parentEntity != null) {
              addFields(
                  e.parentEntity, doc, (Map<String, Object>) vr.resolve(e.parentEntity.name), vr);
              e = e.parentEntity;
            }
          }

          Map<String, Object> arow = entityProcessor.nextRow();
          if (arow == null) {
            break;
          }

          // Support for start parameter in debug mode
          if (entity.isDocRoot) {
            if (seenDocCount <= requestParameters.start) continue;
            if (seenDocCount > requestParameters.start + requestParameters.rows) {
              LOG.info("Indexing stopped at docCount = " + importStatistics.docCount);
              break;
            }
          }

          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_OUT, entity.name, arow);
          }
          importStatistics.rowsCount.incrementAndGet();
          if (doc != null) {
            handleSpecialCommands(arow, doc);
            addFields(entity, doc, arow, vr);
          }
          if (entity.entities != null) {
            vr.addNamespace(entity.name, arow);
            for (DataConfig.Entity child : entity.entities) {
              buildDocument(vr, doc, child.isDocRoot ? pk : null, child, false, ctx);
            }
            vr.removeNamespace(entity.name);
          }
          /* The child entities would have changed CURRENT_CONTEXT, so once they
           * are done, set it back to this entity's context. */
          Context.CURRENT_CONTEXT.set(ctx);

          if (entity.isDocRoot) {
            if (stop.get()) return;
            if (!doc.isEmpty()) {
              boolean result = writer.upload(doc);
              doc = null;
              if (result) {
                importStatistics.docCount.incrementAndGet();
              } else {
                importStatistics.failedDocCount.incrementAndGet();
              }
            }
          }

        } catch (DataImportHandlerException e) {
          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, e);
          }
          if (e.getErrCode() == DataImportHandlerException.SKIP_ROW) {
            continue;
          }
          if (isRoot) {
            if (e.getErrCode() == DataImportHandlerException.SKIP) {
              importStatistics.skipDocCount.getAndIncrement();
              doc = null;
            } else {
              LOG.error("Exception while processing: " + entity.name + " document : " + doc, e);
            }
            if (e.getErrCode() == DataImportHandlerException.SEVERE) throw e;
          } else throw e;
        } catch (Throwable t) {
          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, t);
          }
          throw new DataImportHandlerException(DataImportHandlerException.SEVERE, t);
        } finally {
          if (verboseDebug) {
            writer.log(SolrWriter.ROW_END, entity.name, null);
            if (entity.isDocRoot) writer.log(SolrWriter.END_DOC, null, null);
            Context.CURRENT_CONTEXT.remove();
          }
        }
      }
    } finally {
      if (verboseDebug) {
        writer.log(SolrWriter.END_ENTITY, null, null);
      }
      entityProcessor.destroy();
    }
  }
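
The start/rows windowing inside buildDocument() can be read in isolation: the first start root documents are fetched but skipped (with logging suppressed), and fetching stops once start + rows documents have been seen. Below is a minimal sketch of that windowing with the Solr calls replaced by prints; the class StartRowsSketch and the parameter values are hypothetical:

public class StartRowsSketch {
  public static void main(String[] args) {
    int start = 2, rows = 3; // stand-ins for requestParameters.start and rows
    int seenDocCount = 0;
    for (int docId = 1; docId <= 10; docId++) {
      seenDocCount++;
      if (seenDocCount <= start) continue;      // skipped, as in the debug-mode start support
      if (seenDocCount > start + rows) break;   // stop once the requested window is exhausted
      System.out.println("index doc " + docId); // docs 3, 4 and 5 are indexed
    }
  }
}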
Code example #8
File: DocBuilder.java  Project: ashishlal/echidna_2.0
 void rollback() {
   writer.rollback();
   statusMessages.put("", "Indexing failed. Rolled back all changes.");
   addStatusMessage("Rolledback");
 }
Code example #9
File: DocBuilder.java  Project: ashishlal/echidna_2.0
  @SuppressWarnings("unchecked")
  public void execute() {
    dataImporter.store(DataImporter.STATUS_MSGS, statusMessages);
    document = dataImporter.getConfig().document;
    final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());
    statusMessages.put(
        TIME_ELAPSED,
        new Object() {
          public String toString() {
            return getTimeElapsedSince(startTime.get());
          }
        });

    statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, importStatistics.queryCount);
    statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, importStatistics.rowsCount);
    statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, importStatistics.docCount);
    statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, importStatistics.skipDocCount);

    List<String> entities = requestParameters.entities;

    // Trigger onImportStart
    if (document.onImportStart != null) {
      invokeEventListener(document.onImportStart);
    }
    AtomicBoolean fullCleanDone = new AtomicBoolean(false);
    // we must not do a delete of *:* multiple times if there are multiple root entities to be run
    Properties lastIndexTimeProps = new Properties();
    lastIndexTimeProps.setProperty(
        LAST_INDEX_KEY,
        DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime()));
    for (DataConfig.Entity e : document.entities) {
      if (entities != null && !entities.contains(e.name)) continue;
      lastIndexTimeProps.setProperty(
          e.name + "." + LAST_INDEX_KEY, DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
      root = e;
      String delQuery = e.allAttributes.get("preImportDeleteQuery");
      if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) {
        cleanByQuery(delQuery, fullCleanDone);
        doDelta();
        delQuery = e.allAttributes.get("postImportDeleteQuery");
        if (delQuery != null) {
          fullCleanDone.set(false);
          cleanByQuery(delQuery, fullCleanDone);
        }
      } else {
        cleanByQuery(delQuery, fullCleanDone);
        doFullDump();
        delQuery = e.allAttributes.get("postImportDeleteQuery");
        if (delQuery != null) {
          fullCleanDone.set(false);
          cleanByQuery(delQuery, fullCleanDone);
        }
      }
      statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED);
    }

    if (stop.get()) {
      // Don't commit if aborted using command=abort
      statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
      rollback();
    } else {
      // Do not commit unnecessarily if this is a delta-import and no documents were created or
      // deleted
      if (!requestParameters.clean) {
        if (importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) {
          finish(lastIndexTimeProps);
        }
      } else {
        // Finished operation normally, commit now
        finish(lastIndexTimeProps);
      }

      if (writer != null) {
        writer.finish();
      }

      if (document.onImportEnd != null) {
        invokeEventListener(document.onImportEnd);
      }
    }

    statusMessages.remove(TIME_ELAPSED);
    statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, "" + importStatistics.docCount.get());
    if (importStatistics.failedDocCount.get() > 0)
      statusMessages.put(
          DataImporter.MSG.TOTAL_FAILED_DOCS, "" + importStatistics.failedDocCount.get());

    statusMessages.put("Time taken ", getTimeElapsedSince(startTime.get()));
    LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
  }