Example #1
  public QProfileRuleResult search(ProfileRuleQuery query, Paging paging) {
    SearchHits ruleHits =
        searchRules(
            query,
            paging,
            ruleFilterForActiveRuleSearch(query)
                .must(hasChildFilter(ESActiveRule.TYPE_ACTIVE_RULE, activeRuleFilter(query))));
    List<Integer> ruleIds = Lists.newArrayList();
    for (SearchHit ruleHit : ruleHits) {
      ruleIds.add(Integer.valueOf(ruleHit.id()));
    }

    List<QProfileRule> result = Lists.newArrayList();
    if (!ruleIds.isEmpty()) {
      SearchHits activeRuleHits = searchActiveRules(query, ruleIds, FIELD_SOURCE, FIELD_PARENT);

      Map<String, SearchHit> activeRuleByParent = Maps.newHashMap();
      for (SearchHit activeRuleHit : activeRuleHits) {
        activeRuleByParent.put(
            (String) activeRuleHit.field(FIELD_PARENT).getValue(), activeRuleHit);
      }

      for (SearchHit ruleHit : ruleHits) {
        result.add(
            new QProfileRule(
                ruleHit.sourceAsMap(), activeRuleByParent.get(ruleHit.id()).sourceAsMap()));
      }
    }
    return new QProfileRuleResult(
        result,
        PagingResult.create(paging.pageSize(), paging.pageIndex(), ruleHits.getTotalHits()));
  }
Example #2
 public Collection<Class<? extends Module>> shardModules() {
   List<Class<? extends Module>> modules = Lists.newArrayList();
   for (App app : moduleApps.values()) {
     modules.addAll(app.shardModules());
   }
   return modules;
 }
  /** Injects input artifacts into the corresponding nodes. */
  public void processInputArtifacts(DeploymentTopology topology) {
    if (topology.getInputArtifacts() != null && !topology.getInputArtifacts().isEmpty()) {
      // we'll build a map inputArtifactId -> List<DeploymentArtifact>
      Map<String, List<DeploymentArtifact>> artifactMap = Maps.newHashMap();
      // iterate over nodes in order to remember all nodes referencing an input artifact
      for (NodeTemplate nodeTemplate : topology.getNodeTemplates().values()) {
        if (nodeTemplate.getArtifacts() != null && !nodeTemplate.getArtifacts().isEmpty()) {
          for (DeploymentArtifact da : nodeTemplate.getArtifacts().values()) {
            String inputArtifactId = InputArtifactUtil.getInputArtifactId(da);
            if (inputArtifactId != null) {
              List<DeploymentArtifact> das = artifactMap.get(inputArtifactId);
              if (das == null) {
                das = Lists.newArrayList();
                artifactMap.put(inputArtifactId, das);
              }
              das.add(da);
            }
          }
        }
      }

      for (Map.Entry<String, DeploymentArtifact> e : topology.getInputArtifacts().entrySet()) {
        List<DeploymentArtifact> nodeArtifacts = artifactMap.get(e.getKey());
        if (nodeArtifacts != null) {
          for (DeploymentArtifact nodeArtifact : nodeArtifacts) {
            nodeArtifact.setArtifactRef(e.getValue().getArtifactRef());
            nodeArtifact.setArtifactName(e.getValue().getArtifactName());
          }
        }
      }
    }
  }
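The get-then-put grouping above predates Java 8; on Java 8+ the same map-building step can be written with computeIfAbsent. A minimal standalone sketch (class name and sample values are hypothetical, not project code):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupingSketch {
  public static void main(String[] args) {
    Map<String, List<String>> artifactMap = new HashMap<>();
    String inputArtifactId = "war_file"; // hypothetical input artifact id
    // one call replaces the get() + null check + put() sequence
    artifactMap.computeIfAbsent(inputArtifactId, k -> new ArrayList<>()).add("node1/artifact");
    artifactMap.computeIfAbsent(inputArtifactId, k -> new ArrayList<>()).add("node2/artifact");
    System.out.println(artifactMap); // {war_file=[node1/artifact, node2/artifact]}
  }
}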
Example #4
 public Collection<Class<? extends CloseableIndexComponent>> shardServices() {
   List<Class<? extends CloseableIndexComponent>> services = Lists.newArrayList();
   for (App app : moduleApps.values()) {
     services.addAll(app.shardServices());
   }
   return services;
 }
 /** Adds a sort builder. */
 public SearchSourceBuilder sort(SortBuilder sort) {
   if (sorts == null) {
     sorts = Lists.newArrayList();
   }
   sorts.add(sort);
   return this;
 }
 /** Adds a facet to perform as part of the search. */
 public SearchSourceBuilder facet(AbstractFacetBuilder facet) {
   if (facets == null) {
     facets = Lists.newArrayList();
   }
   facets.add(facet);
   return this;
 }
 public SearchSourceBuilder scriptField(String name, String script, Map<String, Object> params) {
   if (scriptFields == null) {
     scriptFields = Lists.newArrayList();
   }
   scriptFields.add(new ScriptField(name, script, params));
   return this;
 }
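A minimal usage sketch of the lazily-initialized builder methods above; it assumes the SearchSourceBuilder, SortBuilder, and AbstractFacetBuilder types shown here are on the classpath (e.g. a pre-1.x Elasticsearch), and the field and script names are illustrative:

import java.util.Map;

public class SourceBuilderUsageSketch {
  static SearchSourceBuilder buildSource(
      SortBuilder sort, AbstractFacetBuilder facet, Map<String, Object> scriptParams) {
    // each fluent call lazily creates its backing list on first use and returns this
    return new SearchSourceBuilder()
        .sort(sort)
        .facet(facet)
        .scriptField("price_doubled", "doc['price'].value * 2", scriptParams);
  }
}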
Example #8
 /**
  * Helper method to collect the {@code onModule} method hooks declared by a plugin app.
  *
  * @param app the app to inspect
  * @return the list of onModule method references of the app
  */
 private List<OnModuleReference> onModuleRefs(App app) {
   List<OnModuleReference> list = Lists.newArrayList();
   for (Method method : app.getClass().getDeclaredMethods()) {
     if (!method.getName().equals("onModule")) {
       continue;
     }
     if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
       logger.warn(
           "plugin {} declares onModule with no parameters or more than one parameter, skipping",
           app.name());
       continue;
     }
     Class moduleClass = method.getParameterTypes()[0];
     if (!Module.class.isAssignableFrom(moduleClass)) {
       logger.warn(
           "plugin {} declares onModule with a parameter that is not a Module type {}, skipping",
           app.name(),
           moduleClass);
       continue;
     }
     method.setAccessible(true);
     list.add(new OnModuleReference(moduleClass, method));
   }
   return list;
 }
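For reference, onModuleRefs(app) discovers methods shaped like the hypothetical plugin below: literally named onModule, with exactly one parameter whose type extends Module (all names here are illustrative):

public class SampleApp extends App { // App as used in the loop above
  // this method is found reflectively and wrapped in an OnModuleReference
  public void onModule(SomeModule module) { // SomeModule is a hypothetical Module subtype
    module.registerSomething(); // hypothetical hook on the module
  }
}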
Example #9
 public Collection<Class<? extends LifecycleComponent>> services() {
   List<Class<? extends LifecycleComponent>> services = Lists.newArrayList();
   for (App app : moduleApps.values()) {
     services.addAll(app.services());
   }
   return services;
 }
Example #10
 public Collection<Module> shardModules(Settings settings) {
   List<Module> modules = Lists.newArrayList();
   for (App app : moduleApps.values()) {
     modules.addAll(app.shardModules(settings));
   }
   return modules;
 }
@Getter
@Setter
public class ParsedPropertiesDefinitions {
  private Map<String, PropertyDefinition> definitions = Maps.newHashMap();
  private Map<String, List<String>> policies = Maps.newHashMap();
  private List<SimpleSuggestionEntry> suggestions = Lists.newArrayList();
}
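@Getter/@Setter above are Lombok annotations; they generate the accessors at compile time. A minimal usage sketch (the policy values are illustrative):

public class ParsedPropertiesDefinitionsUsageSketch {
  static void demo() {
    ParsedPropertiesDefinitions parsed = new ParsedPropertiesDefinitions();
    // getPolicies() and getSuggestions() are generated by Lombok, not written by hand
    parsed.getPolicies().put("myPolicy", Lists.newArrayList("rule1", "rule2"));
    parsed.getSuggestions().clear();
  }
}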
  /* DATAES-82 */
  @Test
  public void shouldFindAllByIdQuery() {
    // given
    String documentId = randomNumeric(5);
    SampleEntity sampleEntity = new SampleEntity();
    sampleEntity.setId(documentId);
    sampleEntity.setMessage("hello world.");
    sampleEntity.setVersion(System.currentTimeMillis());
    repository.save(sampleEntity);

    String documentId2 = randomNumeric(5);
    SampleEntity sampleEntity2 = new SampleEntity();
    sampleEntity2.setId(documentId2);
    sampleEntity2.setMessage("hello world.");
    sampleEntity2.setVersion(System.currentTimeMillis());
    repository.save(sampleEntity2);

    // when
    Iterable<SampleEntity> sampleEntities =
        repository.findAll(Arrays.asList(documentId, documentId2));

    // then
    assertNotNull("sample entities cant be null..", sampleEntities);
    List<SampleEntity> entities = Lists.newArrayList(sampleEntities);
    assertThat(entities.size(), is(2));
  }
Example #13
  @Test
  public void testReaderReturnsEmptyIteratorForNullValue() {
    Mockito.when(tabixReader.query("13:12-12")).thenReturn(null);

    Iterable<Entity> actual =
        tabixRepository.findAll(tabixRepository.query().eq(CHROM, "13").and().eq(POS, 12));

    assertEquals(Collections.emptyList(), Lists.newArrayList(actual));
  }
 @Override
 public Collection<Class<? extends Module>> modules() {
   Collection<Class<? extends Module>> modules = Lists.newArrayList();
   if (isClient()) {
     modules.add(SuggestClientModule.class);
   } else {
     modules.add(SuggestModule.class);
   }
   return modules;
 }
  @SuppressWarnings("rawtypes")
  @Override
  public Collection<Class<? extends LifecycleComponent>> services() {
    Collection<Class<? extends LifecycleComponent>> services = Lists.newArrayList();

    if (!isClient()) {
      services.add(SuggestService.class);
    }
    return services;
  }
Example #16
 public QProfileRuleResult searchInactives(ProfileRuleQuery query, Paging paging) {
   SearchHits hits =
       searchRules(
           query, paging, ruleFilterForInactiveRuleSearch(query), FIELD_SOURCE, FIELD_PARENT);
   List<QProfileRule> result = Lists.newArrayList();
   for (SearchHit hit : hits.getHits()) {
     result.add(new QProfileRule(hit.sourceAsMap()));
   }
   return new QProfileRuleResult(
       result, PagingResult.create(paging.pageSize(), paging.pageIndex(), hits.getTotalHits()));
 }
  public static List<Map<String, Object>> createProducts(int count) throws Exception {
    List<Map<String, Object>> products = Lists.newArrayList();

    for (int i = 0; i < count; i++) {
      Map<String, Object> product = Maps.newHashMap();
      product.put("ProductName", RandomStringGenerator.randomAlphabetic(10));
      product.put("ProductId", i + "_" + RandomStringGenerator.randomAlphanumeric(10));
      products.add(product);
    }

    return products;
  }
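A usage sketch for the generator above, assuming the old-style org.elasticsearch.client.Client API (the index and type names are illustrative):

  static void indexProducts(Client client) throws Exception {
    // index each generated product; setSource(Map) uses the map as the document body
    for (Map<String, Object> product : createProducts(10)) {
      client.prepareIndex("products", "product", (String) product.get("ProductId"))
          .setSource(product)
          .execute()
          .actionGet();
    }
  }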
 private CommitPoints buildCommitPoints(ImmutableMap<String, BlobMetaData> blobs) {
   List<CommitPoint> commitPoints = Lists.newArrayList();
   for (String name : blobs.keySet()) {
     if (name.startsWith("commit-")) {
       try {
         commitPoints.add(CommitPoints.fromXContent(blobContainer.readBlobFully(name)));
       } catch (Exception e) {
         logger.warn("failed to read commit point [{}]", e, name);
       }
     }
   }
   return new CommitPoints(commitPoints);
 }
Example #19
  /**
   * Get all the relationships in which a given node template is a target
   *
   * @param nodeTemplateName the name of the node template that is the target of the relationships
   * @param nodeTemplates all topology's node templates
   * @return all relationships which have nodeTemplateName as target
   */
  public List<RelationshipTemplate> getTargetRelatedRelationshipsTemplate(
      String nodeTemplateName, Map<String, NodeTemplate> nodeTemplates) {
    List<RelationshipTemplate> toReturn = Lists.newArrayList();
    for (NodeTemplate nodeTemp : nodeTemplates.values()) {
      if (nodeTemp.getRelationships() == null) {
        continue;
      }
      for (RelationshipTemplate relTemp : nodeTemp.getRelationships().values()) {
        if (relTemp == null) {
          continue;
        }
        if (relTemp.getTarget() != null && relTemp.getTarget().equals(nodeTemplateName)) {
          toReturn.add(relTemp);
        }
      }
    }

    return toReturn;
  }
  @Test
  public void testMapAccess() {
    Map<String, Object> vars = new HashMap<String, Object>();

    Map<String, Object> obj2 =
        MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
    Map<String, Object> obj1 =
        MapBuilder.<String, Object>newMapBuilder()
            .put("prop1", "value1")
            .put("obj2", obj2)
            .put("l", Lists.newArrayList("2", "1"))
            .map();
    vars.put("obj1", obj1);
    Object o = se.execute(se.compile("obj1"), vars);
    assertThat(o, instanceOf(Map.class));
    obj1 = (Map<String, Object>) o;
    assertThat((String) obj1.get("prop1"), equalTo("value1"));
    assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

    o = se.execute(se.compile("obj1.l[0]"), vars);
    assertThat(((String) o), equalTo("2"));
  }
Example #21
  @Test
  public void testReaderFiltersRows() throws IOException {
    Mockito.when(tabixReader.query("13:12-12")).thenReturn(iterator);
    Mockito.when(iterator.next())
        .thenReturn(
            "id1\t13\t11\tnope", "id2\t13\t12\tyup", "id3\t13\t12\tyup", "id3\t13\t13\tnope", null);

    Iterable<Entity> actual =
        tabixRepository.findAll(tabixRepository.query().eq(CHROM, "13").and().eq(POS, 12));

    Entity e1 = new MapEntity(entityMetaData);
    e1.set("ID", "id2");
    e1.set("#CHROM", "13");
    e1.set("POS", 12l);
    e1.set("Description", "yup");

    Entity e2 = new MapEntity(entityMetaData);
    e2.set("ID", "id3");
    e2.set("#CHROM", "13");
    e2.set("POS", 12l);
    e2.set("Description", "yup");
    assertEquals(Lists.newArrayList(actual), Arrays.asList(e1, e2));
  }
  @Test
  public void testAccessListInScript() {
    Map<String, Object> vars = new HashMap<String, Object>();
    Map<String, Object> obj2 =
        MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
    Map<String, Object> obj1 =
        MapBuilder.<String, Object>newMapBuilder().put("prop1", "value1").put("obj2", obj2).map();
    vars.put("l", Lists.newArrayList("1", "2", "3", obj1));

    Object o = se.execute(se.compile("l.length"), vars);
    assertThat(((Number) o).intValue(), equalTo(4));

    o = se.execute(se.compile("l[0]"), vars);
    assertThat(((String) o), equalTo("1"));

    o = se.execute(se.compile("l[3]"), vars);
    obj1 = (Map<String, Object>) o;
    assertThat((String) obj1.get("prop1"), equalTo("value1"));
    assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

    o = se.execute(se.compile("l[3].prop1"), vars);
    assertThat(((String) o), equalTo("value1"));
  }
/** @author kimchy (shay.banon) */
public class RangeScriptFacetBuilder extends AbstractFacetBuilder {

  private String keyScript;
  private String valueScript;
  private Map<String, Object> params;
  private List<Entry> entries = Lists.newArrayList();

  public RangeScriptFacetBuilder(String name) {
    super(name);
  }

  public RangeScriptFacetBuilder keyScript(String keyScript) {
    this.keyScript = keyScript;
    return this;
  }

  public RangeScriptFacetBuilder valueScript(String valueScript) {
    this.valueScript = valueScript;
    return this;
  }

  public RangeScriptFacetBuilder param(String name, Object value) {
    if (params == null) {
      params = Maps.newHashMap();
    }
    params.put(name, value);
    return this;
  }

  /**
   * Adds a range entry with explicit from and to.
   *
   * @param from The from range limit
   * @param to The to range limit
   */
  public RangeScriptFacetBuilder addRange(double from, double to) {
    entries.add(new Entry(from, to));
    return this;
  }

  /**
   * Adds a range entry with explicit from and unbounded to.
   *
   * @param from the from range limit, to is unbounded.
   */
  public RangeScriptFacetBuilder addUnboundedTo(double from) {
    entries.add(new Entry(from, Double.POSITIVE_INFINITY));
    return this;
  }

  /**
   * Adds a range entry with explicit to and unbounded from.
   *
   * @param to the to range limit, from is unbounded.
   */
  public RangeScriptFacetBuilder addUnboundedFrom(double to) {
    entries.add(new Entry(Double.NEGATIVE_INFINITY, to));
    return this;
  }

  public RangeScriptFacetBuilder global(boolean global) {
    this.global = global;
    return this;
  }

  public RangeScriptFacetBuilder facetFilter(XContentFilterBuilder filter) {
    this.facetFilter = filter;
    return this;
  }

  @Override
  public void toXContent(XContentBuilder builder, Params params) throws IOException {
    if (keyScript == null) {
      throw new SearchSourceBuilderException(
          "key_script must be set on range script facet for facet [" + name + "]");
    }
    if (valueScript == null) {
      throw new SearchSourceBuilderException(
          "value_script must be set on range script facet for facet [" + name + "]");
    }

    if (entries.isEmpty()) {
      throw new SearchSourceBuilderException(
          "at least one range must be defined for range facet [" + name + "]");
    }

    builder.startObject(name);

    builder.startObject(RangeFacetCollectorParser.NAME);
    builder.field("key_script", keyScript);
    builder.field("value_script", valueScript);

    builder.startArray("ranges");
    for (Entry entry : entries) {
      builder.startObject();
      if (!Double.isInfinite(entry.from)) {
        builder.field("from", entry.from);
      }
      if (!Double.isInfinite(entry.to)) {
        builder.field("to", entry.to);
      }
      builder.endObject();
    }
    builder.endArray();

    if (this.params != null) {
      builder.field("params");
      builder.map(this.params);
    }
    builder.endObject();

    addFilterFacetAndGlobal(builder, params);

    builder.endObject();
  }

  private static class Entry {
    final double from;
    final double to;

    private Entry(double from, double to) {
      this.from = from;
      this.to = to;
    }
  }
}
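Putting the builder above together, a minimal sketch of a three-bucket range script facet (the facet name and scripts are illustrative):

RangeScriptFacetBuilder facet =
    new RangeScriptFacetBuilder("price_ranges")
        .keyScript("doc['price'].value")
        .valueScript("doc['quantity'].value")
        .addUnboundedFrom(10) // from is unbounded: everything below 10
        .addRange(10, 50) // explicit from and to
        .addUnboundedTo(50); // to is unbounded: everything from 50 up
// toXContent omits the infinite bound of each entry when serializing the "ranges" array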
 @Override
 public Collection<Class<? extends Module>> shardModules() {
   Collection<Class<? extends Module>> modules = Lists.newArrayList();
   modules.add(ShardSuggestModule.class);
   return modules;
 }
    @Override
    public void run() {
      while (true) {
        if (closed) {
          break;
        }
        try {
          connection = connectionFactory.newConnection(rabbitAddresses);
          channel = connection.createChannel();
        } catch (Exception e) {
          if (!closed) {
            logger.warn("failed to created a connection / channel", e);
          } else {
            continue;
          }
          cleanup(0, "failed to connect");
          try {
            Thread.sleep(5000);
          } catch (InterruptedException e1) {
            // ignore, if we are closing, we will exit later
          }
        }

        QueueingConsumer consumer = null;
        // define the queue
        try {
          if (rabbitQueueDeclare) {
            // only declare the queue if we should
            channel.queueDeclare(
                rabbitQueue /*queue*/,
                rabbitQueueDurable /*durable*/,
                false /*exclusive*/,
                rabbitQueueAutoDelete /*autoDelete*/,
                rabbitQueueArgs /*extra args*/);
          }
          if (rabbitExchangeDeclare) {
            // only declare the exchange if we should
            channel.exchangeDeclare(
                rabbitExchange /*exchange*/, rabbitExchangeType /*type*/, rabbitExchangeDurable);
          }

          channel.basicQos(
              rabbitQosPrefetchSize /*qos_prefetch_size*/,
              rabbitQosPrefetchCount /*qos_prefetch_count*/,
              false);

          if (rabbitQueueBind) {
            // only bind queue if we should
            channel.queueBind(
                rabbitQueue /*queue*/,
                rabbitExchange /*exchange*/,
                rabbitRoutingKey /*routingKey*/);
          }
          consumer = new QueueingConsumer(channel);
          channel.basicConsume(rabbitQueue /*queue*/, false /*noAck*/, consumer);
        } catch (Exception e) {
          if (!closed) {
            logger.warn("failed to create queue [{}]", e, rabbitQueue);
          }
          cleanup(0, "failed to create queue");
          continue;
        }

        // now use the queue to listen for messages
        while (true) {
          if (closed) {
            break;
          }
          QueueingConsumer.Delivery task;
          try {
            task = consumer.nextDelivery();
          } catch (Exception e) {
            if (!closed) {
              logger.error("failed to get next message, reconnecting...", e);
            }
            cleanup(0, "failed to get message");
            break;
          }

          if (task != null && task.getBody() != null) {
            final List<Long> deliveryTags = Lists.newArrayList();

            BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();

            try {
              processBody(task, bulkRequestBuilder);
            } catch (Exception e) {
              logger.warn(
                  "failed to parse request for delivery tag [{}], ack'ing...",
                  e,
                  task.getEnvelope().getDeliveryTag());
              try {
                channel.basicAck(task.getEnvelope().getDeliveryTag(), false);
              } catch (IOException e1) {
                logger.warn("failed to ack [{}]", e1, task.getEnvelope().getDeliveryTag());
              }
              continue;
            }

            deliveryTags.add(task.getEnvelope().getDeliveryTag());

            if (bulkRequestBuilder.numberOfActions() < bulkSize) {
              // try and spin some more of those without timeout, so we have a bigger bulk (bounded
              // by the bulk size)
              try {
                while ((task = consumer.nextDelivery(bulkTimeout.millis())) != null) {
                  try {
                    processBody(task, bulkRequestBuilder);
                    deliveryTags.add(task.getEnvelope().getDeliveryTag());
                  } catch (Throwable e) {
                    logger.warn(
                        "failed to parse request for delivery tag [{}], ack'ing...",
                        e,
                        task.getEnvelope().getDeliveryTag());
                    try {
                      channel.basicAck(task.getEnvelope().getDeliveryTag(), false);
                    } catch (Exception e1) {
                      logger.warn(
                          "failed to ack on failure [{}]", e1, task.getEnvelope().getDeliveryTag());
                    }
                  }
                  if (bulkRequestBuilder.numberOfActions() >= bulkSize) {
                    break;
                  }
                }
              } catch (InterruptedException e) {
                if (closed) {
                  break;
                }
              } catch (ShutdownSignalException sse) {
                logger.warn(
                    "Received a shutdown signal! initiatedByApplication: [{}], hard error: [{}]",
                    sse,
                    sse.isInitiatedByApplication(),
                    sse.isHardError());
                if (!closed && sse.isInitiatedByApplication()) {
                  logger.error("failed to get next message, reconnecting...", sse);
                }
                cleanup(0, "failed to get message");
                break;
              }
            }

            if (logger.isTraceEnabled()) {
              logger.trace(
                  "executing bulk with [{}] actions", bulkRequestBuilder.numberOfActions());
            }

            // if we have no bulk actions we might have processed custom commands, so ack them
            if (ordered || bulkRequestBuilder.numberOfActions() == 0) {
              try {
                if (bulkRequestBuilder.numberOfActions() > 0) {
                  BulkResponse response = bulkRequestBuilder.execute().actionGet();
                  if (response.hasFailures()) {
                    // TODO write to exception queue?
                    logger.warn("failed to execute: " + response.buildFailureMessage());
                  }
                }
              } catch (Exception e) {
                logger.warn("failed to execute bulk", e);
              }
              for (Long deliveryTag : deliveryTags) {
                try {
                  channel.basicAck(deliveryTag, false);
                } catch (Exception e1) {
                  logger.warn("failed to ack [{}]", e1, deliveryTag);
                }
              }
            } else {
              if (bulkRequestBuilder.numberOfActions() > 0) {
                bulkRequestBuilder.execute(
                    new ActionListener<BulkResponse>() {
                      @Override
                      public void onResponse(BulkResponse response) {
                        if (response.hasFailures()) {
                          // TODO write to exception queue?
                          logger.warn("failed to execute: " + response.buildFailureMessage());
                        }
                        for (Long deliveryTag : deliveryTags) {
                          try {
                            channel.basicAck(deliveryTag, false);
                          } catch (Exception e1) {
                            logger.warn("failed to ack [{}]", e1, deliveryTag);
                          }
                        }
                      }

                      @Override
                      public void onFailure(Throwable e) {
                        logger.warn(
                            "failed to execute bulk for delivery tags [{}], not ack'ing",
                            e,
                            deliveryTags);
                      }
                    });
              }
            }
          }
        }
      }
      cleanup(0, "closing river");
    }
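The loop above consumes bulk-formatted messages; a hedged sketch of the producing side with the plain RabbitMQ Java client (broker address, exchange/routing-key names, and the assumed bulk body format are illustrative):

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class PublishSketch {
  public static void main(String[] args) throws Exception {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setHost("localhost"); // hypothetical broker
    // try-with-resources assumes an amqp-client version where Connection/Channel are AutoCloseable
    try (Connection connection = factory.newConnection();
        Channel channel = connection.createChannel()) {
      // body assumed to be in the bulk-API style that processBody(task, ...) parses
      String body =
          "{\"index\":{\"_index\":\"test\",\"_type\":\"doc\",\"_id\":\"1\"}}\n"
              + "{\"field\":\"value\"}\n";
      channel.basicPublish("elasticsearch", "elasticsearch", null, body.getBytes());
    }
  }
}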
  @Override
  public void recover(RecoveryStatus recoveryStatus) throws IndexShardGatewayRecoveryException {
    this.recoveryStatus = recoveryStatus;

    final ImmutableMap<String, BlobMetaData> blobs;
    try {
      blobs = blobContainer.listBlobs();
    } catch (IOException e) {
      throw new IndexShardGatewayRecoveryException(shardId, "Failed to list content of gateway", e);
    }

    List<CommitPoint> commitPointsList = Lists.newArrayList();
    boolean atLeastOneCommitPointExists = false;
    for (String name : blobs.keySet()) {
      if (name.startsWith("commit-")) {
        atLeastOneCommitPointExists = true;
        try {
          commitPointsList.add(CommitPoints.fromXContent(blobContainer.readBlobFully(name)));
        } catch (Exception e) {
          logger.warn("failed to read commit point [{}]", e, name);
        }
      }
    }
    if (atLeastOneCommitPointExists && commitPointsList.isEmpty()) {
      // no commit point could be loaded; bail so we won't corrupt the index (manual
      // intervention will be required)
      throw new IndexShardGatewayRecoveryException(
          shardId, "Commit points exists but none could be loaded", null);
    }
    CommitPoints commitPoints = new CommitPoints(commitPointsList);

    if (commitPoints.commits().isEmpty()) {
      // no commit points, clean the store just so we won't recover wrong files
      try {
        indexShard.store().deleteContent();
      } catch (IOException e) {
        logger.warn("failed to clean store before starting shard", e);
      }
      recoveryStatus.index().startTime(System.currentTimeMillis());
      recoveryStatus.index().time(System.currentTimeMillis() - recoveryStatus.index().startTime());
      recoveryStatus.translog().startTime(System.currentTimeMillis());
      recoveryStatus
          .translog()
          .time(System.currentTimeMillis() - recoveryStatus.translog().startTime());
      return;
    }

    for (CommitPoint commitPoint : commitPoints) {
      if (!commitPointExistsInBlobs(commitPoint, blobs)) {
        logger.warn(
            "listed commit_point [{}]/[{}], but not all files exists, ignoring",
            commitPoint.name(),
            commitPoint.version());
        continue;
      }
      try {
        recoveryStatus.index().startTime(System.currentTimeMillis());
        recoveryStatus.updateStage(RecoveryStatus.Stage.INDEX);
        recoverIndex(commitPoint, blobs);
        recoveryStatus
            .index()
            .time(System.currentTimeMillis() - recoveryStatus.index().startTime());

        recoveryStatus.translog().startTime(System.currentTimeMillis());
        recoveryStatus.updateStage(RecoveryStatus.Stage.TRANSLOG);
        recoverTranslog(commitPoint, blobs);
        recoveryStatus
            .translog()
            .time(System.currentTimeMillis() - recoveryStatus.translog().startTime());
        return;
      } catch (Exception e) {
        throw new IndexShardGatewayRecoveryException(
            shardId,
            "failed to recover commit_point ["
                + commitPoint.name()
                + "]/["
                + commitPoint.version()
                + "]",
            e);
      }
    }
    throw new IndexShardGatewayRecoveryException(
        shardId, "No commit point data is available in gateway", null);
  }
  private void recoverIndex(CommitPoint commitPoint, ImmutableMap<String, BlobMetaData> blobs)
      throws Exception {
    int numberOfFiles = 0;
    long totalSize = 0;
    int numberOfReusedFiles = 0;
    long reusedTotalSize = 0;

    List<CommitPoint.FileInfo> filesToRecover = Lists.newArrayList();
    for (CommitPoint.FileInfo fileInfo : commitPoint.indexFiles()) {
      String fileName = fileInfo.physicalName();
      StoreFileMetaData md = null;
      try {
        md = store.metaData(fileName);
      } catch (Exception e) {
        // no file
      }
      // we don't compute checksum for segments, so always recover them
      if (!fileName.startsWith("segments") && md != null && fileInfo.isSame(md)) {
        numberOfFiles++;
        totalSize += md.length();
        numberOfReusedFiles++;
        reusedTotalSize += md.length();
        if (logger.isTraceEnabled()) {
          logger.trace(
              "not_recovering [{}], exists in local store and is same", fileInfo.physicalName());
        }
      } else {
        if (logger.isTraceEnabled()) {
          if (md == null) {
            logger.trace(
                "recovering [{}], does not exists in local store", fileInfo.physicalName());
          } else {
            logger.trace(
                "recovering [{}], exists in local store but is different", fileInfo.physicalName());
          }
        }
        numberOfFiles++;
        totalSize += fileInfo.length();
        filesToRecover.add(fileInfo);
      }
    }

    recoveryStatus.index().files(numberOfFiles, totalSize, numberOfReusedFiles, reusedTotalSize);
    if (filesToRecover.isEmpty()) {
      logger.trace("no files to recover, all exists within the local store");
    }

    if (logger.isTraceEnabled()) {
      logger.trace(
          "recovering_files [{}] with total_size [{}], reusing_files [{}] with reused_size [{}]",
          numberOfFiles,
          new ByteSizeValue(totalSize),
          numberOfReusedFiles,
          new ByteSizeValue(reusedTotalSize));
    }

    final CountDownLatch latch = new CountDownLatch(filesToRecover.size());
    final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();

    for (final CommitPoint.FileInfo fileToRecover : filesToRecover) {
      recoverFile(fileToRecover, blobs, latch, failures);
    }

    try {
      latch.await();
    } catch (InterruptedException e) {
      throw new IndexShardGatewayRecoveryException(
          shardId, "Interrupted while recovering index", e);
    }

    if (!failures.isEmpty()) {
      throw new IndexShardGatewayRecoveryException(
          shardId, "Failed to recover index", failures.get(0));
    }

    // read the gateway data persisted
    long version = -1;
    try {
      if (IndexReader.indexExists(store.directory())) {
        version = IndexReader.getCurrentVersion(store.directory());
      }
    } catch (IOException e) {
      throw new IndexShardGatewayRecoveryException(
          shardId(), "Failed to fetch index version after copying it over", e);
    }
    recoveryStatus.index().updateVersion(version);

    // now, go over and clean files that are in the store but were not in the gateway
    try {
      for (String storeFile : store.directory().listAll()) {
        if (!commitPoint.containPhysicalIndexFile(storeFile)) {
          try {
            store.directory().deleteFile(storeFile);
          } catch (Exception e) {
            // ignore
          }
        }
      }
    } catch (Exception e) {
      // ignore
    }
  }
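The end of recoverIndex fans out recoverFile calls and joins on a latch while collecting failures; the same pattern in isolation (the executor and tasks are hypothetical):

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;

public class LatchJoinSketch {
  static void runAll(ExecutorService executor, List<Runnable> tasks) throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(tasks.size());
    final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<>();
    for (final Runnable task : tasks) {
      executor.execute(
          () -> {
            try {
              task.run();
            } catch (Throwable t) {
              failures.add(t); // collect instead of failing fast, as recoverFile does
            } finally {
              latch.countDown(); // always count down so await() cannot hang
            }
          });
    }
    latch.await(); // block until every task has finished, successfully or not
    if (!failures.isEmpty()) {
      throw new RuntimeException("task failed during fan-out", failures.get(0));
    }
  }
}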
Example #28
  public void updateSubstitutionType(final Topology topology) {
    if (!topology.getDelegateType().equalsIgnoreCase(TopologyTemplate.class.getSimpleName())) {
      return;
    }
    if (topology.getSubstitutionMapping() == null
        || topology.getSubstitutionMapping().getSubstitutionType() == null) {
      return;
    }
    IndexedNodeType nodeType =
        csarRepoSearchService.getElementInDependencies(
            IndexedNodeType.class,
            topology.getSubstitutionMapping().getSubstitutionType().getElementId(),
            topology.getDependencies());

    TopologyTemplate topologyTemplate =
        alienDAO.findById(TopologyTemplate.class, topology.getDelegateId());
    TopologyTemplateVersion topologyTemplateVersion =
        topologyTemplateVersionService.getByTopologyId(topology.getId());

    Set<CSARDependency> inheritanceDependencies = Sets.newHashSet();
    inheritanceDependencies.add(
        new CSARDependency(nodeType.getArchiveName(), nodeType.getArchiveVersion()));

    // search for a possibly existing csar to update its dependencies. The csar is not renamed
    // when the topology template is renamed (renaming a csar that is used in topologies is not
    // simple), so we look the csar up using the topology id.
    Csar csar = csarService.getTopologySubstitutionCsar(topology.getId());
    if (csar == null) {
      // the csar cannot be found, so we create it
      String archiveName = topologyTemplate.getName();
      String archiveVersion = topologyTemplateVersion.getVersion();
      csar = new Csar(archiveName, archiveVersion);
      csar.setSubstitutionTopologyId(topology.getId());
    }
    csar.setDependencies(inheritanceDependencies);
    csar.getDependencies().addAll(topology.getDependencies());
    csarService.save(csar);

    IndexedNodeType topologyTemplateType = new IndexedNodeType();
    topologyTemplateType.setArchiveName(csar.getName());
    topologyTemplateType.setArchiveVersion(csar.getVersion());
    topologyTemplateType.setElementId(csar.getName());
    topologyTemplateType.setDerivedFrom(Lists.newArrayList(nodeType.getElementId()));
    topologyTemplateType.setSubstitutionTopologyId(topology.getId());
    List<CapabilityDefinition> capabilities = Lists.newArrayList();
    topologyTemplateType.setCapabilities(capabilities);
    List<RequirementDefinition> requirements = Lists.newArrayList();
    topologyTemplateType.setRequirements(requirements);
    // inputs from topology become properties of type
    topologyTemplateType.setProperties(topology.getInputs());
    // output attributes become attributes for the type
    Map<String, IValue> attributes = Maps.newHashMap();
    topologyTemplateType.setAttributes(attributes);
    Map<String, Set<String>> outputAttributes = topology.getOutputAttributes();
    if (outputAttributes != null) {
      for (Entry<String, Set<String>> oae : outputAttributes.entrySet()) {
        String nodeName = oae.getKey();
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeName);
        IndexedNodeType nodeTemplateType =
            csarRepoSearchService.getRequiredElementInDependencies(
                IndexedNodeType.class, nodeTemplate.getType(), topology.getDependencies());
        for (String attributeName : oae.getValue()) {
          IValue ivalue = nodeTemplateType.getAttributes().get(attributeName);
          // we have an issue here: if several nodes have the same attribute name, there is a
          // conflict
          if (ivalue != null && !attributes.containsKey(attributeName)) {
            attributes.put(attributeName, ivalue);
          }
        }
      }
    }
    // output properties become attributes for the type
    Map<String, Set<String>> outputProperties = topology.getOutputProperties();
    if (outputProperties != null) {
      for (Entry<String, Set<String>> ope : outputProperties.entrySet()) {
        String nodeName = ope.getKey();
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeName);
        IndexedNodeType nodeTemplateType =
            csarRepoSearchService.getRequiredElementInDependencies(
                IndexedNodeType.class, nodeTemplate.getType(), topology.getDependencies());
        for (String propertyName : ope.getValue()) {
          PropertyDefinition pd = nodeTemplateType.getProperties().get(propertyName);
          // we have an issue here: if several nodes have the same attribute name, there is a
          // conflict
          if (pd != null && !attributes.containsKey(propertyName)) {
            attributes.put(propertyName, pd);
          }
        }
      }
    }
    // output capabilities properties also become attributes for the type
    Map<String, Map<String, Set<String>>> outputCapabilityProperties =
        topology.getOutputCapabilityProperties();
    if (outputCapabilityProperties != null) {
      for (Entry<String, Map<String, Set<String>>> ocpe : outputCapabilityProperties.entrySet()) {
        String nodeName = ocpe.getKey();
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeName);
        for (Entry<String, Set<String>> cpe : ocpe.getValue().entrySet()) {
          String capabilityName = cpe.getKey();
          String capabilityTypeName = nodeTemplate.getCapabilities().get(capabilityName).getType();
          IndexedCapabilityType capabilityType =
              csarRepoSearchService.getRequiredElementInDependencies(
                  IndexedCapabilityType.class, capabilityTypeName, topology.getDependencies());
          for (String propertyName : cpe.getValue()) {
            PropertyDefinition pd = capabilityType.getProperties().get(propertyName);
            // we have an issue here: if several nodes have the same attribute name, there is a
            // conflict
            if (pd != null && !attributes.containsKey(propertyName)) {
              attributes.put(propertyName, pd);
            }
          }
        }
      }
    }

    // capabilities substitution
    if (topology.getSubstitutionMapping().getCapabilities() != null) {
      for (Entry<String, SubstitutionTarget> e :
          topology.getSubstitutionMapping().getCapabilities().entrySet()) {
        String key = e.getKey();
        String nodeName = e.getValue().getNodeTemplateName();
        String capabilityName = e.getValue().getTargetId();
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeName);
        IndexedNodeType nodeTemplateType =
            csarRepoSearchService.getRequiredElementInDependencies(
                IndexedNodeType.class, nodeTemplate.getType(), topology.getDependencies());
        CapabilityDefinition capabilityDefinition =
            IndexedModelUtils.getCapabilityDefinitionById(
                nodeTemplateType.getCapabilities(), capabilityName);
        capabilityDefinition.setId(key);
        topologyTemplateType.getCapabilities().add(capabilityDefinition);
      }
    }
    // requirement substitution
    if (topology.getSubstitutionMapping().getRequirements() != null) {
      for (Entry<String, SubstitutionTarget> e :
          topology.getSubstitutionMapping().getRequirements().entrySet()) {
        String key = e.getKey();
        String nodeName = e.getValue().getNodeTemplateName();
        String requirementName = e.getValue().getTargetId();
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeName);
        IndexedNodeType nodeTemplateType =
            csarRepoSearchService.getRequiredElementInDependencies(
                IndexedNodeType.class, nodeTemplate.getType(), topology.getDependencies());
        RequirementDefinition requirementDefinition =
            IndexedModelUtils.getRequirementDefinitionById(
                nodeTemplateType.getRequirements(), requirementName);
        requirementDefinition.setId(key);
        topologyTemplateType.getRequirements().add(requirementDefinition);
      }
    }
    indexerService.indexInheritableElement(
        csar.getName(), csar.getVersion(), topologyTemplateType, inheritanceDependencies);
  }
/** @author kimchy (shay.banon) */
public class RangeScriptFacetBuilder extends AbstractFacetBuilder {

  private String lang;
  private String keyScript;
  private String valueScript;
  private Map<String, Object> params;
  private List<Entry> entries = Lists.newArrayList();

  public RangeScriptFacetBuilder(String name) {
    super(name);
  }

  /** The language of the script. */
  public RangeScriptFacetBuilder lang(String lang) {
    this.lang = lang;
    return this;
  }

  public RangeScriptFacetBuilder keyScript(String keyScript) {
    this.keyScript = keyScript;
    return this;
  }

  public RangeScriptFacetBuilder valueScript(String valueScript) {
    this.valueScript = valueScript;
    return this;
  }

  public RangeScriptFacetBuilder param(String name, Object value) {
    if (params == null) {
      params = Maps.newHashMap();
    }
    params.put(name, value);
    return this;
  }

  /**
   * Adds a range entry with explicit from and to.
   *
   * @param from The from range limit
   * @param to The to range limit
   */
  public RangeScriptFacetBuilder addRange(double from, double to) {
    entries.add(new Entry(from, to));
    return this;
  }

  /**
   * Adds a range entry with explicit from and unbounded to.
   *
   * @param from the from range limit, to is unbounded.
   */
  public RangeScriptFacetBuilder addUnboundedTo(double from) {
    entries.add(new Entry(from, Double.POSITIVE_INFINITY));
    return this;
  }

  /**
   * Adds a range entry with explicit to and unbounded from.
   *
   * @param to the to range limit, from is unbounded.
   */
  public RangeScriptFacetBuilder addUnboundedFrom(double to) {
    entries.add(new Entry(Double.NEGATIVE_INFINITY, to));
    return this;
  }

  /**
   * Should the facet run in global mode (not bounded by the search query) or not (bounded by the
   * search query). Defaults to <tt>false</tt>.
   */
  public RangeScriptFacetBuilder global(boolean global) {
    super.global(global);
    return this;
  }

  /** Marks the facet to run in a specific scope. */
  @Override
  public RangeScriptFacetBuilder scope(String scope) {
    super.scope(scope);
    return this;
  }

  public RangeScriptFacetBuilder facetFilter(FilterBuilder filter) {
    this.facetFilter = filter;
    return this;
  }

  @Override
  public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    if (keyScript == null) {
      throw new SearchSourceBuilderException(
          "key_script must be set on range script facet for facet [" + name + "]");
    }
    if (valueScript == null) {
      throw new SearchSourceBuilderException(
          "value_script must be set on range script facet for facet [" + name + "]");
    }

    if (entries.isEmpty()) {
      throw new SearchSourceBuilderException(
          "at least one range must be defined for range facet [" + name + "]");
    }

    builder.startObject(name);

    builder.startObject(RangeFacet.TYPE);
    builder.field("key_script", keyScript);
    builder.field("value_script", valueScript);
    if (lang != null) {
      builder.field("lang", lang);
    }

    builder.startArray("ranges");
    for (Entry entry : entries) {
      builder.startObject();
      if (!Double.isInfinite(entry.from)) {
        builder.field("from", entry.from);
      }
      if (!Double.isInfinite(entry.to)) {
        builder.field("to", entry.to);
      }
      builder.endObject();
    }
    builder.endArray();

    if (this.params != null) {
      builder.field("params", this.params);
    }
    builder.endObject();

    addFilterFacetAndGlobal(builder, params);

    builder.endObject();
    return builder;
  }

  private static class Entry {
    final double from;
    final double to;

    private Entry(double from, double to) {
      this.from = from;
      this.to = to;
    }
  }
}
Example #30
 /**
  * Helper for loading all plugins into the class path and building a plugin app map.
  *
  * @param pluginsFile the base folder for the plugins
  * @return a map of plugin apps keyed by their canonical form
  */
 private Map<String, PluginApp> loadPlugins(File pluginsFile) {
   Map<String, PluginApp> map = newHashMap();
   // traverse all legacy plugins in the plugins folder
   File[] pluginsFiles = pluginsFile.listFiles();
   if (pluginsFiles == null) {
     logger.warn("no files exist in {}", pluginsFile.getAbsolutePath());
     return map;
   }
   for (File pluginFile : pluginsFiles) {
     if (pluginFile.isDirectory()) {
       try {
         // add the root
         classLoader.addUri(pluginFile.toURI());
         // gather files to add
         List<File> libFiles = Lists.newArrayList();
         if (pluginFile.listFiles() != null) {
           libFiles.addAll(Arrays.asList(pluginFile.listFiles()));
         }
         File libLocation = new File(pluginFile, "lib");
         if (libLocation.exists()
             && libLocation.isDirectory()
             && libLocation.listFiles() != null) {
           libFiles.addAll(Arrays.asList(libLocation.listFiles()));
         }
         // if there are jars in it, add it as well
         for (File libFile : libFiles) {
           if (!(libFile.getName().endsWith(".jar") || libFile.getName().endsWith(".zip"))) {
             continue;
           }
           classLoader.addUri(libFile.toURI());
         }
       } catch (Exception e) {
         logger.warn("failed to add plugin [{}]", pluginFile, e);
       }
     }
   }
   // now, everything is on the class path, build the plugin app map
   Enumeration<URL> propUrls = null;
   try {
     propUrls = classLoader.getResources(DEFAULT_RESOURCE);
   } catch (IOException e1) {
     logger.warn("failed to find resources on classpath", e1);
     return map;
   }
   while (propUrls.hasMoreElements()) {
     URL propUrl = propUrls.nextElement();
     Properties appProps = new Properties();
     InputStream is = null;
     try {
       // skip jar URLs, they are artifact apps
       is = propUrl.openStream();
       appProps.load(is);
       String appClassName = appProps.getProperty("plugin");
       Plugin plugin = instantiatePluginClass(appClassName);
       if (isArtifactPlugin(plugin)) {
         logger.debug("plugin at [{}] is already present as artifact app, skipping", propUrl);
       } else {
         PluginApp app = new PluginApp(PluginApp.GROUP_ID, propUrl, plugin);
         map.put(app.getCanonicalForm(), app);
       }
     } catch (Exception e) {
       logger.warn(
           "failed to load plugin from [{}], reason: {}", propUrl, ExceptionFormatter.format(e));
     } finally {
       if (is != null) {
         try {
           is.close();
         } catch (IOException e) {
           // ignore
         }
       }
     }
   }
   return map;
 }
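loadPlugins expects each plugin to ship a properties resource (whatever name DEFAULT_RESOURCE holds) whose plugin key points at the class to instantiate; a minimal sketch of such a descriptor and the call (the path and class name are hypothetical):

// descriptor file, found on the classpath under the name held by DEFAULT_RESOURCE:
//   plugin=org.example.MyPlugin
//
// then at startup, with the plugins directory as the base folder:
Map<String, PluginApp> apps = loadPlugins(new File("plugins")); // hypothetical path
for (Map.Entry<String, PluginApp> entry : apps.entrySet()) {
  System.out.println("loaded plugin app: " + entry.getKey());
}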