@Test
  public void test() {

    Props props = new Props();

    TimeBasedRetentionPolicy policy = new TimeBasedRetentionPolicy(props);

    DateTimeUtils.setCurrentMillisFixed(new DateTime(2015, 6, 2, 18, 0, 0, 0).getMillis());

    TimestampedDatasetVersion datasetVersion1 =
        new TimestampedDatasetVersion(new DateTime(2015, 6, 2, 10, 0, 0, 0), new Path("test"));
    TimestampedDatasetVersion datasetVersion2 =
        new TimestampedDatasetVersion(new DateTime(2015, 6, 1, 10, 0, 0, 0), new Path("test"));

    Assert.assertEquals(policy.versionClass(), TimestampedDatasetVersion.class);

    List<TimestampedDatasetVersion> versions = Lists.newArrayList();
    versions.add(datasetVersion1);
    versions.add(datasetVersion2);
    List<TimestampedDatasetVersion> deletableVersions =
        Lists.newArrayList(policy.listDeletableVersions(versions));
    Assert.assertEquals(deletableVersions.size(), 1);
    Assert.assertEquals(
        deletableVersions.get(0).getDateTime(), new DateTime(2015, 6, 1, 10, 0, 0, 0));

    DateTimeUtils.setCurrentMillisSystem();
  }
  @Override
  public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException {
    // Prel child = (Prel) this.getChild();

    final List<String> childFields = getChild().getRowType().getFieldNames();
    final List<String> fields = getRowType().getFieldNames();
    List<NamedExpression> keys = Lists.newArrayList();
    List<NamedExpression> exprs = Lists.newArrayList();

    for (int group : BitSets.toIter(groupSet)) {
      FieldReference fr = new FieldReference(childFields.get(group), ExpressionPosition.UNKNOWN);
      keys.add(new NamedExpression(fr, fr));
    }

    for (Ord<AggregateCall> aggCall : Ord.zip(aggCalls)) {
      FieldReference ref = new FieldReference(fields.get(groupSet.cardinality() + aggCall.i));
      LogicalExpression expr = toDrill(aggCall.e, childFields, new DrillParseContext());
      exprs.add(new NamedExpression(expr, ref));
    }

    Prel child = (Prel) this.getChild();
    StreamingAggregate g =
        new StreamingAggregate(
            child.getPhysicalOperator(creator),
            keys.toArray(new NamedExpression[keys.size()]),
            exprs.toArray(new NamedExpression[exprs.size()]),
            1.0f);

    return g;
  }
  @Test
  public void testExportReadsets() throws Exception {
    ExportReadsCommand command = new ExportReadsCommand();
    command.setDataStoreFactory(new MemoryDataStoreFactory());

    command.readGroupSetIds = Lists.newArrayList("r1", "r2");
    command.projectNumber = 3L;
    command.exportUri = "exportme";

    // Get the readsets
    Mockito.when(readsets.get("r1")).thenReturn(readsetGet);
    Mockito.when(readsets.get("r2")).thenReturn(readsetGet);
    Mockito.when(readsetGet.execute())
        .thenReturn(new ReadGroupSet().setName("name1"), new ReadGroupSet().setName("name2"));

    // Export them
    Mockito.when(readsets.export(Mockito.any(ExportReadGroupSetsRequest.class)))
        .thenReturn(readsetExport);
    Mockito.when(readsetExport.execute())
        .thenReturn(new ExportReadGroupSetsResponse().setJobId("8675309"));

    // Get the job
    Mockito.when(jobs.get("8675309")).thenReturn(jobGet);
    Mockito.when(jobGet.execute()).thenReturn(new Job().setDetailedStatus("description1"));

    command.handleRequest(genomics);

    String output = outContent.toString();
    assertTrue(output, output.contains("Exporting read group sets name1,name2"));
    assertTrue(output, output.contains("Export job:"));
    assertTrue(output, output.contains("description1"));
  }
Example #4
 public List<InputState> getRunningInputs() {
   List<InputState> runningInputs = Lists.newArrayList();
   for (InputState inputState : inputStates) {
     if (inputState.getState() == InputState.InputStateType.RUNNING) {
       runningInputs.add(inputState);
     }
   }
   return runningInputs;
 }
  @GET
  @Timed
  @ApiOperation(value = "Get a list of all stream rules")
  @Produces(MediaType.APPLICATION_JSON)
  public String get(
      @ApiParam(
              title = "streamid",
              description = "The id of the stream whose stream rules we want.",
              required = true)
          @PathParam("streamid")
          String streamid) {
    List<Map<String, Object>> streamRules = Lists.newArrayList();
    checkPermission(RestPermissions.STREAMS_READ, streamid);

    final Stream stream;
    try {
      stream = streamService.load(streamid);
    } catch (NotFoundException e) {
      throw new WebApplicationException(404);
    }

    try {
      for (StreamRule streamRule : streamRuleService.loadForStream(stream)) {
        streamRules.add(streamRule.asMap());
      }
    } catch (org.graylog2.database.NotFoundException e) {
      throw new WebApplicationException(404);
    }

    Map<String, Object> result = Maps.newHashMap();
    result.put("total", streamRules.size());
    result.put("stream_rules", streamRules);

    return json(result);
  }
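For reference, a successful response from this resource has the envelope shown below; the contents of each stream rule map come from StreamRule#asMap() and are not spelled out here.

  {
    "total": 2,
    "stream_rules": [ { ... }, { ... } ]
  }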
 public AnnotationBasedTagProvider(EnvironmentVariables vars) {
   super(vars);
   configuration = new SystemPropertiesConfiguration(environmentVariables);
   rootPackage = THUCYDIDES_TEST_ROOT.from(environmentVariables, rootDirectory);
   persister = new RequirementPersister(configuration.getOutputDirectory(), rootPackage);
   leafRequirements = Lists.newArrayList();
 }
 private List<Requirement> findLeafRequirementsIn(Map<String, Requirement> requirementsByPath) {
   List<Requirement> leafRequirements = Lists.newArrayList();
   for (String path : requirementsByPath.keySet()) {
     if (!longerPathExists(path, requirementsByPath.keySet())) {
       leafRequirements.add(requirementsByPath.get(path));
     }
   }
   return leafRequirements;
 }
Example #8
 @Override
 public PhysicalOperator visitOrder(Order order, Object value) throws OptimizerException {
   PhysicalOperator input = order.getInput().accept(this, value);
   List<OrderDef> ods = Lists.newArrayList();
   for (Ordering o : order.getOrderings()) {
     ods.add(OrderDef.create(o));
   }
   return new SelectionVectorRemover(new Sort(input, ods, false));
 }
Example #9
 public Statistics(long totalLength, long sliceLength) {
   this.totalLength = totalLength;
   this.sliceLength = sliceLength;
   this.timestamp = System.currentTimeMillis() + sliceLength;
   countList = Lists.newArrayList();
   while ((totalLength -= sliceLength) >= 0) {
     countList.add(0);
   }
 }
Example #10
  @Test
  public void shouldConvertToDtos() {
    List<UserDto> models = sut.toDtos(Lists.newArrayList(TestDataModels.user()));

    assertNotNull(models);
    assertTrue(models.size() == 1);

    UserDto dto = models.get(0);
    checkDto(dto);
  }
Example #11
 @Test
 public void testCreateInjectionJavaKompile() {
   String[] argv = new String[] {"foo.k", "--backend", "java"};
   List<Module> modules = Lists.newArrayList(KompileFrontEnd.getModules());
   modules.addAll(new JavaBackendKModule().getKompileModules());
   Injector injector =
       Guice.createInjector(
           Modules.override(modules).with(new TestModule(), new BaseTestCase.TestModule()));
   prepInjector(injector, "-kompile", argv);
   assertTrue(injector.getInstance(FrontEnd.class) instanceof KompileFrontEnd);
 }
  @Test
  public void testExportReadsets_badIds() throws Exception {
    ExportReadsCommand command = new ExportReadsCommand();
    command.readGroupSetIds = Lists.newArrayList("bad");

    // Get the readsets
    Mockito.when(readsets.get(Mockito.anyString())).thenThrow(GoogleJsonResponseException.class);
    command.handleRequest(genomics);

    String output = outContent.toString();
    assertTrue(output, output.contains("The read group set ID bad won't work"));
  }
 private List<Requirement> buildRequirementsTree(
     SortedMap<String, Requirement> requirementsByPath,
     Map<Requirement, String> requirementPaths) {
   List<Requirement> requirementsTree = Lists.newArrayList();
   for (Requirement requirement : requirementsByPath.values()) {
     if (isRoot(requirementPaths.get(requirement))) {
       List<Requirement> children =
           findDirectChildrenFor(requirement, requirementsByPath, requirementPaths);
       requirementsTree.add(requirement.withChildren(children));
     }
   }
   return requirementsTree;
 }
Example #14
  @Test
  public void testCreateInjectionSimulation() {

    context.kompileOptions.backend = "java";
    String[] argv = new String[] {"foo.c", "--simulation", "bar.c"};
    List<Module> definitionSpecificModules =
        Lists.newArrayList(KRunFrontEnd.getDefinitionSpecificModules());
    definitionSpecificModules.addAll(new JavaBackendKModule().getDefinitionSpecificKRunModules());
    Module definitionSpecificModuleOverride =
        Modules.override(definitionSpecificModules).with(new TestModule());
    List<Module> modules =
        Lists.newArrayList(
            KRunFrontEnd.getModules(ImmutableList.of(definitionSpecificModuleOverride)));
    modules.addAll(
        new JavaBackendKModule()
            .getKRunModules(ImmutableList.of(definitionSpecificModuleOverride)));
    Injector injector =
        Guice.createInjector(Modules.override(modules).with(new BaseTestCase.TestModule()));
    prepInjector(injector, "-krun", argv);
    assertTrue(injector.getInstance(FrontEnd.class) instanceof KRunFrontEnd);
    injector.getInstance(Key.get(Simulator.class, Main.class));
  }
Example #15
    @Override
    public PhysicalOperator visitCollapsingAggregate(CollapsingAggregate agg, Object value)
        throws OptimizerException {

      if (!(agg.getInput() instanceof Segment)) {
        throw new OptimizerException(
            String.format(
                "Currently, Drill only supports CollapsingAggregate immediately preceded by a Segment.  The input of this operator is %s.",
                agg.getInput()));
      }
      Segment segment = (Segment) agg.getInput();

      if (!agg.getWithin().equals(segment.getName())) {
        throw new OptimizerException(
            String.format(
                "Currently, Drill only supports CollapsingAggregate immediately preceded by a Segment where the CollapsingAggregate works on the defined segments.  In this case, the segment has been defined based on the name %s but the collapsing aggregate is working within the field %s.",
                segment.getName(), agg.getWithin()));
      }

      // A collapsing aggregate is currently implemented as a sort followed by a streaming
      // aggregate.
      List<OrderDef> orderDefs = Lists.newArrayList();

      List<NamedExpression> keys = Lists.newArrayList();
      for (LogicalExpression e : segment.getExprs()) {
        if (!(e instanceof SchemaPath))
          throw new OptimizerException(
              "The basic optimizer doesn't currently support collapsing aggregate where the segment value is something other than a SchemaPath.");
        keys.add(new NamedExpression(e, new FieldReference((SchemaPath) e)));
        orderDefs.add(new OrderDef(Direction.ASC, e));
      }
      Sort sort = new Sort(segment.getInput().accept(this, value), orderDefs, false);

      StreamingAggregate sa =
          new StreamingAggregate(
              sort, keys.toArray(new NamedExpression[keys.size()]), agg.getAggregations(), 1.0f);
      return sa;
    }
  private LogicalExpression toDrill(
      AggregateCall call, List<String> fn, DrillParseContext pContext) {
    List<LogicalExpression> args = Lists.newArrayList();
    for (Integer i : call.getArgList()) {
      args.add(new FieldReference(fn.get(i)));
    }

    // for count(1).
    if (args.isEmpty()) {
      args.add(new ValueExpressions.LongExpression(1L));
    }
    LogicalExpression expr =
        new FunctionCall(
            call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN);
    return expr;
  }
Example #17
    @Override
    public PhysicalOperator visitJoin(Join join, Object value) throws OptimizerException {
      PhysicalOperator leftOp = join.getLeft().accept(this, value);
      List<OrderDef> leftOrderDefs = Lists.newArrayList();
      for (JoinCondition jc : join.getConditions()) {
        leftOrderDefs.add(new OrderDef(Direction.ASC, jc.getLeft()));
      }
      leftOp = new Sort(leftOp, leftOrderDefs, false);
      leftOp = new SelectionVectorRemover(leftOp);

      PhysicalOperator rightOp = join.getRight().accept(this, value);
      List<OrderDef> rightOrderDefs = Lists.newArrayList();
      for (JoinCondition jc : join.getConditions()) {
        rightOrderDefs.add(new OrderDef(Direction.ASC, jc.getRight()));
      }
      rightOp = new Sort(rightOp, rightOrderDefs, false);
      rightOp = new SelectionVectorRemover(rightOp);

      MergeJoinPOP mjp =
          new MergeJoinPOP(
              leftOp, rightOp, Arrays.asList(join.getConditions()), join.getJointType());
      return new SelectionVectorRemover(mjp);
    }
Example #18
  public static List<Parameterized> parseArg(Object arg) {
    List<Parameterized> result = Lists.newArrayList();

    Class<? extends Object> rootClass = arg.getClass();

    // get the list of types that are extended or implemented by the root class
    // and all of its parent types
    Set<Class<?>> types = describeClassTree(rootClass);

    // analyze each type
    for (Class<?> cls : types) {

      // check fields
      for (Field f : cls.getDeclaredFields()) {
        Annotation annotation = f.getAnnotation(Parameter.class);
        Annotation delegateAnnotation = f.getAnnotation(ParametersDelegate.class);
        Annotation dynamicParameter = f.getAnnotation(DynamicParameter.class);
        if (annotation != null) {
          result.add(
              new Parameterized(new WrappedParameter((Parameter) annotation), null, f, null));
        } else if (dynamicParameter != null) {
          result.add(
              new Parameterized(
                  new WrappedParameter((DynamicParameter) dynamicParameter), null, f, null));
        } else if (delegateAnnotation != null) {
          result.add(new Parameterized(null, (ParametersDelegate) delegateAnnotation, f, null));
        }
      }

      // check methods
      for (Method m : cls.getDeclaredMethods()) {
        m.setAccessible(true);
        Annotation annotation = m.getAnnotation(Parameter.class);
        Annotation delegateAnnotation = m.getAnnotation(ParametersDelegate.class);
        Annotation dynamicParameter = m.getAnnotation(DynamicParameter.class);
        if (annotation != null) {
          result.add(
              new Parameterized(new WrappedParameter((Parameter) annotation), null, null, m));
        } else if (dynamicParameter != null) {
          result.add(
              new Parameterized(
                  new WrappedParameter((DynamicParameter) dynamicParameter), null, null, m));
        } else if (delegateAnnotation != null) {
          result.add(new Parameterized(null, (ParametersDelegate) delegateAnnotation, null, m));
        }
      }
    }

    return result;
  }
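A minimal sketch of how parseArg could be exercised. The AnnotatedOptions class is hypothetical; it only illustrates the annotation kinds the scan above recognizes.

  // Hypothetical options holder, not part of the original code.
  class AnnotatedOptions {
    @Parameter(names = "-verbose", description = "Enable verbose output")
    boolean verbose = false;

    @DynamicParameter(names = "-D", description = "Arbitrary key=value settings")
    Map<String, String> settings = new HashMap<>();
  }

  // One Parameterized entry is produced per annotated field or method, across the
  // class and all of its supertypes (collected via describeClassTree).
  List<Parameterized> parsed = parseArg(new AnnotatedOptions());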
  public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name)
      throws ExecutionSetupException {
    try {
      this.config = config;
      this.context = context;

      Configuration fsConf = new Configuration();
      fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
      fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
      fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
      this.fs = FileSystemCreator.getFileSystem(context.getConfig(), fsConf);
      this.formatsByName = FormatCreator.getFormatPlugins(context, fs, config);
      List<FormatMatcher> matchers = Lists.newArrayList();
      formatPluginsByConfig = Maps.newHashMap();
      for (FormatPlugin p : formatsByName.values()) {
        matchers.add(p.getMatcher());
        formatPluginsByConfig.put(p.getConfig(), p);
      }

      List<WorkspaceSchemaFactory> factories = null;
      if (config.workspaces == null || config.workspaces.isEmpty()) {
        factories =
            Collections.singletonList(
                new WorkspaceSchemaFactory(this, "default", name, fs, "/", matchers));
      } else {
        factories = Lists.newArrayList();
        for (Map.Entry<String, String> space : config.workspaces.entrySet()) {
          factories.add(
              new WorkspaceSchemaFactory(
                  this, space.getKey(), name, fs, space.getValue(), matchers));
        }
      }
      this.schemaFactory = new FileSystemSchemaFactory(name, factories);
    } catch (IOException e) {
      throw new ExecutionSetupException("Failure setting up file system plugin.", e);
    }
  }
  public static class NodeCollector extends QueryModelVisitorBase<RuntimeException> {

    List<QueryModelNode> qNodes = Lists.newArrayList();

    public List<QueryModelNode> getNodes() {
      return qNodes;
    }

    @Override
    public void meetNode(QueryModelNode node) {
      if (node instanceof StatementPattern || node instanceof ExternalTupleSet) {
        qNodes.add(node);
      }
      super.meetNode(node);
    }
  }
  /**
   * Export the master branch of the named agencies to GTFS. The boolean parameter is ignored; it
   * exists only to give this constructor a different erasure from the other one. See the usage
   * sketch after this constructor.
   */
  public ProcessGtfsSnapshotExport(
      Collection<String> agencies,
      File output,
      LocalDate startDate,
      LocalDate endDate,
      boolean isagency) {
    this.snapshots = Lists.newArrayListWithCapacity(agencies.size());

    for (String agency : agencies) {
      // leaving version null will cause master to be used
      this.snapshots.add(new Tuple2<String, Integer>(agency, null));
    }

    this.output = output;
    this.startDate = startDate;
    this.endDate = endDate;
  }
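A usage sketch for this constructor. The agency IDs, output file, and dates below are placeholders, and java.time.LocalDate is an assumption; the project may use a different LocalDate type.

  // Hypothetical values; only the constructor shape is taken from the code above.
  Collection<String> agencies = Arrays.asList("agency-1", "agency-2");
  ProcessGtfsSnapshotExport exportJob =
      new ProcessGtfsSnapshotExport(
          agencies,
          new File("gtfs-export.zip"),
          LocalDate.of(2015, 1, 1),
          LocalDate.of(2015, 12, 31),
          true); // the boolean only selects this overload; its value is ignored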
  @Test
  public void testSingleIndex() throws Exception {

    SPARQLParser parser = new SPARQLParser();

    ParsedQuery pq1 = parser.parseQuery(q15, null);
    ParsedQuery pq2 = parser.parseQuery(q7, null);
    ParsedQuery pq3 = parser.parseQuery(q8, null);
    ParsedQuery pq4 = parser.parseQuery(q9, null);

    SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr());
    SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr());
    SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr());

    List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();

    list.add(extTup1);

    List<QueryModelNode> optTupNodes = Lists.newArrayList();
    optTupNodes.add(extTup2);
    optTupNodes.add(extTup3);

    IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list);

    Iterator<TupleExpr> plans = new TupleExecutionPlanGenerator().getPlans(iep.getIndexedTuples());

    IndexPlanValidator ipv = new IndexPlanValidator(false);

    Iterator<TupleExpr> validPlans = ipv.getValidTuples(plans);

    ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr());

    TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .1, 1, 0, 0);

    NodeCollector nc = new NodeCollector();
    optimalTup.visit(nc);

    List<QueryModelNode> qNodes = nc.getNodes();

    Assert.assertEquals(qNodes.size(), optTupNodes.size());
    for (QueryModelNode node : qNodes) {
      Assert.assertTrue(optTupNodes.contains(node));
    }
  }
  @Test
  public void testCost3() throws Exception {

    String q1 =
        "" //
            + "SELECT ?f ?m ?d ?e ?l ?c " //
            + "{" //
            + "  Filter(?f > \"5\")." //
            + "  Filter(?e > \"6\")." //
            + "  ?f a ?m ." //
            + "  ?e a ?l ." //
            + "  ?d <uri:talksTo> ?f . " //
            + "  ?c <uri:talksTo> ?e . " //
            + "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ." //
            + "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ." //
            + "}"; //

    String q2 =
        "" //
            + "SELECT ?s ?t ?u " //
            + "{" //
            + "  ?s a ?t ." //
            + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ." //
            + "  ?u <uri:talksTo> ?s . " //
            + "}"; //

    SPARQLParser parser = new SPARQLParser();

    ParsedQuery pq1 = parser.parseQuery(q1, null);
    ParsedQuery pq2 = parser.parseQuery(q2, null);

    SimpleExternalTupleSet sep = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr());
    List<ExternalTupleSet> eList = Lists.newArrayList();
    eList.add(sep);

    final TupleExpr te = pq1.getTupleExpr().clone();
    final PCJOptimizer pcj = new PCJOptimizer(eList, false);
    pcj.optimize(te, null, null);

    ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr());
    double cost = tps.getCost(te, .4, .3, .3);
    Assert.assertEquals(.575, cost, .0001);
  }
  private void generateReportsFor(Collection<ReportingTask> reportingTasks) throws IOException {
    stopwatch.start();

    try {
      Reporter.generateReportsFor(reportingTasks);

      final List<Callable<Void>> partitions = Lists.newArrayList();
      for (ReportingTask reportingTask : reportingTasks) {
        partitions.add(new ReportExecutor(reportingTask));
      }

      final ExecutorService executorPool =
          Executors.newFixedThreadPool(NumberOfThreads.forIOOperations());
      for (Future<Void> executedTask : executorPool.invokeAll(partitions)) {
        executedTask.get();
      }
      executorPool.shutdown();
    } catch (Exception e) {
      LOGGER.error("Report generation failed", e);
    }

    LOGGER.debug("Test outcome reports generated in {} ms", stopwatch.stop());
  }
Example #25
  @Override
  public Num calc(final AbstractCalculator abstractCalculator, final Num... numbers)
      throws Exception {

    final List<Num> sortedNumbers = Lists.newArrayList();

    Collections.addAll(sortedNumbers, numbers);
    Collections.sort(sortedNumbers);

    final int indexHalfSize = sortedNumbers.size() / 2;

    if (evenSize(sortedNumbers)) {
      final Num firstHalfValue = sortedNumbers.get(indexHalfSize - 1);
      final Num secondHalfValue = sortedNumbers.get(indexHalfSize);

      final BigDecimal result =
          (firstHalfValue.toBigDecimal().add(secondHalfValue.toBigDecimal()))
              .divide(BigDecimal.valueOf(2));

      return new Num(result);
    } else {
      return new Num(sortedNumbers.get(indexHalfSize));
    }
  }
  private List<Requirement> findDirectChildrenFor(
      Requirement requirement,
      SortedMap<String, Requirement> requirementsByPath,
      Map<Requirement, String> requirementPaths) {

    List<Requirement> immediateChildren = Lists.newArrayList();
    if (!isLeaf(requirement)) {
      String requirementPath = requirementPaths.get(requirement);
      for (String path : requirementsByPath.keySet()) {
        Requirement childRequirement = requirementsByPath.get(path);

        if ((childRequirement != requirement) && (isImmediateChild(requirementPath, path))) {
          if (isLeaf(childRequirement)) {
            immediateChildren.add(childRequirement);
          } else {
            immediateChildren.add(
                childRequirement.withChildren(
                    findDirectChildrenFor(childRequirement, requirementsByPath, requirementPaths)));
          }
        }
      }
    }
    return immediateChildren;
  }
Example #27
@JsonTypeName("solr-scan")
public class SolrGroupScan extends AbstractGroupScan {
  protected SolrStoragePlugin solrPlugin;
  protected SolrStoragePluginConfig solrPluginConfig;
  protected SolrScanSpec solrScanSpec;
  protected List<SolrScanSpec> scanList = Lists.newArrayList();
  protected List<SchemaPath> columns;

  static final Logger logger = LoggerFactory.getLogger(SolrGroupScan.class);

  public SolrGroupScan(SolrGroupScan that) {
    super(that);
    this.solrPlugin = that.solrPlugin;
    this.solrPluginConfig = that.solrPlugin.getSolrStorageConfig();
    this.solrScanSpec = that.solrScanSpec;
    this.columns = that.columns;
    this.scanList.add(this.solrScanSpec);
  }

  public SolrGroupScan(
      String userName,
      SolrStoragePlugin solrStoragePlugin,
      SolrScanSpec scanSpec,
      List<SchemaPath> columns) {
    super(userName);
    this.solrPlugin = solrStoragePlugin;
    this.solrPluginConfig = solrStoragePlugin.getSolrStorageConfig();
    this.solrScanSpec = scanSpec;
    this.columns = columns;
    this.scanList.add(this.solrScanSpec);
  }

  @Override
  public GroupScan clone(List<SchemaPath> columns) {
    SolrGroupScan clone = new SolrGroupScan(this);
    clone.columns = columns;
    return clone;
  }

  @Override
  public void applyAssignments(List<DrillbitEndpoint> endpoints)
      throws PhysicalOperatorSetupException {
    // TODO write the distribution logic
  }

  public void setColumns(List<SchemaPath> columns) {
    this.columns = columns;
  }

  @Override
  public SubScan getSpecificScan(int minorFragmentId) throws ExecutionSetupException {
    // TODO Auto-generated method stub
    return new SolrSubScan(this);
  }

  @Override
  public int getMaxParallelizationWidth() {
    // TODO Auto-generated method stub
    return -1;
  }

  @Override
  public String getDigest() {
    // TODO Auto-generated method stub
    return toString();
  }

  @Override
  public ScanStats getScanStats() {
    // TODO Auto-generated method stub
    return ScanStats.TRIVIAL_TABLE;
  }

  @JsonIgnore
  @Override
  public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children)
      throws ExecutionSetupException {
    // TODO Auto-generated method stub
    Preconditions.checkArgument(children.isEmpty());
    return new SolrGroupScan(this);
  }

  @JsonProperty
  public SolrScanSpec getSolrScanSpec() {
    return solrScanSpec;
  }

  @JsonProperty
  public SolrStoragePluginConfig getSolrPluginConfig() {
    return solrPluginConfig;
  }

  @JsonIgnore
  public SolrStoragePlugin getSolrPlugin() {
    return solrPlugin;
  }

  @Override
  @JsonIgnore
  public boolean canPushdownProjects(List<SchemaPath> columns) {
    // this.columns = columns;
    return true;
  }

  public List<SchemaPath> getColumns() {
    return columns;
  }

  @Override
  public String toString() {
    return "SolrGroupScan [SolrScanSpec=" + solrScanSpec + ", columns=" + columns + "]";
  }
}
Example #28
/** @author Lennart Koopmann <*****@*****.**> */
public abstract class InputRegistry {

  protected static final Logger LOG = LoggerFactory.getLogger(InputRegistry.class);
  protected static final Map<String, ClassLoader> classLoaders = Maps.newHashMap();
  protected final List<InputState> inputStates = Lists.newArrayList();
  protected final ExecutorService executor =
      Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("inputs-%d").build());
  private final MessageInputFactory messageInputFactory;
  private final ProcessBuffer processBuffer;

  public InputRegistry(MessageInputFactory messageInputFactory, ProcessBuffer processBuffer) {
    this.messageInputFactory = messageInputFactory;
    this.processBuffer = processBuffer;
  }

  public MessageInput create(String inputClass) throws NoSuchInputTypeException {
    return messageInputFactory.create(inputClass);
  }

  public InputState launch(final MessageInput input, String id) {
    return launch(input, id, false);
  }

  protected abstract void finishedLaunch(InputState state);

  protected abstract void finishedTermination(InputState state);

  public InputState launch(final MessageInput input, String id, boolean register) {
    final InputState inputState = new InputState(input, id);
    inputStates.add(inputState);

    executor.submit(
        new Runnable() {
          @Override
          public void run() {
            LOG.info(
                "Starting [{}] input with ID <{}>",
                input.getClass().getCanonicalName(),
                input.getId());
            try {
              input.checkConfiguration();
              inputState.setState(InputState.InputStateType.STARTING);
              input.launch(processBuffer);
              inputState.setState(InputState.InputStateType.RUNNING);
              String msg =
                  "Completed starting ["
                      + input.getClass().getCanonicalName()
                      + "] input with ID <"
                      + input.getId()
                      + ">";
              LOG.info(msg);
            } catch (Exception e) {
              handleLaunchException(e, input, inputState);
            } finally {
              finishedLaunch(inputState);
            }
          }
        });

    return inputState;
  }

  protected void handleLaunchException(Throwable e, MessageInput input, InputState inputState) {
    StringBuilder msg =
        new StringBuilder(
            "The ["
                + input.getClass().getCanonicalName()
                + "] input with ID <"
                + input.getId()
                + "> misfired. Reason: ");

    String causeMsg = extractMessageCause(e);

    msg.append(causeMsg);

    LOG.error(msg.toString(), e);

    // Clean up.
    // cleanInput(input);

    inputState.setState(InputState.InputStateType.FAILED);
    inputState.setDetailedMessage(causeMsg);
  }

  private String extractMessageCause(Throwable e) {
    StringBuilder causeMsg = new StringBuilder(e.getMessage());

    // Go down the whole cause chain to build a message that provides as much information as
    // possible.
    int maxLevel = 7; // ;)
    Throwable cause = e.getCause();
    for (int i = 0; i < maxLevel; i++) {
      if (cause == null) {
        break;
      }

      causeMsg.append(", ").append(cause.getMessage());
      cause = cause.getCause();
    }
    return causeMsg.toString();
  }

  public InputState launch(final MessageInput input) {
    return launch(input, UUID.randomUUID().toString());
  }

  public List<InputState> getInputStates() {
    return inputStates;
  }

  public List<InputState> getRunningInputs() {
    List<InputState> runningInputs = Lists.newArrayList();
    for (InputState inputState : inputStates) {
      if (inputState.getState() == InputState.InputStateType.RUNNING) {
        runningInputs.add(inputState);
      }
    }
    return runningInputs;
  }

  public boolean hasTypeRunning(Class klazz) {
    for (InputState inputState : inputStates) {
      if (inputState.getMessageInput().getClass().equals(klazz)) {
        return true;
      }
    }

    return false;
  }

  public Map<String, String> getAvailableInputs() {
    return messageInputFactory.getAvailableInputs();
  }

  public int runningCount() {
    return getRunningInputs().size();
  }

  public void removeFromRunning(MessageInput input) {
    // Remove from running list.
    InputState thisInputState = null;
    for (InputState inputState : inputStates) {
      if (inputState.getMessageInput().equals(input)) {
        thisInputState = inputState;
      }
    }
    inputStates.remove(thisInputState);
  }

  public InputState launchPersisted(MessageInput input) {
    return launch(input);
  }

  protected abstract List<MessageInput> getAllPersisted();

  public void launchAllPersisted() {
    for (MessageInput input : getAllPersisted()) {
      launchPersisted(input);
    }
  }

  public InputState terminate(MessageInput input) {
    InputState inputState = stop(input);

    if (inputState != null) {
      inputState.setState(InputState.InputStateType.TERMINATED);
      finishedTermination(inputState);
    }

    return inputState;
  }

  public InputState stop(MessageInput input) {
    InputState inputState = getRunningInputState(input.getId());

    if (inputState != null) {
      try {
        input.stop();
      } catch (Exception e) {
        LOG.warn("Stopping input <{}> failed, removing anyway: {}", input.getId(), e);
      }
      removeFromRunning(input);
      inputState.setState(InputState.InputStateType.STOPPED);
      finishedStop(inputState);
    }

    return inputState;
  }

  protected abstract void finishedStop(InputState inputState);

  public MessageInput getRunningInput(String inputId) {
    for (InputState inputState : inputStates) {
      if (inputState.getMessageInput().getId().equals(inputId)) return inputState.getMessageInput();
    }

    return null;
  }

  public InputState getRunningInputState(String inputStateId) {
    for (InputState inputState : inputStates) {
      if (inputState.getMessageInput().getId().equals(inputStateId)) return inputState;
    }

    return null;
  }

  public abstract void cleanInput(MessageInput input);

  public MessageInput getPersisted(String inputId) {
    for (MessageInput input : getAllPersisted()) {
      if (input.getId().equals(inputId)) return input;
    }

    return null;
  }
}
Example #29
/**
 * Parses a compressed streaming file and exports it to an RDF file in N-Triples format.
 *
 * @author javi
 */
public class StreamFile2RDF {

  public String inputStream = null;
  public String rdfOutput = null;

  /* START> Description of parameters */
  @Parameter(description = "<input RDF_Comp> <outputRDF>")
  public List<String> parameters = Lists.newArrayList();

  @Parameter(names = "-quiet", description = "Do not show progress of the conversion")
  public boolean quiet = false;

  /* END> Description of parameters */

  /**
   * Main execution of the conversion
   *
   * @throws ParserException
   * @throws IOException
   */
  public void execute() throws ParserException, IOException {

    StopWatch sw = new StopWatch();
    PrintStream out = null;
    if (rdfOutput.equals("stdout")) {
      out = System.out;
    } else {
      out = new PrintStream(rdfOutput, "UTF-8");
    }

    CompressedStreaming2RDFExporterFactory exporterFactory =
        new CompressedStreaming2RDFExporterFactory();
    CompressedStreaming2RDF exporter =
        exporterFactory.loadFromFile(inputStream, out); // launch exporter

    if (out != System.out) {
      out.close();
    }
    if (!quiet) { // Show basic stats

      System.out.println("- Conversion Time: " + sw.stopAndShow());
      System.out.println("- Number of Triples: " + exporter.getNumTriples());
      System.out.println("- Number of Blocks: " + exporter.getNumBlocks());
    }
  }

  public static void main(String[] args) throws Throwable {
    StreamFile2RDF streamfile2rdf = new StreamFile2RDF();
    JCommander com = new JCommander(streamfile2rdf, args);
    com.setProgramName("stream2file");

    if (streamfile2rdf.parameters.size() == 1) {
      System.err.println("No output file specified, writing to standard output.");
      streamfile2rdf.rdfOutput = "stdout";
      streamfile2rdf.inputStream = streamfile2rdf.parameters.get(0);

    } else if (streamfile2rdf.parameters.size() == 2) {
      streamfile2rdf.inputStream = streamfile2rdf.parameters.get(0);
      streamfile2rdf.rdfOutput = streamfile2rdf.parameters.get(1);

    } else {
      com.usage();
      System.exit(1);
    }

    System.out.println(
        "Converting stream file '"
            + streamfile2rdf.inputStream
            + "' to RDF file '"
            + streamfile2rdf.rdfOutput
            + "'");

    streamfile2rdf.execute();
    System.out.println("Bye!");
    System.exit(0);
  }
}
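Based on the parameter description and the program name set in main, an invocation looks roughly like this (file names are placeholders):

  stream2file <input RDF_Comp> <outputRDF> [-quiet]
  stream2file data.comp data.nt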
Example #30
public final class StartupParameters {
  @SuppressWarnings("unused")
  private final String[] args;

  @Parameter private List<String> parameters = Lists.newArrayList();

  @Parameter(
      names = {"-console"},
      description = "Shows the console window")
  private boolean console = false;

  @Parameter(
      names = {"-width"},
      description = "Sets the width of the minecraft window to be fixed to this.")
  private int width = -1;

  @Parameter(
      names = {"-height"},
      description = "Sets the height of the minecraft window to be fixed to this.")
  private int height = -1;

  @Parameter(
      names = {"-fullscreen"},
      description = "Whether to launch minecraft in fullscreen mode.")
  private boolean fullscreen = false;

  public StartupParameters(String[] args) {
    this.args = args;
  }

  public List<String> getParameters() {
    return parameters;
  }

  public void logParameters(Logger log) {
    log.info("------------ Startup Parameters ------------");
    if (console) {
      log.info("Console frame enabled");
    }
    if (width != -1) {
      log.info("Minecraft frame width: " + width);
    }
    if (height != -1) {
      log.info("Minecraft frame height: " + height);
    }
    log.info("--------- End of Startup Parameters ---------");
  }

  public boolean isConsole() {
    return console;
  }

  public int getWidth() {
    return width;
  }

  public int getHeight() {
    return height;
  }

  public boolean getFullscreen() {
    return fullscreen;
  }
}
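A hedged wiring sketch showing how these flags might be bound with JCommander, mirroring the JCommander usage elsewhere in this listing; the argument values and the surrounding launcher wiring are illustrative, not from the source.

  // Hypothetical arguments; only the flag names come from the class above.
  String[] args = {"-console", "-width", "854", "-height", "480"};
  StartupParameters params = new StartupParameters(args);
  new JCommander(params, args); // binds -console, -width, -height and -fullscreen
  params.logParameters(log);    // 'log' is an assumed, pre-configured Logger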