Example #1
0
/**
 * Created by jhutchins on 11/27/15.
 *
 * <p>Stores configured processors and delegates to them as appropriate.
 */
public class HttpProcessorManager {
  private final Map<String, HttpProcessor> processors = Maps.newHashMap();

  public void addProcessor(HttpProcessor processor) {
    processors.put(processor.getType(), processor);
  }

  /**
   * Processes the request with the appropriate delegate processor.
   *
   * <p>Writes a 406 Not Acceptable response when no delegate exists.
   *
   * @param request The {@link HttpRequest} to be processed
   * @param channel The {@link SocketChannel} to write the response to
   * @throws IOException if writing the response fails
   */
  public void process(HttpRequest request, SocketChannel channel) throws IOException {
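    // Processors are registered under getType(), which doubles as the HTTP method name.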
    Optional.ofNullable(processors.get(request.getMethod()))
        .map(processor -> processor.process(request, channel))
        .orElseGet(this::getNotAcceptableStatusResponseWriter)
        .write(channel);
  }

  private ResponseWriter getNotAcceptableStatusResponseWriter() {
    return new StatusResponseWriter(
        HttpResponseStatus.NOT_ACCEPTABLE,
        new HttpHeader("Allow", Joiner.on(", ").join(this.processors.keySet())));
  }
}
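A minimal usage sketch for the manager above (GetProcessor and PostProcessor are hypothetical implementations; HttpProcessor is assumed to expose getType() and a process(...) method returning a ResponseWriter, as the map chain implies):

  HttpProcessorManager manager = new HttpProcessorManager();
  manager.addProcessor(new GetProcessor());  // hypothetical, getType() returns "GET"
  manager.addProcessor(new PostProcessor()); // hypothetical, getType() returns "POST"
  // Requests with a registered method are delegated; anything else gets the
  // 406 status response carrying the Allow header built above.
  manager.process(request, channel);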
Example #2
0
  public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name)
      throws ExecutionSetupException {
    try {
      this.config = config;
      this.context = context;

      Configuration fsConf = new Configuration();
      fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
      fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
      fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
      this.fs = FileSystemCreator.getFileSystem(context.getConfig(), fsConf);
      this.formatsByName = FormatCreator.getFormatPlugins(context, fs, config);
      List<FormatMatcher> matchers = Lists.newArrayList();
      formatPluginsByConfig = Maps.newHashMap();
      for (FormatPlugin p : formatsByName.values()) {
        matchers.add(p.getMatcher());
        formatPluginsByConfig.put(p.getConfig(), p);
      }

      final List<WorkspaceSchemaFactory> factories;
      if (config.workspaces == null || config.workspaces.isEmpty()) {
        factories =
            Collections.singletonList(
                new WorkspaceSchemaFactory(this, "default", name, fs, "/", matchers));
      } else {
        factories = Lists.newArrayList();
        for (Map.Entry<String, String> space : config.workspaces.entrySet()) {
          factories.add(
              new WorkspaceSchemaFactory(
                  this, space.getKey(), name, fs, space.getValue(), matchers));
        }
      }
      this.schemaFactory = new FileSystemSchemaFactory(name, factories);
    } catch (IOException e) {
      throw new ExecutionSetupException("Failure setting up file system plugin.", e);
    }
  }
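For context, a sketch of the configuration the constructor above consumes (FileSystemConfig construction is an assumption; the code only relies on its connection and workspaces fields):

  FileSystemConfig config = new FileSystemConfig();
  config.connection = "hdfs://namenode:8020"; // becomes fs.default.name
  config.workspaces = ImmutableMap.of(
      "home", "/user/drill", // each entry becomes a WorkspaceSchemaFactory
      "tmp", "/tmp");
  // With no workspaces configured, a single "default" workspace rooted at "/" is used.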
  @SuppressWarnings("unchecked")
  public <T, I> T getImplementationClass( //
      QueryClassLoader classLoader, //
      TemplateClassDefinition<T> templateDefinition, //
      String entireClass, //
      String materializedClassName)
      throws ClassTransformationException {

    final ClassSet set =
        new ClassSet(null, templateDefinition.getTemplateClassName(), materializedClassName);

    try {
      final byte[][] implementationClasses =
          classLoader.getClassByteCode(set.generated.clazz, entireClass);

      Map<String, ClassNode> classesToMerge = Maps.newHashMap();
      for (byte[] clazz : implementationClasses) {
        ClassNode node = getClassNodeFromByteCode(clazz);
        classesToMerge.put(node.name, node);
      }

      LinkedList<ClassSet> names = Lists.newLinkedList();
      Set<ClassSet> namesCompleted = Sets.newHashSet();
      names.add(set);

      while (!names.isEmpty()) {
        final ClassSet nextSet = names.removeFirst();
        if (namesCompleted.contains(nextSet)) {
          continue;
        }
        final ClassNames nextPrecompiled = nextSet.precompiled;
        final byte[] precompiledBytes =
            byteCodeLoader.getClassByteCodeFromPath(nextPrecompiled.clazz);
        ClassNames nextGenerated = nextSet.generated;
        ClassNode generatedNode = classesToMerge.get(nextGenerated.slash);
        MergedClassResult result =
            MergeAdapter.getMergedClass(nextSet, precompiledBytes, generatedNode);

        for (String s : result.innerClasses) {
          s = s.replace(FileUtils.separatorChar, '.');
          names.add(nextSet.getChild(s));
        }
        classLoader.injectByteCode(nextGenerated.dot, result.bytes);
        namesCompleted.add(nextSet);
      }

      // logger.debug(String.format(
      //     "[Compile Time] Janino: %dms, Bytecode load and parse: %dms, Class Merge: %dms, "
      //         + "Subclass remap and load: %dms.",
      //     t1.elapsed(TimeUnit.MILLISECONDS), t2.elapsed(TimeUnit.MILLISECONDS),
      //     t3.elapsed(TimeUnit.MILLISECONDS), t4.elapsed(TimeUnit.MILLISECONDS)));

      Class<?> c = classLoader.findClass(set.generated.dot);
      if (templateDefinition.getExternalInterface().isAssignableFrom(c)) {
        return (T) c.newInstance();
      } else {
        throw new ClassTransformationException(
            "The requested class did not implement the expected interface.");
      }

    } catch (CompileException
        | IOException
        | ClassNotFoundException
        | InstantiationException
        | IllegalAccessException e) {
      throw new ClassTransformationException(
          String.format("Failure generating transformation classes for value: \n %s", entireClass),
          e);
    }
  }
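The while loop above is a worklist traversal: take a class set, merge its precompiled and generated halves, enqueue any inner classes the merge reveals, and skip sets already completed. The same pattern in isolation (mergeAndCollectInnerClasses is a hypothetical stand-in for the MergeAdapter/injectByteCode steps):

  import java.util.ArrayDeque;
  import java.util.Deque;
  import java.util.HashSet;
  import java.util.List;
  import java.util.Set;
  import java.util.function.Function;

  class WorklistSketch {
    static void mergeAll(String root, Function<String, List<String>> mergeAndCollectInnerClasses) {
      Deque<String> work = new ArrayDeque<>();
      Set<String> done = new HashSet<>();
      work.add(root);
      while (!work.isEmpty()) {
        String next = work.removeFirst();
        if (!done.add(next)) {
          continue; // add() returns false when the set was already merged
        }
        // Merge next, then enqueue whatever inner classes it declared.
        work.addAll(mergeAndCollectInnerClasses.apply(next));
      }
    }
  }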
Example #4
0
  public static void main(String[] args) throws Exception {
    Preconditions.checkArgument(
        args.length == 6,
        "java "
            + ExternalIndexMain.class.getCanonicalName()
            + " sparqlFile cbinstance cbzk cbuser cbpassword rdfTablePrefix.");

    final String sparqlFile = args[0];

    instStr = args[1];
    zooStr = args[2];
    userStr = args[3];
    passStr = args[4];
    tablePrefix = args[5];

    String queryString = FileUtils.readFileToString(new File(sparqlFile));

    // Look for Extra Indexes
    Instance inst = new ZooKeeperInstance(instStr, zooStr);
    Connector c = inst.getConnector(userStr, passStr.getBytes());

    System.out.println("Searching for Indexes");
    Map<String, String> indexTables = Maps.newLinkedHashMap();
    for (String table : c.tableOperations().list()) {
      if (table.startsWith(tablePrefix + "INDEX_")) {
        Scanner s = c.createScanner(table, new Authorizations());
        s.setRange(Range.exact(new Text("~SPARQL")));
        for (Entry<Key, Value> e : s) {
          indexTables.put(table, e.getValue().toString());
        }
      }
    }

    List<ExternalTupleSet> index = Lists.newArrayList();

    if (indexTables.isEmpty()) {
      System.out.println("No Index found");
    } else {
      for (Entry<String, String> entry : indexTables.entrySet()) {
        String table = entry.getKey();
        String indexSparqlString = entry.getValue();
        System.out.println("====================== INDEX FOUND ======================");
        System.out.println(" table : " + table);
        System.out.println(" sparql : ");
        System.out.println(indexSparqlString);

        index.add(new AccumuloIndexSet(indexSparqlString, c, table));
      }
    }

    // Connect to Rya
    Sail s = getRyaSail();
    SailRepository repo = new SailRepository(s);
    repo.initialize();

    // Perform Query

    CountingTupleQueryResultHandler count = new CountingTupleQueryResultHandler();

    SailRepositoryConnection conn;
    if (index.isEmpty()) {
      conn = repo.getConnection();

    } else {
      ExternalProcessor processor = new ExternalProcessor(index);

      Sail processingSail = new ExternalSail(s, processor);
      SailRepository smartSailRepo = new SailRepository(processingSail);
      smartSailRepo.initialize();

      conn = smartSailRepo.getConnection();
    }

    startTime = System.currentTimeMillis();
    lastTime = startTime;
    System.out.println("Query Started");
    conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(count);

    System.out.println("Count of Results found : " + count.i);
    System.out.println(
        "Total query time (s) : " + (System.currentTimeMillis() - startTime) / 1000.);
  }
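CountingTupleQueryResultHandler is referenced but not shown; a plausible minimal version, assuming Sesame 2.x's TupleQueryResultHandlerBase, simply counts solutions (the public field i matches the count.i read above):

  import org.openrdf.query.BindingSet;
  import org.openrdf.query.TupleQueryResultHandlerBase;

  public class CountingTupleQueryResultHandler extends TupleQueryResultHandlerBase {
    public int i = 0;

    @Override
    public void handleSolution(BindingSet bindingSet) {
      i++; // one solution per result row
    }
  }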
Example #5
0
  private static Map<PROPERTY, Object> createPropertyMap(String path) {
    Map<PROPERTY, Object> temp = Maps.newHashMap();
    temp.put(PROPERTY.LocalPath, path);
    return temp;
  }
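Typical use is a one-liner, with PROPERTY.LocalPath resolved against the enclosing class's PROPERTY enum:

  Map<PROPERTY, Object> props = createPropertyMap("/data/local/cache");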
Example #6
0
  /**
   * Executes the log command using the provided options.
   *
   * @param cli the command-line interface the command runs against
   * @throws Exception if the log operation fails
   * @see org.geogit.cli.AbstractCommand#runInternal(org.geogit.cli.GeogitCLI)
   */
  @Override
  public void runInternal(GeogitCLI cli) throws Exception {
    final Platform platform = cli.getPlatform();
    Preconditions.checkState(
        cli.getGeogit() != null, "Not a geogit repository: " + platform.pwd().getAbsolutePath());

    Preconditions.checkArgument(
        !(args.summary && args.oneline), "--summary and --oneline cannot be used together");
    Preconditions.checkArgument(
        !(args.stats && args.oneline), "--stats and --oneline cannot be used together");
    Preconditions.checkArgument(
        !(args.names && args.oneline), "--name-only and --oneline cannot be used together");

    geogit = cli.getGeogit();

    LogOp op =
        geogit.command(LogOp.class).setTopoOrder(args.topo).setFirstParentOnly(args.firstParent);

    refs = Maps.newHashMap();
    if (args.decoration) {
      Optional<Ref> head = geogit.command(RefParse.class).setName(Ref.HEAD).call();
      if (head.isPresent()) {
        refs.put(head.get().getObjectId(), Ref.HEAD);
      }
      ImmutableSet<Ref> set = geogit.command(ForEachRef.class).call();
      for (Ref ref : set) {
        ObjectId id = ref.getObjectId();
        if (refs.containsKey(id)) {
          refs.put(id, refs.get(id) + ", " + ref.getName());
        } else {
          refs.put(id, ref.getName());
        }
      }
    }
    if (args.all) {
      ImmutableSet<Ref> allRefs = geogit.command(ForEachRef.class).call();
      List<ObjectId> list = Lists.newArrayList();
      for (Ref ref : allRefs) {
        list.add(ref.getObjectId());
      }
      Optional<Ref> head = geogit.command(RefParse.class).setName(Ref.HEAD).call();
      if (head.isPresent()) {
        Ref ref = head.get();
        if (ref instanceof SymRef) {
          ObjectId id = ref.getObjectId();
          list.remove(id);
          list.add(id); // put the HEAD ref in the last position, to give it preference
        }
      }
      for (ObjectId id : list) {
        op.addCommit(id);
      }
    } else if (args.branch != null) {
      Optional<Ref> obj = geogit.command(RefParse.class).setName(args.branch).call();
      Preconditions.checkArgument(obj.isPresent(), "Wrong branch name: " + args.branch);
      op.addCommit(obj.get().getObjectId());
    }

    if (args.author != null && !args.author.isEmpty()) {
      op.setAuthor(args.author);
    }
    if (args.committer != null && !args.committer.isEmpty()) {
      op.setCommiter(args.committer);
    }
    if (args.skip != null) {
      op.setSkip(args.skip.intValue());
    }
    if (args.limit != null) {
      op.setLimit(args.limit.intValue());
    }
    if (args.since != null || args.until != null) {
      Date since = new Date(0);
      Date until = new Date();
      if (args.since != null) {
        since = new Date(geogit.command(ParseTimestamp.class).setString(args.since).call());
      }
      if (args.until != null) {
        until = new Date(geogit.command(ParseTimestamp.class).setString(args.until).call());
        if (args.all) {
          throw new IllegalStateException(
              "Cannot specify 'until' commit when listing all branches");
        }
      }
      op.setTimeRange(new Range<Date>(Date.class, since, until));
    }
    if (!args.sinceUntilPaths.isEmpty()) {
      List<String> sinceUntil =
          ImmutableList.copyOf(Splitter.on("..").split(args.sinceUntilPaths.get(0)));
      Preconditions.checkArgument(
          sinceUntil.size() == 1 || sinceUntil.size() == 2,
          "Invalid refSpec format, expected [<until>]|[<since>..<until>]: %s",
          args.sinceUntilPaths.get(0));

      String sinceRefSpec;
      String untilRefSpec;
      if (sinceUntil.size() == 1) {
        // just until was given
        sinceRefSpec = null;
        untilRefSpec = sinceUntil.get(0);
      } else {
        sinceRefSpec = sinceUntil.get(0);
        untilRefSpec = sinceUntil.get(1);
      }
      if (sinceRefSpec != null) {
        Optional<ObjectId> since =
            geogit.command(RevParse.class).setRefSpec(sinceRefSpec).call();
        Preconditions.checkArgument(since.isPresent(), "Object not found '%s'", sinceRefSpec);
        op.setSince(since.get());
      }
      if (untilRefSpec != null) {
        if (args.all) {
          throw new IllegalStateException(
              "Cannot specify 'until' commit when listing all branches");
        }
        Optional<ObjectId> until =
            geogit.command(RevParse.class).setRefSpec(untilRefSpec).call();
        Preconditions.checkArgument(until.isPresent(), "Object not found '%s'", untilRefSpec);
        op.setUntil(until.get());
      }
    }
    if (!args.pathNames.isEmpty()) {
      for (String s : args.pathNames) {
        op.addPath(s);
      }
    }
    Iterator<RevCommit> log = op.call();
    console = cli.getConsole();
    Terminal terminal = console.getTerminal();
    switch (args.color) {
      case never:
        useColor = false;
        break;
      case always:
        useColor = true;
        break;
      default:
        useColor = terminal.isAnsiSupported();
    }

    if (!log.hasNext()) {
      console.println("No commits to show");
      console.flush();
      return;
    }

    LogEntryPrinter printer;
    if (args.oneline) {
      printer = new OneLineConverter();
    } else {
      LOG_DETAIL detail;
      if (args.summary) {
        detail = LOG_DETAIL.SUMMARY;
      } else if (args.names) {
        detail = LOG_DETAIL.NAMES_ONLY;
      } else if (args.stats) {
        detail = LOG_DETAIL.STATS;
      } else {
        detail = LOG_DETAIL.NOTHING;
      }

      printer = new StandardConverter(detail, geogit.getPlatform());
    }

    while (log.hasNext()) {
      printer.print(log.next());
      console.flush();
    }
  }
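The <since>..<until> handling above leans on Guava's Splitter; a quick illustration of the two accepted shapes (refspec values here are illustrative):

  // One element for a bare <until>, two for <since>..<until>, matching the
  // size == 1 / size == 2 branches above.
  List<String> untilOnly = ImmutableList.copyOf(Splitter.on("..").split("HEAD"));          // [HEAD]
  List<String> sinceUntil = ImmutableList.copyOf(Splitter.on("..").split("HEAD~3..HEAD")); // [HEAD~3, HEAD]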