@Override public Iterator<Edge> edges(final Object... edgeIds) { try { if (0 == edgeIds.length) { return new HadoopEdgeIterator(this); } else { // base the conversion function on the first item in the id list as the expectation is that // these // id values will be a uniform list if (edgeIds[0] instanceof Edge) { // based on the first item assume all Edges in the argument list if (!Stream.of(edgeIds).allMatch(id -> id instanceof Edge)) throw Graph.Exceptions.idArgsMustBeEitherIdOrElement(); // no need to get the vertices again, so just flip it back - some implementation may want // to treat this // as a refresh operation. that's not necessary for hadoopgraph. return Stream.of(edgeIds).map(id -> (Edge) id).iterator(); } else { final Class<?> firstClass = edgeIds[0].getClass(); if (!Stream.of(edgeIds).map(Object::getClass).allMatch(firstClass::equals)) throw Graph.Exceptions .idArgsMustBeEitherIdOrElement(); // todo: change exception to be ids of the same // type return IteratorUtils.filter( new HadoopEdgeIterator(this), vertex -> ElementHelper.idExists(vertex.id(), edgeIds)); } } } catch (final IOException e) { throw new IllegalStateException(e.getMessage(), e); } }
/**
 * Creates a {@link GraphComputer} of the requested implementation class bound to this graph.
 *
 * <p>Only {@code GiraphGraphComputer} and {@code SparkGraphComputer} are supported by
 * hadoopgraph.
 *
 * @param graphComputerClass the computer implementation to instantiate
 * @param <C> the computer type
 * @return a new computer instance of the requested class
 * @throws IllegalArgumentException if the requested class is not a supported computer
 */
@Override
public <C extends GraphComputer> C compute(final Class<C> graphComputerClass) {
    // Class.cast replaces the previous unchecked (C) casts: same conversion, but checked at
    // runtime and free of the unchecked-cast compiler warning.
    if (graphComputerClass.equals(GiraphGraphComputer.class))
        return graphComputerClass.cast(new GiraphGraphComputer(this));
    else if (graphComputerClass.equals(SparkGraphComputer.class))
        return graphComputerClass.cast(new SparkGraphComputer(this));
    else
        throw Graph.Exceptions.graphDoesNotSupportProvidedGraphComputer(graphComputerClass);
}