@Test
public void polygonToPolygonsIntersectionTest() throws IOException {
    // create two disjoint polygons: the first spans x 8..10, the second x 12..14
    List<Point> firstPolygonPoints = new ArrayList<>();
    firstPolygonPoints.add(new PointImpl(10, 10, SpatialContext.GEO));
    firstPolygonPoints.add(new PointImpl(8, 10, SpatialContext.GEO));
    firstPolygonPoints.add(new PointImpl(8, 8, SpatialContext.GEO));
    firstPolygonPoints.add(new PointImpl(10, 8, SpatialContext.GEO));
    firstPolygonPoints.add(new PointImpl(10, 10, SpatialContext.GEO));
    Map<String, Object> firstPolygon = buildGeoJsonPolygon(firstPolygonPoints);

    List<Point> secondPolygonPoints = new ArrayList<>();
    secondPolygonPoints.add(new PointImpl(14, 10, SpatialContext.GEO));
    secondPolygonPoints.add(new PointImpl(12, 10, SpatialContext.GEO));
    secondPolygonPoints.add(new PointImpl(12, 8, SpatialContext.GEO));
    secondPolygonPoints.add(new PointImpl(14, 8, SpatialContext.GEO));
    secondPolygonPoints.add(new PointImpl(14, 10, SpatialContext.GEO));
    Map<String, Object> secondPolygon = buildGeoJsonPolygon(secondPolygonPoints);

    // add one vertex per polygon to the graph
    graph.addVertex(T.label, DOCUMENT_TYPE, T.id, "1", "location", firstPolygon);
    graph.addVertex(T.label, DOCUMENT_TYPE, T.id, "2", "location", secondPolygon);

    GraphTraversalSource g = graph.traversal();
    // the query polygon spans x 9..11, so it overlaps only the first polygon
    String geoJsonPolygon =
            "{ \"type\": \"Polygon\", \"coordinates\": [[[9, 10], [11, 10], [11, 8], [9, 8], [9, 10]]]}";

    // (sic: "intersercts" is kept as-is; it is the predicate's method name in the Geo helper class)
    long intersectionCounter = g.V().has("location", Geo.intersercts(geoJsonPolygon)).count().next();
    assertEquals(1L, intersectionCounter);

    Element location = g.V().has("location", Geo.intersercts(geoJsonPolygon)).next();
    assertEquals("1", location.id().toString());
}
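// buildGeoJsonPolygon(...) is called above but not shown in this excerpt. A minimal
// sketch, assuming the helper turns Spatial4j points (x/y pairs) into a GeoJSON-style
// Map with a single outer ring; the actual helper in this test class may differ.
// (Uses java.util.Arrays, java.util.Collections, and java.util.HashMap.)
private Map<String, Object> buildGeoJsonPolygon(List<Point> points) {
    List<List<Double>> outerRing = new ArrayList<>();
    for (Point point : points) {
        outerRing.add(Arrays.asList(point.getX(), point.getY()));
    }
    Map<String, Object> polygon = new HashMap<>();
    polygon.put("type", "Polygon");
    // GeoJSON "coordinates" holds a list of rings; the first ring is the outer boundary
    polygon.put("coordinates", Collections.singletonList(outerRing));
    return polygon;
}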
@Test
public void testComplexChain() throws Exception {
    Spark.create("local[4]");

    final String rddName = TestHelper.makeTestDataDirectory(
            PersistedInputOutputRDDIntegrateTest.class, "testComplexChain", "graphRDD");
    final String rddName2 = TestHelper.makeTestDataDirectory(
            PersistedInputOutputRDDIntegrateTest.class, "testComplexChain", "graphRDD2");

    final Configuration configuration = super.getBaseConfiguration();
    configuration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(
            Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    configuration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);

    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(0, Spark.getContext().getPersistentRDDs().size());

    // stage 1: read from Gryo and persist the full graph (vertices and edges) as an RDD
    Graph graph = GraphFactory.open(configuration);
    graph = graph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.EDGES)
            .program(PageRankVertexProgram.build().iterations(2).create(graph))
            .submit()
            .get()
            .graph();
    GraphTraversalSource g = graph.traversal();
    assertEquals(6L, g.V().count().next().longValue());
    assertEquals(6L, g.E().count().next().longValue());
    assertEquals(6L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());

    // stage 2: read from the first persisted RDD, write a second one plus a memory RDD
    configuration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_READER, PersistedInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, rddName);
    configuration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName2);

    graph = GraphFactory.open(configuration);
    graph = graph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.EDGES)
            .mapReduce(PageRankMapReduce.build().create())
            .program(PageRankVertexProgram.build().iterations(2).create(graph))
            .submit()
            .get()
            .graph();
    g = graph.traversal();
    assertEquals(6L, g.V().count().next().longValue());
    assertEquals(6L, g.E().count().next().longValue());
    assertEquals(6L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertTrue(
            Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(3, Spark.getContext().getPersistentRDDs().size());

    // stage 3: persist vertex properties only; edges are dropped and, since no mapReduce
    // runs, the earlier memory RDD is unpersisted when rddName2 is overwritten
    graph = GraphFactory.open(configuration);
    graph = graph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.VERTEX_PROPERTIES)
            .program(PageRankVertexProgram.build().iterations(2).create(graph))
            .submit()
            .get()
            .graph();
    g = graph.traversal();
    assertEquals(6L, g.V().count().next().longValue());
    assertEquals(0L, g.E().count().next().longValue());
    assertEquals(6L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertFalse(
            Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(2, Spark.getContext().getPersistentRDDs().size());

    // stage 4: persist nothing; the output RDD is removed and the resulting graph is empty
    graph = GraphFactory.open(configuration);
    graph = graph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.NOTHING)
            .program(PageRankVertexProgram.build().iterations(2).create(graph))
            .submit()
            .get()
            .graph();
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    g = graph.traversal();
    assertEquals(0L, g.V().count().next().longValue());
    assertEquals(0L, g.E().count().next().longValue());
    assertEquals(0L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertFalse(Spark.hasRDD(Constants.getGraphLocation(rddName2)));
    assertFalse(
            Spark.hasRDD(Constants.getMemoryLocation(rddName2, PageRankMapReduce.DEFAULT_MEMORY_KEY)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());

    Spark.close();
}
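// super.getBaseConfiguration() is inherited and not shown in this excerpt. A minimal
// sketch of what such a provider configuration typically supplies for SparkGraphComputer
// tests (a HadoopGraph over a local master with Gryo serialization); the actual base
// class may set different or additional keys.
protected Configuration getBaseConfiguration() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getCanonicalName());
    configuration.setProperty("spark.master", "local[4]"); // standard Spark property
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    return configuration;
}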
@Test
public void testBulkLoaderVertexProgramChainWithInputOutputHelperMapping() throws Exception {
    Spark.create("local[4]");

    final String rddName = TestHelper.makeTestDataDirectory(
            PersistedInputOutputRDDIntegrateTest.class, UUID.randomUUID().toString());

    final Configuration readConfiguration = super.getBaseConfiguration();
    readConfiguration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    readConfiguration.setProperty(
            Constants.GREMLIN_HADOOP_INPUT_LOCATION,
            SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    readConfiguration.setProperty(
            Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    readConfiguration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, rddName);
    readConfiguration.setProperty(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true);
    Graph pageRankGraph = GraphFactory.open(readConfiguration);

    // target graph for the bulk load: a gryo-backed TinkerGraph on disk
    final Configuration writeConfiguration = new BaseConfiguration();
    writeConfiguration.setProperty(Graph.GRAPH, TinkerGraph.class.getCanonicalName());
    writeConfiguration.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    writeConfiguration.setProperty(
            TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION,
            TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class)
                    + "testBulkLoaderVertexProgramChainWithInputOutputHelperMapping.kryo");

    // run PageRank and persist the result (vertices and edges) as an RDD
    final Graph bulkLoaderGraph = pageRankGraph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.EDGES)
            .program(PageRankVertexProgram.build().create(pageRankGraph))
            .submit()
            .get()
            .graph();
    // bulk-load the persisted RDD into the TinkerGraph
    bulkLoaderGraph
            .compute(SparkGraphComputer.class)
            .persist(GraphComputer.Persist.NOTHING)
            .workers(1)
            .program(
                    BulkLoaderVertexProgram.build()
                            .userSuppliedIds(true)
                            .writeGraph(writeConfiguration)
                            .create(bulkLoaderGraph))
            .submit()
            .get();
    ////
    Spark.create(readConfiguration);
    assertTrue(Spark.hasRDD(Constants.getGraphLocation(rddName)));
    assertEquals(1, Spark.getContext().getPersistentRDDs().size());

    // read the bulk-loaded graph back and verify vertices, edges, and PageRank values
    final Graph graph = TinkerGraph.open();
    final GraphTraversalSource g = graph.traversal();
    graph
            .io(IoCore.gryo())
            .readGraph(
                    TestHelper.makeTestDataDirectory(PersistedInputOutputRDDIntegrateTest.class)
                            + "testBulkLoaderVertexProgramChainWithInputOutputHelperMapping.kryo");
    assertEquals(6L, g.V().count().next().longValue());
    assertEquals(6L, g.E().count().next().longValue());
    assertEquals("marko", g.V().has("name", "marko").values("name").next());
    assertEquals(6L, g.V().values(PageRankVertexProgram.PAGE_RANK).count().next().longValue());
    ////
    Spark.close();
}
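// Each test above ends with Spark.close(); if an assertion fails first, persisted RDDs
// from that run would leak into the next test. A hedged safety-net sketch (not shown in
// the original class as excerpted), assuming JUnit 4's org.junit.After:
@After
public void closeSparkContext() {
    // Spark.close() stops the shared context only if one exists, so this is safe to
    // call even when the test already closed it
    Spark.close();
}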