@Before
public void setUp() {
  // Inline Avro schema for a User record with a single string field.
  String userSchema =
      "{\"namespace\": \"example.avro\","
          + "\"type\": \"record\","
          + "\"name\": \"User\","
          + "\"fields\": [{\"name\": \"name\", \"type\": \"string\"}]}";
  Schema.Parser parser = new Schema.Parser();
  recordSchema = parser.parse(userSchema);
  // The same parser instance can parse further schemas, here a bare primitive.
  intSchema = parser.parse("{\"type\" : \"int\"}");
  // In-memory stand-in for a real Confluent Schema Registry.
  schemaRegistry = new MockSchemaRegistryClient();
}
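To exercise these fixtures, a minimal sketch like the following (not part of the original test class; the helper name and sample value are assumptions) binary-encodes a record against the parsed schema:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;

// Hypothetical helper, not in the original test: binary-encodes a User
// record built against the schema parsed in setUp().
static byte[] encodeUser(Schema recordSchema, String name) throws IOException {
  GenericRecord user = new GenericData.Record(recordSchema);
  user.put("name", name); // "name" is the single field declared in the schema above

  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(recordSchema);
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  writer.write(user, encoder);
  encoder.flush();
  return out.toByteArray();
}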
Example #2
  public static void main(String[] args) throws Exception {

    if (args.length < 4) {
      System.err.println("Usage: WriteKV USER_SCHEMA PET_SCHEMA IN_FILE OUT_FILE");
      System.exit(1);
    }
    // Parse the user and pet schemas from the .avsc files given on the command line.
    Schema.Parser parser = new Schema.Parser();
    Schema userSchema = parser.parse(new File(args[0]));
    Schema petSchema = parser.parse(new File(args[1]));

    File file = createInputFile(userSchema, petSchema, args[2], args[3]);
    System.out.println("wrote " + file.getName());
  }
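The createInputFile helper is not shown in this example. A minimal sketch of what such a helper could look like, assuming it writes an Avro container file of user/pet pairs (the record name, namespace, and field names below are all assumptions):

import java.io.File;
import java.util.Arrays;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

// Hypothetical stand-in for the createInputFile helper referenced above;
// the real implementation is not shown in the source.
static File createInputFile(
    Schema userSchema, Schema petSchema, String inPath, String outPath) throws Exception {
  // Pair each user with a pet in a simple key/value record schema.
  Schema kv = Schema.createRecord("KeyValuePair", null, "example.avro", false);
  kv.setFields(Arrays.asList(
      new Field("key", userSchema, null, null),
      new Field("value", petSchema, null, null)));

  File out = new File(outPath);
  try (DataFileWriter<GenericRecord> writer =
      new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(kv))) {
    writer.create(kv, out);
    // ... records parsed from inPath would be appended here ...
  }
  return out;
}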
Example #3
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("kafka-sandbox").setMaster("local[*]");
    JavaSparkContext sc = new JavaSparkContext(conf);
    JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(2000));

    Set<String> topics = Collections.singleton("mytopic");
    Map<String, String> kafkaParams = new HashMap<>();
    kafkaParams.put("metadata.broker.list", "sandbox.hortonworks.com:6667");

    // Direct stream over raw bytes; the message values are Avro-encoded records.
    JavaPairInputDStream<String, byte[]> directKafkaStream =
        KafkaUtils.createDirectStream(
            ssc,
            String.class,
            byte[].class,
            StringDecoder.class,
            DefaultDecoder.class,
            kafkaParams,
            topics);

    directKafkaStream.foreachRDD(
        rdd -> {
          rdd.foreach(
              avroRecord -> {
                // Rebuild the schema and codec on the executor; for brevity this
                // happens per record rather than once per partition.
                Schema.Parser parser = new Schema.Parser();
                Schema schema = parser.parse(AvroVulabProducer.USER_SCHEMA);
                Injection<GenericRecord, byte[]> recordInjection =
                    GenericAvroCodecs.toBinary(schema);
                // Decode the message value (the tuple's second element) back
                // into a GenericRecord.
                GenericRecord record = recordInjection.invert(avroRecord._2).get();

                System.out.println(
                    "str1= "
                        + record.get("str1")
                        + ", str2= "
                        + record.get("str2")
                        + ", int1="
                        + record.get("int1"));
              });
        });

    ssc.start();
    ssc.awaitTermination();
  }
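AvroVulabProducer itself is not shown here. A minimal sketch of a matching producer, assuming the same broker, topic, and Bijection codec as the consumer above (the class name, field values, and schema body, inferred from the fields str1, str2, and int1 read in the foreach, are all assumptions):

import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.twitter.bijection.Injection;
import com.twitter.bijection.avro.GenericAvroCodecs;

// Hypothetical producer; the schema body is inferred from the fields the
// consumer reads above (str1, str2, int1).
public class AvroProducerSketch {
  static final String USER_SCHEMA =
      "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
          + "{\"name\":\"str1\",\"type\":\"string\"},"
          + "{\"name\":\"str2\",\"type\":\"string\"},"
          + "{\"name\":\"int1\",\"type\":\"int\"}]}";

  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "sandbox.hortonworks.com:6667"); // broker from the consumer
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");

    Schema schema = new Schema.Parser().parse(USER_SCHEMA);
    Injection<GenericRecord, byte[]> injection = GenericAvroCodecs.toBinary(schema);

    GenericRecord record = new GenericData.Record(schema);
    record.put("str1", "hello");
    record.put("str2", "world");
    record.put("int1", 42);

    // Serialize with the same Bijection codec the consumer inverts.
    try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(props)) {
      producer.send(new ProducerRecord<>("mytopic", injection.apply(record)));
    }
  }
}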
Example #4
  public static Schema loadSchemaFromUrl(String schemaUrl) throws IOException {
    LOGGER.info("Fetching schema from {}", schemaUrl);
    Configuration conf = new Configuration();
    Schema.Parser parser = new Schema.Parser();
    InputStream is = null;
    try {
      // Read from HDFS when given an hdfs:// URL; otherwise treat the
      // location as a plain URL (http, file, etc.).
      if (schemaUrl.toLowerCase().startsWith("hdfs://")) {
        FileSystem fs = FileSystem.get(conf);
        is = fs.open(new Path(schemaUrl));
      } else {
        is = new URL(schemaUrl).openStream();
      }
      Schema schema = parser.parse(is);
      LOGGER.debug("Fetched schema from {}: {}", schemaUrl, schema);
      return schema;
    } finally {
      if (is != null) {
        is.close();
      }
    }
  }
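Usage is then a one-liner per source; both locations below are illustrative only:

// Illustrative calls; both the HDFS path and the HTTP URL are made-up examples.
Schema fromHdfs = loadSchemaFromUrl("hdfs:///schemas/user.avsc");
Schema fromHttp = loadSchemaFromUrl("http://example.com/schemas/user.avsc");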