  /**
   * Sets up and starts the injector pipeline, which reads the input text file and publishes its
   * contents to the Cloud Pub/Sub topic consumed by the streaming pipeline.
   */
  public static void main(String[] args) {
    PubsubFileInjectorOptions options =
        PipelineOptionsFactory.fromArgs(args).withValidation().as(PubsubFileInjectorOptions.class);

    Pipeline pipeline = Pipeline.create(options);

    pipeline
        .apply(TextIO.Read.from(options.getInput()))
        .apply(
            IntraBundleParallelization.of(PubsubFileInjector.publish(options.getOutputTopic()))
                .withMaxParallelism(20));

    pipeline.run();
  }
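  // For reference, a minimal sketch of the PubsubFileInjectorOptions interface the main method
  // above assumes. Only the two getters actually used there (getInput, getOutputTopic) are shown;
  // the supertype, descriptions, and validation annotations follow the usual PipelineOptions
  // conventions and are illustrative rather than taken from the original source.
  public interface PubsubFileInjectorOptions extends PipelineOptions {
    @Description("Input file pattern to read from")
    @Validation.Required
    String getInput();
    void setInput(String value);

    @Description("Cloud Pub/Sub topic to publish to")
    @Validation.Required
    String getOutputTopic();
    void setOutputTopic(String value);
  }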
  /**
   * Runs the batch injector for the streaming pipeline.
   *
   * <p>The injector pipeline reads the given text file and injects its contents into the given
   * Google Cloud Pub/Sub topic.
   */
  public void runInjectorPipeline(String inputFile, String topic) {
    // Run the injector as a separate batch job with its own worker count and job name, so it does
    // not disturb the streaming pipeline's options.
    DataflowPipelineOptions copiedOptions = options.cloneAs(DataflowPipelineOptions.class);
    copiedOptions.setStreaming(false);
    copiedOptions.setNumWorkers(
        options.as(ExamplePubsubTopicOptions.class).getInjectorNumWorkers());
    copiedOptions.setJobName(options.getJobName() + "-injector");
    Pipeline injectorPipeline = Pipeline.create(copiedOptions);
    injectorPipeline
        .apply(TextIO.Read.from(inputFile))
        .apply(
            IntraBundleParallelization.of(PubsubFileInjector.publish(topic))
                .withMaxParallelism(20));
    // Track the injector job so it can be cancelled when the example is torn down.
    DataflowPipelineJob injectorJob = (DataflowPipelineJob) injectorPipeline.run();
    jobsToCancel.add(injectorJob);
  }
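  /**
   * Illustrative sketch, not taken from the original source: cancels every injector job recorded
   * in {@code jobsToCancel} by {@link #runInjectorPipeline}. The method name is hypothetical, and
   * {@code DataflowPipelineJob.cancel()} is assumed to be available in the SDK version in use.
   */
  public void cancelInjectorJobs() throws IOException {
    for (DataflowPipelineJob job : jobsToCancel) {
      // Request termination of the batch injector job started by runInjectorPipeline.
      job.cancel();
    }
  }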