@Override
  public void createSchema(SimpleFeatureType featureType) throws IOException {
    List<String> header = new ArrayList<String>();

    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    if (geometryDescriptor != null
        && CRS.equalsIgnoreMetadata(
            DefaultGeographicCRS.WGS84, geometryDescriptor.getCoordinateReferenceSystem())
        && geometryDescriptor.getType().getBinding().isAssignableFrom(Point.class)) {
      header.add(this.latField);
      header.add(this.lngField);
    } else {
      throw new IOException(
          "Unable to use '"
              + this.latField
              + "' / '"
              + this.lngField
              + "' to represent "
              + geometryDescriptor);
    }
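    // Append every non-geometry attribute as an additional CSV column.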
    for (AttributeDescriptor descriptor : featureType.getAttributeDescriptors()) {
      if (descriptor instanceof GeometryDescriptor) continue;
      header.add(descriptor.getLocalName());
    }
    // Write out header, producing an empty file of the correct type
    CsvWriter writer = new CsvWriter(new FileWriter(this.csvFileState.getFile()), ',');
    try {
      writer.writeRecord(header.toArray(new String[header.size()]));
    } finally {
      writer.close();
    }
  }
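A minimal usage sketch for the createSchema(...) above, assuming it belongs to a CSV DataStore whose latField/lngField are already configured and using the same imports as the example; csvDataStore is a hypothetical instance of that class. The feature type must use WGS84 and bind its geometry to Point, otherwise the method throws an IOException.

  // Hypothetical driver for createSchema(...); not part of the original source.
  SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
  builder.setName("locations");
  builder.setCRS(DefaultGeographicCRS.WGS84); // must pass the WGS84 check above
  builder.add("geom", Point.class);           // Point binding is required
  builder.add("name", String.class);
  builder.add("population", Integer.class);
  SimpleFeatureType featureType = builder.buildFeatureType();

  // Writes a single header row: latField, lngField, "name", "population"
  csvDataStore.createSchema(featureType);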
  private static void runTopicCollab(DataManager mgr, int venueId, int[] stend)
      throws SQLException, IOException {
    CsvWriter outfile;
    DBLPTopicAuthCollabQuery tquery2 = new DBLPTopicAuthCollabQuery();
    tquery2.setResearchDomain(venueId);
    tquery2.setEarliestLatestYear(stend[0], stend[1]);
    tquery2.setTopicBasedCollaboration();
    outfile = new CsvWriter("out/topiccollabindex.csv");
    tquery2.processQuery(mgr.getConnection(), outfile);
    outfile.close();
    tquery2.setKWBasedCollaboration();
    outfile = new CsvWriter("out/collabindex.csv");
    tquery2.processQuery(mgr.getConnection(), outfile);
    outfile.close();
  }
Example #3
  /** Adds the searched tweets to the file tweetsBase.csv */
  public void saveBase() {
    try {
      if (baseTweets == null) {
        baseTweets = new ArrayList<Tweet>();
      }
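      // Open the tweet base in append mode, using ';' as the field delimiter.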
      CsvWriter csvOutput = new CsvWriter(new FileWriter(AppliSettings.filename, true), ';');
      for (Tweet tweet : listCleanTweets) {
        int index = alreadyIn(tweet.getId());
        if (index == -1) {
          csvOutput.write("" + tweet.getId());
          csvOutput.write(tweet.getUser());
          String text = cleanTweet(tweet.getTweet());
          csvOutput.write(text);
          csvOutput.write("" + tweet.getDate());
          csvOutput.write("" + tweet.getNote());
          csvOutput.endRecord();
          baseTweets.add(tweet);
        } else {
          updateCSV(tweet.getNote(), index);
        }
      }

      csvOutput.close();
    } catch (IOException e) {
      System.out.println("saveBase");
      System.out.println(e.getMessage());
    }
  }
  private static void runTopicDistribution(DataManager mgr, int venueId, int[] stend, int timestep)
      throws SQLException, IOException {
    CsvWriter outfile;
    /*
    outfile = new CsvWriter("out/topicq1_total.csv");
    DBLPTopicQuery1 tquery1 = new DBLPTopicQuery1();
    tquery1.setResearchDomain(venueId);
    tquery1.setEarliestLatestYear(stend[0], stend[1]);
    tquery1.setTimeStep(timestep);
    System.out.println("Processing Topic Query#1- by Paper Count");

    tquery1.setTopicDistributionByPaperCount();
    tquery1.processQuery(mgr.getConnection(), outfile);
    outfile.close();
    System.out.println("Processing Topic Query#1- by Citation Count Hindsight");

    tquery1.setTopicDistributionByCitationHindsight();
    outfile = new CsvWriter("out/topicq1cite_hindsight_total.csv");
    tquery1.processQuery(mgr.getConnection(), outfile);
    outfile.close();

    System.out.println("Processing Topic Query#1- by Citation Count Yearbased");
    // The next two lines only exist because the original data import format was wrong
    DataExporterImporter importer = new DataExporterImporter();
    importer.createCitationTable(mgr.getConnection(), venueId);

    tquery1.setTopicDistributionByCitationInAYear();
    outfile = new CsvWriter("out/topicq1cite_total.csv");
    tquery1.processQuery(mgr.getConnection(), outfile);
    outfile.close();
    */
    /*
    		DBLPTopicAffinityQuery tquery2= new DBLPTopicAffinityQuery();
    		tquery2.setResearchDomain(venueId);
    		tquery2.setEarliestLatestYear(stend[0], stend[1]);
    		tquery2.setBeginEndYear(stend[0], stend[1]);
    		outfile= new CsvWriter("out/author_topicaffinity.csv");
    		tquery2.processQuery(mgr.getConnection(), outfile);
    		outfile.close();
    */
    /*
    		DBLPTopicAffinityQuery tquery2= new DBLPTopicAffinityQuery();
    		tquery2.setResearchDomain(venueId);
    		outfile= new CsvWriter("out/author_topicaffinity_"+stend[0] + "-" + stend[0]+".csv");
    		tquery2.setBeginEndYear(stend[0], stend[0]);
    		System.out.println("Processing Query to generate Topic Affinity for each author for " + stend[0] + "-" + stend[0]);
    		tquery2.processQuery(mgr.getConnection(), outfile);
    		outfile.close();

    		for (int curryr = stend[0]+timestep; curryr <= stend[1]; curryr+=timestep) {
    			int endyr= (curryr + timestep-1)>stend[1]?stend[1]:(curryr+timestep-1);
    			String yrRange = stend[0] + "-" + endyr;
    			outfile= new CsvWriter("out/author_topicaffinity_"+yrRange+".csv");
    			tquery2.setBeginEndYear(stend[0], endyr);
    			System.out.println("Processing Query to generate Topic Affinity for each author for " + yrRange);
    			tquery2.processQuery(mgr.getConnection(), outfile);
    			outfile.close();
    		}
    */
    System.out.println(
        "Processing Topic Query#4- half-life calculation based on Cited Half-Life and prospective half-life");
    outfile = new CsvWriter("out/newtopichl.5.csv");

    /*
    DBLPTopicQuery2 tquery2 = new DBLPTopicQuery2();
    tquery2.setResearchDomain(venueId);
    tquery2.setEarliestLatestYear(stend[1], stend[1]);
    tquery2.setTimeStep(timestep);
    tquery2.processQuery(mgr.getConnection(), outfile);
    outfile.close();
    */

    DBLPTopicQuery5 tquery4 = new DBLPTopicQuery5();
    tquery4.setResearchDomain(venueId);
    tquery4.setEarliestLatestYear(stend[0], stend[1]);
    tquery4.setTimeStep(timestep);
    tquery4.processQuery(mgr.getConnection(), outfile);
    outfile.close();
  }
Example #5
  private void pisiCsv(CsvWriter csvWriter) {
    try {
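      // Map each hourly timestamp from d1 to d2 to its row index in the output table.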
      SortedMap<Date, Integer> vremena = new TreeMap<>();
      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT+1"));
      cal.setTime(d1);
      int nrow = 0;
      while (!cal.getTime().after(d2)) {
        vremena.put(cal.getTime(), nrow);
        cal.add(Calendar.HOUR, 1);
        nrow++;
      }
      int size = selektiraniPodaci.keySet().size();

      csvWriter.write("Vrijeme");
      HashMap<ProgramMjerenja, Integer> komponente = new HashMap<>();
      Integer ncol = 0;
      for (ProgramMjerenja pm : selektiraniPodaci.keySet()) {
        try {
          komponente.put(pm, ncol);
          csvWriter.write(pm.getKomponentaId().getFormula());
          csvWriter.write("obuhvat");
          csvWriter.write("status");

          ncol++;
        } catch (IOException ex) {
          Logger.getLogger(PfTest.class.getName()).log(Level.SEVERE, null, ex);
        }
      }
      csvWriter.endRecord();
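      // tablica[row][column]: row = hour index from vremena, column = program index from komponente.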
      Podatak[][] tablica = new Podatak[nrow][size];
      for (ProgramMjerenja pm : selektiraniPodaci.keySet()) {
        List<Podatak> podatak = podatakFacade.getPodatak(pm, d1, d2, true, true);
        for (Podatak p : podatak) {
          Integer i = komponente.get(pm);
          Integer j = vremena.get(p.getVrijeme());
          tablica[j][i] = p;
        }
      }
      for (Date d : vremena.keySet()) {
        try {

          csvWriter.write(sdf.format(d));
          for (int i = 0; i < ncol; i++) {
            Podatak p = tablica[vremena.get(d)][i];
            if (p != null) {
              csvWriter.write(p.getVrijednost().toString());
              csvWriter.write(p.getObuhvat().toString());
              csvWriter.write(Integer.toString(p.getStatus()));
            } else {
              // Keep the columns aligned when there is no measurement for this hour.
              csvWriter.write("");
              csvWriter.write("");
              csvWriter.write("");
            }
          }
          csvWriter.endRecord();
        } catch (IOException ex) {
          Logger.getLogger(PfTest.class.getName()).log(Level.SEVERE, null, ex);
        }
      }
      csvWriter.flush();
    } catch (IOException ex) {
      Logger.getLogger(PfTest.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
Example #6
    @Override
    public void map(LongWritable key, Text value, OutputCollector<Text, Text> oc, Reporter reporter)
        throws IOException {
      BufferedReader fin = null;
      InputStream is = null;
      try {
        String s3Path = value.toString();
        URL url = new URL(s3Path);
        URLConnection conn = url.openConnection();
        conn.setConnectTimeout(20000);
        conn.setReadTimeout(20000);

        is = conn.getInputStream();
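        // Consume the leading "BZ" magic bytes, which this CBZip2InputStream variant does not read itself.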
        is.read();
        is.read();
        fin = new BufferedReader(new InputStreamReader(new CBZip2InputStream(is), "UTF-8"));
        String currentTitle = "";
        // int cnt = 0;
        String line = null;
        StringWriter merged = null;
        CsvWriter writer;

        while ((line = fin.readLine()) != null) {
          if ("<page>".equals(line.trim())) {
            String secondLine = fin.readLine();
            currentTitle =
                new String(
                    secondLine.substring(
                        secondLine.indexOf(pre) + pre.length(), secondLine.indexOf(suf)));
            secondLine = null;
          }
          if (line.trim().startsWith("{{Infobox")) {
            sb = new StringBuilder();
            merged = new StringWriter();
            writer = new CsvWriter(merged, ',');
            sb.append(line);
            sb.append(sep);
            while (true) {
              line = fin.readLine().trim();
              sb.append(line);
              sb.append(sep);
              if ("}}".equals(line)) {
                sb.append(line);
                sb.append(sep);
                break;
              }
              reporter.progress();
            }
            writer.writeRecord(new String[] {currentTitle, sb.toString()});
            writer.flush();
            oc.collect(new Text(""), new Text(merged.toString()));
            reporter.progress();
            reporter.setStatus(value.toString() + " processed");
            sb = null;
            merged = null;
            writer = null;
          }

          line = null;
        }
      } catch (IOException ioe) {
        reporter.setStatus("This task didn't get fully passed");
      } finally {
        try {
          // fin wraps is; close whichever resource was actually opened.
          if (fin != null) {
            fin.close();
          } else if (is != null) {
            is.close();
          }
        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    }
Example #7
  public void createCSVResultFile(RemoteHostList remoteHostList, String currentTime) {
    for (RemoteHost rh : remoteHostList.getListOfRemoteHost()) {
      String outputCsvFile =
          "./host/"
              + currentTime
              + "_"
              + rh.getIP()
              + "/"
              + currentTime
              + "_"
              + rh.getIP()
              + "_EPAV_Result.csv";

      try {
        CsvWriter csvOutput = new CsvWriter(new FileWriter(outputCsvFile, true), ' ');

        csvOutput.write("Remote host infomation:");
        csvOutput.endRecord();
        csvOutput.endRecord();

        csvOutput.write("IP Address:");
        csvOutput.write(rh.getIP());
        csvOutput.endRecord();

        csvOutput.write("MAC Address:");
        csvOutput.write(rh.getMAC());
        csvOutput.endRecord();

        csvOutput.write("Operating system details:");
        csvOutput.write(rh.getOS());
        csvOutput.endRecord();
        csvOutput.endRecord();

        csvOutput.write("List of Ports:");
        csvOutput.endRecord();

        csvOutput.write("Name");
        csvOutput.write("State");
        csvOutput.write("Service");
        csvOutput.write("Warning");
        csvOutput.write("Solution");
        csvOutput.endRecord();

        for (Port p : rh.getPorts().getListOfPort()) {
          csvOutput.write(p.getName());
          csvOutput.write(p.getState());
          csvOutput.write(p.getService());
          csvOutput.write(p.getWarning());
          csvOutput.write(p.getSolution());
          csvOutput.endRecord();
        }
        csvOutput.endRecord();

        csvOutput.write("List of Vulnerabilities:");
        csvOutput.endRecord();

        csvOutput.write("Name");
        csvOutput.write("State");
        csvOutput.write("Patches");
        csvOutput.endRecord();

        for (Vulnerability v : rh.getVulnerabilities().getListOfVulnerability()) {
          csvOutput.write(v.getName());
          csvOutput.write(v.getState());
          csvOutput.write(v.getPatchList().display());
          csvOutput.endRecord();
        }

        csvOutput.close();
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
Example #8
  public static void main(String argv[]) throws IOException {
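    // NOTE: the hard-coded values below override whatever was passed on the command line.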
    argv[0] = "-mis";
    argv[1] = "/Users/Indri/Eclipse_workspace/GeoNames/cities1000.txt";
    argv[2] = "/Users/Indri/Eclipse_workspace/GazIndex";
    argv[3] = "SampleInput/jsonTweets.txt";
    argv[4] = "-json";
    argv[5] = "SampleOutput/jsonTweets.out.csv";
    boolean misspell = argv[0].equals("-mis");
    String dicPath = argv[1]; // gazetteer file from GeoNames, e.g. "GeoNames/allCountries.txt"
    String indexPath = argv[2]; // index path
    String input = argv[3]; // test file path, e.g. "tweet.csv"
    String type = argv[4]; // -json or -text
    String output = argv[5]; // output file path, e.g. "output2.csv"

    CollaborativeIndex ci =
        new CollaborativeIndex()
            .config("GazIndex/StringIndex", "GazIndex/InfoIndex", "mmap", "mmap")
            .open();

    EnglishParser enparser = new EnglishParser("res/", ci, false);
    ContextDisamb c = new ContextDisamb();
    LangDetector lang = new LangDetector("res/langdetect.profile");

    BufferedReader reader = GetReader.getUTF8FileReader(input);
    CsvWriter writer = new CsvWriter(output, ',', Charset.forName("utf-8")); // write

    writer.writeRecord(new String[] {"SPANISH TWEETS", "LOCATIONS"});

    String line = null;
    while ((line = reader.readLine()) != null) {
      line = line.trim();
      if (line.length() == 0) continue;
      Tweet t = new Tweet();
      String text = null;
      if (type.equals("-text")) text = line;
      else
        try {
          text = (DataObjectFactory.createStatus(line.trim()).getText());
        } catch (TwitterException e) {
          System.err.println("JSON format corrupted, or no content.");
          continue;
        }
      t.setText(text);
      List<String> match = enparser.parse(t);
      // Generate Matches
      if (match == null || match.size() == 0) {
        // Write a blank result and the line itself if no match is found.
        writer.writeRecord(new String[] {text, ""});
        continue;
      }
      HashSet<String> reducedmatch = new HashSet<String>();
      for (String s : match) reducedmatch.add(s.substring(3, s.length() - 3));

      // Disambiguate topo
      HashMap<String, String[]> result = c.returnBestTopo(ci, reducedmatch);

      if (result == null) {
        System.out.println("No GPS for any location is found.");
      } else {
        System.out.println("The grounded location(s) are:");
        String topoStr = "";
        for (String topo : result.keySet())
          topoStr +=
              "["
                  + (topo
                      + ": "
                      + result.get(topo)[2]
                      + " "
                      + result.get(topo)[0]
                      + " "
                      + result.get(topo)[1])
                  + "] ";
        writer.writeRecord(new String[] {text, topoStr});
      }
    }
    reader.close();
    writer.close();
  }
  public void exportarProspecto(String nombreArchivo) {

    String outputFile = nombreArchivo;

    // before we open the file check to see if it already exists
    boolean alreadyExists = new File(outputFile).exists();

    try {
      // use FileWriter constructor that specifies open for appending
      CsvWriter csvOutput = new CsvWriter(new FileWriter(outputFile, true), ',');

      // if the file didn't already exist then we need to write out the header line
      if (!alreadyExists) {
        csvOutput.write("Dni");
        csvOutput.write("Nombres");
        csvOutput.write("Apellido_Paterno");
        csvOutput.write("Apellido_Materno");
        csvOutput.write("Telefono");
        csvOutput.write("FechaContacto");
        csvOutput.write("Correo");
        csvOutput.write("Direccion");
        csvOutput.write("Distrito");
        csvOutput.write("Departamento");
        csvOutput.endRecord();
      }
      // else assume that the file already has the correct header line

      List<Prospecto> prospecto = devolverListaProspecto();
      for (Prospecto prospectos : prospecto) {
        // write out one record per prospect
        csvOutput.write(prospectos.getDni());
        csvOutput.write(prospectos.getNombres());
        csvOutput.write(prospectos.getApellido_Paterno());
        csvOutput.write(prospectos.getApellido_Materno());
        csvOutput.write(prospectos.getTelefono());
        csvOutput.write(prospectos.getFechaContacto());
        csvOutput.write(prospectos.getCorreo());
        csvOutput.write(prospectos.getDireccion());
        csvOutput.write(prospectos.getDistrito());
        csvOutput.write(prospectos.getDepartamento());

        csvOutput.endRecord();
      }

      csvOutput.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }