/**
 * Reads up to {@code noOfLines} records from the import file and returns them as a
 * two-dimensional preview array (rows x columns).
 *
 * @param noOfLines maximum number of records to read for the preview
 * @return the records actually read; may contain fewer than {@code noOfLines} rows
 * @throws IOException if the import file cannot be opened or read
 * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#getPreview(int)
 */
  public String[][] getPreview(int noOfLines) throws IOException {
    CsvReader csvReader =
        new CsvReader(
            new InputStreamReader(new FileInputStream(importFile), settings.getImportCharset()),
            settings.getSeperator());
    String[][] data = new String[noOfLines][];

    int row = 0;
    try {
      int columns = -1;
      // Check the row limit BEFORE reading so we do not consume a record beyond the
      // preview (the old order read and discarded one extra record).
      while (row < noOfLines && csvReader.readRecord()) {
        if (columns == -1) {
          // The column count is fixed by the first record read.
          columns = csvReader.getColumnCount();
        }
        data[row] = new String[columns];
        for (int i = 0; i < columns; i++) {
          data[row][i] = csvReader.get(i);
        }
        row++;
      }
    } finally {
      // Release the file handle even if reading fails.
      csvReader.close();
    }

    // Trim the array down to the number of rows actually read.
    return Arrays.copyOf(data, row);
  }
  /**
   * Loads the list of known bicycle models from a semicolon-separated, UTF-8 encoded
   * CSV file with columns: dirty model, model, wheel size.
   *
   * <p>Errors are logged (best effort) and an empty or partial list is returned.
   *
   * @param filename path of the CSV file to read
   * @return the models read so far; empty if the file is missing or unreadable
   */
  private List<SimpleBicycle> getKnownModelList(String filename) {
    List<SimpleBicycle> list = new ArrayList<SimpleBicycle>();
    File file = new File(filename);
    InputStream inputStream = null;
    CsvReader csvReader = null;
    try {
      inputStream = new FileInputStream(file);
      // The file is expected to be UTF-8 (an earlier revision used CP1251).
      csvReader = new CsvReader(inputStream, Charset.forName("UTF-8"));
      csvReader.setDelimiter(';');
      while (csvReader.readRecord()) {
        String[] values = csvReader.getValues();
        SimpleBicycle simpleBicycle = new SimpleBicycle();
        simpleBicycle.setDirtyModel(values[0].trim());
        simpleBicycle.setModel(values[1].trim());
        simpleBicycle.setWheelSize(WheelSize.getSizeByValue(values[2].trim()));
        list.add(simpleBicycle);
      }
    } catch (FileNotFoundException ex) {
      ex.printStackTrace();
    } catch (Exception ex) {
      ex.printStackTrace();
    } finally {
      // Close the reader and stream; previously both leaked on every call.
      if (csvReader != null) {
        csvReader.close();
      }
      if (inputStream != null) {
        try {
          inputStream.close();
        } catch (IOException ignored) {
          // best-effort close
        }
      }
    }

    return list;
  }
    /**
     * Builds a payment record from the next record of the given CSV reader, extracting
     * account number, document number and open amount.
     *
     * <p>Numeric fields that cannot be parsed fall back to 0.
     *
     * @param reader positioned CSV reader; exactly one record is consumed
     * @throws IOException if the reader fails
     * @throws NotFoundException if the reader has no further record
     */
    public PaymentRecord(CsvReader reader) throws IOException, NotFoundException {
      if (!reader.readRecord()) {
        throw new NotFoundException("kein Payment gefunden");
      }
      // TODO: process identical documents ("Belege") only once
      kontoStr = reader.get("Bu-Konto");
      belegStr = reader.get("BelegNr");
      opBetragStr = reader.get("OP-Betrag");
      // Convert the German number format ("1.234,56") to Java's ("1234.56"):
      // strip thousands separators, then turn the decimal comma into a dot.
      opBetragStr = opBetragStr.replace(".", "");
      opBetragStr = opBetragStr.replace(",", ".");

      // Unparseable values default to 0. Only NumberFormatException is tolerated now
      // (previously Throwable was silently swallowed, hiding real errors); the comma
      // was already replaced above, so the extra replace(',', '.') was redundant.
      betragNr = 0;
      try {
        betragNr = Float.parseFloat(opBetragStr);
      } catch (NumberFormatException e) {
        // keep default 0
      }

      kontoNr = 0;
      try {
        kontoNr = Integer.parseInt(kontoStr);
      } catch (NumberFormatException e) {
        // keep default 0
      }

      belegNr = 0;
      try {
        belegNr = Integer.parseInt(belegStr);
      } catch (NumberFormatException e) {
        // keep default 0
      }
    }
  /**
   * Builds the feature type for this CSV source: headers plus the most specific column
   * types observed in the data. When both the latitude and longitude columns are
   * numeric, they are replaced by a single WGS84 {@code Point} geometry attribute.
   */
  @Override
  protected SimpleFeatureType buildFeatureType() {
    String[] headers;
    Map<String, Class<?>> typesFromData;
    CsvReader csvReader = null;
    try {
      csvReader = csvFileState.openCSVReader();
      headers = csvReader.getHeaders();
      // Scans the data rows to determine the narrowest Java type per column.
      typesFromData = CSVStrategy.findMostSpecificTypesFromData(csvReader, headers);
    } catch (IOException e) {
      throw new RuntimeException(e);
    } finally {
      // Always release the reader, even when header/type discovery fails.
      if (csvReader != null) {
        csvReader.close();
      }
    }
    SimpleFeatureTypeBuilder builder =
        CSVStrategy.createBuilder(csvFileState, headers, typesFromData);
    Class<?> latClass = typesFromData.get(latField);
    Class<?> lngClass = typesFromData.get(lngField);
    if (CSVStrategy.isNumeric(latClass) && CSVStrategy.isNumeric(lngClass)) {
      // Both coordinate columns are numeric: insert a WGS84 Point attribute at the
      // latitude column's position and drop the two scalar coordinate columns.
      List<String> csvHeaders = Arrays.asList(headers);
      int index = csvHeaders.indexOf(latField);
      AttributeTypeBuilder builder2 = new AttributeTypeBuilder();
      builder2.setCRS(DefaultGeographicCRS.WGS84);
      builder2.binding(Point.class);
      AttributeDescriptor descriptor = builder2.buildDescriptor(pointField);
      builder.add(index, descriptor);

      builder.remove(latField);
      builder.remove(lngField);
    }
    return builder.buildFeatureType();
  }
// Beispiel #5
  /**
   * Downloads the Edubase "all schools" CSV from the configured URL and builds a
   * {@code School} (and a {@code Postcode} from its postcode) for every record.
   *
   * <p>I/O problems are logged to stderr; the method always terminates normally.
   *
   * @param args unused
   */
  public static void main(String[] args) {
    logger.debug("Running");

    BufferedReader in = null;
    CsvReader schools = null;
    try {
      URL url =
          new URL(ApplicationConfiguration.getApplicationProperty("edubase.data.allschools.csv"));

      in = new BufferedReader(new InputStreamReader(url.openStream()));
      schools = new CsvReader(in);

      schools.readHeaders();

      while (schools.readRecord()) {
        School s = new School(schools);

        // NOTE(review): constructed but otherwise unused — presumably for its side
        // effects; confirm before removing.
        Postcode p = new Postcode(s.getPostcode());

        // s.debug();
      }
    } catch (IOException e) {
      // MalformedURLException and FileNotFoundException are both IOExceptions,
      // so a single handler replaces the previous three identical catch blocks.
      e.printStackTrace();
    } finally {
      // Close the CsvReader (previously leaked) and the underlying stream.
      if (schools != null) {
        schools.close();
      }
      if (in != null) {
        try {
          in.close();
        } catch (IOException ignored) {
          // best-effort close
        }
      }
    }

    logger.debug("Finished");
  }
  /**
   * Reads SN inventory records from the given CSV reader and aggregates them into a
   * map keyed by {@code branchNum + "_" + goodNum}; records sharing a key have their
   * model counts summed.
   *
   * @param reader open CSV reader positioned at the start of the file
   * @return aggregated inventory records (possibly partial if an I/O error occurred)
   */
  public static Map<String, SNInventory> getMapByreader(CsvReader reader) {
    Map<String, SNInventory> map = new HashMap<String, SNInventory>();
    try {
      // Skip the header row; remove this call if the header itself is needed.
      reader.readHeaders();

      // Read every data row after the header.
      while (reader.readRecord()) {
        String[] strs = reader.getValues();
        if (null != strs) {
          SNInventory in = new SNInventory();
          for (int i = 0; i < strs.length; i++) {
            String str = strs[i];
            switch (i) {
              case 0:
                // Column 0 marks the goods type; "样机" = demo/sample machine.
                in.setGoodType("样机");
                break;
              case 2:
                in.setBranchName(str);
                break;
              case 3:
                in.setBranchNum(str);
                break;
              case 6:
                in.setGoodGroupName(str);
                break;
              case 7:
                in.setGoodGroupNum(str);
                break;
              case 10:
                in.setGoodpName(str);
                break;
              case 11:
                in.setGoodNum(str);
                break;
              case 12:
                // The quantity column is formatted as a decimal; truncate to int.
                in.setModelnum((int) Double.parseDouble(str));
                break;
              case 13:
                in.setSerialNumber(str);
                break;
              default:
                // all other columns are ignored
                break;
            }
          }
          String key = in.getBranchNum() + "_" + in.getGoodNum();
          SNInventory inmap = map.get(key);

          if (null == inmap) {
            map.put(key, in);
          } else {
            // Same branch/goods combination: accumulate the quantities.
            inmap.setModelnum(inmap.getModelnum() + in.getModelnum());
          }
        }
      }

      logger.info(map.size());
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      // Close the reader even when reading fails (previously leaked on error).
      reader.close();
    }
    return map;
  }
 /**
  * Discards the current reader and opens a fresh one on the import file so that the
  * next read starts again at the first record.
  *
  * @return always {@code true}
  * @throws IOException if the import file cannot be reopened
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#reset()
  */
 public boolean reset() throws IOException {
   CsvReader oldReader = reader;
   if (oldReader != null) {
     oldReader.close();
   }
   InputStreamReader source =
       new InputStreamReader(new FileInputStream(importFile), settings.getImportCharset());
   CsvReader freshReader = new CsvReader(source, settings.getSeperator());
   freshReader.setSafetySwitch(safetySwitch);
   reader = freshReader;
   return true;
 }
// Beispiel #8
  /**
   * Reads floating point values from a CSV file and returns them in an array.
   *
   * @param state the evolution state, used for fatal error reporting
   * @param terminalFileCSV the file with the float values
   * @return an array with all of the float values in the file, in read order
   */
  public double[] getRawTimeSeriesValuesFromCSVfile(
      EvolutionState state, CsvReader terminalFileCSV) {
    // Expect the CSV file to contain only float values; collect them row by row.
    // A generic List replaces the previous raw Vector (no sync needed here).
    List<String> rawValues = new ArrayList<String>(100);
    try {
      while (terminalFileCSV.readRecord()) {
        for (int i = 0; i < terminalFileCSV.getColumnCount(); i++) {
          rawValues.add(terminalFileCSV.get(i));
        }
      }
    } catch (IOException e) {
      state.output.fatal("The file with time series raw values failed when reading records. " + e);
    }
    // Convert the collected strings into a primitive array.
    double[] rvArray = new double[rawValues.size()];
    for (int i = 0; i < rvArray.length; i++) {
      rvArray[i] = Double.parseDouble(rawValues.get(i));
    }

    return rvArray;
  }
 /**
  * Reads the value of the given column as a {@link Long}.
  *
  * @param column zero-based column index
  * @return the parsed value, or {@code null} for an empty or missing cell
  * @throws IOException if the reader fails
  * @throws UnsupportedFormatException if the cell is not a valid long
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#getLong(int)
  */
 public Long getLong(int column) throws IOException, UnsupportedFormatException {
   try {
     String longS = reader.get(column);
     // Empty or missing cells are imported as NULL.
     if (null == longS || 0 == longS.trim().length()) {
       return null;
     }
     return Long.parseLong(longS);
   } catch (NumberFormatException nfe) {
     // Preserve the cause, consistent with getInt(int).
     throw new UnsupportedFormatException(nfe);
   }
 }
  /**
   * Reads the value of the given column as an {@link Integer}.
   *
   * @param column zero-based column index
   * @return the parsed value, or {@code null} for an empty or missing cell
   * @throws IOException if the reader fails
   * @throws UnsupportedFormatException if the cell is not a valid integer
   * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#getInt(int)
   */
  public Integer getInt(int column) throws IOException, UnsupportedFormatException {
    String cell = reader.get(column);
    // Empty or missing cells are imported as NULL.
    if (cell == null || cell.trim().isEmpty()) {
      return null;
    }
    try {
      return Integer.parseInt(cell);
    } catch (NumberFormatException nfe) {
      throw new UnsupportedFormatException(nfe);
    }
  }
  /**
   * Renders each record of the layer's CSV resource as a small filled square with its
   * "name" label next to it, using the color from the layer's style blackboard.
   *
   * <p>This example shows how to obtain a color.
   *
   * @param g graphics context to draw into
   * @param monitor progress monitor; may be {@code null}
   * @throws RenderException if the CSV cannot be read or the CRS transform fails
   */
  public void render(Graphics2D g, IProgressMonitor monitor) throws RenderException {
    if (monitor == null) monitor = new NullProgressMonitor();

    CsvReader reader = null;
    try {
      ILayer layer = getContext().getLayer();
      IGeoResource resource = layer.findGeoResource(CSV.class);
      // Nothing to render if this layer has no CSV resource.
      if (resource == null) return;
      ReferencedEnvelope bounds = getRenderBounds();
      monitor.subTask("connecting");
      CSV csv = resource.resolve(CSV.class, null);
      // LOOK UP STYLE
      IStyleBlackboard style = layer.getStyleBlackboard();
      Color color = (Color) style.get(ColorStyle.ID);

      // DATA TO WORLD: transform from the layer's CRS into the map's CRS.
      CoordinateReferenceSystem dataCRS = layer.getCRS();
      CoordinateReferenceSystem worldCRS = context.getCRS();
      MathTransform dataToWorld = CRS.findMathTransform(dataCRS, worldCRS, false);

      // DRAW FILE
      monitor.beginTask("csv render", csv.getSize());
      reader = csv.reader();
      reader.readHeaders();
      int nameIndex = reader.getIndex("name");
      // Reused as the output buffer of each coordinate transform.
      Coordinate worldLocation = new Coordinate();
      while (reader.readRecord()) {
        Point point = CSV.getPoint(reader);
        Coordinate dataLocation = point.getCoordinate();
        try {
          JTS.transform(dataLocation, worldLocation, dataToWorld);
        } catch (TransformException e) {
          // Skip points that cannot be reprojected.
          continue;
        }
        if (bounds != null && !bounds.contains(worldLocation)) {
          continue; // optimize! skip points outside the visible bounds
        }
        java.awt.Point p = getContext().worldToPixel(worldLocation);

        g.setColor(color);
        g.fillRect(p.x - 2, p.y - 2, 6, 6);

        g.setColor(Color.BLACK);
        String name = reader.get(nameIndex);
        g.drawString(name, p.x + 15, p.y + 15);
        monitor.worked(1);
        if (monitor.isCanceled()) break;
      }
    } catch (IOException e) {
      throw new RenderException(e); // rethrow any exceptions encountered
    } catch (FactoryException e) {
      throw new RenderException(e); // rethrow any exceptions encountered
    } finally {
      if (reader != null) reader.close();
      monitor.done();
    }
  }
 /**
  * Parses IP-range blocks from a GeoIP-style CSV file with columns
  * {@code startIpNum}, {@code endIpNum} and {@code pixelId}.
  *
  * @param filename CSV file to read (UTF-8)
  * @return all blocks in file order
  * @throws IOException if the file cannot be read
  */
 private List<Block> readBlocksCSV(File filename) throws IOException {
   CsvReader reader = new CsvReader(filename.getAbsolutePath(), ',', Charset.forName("UTF8"));
   try {
     reader.setTextQualifier('"');
     // Skip one line before the header — assumes the file has a leading non-header
     // line (e.g. a copyright notice); TODO confirm file layout.
     reader.skipLine();
     reader.readHeaders();
     ArrayList<Block> blocks = new ArrayList<Block>();
     while (reader.readRecord()) {
       int startIp = Integer.parseInt(reader.get("startIpNum"));
       int endIp = Integer.parseInt(reader.get("endIpNum"));
       int pixelId = Integer.parseInt(reader.get("pixelId"));
       Block block = new Block();
       block.startIp = startIp;
       block.endIp = endIp;
       block.pixelId = pixelId;
       blocks.add(block);
     }
     return blocks;
   } finally {
     // Previously the reader (and its file handle) leaked on every call.
     reader.close();
   }
 }
 /**
  * Parses locations from a GeoIP-style CSV file with columns {@code latitude},
  * {@code longitude} and {@code country}.
  *
  * @param filename CSV file to read (UTF-8)
  * @return all locations in file order
  * @throws IOException if the file cannot be read
  */
 private List<Location> readLocationCSV(File filename) throws IOException {
   CsvReader reader = new CsvReader(filename.getAbsolutePath(), ',', Charset.forName("UTF8"));
   try {
     reader.setTextQualifier('"');
     // Skip one line before the header — assumes the file has a leading non-header
     // line (e.g. a copyright notice); TODO confirm file layout.
     reader.skipLine();
     reader.readHeaders();
     ArrayList<Location> locations = new ArrayList<Location>();
     while (reader.readRecord()) {
       double lat = Double.parseDouble(reader.get("latitude"));
       double lon = Double.parseDouble(reader.get("longitude"));
       String country = reader.get("country");
       Location location = new Location();
       location.latitude = lat;
       location.longitude = lon;
       location.country = country;
       locations.add(location);
     }
     return locations;
   } finally {
     // Previously the reader (and its file handle) leaked on every call.
     reader.close();
   }
 }
 /**
  * Reads the value of the given column as a {@link Date}, parsed with the
  * user-configured date format.
  *
  * @param column zero-based column index
  * @return the parsed date, or {@code null} for an empty or missing cell
  * @throws IOException if the reader fails
  * @throws UnsupportedFormatException if the configured pattern is invalid or the
  *     cell does not match it
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#getDate(int)
  */
 public Date getDate(int column) throws IOException, UnsupportedFormatException {
   Date d = null;
   try {
     DateFormat f = new SimpleDateFormat(settings.getDateFormat());
     String dateString = reader.get(column);
     // we allow the return of null values if the reader returns
     // an empty String or a null String
     if (null != dateString && dateString.trim().length() > 0) {
       d = f.parse(dateString);
     }
   } catch (IllegalArgumentException e) {
     // The configured pattern itself is invalid; tell the user, then fail.
     // i18n[CSVFileImporter.invalidDateFormat=Invalid date format given]
     JOptionPane.showMessageDialog(null, stringMgr.getString("CSVFileImporter.invalidDateFormat"));
     throw new UnsupportedFormatException(e);
   } catch (ParseException pe) {
     // The cell value does not match the configured pattern; keep the cause.
     throw new UnsupportedFormatException(pe);
   }
   return d;
 }
// Beispiel #15
  /**
   * Updates the table of quotes for this symbol. Assumes that the listofsymbols has been updated,
   * but the table itself may not exist. Takes a date range, including both start and end days.
   *
   * <p>Yahoo Finance returns an error message rather than an empty CSV if the start and end dates
   * are today. The caller is responsible for checking that the call range is acceptable.
   *
   * @param symbol - symbol to update
   * @param startDate - beginning of range to add to
   * @param endDate - end of range to add to
   */
  static void updateSymbol(String symbol, Date startDate, Date endDate) throws Exception {
    System.out.println("Trying to update:" + symbol);
    URL data = YahooCsvDownloadUrl(symbol, startDate, endDate);
    BufferedReader in = null;
    try {
      in = new BufferedReader(new InputStreamReader(data.openStream()));
    } catch (java.io.FileNotFoundException e) {
      System.out.println("Symbol not found:" + symbol);
      e.printStackTrace();
      return;
    }
    try {
      // Opened only after the download succeeded, so the early return above no
      // longer leaks a connection.
      Connection conn = initialize();
      try {
        CsvReader reader = new CsvReader(in);
        try {
          reader.readHeaders();
          String[] headers = reader.getHeaders();
          // NOTE(review): symbol is concatenated into the SQL. Table names cannot be
          // bound as parameters, so callers must guarantee symbol is a sanitized
          // identifier, never untrusted input.
          Statement stat = conn.createStatement();
          try {
            stat.executeUpdate(
                "CREATE TABLE IF NOT EXISTS " + symbol + " (" + getColNames(headers) + ");");
          } finally {
            stat.close();
          }
          String statement =
              "INSERT INTO "
                  + symbol
                  + " ("
                  + getColNames(headers)
                  + ") VALUES ("
                  + getQueryQuestionMarks(headers)
                  + ");";

          PreparedStatement prep = conn.prepareStatement(statement);
          try {
            while (reader.readRecord()) {
              for (int j = 0; j < headers.length; j++) {
                String str = reader.get(headers[j]);
                prep.setString(j + 1, str);
              }
              // TODO: salim, what's the point of these calls? Each record is batched
              // and committed individually; kept as-is to preserve behavior, but a
              // single executeBatch after the loop would be far cheaper.
              prep.addBatch();
              conn.setAutoCommit(false);
              prep.executeBatch();
              conn.setAutoCommit(true);
            }
          } finally {
            prep.close();
          }
        } finally {
          reader.close();
        }
      } finally {
        conn.close();
      }
    } finally {
      in.close();
    }
  }
  /**
   * Parses name/e-mail records from a CSV stream into a map ordered by surname, then
   * first name. Rows with an invalid e-mail address or a blank name component are
   * skipped; when two rows share the same name, the later address replaces the earlier
   * one (with an error logged).
   *
   * @param csvStream CSV data: column 0 e-mail address, column 1 given name, column 2 surname
   * @return the parsed addresses keyed by name
   * @throws IOException if the stream cannot be read
   */
  public static Map<Name, String> parseEmailAddresses(InputStream csvStream) throws IOException {
    final CsvReader reader = new CsvReader(csvStream, ',', Charset.forName("utf-8"));
    try {
      final EmailValidator emailValidator = EmailValidator.getInstance();
      final Map<Name, String> result =
          new TreeMap<Name, String>(SurnameFirstNameComparator.getInstance());
      while (reader.readRecord()) {
        final String email = reader.get(0);
        if (!emailValidator.isValid(email)) {
          LOGGER.trace(
              "The e-mail address {} is not valid; this will not be considered a valid e-mail entry.",
              email);
          continue;
        }

        final String first = reader.get(1);
        final String last = reader.get(2);
        if (StringUtils.isBlank(first) || StringUtils.isBlank(last)) {
          LOGGER.trace(
              "A record for e-mail address {} with given name {} and surname {} contains an invalid name component and will be ignored.",
              email,
              first,
              last);
          continue;
        }

        final Name key = new BasicName(first, last);
        if (result.containsKey(key)) {
          LOGGER.error(
              "Warning! {} {} already exists with e-mail address {} and will be replaced with {}.",
              first,
              last,
              result.get(key),
              email);
        }
        result.put(key, email);
      }
      return result;
    } finally {
      reader.close();
    }
  }
  /**
   * Pre-action interceptor step for CSV imports:
   *
   * <p>1. get fileComponent from action
   *
   * <p>2. validate fileName to see if filePrefix is allowed
   *
   * <p>3. validate file format with the import file template
   *
   * <p>4. validate the parsed records and expose them to the action
   *
   * @see
   *     com.pc.core.web.interceptor.AroundInterceptor#before(com.opensymphony.xwork2.ActionInvocation)
   */
  public void before(ActionInvocation invocation) throws Exception {

    Action action = (Action) invocation.getAction();
    HttpServletRequest request =
        (HttpServletRequest)
            invocation.getInvocationContext().get(ServletActionContext.HTTP_REQUEST);

    // Only applies to import-capable actions carrying an uploaded multipart file.
    if (action instanceof ImportPreparation && request instanceof MultiPartRequestWrapper) {

      ServletContext servletContext = ServletActionContext.getServletContext();
      ActionContext invocationContext = invocation.getInvocationContext();

      ImportPreparation importPreparation = (ImportPreparation) action;

      // 1. get fileComponent from valueStack
      FileComponent fileComponent =
          (FileComponent) invocation.getStack().findValue("fileComponent");

      if (fileComponent == null || fileComponent.getUpload() == null)
        throw new ImportException(
            "error.upload.file.empty", "r:" + importPreparation.getErrorReturn() + "/import/error");

      // 2. validate fileName
      String fileExt = fileComponent.getFileExtension();
      if (!Arrays.asList(this.allowedPrefix).contains(fileExt)) {
        throw new ImportException(
            "error.upload.file-ext.not-allowed",
            "r:" + importPreparation.getErrorReturn() + "/import/error");
      }

      // Create CsvReader to parse the file
      CsvReader csvReader = new CsvReader(new FileReader(fileComponent.getUpload()));

      try {

        // get file header information from cache (e.g. "<TARGET>_HEADERS" attribute)
        // NOTE(review): header may be null if the attribute is missing — split()
        // would then NPE; confirm the cache is always populated first.
        String header =
            (String)
                servletContext.getAttribute(
                    importPreparation.getTarget().toUpperCase() + "_HEADERS");
        String[] headerKeys = header.split(",");

        // 3. validate file format: the uploaded file must have exactly as many
        // header columns as the cached template.
        if (csvReader.readHeaders()) {
          if (headerKeys.length != csvReader.getHeaderCount()) {
            throw new ImportException(
                "error.upload.file-format.mismatch",
                "r:" + importPreparation.getErrorReturn() + "/import/error");
          }
        }

        // Read data from CsvReader: one LinkedHashMap per record, keyed by the
        // template header names (preserving column order).
        List<Map<String, String>> data = new ArrayList<Map<String, String>>();
        while (csvReader.readRecord()) {
          Map<String, String> record = new LinkedHashMap<String, String>();
          for (int i = 0; i < headerKeys.length; i++) {
            record.put(headerKeys[i], csvReader.get(i));
          }
          data.add(record);
        }

        // 4. validate data
        importPreparation.validate(data);

        // 5. set data on the value stack for the action to consume
        OgnlContextState.setCreatingNullObjects(invocationContext.getContextMap(), true);
        OgnlContextState.setDenyMethodExecution(invocationContext.getContextMap(), true);
        OgnlContextState.setReportingConversionErrors(invocationContext.getContextMap(), true);

        prepareImportData(invocation, importPreparation.getDataName(), data);

      } finally {

        // release all the resource anyway
        csvReader.close();

        OgnlContextState.setCreatingNullObjects(invocationContext.getContextMap(), true);
        OgnlContextState.setDenyMethodExecution(invocationContext.getContextMap(), true);
        OgnlContextState.setReportingConversionErrors(invocationContext.getContextMap(), true);
      }
    }
  }
  // Columns of interest: model , status
  /**
   * Loads the downloaded SuNing demo-machine inventory CSV for the given day and
   * collects the records belonging to the given branch, grouped by product type.
   *
   * <p>Only runs for branches whose sale type is SuNing (苏宁); otherwise an empty
   * map is returned.
   *
   * @param user calling user (currently unused here)
   * @param startTime date folder the download was stored under, e.g. "2015-05-03"
   * @param branchid id of the branch whose records should be collected
   * @return inventory records of the branch keyed by product type; possibly empty
   */
  public static Map<String, List<SNInventory>> getMapBranchType(
      User user, String startTime, int branchid) {
    // startTime = "2015-05-03";
    // List<Inventory> list = new ArrayList<Inventory>();
    Branch branch = BranchService.getMap().get(branchid);
    String bnum = "";
    if (null != branch) {
      bnum = branch.getEncoded();
    }

    // logger.info()
    // Lookup table from goods number to product, used to resolve the product type.
    Map<String, Product> mapp = ProductService.gettypeNUmmap();

    // logger.info(mapp);

    Map<String, List<SNInventory>> map = new HashMap<String, List<SNInventory>>();
    if (null != branch && branch.getBranchtype().getSaletype() == SaleModel.Model.苏宁.getValue()) {
      try {
        String tempPath = PathUtill.getXMLpath();
        tempPath +=
            "data"
                + File.separator
                + "DownloadInventory"
                + File.separator
                + startTime
                + File.separator
                + "SuNing";
        logger.info(tempPath);
        File file = new File(tempPath);
        if (!file.exists()) {
          file.mkdirs();
        }

        File file2 = new File(tempPath + File.separator + "model.csv");
        // file2.createNewFile();

        CsvReader reader =
            new CsvReader(file2.getPath(), ',', Charset.forName("GBK")); // GBK is usually the right encoding for these files

        reader.readHeaders();

        while (reader.readRecord()) { // read every data row after the header
          String[] strs = reader.getValues();
          if (null != strs) {
            SNInventory in = new SNInventory();
            for (int i = 0; i < strs.length; i++) {
              // logger.info(i);
              String str = strs[i];
              // logger.info(str);
              if (i == 0) {
                // Column 0 marks the goods type; "样机" = demo/sample machine.
                in.setGoodType("样机");
              } else if (i == 2) {

                in.setBranchName(str);
              } else if (i == 3) {
                in.setBranchNum(str);
              } else if (i == 6) {
                in.setGoodGroupName(str);
              } else if (i == 7) {
                in.setGoodGroupNum(str);
              } else if (i == 10) {
                in.setGoodpName(str);
              } else if (i == 11) {
                in.setGoodNum(str);
              } else if (i == 12) {
                // The quantity column is formatted as a decimal; truncate to int.
                double realnum = Double.valueOf(str);
                int re = (int) realnum;
                in.setNum(re);
              } else if (i == 13) {
                in.setSerialNumber(str);
              }
            }

            // logger.info(in.getBranchNum());
            String bnu = in.getBranchNum();
            // logger.info(bnu);
            // logger.info(in.getBranchNum());
            // Keep only records of the requested branch.
            if (bnum.equals(bnu)) {
              String key = in.getGoodNum();
              // logger.info(key);
              Product p = mapp.get(key);
              // logger.info(p);
              if (null != p) {
                String pname = mapp.get(key).getType();

                // Group the record under its product type.
                List<SNInventory> inmap = map.get(pname);

                if (inmap == null) {
                  inmap = new ArrayList<SNInventory>();
                  map.put(pname, inmap);
                }

                inmap.add(in);
              }
            }
          }
        }

        logger.info(map.size());
        reader.close();
      } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
      } // readHeaders() above skips the header row; remove it if the header is needed.
    }
    return map;
  }
// Beispiel #19
  /** Sets up all the GEPSymbolSet symbols, loading them from the parameter file. */
  public void setup(
      final EvolutionState state, final Parameter base, final Parameter def, GEPSpecies species) {
    // Name of file with the terminal (variable) definitions and training values
    String terminalFilename;
    // Name of file with the test data values if specified
    String testingTerminalFilename;

    // keep track of the maximum arity of any function
    maxArity = 0;

    // What's my name? Don't really use this at this time ...
    name = state.parameters.getString(base.push(P_NAME), def.push(P_NAME));
    if (name == null || name.equals(""))
      state.output.warning(
          "No name was given for this GEP symbol set...not required at this time.",
          base.push(P_NAME),
          def.push(P_NAME));

    // How many functions do I have?
    numberOfFunctions =
        state.parameters.getInt(base.push(P_FUNCTIONSIZE), def.push(P_FUNCTIONSIZE), 1);
    numberOfSymbols = numberOfFunctions;

    // How many terminals do I have? Check for a data file first ...
    // if time series problem type and using raw time series data then
    //      number of terminals will be specified in the embedding dimension value
    //      provided in the parameter file
    // else if a file specified
    //      get the 1st line of the file and count the fields in it (#terminals is number of fields
    // minus
    //      the number of chromosomes/dependent variables)
    // else
    //      use the number of terminals specified in the parameter file

    terminalFilename =
        state.parameters.getStringWithDefault(
            base.push(P_TERMINALFILENAME), def.push(P_TERMINALFILENAME), "");
    testingTerminalFilename =
        state.parameters.getStringWithDefault(
            base.push(P_TESTINGTERMINALFILENAME), def.push(P_TESTINGTERMINALFILENAME), "");
    String terminalSymbolsfromFile[] = null;
    CsvReader terminalFileCSV = null;
    CsvReader testingTerminalFileCSV = null;
    // Are we processing raw time series data?
    boolean timeseriesWithRawDataValues =
        species.problemType == GEPSpecies.PT_TIMESERIES && species.timeseriesEmbeddingDimension > 0;
    if (!terminalFilename.equals("")) {
      String defaultTerminalFileSeparator = ","; // default field separator is comma
      try {
        // allow for gzip files .... end with .gz or .gzip\
        if (terminalFilename.endsWith(".gz") || terminalFilename.endsWith(".gzip")) {
          terminalFileCSV =
              new CsvReader(
                  (InputStream) (new GZIPInputStream(new FileInputStream(terminalFilename))),
                  Charset.forName("ISO-8859-1"));
          // set terminal file name to be the one with gzip or gz removed from the end
          if (terminalFilename.endsWith(".gz"))
            terminalFilename = terminalFilename.substring(0, terminalFilename.length() - 3);
          else terminalFilename = terminalFilename.substring(0, terminalFilename.length() - 5);
        } else terminalFileCSV = new CsvReader(terminalFilename);
      } catch (FileNotFoundException e) {
        state.output.fatal(
            "The file with terminal definitions and/or values ("
                + terminalFilename
                + ") could not be found",
            base.push(P_TERMINALFILENAME),
            def.push(P_TERMINALFILENAME));
      } catch (IOException e) {
        state.output.fatal(
            "The file with terminal definitions and/or values ("
                + terminalFilename
                + ") could not be found or the expected GZIP file could nor be opened",
            base.push(P_TERMINALFILENAME),
            def.push(P_TERMINALFILENAME));
      }
      // if filename has extension .dat it is space delimited, if .csv (or anything else
      // for that matter) it is comma delimited
      // (separator can still be changed with the terminalfileseparator parameter)
      if (terminalFilename.endsWith(".dat")) defaultTerminalFileSeparator = "space";
      // if using a file for the terminals and their values then check for a non-default separator
      String terminalFileSeparator =
          state.parameters.getStringWithDefault(
              base.push(P_TERMINALFILESEPARATOR),
              def.push(P_TERMINALFILESEPARATOR),
              defaultTerminalFileSeparator);
      if (terminalFileSeparator.toLowerCase().equals("comma")) terminalFileSeparator = ",";
      else if (terminalFileSeparator == "\\t" || terminalFileSeparator.toLowerCase().equals("tab"))
        terminalFileSeparator = "\t";
      else if (terminalFileSeparator == "space") terminalFileSeparator = " ";
      terminalFileCSV.setDelimiter(terminalFileSeparator.charAt(0));
      // let's check for a testing data file at this time as well .. if no file for
      // names and training data no need to worry about this one.
      if (!testingTerminalFilename.equals("")) {
        try {
          // allow for gzip files .... end with .gz or .gzip\
          if (testingTerminalFilename.endsWith(".gz") || testingTerminalFilename.endsWith(".gzip"))
            testingTerminalFileCSV =
                new CsvReader(
                    (InputStream)
                        (new GZIPInputStream(new FileInputStream(testingTerminalFilename))),
                    Charset.forName("ISO-8859-1"));
          else testingTerminalFileCSV = new CsvReader(testingTerminalFilename);
          testingTerminalFileCSV.setDelimiter(terminalFileSeparator.charAt(0));
        } catch (FileNotFoundException e) {
          state.output.fatal(
              "The file with testing data values ("
                  + testingTerminalFilename
                  + ") could not be found",
              base.push(P_TERMINALFILENAME),
              def.push(P_TERMINALFILENAME));
        } catch (IOException e) {
          state.output.fatal(
              "The file with testing data values ("
                  + terminalFilename
                  + ") could not be found or the expected GZIP file could nor be opened",
              base.push(P_TERMINALFILENAME),
              def.push(P_TERMINALFILENAME));
        }
      }
    }

    if (timeseriesWithRawDataValues) numberOfTerminals = species.timeseriesEmbeddingDimension;
    else if (terminalFileCSV != null) {
      // get the terminal symbols for the independent and dependent variables
      try {
        terminalFileCSV.readHeaders();
        terminalSymbolsfromFile = terminalFileCSV.getHeaders();
      } catch (IOException e) {
        state.output.fatal(
            "The file with variable (terminal) definitions and values ("
                + terminalFilename
                + ") failed to read the headers"
                + e,
            base.push(P_TERMINALFILENAME),
            def.push(P_TERMINALFILENAME));
      }
      // 1 less for each dependent variable (number of chromosomes) at the end
      numberOfTerminals = terminalSymbolsfromFile.length - species.numberOfChromosomes;
      if (numberOfTerminals < 1)
        state.output.fatal(
            "The file with terminal definitions and data values ("
                + terminalFilename
                + ") has no independent variables specified in record 1",
            base.push(P_TERMINALFILENAME),
            def.push(P_TERMINALFILENAME));
      // if using a file for the terminals and their values then check for a non-default separator
    } else {
      numberOfTerminals =
          state.parameters.getInt(base.push(P_TERMINALSIZE), def.push(P_TERMINALSIZE), 1);
    }
    numberOfSymbols += numberOfTerminals;

    if (numberOfSymbols < 1)
      state.output.error(
          "The GEPSymbolSet \"" + name + "\" have at least 1 terminal symbol defined.",
          base.push(P_TERMINALSIZE),
          def.push(P_TERMINALSIZE));

    // add a special Symbol for constants if we are using them ... it will be added to the
    // end of the array of symbols!
    if (species.useConstants) {
      numberOfTerminals++; // special constant terminal
      numberOfSymbols++;
    }

    symbols = new GEPSymbol[numberOfSymbols];

    int numberOfSymbolsWithoutConstantSymbol = numberOfSymbols;
    if (species.useConstants) // add the constant terminal symbol to the end
    {
      symbols[numberOfSymbols - 1] = (GEPSymbol) (new GEPConstantTerminalSymbol());
      symbols[numberOfSymbols - 1].id = numberOfSymbols - 1;
      numberOfSymbolsWithoutConstantSymbol--;
    }

    Parameter pTerminal = base.push(P_TERMINAL);
    Parameter pdefTerminal = def.push(P_TERMINAL);
    Parameter pFunction = base.push(P_FUNCTION);
    Parameter pdefFunction = def.push(P_FUNCTION);

    // create hashtable of names of terminals and hash table with names of functions
    // so we can easily check that they are not duplicates
    Hashtable functionHT = new Hashtable();
    Hashtable terminalHT = new Hashtable();

    //      process the functions
    for (int x = 0; x < numberOfFunctions; x++) {
      Parameter pp = pFunction.push("" + x);
      Parameter ppdef = pdefFunction.push("" + x);
      String function = state.parameters.getStringWithDefault(pp, ppdef, "");
      if (function.equals("")) // no name for the function
      state.output.fatal("Invalid function specifier: '" + function + "'", pp, ppdef);
      // make sure not specifying the same function more than once
      if (functionHT.get(function) != null)
        state.output.fatal(
            "Function '" + function + "' was specified more than once in list of function symbols");
      else functionHT.put(function, function);
      GEPFunctionSymbol fs = null;
      try {
        Class classDefinition = Class.forName(LOCATION_OF_FUNCTION_CLASSES + "." + function);
        fs = (GEPFunctionSymbol) classDefinition.newInstance();
      } catch (InstantiationException e) {
        state.output.fatal(
            "Unable to create GEPFunctionSymbol class for function '" + function + "'. " + e);
      } catch (IllegalAccessException e) {
        state.output.fatal(
            "Unable to create GEPFunctionSymbol class for function '" + function + "' " + e);
      } catch (ClassNotFoundException e) {
        state.output.fatal(
            "Unable to create GEPFunctionSymbol class for function '" + function + "' " + e);
      }

      // if using a logical function must be a logical problem
      if (fs.isLogicalFunction() && (species.problemType != GEPSpecies.PT_LOGICAL))
        state.output.fatal(
            "Can only use logical functions with a logical problem type. Function "
                + function
                + " is  a logical function.",
            pp,
            ppdef);
      // if using a numerical function must be an non logical problem
      if (!fs.isLogicalFunction() && (species.problemType == GEPSpecies.PT_LOGICAL))
        state.output.fatal(
            "Can only use logical functions with a non logical problem type. Function "
                + function
                + " is a numerical function.",
            pp,
            ppdef);

      symbols[x] = (GEPSymbol) fs;
      // symbols[x].setup(state, base);
      if (fs.arity < 1) state.output.fatal("Arity must be > 0 for a GEPTerminalSymbol)", pp, ppdef);
      symbols[x].id = x;
      int weight =
          state.parameters.getInt(pp.push(P_FUNCTIONWEIGHT), ppdef.push(P_FUNCTIONWEIGHT), 1);
      if (weight < 1) {
        state.output.warning(
            "Weight for GEP Function must be > 0; defaulting to 1)",
            pp.push(P_FUNCTIONWEIGHT),
            ppdef.push(P_FUNCTIONWEIGHT));
        weight = 1;
      }
      symbols[x].weight = weight;
      if (symbols[x].arity > maxArity) maxArity = symbols[x].arity;
    }

    // process the terminals  ... defined by default for timeseries data, in the
    // CSV file if specified and not timeseries, or in the params file if neither of those.
    for (int x = numberOfFunctions;
        x < numberOfSymbolsWithoutConstantSymbol;
        x++) { // load the terminal symbols
      int index = x - numberOfFunctions;
      String terminal = "";
      if (timeseriesWithRawDataValues) {
        // terminals get default names v0, v1, v2, v3, ... vn-1
        terminal = "v" + index;
      } else if (terminalFileCSV == null) // terminals defined in param file
      {
        Parameter pp = pTerminal.push("" + index);
        Parameter ppdef = pdefTerminal.push("" + index);
        terminal = state.parameters.getStringWithDefault(pp, ppdef, "");
      } else { // terminals defined in CSV file
        terminal = terminalSymbolsfromFile[index];
      }
      if (terminal.equals("")) // no name for the terminal
      state.output.fatal("Invalid terminal specifier: '" + terminal + "' for terminal # " + index);
      // make sure not specifying the same function more than once
      if (terminalHT.get(terminal) != null)
        state.output.fatal(
            "Terminal symbol (indep var) '"
                + terminal
                + "' was specified more than once in list of terminal symbols (independent variables)");
      else terminalHT.put(terminal, terminal);
      GEPTerminalSymbol ts = new GEPTerminalSymbol(terminal, this);
      symbols[x] = (GEPSymbol) ts;
      // symbols[x].setup(state, base);
      if (ts.arity != 0) // cannot happen
      state.output.fatal("Arity must be exactly 0 for a GEPTerminalSymbol)");
      symbols[x].id = x;
      symbols[x].weight = 1; // all Terminal symbols have weight of 1
    }

    // must be at least 1 Terminal symbol in the SymbolSet.
    // If not then the user didn't specify the terminals in the param file or in the data file
    if (numberOfTerminals < 1)
      state.output.fatal(
          "Must be at least one Terminal Symbol in the set of GEPSymbols\n"
              + "Either did not specify the terminal symbols in the param file or\n"
              + "did not specify the appropriate data file with the terminals specified in the first line.");

    // collect the id's (indices) of the terminal and function symbols that
    // are in the set of symbols
    terminals = new int[numberOfTerminals];
    int terminalNum = 0;
    functions = new int[numberOfFunctions];
    int functionNum = 0;
    for (int x = 0; x < numberOfSymbols; x++) {
      if (symbols[x] instanceof GEPConstantTerminalSymbol) terminals[terminalNum++] = x;
      else if (symbols[x] instanceof GEPTerminalSymbol) terminals[terminalNum++] = x;
      else if (symbols[x] instanceof GEPFunctionSymbol) functions[functionNum++] = x;
    }

    // collect the weights for symbols and terminals and normalize and cumulate them.
    // Then we can use these arrays to pick appropriate symbols or terminals according to
    // their weights ... using the RandomChooser.PickFromDistribution
    cumulativeNormalizedSymbolWeights = new float[numberOfSymbols];
    cumulativeNormalizedTerminalWeights = new float[numberOfTerminals];
    cumulativeNormalizedFunctionWeights = new float[numberOfFunctions];
    int j = 0, k = 0;
    for (int i = 0; i < numberOfSymbols; i++) {
      float weight = (float) (symbols[i].weight);
      cumulativeNormalizedSymbolWeights[i] = weight;
      if (symbols[i] instanceof GEPTerminalSymbol
          || symbols[i] instanceof GEPConstantTerminalSymbol)
        cumulativeNormalizedTerminalWeights[j++] = weight;
      if (symbols[i] instanceof GEPFunctionSymbol)
        cumulativeNormalizedFunctionWeights[k++] = weight;
    }
    RandomChoice.organizeDistribution(cumulativeNormalizedSymbolWeights);
    RandomChoice.organizeDistribution(cumulativeNormalizedTerminalWeights);
    RandomChoice.organizeDistribution(cumulativeNormalizedFunctionWeights);

    // use the 2/3 rule if fewer functions else the 1/2 rule (don't count the constant
    // terminal here)
    if (numberOfFunctions < (numberOfTerminals - (species.useConstants ? 1 : 0)))
      probabilityOfChoosingFunction = 2.0 / 3.0;
    else probabilityOfChoosingFunction = 0.5;

    // ... and finally get the training and testing data values for the terminals and dependent
    // variable
    // and put them into the Terminal instances (creating a 'special' Terminal Symbol to
    // hold the dependent variable training and testing values)

    // If this is a time series problem AND we are using the raw time series data then
    // we named the terminals v1, v2, ..., nn where n is the number of independent
    // variables as specified in the embedding dimension (which) was used to
    // determine the number of terminals. But we have to process the time series data
    // to get the values for each terminal ... get the raw data from the CSV file
    // if specified or from the user program ... then process it into rows of data
    // representing the independent variables and the dependent variable.
    //
    //   timeseries-delay -- if 1 uses each time series value, if 2 uses every other one, etc.
    //   timeseries-embeddingdimension -- determines the number of timeseries points to use
    //        as independent variables when transforming the set of time series data. Another
    //        data point is used as the dependent variable value. So the time series 'raw' data
    //        consisting of a list of single values is processed by splitting the data into
    //        groups (rows) of size embeddingdimension+1. From the end of the time series data
    //        embeddingdimension+1 values are chosen (if delay is 1 all values are chosen, if
    //        2 every other one is chosen). The last value is the independent variable value.
    //        Then the next row is selected by moving 'delay'
    //        values from the end and chosing embeddingdimension+1 values. This is repeated
    //        until no more sets of size embeddingdimension+1 can be chosen. If this produces
    //        n sets of data then testingprediction of them are used for testing and
    //        (n - testingpredictions) are used for training.
    //
    //        So if we had the data:
    //        1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
    //        and delay was 1 and embeddingdimension was 4 then we'd process the set into
    //        the following 17 data sets. If testingpredictions was 6 then the 1st 11
    //        would be used for training and the last 6 for testing
    //        iv1 iv2 iv3 iv4  dv
    //          1   2   3   4   5
    //          2   3   4   5   6
    //          3   4   5   6   7
    //              . . .
    //         14  15  16  17  18
    //         15  16  17  18  19
    //         16  17  18  19  20
    //         17  18  19  20  21
    //        If delay was 2 then 7 sets would be formed as:
    //        iv1 iv2 iv3 iv4  dv
    //          1   3   5   7   9
    //          3   5   7   9  11
    //              . . .
    //          9  11  13  15  17
    //         11  13  15  17  19
    //         13  15  17  19  21
    //   timeseries-testingpredictions -- specifies the number of sets of data to devote to testing
    if (timeseriesWithRawDataValues) {
      GEPDependentVariable.symbol = "dependentVariable";
      double rawTimeSeriesValues[] = null;
      if (terminalFileCSV == null)
        rawTimeSeriesValues = ((GEPProblem) state.evaluator.p_problem).getTimeSeriesDataValues();
      else rawTimeSeriesValues = getRawTimeSeriesValuesFromCSVfile(state, terminalFileCSV);
      if (rawTimeSeriesValues == null)
        state.output.fatal("Unable to get time series data values from User Program or CSV file");
      Vector values[] = processRawTimeSeriesValues(state, species, rawTimeSeriesValues);
      // have an array of vectors; 1 vector for each indep variable and the dep variable(s)
      for (int i = 0; i < values.length; i++) {
        // get the values for training ... and testing (specified by timeseriesTestingPredictions)
        int sizeOfTrainingData = values[i].size() - species.timeseriesTestingPredictions;
        double v[] = new double[sizeOfTrainingData];
        double testingV[] = new double[species.timeseriesTestingPredictions];
        for (int m = 0; m < v.length; m++) v[m] = ((Double) values[i].elementAt(m)).doubleValue();
        for (int n = 0; n < testingV.length; n++)
          testingV[n] = ((Double) values[i].elementAt(n + sizeOfTrainingData)).doubleValue();
        int depVarIndex = i - values.length + species.numberOfChromosomes;
        if (depVarIndex >= 0) // last column(s) in file is(are) the dependent variable(s)
        {
          GEPDependentVariable.trainingData.setValues(v, depVarIndex);
          GEPDependentVariable.testingData.setValues(testingV, depVarIndex);
        } else {
          ((GEPTerminalSymbol) symbols[numberOfFunctions + i]).setTrainingValues(v);
          ((GEPTerminalSymbol) symbols[numberOfFunctions + i]).setTestingValues(testingV);
        }
      }
    }
    // else If there is a file with the terminals and dep variable(s) use this else ask for
    // the values from the User Program (problem).
    else if (terminalFileCSV != null) // terminals defined in CSV file
    {
      GEPDependentVariable.symbol = terminalSymbolsfromFile[terminalSymbolsfromFile.length - 1];
      // get all the values into an array of vectors (each vector holds the values for a
      // single terminal (dep or indep variable)
      Vector values[] = new Vector[terminalSymbolsfromFile.length];
      for (int i = 0; i < terminalSymbolsfromFile.length; i++) values[i] = new Vector();
      try {
        while (terminalFileCSV.readRecord()) {
          for (int i = 0; i < terminalSymbolsfromFile.length; i++)
            values[i].add(terminalFileCSV.get(i));
        }
      } catch (IOException e) {
        state.output.fatal(
            "The file with terminal definitions/values failed when reading records. " + e);
      }

      for (int i = 0; i < terminalSymbolsfromFile.length; i++) {
        double v[] = new double[values[i].size()];
        for (int m = 0; m < v.length; m++)
          try {
            v[m] = Double.parseDouble((String) values[i].elementAt(m));
          } catch (Exception e) {
            state.output.fatal(
                "Failed trying to read a training data set value. The field is supposed to be a number but was the string '"
                    + (String) values[i].elementAt(m)
                    + "'.\n"
                    + e);
          }
        int jj = terminalSymbolsfromFile.length - species.numberOfChromosomes;
        if (i >= jj) // last column(s) in file is(are) the dependent variable(s)
        GEPDependentVariable.trainingData.setValues(v, i - jj);
        else ((GEPTerminalSymbol) symbols[numberOfFunctions + i]).setTrainingValues(v);
      }
      // get the testing data as well if a file was specified
      if (testingTerminalFileCSV != null) // testing data defined in CSV file
      {
        // get all the values into an array of vectors (each vector holds the values for a
        // single terminal (dep or indep variable)
        Vector testingValues[] = new Vector[terminalSymbolsfromFile.length];
        for (int i = 0; i < terminalSymbolsfromFile.length; i++) testingValues[i] = new Vector();
        try {
          while (testingTerminalFileCSV.readRecord()) {
            for (int i = 0; i < terminalSymbolsfromFile.length; i++)
              testingValues[i].add(testingTerminalFileCSV.get(i));
          }
        } catch (IOException e) {
          state.output.fatal(
              "The file with testing data values failed when reading records. "
                  + "\nMake sure the file has the same column separators as the testing data file."
                  + "\nAlso check that it has the same as the number of columns as the testing file"
                  + e);
        }

        for (int i = 0; i < terminalSymbolsfromFile.length; i++) {
          double v[] = new double[testingValues[i].size()];
          for (int m = 0; m < v.length; m++)
            try {
              v[m] = Double.parseDouble((String) testingValues[i].elementAt(m));
            } catch (Exception e) {
              state.output.fatal(
                  "Failed trying to read a testing data set value. The field is supposed to be a number but was the string '"
                      + (String) testingValues[i].elementAt(m)
                      + "'.\n"
                      + e);
            }
          int jj = terminalSymbolsfromFile.length - species.numberOfChromosomes;
          if (i >= jj) // last column(s) in file is(are) the dependent variable(s)
          GEPDependentVariable.testingData.setValues(v, i - jj);
          else ((GEPTerminalSymbol) symbols[numberOfFunctions + i]).setTestingValues(v);
        }
      }
    }
    // else terminals were defined in the param file and no CSV file
    // defined so .... ask User Problem for the values, training and testing (if there are any)
    else {
      GEPDependentVariable.symbol = "dependentVariable";
      GEPProblem prob = (GEPProblem) state.evaluator.p_problem;
      double vals[] = null;
      for (int i = numberOfFunctions; i < numberOfSymbolsWithoutConstantSymbol; i++) {
        GEPTerminalSymbol ts = (GEPTerminalSymbol) symbols[i];
        vals = prob.getDataValues(ts.symbol);
        if (vals == null)
          state.output.fatal(
              "Expecting user problem (GEPProblem/ProblemForm) to supply training data values for terminal symbol '"
                  + ts
                  + "'.");
        ts.setTrainingValues(vals);
        vals = prob.getTestingDataValues(ts.symbol);
        if (vals != null) // don't have to supply testing data
        ts.setTestingValues(vals);
      }
      // if just one dep var then ask user by requesting with getdataValues("dependentVariable")
      // and if more than one dep var (more than 1 chromosome) then ask for dep variables
      // with getDataValues("dependentVariable0"), getDataValues("dependentVariable1"), ...
      for (int i = 0; i < species.numberOfChromosomes; i++) {
        String depVarSym = GEPDependentVariable.symbol;
        if (species.numberOfChromosomes > 1) depVarSym = depVarSym + i;
        vals = prob.getDataValues(depVarSym);
        if (vals == null)
          state.output.fatal(
              "Expecting user problem (GEPProblem/ProblemForm) to supply training data values for dependent variable '"
                  + depVarSym
                  + "'.");
        GEPDependentVariable.trainingData.setValues(vals, i);
        vals = prob.getTestingDataValues(depVarSym);
        if (vals != null) // don't have to supply testing data
        GEPDependentVariable.testingData.setValues(vals, i);
      }
    }

    // Some checking of data values to ensure they meet the requirements for the various problem
    // types.
    // For all problem types need to make sure all indep vars and the dep var have the same number
    // of values!
    int numValues = GEPDependentVariable.trainingData.values[0].length;
    for (int i = numberOfFunctions; i < numberOfSymbolsWithoutConstantSymbol; i++)
      if (((GEPTerminalSymbol) symbols[i]).trainingValues.length != numValues)
        state.output.fatal(
            "Must have same number of values for all independent variables and the dependent variable."
                + "/nNumber of values for Dependent Variable is: "
                + numValues
                + "/nNumber of values for Independent Variable '"
                + symbols[i].symbol
                + "' is: "
                + ((GEPTerminalSymbol) symbols[i]).trainingValues.length);
    // For Classification and logical problems all dependent variable values must be either 0 or 1
    if (species.problemType == GEPSpecies.PT_CLASSIFICATION
        || species.problemType == GEPSpecies.PT_LOGICAL) {
      double dvVals[] = GEPDependentVariable.trainingData.values[0];
      for (int i = 0; i < numValues; i++)
        if (dvVals[i] != 0.0 && dvVals[i] != 1.0)
          state.output.fatal(
              "For classification/logical problems all dependent variable values must be either 1 or 0.\nFound value "
                  + dvVals[i]
                  + " at index "
                  + i
                  + "in the values.");
    }
    // For Logical problems all independent variable values must be 0 or 1
    if (species.problemType == GEPSpecies.PT_LOGICAL) { // for each indep variable symbol
      for (int i = numberOfFunctions; i < numberOfSymbolsWithoutConstantSymbol; i++) {
        double ivVals[] = ((GEPTerminalSymbol) symbols[i]).trainingValues;
        for (int m = 0; m < numValues; m++)
          if (ivVals[m] != 0.0 && ivVals[m] != 1.0)
            state.output.fatal(
                "For logical problems all independent variable values must be either 1 or 0.\nFound value "
                    + ivVals[m]
                    + " at index '"
                    + m
                    + "' in the variable '"
                    + ((GEPTerminalSymbol) symbols[i]).symbol
                    + "'.");
      }
    }
    state.output.exitIfErrors();
  }
  /**
   * Imports prospect records from the given CSV file and registers each row via
   * {@code registrarProspecto}.
   *
   * <p>Expected column layout (after the header row): 0=dni, 1=nombres,
   * 2=apellido_paterno, 3=apellido_materno, 4=telefono, 5=fechacontacto,
   * 6=correo, 7=direccion, 8=distrito, 9=departamento.
   *
   * @param nombreArchivo path of the CSV file to import
   */
  public void importarProspecto(String nombreArchivo) {
    CsvReader prospecto = null;
    try {
      prospecto = new CsvReader(nombreArchivo);
      prospecto.readHeaders();

      while (prospecto.readRecord()) {
        String dni = prospecto.get(0);
        String nombres = prospecto.get(1);
        String apellido_paterno = prospecto.get(2);
        String apellido_materno = prospecto.get(3);
        String telefono = prospecto.get(4);
        String fechacontacto = prospecto.get(5);
        String correo = prospecto.get(6);
        String direccion = prospecto.get(7);
        // BUG FIX: distrito previously re-read column 7 (direccion) and departamento
        // read column 8; distrito lives in column 8 and departamento in column 9.
        String distrito = prospecto.get(8);
        String departamento = prospecto.get(9);

        registrarProspecto(
            dni,
            nombres,
            apellido_paterno,
            apellido_materno,
            telefono,
            fechacontacto,
            correo,
            direccion,
            distrito,
            departamento);
      }
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      // Close unconditionally so the underlying file handle is released even when
      // a read fails mid-import (the original leaked it on exception).
      if (prospecto != null) {
        prospecto.close();
      }
    }
  }
Beispiel #21
0
  /**
   * Reads the first data record of {@code filename}, drops every column whose header matches one
   * of {@code filters} (unless rescued by {@code includes}), and writes the surviving
   * header/value pair to {@code filename + FILENAME_POSTFIX}. Surviving values are also recorded
   * in the static {@code allData} map (keyed by filename) and their headers in {@code keys}.
   *
   * @param filename path of the CSV file to filter
   * @param filters  substrings marking columns to exclude
   * @param includes substrings overriding a filter match (column is kept)
   * @throws IOException if reading or writing fails
   */
  private static void filterCSV(
      String filename, LinkedList<String> filters, LinkedList<String> includes) throws IOException {
    CsvReader csvIn = new CsvReader(new FileReader(filename), SEPARATOR);
    try {
      csvIn.setSafetySwitch(false);
      if (!csvIn.readHeaders()) {
        System.err.println("Can not read header from '" + filename + "'");
        return;
      }
      csvIn.readRecord();
      System.out.println("'" + filename + "' has " + csvIn.getColumnCount() + " column.");

      int usedColumn = 0;
      String[] headers = csvIn.getHeaders();
      StringBuilder newHeader = new StringBuilder();
      StringBuilder newValues = new StringBuilder();

      HashMap<String, String> data = new HashMap<String, String>();
      allData.put(filename, data);

      for (String header : headers) {
        if (isFiltered(header, filters, includes)) {
          continue;
        }
        usedColumn++;
        String value = csvIn.get(header);

        newHeader.append(header);
        newHeader.append(SEPARATOR_OUT);
        newValues.append(value);
        newValues.append(SEPARATOR_OUT);

        // 'data' is always non-null here (the original guarded it needlessly).
        if (!keys.containsKey(header)) {
          keys.put(header, true);
        }
        data.put(header, value);
      }
      System.out.println(" -> " + usedColumn + " column remains");

      FileWriter fw = new FileWriter(filename + FILENAME_POSTFIX, false);
      try {
        fw.write(newHeader.toString());
        fw.write(NEW_LINE);
        fw.write(newValues.toString());
      } finally {
        // Close in finally so a write failure does not leak the output handle.
        fw.close();
      }
    } finally {
      // Closes the reader and its underlying FileReader (the original never closed either).
      csvIn.close();
    }
  }

  /**
   * Returns true when {@code header} contains one of {@code filters} and is not rescued by any
   * entry of {@code includes}.
   */
  private static boolean isFiltered(
      String header, LinkedList<String> filters, LinkedList<String> includes) {
    for (String filter : filters) {
      if (header.contains(filter)) {
        // Filter matches, but an include-list entry overrides it.
        for (String include : includes) {
          if (header.contains(include)) {
            return false;
          }
        }
        return true;
      }
    }
    return false;
  }
  /**
   * End-to-end plug-in test: locates the bundled {@code cities.csv}, creates an {@link IService}
   * for it via the service factory, registers it in the local catalog, resolves its single
   * resource to a {@link CSV}, and iterates every record to verify the reader yields points.
   */
  @Test
  public void testCreateService() throws Exception {
    Activator instance = Activator.getDefault();
    assertNotNull("Run as a JUnit Plug-in Test", instance);

    Bundle bundle = instance.getBundle();
    URL url = bundle.getEntry("cities.csv");
    System.out.println("Bundle URL" + url);

    URL fileUrl = FileLocator.toFileURL(url);
    System.out.println("Bundle URL" + fileUrl);

    // get the service factory
    IServiceFactory factory = CatalogPlugin.getDefault().getServiceFactory();

    // create the service

    List<IService> services = factory.createService(fileUrl);

    // ensure the service was created
    assertNotNull(services);
    assertEquals(1, services.size());

    // ensure the right type of service was created
    IService service = services.get(0);
    assertNotNull(service);

    ICatalog catalog = CatalogPlugin.getDefault().getLocalCatalog();
    catalog.add(service); // we can now find this service!

    List<IResolve> found = catalog.search("csv", null, null);
    assertEquals(2, found.size());

    // get all the resources from the service
    List<? extends IGeoResource> resources = service.resources(null);
    assertNotNull(resources);
    // FIX: expected value comes first (was assertEquals(resources.size(), 1)),
    // matching the convention used by the other assertions in this test.
    assertEquals(1, resources.size());

    CSV csv = null;
    for (IGeoResource resource : resources) {
      IGeoResourceInfo info = resource.getInfo(null);

      String description = info.getDescription();
      assertNotNull(description);
      System.out.println("Description:" + description);

      ReferencedEnvelope bounds = info.getBounds();
      assertTrue(!bounds.isNull());
      System.out.println("Bounds:" + bounds);

      if (resource.canResolve(CSV.class)) {
        csv = resource.resolve(CSV.class, null);
      }
    }
    // Fail with an assertion (not an NPE) if the resource did not resolve to CSV.
    assertNotNull("resource should resolve to CSV", csv);
    CsvReader reader = csv.reader();
    reader.readHeaders();
    reader.setCaptureRawRecord(true);
    reader.setTrimWhitespace(true);
    int count = 0;
    while (reader.readRecord()) {
      String x = reader.get("x");
      String y = reader.get("y");
      System.out.print(reader.getCurrentRecord() + " point " + x + " x " + y);
      Point point = CSV.getPoint(reader);
      System.out.println("-->" + point);

      count++;
    }
    reader.close();
    System.out.println(count);
  }
Beispiel #23
0
  public StringBuffer uploadAndReportCustomDataFile(
      InputStream inputStream,
      long size,
      String fileFormat,
      char delimChar,
      List<String> listOfUIDsToUpdate,
      CustomFieldGroup customFieldGroup,
      PhenoCollection phenoCollection,
      boolean overwriteExisting)
      throws FileFormatException, ArkSystemException {
    List<PhenoCollection> phenoCollectionsWithTheirDataToInsert = new ArrayList<PhenoCollection>();

    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();

    InputStream convertedInputStream;
    if (fileFormat.equalsIgnoreCase(Constants.FileFormat.XLS.toString())) {
      XLStoCSV xlsToCSV = new XLStoCSV(delimiterCharacter);
      convertedInputStream = xlsToCSV.convertXlsInputStreamToCsv(inputStream);
    } else {
      convertedInputStream = inputStream;
    }

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    int subjectCount = 0;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
      inputStreamReader = new InputStreamReader(convertedInputStream);
      csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
      String[] stringLineArray;

      List<LinkSubjectStudy> allSubjectWhichWillBeUpdated = null;
      if (listOfUIDsToUpdate.size() > 0) {
        allSubjectWhichWillBeUpdated =
            iArkCommonService.getUniqueSubjectsWithTheseUIDs(study, listOfUIDsToUpdate);
      } else {
        allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>();
      }
      if (size <= 0) {
        uploadReport.append(
            "ERROR:  The input size was not greater than 0. Actual length reported: ");
        uploadReport.append(size);
        uploadReport.append("\n");
        throw new FileFormatException(
            "The input size was not greater than 0. Actual length reported: " + size);
      }

      csvReader.readHeaders();

      List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
      ArkFunction phenoCustomFieldArkFunction =
          iArkCommonService.getArkFunctionByName(
              Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION); // ");

      List<CustomFieldDisplay> cfdsThatWeNeed =
          iArkCommonService.getCustomFieldDisplaysIn(
              fieldNameCollection, study, phenoCustomFieldArkFunction, customFieldGroup);

      // Paul has requested - in pheno we only insert List<PhenoData> dataThatWeHave =
      // iArkCommonService.getCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated);
      // read one line which contains potentially many custom fields
      QuestionnaireStatus uploadingStatus =
          iPhenotypicService.getPhenoCollectionStatusByName(
              Constants.PHENO_COLLECTION_STATUS_UPLOADED);

      while (csvReader.readRecord()) {
        List<PhenoData> phenoDataToInsertForThisPhenoCollection = new ArrayList<PhenoData>();
        log.info("reading record " + subjectCount);
        stringLineArray = csvReader.getValues();
        String subjectUID = stringLineArray[0];
        String recordDate = stringLineArray[1];
        Date recordDate_asDate =
            (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
        LinkSubjectStudy subject =
            getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
        // log.info("get subject from list");
        CustomField customField = null;
        List<PhenoCollection> subjectExistingMatchingPhenoCollections =
            iPhenotypicService.getSubjectMatchingPhenoCollections(
                subject, customFieldGroup, recordDate_asDate);
        PhenoCollection phenoCollectionIntoDB = new PhenoCollection();
        if (subjectExistingMatchingPhenoCollections.size() == 0 || !overwriteExisting) {
          phenoCollectionIntoDB.setDescription(phenoCollection.getDescription());
          phenoCollectionIntoDB.setLinkSubjectStudy(subject);
          //				phenoCollectionIntoDB.setName(phenoCollection.getName());
          phenoCollectionIntoDB.setQuestionnaire(customFieldGroup);
          if (recordDate.isEmpty()) {
            phenoCollectionIntoDB.setRecordDate(new Date());
          } else {
            phenoCollectionIntoDB.setRecordDate(recordDate_asDate);
          }
          phenoCollectionIntoDB.setStatus(
              uploadingStatus); // TODO for this to be UPLOADED TYPE STATUS
        } else {
          if (subjectExistingMatchingPhenoCollections.size() == 1) {
            recordDate_asDate =
                (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
            phenoCollectionIntoDB = subjectExistingMatchingPhenoCollections.get(0);
          } else {
            subjectCount++;
            continue;
          }
        }

        for (CustomFieldDisplay cfd : cfdsThatWeNeed) {

          String theDataAsString = null;
          customField = cfd.getCustomField();

          if (csvReader.getIndex(cfd.getCustomField().getName()) < 0) {
            for (String nameAsSeenInFile : fieldNameCollection) {
              if (nameAsSeenInFile.equalsIgnoreCase(cfd.getCustomField().getName())) {
                theDataAsString = csvReader.get(nameAsSeenInFile);
              }
            }
          } else {
            theDataAsString = csvReader.get(cfd.getCustomField().getName());
          }

          if (theDataAsString != null && !theDataAsString.isEmpty()) {
            PhenoData dataToInsert = new PhenoData();
            dataToInsert.setCustomFieldDisplay(cfd);
            // as much as i disagree...pheno data isn't tied to subject....pheno collection is
            // dataToInsert.setLinkSubjectStudy(subject);
            setValue(customField, cfd, dataToInsert, theDataAsString);
            boolean flag = true;
            for (PhenoData phenoData : phenoCollectionIntoDB.getPhenoData()) {
              if (phenoData.getCustomFieldDisplay().getId() == cfd.getId()) {
                phenoData.setDateDataValue(dataToInsert.getDateDataValue());
                phenoData.setErrorDataValue(dataToInsert.getErrorDataValue());
                phenoData.setNumberDataValue(dataToInsert.getNumberDataValue());
                phenoData.setTextDataValue(dataToInsert.getTextDataValue());
                flag = false;
                break;
              }
            }
            if (flag) {
              phenoDataToInsertForThisPhenoCollection.add(dataToInsert);
            }
            insertFieldsCount++;
          } else {
            emptyDataCount++;
          }
        }
        phenoCollectionIntoDB.getPhenoData().addAll(phenoDataToInsertForThisPhenoCollection);
        log.info(phenoCollectionIntoDB.toString());
        phenoCollectionsWithTheirDataToInsert.add(phenoCollectionIntoDB);
        subjectCount++;
      }
      log.info(
          "finished message for "
              + subjectCount
              + "\n      DATA inserts = "
              + insertFieldsCount
              + "  phenocollections = "
              + phenoCollectionsWithTheirDataToInsert.size()
              + "  amount of empty scells ="
              + emptyDataCount);
    } catch (IOException ioe) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected I/O exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
      throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile Exception stacktrace:", ex);
      throw new ArkSystemException(
          "Unexpected exception occurred when trying to process subject data file");
    } finally {
      uploadReport.append("Total file size: ");
      uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
      uploadReport.append(" MB");
      uploadReport.append("\n");

      if (csvReader != null) {
        try {
          csvReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: csvRdr.close()", ex);
        }
      }
      if (inputStreamReader != null) {
        try {
          inputStreamReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: isr.close()", ex);
        }
      }
    }
    uploadReport.append("Processed ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");

    // TODO better exceptionhandling
    iPhenotypicService.processPhenoCollectionsWithTheirDataToInsertBatch(
        phenoCollectionsWithTheirDataToInsert, study);
    return uploadReport;
  }
  /**
   * Reads a semicolon-delimited KZ payment CSV file, groups consecutive records by
   * account number (KontoNr), and processes each group whose amounts sum to zero.
   *
   * @param csvFile the CSV file to read; the first line is expected to be a header row
   * @param lh logging handler used to report failures
   * @return aggregated statistics over all processed record groups; partial (possibly
   *     empty) statistics if reading fails — errors are logged, not rethrown
   * @throws IOException declared for API compatibility; I/O failures are caught,
   *     logged, and result in an early return
   */
  public PaymentStatistic processKZ(File csvFile, LoggingHandler lh) throws IOException {

    CsvReader reader = null;
    ArrayList<PaymentRecord> recordList = new ArrayList<>();
    double sum = 0.0;
    PaymentStatistic statisticAll = new PaymentStatistic();

    try {

      reader = new CsvReader(csvFile.getCanonicalPath());
      reader.setDelimiter(';');
      reader.readHeaders();

      // Trim surrounding spaces from all header names so lookups by name work.
      String[] headers = new String[reader.getHeaderCount()];
      for (int i = 0; i < reader.getHeaderCount(); i++) {
        headers[i] = reader.getHeader(i).trim();
      }
      reader.setHeaders(headers);
      // http://www.csvreader.com/java_csv_samples.php
      // http://javacsv.sourceforge.net/

      // NOTE(review): the first record's amount is never added to sum — only
      // subsequent records contribute. Confirm this is intentional.
      PaymentRecord record = new PaymentRecord(reader);
      recordList.add(record);

      while (true) {
        try {

          PaymentRecord newRecord = new PaymentRecord(reader);

          // NOTE(review): if getKontoNr() returns a boxed type, '==' compares
          // references, not values — confirm it returns a primitive.
          if (newRecord.getKontoNr() == record.getKontoNr()) {
            // same customer: keep accumulating, do not process yet
            recordList.add(newRecord);
            sum = sum + newRecord.getBetragNr();
            record = newRecord;
            continue;
          }

          // different customer number: process the accumulated group
          if (sum == 0.0) {
            PaymentStatistic statistic = processKZPaymentRecord(recordList, lh);
            statisticAll.addStatistic(statistic);
          }
          recordList = new ArrayList<>();
          sum = 0.0;
          recordList.add(newRecord);
          sum = sum + newRecord.getBetragNr();
          record = newRecord;
          continue;

        } catch (Throwable th) {
          // no further record could be constructed: process the final group
          if (sum == 0.0) {
            PaymentStatistic statistic = processKZPaymentRecord(recordList, lh);
            statisticAll.addStatistic(statistic);
          }
        }
        break;
      }

    } catch (Throwable th) {
      lh.getLogger(Globals.LOGGINGKZ).log(Level.SEVERE, null, th);
      return statisticAll;
    } finally {
      // Always release the underlying file handle; the original only closed the
      // reader on the success path and leaked it on failure.
      if (reader != null) {
        reader.close();
      }
    }
    return statisticAll;
  }
Beispiel #25
0
  /**
   * Checks whether a migration from an older program version is needed and, if
   * so, applies every applicable migration step in sequence.
   *
   * <p>The stored config version is compared against the current program
   * version; each versioned block below runs only when crossing that version
   * boundary. The queue and keyring steps log and swallow their own failures;
   * the 0.22.0 config-transfer step lets exceptions propagate.
   *
   * @throws Exception if the 0.22.0 migration fails to read or parse the old
   *     XML config file
   */
  public static void update() throws Exception {
    String version = Config.getInstance().getVersion();
    if (StringUtils.isEmpty(version)) {
      // program is started for the first time, no update neccessary
      return;
    }
    if (ObjectUtils.equals(version, Config.getLatestVersion())) { // already updated
      return;
    }
    logger.log(
        Level.INFO,
        "Updating from legacy version {0} to current version {1}",
        new Object[] {version, Config.getLatestVersion()});

    // changes to 1.4
    if (Config.compareProgramVersions(version, "1.4") < 0) {
      // add message ID to queue
      logger.fine("Updating queue to add message IDs...");
      try {
        // the queue file location is a private static field on PersistenceManager;
        // reflection is used so its visibility does not have to change
        Field queueFileField = PersistenceManager.class.getDeclaredField("queueFile");
        queueFileField.setAccessible(true);
        File queueFile = (File) queueFileField.get(null);

        // append an empty message-ID column to every queued line
        List<String> lines = FileUtils.readLines(queueFile, "UTF-8");
        ArrayList<String> newLines = new ArrayList<String>();
        for (String line : lines) {
          newLines.add(line + ",");
        }
        FileUtils.writeLines(queueFile, "UTF-8", newLines);
      } catch (Exception ex) {
        // best-effort migration: a broken queue file must not block startup
        logger.log(Level.SEVERE, "Updating queue file failed", ex);
      }
    }

    // changes to 0.8.0
    if (Config.compareProgramVersions(version, "0.8.0") < 0) {
      // set country prefix from locale
      if (StringUtils.isEmpty(Config.getInstance().getCountryPrefix())) {
        Config.getInstance()
            .setCountryPrefix(CountryPrefix.getCountryPrefix(Locale.getDefault().getCountry()));
      }
    }

    // changes to 0.17.0
    if (Config.compareProgramVersions(version, "0.16.0") <= 0) {
      // keyring encryption changed from AES to XOR
      logger.fine("Updating keyring file to newer encryption...");
      try {
        // NOTE(review): hard-coded AES key in source — this only ever provided
        // obfuscation, not real secrecy; kept here solely to decrypt legacy files
        byte[] passphrase =
            new byte[] {
              -53, -103, 123, -53, -119, -12, -27, -82,
              3, -115, 119, -101, 86, 92, 92, 28
            };
        SecretKeySpec keySpec = new SecretKeySpec(passphrase, "AES");
        String CIPHER_TRANSFORMATION = "AES/ECB/PKCS5Padding";
        Cipher cipher = Cipher.getInstance(CIPHER_TRANSFORMATION);
        cipher.init(Cipher.DECRYPT_MODE, keySpec);

        Field keyringFileField = PersistenceManager.class.getDeclaredField("keyringFile");
        keyringFileField.setAccessible(true);
        File keyringFile = (File) keyringFileField.get(null);

        // suspend auto-saving while the keyring is cleared and repopulated
        ContinuousSaveManager.disableKeyring();
        Keyring keyring = Keyring.getInstance();
        keyring.clearKeys();

        // each CSV record: gateway name, login, AES-encrypted + Base64 password
        CsvReader reader = new CsvReader(keyringFile.getPath(), ',', Charset.forName("UTF-8"));
        reader.setUseComments(true);
        while (reader.readRecord()) {
          String gatewayName = reader.get(0);
          String login = reader.get(1);
          String password = reader.get(2);

          // decrypt the legacy password so putKey can re-store it in the new format
          byte[] ciphertext = Base64.decodeBase64(password.getBytes("UTF-8"));
          byte[] cleartext = cipher.doFinal(ciphertext);
          password = new String(cleartext, "UTF-8");

          Tuple<String, String> key = new Tuple<String, String>(login, password);
          keyring.putKey(gatewayName, key);
        }
        // NOTE(review): if a record fails to decrypt, the catch below swallows the
        // error but this reader is never closed — file handle leaks
        reader.close();

        ContinuousSaveManager.enableKeyring();
      } catch (Exception ex) {
        logger.log(Level.SEVERE, "Updating keyring file failed", ex);
      }
    }

    // changes to 0.22.0
    if (Config.compareProgramVersions(version, "0.21") <= 0) {
      // transfer senderName and senderNumber settings

      Field configFileField = PersistenceManager.class.getDeclaredField("configFile");
      configFileField.setAccessible(true);
      File configFile = (File) configFileField.get(null);

      // read the two legacy properties straight out of the old XML config;
      // the //void[@property]/string path suggests java.beans.XMLEncoder
      // output — TODO confirm against an actual legacy config file
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      XPathFactory xpf = XPathFactory.newInstance();
      XPath xpath = xpf.newXPath();

      Document doc = db.parse(configFile);
      String senderNumber = xpath.evaluate("//void[@property='senderNumber']/string", doc);
      String senderName = xpath.evaluate("//void[@property='senderName']/string", doc);

      // move them onto the default signature, which supersedes those settings
      Signature defaultSig = Signatures.getInstance().get(Signature.DEFAULT.getProfileName());
      if (StringUtils.isNotEmpty(senderName)) {
        defaultSig.setUserName(senderName);
      }
      if (StringUtils.isNotEmpty(senderNumber)) {
        defaultSig.setUserNumber(senderNumber);
      }
    }
  }
 /**
  * Releases the underlying CSV reader, if one was ever opened.
  *
  * @return always {@code true}
  * @throws IOException declared by the interface; not thrown here
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#close()
  */
 public boolean close() throws IOException {
   final CsvReader current = reader;
   if (null != current) {
     current.close();
   }
   return true;
 }
  /**
   * Loads the SuNing prototype ("样机") inventory rows from today's downloaded
   * {@code model.csv} and returns those rows that are not yet present in the
   * in-memory inventory map.
   *
   * <p>Expected CSV layout (GBK-encoded, with header row): column 2 = branch
   * name, 3 = branch number, 6 = goods group name, 7 = goods group number,
   * 10 = goods name, 11 = goods number, 12 = quantity, 13 = serial number —
   * assumed from the index handling below; confirm against a sample export.
   *
   * @return the inventory entries missing from the existing map; empty on I/O failure
   */
  public static List<SNInventory> getMap() {
    List<SNInventory> list = new ArrayList<SNInventory>();

    Map<Integer, Map<String, Map<Integer, InventoryBranch>>> mapin =
        InventoryBranchManager.getInventoryMap();

    CsvReader reader = null;
    try {
      // Build .../data/DownloadInventory/<today>/SuNing and make sure it exists.
      String tempPath = PathUtill.getXMLpath();
      tempPath +=
          "data"
              + File.separator
              + "DownloadInventory"
              + File.separator
              + TimeUtill.getdateString()
              + File.separator
              + "SuNing";
      logger.info(tempPath);
      File file = new File(tempPath);
      if (!file.exists()) {
        file.mkdirs();
      }

      File file2 = new File(tempPath + File.separator + "model.csv");

      // These exports are GBK-encoded.
      reader = new CsvReader(file2.getPath(), ',', Charset.forName("GBK"));

      reader.readHeaders(); // consume the header row
      int count = 0;
      while (reader.readRecord()) { // read every data row after the header
        String[] strs = reader.getValues();
        if (null != strs) {
          SNInventory in = new SNInventory();
          for (int i = 0; i < strs.length; i++) {
            String str = strs[i];
            switch (i) {
              case 0:
                // every row in this file is a prototype item
                in.setGoodType("样机");
                break;
              case 2:
                in.setBranchName(str);
                break;
              case 3:
                in.setBranchNum(str);
                break;
              case 6:
                in.setGoodGroupName(str);
                break;
              case 7:
                in.setGoodGroupNum(str);
                break;
              case 10:
                in.setGoodpName(str);
                break;
              case 11:
                in.setGoodNum(str);
                break;
              case 12:
                // quantity arrives as a decimal string; truncate to int
                double realnum = Double.valueOf(str);
                int re = (int) realnum;
                in.setNum(re);
                break;
              case 13:
                in.setSerialNumber(str);
                break;
              default:
                break;
            }
          }
          count++;
          // Resolve branch / product-type ids; a failed lookup leaves -1 and
          // the row is dropped below.
          int branchid = -1;
          int type = -1;
          try {
            branchid = BranchService.getNumMap(Model.苏宁.getValue()).get(in.getBranchNum()).getId();
          } catch (Exception e) {
            logger.info("转化门店出错");
          }
          try {
            type = ProductService.gettypeNUmmap().get(in.getGoodNum()).getId();
          } catch (Exception e) {
            logger.info("转化型号出错");
          }

          if (branchid != -1 && type != -1) {
            in.setBid(branchid);
            in.setTid(type);
            // Only keep rows not already tracked in the nested map
            // (branch id -> type key -> prototype bucket).
            Map<String, Map<Integer, InventoryBranch>> mapb = mapin.get(branchid);
            if (null == mapb) {
              list.add(in);
            } else {
              Map<Integer, InventoryBranch> mapt = mapb.get(type + "");
              if (null == mapt) {
                list.add(in);
              } else {
                InventoryBranch inb = mapt.get(OrderGoods.prototype);
                if (null == inb) {
                  list.add(in);
                }
              }
            }
          }
        }
      }
      logger.info("count" + count);
    } catch (IOException e) {
      logger.info(e);
    } finally {
      // Always release the file handle; the original only closed on the success
      // path and leaked the reader whenever an exception (e.g. a
      // NumberFormatException from a malformed quantity) escaped the loop.
      if (reader != null) {
        try {
          reader.close();
        } catch (Exception ex) {
          logger.info(ex);
        }
      }
    }

    logger.info("list" + list.size());

    return list;
  }
 /**
  * Advances to the next record in the CSV input.
  *
  * @return {@code true} if another record was read, {@code false} at end of input
  * @throws IOException if reading from the underlying source fails
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#next()
  */
 public boolean next() throws IOException {
   final boolean hasMoreRecords = reader.readRecord();
   return hasMoreRecords;
 }
  /**
   * Parses a bank-statement CSV export into its transaction records, collecting
   * header metadata into {@code global} along the way.
   *
   * <p>The file is a semicolon-delimited export with single-quote text
   * qualifiers, whose preamble sections are introduced by Polish marker lines
   * ({@code #Klient} = client, {@code #Za okres} = period, {@code #Waluta} =
   * currency, ...) — presumably an mBank-style statement; confirm against a
   * sample file. A small state machine walks the preamble in its fixed order
   * before switching to transaction rows.
   *
   * @param global receives preamble metadata under the keys {@code name},
   *     {@code start_date}, {@code end_date}, {@code currency},
   *     {@code account_number}, {@code start_balance}, {@code end_balance}
   * @param reader the CSV source; its delimiter/comment/qualifier settings are
   *     overwritten here
   * @return the 9-column transaction records found after the header section
   * @throws IOException if reading from the underlying stream fails
   */
  private static List<String[]> readRecords(
      final Hashtable<String, String> global, final CsvReader reader) throws IOException {
    final List<String[]> transactions = new ArrayList<String[]>();
    reader.setDelimiter(';');
    reader.setUseComments(false); // '#' lines are markers/data here, not comments
    reader.setTextQualifier('\'');

    // States 0-5 consume the metadata preamble in its fixed order;
    // state 6 collects transaction rows until the closing-balance line.
    int state = 0;

    while (reader.readRecord()) {
      final String[] record = reader.getValues();
      final String first = record[0];
      switch (state) {
        case 0:
          if (first.startsWith("#Klient")) {
            // client name is on the line following the marker
            if (reader.readRecord()) {
              global.put("name", reader.getValues()[0]);
              state = 1;
            }
          }
          break;
        case 1:
          if (first.startsWith("#Za okres:")) {
            // statement period: start and end date on the following line
            if (reader.readRecord()) {
              global.put("start_date", reader.getValues()[0]);
              global.put("end_date", reader.getValues()[1]);
              state = 2;
            }
          }
          break;
        case 2:
          if (first.startsWith("#Waluta")) {
            // currency code on the following line
            if (reader.readRecord()) {
              global.put("currency", reader.getValues()[0]);
              state = 3;
            }
          }
          break;
        case 3:
          if (first.startsWith("#Numer rachunku")) {
            // account number on the following line
            if (reader.readRecord()) {
              global.put("account_number", reader.getValues()[0]);
              state = 4;
            }
          }
          break;
        case 4:
          // opening balance sits on the marker line itself, second column
          if (first.startsWith("#Saldo pocz")) {
            global.put("start_balance", reader.getValues()[1]);
            state = 5;
          }
          break;
        case 5:
          // the column-header row marks the start of the transaction table
          if (first.startsWith("#Data operacji")) {
            state = 6;
          }
          break;
        case 6:
          // the closing-balance row terminates the table
          if (record.length > 6 && record[6].startsWith("#Saldo ko")) {
            global.put("end_balance", record[4]);
            state = 7;
            continue;
          }
          // skip malformed rows; valid transactions have exactly 9 columns
          if (record.length != 9) continue;
          transactions.add(record);
          break;
      }
    }
    return transactions;
  }
 /**
  * Returns the value of the given column in the current CSV record.
  *
  * @param column zero-based column index
  * @return the raw string value of that column
  * @throws IOException if the underlying reader fails
  * @see net.sourceforge.squirrel_sql.plugins.dataimport.importer.IFileImporter#getString(int)
  */
 public String getString(int column) throws IOException {
   final String cellValue = reader.get(column);
   return cellValue;
 }