示例#1
0
  @Test
  public void getBoolean() {

    // Oracle has no SELECT without FROM, so it needs the "dual" pseudo-table;
    // both statements yield a single truthy column aliased ISNT_NULL.
    final String selectSql =
        isOracle()
            ? "SELECT 1 AS ISNT_NULL from dual"
            : "SELECT 1 IS NOT NULL AS ISNT_NULL";
    SqlRow row = Ebean.createSqlQuery(selectSql).findUnique();
    assertThat(row.getBoolean("ISNT_NULL")).isTrue();
  }
示例#2
0
  /**
   * Maps doctor id to full name ("surname name patronymic") for every doctor whose
   * DOCTOR_TYPE primary key equals {@code doctorTypeNameId}, via a raw SQL join.
   *
   * @param doctorTypeNameId primary key of the DOCTOR_TYPE row to filter by
   * @return map of doctor id (as string) to the concatenated full name; empty when no
   *     doctor matches
   */
  public static Map<String, String> fioMapByType(Long doctorTypeNameId) {
    // TODO: check this statement at latest version of PostgreSQL
    Map<String, String> theMap = new HashMap<>();
    String sql =
        "SELECT DOCTOR.id as id, concat(surname, ' ', name, ' ', patronymic) as fio FROM DOCTOR LEFT JOIN DOCTOR_TYPE ON DOCTOR_TYPE.ID = DOCTOR.DOCTOR_TYPE_ID WHERE doctor_type.id=:type_id";
    SqlQuery sqlQuery = Ebean.createSqlQuery(sql).setParameter("type_id", doctorTypeNameId);
    for (SqlRow row : sqlQuery.findList()) {
      theMap.put(row.getString("id"), row.getString("fio"));
    }
    return theMap;
  }
示例#3
0
  /**
   * Returns the full names ("surname name patronymic") of all doctors whose type name
   * matches {@code doctorTypeName}, via a raw SQL join against DOCTOR_TYPE.
   *
   * @param doctorTypeName value matched against DOCTOR_TYPE.DOCTOR_TYPE_NAME
   * @return list of concatenated full names; empty when no doctor matches
   */
  public static List<String> fioSQLListByType(String doctorTypeName) {
    // TODO: check this statement at latest version of PostgreSQL
    List<String> theList = new ArrayList<>();
    String sql =
        "SELECT concat(surname, ' ', name, ' ', patronymic) as fio FROM DOCTOR LEFT JOIN DOCTOR_TYPE ON DOCTOR_TYPE.ID = DOCTOR.DOCTOR_TYPE_ID WHERE DOCTOR_TYPE_NAME=:DOCTOR_TYPE_NAME";
    SqlQuery sqlQuery = Ebean.createSqlQuery(sql).setParameter("DOCTOR_TYPE_NAME", doctorTypeName);
    for (SqlRow row : sqlQuery.findList()) {
      theList.add(row.getString("fio"));
    }
    return theList;
  }
示例#4
0
  /**
   * Returns the total row count of the underlying query, computing it lazily on the
   * first call and caching it in {@code size} thereafter.
   *
   * @return the cached or freshly computed row count; 0 when the count query returns
   *     no row
   * @throws RuntimeException wrapping any {@link SQLException} from the count query
   */
  @Override
  public long size() {

    if (size == null) {

      // Count all
      String sql = mysqlQuery.toCountSQL();
      LOGGER.debug(sql);
      // try-with-resources guarantees the Statement and ResultSet are closed even
      // when executeQuery/next/getLong throws (the original leaked them on failure).
      try (Statement dataSt = dataConn.createStatement();
          ResultSet dataRS = dataSt.executeQuery(sql)) {
        if (dataRS.next()) {
          size = dataRS.getLong("size");
        } else {
          size = Long.valueOf(0);
        }
      } catch (SQLException e) {
        throw new RuntimeException(e);
      }
    }

    return size;
  }
示例#5
0
  /**
   * Opens {@code dataRS} for the current SELECT. If the first attempt fails (e.g. a
   * broken-pipe on a stale connection) the read statement is re-created and the query
   * retried once; a second failure is fatal.
   *
   * @throws RuntimeException wrapping the second failure; the first exception is
   *     attached as a suppressed exception for diagnosis
   */
  private void initDataRS() {
    String sql = null;
    try {
      dataSt = store.createReadStatement(dataConn);

      sql = mysqlQuery.toSelectSQL();
      LOGGER.debug(sql);
      dataRS = dataSt.executeQuery(sql);
    } catch (Exception e) {

      // Close the failed statement before retrying so it does not leak.
      try {
        if (dataSt != null) {
          dataSt.close();
        }
      } catch (Exception ignored) {
        // best effort; the statement is already broken
      }

      try {
        // Give a second chance (Broken pipe exception)
        dataSt = store.createReadStatement(dataConn);

        sql = mysqlQuery.toSelectSQL();
        dataRS = dataSt.executeQuery(sql);
      } catch (Exception e2) {
        // Keep the first failure instead of silently dropping it.
        e2.addSuppressed(e);
        throw new RuntimeException(e2);
      }
    }
  }
 /**
  * Adds this filter's condition to the query, translated against the aggregate
  * table, and conditionally delegates to the parent constraint.
  */
 public void addConstraint(SqlQuery sqlQuery, RolapCube baseCube, AggStar aggStar) {
   // Use aggregate table to generate filter condition
   final RolapNativeSql nativeSql =
       new RolapNativeSql(sqlQuery, aggStar, getEvaluator(), args[0].getLevel());
   final String havingSql = nativeSql.generateFilterCondition(filterExpr);
   if (havingSql != null) {
     sqlQuery.addHaving(havingSql);
   }
   // only apply context constraint if non empty, or
   // if a join is required to fulfill the filter condition
   final boolean contextNeeded = getEvaluator().isNonEmpty() || isJoinRequired();
   if (contextNeeded) {
     super.addConstraint(sqlQuery, baseCube, aggStar);
   }
 }
示例#7
0
  // TODO: repair choosing doctor by typename
  public static List<String> fioListByType(String doctorTypeName) {
    /*String query = "find doctor fetch doctor_type.doctor_type_id";
    List<Doctor> doctors = Ebean.find(Doctor.class)
      .setQuery(query)
      .setParameter("doctor_type_id", new Long(1))
      .findList(); */
    String sql =
        "SELECT * FROM DOCTOR LEFT JOIN DOCTOR_TYPE ON DOCTOR_TYPE.ID = DOCTOR.DOCTOR_TYPE_ID";
    SqlQuery sqlQuery = Ebean.createSqlQuery(sql);
    List<SqlRow> list = sqlQuery.findList();
    /*RawSql rawSql = RawSqlBuilder.parse(sql).create();
    javax.persistence.Query<Doctor> query = Ebean.find(Doctor.class);
      query.setRawSql(rawSql);
      //.columnMapping("id",  "doctor.id")
      //.columnMapping("name",  "doctor.name")
      //.columnMapping("surname",  "doctor.surname")
      //.columnMapping("patronymic",  "doctor.patronymic")
      //.columnMapping("patronymic",  "doctor_type.doctor_type_name");
      List<Doctor> doctors = query.findList();  */

    // List<Doctor> doctors = Ebean.find(Doctor.class)
    // .fetch("doctor_type","doctor.doctor_type_id")
    // .findList();
    // List<Doctor> doctors = find.fetch("doctor_type")
    // .where().eq("doctor.doctor_type_name", doctorTypeName)
    // .findList();
    /*com.avaje.ebean.Query q = Ebean.createQuery(Doctor.class);
    q.join("doctor_type");
    final List<Doctor> eventList = q.findList();*/
    List<Doctor> doctors = find.where().eq("doctor_type_id", new Long(1)).findList();
    List<String> theList = new ArrayList<String>();
    for (Doctor doctor : doctors) {
      theList.add(doctor.getFullName());
    }
    return theList;
  }
示例#8
0
 /**
  * Executes the given query as a list query, passing along its bound parameters.
  *
  * @param query the query whose SQL text and parameters are executed
  * @return the resulting ResultSet
  * @throws SQLException if execution fails
  */
 public static ResultSet execute(SqlQuery query) throws SQLException {
   final String sqlText = query.toString();
   return executeList(sqlText, query.getParams());
 }
  /**
   * Attempts to build a native SQL evaluator for a {@code Filter(<set>, <predicate>)}
   * call. Returns {@code null} whenever native evaluation is not feasible, in which
   * case the caller falls back to interpreted evaluation.
   *
   * @param evaluator current evaluation context; its state is saved and restored
   *     around the temporary context override below
   * @param fun the function being evaluated; must be named "Filter"
   * @param args the Filter arguments: args[0] is the set, args[1] the predicate
   * @return a SetEvaluator wrapping a FilterConstraint, or null if native evaluation
   *     is not applicable
   */
  NativeEvaluator createEvaluator(RolapEvaluator evaluator, FunDef fun, Exp[] args) {
    if (!isEnabled()) {
      return null;
    }
    if (!FilterConstraint.isValidContext(evaluator, restrictMemberTypes())) {
      return null;
    }
    // is this "Filter(<set>, <numeric expr>)"
    String funName = fun.getName();
    if (!"Filter".equalsIgnoreCase(funName)) {
      return null;
    }

    // Filter takes exactly a set and a predicate.
    if (args.length != 2) {
      return null;
    }

    // extract the set expression
    List<CrossJoinArg[]> allArgs = crossJoinArgFactory().checkCrossJoinArg(evaluator, args[0]);

    // checkCrossJoinArg returns a list of CrossJoinArg arrays.  The first
    // array is the CrossJoin dimensions.  The second array, if any,
    // contains additional constraints on the dimensions. If either the
    // list or the first array is null, then native cross join is not
    // feasible.
    if (allArgs == null || allArgs.isEmpty() || allArgs.get(0) == null) {
      return null;
    }

    CrossJoinArg[] cjArgs = allArgs.get(0);
    if (isPreferInterpreter(cjArgs, false)) {
      return null;
    }

    // extract "order by" expression
    SchemaReader schemaReader = evaluator.getSchemaReader();
    DataSource ds = schemaReader.getDataSource();

    // generate the WHERE condition
    // Need to generate where condition here to determine whether
    // or not the filter condition can be created. The filter
    // condition could change to use an aggregate table later in evaluation
    SqlQuery sqlQuery = SqlQuery.newQuery(ds, "NativeFilter");
    RolapNativeSql sql = new RolapNativeSql(sqlQuery, null, evaluator, cjArgs[0].getLevel());
    final Exp filterExpr = args[1];
    String filterExprStr = sql.generateFilterCondition(filterExpr);
    // A null translation means the predicate cannot be expressed in SQL.
    if (filterExprStr == null) {
      return null;
    }

    // Check to see if evaluator contains a calculated member that can't be
    // expanded.  This is necessary due to the SqlConstraintsUtils.
    // addContextConstraint()
    // method which gets called when generating the native SQL.
    if (SqlConstraintUtils.containsCalculatedMember(evaluator.getNonAllMembers(), true)) {
      return null;
    }

    LOGGER.debug("using native filter");

    // Save the evaluator state so the context override below can be undone
    // in the finally block regardless of how construction proceeds.
    final int savepoint = evaluator.savepoint();
    try {
      overrideContext(evaluator, cjArgs, sql.getStoredMeasure());
      // Now construct the TupleConstraint that contains both the CJ
      // dimensions and the additional filter on them.
      CrossJoinArg[] combinedArgs = cjArgs;
      if (allArgs.size() == 2) {
        CrossJoinArg[] predicateArgs = allArgs.get(1);
        if (predicateArgs != null) {
          // Combined the CJ and the additional predicate args.
          combinedArgs = Util.appendArrays(cjArgs, predicateArgs);
        }
      }

      TupleConstraint constraint = new FilterConstraint(combinedArgs, evaluator, filterExpr);
      return new SetEvaluator(cjArgs, schemaReader, constraint);
    } finally {
      evaluator.restore(savepoint);
    }
  }