Example #1
  @Override
  public boolean equals(Object obj) {
    // per the Object.equals contract, a null argument returns false
    if (obj == null) return false;
    if (obj.getClass() != getClass()) return false;

    @SuppressWarnings("unchecked")
    CatalogMap<T> other = (CatalogMap<T>) obj;

    if (other.size() != size()) return false;

    for (Entry<String, T> e : m_items.entrySet()) {
      assert (e.getValue() != null);
      T type = other.get(e.getKey());
      if (type == null) return false;
      if (type.equals(e.getValue()) == false) return false;
    }

    return true;
  }
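
Since equals() above compares only the contents of m_items, the Object contract also requires a hashCode() that agrees with it. A minimal sketch of the matching override, assuming m_items is the only state relevant to equality (this method is not part of the original snippet):

  @Override
  public int hashCode() {
    // Derive the hash from the same state equals() compares, so equal maps hash equally.
    return m_items.hashCode();
  }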
Example #2
  static void compileSingleStmtProcedure(
      VoltCompiler compiler,
      HSQLInterface hsql,
      DatabaseEstimates estimates,
      Catalog catalog,
      Database db,
      ProcedureDescriptor procedureDescriptor)
      throws VoltCompiler.VoltCompilerException {
    final String className = procedureDescriptor.m_className;
    if (className.indexOf('@') != -1) {
      throw compiler.new VoltCompilerException("User procedure names can't contain \"@\".");
    }

    // get the short name of the class (no package if a user procedure)
    // use the Table.<builtin> name (allowing the period) if builtin.
    String shortName = className;
    if (procedureDescriptor.m_builtInStmt == false) {
      String[] parts = className.split("\\.");
      shortName = parts[parts.length - 1];
    }

    // add an entry to the catalog (using the full className)
    final Procedure procedure = db.getProcedures().add(shortName);
    for (String groupName : procedureDescriptor.m_authGroups) {
      final Group group = db.getGroups().get(groupName);
      if (group == null) {
        throw compiler.new VoltCompilerException(
            "Procedure " + className + " allows access by a role " + groupName
                + " that does not exist");
      }
      final GroupRef groupRef = procedure.getAuthgroups().add(groupName);
      groupRef.setGroup(group);
    }
    procedure.setClassname(className);
    // sysprocs don't use the procedure compiler
    procedure.setSystemproc(false);
    procedure.setDefaultproc(procedureDescriptor.m_builtInStmt);
    procedure.setHasjava(false);

    // get the annotation
    // first try to get one that has been passed from the compiler
    ProcInfoData info = compiler.getProcInfoOverride(shortName);
    // otherwise build one from the DDL partition info
    // (single-statement procedures have no class annotation to read)
    if (info == null) {
      info = new ProcInfoData();
      if (procedureDescriptor.m_partitionString != null) {
        info.partitionInfo = procedureDescriptor.m_partitionString;
        info.singlePartition = true;
      }
    }
    assert (info != null);

    // ADD THE STATEMENT

    // add the statement to the catalog
    Statement catalogStmt = procedure.getStatements().add(VoltDB.ANON_STMT_NAME);

    // compile the statement
    StatementPartitioning partitioning =
        info.singlePartition ? StatementPartitioning.forceSP() : StatementPartitioning.forceMP();
    // default to FASTER detmode because stmt procs can't feed read output into writes
    StatementCompiler.compileFromSqlTextAndUpdateCatalog(
        compiler,
        hsql,
        catalog,
        db,
        estimates,
        catalogStmt,
        procedureDescriptor.m_singleStmt,
        procedureDescriptor.m_joinOrder,
        DeterminismMode.FASTER,
        partitioning);

    // if the single stmt is not read only, then the proc is not read only
    boolean procHasWriteStmts = (catalogStmt.getReadonly() == false);

    // set the procedure's read-only flag
    procedure.setReadonly(procHasWriteStmts == false);

    int seqs = catalogStmt.getSeqscancount();
    procedure.setHasseqscans(seqs > 0);

    // set procedure parameter types
    CatalogMap<ProcParameter> params = procedure.getParameters();
    CatalogMap<StmtParameter> stmtParams = catalogStmt.getParameters();

    // set the procedure parameter types from the statement parameter types
    int paramCount = 0;
    for (StmtParameter stmtParam : CatalogUtil.getSortedCatalogItems(stmtParams, "index")) {
      // name each parameter "param0", "param1", etc...
      ProcParameter procParam = params.add("param" + String.valueOf(paramCount));
      procParam.setIndex(stmtParam.getIndex());
      procParam.setIsarray(stmtParam.getIsarray());
      procParam.setType(stmtParam.getJavatype());
      paramCount++;
    }

    // parse the procinfo
    procedure.setSinglepartition(info.singlePartition);
    if (info.singlePartition) {
      parsePartitionInfo(compiler, db, procedure, info.partitionInfo);
      if (procedure.getPartitionparameter() >= params.size()) {
        String msg =
            "PartitionInfo parameter not a valid parameter for procedure: "
                + procedure.getClassname();
        throw compiler.new VoltCompilerException(msg);
      }
      // TODO: The planner does not currently validate that a single-statement plan declared as
      // single-partition correctly uses the designated parameter as a partitioning filter;
      // maybe some day.
      // In theory, the PartitioningForStatement would confirm the use of (only) a parameter as a
      // partition key -- or, if the partition key was determined to be some other hard-coded
      // constant (expression?), it might display a warning message that the passed parameter is
      // assumed to be equal to that constant (expression).
    } else {
      if (partitioning.getCountOfIndependentlyPartitionedTables() == 1) {
        AbstractExpression statementPartitionExpression =
            partitioning.singlePartitioningExpressionForReport();
        if (statementPartitionExpression != null) {
          // The planner has uncovered an overlooked opportunity to run the statement SP.
          String msg = null;
          if (statementPartitionExpression instanceof ParameterValueExpression) {
            msg =
                "This procedure would benefit from setting the attribute 'partitioninfo="
                    + partitioning.getFullColumnName()
                    + ":"
                    + ((ParameterValueExpression) statementPartitionExpression).getParameterIndex()
                    + "'";
          } else {
            String valueDescription = null;
            Object partitionValue = partitioning.getInferredPartitioningValue();
            if (partitionValue == null) {
              // Statement partitioned on a runtime constant. This is likely to be cryptic, but
              // hopefully gets the idea across.
              valueDescription = "of " + statementPartitionExpression.explain("");
            } else {
              valueDescription =
                  partitionValue.toString(); // A simple constant value COULD have been a parameter.
            }
            msg =
                "This procedure would benefit from adding a parameter to be passed the value "
                    + valueDescription
                    + " and setting the attribute 'partitioninfo="
                    + partitioning.getFullColumnName()
                    + ":"
                    + paramCount
                    + "'";
          }
          compiler.addWarn(msg);
        }
      }
    }
  }
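
The 'partitioninfo' hint emitted above uses the same string format that parsePartitionInfo consumes via ProcInfoData: "<table>.<column>:<zero-based parameter index>". A small hedged sketch with hypothetical TPC-C-style names (the table, column, and variable name are illustrative, not from this code):

    // Hypothetical values, for illustration only.
    ProcInfoData spInfo = new ProcInfoData();
    spInfo.partitionInfo = "NEW_ORDER.NO_W_ID:0"; // partitioning column and the parameter index that feeds it
    spInfo.singlePartition = true;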
Example #3
  static void compileJavaProcedure(
      VoltCompiler compiler,
      HSQLInterface hsql,
      DatabaseEstimates estimates,
      Catalog catalog,
      Database db,
      ProcedureDescriptor procedureDescriptor,
      InMemoryJarfile jarOutput)
      throws VoltCompiler.VoltCompilerException {

    final String className = procedureDescriptor.m_className;
    final Language lang = procedureDescriptor.m_language;

    // Load the class given the class name
    Class<?> procClass = procedureDescriptor.m_class;

    // get the short name of the class (no package)
    String shortName = deriveShortProcedureName(className);

    // add an entry to the catalog
    final Procedure procedure = db.getProcedures().add(shortName);
    for (String groupName : procedureDescriptor.m_authGroups) {
      final Group group = db.getGroups().get(groupName);
      if (group == null) {
        throw compiler.new VoltCompilerException(
            "Procedure " + className + " allows access by a role " + groupName
                + " that does not exist");
      }
      final GroupRef groupRef = procedure.getAuthgroups().add(groupName);
      groupRef.setGroup(group);
    }
    procedure.setClassname(className);
    // sysprocs don't use the procedure compiler
    procedure.setSystemproc(false);
    procedure.setDefaultproc(procedureDescriptor.m_builtInStmt);
    procedure.setHasjava(true);
    procedure.setLanguage(lang.name());
    ProcedureAnnotation pa = (ProcedureAnnotation) procedure.getAnnotation();
    if (pa == null) {
      pa = new ProcedureAnnotation();
      procedure.setAnnotation(pa);
    }
    if (procedureDescriptor.m_scriptImpl != null) {
      // This is a Groovy or other Java derived procedure and we need to add an annotation with
      // the script to the Procedure element in the Catalog
      pa.scriptImpl = procedureDescriptor.m_scriptImpl;
    }

    // get the annotation
    // first try to get one that has been passed from the compiler
    ProcInfoData info = compiler.getProcInfoOverride(shortName);
    // check if partition info was set in ddl
    ProcInfoData ddlInfo = null;
    if (procedureDescriptor.m_partitionString != null
        && !procedureDescriptor.m_partitionString.trim().isEmpty()) {
      ddlInfo = new ProcInfoData();
      ddlInfo.partitionInfo = procedureDescriptor.m_partitionString;
      ddlInfo.singlePartition = true;
    }
    // then check for the usual one in the class itself
    // and create a ProcInfo.Data instance for it
    if (info == null) {
      info = new ProcInfoData();
      ProcInfo annotationInfo = procClass.getAnnotation(ProcInfo.class);
      // error out if partition info is present in both ddl and annotation
      if (annotationInfo != null) {
        if (ddlInfo != null) {
          String msg = "Procedure: " + shortName + " has partition properties defined both in ";
          msg += "class \"" + className + "\" and in the schema defintion file(s)";
          throw compiler.new VoltCompilerException(msg);
        }
        // Prevent AutoGenerated DDL from including PARTITION PROCEDURE for this procedure.
        pa.classAnnotated = true;
        info.partitionInfo = annotationInfo.partitionInfo();
        info.singlePartition = annotationInfo.singlePartition();
      } else if (ddlInfo != null) {
        info = ddlInfo;
      }
    } else {
      pa.classAnnotated = true;
    }
    assert (info != null);

    // make sure multi-partition implies no partitioning info
    if (info.singlePartition == false) {
      if ((info.partitionInfo != null) && (info.partitionInfo.length() > 0)) {
        String msg = "Procedure: " + shortName + " is annotated as multi-partition";
        msg += " but partitionInfo has non-empty value: \"" + info.partitionInfo + "\"";
        throw compiler.new VoltCompilerException(msg);
      }
    }

    // track if there are any writer statements and/or sequential scans and/or an overlooked common
    // partitioning parameter
    boolean procHasWriteStmts = false;
    boolean procHasSeqScans = false;
    // procWantsCommonPartitioning == true but commonPartitionExpression == null signifies a proc
    // for which the planner was requested to attempt to find an SP plan, but that was not possible
    // -- it had a replicated write or it had one or more partitioned reads that were not all
    // filtered by the same partition key value -- so it was planned as an MP proc.
    boolean procWantsCommonPartitioning = true;
    AbstractExpression commonPartitionExpression = null;
    String exampleSPstatement = null;
    Object exampleSPvalue = null;

    // iterate through the fields and get valid sql statements
    Map<String, Object> fields = lang.accept(procedureIntrospector(compiler), procClass);

    // determine if proc is read or read-write by checking if the proc contains any write sql stmts
    boolean readWrite = false;
    for (Object field : fields.values()) {
      if (!(field instanceof SQLStmt)) continue;
      SQLStmt stmt = (SQLStmt) field;
      QueryType qtype = QueryType.getFromSQL(stmt.getText());
      if (!qtype.isReadOnly()) {
        readWrite = true;
        break;
      }
    }

    // default to FASTER determinism mode, which may favor non-deterministic plans
    // but if it's a read-write proc, use a SAFER planning mode wrt determinism.
    final DeterminismMode detMode = readWrite ? DeterminismMode.SAFER : DeterminismMode.FASTER;

    for (Entry<String, Object> entry : fields.entrySet()) {
      if (!(entry.getValue() instanceof SQLStmt)) continue;

      String stmtName = entry.getKey();
      SQLStmt stmt = (SQLStmt) entry.getValue();

      // add the statement to the catalog
      Statement catalogStmt = procedure.getStatements().add(stmtName);

      // compile the statement
      StatementPartitioning partitioning =
          info.singlePartition ? StatementPartitioning.forceSP() : StatementPartitioning.forceMP();
      boolean cacheHit =
          StatementCompiler.compileFromSqlTextAndUpdateCatalog(
              compiler,
              hsql,
              catalog,
              db,
              estimates,
              catalogStmt,
              stmt.getText(),
              stmt.getJoinOrder(),
              detMode,
              partitioning);

      // If this was a cache hit or the procedure was declared single-partition, don't bother
      // trying to figure out more partitioning.
      if (partitioning.wasSpecifiedAsSingle() || cacheHit) {
        procWantsCommonPartitioning = false; // Don't try to infer what's already been asserted.
        // The planner does not currently attempt to second-guess a plan declared as
        // single-partition; maybe some day.
        // In theory, the PartitioningForStatement would confirm the use of (only) a parameter as a
        // partition key -- or, if the partition key was determined to be some other constant
        // (expression?), it might display an informational message that the passed parameter is
        // assumed to be equal to the hard-coded partition key constant (expression).

        // Validate any inferred statement partitioning given the statement's possible usage,
        // until a contradiction is found.
      } else if (procWantsCommonPartitioning) {
        // Only consider statements that are capable of running SP with a partitioning parameter
        // that does not seem to conflict with the partitioning of prior statements.
        if (partitioning.getCountOfIndependentlyPartitionedTables() == 1) {
          AbstractExpression statementPartitionExpression =
              partitioning.singlePartitioningExpressionForReport();
          if (statementPartitionExpression != null) {
            if (commonPartitionExpression == null) {
              commonPartitionExpression = statementPartitionExpression;
              exampleSPstatement = stmt.getText();
              exampleSPvalue = partitioning.getInferredPartitioningValue();
            } else if (commonPartitionExpression.equals(statementPartitionExpression)
                || (statementPartitionExpression instanceof ParameterValueExpression
                    && commonPartitionExpression instanceof ParameterValueExpression)) {
              // Any constant used for partitioning would have to be the same for all statements,
              // but any statement parameter used for partitioning MIGHT come from the same proc
              // parameter as any other statement's parameter used for partitioning.
            } else {
              // Appears to be different partitioning for different statements.
              procWantsCommonPartitioning = false;
            }
          } else {
            // There is a statement with a partitioned table whose partitioning column is
            // not equality filtered with a constant or param. Abandon all hope.
            procWantsCommonPartitioning = false;
          }

          // Usually, replicated-only statements in a mix with others have no effect on the MP/SP
          // decision
        } else if (partitioning.getCountOfPartitionedTables() == 0) {
          // but SP is strictly forbidden for DML, to maintain the consistency of the replicated
          // data.
          if (partitioning.getIsReplicatedTableDML()) {
            procWantsCommonPartitioning = false;
          }

        } else {
          // There is a statement with a partitioned table whose partitioning column is
          // not equality filtered with a constant or param. Abandon all hope.
          procWantsCommonPartitioning = false;
        }
      }

      // if a single stmt is not read only, then the proc is not read only
      if (catalogStmt.getReadonly() == false) {
        procHasWriteStmts = true;
      }

      if (catalogStmt.getSeqscancount() > 0) {
        procHasSeqScans = true;
      }
    }

    // MIGHT the planner have uncovered an overlooked opportunity to run all statements SP?
    if (procWantsCommonPartitioning && (commonPartitionExpression != null)) {
      String msg = null;
      if (commonPartitionExpression instanceof ParameterValueExpression) {
        msg =
            "This procedure might benefit from an @ProcInfo annotation designating parameter "
                + ((ParameterValueExpression) commonPartitionExpression).getParameterIndex()
                + " of statement '"
                + exampleSPstatement
                + "'";
      } else {
        String valueDescription = null;
        if (exampleSPvalue == null) {
          // Statements partitioned on a runtime constant. This is likely to be cryptic, but
          // hopefully gets the idea across.
          valueDescription = "of " + commonPartitionExpression.explain("");
        } else {
          valueDescription =
              exampleSPvalue.toString(); // A simple constant value COULD have been a parameter.
        }
        msg =
            "This procedure might benefit from an @ProcInfo annotation referencing an added parameter passed the value "
                + valueDescription;
      }
      compiler.addInfo(msg);
    }

    // set the procedure's read-only flag
    procedure.setReadonly(procHasWriteStmts == false);

    procedure.setHasseqscans(procHasSeqScans);

    for (Statement catalogStmt : procedure.getStatements()) {
      if (catalogStmt.getIscontentdeterministic() == false) {
        String potentialErrMsg =
            "Procedure "
                + shortName
                + " has a statement with a non-deterministic result - statement: \""
                + catalogStmt.getSqltext()
                + "\" , reason: "
                + catalogStmt.getNondeterminismdetail();
        // throw compiler.new VoltCompilerException(potentialErrMsg);
        compiler.addWarn(potentialErrMsg);
      } else if (catalogStmt.getIsorderdeterministic() == false) {
        String warnMsg;
        if (procHasWriteStmts) {
          String rwPotentialErrMsg =
              "Procedure "
                  + shortName
                  + " is RW and has a statement whose result has a non-deterministic ordering - statement: \""
                  + catalogStmt.getSqltext()
                  + "\", reason: "
                  + catalogStmt.getNondeterminismdetail();
          // throw compiler.new VoltCompilerException(rwPotentialErrMsg);
          warnMsg = rwPotentialErrMsg;
        } else {
          warnMsg =
              "Procedure "
                  + shortName
                  + " has a statement with a non-deterministic result - statement: \""
                  + catalogStmt.getSqltext()
                  + "\", reason: "
                  + catalogStmt.getNondeterminismdetail();
        }
        compiler.addWarn(warnMsg);
      }
    }

    // set procedure parameter types
    CatalogMap<ProcParameter> params = procedure.getParameters();
    Class<?>[] paramTypes = lang.accept(procedureEntryPointParametersTypeExtractor, fields);
    for (int i = 0; i < paramTypes.length; i++) {
      Class<?> cls = paramTypes[i];
      ProcParameter param = params.add(String.valueOf(i));
      param.setIndex(i);

      // handle the case where the param is an array
      if (cls.isArray()) {
        param.setIsarray(true);
        cls = cls.getComponentType();
      } else param.setIsarray(false);

      // boxed types are not supported parameters at this time
      if ((cls == Long.class)
          || (cls == Integer.class)
          || (cls == Short.class)
          || (cls == Byte.class)
          || (cls == Double.class)
          || (cls == Character.class)
          || (cls == Boolean.class)) {
        String msg = "Procedure: " + shortName + " has a parameter with a boxed type: ";
        msg += cls.getSimpleName();
        msg +=
            ". Replace this parameter with the corresponding primitive type and the procedure may compile.";
        throw compiler.new VoltCompilerException(msg);
      } else if ((cls == Float.class) || (cls == float.class)) {
        String msg = "Procedure: " + shortName + " has a parameter with type: ";
        msg += cls.getSimpleName();
        msg += ". Replace this parameter type with double and the procedure may compile.";
        throw compiler.new VoltCompilerException(msg);
      }

      VoltType type;
      try {
        type = VoltType.typeFromClass(cls);
      } catch (VoltTypeException e) {
        // handle the case where the type is invalid
        String msg = "Procedure: " + shortName + " has a parameter with invalid type: ";
        msg += cls.getSimpleName();
        throw compiler.new VoltCompilerException(msg);
      } catch (RuntimeException e) {
        String msg =
            "Procedure: " + shortName + " unexpectedly failed a check on a parameter of type: ";
        msg += cls.getSimpleName();
        msg += " with error: ";
        msg += e.toString();
        throw compiler.new VoltCompilerException(msg);
      }

      param.setType(type.getValue());
    }

    // parse the procinfo
    procedure.setSinglepartition(info.singlePartition);
    if (info.singlePartition) {
      parsePartitionInfo(compiler, db, procedure, info.partitionInfo);
      if (procedure.getPartitionparameter() >= paramTypes.length) {
        String msg =
            "PartitionInfo parameter not a valid parameter for procedure: "
                + procedure.getClassname();
        throw compiler.new VoltCompilerException(msg);
      }

      // check that the type of the partition parameter meets our high standards
      Class<?> partitionType = paramTypes[procedure.getPartitionparameter()];
      Class<?>[] validPartitionClzzes = {
        Long.class, Integer.class, Short.class, Byte.class,
        long.class, int.class, short.class, byte.class,
        String.class, byte[].class
      };
      boolean found = false;
      for (Class<?> candidate : validPartitionClzzes) {
        if (partitionType == candidate) found = true;
      }
      if (!found) {
        String msg =
            "PartitionInfo parameter must be a String or Number for procedure: "
                + procedure.getClassname();
        throw compiler.new VoltCompilerException(msg);
      }

      VoltType columnType = VoltType.get((byte) procedure.getPartitioncolumn().getType());
      VoltType paramType = VoltType.typeFromClass(partitionType);
      if (!columnType.canExactlyRepresentAnyValueOf(paramType)) {
        String msg =
            "Type mismatch between partition column and partition parameter for procedure "
                + procedure.getClassname()
                + " may cause overflow or loss of precision.\nPartition column is type "
                + columnType
                + " and partition parameter is type "
                + paramType;
        throw compiler.new VoltCompilerException(msg);
      } else if (!paramType.canExactlyRepresentAnyValueOf(columnType)) {
        String msg =
            "Type mismatch between partition column and partition parameter for procedure "
                + procedure.getClassname()
                + " does not allow the full range of partition key values.\nPartition column is type "
                + columnType
                + " and partition parameter is type "
                + paramType;
        compiler.addWarn(msg);
      }
    }

    // put the compiled code for this procedure into the jarfile
    // need to find the outermost ancestor class for the procedure in the event
    // that it's actually an inner (or inner inner...) class.
    // addClassToJar recursively adds all the children, which should include this
    // class
    Class<?> ancestor = procClass;
    while (ancestor.getEnclosingClass() != null) {
      ancestor = ancestor.getEnclosingClass();
    }
    compiler.addClassToJar(jarOutput, ancestor);
  }
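
Example #3 introspects a user class for SQLStmt fields and an optional @ProcInfo annotation, then checks that the entry point takes only unboxed parameter types. A hedged sketch of the kind of procedure class it expects; the class name, table, and columns are hypothetical TPC-C-style placeholders, not taken from the compiler source:

import org.voltdb.ProcInfo;
import org.voltdb.SQLStmt;
import org.voltdb.VoltProcedure;
import org.voltdb.VoltTable;

// Declares single-partition execution keyed on parameter 0, in the same
// "<table>.<column>:<param index>" form the compiler parses above.
@ProcInfo(partitionInfo = "NEW_ORDER.NO_W_ID:0", singlePartition = true)
public class GetNewOrders extends VoltProcedure {

  // SQLStmt fields are what the procedure introspector collects and compiles.
  public final SQLStmt selectByDistrict =
      new SQLStmt("SELECT * FROM NEW_ORDER WHERE NO_W_ID = ? AND NO_D_ID = ?;");

  // Primitive parameter types only: boxed types (Long, Integer, ...) are rejected above.
  public VoltTable[] run(long warehouseId, long districtId) {
    voltQueueSQL(selectByDistrict, warehouseId, districtId);
    return voltExecuteSQL(true);
  }
}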
Example #4
  @SuppressWarnings("deprecation")
  @Override
  public void setUp() throws IOException, InterruptedException {
    VoltDB.instance().readBuildInfo("Test");

    // compile a catalog
    String testDir = BuildDirectoryUtils.getBuildDirectoryPath();
    String catalogJar = testDir + File.separator + JAR;

    TPCCProjectBuilder pb = new TPCCProjectBuilder();
    pb.addDefaultSchema();
    pb.addDefaultPartitioning();
    pb.addProcedures(MultiSiteSelect.class, InsertNewOrder.class);

    pb.compile(catalogJar, 2, 0);

    // load a catalog
    byte[] bytes = CatalogUtil.toBytes(new File(catalogJar));
    String serializedCatalog = CatalogUtil.loadCatalogFromJar(bytes, null);

    // create the catalog (that will be passed to the ClientInterface)
    catalog = new Catalog();
    catalog.execute(serializedCatalog);

    // update the catalog with the data from the deployment file
    String pathToDeployment = pb.getPathToDeployment();
    assertTrue(CatalogUtil.compileDeploymentAndGetCRC(catalog, pathToDeployment, true) >= 0);

    cluster = catalog.getClusters().get("cluster");
    CatalogMap<Procedure> procedures = cluster.getDatabases().get("database").getProcedures();
    Procedure insertProc = procedures.get("InsertNewOrder");
    assert (insertProc != null);
    selectProc = procedures.get("MultiSiteSelect");
    assert (selectProc != null);

    // Each EE needs its own thread for correct initialization.
    final AtomicReference<ExecutionEngine> site1Reference = new AtomicReference<ExecutionEngine>();
    final byte configBytes[] = LegacyHashinator.getConfigureBytes(2);
    Thread site1Thread =
        new Thread() {
          @Override
          public void run() {
            site1Reference.set(
                new ExecutionEngineJNI(
                    cluster.getRelativeIndex(),
                    1,
                    0,
                    0,
                    "",
                    100,
                    HashinatorType.LEGACY,
                    configBytes));
          }
        };
    site1Thread.start();
    site1Thread.join();

    final AtomicReference<ExecutionEngine> site2Reference = new AtomicReference<ExecutionEngine>();
    Thread site2Thread =
        new Thread() {
          @Override
          public void run() {
            site2Reference.set(
                new ExecutionEngineJNI(
                    cluster.getRelativeIndex(),
                    2,
                    1,
                    0,
                    "",
                    100,
                    HashinatorType.LEGACY,
                    configBytes));
          }
        };
    site2Thread.start();
    site2Thread.join();

    // create the two sites, grab their EEs, and load the catalog into each
    site1 = new ExecutionSite(0); // site 0
    ee1 = site1Reference.get();
    ee1.loadCatalog(0, catalog.serialize());
    site2 = new ExecutionSite(1); // site 1
    ee2 = site2Reference.get();
    ee2.loadCatalog(0, catalog.serialize());

    // cache some plan fragments
    selectStmt = selectProc.getStatements().get("selectAll");
    assert (selectStmt != null);
    int i = 0;
    // this assumes the fragments arrive in top/bottom order; the swap below corrects it if not
    for (PlanFragment f : selectStmt.getFragments()) {
      if (i == 0) selectTopFrag = f;
      else selectBottomFrag = f;
      i++;
    }
    assert (selectTopFrag != null);
    assert (selectBottomFrag != null);

    if (selectTopFrag.getHasdependencies() == false) {
      PlanFragment temp = selectTopFrag;
      selectTopFrag = selectBottomFrag;
      selectBottomFrag = temp;
    }

    // get the insert frag
    Statement insertStmt = insertProc.getStatements().get("insert");
    assert (insertStmt != null);

    for (PlanFragment f : insertStmt.getFragments()) insertFrag = f;

    // populate plan cache
    ActivePlanRepository.clear();
    ActivePlanRepository.addFragmentForTest(
        CatalogUtil.getUniqueIdForFragment(selectBottomFrag),
        Encoder.base64Decode(selectBottomFrag.getPlannodetree()));
    ActivePlanRepository.addFragmentForTest(
        CatalogUtil.getUniqueIdForFragment(selectTopFrag),
        Encoder.base64Decode(selectTopFrag.getPlannodetree()));
    ActivePlanRepository.addFragmentForTest(
        CatalogUtil.getUniqueIdForFragment(insertFrag),
        Encoder.base64Decode(insertFrag.getPlannodetree()));

    // insert some data
    ParameterSet params = ParameterSet.fromArrayNoCopy(1L, 1L, 1L);

    VoltTable[] results =
        ee2.executePlanFragments(
            1,
            new long[] {CatalogUtil.getUniqueIdForFragment(insertFrag)},
            null,
            new ParameterSet[] {params},
            1,
            0,
            42,
            Long.MAX_VALUE);
    assert (results.length == 1);
    assert (results[0].asScalarLong() == 1L);

    params = ParameterSet.fromArrayNoCopy(2L, 2L, 2L);

    results =
        ee1.executePlanFragments(
            1,
            new long[] {CatalogUtil.getUniqueIdForFragment(insertFrag)},
            null,
            new ParameterSet[] {params},
            2,
            1,
            42,
            Long.MAX_VALUE);
    assert (results.length == 1);
    assert (results[0].asScalarLong() == 1L);
  }
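
setUp() above creates two JNI execution engines on dedicated threads; a matching tearDown would release them so their native resources are reclaimed between tests. A hedged sketch, assuming ExecutionEngine exposes release() as the JNI implementation does (this method is not part of the original snippet):

  @Override
  public void tearDown() throws Exception {
    // Free the native resources held by the engines created in setUp().
    if (ee1 != null) {
      ee1.release();
    }
    if (ee2 != null) {
      ee2.release();
    }
  }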
Example #5
  void compileDatabaseNode(DatabaseType database) throws VoltCompilerException {
    final ArrayList<String> programs = new ArrayList<String>();
    final ArrayList<String> schemas = new ArrayList<String>();
    final ArrayList<ProcedureDescriptor> procedures = new ArrayList<ProcedureDescriptor>();
    final ArrayList<Class<?>> classDependencies = new ArrayList<Class<?>>();
    final ArrayList<String[]> partitions = new ArrayList<String[]>();

    final String databaseName = database.getName();

    // the project schema does not require the database element to be named "database", so enforce it here
    if (databaseName.equals("database") == false) {
      final String msg =
          "VoltDB currently requires all database elements to be named "
              + "\"database\" (found: \""
              + databaseName
              + "\")";
      throw new VoltCompilerException(msg);
    }

    // create the database in the catalog
    m_catalog.execute("add /clusters[cluster] databases " + databaseName);
    Database db = m_catalog.getClusters().get("cluster").getDatabases().get(databaseName);

    SnapshotType snapshotSettings = database.getSnapshot();
    if (snapshotSettings != null) {
      SnapshotSchedule schedule = db.getSnapshotschedule().add("default");
      String frequency = snapshotSettings.getFrequency();
      if (!frequency.endsWith("s") && !frequency.endsWith("m") && !frequency.endsWith("h")) {
        throw new VoltCompilerException(
            "Snapshot frequency "
                + frequency
                + " must end with a time unit, one of [s, m, h] (seconds, minutes, hours)");
      }

      int frequencyInt = 0;
      String frequencySubstring = frequency.substring(0, frequency.length() - 1);
      try {
        frequencyInt = Integer.parseInt(frequencySubstring);
      } catch (Exception e) {
        throw new VoltCompilerException("Frequency " + frequencySubstring + " is not an integer ");
      }

      String prefix = snapshotSettings.getPrefix();
      if (prefix == null || prefix.isEmpty()) {
        throw new VoltCompilerException("Snapshot prefix " + prefix + " is not a valid prefix ");
      }

      if (prefix.contains("-") || prefix.contains(",")) {
        throw new VoltCompilerException("Snapshot prefix " + prefix + " cannot include , or - ");
      }

      String path = snapshotSettings.getPath();
      if (path == null || path.isEmpty()) {
        throw new VoltCompilerException("Snapshot path " + path + " is not a valid path ");
      }

      if (snapshotSettings.getRetain() == null) {
        throw new VoltCompilerException("Snapshot retain value not provided");
      }

      int retain = snapshotSettings.getRetain().intValue();
      if (retain < 1) {
        throw new VoltCompilerException(
            "Snapshot retain value " + retain + " is not a valid value. Must be 1 or greater.");
      }

      schedule.setFrequencyunit(frequency.substring(frequency.length() - 1, frequency.length()));
      schedule.setFrequencyvalue(frequencyInt);
      schedule.setPath(path);
      schedule.setPrefix(prefix);
      schedule.setRetain(retain);
    }

    // schemas/schema
    for (SchemasType.Schema schema : database.getSchemas().getSchema()) {
      LOG.l7dlog(
          Level.DEBUG,
          LogKeys.compiler_VoltCompiler_CatalogPath.name(),
          new Object[] {schema.getPath()},
          null);
      schemas.add(schema.getPath());
    }

    // groups/group.
    if (database.getGroups() != null) {
      for (GroupsType.Group group : database.getGroups().getGroup()) {
        org.voltdb.catalog.Group catGroup = db.getGroups().add(group.getName());
        catGroup.setAdhoc(group.isAdhoc());
        catGroup.setSysproc(group.isSysproc());
      }
    }

    // users/user
    if (database.getUsers() != null) {
      for (UsersType.User user : database.getUsers().getUser()) {
        org.voltdb.catalog.User catUser = db.getUsers().add(user.getName());
        catUser.setAdhoc(user.isAdhoc());
        catUser.setSysproc(user.isSysproc());
        byte passwordHash[] = extractPassword(user.getPassword());
        catUser.setShadowpassword(Encoder.hexEncode(passwordHash));

        // process the @groups comma separated list
        if (user.getGroups() != null) {
          String grouplist[] = user.getGroups().split(",");
          for (final String group : grouplist) {
            final GroupRef groupRef = catUser.getGroups().add(group);
            final Group catalogGroup = db.getGroups().get(group);
            if (catalogGroup != null) {
              groupRef.setGroup(catalogGroup);
            }
          }
        }
      }
    }

    // procedures/procedure
    for (ProceduresType.Procedure proc : database.getProcedures().getProcedure()) {
      procedures.add(getProcedure(proc));
    }

    // classdependencies/classdependency
    if (database.getClassdependencies() != null) {
      for (Classdependency dep : database.getClassdependencies().getClassdependency()) {
        classDependencies.add(getClassDependency(dep));
      }
    }

    // partitions/table
    if (database.getPartitions() != null) {
      for (org.voltdb.compiler.projectfile.PartitionsType.Partition table :
          database.getPartitions().getPartition()) {
        partitions.add(getPartition(table));
      }
    }

    String msg = "Database \"" + databaseName + "\" ";
    // TODO: schema allows 0 procedures. Testbase relies on this.
    if (procedures.size() == 0) {
      msg +=
          "needs at least one \"procedure\" element "
              + "(currently has "
              + String.valueOf(procedures.size())
              + ")";
      throw new VoltCompilerException(msg);
    }
    if (procedures.size() < 1) {
      msg += "is missing the \"procedures\" element";
      throw new VoltCompilerException(msg);
    }

    // shutdown and make a new hsqldb
    m_hsql = HSQLInterface.loadHsqldb();

    // Actually parse and handle all the programs
    for (final String programName : programs) {
      m_catalog.execute("add " + db.getPath() + " programs " + programName);
    }

    // Actually parse and handle all the DDL
    final DDLCompiler ddlcompiler = new DDLCompiler(this, m_hsql);

    for (final String schemaPath : schemas) {
      File schemaFile = null;

      if (schemaPath.contains(".jar!")) {
        String ddlText = null;
        try {
          ddlText = JarReader.readFileFromJarfile(schemaPath);
        } catch (final Exception e) {
          throw new VoltCompilerException(e);
        }
        schemaFile = VoltProjectBuilder.writeStringToTempFile(ddlText);
      } else {
        schemaFile = new File(schemaPath);
      }

      if (!schemaFile.isAbsolute()) {
        // Resolve schemaPath relative to the database definition xml file
        schemaFile = new File(new File(m_projectFileURL).getParent(), schemaPath);
      }

      // add the file object's path to the list of files for the jar
      m_ddlFilePaths.put(schemaFile.getName(), schemaFile.getPath());

      ddlcompiler.loadSchema(schemaFile.getAbsolutePath());
    }
    ddlcompiler.compileToCatalog(m_catalog, db);

    // Actually parse and handle all the partitions
    // this needs to happen before procedures are compiled
    msg = "In database \"" + databaseName + "\", ";
    final CatalogMap<Table> tables = db.getTables();
    for (final String[] partition : partitions) {
      final String tableName = partition[0];
      final String colName = partition[1];
      final Table t = tables.getIgnoreCase(tableName);
      if (t == null) {
        msg += "\"partition\" element has unknown \"table\" attribute '" + tableName + "'";
        throw new VoltCompilerException(msg);
      }
      final Column c = t.getColumns().getIgnoreCase(colName);
      // make sure the column exists
      if (c == null) {
        msg += "\"partition\" element has unknown \"column\" attribute '" + colName + "'";
        throw new VoltCompilerException(msg);
      }
      // make sure the column is marked not-nullable
      if (c.getNullable() == true) {
        msg +=
            "Partition column '"
                + tableName
                + "."
                + colName
                + "' is nullable. "
                + "Partition columns must be constrained \"NOT NULL\".";
        throw new VoltCompilerException(msg);
      }
      t.setPartitioncolumn(c);
      t.setIsreplicated(false);

      // Set the destination tables of associated views non-replicated.
      // If a view's source table is replicated, then a full scan of the
      // associated view is single-sited. If the source is partitioned,
      // a full scan of the view must be distributed.
      final CatalogMap<MaterializedViewInfo> views = t.getViews();
      for (final MaterializedViewInfo mvi : views) {
        mvi.getDest().setIsreplicated(false);
      }
    }

    // add vertical partitions
    if (database.getVerticalpartitions() != null) {
      for (Verticalpartition vp : database.getVerticalpartitions().getVerticalpartition()) {
        try {
          addVerticalPartition(db, vp.getTable(), vp.getColumn(), vp.isIndexed());
        } catch (Exception ex) {
          throw new VoltCompilerException(
              "Failed to create vertical partition for " + vp.getTable(), ex);
        }
      }
    }

    // this should reorder the tables and partitions all alphabetically
    String catData = m_catalog.serialize();
    m_catalog = new Catalog();
    m_catalog.execute(catData);
    db = m_catalog.getClusters().get("cluster").getDatabases().get(databaseName);

    // add database estimates info
    addDatabaseEstimatesInfo(m_estimates, db);
    addSystemProcsToCatalog(m_catalog, db);

    // Process and add exports and connectors to the catalog
    // Must do this before compiling procedures to deny updates
    // on append-only tables.
    if (database.getExports() != null) {
      // currently, only a single connector is allowed
      Connector conn = database.getExports().getConnector();
      compileConnector(conn, db);
    }

    // Actually parse and handle all the Procedures
    for (final ProcedureDescriptor procedureDescriptor : procedures) {
      final String procedureName = procedureDescriptor.m_className;
      m_currentFilename = procedureName.substring(procedureName.lastIndexOf('.') + 1);
      m_currentFilename += ".class";
      ProcedureCompiler.compile(this, m_hsql, m_estimates, m_catalog, db, procedureDescriptor);
    }

    // Add all the class dependencies to the output jar
    for (final Class<?> classDependency : classDependencies) {
      addClassToJar(classDependency, this);
    }

    m_hsql.close();
  }
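
compileDatabaseNode walks a JAXB tree generated from the legacy project file. As a reference point, the constant below holds a hedged sketch of the XML shape implied by the getters used above; element and attribute names are inferred (the procedure "class" attribute in particular is an assumption) and all values are hypothetical:

  static final String EXAMPLE_DATABASE_ELEMENT =
      "<database name='database'>"
          + "<schemas><schema path='ddl.sql'/></schemas>"
          + "<procedures><procedure class='org.example.GetNewOrders'/></procedures>"
          + "<partitions><partition table='NEW_ORDER' column='NO_W_ID'/></partitions>"
          + "<snapshot frequency='30m' prefix='snap' path='/tmp/snapshots' retain='3'/>"
          + "</database>";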
Example #6
  /**
   * Cache the current statistics.
   *
   * @param time timestamp for this tick, forwarded to the EE stats calls
   */
  private void statsTick(long time) {
    /*
     * Grab the table statistics from the EE and put them into the statistics
     * agent.
     */
    if (m_tableStats != null) {
      CatalogMap<Table> tables = m_context.database.getTables();
      int[] tableIds = new int[tables.size()];
      int i = 0;
      for (Table table : tables) {
        tableIds[i++] = table.getRelativeIndex();
      }

      // data to aggregate
      long tupleCount = 0;
      int tupleDataMem = 0;
      int tupleAllocatedMem = 0;
      int indexMem = 0;
      int stringMem = 0;

      // update table stats
      final VoltTable[] s1 = m_ee.getStats(SysProcSelector.TABLE, tableIds, false, time);
      if (s1 != null) {
        VoltTable stats = s1[0];
        assert (stats != null);

        // rollup the table memory stats for this site
        while (stats.advanceRow()) {
          tupleCount += stats.getLong(7);
          tupleAllocatedMem += (int) stats.getLong(8);
          tupleDataMem += (int) stats.getLong(9);
          stringMem += (int) stats.getLong(10);
        }
        stats.resetRowPosition();

        m_tableStats.setStatsTable(stats);
      }

      // update index stats
      final VoltTable[] s2 = m_ee.getStats(SysProcSelector.INDEX, tableIds, false, time);
      if ((s2 != null) && (s2.length > 0)) {
        VoltTable stats = s2[0];
        assert (stats != null);

        // rollup the index memory stats for this site
        while (stats.advanceRow()) {
          indexMem += stats.getLong(10);
        }
        stats.resetRowPosition();

        m_indexStats.setStatsTable(stats);
      }

      // update the rolled up memory statistics
      if (m_memStats != null) {
        m_memStats.eeUpdateMemStats(
            m_siteId,
            tupleCount,
            tupleDataMem,
            tupleAllocatedMem,
            indexMem,
            stringMem,
            m_ee.getThreadLocalPoolAllocations());
      }
    }
  }