Example #1
  /**
   * @param projectFileURL URL of the project file.
   * @param clusterConfig Object containing the desired physical cluster parameters.
   * @param jarOutputPath The location to write the finished JAR to.
   * @param output Where to print status/errors to, usually stdout.
   * @param procInfoOverrides Optional overridden values for procedure annotations.
   * @return true if compilation succeeded, false otherwise.
   */
  public boolean compile(
      final String projectFileURL,
      final ClusterConfig clusterConfig,
      final String jarOutputPath,
      final PrintStream output,
      final Map<String, ProcInfoData> procInfoOverrides) {
    m_hsql = null;
    m_projectFileURL = projectFileURL;
    m_jarOutputPath = jarOutputPath;
    m_outputStream = output;
    // use this map as default annotation values
    m_procInfoOverrides = procInfoOverrides;

    LOG.l7dlog(
        Level.DEBUG,
        LogKeys.compiler_VoltCompiler_LeaderAndHostCountAndSitesPerHost.name(),
        new Object[] {
          clusterConfig.getLeaderAddress(),
          clusterConfig.getHostCount(),
          clusterConfig.getSitesPerHost()
        },
        null);

    // do all the work to get the catalog
    final Catalog catalog = compileCatalog(projectFileURL, clusterConfig);
    if (catalog == null) {
      LOG.error(
          "VoltCompiler had " + m_errors.size() + " errors\n" + StringUtil.join("\n", m_errors));
      return false;
    }

    // WRITE CATALOG TO JAR HERE
    final String catalogCommands = catalog.serialize();

    byte[] catalogBytes = null;
    try {
      catalogBytes = catalogCommands.getBytes("UTF-8");
    } catch (final UnsupportedEncodingException e1) {
      addErr("Can't encode the compiled catalog file correctly");
      return false;
    }

    // Create Dtxn.Coordinator configuration for cluster
    //        byte[] dtxnConfBytes = null;
    //        try {
    //            dtxnConfBytes = HStoreDtxnConf.toHStoreDtxnConf(catalog).getBytes("UTF-8");
    //        } catch (final Exception e1) {
    //            addErr("Can't encode the Dtxn.Coordinator configuration file correctly");
    //            return false;
    //        }

    try {
      //            m_jarBuilder.addEntry("dtxn.conf", dtxnConfBytes);
      m_jarBuilder.addEntry(CatalogUtil.CATALOG_FILENAME, catalogBytes);
      m_jarBuilder.addEntry("project.xml", new File(projectFileURL));
      for (final Entry<String, String> e : m_ddlFilePaths.entrySet()) {
        m_jarBuilder.addEntry(e.getKey(), new File(e.getValue()));
      }
      m_jarBuilder.writeJarToDisk(jarOutputPath);
    } catch (final VoltCompilerException e) {
      return false;
    }

    // The assert catches leftover errors in debug runs; assertions may be
    // disabled at runtime, so the explicit check below still guards release builds.
    assert (!hasErrors());

    if (hasErrors()) {
      return false;
    }

    return true;
  }
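
For reference, a minimal sketch of how this entry point might be invoked. It is not taken from the project: the package imports, the no-argument VoltCompiler constructor, the ClusterConfig constructor arguments, and the file names are assumptions for illustration only.

import java.util.HashMap;
import java.util.Map;
import org.voltdb.ProcInfoData;
import org.voltdb.compiler.ClusterConfig;
import org.voltdb.compiler.VoltCompiler;

public class CompileDriver {
  public static void main(String[] args) {
    final VoltCompiler compiler = new VoltCompiler();

    // Hypothetical cluster shape; these constructor arguments are illustrative
    // and may not match the project's actual ClusterConfig signature.
    final ClusterConfig clusterConfig = new ClusterConfig(2, 4, 0, "localhost");

    // No procedure-annotation overrides for this run.
    final Map<String, ProcInfoData> overrides = new HashMap<String, ProcInfoData>();

    final boolean success = compiler.compile(
        "myproject.xml",   // project file URL
        clusterConfig,
        "mycatalog.jar",   // where the finished JAR is written
        System.out,        // status and errors printed to stdout
        overrides);

    if (!success) {
      System.err.println("Catalog compilation failed; see errors above.");
    }
  }
}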
Example #2
  @SuppressWarnings("unchecked")
  public Catalog compileCatalog(final String projectFileURL, final ClusterConfig clusterConfig) {
    if (!clusterConfig.validate()) {
      addErr(clusterConfig.getErrorMsg());
      return null;
    }

    // Compiler instance is reusable. Clear the cache.
    cachedAddedClasses.clear();
    m_currentFilename = new File(projectFileURL).getName();
    m_jarBuilder = new JarBuilder(this);

    if (m_outputStream != null) {
      m_outputStream.println("\n** BEGIN PROJECT COMPILE: " + m_currentFilename + " **");
    }

    ProjectType project = null;

    try {
      JAXBContext jc = JAXBContext.newInstance("org.voltdb.compiler.projectfile");
      // Load the XSD used to validate the project file.
      SchemaFactory sf = SchemaFactory.newInstance(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI);
      Schema schema = sf.newSchema(this.getClass().getResource("ProjectFileSchema.xsd"));
      Unmarshaller unmarshaller = jc.createUnmarshaller();
      // Attach the schema so unmarshalling is validated against it.
      unmarshaller.setSchema(schema);
      JAXBElement<ProjectType> result =
          (JAXBElement<ProjectType>) unmarshaller.unmarshal(new File(projectFileURL));
      project = result.getValue();
    } catch (JAXBException e) {
      // Convert some linked exceptions to more friendly errors.
      if (e.getLinkedException() instanceof java.io.FileNotFoundException) {
        addErr(e.getLinkedException().getMessage());
        return null;
      }
      if (e.getLinkedException() instanceof org.xml.sax.SAXParseException) {
        addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
        return null;
      }
      throw new RuntimeException(e);
    } catch (SAXException e) {
      addErr("Error schema validating project.xml file. " + e.getMessage());
      return null;
    }

    try {
      compileXMLRootNode(project);
    } catch (final VoltCompilerException e) {
      //            compilerLog.l7dlog( Level.ERROR,
      // LogKeys.compiler_VoltCompiler_FailedToCompileXML.name(), null);
      LOG.error(e.getMessage(), e);
      // e.printStackTrace();
      return null;
    }
    assert (m_catalog != null);

    try {
      ClusterCompiler.compile(m_catalog, clusterConfig);
    } catch (RuntimeException e) {
      addErr(e.getMessage());
      return null;
    }

    // Optimization: Vertical Partitioning
    if (m_enableVerticalPartitionOptimizations) {
      if (m_verticalPartitionPlanner == null) {
        m_verticalPartitionPlanner =
            new VerticalPartitionPlanner(CatalogUtil.getDatabase(m_catalog), true);
      }
      try {
        m_verticalPartitionPlanner.optimizeDatabase();
      } catch (Exception ex) {
        LOG.warn("Unexpected error", ex);
        addErr("Failed to apply vertical partition optimizations");
      }
    }

    // add epoch info to catalog
    final int epoch = (int) (TransactionIdManager.getEpoch() / 1000);
    m_catalog.getClusters().get("cluster").setLocalepoch(epoch);

    // done handling files
    m_currentFilename = null;
    return m_catalog;
  }
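
The project-file parsing above is ordinary schema-validated JAXB unmarshalling. Below is a stripped-down sketch of the same pattern, using a hypothetical Config binding class and config.xsd file in place of the generated ProjectType bindings and ProjectFileSchema.xsd; only the JAXB/SchemaFactory calls themselves are taken from the example.

import java.io.File;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.xml.sax.SAXException;

public class ValidatedLoader {

  // Hypothetical JAXB-annotated binding class standing in for the generated
  // org.voltdb.compiler.projectfile.ProjectType.
  @XmlRootElement(name = "config")
  public static class Config {
    public String name;
  }

  public static Config load(File xmlFile, File xsdFile) throws JAXBException, SAXException {
    JAXBContext jc = JAXBContext.newInstance(Config.class);

    // Build the schema first; validation failures during unmarshalling then
    // surface as a JAXBException whose linked exception is a SAXParseException,
    // which is how compileCatalog() above tells schema errors apart from
    // missing files.
    SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
    Schema schema = sf.newSchema(xsdFile);

    Unmarshaller unmarshaller = jc.createUnmarshaller();
    unmarshaller.setSchema(schema);
    return (Config) unmarshaller.unmarshal(xmlFile);
  }
}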
Example #3
  void addErr(final String msg) {
    addErr(msg, NO_LINE_NUMBER);
  }