/**
 * Parse the given command-line arguments, load the catalog, and initialize
 * the designer, estimation, and workload components that they reference.
 *
 * @param args the raw command-line arguments ("key=value" pairs)
 * @param required parameter names that must be set once parsing completes
 * @throws Exception if an argument is unknown or a referenced file fails to load
 */
@SuppressWarnings("unchecked")
public void process(String[] args, String... required) throws Exception {
    final boolean debug = LOG.isDebugEnabled();
    if (debug) LOG.debug("Processing " + args.length + " parameters...");
    final Pattern p = Pattern.compile("=");
    for (int i = 0, cnt = args.length; i < cnt; i++) {
        final String arg = args[i];
        final String[] parts = p.split(arg, 2);
        if (parts[0].startsWith("-")) parts[0] = parts[0].substring(1);

        if (parts.length == 1) {
            if (parts[0].startsWith("${") == false) this.opt_params.add(parts[0]);
            continue;
        } else if (parts[0].equalsIgnoreCase("tag")) {
            continue;
        } else if (parts[1].startsWith("${") || parts[0].startsWith("#")) {
            continue;
        }
        if (debug) LOG.debug(String.format("%-35s = %s", parts[0], parts[1]));

        // DesignerHints Override
        if (parts[0].startsWith(PARAM_DESIGNER_HINTS_PREFIX)) {
            String param = parts[0].replace(PARAM_DESIGNER_HINTS_PREFIX, "").toLowerCase();
            try {
                Field f = DesignerHints.class.getField(param);
                this.hints_params.put(f.getName(), parts[1]);
                if (debug) LOG.debug(String.format("DesignerHints.%s = %s", param, parts[1]));
            } catch (NoSuchFieldException ex) {
                throw new Exception("Unknown DesignerHints parameter: " + param, ex);
            }
        }
        // HStoreConf Parameter
        else if (HStoreConf.isConfParameter(parts[0])) {
            this.conf_params.put(parts[0].toLowerCase(), parts[1]);
        }
        // ArgumentsParser Parameter
        else if (PARAMS.contains(parts[0].toLowerCase())) {
            this.params.put(parts[0].toLowerCase(), parts[1]);
        }
        // Invalid!
        else {
            String suggestions = "";
            // Reusing the loop counter as the suggestion index is safe here
            // because we unconditionally throw below
            i = 0;
            String end = CollectionUtil.last(parts[0].split("\\."));
            for (String param : PARAMS) {
                String param_end = CollectionUtil.last(param.split("\\."));
                if (param.startsWith(parts[0]) ||
                    (end != null && param.endsWith(end)) ||
                    (end != null && param_end != null && param_end.startsWith(end))) {
                    if (suggestions.isEmpty()) suggestions = ".\nPossible Matches:";
                    suggestions += String.format("\n [%02d] %s", ++i, param);
                }
            } // FOR
            throw new Exception("Unknown parameter '" + parts[0] + "'" + suggestions);
        }
    } // FOR

    // -------------------------------------------------------
    // CATALOGS
    // -------------------------------------------------------

    // Text File
    if (this.params.containsKey(PARAM_CATALOG)) {
        String path = this.params.get(PARAM_CATALOG);
        if (debug) LOG.debug("Loading catalog from file '" + path + "'");
        Catalog catalog = CatalogUtil.loadCatalog(path);
        if (catalog == null) throw new Exception("Failed to load catalog object from file '" + path + "'");
        this.updateCatalog(catalog, new File(path));
    }
    // Jar File
    else if (this.params.containsKey(PARAM_CATALOG_JAR)) {
        String path = this.params.get(PARAM_CATALOG_JAR);
        this.params.put(PARAM_CATALOG, path);
        File jar_file = new File(path);
        Catalog catalog = CatalogUtil.loadCatalogFromJar(path);
        if (catalog == null) throw new Exception("Failed to load catalog object from jar file '" + path + "'");
        if (debug) LOG.debug("Loaded catalog from jar file '" + path + "'");
        this.updateCatalog(catalog, jar_file);

        // Infer the catalog type from the jar file name if it wasn't given
        if (!this.params.containsKey(PARAM_CATALOG_TYPE)) {
            String jar_name = jar_file.getName();
            int jar_idx = jar_name.lastIndexOf(".jar");
            if (jar_idx != -1) {
                ProjectType type = ProjectType.get(jar_name.substring(0, jar_idx));
                if (type != null) {
                    if (debug) LOG.debug("Set catalog type '" + type + "' from catalog jar file name");
                    this.catalog_type = type;
                    this.params.put(PARAM_CATALOG_TYPE, this.catalog_type.toString());
                }
            }
        }
    }
    // Schema File
    else if (this.params.containsKey(PARAM_CATALOG_SCHEMA)) {
        String path = this.params.get(PARAM_CATALOG_SCHEMA);
        Catalog catalog = CompilerUtil.compileCatalog(path);
        if (catalog == null) throw new Exception("Failed to load schema from '" + path + "'");
        if (debug) LOG.debug("Loaded catalog from schema file '" + path + "'");
        this.updateCatalog(catalog, new File(path));
    }

    // Catalog Type
    if (this.params.containsKey(PARAM_CATALOG_TYPE)) {
        String catalog_type = this.params.get(PARAM_CATALOG_TYPE);
        ProjectType type = ProjectType.get(catalog_type);
        if (type == null) {
            throw new Exception("Unknown catalog type '" + catalog_type + "'");
        }
        this.catalog_type = type;
    }

    // Update Cluster Configuration
    if (this.params.containsKey(ArgumentsParser.PARAM_CATALOG_HOSTS)) {
        ClusterConfiguration cc = new ClusterConfiguration(this.getParam(ArgumentsParser.PARAM_CATALOG_HOSTS));
        this.updateCatalog(FixCatalog.addHostInfo(this.catalog, cc), null);
    }

    // Check the requirements after loading the catalog, because some of the
    // above parameters will set the catalog one
    if (required != null && required.length > 0) this.require(required);

    // -------------------------------------------------------
    // PHYSICAL DESIGN COMPONENTS
    // -------------------------------------------------------
    if (this.params.containsKey(PARAM_PARTITION_PLAN)) {
        assert (this.catalog_db != null);
        File path = new File(this.params.get(PARAM_PARTITION_PLAN));
        boolean ignoreMissing = this.getBooleanParam(ArgumentsParser.PARAM_PARTITION_PLAN_IGNORE_MISSING, false);
        // Skip loading only when the file is missing *and* we were told to
        // ignore missing plans; otherwise let load() fail loudly
        if (path.exists() || ignoreMissing == false) {
            if (debug) LOG.debug("Loading in partition plan from '" + path + "'");
            this.pplan = new PartitionPlan();
            this.pplan.load(path.getAbsolutePath(), this.catalog_db);

            // Apply!
            if (this.params.containsKey(PARAM_PARTITION_PLAN_APPLY) && this.getBooleanParam(PARAM_PARTITION_PLAN_APPLY)) {
                boolean secondaryIndexes = this.getBooleanParam(PARAM_PARTITION_PLAN_NO_SECONDARY, false) == false;
                LOG.info(String.format("Applying PartitionPlan '%s' to catalog [enableSecondary=%s]",
                                       path.getName(), secondaryIndexes));
                this.pplan.apply(this.catalog_db, secondaryIndexes);
            }
        }
    }

    // -------------------------------------------------------
    // DESIGNER COMPONENTS
    // -------------------------------------------------------
    if (this.params.containsKey(PARAM_DESIGNER_THREADS)) {
        this.max_concurrent = Integer.valueOf(this.params.get(PARAM_DESIGNER_THREADS));
    }
    if (this.params.containsKey(PARAM_DESIGNER_INTERVALS)) {
        this.num_intervals = Integer.valueOf(this.params.get(PARAM_DESIGNER_INTERVALS));
    }
    if (this.params.containsKey(PARAM_DESIGNER_HINTS)) {
        String path = this.params.get(PARAM_DESIGNER_HINTS);
        if (debug) LOG.debug("Loading in designer hints from '" + path + "'.\nForced Values:\n" +
                             StringUtil.formatMaps(this.hints_params));
        this.designer_hints.load(path, catalog_db, this.hints_params);
    }
    if (this.params.containsKey(PARAM_DESIGNER_CHECKPOINT)) {
        this.designer_checkpoint = new File(this.params.get(PARAM_DESIGNER_CHECKPOINT));
    }

    String[] designer_attributes = {
        PARAM_DESIGNER_PARTITIONER,
        PARAM_DESIGNER_MAPPER,
        PARAM_DESIGNER_INDEXER,
        PARAM_DESIGNER_COSTMODEL
    };
    ClassLoader loader = ClassLoader.getSystemClassLoader();
    for (String key : designer_attributes) {
        if (this.params.containsKey(key)) {
            String target_name = this.params.get(key);
            Class<?> target_class = loader.loadClass(target_name);
            assert (target_class != null);
            if (debug) LOG.debug("Set " + key + " class to " + target_class.getName());

            if (key.equals(PARAM_DESIGNER_PARTITIONER)) {
                this.partitioner_class = (Class<? extends AbstractPartitioner>) target_class;
            } else if (key.equals(PARAM_DESIGNER_MAPPER)) {
                this.mapper_class = (Class<? extends AbstractMapper>) target_class;
            } else if (key.equals(PARAM_DESIGNER_INDEXER)) {
                this.indexer_class = (Class<? extends AbstractIndexSelector>) target_class;
            } else if (key.equals(PARAM_DESIGNER_COSTMODEL)) {
                this.costmodel_class = (Class<? extends AbstractCostModel>) target_class;

                // Special Case: TimeIntervalCostModel
                if (target_name.endsWith(TimeIntervalCostModel.class.getSimpleName())) {
                    this.costmodel = new TimeIntervalCostModel<SingleSitedCostModel>(
                            this.catalog_db, SingleSitedCostModel.class, this.num_intervals);
                } else {
                    this.costmodel = ClassUtil.newInstance(this.costmodel_class,
                                                           new Object[] { this.catalog_db },
                                                           new Class[] { Database.class });
                }
            } else {
                assert (false) : "Invalid key '" + key + "'";
            }
        }
    } // FOR

    // -------------------------------------------------------
    // TRANSACTION ESTIMATION COMPONENTS
    // -------------------------------------------------------
    if (this.params.containsKey(PARAM_MAPPINGS)) {
        assert (this.catalog_db != null);
        File path = new File(this.params.get(PARAM_MAPPINGS));
        if (path.exists()) {
            this.param_mappings.load(path.getAbsolutePath(), this.catalog_db);
        } else {
            LOG.warn("The ParameterMappings file '" + path + "' does not exist");
        }
    }

    if (this.params.containsKey(PARAM_MARKOV_THRESHOLDS_VALUE)) {
        assert (this.catalog_db != null);
        float defaultValue = this.getDoubleParam(PARAM_MARKOV_THRESHOLDS_VALUE).floatValue();
        this.thresholds = new EstimationThresholds(defaultValue);
        this.params.put(PARAM_MARKOV_THRESHOLDS, this.thresholds.toString());
        LOG.debug("CREATED THRESHOLDS: " + this.thresholds);
    } else if (this.params.containsKey(PARAM_MARKOV_THRESHOLDS)) {
        assert (this.catalog_db != null);
        this.thresholds = new EstimationThresholds();
        File path = new File(this.params.get(PARAM_MARKOV_THRESHOLDS));
        if (path.exists()) {
            this.thresholds.load(path.getAbsolutePath(), this.catalog_db);
        } else {
            LOG.warn("The estimation thresholds file '" + path + "' does not exist");
        }
        LOG.debug("LOADED THRESHOLDS: " + this.thresholds);
    }

    // -------------------------------------------------------
    // HASHER
    // -------------------------------------------------------
    if (this.catalog != null) {
        if (this.params.containsKey(PARAM_HASHER_CLASS)) {
            String hasherClassName = this.params.get(PARAM_HASHER_CLASS);
            this.hasher_class = (Class<? extends AbstractHasher>) loader.loadClass(hasherClassName);
        }
        Constructor<? extends AbstractHasher> constructor =
                this.hasher_class.getConstructor(new Class[] { Database.class, Integer.class });
        int num_partitions = CatalogUtil.getNumberOfPartitions(this.catalog_db);
        this.hasher = constructor.newInstance(new Object[] { this.catalog_db, num_partitions });
        if (!(this.hasher instanceof DefaultHasher)) LOG.debug("Loaded hasher " + this.hasher.getClass());
        if (this.params.containsKey(PARAM_HASHER_PROFILE)) {
            this.hasher.load(this.params.get(PARAM_HASHER_PROFILE), null);
        }
    }

    // -------------------------------------------------------
    // SAMPLE WORKLOAD TRACE
    // -------------------------------------------------------
    this.loadWorkload();
}
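// A minimal sketch of how process() is typically driven from a tool's
// main() entry point. The no-arg constructor and the choice of required
// parameter shown below are assumptions for illustration, not a guaranteed
// API; required parameters are passed through the varargs tail and enforced
// by this.require() after the catalog is loaded.
//
//     public static void main(String[] vargs) throws Exception {
//         ArgumentsParser args = new ArgumentsParser();       // hypothetical no-arg ctor
//         args.process(vargs, ArgumentsParser.PARAM_CATALOG); // PARAM_CATALOG must be set
//         // ... hand args.catalog / args.workload off to the tool ...
//     }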
/**
 * Load the sample workload trace and workload statistics referenced by the
 * parsed parameters, assembling the trace filter chain along the way.
 *
 * @throws Exception if the workload or statistics files fail to load
 */
private void loadWorkload() throws Exception {
    final boolean debug = LOG.isDebugEnabled();

    // Workload Trace
    if (this.params.containsKey(PARAM_WORKLOAD)) {
        assert (this.catalog_db != null) : "Missing catalog!";
        String path = new File(this.params.get(PARAM_WORKLOAD)).getAbsolutePath();

        boolean weightedTxns = this.getBooleanParam(PARAM_WORKLOAD_XACT_WEIGHTS, false);
        if (debug) LOG.debug("Use Transaction Weights in Limits: " + weightedTxns);

        // This will prune out duplicate trace records...
        if (params.containsKey(PARAM_WORKLOAD_REMOVE_DUPES)) {
            DuplicateTraceFilter filter = new DuplicateTraceFilter();
            this.workload_filter = (this.workload_filter != null ? filter.attach(this.workload_filter) : filter);
            if (debug) LOG.debug("Attached " + filter.debugImpl());
        }

        // TRANSACTION OFFSET
        if (params.containsKey(PARAM_WORKLOAD_XACT_OFFSET)) {
            this.workload_xact_offset = Long.parseLong(params.get(PARAM_WORKLOAD_XACT_OFFSET));
            ProcedureLimitFilter filter = new ProcedureLimitFilter(-1L, this.workload_xact_offset, weightedTxns);
            // Important! The offset should go in the front!
            this.workload_filter = (this.workload_filter != null ? filter.attach(this.workload_filter) : filter);
            if (debug) LOG.debug("Attached " + filter.debugImpl());
        }

        // BASE PARTITIONS
        if (params.containsKey(PARAM_WORKLOAD_RANDOM_PARTITIONS) || params.containsKey(PARAM_WORKLOAD_BASE_PARTITIONS)) {
            BasePartitionTxnFilter filter = new BasePartitionTxnFilter(new PartitionEstimator(catalog_db));

            // FIXED LIST
            if (params.containsKey(PARAM_WORKLOAD_BASE_PARTITIONS)) {
                for (String p_str : this.getParam(PARAM_WORKLOAD_BASE_PARTITIONS).split(",")) {
                    workload_base_partitions.add(Integer.valueOf(p_str));
                } // FOR
            }
            // RANDOM
            else {
                double factor = this.getDoubleParam(PARAM_WORKLOAD_RANDOM_PARTITIONS);
                List<Integer> all_partitions = new ArrayList<Integer>(CatalogUtil.getAllPartitionIds(catalog_db));
                Collections.shuffle(all_partitions, new Random());
                workload_base_partitions.addAll(all_partitions.subList(0, (int) (all_partitions.size() * factor)));
            }
            filter.addPartitions(workload_base_partitions);
            this.workload_filter = (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
            if (debug) LOG.debug("Attached " + filter.debugImpl());
        }

        // Txn Limit
        this.workload_xact_limit = this.getLongParam(PARAM_WORKLOAD_XACT_LIMIT);
        Histogram<String> proc_histogram = null;

        // Include/exclude procedures from the traces
        if (params.containsKey(PARAM_WORKLOAD_PROC_INCLUDE) || params.containsKey(PARAM_WORKLOAD_PROC_EXCLUDE)) {
            Filter filter = new ProcedureNameFilter(weightedTxns);

            // INCLUDE
            String temp = params.get(PARAM_WORKLOAD_PROC_INCLUDE);
            if (temp != null && !temp.equals(ProcedureNameFilter.INCLUDE_ALL)) {
                // We can take the counts for PROC_INCLUDE and scale them
                // with the multiplier
                double multiplier = 1.0d;
                if (this.hasDoubleParam(PARAM_WORKLOAD_PROC_INCLUDE_MULTIPLIER)) {
                    multiplier = this.getDoubleParam(PARAM_WORKLOAD_PROC_INCLUDE_MULTIPLIER);
                    if (debug) LOG.debug("Workload Procedure Multiplier: " + multiplier);
                }

                // Default Txn Frequencies
                String procinclude = params.get(PARAM_WORKLOAD_PROC_INCLUDE);
                if (procinclude.equalsIgnoreCase("default")) {
                    procinclude = AbstractProjectBuilder.getProjectBuilder(catalog_type)
                                                        .getTransactionFrequencyString();
                }

                Map<String, Integer> limits = new HashMap<String, Integer>();
                int total_unlimited = 0;
                int total = 0;
                for (String proc_name : procinclude.split(",")) {
                    int limit = -1;
                    // Check if there is a limit for this procedure
                    if (proc_name.contains(":")) {
                        String[] pieces = proc_name.split(":");
                        proc_name = pieces[0];
                        limit = (int) Math.round(Integer.parseInt(pieces[1]) * multiplier);
                    }

                    if (limit < 0) {
                        if (proc_histogram == null) {
                            if (debug) LOG.debug("Generating procedure histogram from workload file");
                            proc_histogram = WorkloadUtil.getProcedureHistogram(new File(path));
                        }
                        limit = (int) proc_histogram.get(proc_name, 0);
                        total_unlimited += limit;
                    } else {
                        total += limit;
                    }
                    limits.put(proc_name, limit);
                } // FOR

                // If we have a workload limit and some txns that we want to get
                // unlimited records from, then we want to modify the other txns
                // so that we fill in the "gap"
                if (this.workload_xact_limit != null && total_unlimited > 0) {
                    int remaining = this.workload_xact_limit.intValue() - total - total_unlimited;
                    if (remaining > 0) {
                        for (Entry<String, Integer> e : limits.entrySet()) {
                            double ratio = e.getValue() / (double) total;
                            e.setValue((int) Math.ceil(e.getValue() + (ratio * remaining)));
                        } // FOR
                    }
                }

                Histogram<String> proc_multiplier_histogram = null;
                if (debug) {
                    if (proc_histogram != null) LOG.debug("Full Workload Histogram:\n" + proc_histogram);
                    proc_multiplier_histogram = new Histogram<String>();
                }

                total = 0;
                for (Entry<String, Integer> e : limits.entrySet()) {
                    if (debug) proc_multiplier_histogram.put(e.getKey(), e.getValue());
                    ((ProcedureNameFilter) filter).include(e.getKey(), e.getValue());
                    total += e.getValue();
                } // FOR
                if (debug) LOG.debug("Multiplier Histogram [total=" + total + "]:\n" + proc_multiplier_histogram);
            }

            // EXCLUDE
            temp = params.get(PARAM_WORKLOAD_PROC_EXCLUDE);
            if (temp != null) {
                for (String proc_name : params.get(PARAM_WORKLOAD_PROC_EXCLUDE).split(",")) {
                    ((ProcedureNameFilter) filter).exclude(proc_name);
                } // FOR
            }

            // Sampling!!
            if (this.getBooleanParam(PARAM_WORKLOAD_PROC_SAMPLE, false)) {
                if (debug) LOG.debug("Attaching sampling filter");
                if (proc_histogram == null) proc_histogram = WorkloadUtil.getProcedureHistogram(new File(path));
                Map<String, Integer> proc_includes = ((ProcedureNameFilter) filter).getProcIncludes();
                SamplingFilter sampling_filter = new SamplingFilter(proc_includes, proc_histogram);
                filter = sampling_filter;
                if (debug) LOG.debug("Workload Procedure Histogram:\n" + proc_histogram);
            }

            // Attach our new filter to the chain (or make it the head if it's the first one)
            this.workload_filter = (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
            if (debug) LOG.debug("Attached " + filter.debugImpl());
        }

        // TRANSACTION LIMIT
        if (this.workload_xact_limit != null) {
            ProcedureLimitFilter filter = new ProcedureLimitFilter(this.workload_xact_limit, weightedTxns);
            this.workload_filter = (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
            if (debug) LOG.debug("Attached " + filter.debugImpl());
        }

        // QUERY LIMIT
        if (params.containsKey(PARAM_WORKLOAD_QUERY_LIMIT)) {
            this.workload_query_limit = Long.parseLong(params.get(PARAM_WORKLOAD_QUERY_LIMIT));
            QueryLimitFilter filter = new QueryLimitFilter(this.workload_query_limit);
            this.workload_filter = (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
        }

        if (this.workload_filter != null && debug) LOG.debug("Workload Filters: " + this.workload_filter.toString());
        this.workload = new Workload(this.catalog);
        this.workload.load(path, this.catalog_db, this.workload_filter);
        this.workload_path = new File(path).getAbsolutePath();
        if (this.workload_filter != null) this.workload_filter.reset();
    }

    // Workload Statistics
    if (this.catalog_db != null) {
        this.stats = new WorkloadStatistics(this.catalog_db);
        if (this.params.containsKey(PARAM_STATS)) {
            String path = this.params.get(PARAM_STATS);
            if (debug) LOG.debug("Loading in workload statistics from '" + path + "'");
            this.stats_path = new File(path).getAbsolutePath();
            try {
                this.stats.load(path, this.catalog_db);
            } catch (Throwable ex) {
                throw new RuntimeException("Failed to load stats file '" + this.stats_path + "'", ex);
            }
        }

        // Scaling
        if (this.params.containsKey(PARAM_STATS_SCALE_FACTOR)) {
            double scale_factor = this.getDoubleParam(PARAM_STATS_SCALE_FACTOR);
            LOG.info("Scaling TableStatistics: " + scale_factor);
            AbstractTableStatisticsGenerator generator =
                    AbstractTableStatisticsGenerator.factory(this.catalog_db, this.catalog_type, scale_factor);
            generator.apply(this.stats);
        }
    }
}
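// Design note: the workload_filter wiring in loadWorkload() is a small
// chain-of-responsibility. A new Filter is either made the head of the chain
// with filter.attach(existingChain) (used above so the transaction-offset
// filter runs before everything else) or appended to the tail with
// existingChain.attach(filter). For a run that sets every workload parameter,
// the chain assembled above evaluates in this order:
//
//     ProcedureLimitFilter(offset) -> DuplicateTraceFilter ->
//     BasePartitionTxnFilter -> ProcedureNameFilter (or SamplingFilter) ->
//     ProcedureLimitFilter(limit) -> QueryLimitFilter
//
// The append idiom is always the same null-safe ternary:
//
//     this.workload_filter = (this.workload_filter != null
//                                 ? this.workload_filter.attach(filter)
//                                 : filter);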