@Test public void orderDuplication() { List<Order> orders; Problem problem; // empty problem orders = Lists.newArrayList(); problem = new Problem(100, orders); assertEquals(0, problem.size()); // two orders with different lengths orders = Lists.newArrayList(); orders.add(new Order(20, 5)); orders.add(new Order(30, 10)); problem = new Problem(100, orders); assertEquals(2, problem.size()); // order with length 20 encountered twice, // corresponding demands are summed up orders = Lists.newArrayList(); orders.add(new Order(20, 5)); orders.add(new Order(30, 10)); orders.add(new Order(20, 3)); problem = new Problem(100, orders); assertEquals(2, problem.size()); }
/**
 * Entry point: reads a serialized problem description from {@code args[0]}, instantiates
 * the problem class named in the 4th '#'-separated field of that string, solves it and
 * prints the result as JSON.
 *
 * @param args args[0] holds the '#'-separated problem data string
 * @throws ReflectiveOperationException if the named class cannot be found or instantiated
 */
public static void main(String[] args) throws ReflectiveOperationException {
  if (args.length == 0) {
    throw new IllegalArgumentException("expected the problem data string as the first argument");
  }
  String data = args[0];
  String className = data.split("#")[3];
  // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
  // which silently propagates checked constructor exceptions.
  Problem o = (Problem) Class.forName(className).getDeclaredConstructor().newInstance();
  o.fromJSON(data);
  System.out.println(o.solve().toJSON());
}
public void onThrowable(String url2, Throwable ex) { // don't log, it is logged elsewhere if (log.isInfoEnabled()) log.info("on throwable"); Problem p = new Problem(); p.setException(ex); p.setUrl(url2); promise.addResponse(p); }
/**
 * Batch-generates mutated problem instances: reads every model/assignment file pair from
 * data/B, applies a fixed series of random perturbations, and writes the results to
 * data/E. Exits with status 1 if a generated instance rejects its original solution.
 *
 * @param args not used
 */
public static void main(String[] args) {
  // Fixed seed so the generated data set is reproducible across runs.
  random = new Random(1234321);
  final String DATA_DIR = "data/B";
  final String OUTPUT_DIR = "data/E";
  File dir = new File(DATA_DIR);
  String[] children = dir.list();
  for (int i = 0; i < children.length; i++) {
    String fileName = children[i];
    // in this directory we have two types of files, so process the test only when we
    // read a model file
    if (fileName.startsWith("model") && !fileName.contains("test")) {
      // Each model file has a sibling assignment file with the same suffix.
      String assignmentFile = fileName.replaceFirst("model", "assignment");
      String modelFile = fileName;
      ProblemGenerator pg =
          new ProblemGenerator(new File(DATA_DIR, modelFile), new File(DATA_DIR, assignmentFile));
      // pg.makeMoreResources(random.nextBoolean() ? 12 : 1);
      // pg.resourceUsage = pg.computeResourceUsage();
      pg.reorderMachines();
      pg.reorderProcesses();
      pg.addResources(18, 25); // 20 will be more likely
      pg.changeProcessRequirement(0.8, 1.2); // +/- 10%
      pg.changeMachineCapacities(0.8, 1.3); // +/- 10%
      pg.changeMachineSafetyCapacities(0.8, 1.2); // +/- 10%
      pg.changeMachineMoveCosts(-1, +1); // +/- 1
      pg.changeTransient(0.2); // 20%
      pg.changeResourceLoadCost(0.9, 1.1); // +/- 10%
      pg.changeSpread(0.6, 1.4); // +/- 40$
      pg.addBalances(1, 4); // add from 1 to 2 new balances
      pg.changeServiceDependencies(0.2, 0.2); // add up to 20%, remove max 20%
      pg.changeWeights(0.5, 5.0);
      File newModel = new File(OUTPUT_DIR, modelFile);
      File newAssignement = new File(OUTPUT_DIR, assignmentFile);
      pg.saveProblem(newModel, newAssignement);
      // Sanity check: the perturbed instance must still accept its original solution.
      Problem problem = new Problem(newModel, newAssignement);
      if (!problem.isSolutionFeasible(problem.getOriginalSolution())) {
        System.err.println("Ojej :(");
        System.exit(1);
      }
      // break;
    }
  }
}
ProblemOnNode(Problem p, GraphClass n) { if (!p.validFor(n)) throw new IllegalArgumentException(p.getName() + " not applicable to " + n.getID()); problem = p; node = n; algos = new HashSet<Algorithm>(); complexity = new Complexity[STEPS]; for (int i = 0; i < complexity.length; i++) complexity[i] = Complexity.UNKNOWN; }
/**
 * Standard point of execution.
 *
 * @param args - not used
 */
public static void main(String[] args) {
  final Problem current = new PE0059();
  final String answer = current.getResult();
  IO.info("result for problem #" + current);
  IO.info(" is '" + answer + "'");
  IO.infoln(" found in " + current.getRuntime(2) + " ms");
}
/**
 * This method is the access point to the planning procedure. Initially, it adds all variables
 * from axioms to the set of found vars, then does the linear planning. If lp does not solve the
 * problem and there are subtasks, goal-driven recursive planning with backtracking is invoked.
 * Planning is performed until no new variables are introduced into the algorithm.
 *
 * @param problem the problem to plan for
 * @param _computeAll when true, planning tries to derive everything computable rather than
 *     stopping at specific goals
 * @return the constructed evaluation algorithm (possibly partial if the problem is unsolved)
 */
public EvaluationAlgorithm invokePlaning(Problem problem, boolean _computeAll) {
  long startTime = System.currentTimeMillis();
  computeAll = _computeAll;
  EvaluationAlgorithm algorithm = new EvaluationAlgorithm();
  PlanningContext context = problem.getCurrentContext();
  // add all axioms at the beginning of an algorithm
  Collection<Var> flattened = new HashSet<Var>();
  for (Iterator<Rel> axiomIter = problem.getAxioms().iterator(); axiomIter.hasNext(); ) {
    Rel rel = axiomIter.next();
    // flattened receives this axiom's output vars (recursively unfolded)
    unfoldVarsToSet(rel.getOutputs(), flattened);
    // do not overwrite values of variables that come via args of compute() or as inputs of
    // independent subtasks
    if (!problem.getAssumptions().containsAll(flattened)
    // do not overwrite values of already known variables.
    // typically this is the case when a value of a variable
    // is given in a scheme via a properties window
    // && !problem.getKnownVars().containsAll( flattened )
    ) {
      algorithm.addRel(rel);
    }
    // each axiom is consumed exactly once; its outputs become known vars
    axiomIter.remove();
    context.getKnownVars().addAll(flattened);
    flattened.clear();
  }
  context.getFoundVars().addAll(context.getKnownVars());
  // remove all known vars with no relations
  for (Iterator<Var> varIter = context.getKnownVars().iterator(); varIter.hasNext(); ) {
    if (varIter.next().getRels().isEmpty()) {
      varIter.remove();
    }
  }
  // start planning: linear search when there are no subtasks, otherwise subtask planning
  if (problem.getRelsWithSubtasks().isEmpty()
      && linearForwardSearch(context, algorithm, computeAll)) {
    if (isLinearLoggingOn()) logger.debug("Problem solved without subtasks");
  } else if (!problem.getRelsWithSubtasks().isEmpty() && subtaskPlanning(problem, algorithm)) {
    if (isLinearLoggingOn()) logger.debug("Problem solved with subtasks");
  } else if (!computeAll) {
    if (isLinearLoggingOn()) logger.debug("Problem not solved");
  }
  if (!nested) {
    logger.info("Planning time: " + (System.currentTimeMillis() - startTime) + "ms.");
  }
  return algorithm;
}
/**
 * Joins every problem as "line.character:message", with ", " between entries.
 * Returns the empty string when there are no problems.
 */
private String getAllProblems() {
  final StringBuilder out = new StringBuilder();
  String separator = "";
  for (Problem problem : problems) {
    out.append(separator)
        .append(problem.getLine())
        .append('.')
        .append(problem.getCharacter())
        .append(':')
        .append(problem.getMessage());
    separator = ", ";
  }
  return out.toString();
}
Problem newProblem(int num) { // ----------------------------- String msg = ""; if (num == ACCOUNT_NOT_FOUND) msg = PB_ACCOUNT_NOT_FOUND; else if (num == NOT_ENOUGH_MONEY) msg = PB_NOT_ENOUGH_MONEY; else if (num == ILLEGAL_OPERATION) msg = PB_ILLEGAL_OPERATION; Problem prob = new Problem(); prob.setNum(num); prob.setMsg(msg); return prob; }
public JSONObject getJSON(JSONObject jo) { Problem p = null; try { p = ProblemMgr.getProblem(getProbId()); } catch (SQLException e) { e.printStackTrace(); // To change body of catch statement use File | Settings | File // Templates. } if (p != null && p.isQuickAuth() && p.isMultiChoice()) { jo.element(letter, val); } return jo; }
/**
 * Copies every (variable-name, value) binding from {@code assignmentMap} into this
 * solution, resolving each name to its variable via the problem.
 *
 * @param assignmentMap bindings keyed by variable name
 * @return this solution, for chaining
 */
@SuppressWarnings("unchecked")
public Solution<V, DT> merge(Map<String, DT> assignmentMap) {
  // Iterate entries instead of keySet()+get(): one map lookup per binding instead of two.
  for (Map.Entry<String, DT> entry : assignmentMap.entrySet()) {
    put((V) problem.getVariable(entry.getKey()), entry.getValue());
  }
  return this;
}
/**
 * Depth-first backtracking search: succeeds immediately if the problem is already
 * solved; otherwise executes each possible step in turn, recursing, and undoing the
 * step when the recursive attempt fails.
 *
 * @return true if some sequence of steps leads to a solved problem
 */
public boolean solve() {
  if (problem.solved()) {
    return true;
  }
  List steps = problem.possibleSteps();
  for (Object candidate : steps) {
    Step step = (Step) candidate;
    problem.execute(step);
    if (solve()) {
      return true;
    }
    problem.undo(step);
  }
  return false;
}
/**
 * Initializes the {@link #sieve} with the maximum of {@link #max}
 *
 * @see Sieve#Sieve(int)
 */
@Override
public void prepare() {
  super.prepare();
  cnt = -1; // reset the counter before sieving
  max = 500000; // upper bound handed to the sieve
  sieve = new Sieve(max); // must run after max is assigned
}
/** @param args */ public static void main(String[] args) { long startTime = System.currentTimeMillis(); try { Class<?> problemClass = Class.forName("problems.Problem" + args[0]); Problem problemInstance = (Problem) problemClass.newInstance(); problemInstance.solve(); } catch (ClassNotFoundException | SecurityException | InstantiationException | IllegalAccessException e) { // TODO Auto-generated catch block e.printStackTrace(); } long endTime = System.currentTimeMillis(); System.out.println("Done [Elapsed: " + ((endTime - startTime) / 1000.0) + "\"]"); }
/** Add an algorithm at the given deduction step. */
void addAlgo(Algorithm a, int step) {
  algos.add(a);
  try {
    updateComplexity(a.getComplexity(), step);
  } catch (ComplexityClashException e) {
    // Report the clash but keep the algorithm registered.
    final String details =
        "Complexity clash for " + problem.getName() + " on " + node + " " + a + " and " + algos;
    System.err.println(details);
  }
}
/**
 * Writes the given problems under a category header, alternating even/odd row styles
 * and selecting the error/warning template per problem severity. Emits the
 * "no elements" template when the category is empty or null.
 *
 * @param writer destination for the formatted report rows
 * @param categoryName category title substituted into the header/footer templates
 * @param problems problems to render; may be null or empty
 */
private void dumpProblems(PrintWriter writer, String categoryName, Problem[] problems) {
  if (problems == null || problems.length == 0) {
    writer.println(
        MessageFormat.format(
            Messages.fullReportTask_category_no_elements, new String[] {categoryName}));
    return;
  }
  writer.println(
      MessageFormat.format(Messages.fullReportTask_categoryheader, new String[] {categoryName}));
  for (int i = 0, max = problems.length; i < max; i++) {
    Problem problem = problems[i];
    String template = entryTemplate(problem.severity, (i % 2) == 0);
    // Severities other than ERROR/WARNING are intentionally not rendered.
    if (template != null) {
      writer.println(MessageFormat.format(template, new String[] {problem.getHtmlMessage()}));
    }
  }
  writer.println(Messages.fullReportTask_categoryfooter);
}

/**
 * Returns the row template for the given severity and row parity, or null for any
 * severity other than ERROR/WARNING.
 */
private static String entryTemplate(int severity, boolean evenRow) {
  switch (severity) {
    case ApiPlugin.SEVERITY_ERROR:
      return evenRow
          ? Messages.fullReportTask_problementry_even_error
          : Messages.fullReportTask_problementry_odd_error;
    case ApiPlugin.SEVERITY_WARNING:
      return evenRow
          ? Messages.fullReportTask_problementry_even_warning
          : Messages.fullReportTask_problementry_odd_warning;
    default:
      return null;
  }
}
public Solution randomSolution(Problem pb) { // n number of total jobs, rj number of jobs randomly chosen for each batch, rb randomly chosen // index of each batch int n = pb.getNp(), rj, rb; Solution sol = new Solution(pb); Random rand = new Random(); ; // random selection of production batches while (n > 0) { rj = rand.nextInt(n) + 1; // between 1 and n jobs per batch rb = rand.nextInt( sol.getProductionSequenceMT().size() + 1); // batch randomly inserted in vector to prevent front-stacking (due to // probabilistic properties) sol.getProductionSequenceMT().add(rb, new Batch(rj)); n -= rj; } n = pb.getNp(); // random selection of transport batches, taking into account transporter capacity while (n > 0) { rj = rand.nextInt(Math.min(pb.transporter.getCapacity(), n)) + 1; rb = rand.nextInt(sol.getDeliverySequenceMT().size() + 1); sol.getDeliverySequenceMT().add(rb, new Batch(rj)); n -= rj; } sol.evaluate(); return sol; }
/**
 * Trains a liblinear L2-regularized logistic regression model (solver L2R_LR) on the
 * statically prepared training matrix, saves it to the file "model", and reloads it into
 * the static {@code model} field. Any failure is only printed, not rethrown.
 */
public static void MLalgo() {
  try {
    Problem problem = new Problem();
    problem.l = train_count; // number of training examples
    problem.n = max_feature_count; // number of features
    problem.x = train_matrix; // feature nodes
    problem.y = ylable; // target values;
    SolverType solver = SolverType.L2R_LR; // -s 0
    double C = 1.0; // cost of constraints violation
    double eps = 0.01; // stopping criteria
    Parameter parameter = new Parameter(solver, C, eps);
    model = Linear.train(problem, parameter);
    File modelFile = new File("model");
    model.save(modelFile);
    // load model or use it directly
    model = Model.load(modelFile);
  } catch (Exception e) {
    // NOTE(review): broad catch swallows all training/IO errors — confirm intended.
    e.printStackTrace();
  }
}
/**
 * Reads a problem class name, solver class name, max cost and max solution count from
 * stdin, instantiates them reflectively, reads the problem body, solves it, and prints
 * every solution's steps. Prints a diagnostic and returns early on any failure.
 */
void getInfo() {
  Scanner console = new Scanner(System.in);
  int maxCost = 0, maxSols = 0;
  Problem prob = null;
  Solver solve = null;
  System.out.print("Enter problem type, solution type, " + "max cost and max # of solutions: ");
  try {
    // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance().
    prob = (Problem) Class.forName(console.next()).getDeclaredConstructor().newInstance();
    solve = (Solver) Class.forName(console.next()).getDeclaredConstructor().newInstance();
    maxCost = console.nextInt();
    maxSols = console.nextInt();
  } catch (Exception e) {
    System.out.println("" + e);
    // BUG FIX: bail out instead of falling through with prob/solve still null,
    // which previously caused a NullPointerException at prob.read() below.
    return;
  }
  try {
    prob.read(console);
  } catch (Exception e) {
    System.out.println("Read error: " + e);
    return;
  }
  Solver.Solution[] sols;
  sols = solve.solveProblem(prob, maxCost, maxSols);
  if (sols == null) {
    System.out.println("No solutions ");
    return;
  }
  System.out.println("Answers are: ");
  // The solutions array may be partially filled; stop at the first null slot.
  for (int ans = 0; ans < sols.length && sols[ans] != null; ans++) {
    System.out.println("Answer " + ans + " with cost " + (sols[ans].mSteps.length - 1));
    for (int stepNdx = 1; stepNdx < sols[ans].mSteps.length; stepNdx++)
      System.out.println(" " + sols[ans].mSteps[stepNdx]);
  }
}
/**
 * Passes required information to RosterSolver and returns a new result to be displayed in a
 * table.
 */
public void generateRoster() {
  ArrayList<Nurse> nurses = ward.getListOfNurses();
  // Debug dump of the ward and nurse configuration being solved.
  System.out.println("USED FOR TESTING PURPOSES!------------------------");
  System.out.println(
      "Ward Name: "
          + ward.getWardName()
          + " Roster Days: "
          + ward.getRoster()
          + " Number of Nurses: "
          + ward.getListOfNurses().size());
  System.out.println(" Nurses:");
  for (int i = 0; i < nurses.size(); i++) {
    Nurse n = nurses.get(i);
    System.out.print(" Nurse Name: ");
    System.out.println(
        n.getNurseName()
            + " Qualification: "
            + n.getQualification()
            + " Shift Pattern: "
            + n.getShiftPattern()
            + " Number of Shifts: "
            + n.getShifts());
  }
  System.out.println("--------------------------------------------------");
  RosterSolver r = new RosterSolver(ward.getListOfNurses().size(), ward.getRoster());
  ArrayList<Nurse> nurseList = ward.getListOfNurses();
  // Encode each nurse's shift pattern (SP) and qualification (Q) as the solver's
  // numeric codes: DN=6, N=5, D=4; SRN=1, RN=2.
  int SP = 0;
  int Q = 0;
  for (int i = 0; i < nurseList.size(); i++) {
    // NOTE(review): SP/Q keep the previous nurse's value when a pattern or
    // qualification matches none of the expected strings — confirm intended.
    if (nurseList.get(i).getShiftPattern().equals("DN")) {
      SP = 6;
    }
    if (nurseList.get(i).getShiftPattern().equals("N")) {
      SP = 5;
    }
    if (nurseList.get(i).getShiftPattern().equals("D")) {
      SP = 4;
    }
    if (nurseList.get(i).getQualification().equals("SRN")) {
      Q = 1;
    }
    if (nurseList.get(i).getQualification().equals("RN")) {
      Q = 2;
    }
    r.setNurseShiftPattern(i, SP);
    r.setNurseGrade(i, Q);
  }
  Problem result = r.run();
  // Report any errors collected by the solver during the run.
  for (String error : Problem.error) {
    System.out.print("\n" + error);
  }
  resultString = result.getCompletedRoster();
  for (int i = 0; i < resultString.size(); i++) {
    System.out.println(resultString.get(i));
  }
  // result.printRoster();
}
/**
 * Branch-and-bound over relaxed problem nodes: nodes are popped from the stack, pruned
 * or accepted as incumbent when integral, otherwise branched on the first fractional
 * coordinate.
 *
 * @param problem the root relaxed problem
 * @param ciudadesSize number of x/y coordinate pairs (cities)
 * @return triple {Bx, By, cota}: best x, best y, best objective bound found
 */
public double[] runBranchAndBound(Problem problem, int ciudadesSize) {
  Nodo nodoInicial = new Nodo(problem);
  stack.push(nodoInicial);
  while (!stack.isEmpty()) {
    Nodo nodoActual = stack.pop();
    double Z;
    double[] Nx = new double[ciudadesSize];
    double[] Ny = new double[ciudadesSize];
    Z = nodoActual.getZ();
    for (int i = 0; i < ciudadesSize; i++) {
      Nx[i] = nodoActual.getNxi(i);
      Ny[i] = nodoActual.getNyi(i);
    }
    // Count coordinates whose relaxed value is not (near-)integral.
    int variablesNoEnteras = 0;
    double epsilon = 0.0000001;
    for (int i = 0; i < ciudadesSize; i++) {
      if (!(Math.abs(Math.floor(Nx[i]) - Nx[i]) <= epsilon)) {
        variablesNoEnteras++;
      }
      if (!(Math.abs(Math.floor(Ny[i]) - Ny[i]) <= epsilon)) {
        variablesNoEnteras++;
      }
    }
    // NOTE(review): with '||', this branch is taken whenever Z != -1, so branching only
    // happens when Z == -1 AND Z >= cota AND some variable is fractional — verify the
    // intended operator (possibly '&&') against the algorithm's specification.
    if (Z != -1 || Z < cota || variablesNoEnteras == 0) {
      if (variablesNoEnteras == 0 && Z > cota) {
        // All-integer solution better than the incumbent: update bound and best point.
        cota = Z;
        Bx = nodoActual.getBx();
        By = nodoActual.getBy();
      }
    } else {
      // Pick a fractional coordinate to branch on (the loop exits via i = ciudadesSize;
      // a fractional Ny at the same index overrides a fractional Nx).
      int variable = -1;
      boolean isX = false;
      for (int i = 0; i < ciudadesSize; i++) {
        if (!(Math.abs(Math.floor(Nx[i]) - Nx[i]) <= epsilon)) {
          isX = true;
          variable = i;
          i = ciudadesSize; // exit the loop
        }
        if (!(Math.abs(Math.floor(Ny[i]) - Ny[i]) <= epsilon)) {
          isX = false;
          variable = i;
          i = ciudadesSize; // exit the loop
        }
      }
      // NOTE(review): p1 and p2 come from the same getter; if getProblem() returns the
      // same instance rather than a copy, both bound changes below mutate one object —
      // confirm getProblem()'s copy semantics.
      Problem p1 = nodoActual.getProblem();
      Problem p2 = nodoActual.getProblem();
      if (variable != -1) {
        if (isX) {
          // Branch on x: one child bounded at floor, the other at ceil.
          double valueFloor = Math.floor(Nx[variable]);
          double valueCeil = Math.ceil(Nx[variable]);
          p1.setVarLowerBound("Nx" + variable, valueFloor);
          p2.setVarUpperBound("Nx" + variable, valueCeil);
          Nodo nodoNuevo1 = new Nodo(p1);
          Nodo nodoNuevo2 = new Nodo(p2);
          stack.add(nodoNuevo1);
          stack.add(nodoNuevo2);
        } else {
          // Branch on y, symmetrically.
          double valueFloor = Math.floor(Ny[variable]);
          double valueCeil = Math.ceil(Ny[variable]);
          p1.setVarLowerBound("Ny" + variable, valueFloor);
          p2.setVarUpperBound("Ny" + variable, valueCeil);
          Nodo nodoNuevo1 = new Nodo(p1);
          Nodo nodoNuevo2 = new Nodo(p2);
          stack.add(nodoNuevo1);
          stack.add(nodoNuevo2);
        }
      }
    }
  }
  // Result triple: best x, best y, best objective bound.
  double[] salida = new double[3];
  salida[0] = Bx;
  salida[1] = By;
  salida[2] = cota;
  return salida;
}
/**
 * Parses a Klocwork XML result file into a {@link KloReport}: validates the file against
 * the Klocwork 9.2 output model, then copies every problem's fields (via reflection) into
 * KloFile entries, splitting them into low (severitylevel &lt;= 3) and high severities.
 *
 * @param file the Klocwork XML output file; must be non-null and exist
 * @return the populated report (empty on JAXB failure, which is only printed)
 * @throws IOException declared for callers; reading is delegated to getErrorList
 * @throws IllegalArgumentException if the file is null, missing, or fails validation
 */
public KloReport parse(final File file) throws IOException {
  if (file == null) {
    throw new IllegalArgumentException("File input is mandatory.");
  }
  if (!file.exists()) {
    throw new IllegalArgumentException("File input " + file.getName() + " must exist.");
  }
  KloReport report = new KloReport();
  // Validate the XML against the Klocwork 9.2 output model before parsing.
  List<ValidationError> list = KlocworkModel.OUTPUT_KLOCWORK_9_2.validate(file);
  if (!list.isEmpty()) {
    StringBuilder sb = new StringBuilder("XML Validation failed. See errors below :\n");
    for (ValidationError val : list) {
      sb.append(val.toString()).append("\n");
    }
    throw new IllegalArgumentException(sb.toString());
  }
  try {
    ErrorList errList = getErrorList(file);
    List<KloFile> lowSeverities = new ArrayList<KloFile>();
    List<KloFile> highSeverities = new ArrayList<KloFile>();
    List<KloFile> errors = new ArrayList<KloFile>();
    int i = 0;
    for (Problem problem : errList.getProblem()) {
      KloFile kloFile;
      kloFile = new KloFile();
      kloFile.setKey(i + 1);
      /** Using reflection to get the tags' name and value and to put them in kloFile map */
      for (Field f : problem.getClass().getDeclaredFields()) {
        f.setAccessible(true);
        try {
          String name = f.getName();
          Object value = f.get(problem);
          if (value != null) {
            String valueToString = value.toString();
            // Changing the default value returned by Object.toString() by an empty value
            if (valueToString.startsWith("com.thalesgroup.hudson.plugins.klocwork.model")
                && valueToString.contains("@")) {
              kloFile.store(name, "");
            } else {
              kloFile.store(name, valueToString);
            }
            // Treating the trace tag
            if (name.equals("trace")) {
              Trace trace = (Trace) value;
              for (TraceBlock tracelt : trace.getTraceBlock()) {
                kloFile.addTraceBlock(
                    tracelt.getFile(), tracelt.getMethod(), tracelt.getName(), tracelt.getId());
                for (TraceLine traceLinelt : tracelt.getTraceLine()) {
                  // Element traceLinelt = (Element) listTraceLine.get(k);
                  // refId is assigned inside the condition; both tests guard
                  // against a missing problem reference id.
                  String refId = null;
                  if (problem.getRefID() != null
                      && (refId = problem.getRefID().toString()) != null) {
                    kloFile.addTraceLine(
                        tracelt.getId(),
                        traceLinelt.getLine(),
                        traceLinelt.getText(),
                        traceLinelt.getType().charAt(0),
                        Integer.parseInt(refId));
                  } else {
                    kloFile.addTraceLine(
                        tracelt.getId(),
                        traceLinelt.getLine(),
                        traceLinelt.getText(),
                        traceLinelt.getType().charAt(0));
                  }
                }
              }
            }
          }
        } catch (IllegalArgumentException e) {
          e.printStackTrace();
        } catch (IllegalAccessException e) {
          e.printStackTrace();
        }
        f.setAccessible(false);
      }
      // Adding a new entry in the map corresponding to the file name without its path
      String fileName = kloFile.get("file");
      String fileNameWithoutPath = extractFileName(fileName, "\\");
      if (fileName.equals(fileNameWithoutPath)) {
        fileNameWithoutPath = extractFileName(fileName, "/");
      }
      kloFile.store("fileNameOnly", fileNameWithoutPath);
      // Severity levels above 3 are considered high.
      if (Integer.parseInt((String) kloFile.get("severitylevel")) > 3) {
        highSeverities.add(kloFile);
      } else {
        lowSeverities.add(kloFile);
      }
      errors.add(kloFile);
      agregateMap.put(kloFile.getKey(), kloFile);
      kloFiles.add(kloFile);
      i++;
    }
    if (!lowSeverities.isEmpty()) {
      report.setLowSeverities(lowSeverities);
    }
    if (!highSeverities.isEmpty()) {
      report.setHighSeverities(highSeverities);
    }
    report.setErrors(errors);
  } catch (JAXBException e) {
    // NOTE(review): JAXB failures are swallowed and an incomplete report returned.
    e.printStackTrace();
  }
  return report;
}
/**
 * Reads the names from a file into the {@link #names local array}.
 *
 * @see IO#readSortedStrings(String)
 */
@Override
public void prepare() {
  super.prepare();
  final String source = "data-files/5000names.txt";
  this.names = IO.readSortedStrings(source);
}
/** Delegates to the wrapped problem and returns its solution string. */
public String solution() {
  final String answer = problem.solution();
  return answer;
}
/**
 * Trains (and optionally cross-validates or tests) a liblinear model for aspect
 * categorization. Features come from {@link #generateFeature}; training labels are read
 * from dataset\trainingLabels.txt; predictions are written to
 * dataset\dataset_aspectCategorization\predictedHotelsLabels.txt.
 *
 * @param option 1 = 5-fold cross-validation on the training data; 3 = predict on testFile
 * @param trainFile training data file name (under dataset\dataset_aspectCategorization)
 * @param testFile test data file name (under dataset\dataset_aspectCategorization)
 * @param ddgFile dependency file forwarded to feature generation
 * @throws IOException if any of the data files cannot be read or written
 */
private void mainClassifierFunction(int option, String trainFile, String testFile, String ddgFile)
    throws IOException {
  // SentimentClassifierHindi this = new SentimentClassifierHindi();
  // int finalSize = this.SentimentClassifierHindi();
  int finalSize = this.generateFeature(option, trainFile, testFile, ddgFile);
  System.out.println("Hello aspectCategorizationSemEval2016!");
  // Create features
  Problem problem = new Problem();
  // Save X to problem
  // Labels: one double per training instance, read line by line.
  double a[] = new double[this.trainingFeature.size()];
  File file = new File(rootDirectory + "\\dataset\\trainingLabels.txt");
  BufferedReader reader = new BufferedReader(new FileReader(file));
  String read;
  int count = 0;
  while ((read = reader.readLine()) != null) {
    // System.out.println(read);
    a[count++] = Double.parseDouble(read.toString());
  }
  // Feature[][] f = new Feature[][]{ {}, {}, {}, {}, {}, {} };
  // trainingFeature = trainingObject.getList();
  // Dense feature matrix: absent feature keys become explicit 0.0 nodes.
  Feature[][] trainFeatureVector = new Feature[trainingFeature.size()][finalSize];
  System.out.println("Training Instances: " + trainingFeature.size());
  System.out.println("Feature Length: " + finalSize);
  System.out.println("Test Instances: " + testFeature.size());
  for (int i = 0; i < trainingFeature.size(); i++) {
    // System.out.println();
    // System.out.println(trainingFeature.get(i));
    System.out.println(i + " trained.");
    for (int j = 0; j < finalSize; j++) {
      // System.out.print(trainingFeature.get(i).get(j + 1)+" ");
      // trainingFeature.get(i).
      if (trainingFeature.get(i).containsKey(j + 1)) {
        // System.out.print(j + 1 + ", ");
        trainFeatureVector[i][j] = new FeatureNode(j + 1, trainingFeature.get(i).get(j + 1));
      } else {
        trainFeatureVector[i][j] = new FeatureNode(j + 1, 0.0);
      }
    }
    // System.out.println();
  }
  problem.l = trainingFeature.size(); // number of training examples
  problem.n = finalSize; // number of features
  problem.x = trainFeatureVector; // feature nodes
  problem.y = a; // target values ----
  BasicParser bp = new BasicParser();
  SolverType solver = SolverType.L2R_LR; // -s 7
  double C = 0.75; // cost of constraints violation
  double eps = 0.0001; // stopping criteria
  Parameter parameter = new Parameter(solver, C, eps);
  Model model = Linear.train(problem, parameter);
  File modelFile = new File("model");
  model.save(modelFile);
  // PrintWriter write = new PrintWriter(new BufferedWriter(new FileWriter(rootDirectory +
  // "\\dataset\\predictedLabels.txt")));
  PrintWriter write =
      new PrintWriter(
          new BufferedWriter(
              new FileWriter(
                  rootDirectory
                      + "\\dataset\\dataset_aspectCategorization\\predictedHotelsLabels.txt")));
  if (option == 1) {
    // Cross-validation path: 5 folds over the training set.
    BufferedReader trainReader =
        new BufferedReader(
            new FileReader(
                new File(
                    rootDirectory + "\\dataset\\dataset_aspectCategorization\\" + trainFile)));
    HashMap<String, Integer> id = new HashMap<String, Integer>();
    HashMap<String, String> review = new HashMap<String, String>();
    double[] val = new double[trainingFeature.size()];
    double[] tempVal = new double[trainingFeature.size()];
    LinearCopy.crossValidation(problem, parameter, 5, val, tempVal);
    for (int i = 0; i < trainingFeature.size(); i++) {
      int flag = 0;
      String tokens[] = trainReader.readLine().split("\\|");
      // Skip duplicate review ids and instances flagged "True" in the third field.
      if (id.containsKey(tokens[1]) == true || tokens[2].compareToIgnoreCase("True") == 0) {
      } else {
        // System.out.println(tokens[1]);
        /*int max = -1;
        double probMax = -1.0;
        for(int j=0; j<13; j++){
            if(probMax<val[i][j]){
                probMax = val[i][j];
                max = j;
            }
        }*/
        // System.out.println(tempVal[i]);
        write.println((int) (val[i]));
        write.println("next");
        id.put(tokens[1], 1);
        System.out.println(tokens[1] + "\t" + (int) (val[i]));
        if (review.containsKey(tokens[1])) {
          System.out.println(tokens[3]);
          System.out.println(review.get(tokens[1]));
        } else {
          review.put(tokens[1], tokens[3]);
        }
      }
      /*else{
          for (int j = 0; j < 13; j++) {
              //System.out.print(val[i][j]+", ");
              if (val[i] >= 0.185) {
                  flag = 1;
                  //System.out.println("i");
                  write.println(j + 1);
              }
          }
          if (flag == 1) {
              write.println("next");
          } else {
              write.println("-1");
              write.println("next");
          }
          //write.println(prediction);
          id.put(tokens[1], 1);
          //System.out.println();
      }*/
    }
    write.close();
    return;
  }
  if (option == 3) {
    // Test path: load the saved model and predict label probabilities per instance.
    System.out.println(rootDirectory);
    BufferedReader testReader =
        new BufferedReader(
            new FileReader(
                new File(
                    rootDirectory + "\\dataset\\dataset_aspectCategorization\\" + testFile)));
    HashMap<String, Integer> id = new HashMap<String, Integer>();
    model = Model.load(modelFile);
    int countNext = 0;
    for (int i = 0; i < testFeature.size(); i++) {
      // System.out.println(i+", "+testFeature.size()+", "+testFeature.get(i).size());
      // Sparse instance: only the features present in this test example.
      Feature[] instance = new Feature[testFeature.get(i).size()];
      int j = 0;
      for (Map.Entry<Integer, Double> entry : testFeature.get(i).entrySet()) {
        // System.out.print(entry.getKey() + ": " + entry.getValue() + "; ");
        // listOfMaps.get(i).put(start + entry.getKey(), entry.getValue());
        // do stuff
        instance[j++] = new FeatureNode(entry.getKey(), entry.getValue());
      }
      // double d = LinearCopy.predict(model, instance);
      double[] predict = new double[85];
      double prediction = LinearCopy.predictProbability(model, instance, predict);
      int labelMap[] = new int[13];
      labelMap = model.getLabels();
      for (int ar = 0; ar < labelMap.length; ar++) {
        System.out.println("********************** " + ar + ": " + labelMap[ar]);
      }
      // System.out.println(prediction);
      // Arrays.sort(predict, Collections.reverseOrder());
      // System.out.println();
      // double prediction = LinearCopy.predict(model, instance);
      String tokens[] = testReader.readLine().split("\\|");
      // System.out.println(tokens[1]);
      int flag = -1;
      if (id.containsKey(tokens[1]) == true || tokens[2].compareToIgnoreCase("True") == 0) {
        flag = 4;
        // System.out.println("OutofScope: "+tokens[1]);
      } else if (tokens[3].compareToIgnoreCase("abc") == 0) {
        flag = 2;
        System.out.println(tokens[1]);
        write.println("-1");
        write.println("next");
        countNext++;
        id.put(tokens[1], 1);
      } else {
        flag = 0;
        // Emit every label whose probability clears the 0.128 threshold.
        for (int p = 0; p < 85; p++) {
          if (predict[p] >= 0.128) {
            flag = 1;
            write.println(labelMap[p]);
          }
        }
        if (flag == 1) {
          countNext++;
          write.println("next");
        } else {
          countNext++;
          write.println("-1");
          write.println("next");
        }
        // write.println((int)d);
        // write.println("next");
        /*write.println(prediction);
        write.println("next");*/
        id.put(tokens[1], 1);
      }
      if (flag == -1) {
        System.out.println("-1, " + tokens[1]);
      }
    }
    write.close();
    System.out.println("count " + countNext);
  }
  write.close();
}
/*
 * (non-Javadoc)
 *
 * @see net.sf.javailp.Solver#solve(net.sf.javailp.Problem)
 */
// Translates the generic ILP problem into a CPLEX model, solves it, and copies the
// primal values back into a Result; returns null when infeasible or on CPLEX errors.
public Result solve(Problem problem) {
  Map<IloNumVar, Object> numToVar = new HashMap<IloNumVar, Object>();
  Map<Object, IloNumVar> varToNum = new HashMap<Object, IloNumVar>();
  try {
    IloCplex cplex = new IloCplex();
    initWithParameters(cplex);
    // Mirror every problem variable as a CPLEX variable with matching type and bounds.
    for (Object variable : problem.getVariables()) {
      VarType varType = problem.getVarType(variable);
      Number lowerBound = problem.getVarLowerBound(variable);
      Number upperBound = problem.getVarUpperBound(variable);
      // Missing bounds default to unbounded in that direction.
      double lb = (lowerBound != null ? lowerBound.doubleValue() : Double.NEGATIVE_INFINITY);
      double ub = (upperBound != null ? upperBound.doubleValue() : Double.POSITIVE_INFINITY);
      final IloNumVarType type;
      switch (varType) {
        case BOOL:
          type = IloNumVarType.Bool;
          break;
        case INT:
          type = IloNumVarType.Int;
          break;
        default: // REAL
          type = IloNumVarType.Float;
          break;
      }
      IloNumVar num = cplex.numVar(lb, ub, type);
      numToVar.put(num, variable);
      varToNum.put(variable, num);
    }
    // Translate each linear constraint with its comparison operator.
    for (Constraint constraint : problem.getConstraints()) {
      IloLinearNumExpr lin = cplex.linearNumExpr();
      Linear linear = constraint.getLhs();
      convert(linear, lin, varToNum);
      double rhs = constraint.getRhs().doubleValue();
      switch (constraint.getOperator()) {
        case LE:
          cplex.addLe(lin, rhs);
          break;
        case GE:
          cplex.addGe(lin, rhs);
          break;
        default: // EQ
          cplex.addEq(lin, rhs);
      }
    }
    // Objective is optional; direction follows the problem's OptType.
    if (problem.getObjective() != null) {
      IloLinearNumExpr lin = cplex.linearNumExpr();
      Linear objective = problem.getObjective();
      convert(objective, lin, varToNum);
      if (problem.getOptType() == OptType.MIN) {
        cplex.addMinimize(lin);
      } else {
        cplex.addMaximize(lin);
      }
    }
    // Give registered hooks a chance to inspect/adjust the model before solving.
    for (Hook hook : hooks) {
      hook.call(cplex, varToNum);
    }
    if (!cplex.solve()) {
      cplex.end();
      return null;
    }
    final Result result;
    if (problem.getObjective() != null) {
      Linear objective = problem.getObjective();
      result = new ResultImpl(objective);
    } else {
      result = new ResultImpl();
    }
    // Copy primal values back, rounding integer-typed variables to the nearest int.
    for (Entry<Object, IloNumVar> entry : varToNum.entrySet()) {
      Object variable = entry.getKey();
      IloNumVar num = entry.getValue();
      VarType varType = problem.getVarType(variable);
      double value = cplex.getValue(num);
      if (varType.isInt()) {
        int v = (int) Math.round(value);
        result.putPrimalValue(variable, v);
      } else {
        result.putPrimalValue(variable, value);
      }
    }
    cplex.end();
    return result;
  } catch (IloException e) {
    e.printStackTrace();
  }
  return null;
}
/** Formats problem {@code n}'s position as "line.character". */
private String getPositionFromProblem(int n) {
  final Problem problem = problems.get(n);
  return String.valueOf(problem.getLine()) + '.' + problem.getCharacter();
}
@Test
public void testGetSumOfMultiples() {
  final int[] factors = {3, 5};
  // Worked example below 10: 3 + 5 + 6 + 9 = 23.
  assertEquals(23, Problem.getSumOfMultiples(10, factors));
  // Known answer for the limit 1000.
  assertEquals(233168, Problem.getSumOfMultiples(1000, factors));
}
/**
 * Entry point for planning a problem that contains rels with subtasks.
 *
 * <p>Temporarily overrides the {@code maxDepth} field (restored in {@code finally}) and
 * dispatches to either incremental-deepening DFS ({@code isIncremental}) or a single DFS run
 * via {@code subtaskPlanningImpl}. If the subtask search does not solve the problem, falls
 * back to one final linear forward search.
 *
 * @param problem the problem being planned; supplies the rels with subtasks and the context
 * @param algorithm accumulator for the evaluation steps produced during planning
 * @return {@code true} if the problem was solved during the subtask search; otherwise the
 *     result of the final {@code linearForwardSearch} call
 */
private boolean subtaskPlanning(Problem problem, EvaluationAlgorithm algorithm) {

  if (isSubtaskLoggingOn())
    logger.debug("!!!--------- Starting Planning With Subtasks ---------!!!");

  // Remember the field value so it can be restored after planning — the code below
  // mutates maxDepth to steer the depth-limited search.
  final int maxDepthBackup = maxDepth;

  if (isSubtaskLoggingOn())
    logger.debug(
        "maxDepthBackup:" + maxDepthBackup + " sbt: " + problem.getRelsWithSubtasks().size());

  PlanningContext context = problem.getCurrentContext();

  try {
    Set<Rel> relsWithSubtasks = new LinkedHashSet<Rel>(problem.getRelsWithSubtasks());

    if (isIncremental) {
      // Iterative deepening: retry the DFS with an ever larger depth limit.
      // When repetition is allowed the limit is the backed-up maxDepth; otherwise it is
      // bounded by the number of rels with subtasks (each can appear at most once in a path).
      int incrementalDepth = 0;

      while (incrementalDepth
          <= (isSubtaskRepetitionAllowed
              ? maxDepthBackup
              : problem.getRelsWithSubtasks().size() - 1)) {
        if (isSubtaskLoggingOn())
          logger.debug(
              "Incremental dfs, with max depth "
                  + (incrementalDepth + 1)
                  + " and "
                  + problem.getRelsWithSubtasks().size()
                  + " subtasks to solve");

        maxDepth = incrementalDepth++;

        // if we need to compute some specific goals, after reaching a certain depth, but not
        // the maximal depth, the problem may be solved and there is no need to go any deeper.
        if (subtaskPlanningImpl(context, relsWithSubtasks, algorithm, new LinkedList<Rel>(), 0)) {
          if (isSubtaskLoggingOn())
            logger.debug("The problem was solved during idfs after some intermediate MLB");
          return true;
        }

        if (isSubtaskLoggingOn())
          logger.debug("Unsolved subtask left: " + problem.getRelsWithSubtasks().size());
      }

      if (isSubtaskLoggingOn()) logger.debug("Fininshed incremental dfs");

    } else {
      // Single (non-incremental) DFS run. Without repetition the depth limit is again
      // bounded by the number of distinct rels with subtasks.
      if (!isSubtaskRepetitionAllowed) {
        maxDepth = problem.getRelsWithSubtasks().size() - 1;
      }

      if (isSubtaskLoggingOn())
        logger.debug("Starting subtask dfs with maxDepth: " + (maxDepth + 1));

      if (subtaskPlanningImpl(context, relsWithSubtasks, algorithm, new LinkedList<Rel>(), 0)) {
        if (isSubtaskLoggingOn())
          logger.debug("The problem was solved during dfs after some intermediate MLB");
        return true;
      }
    }
  } finally {
    if (isSubtaskLoggingOn()) logger.debug("Fininshed dfs");

    // Restore the planner's state regardless of outcome.
    maxDepth = maxDepthBackup;
    indSubtasks.clear();
  }

  if (isSubtaskLoggingOn()) logger.debug("Invoking final linear planning");

  // Last chance: the subtask search may have made enough progress for plain
  // linear planning to finish the job.
  return linearForwardSearch(context, algorithm, computeAll);
}
/**
 * Goal-driven recursive (depth-first, exhaustive) search with backtracking.
 *
 * <p>Repeatedly builds Maximal Linear Branches (MLB): picks an applicable rel with subtasks
 * (OR choice), tries to solve each of its subtasks (AND children) — recursing deeper when a
 * dependent subtask cannot be solved linearly — and, on success, adds the rel's outputs to the
 * known/found variables and removes it from the working set. Returns {@code true} as soon as an
 * intermediate linear search solves the (sub)problem.
 *
 * <p>NOTE(review): the original Javadoc listed {@code problem}, which is not a parameter;
 * corrected to match the signature.
 *
 * @param context current planning context (known/found vars, goals); mutated as rels are applied
 * @param relsWithSubtasks rels still to be solved; at depth 0 the solved ones are removed on exit
 * @param algorithm accumulator that receives a {@code PlanningResult} per solved rel
 * @param subtaskRelsInPath chain of rels already being expanded (used to block cycles/repeats)
 * @param depth current recursion depth; bounded by the {@code maxDepth} field
 * @return {@code true} if the (sub)problem was solved during an intermediate linear search;
 *     {@code false} when no more MLBs can be constructed
 */
private boolean subtaskPlanningImpl(
    PlanningContext context,
    Set<Rel> relsWithSubtasks,
    EvaluationAlgorithm algorithm,
    LinkedList<Rel> subtaskRelsInPath,
    int depth) {

  // Work on a copy so the caller's set is only pruned at depth 0 (see method end).
  Set<Rel> relsWithSubtasksCopy = new LinkedHashSet<Rel>(relsWithSubtasks);

  Set<Rel> relsWithSubtasksToRemove = new LinkedHashSet<Rel>();

  boolean firstMLB = true;

  // start building Maximal Linear Branch (MLB)
  MLB:
  while (!relsWithSubtasksCopy.isEmpty()) {

    if (isSubtaskLoggingOn()) {
      String print = p(depth) + "Starting new MLB with: ";
      for (Rel rel : relsWithSubtasksCopy) {
        print +=
            "\n" + p(depth) + " " + rel.getParent().getFullName() + " : " + rel.getDeclaration();
      }
      /*
       * print += "\n" + p( depth ) + " All remaining rels in problem:";
       * for ( Rel rel : problem.getAllRels() ) {
       *   print += "\n" + p( depth ) + " " + rel.getParentObjectName() + " : " + rel.getDeclaration();
       * }
       * print += "\n" + p( depth ) + "All found variables: ";
       * for ( Var var : problem.getFoundVars() ) {
       *   print += "\n" + p( depth ) + " " + var.toString();
       * }
       */
      logger.debug(print);
    }

    // if this is a first attempt to construct an MLB to solve a subtask (i.e. depth>0),
    // do not invoke linear planning because it has already been done
    if ((depth == 0) || !firstMLB) {

      boolean solvedIntermediately = linearForwardSearch(context, algorithm, true);

      // Having constructed some MLBs the (sub)problem may be solved
      // and there is no need in wasting precious time planning unnecessary branches
      if (solvedIntermediately
          && ( // on the top level optimize only if computing goals
          (depth == 0 && !computeAll)
              // otherwise (inside subtasks) always optimize
              || (depth != 0))) {

        // If the problem is solved, optimize and return
        if (!isOptDisabled) Optimizer.optimize(context, algorithm);
        return true;
      }
    } else {
      firstMLB = false;
    }

    // or children — try each remaining rel with subtasks as the next OR alternative
    OR:
    for (Iterator<Rel> subtaskRelIterator = relsWithSubtasksCopy.iterator();
        subtaskRelIterator.hasNext(); ) {

      Rel subtaskRel = subtaskRelIterator.next();

      if (isSubtaskLoggingOn())
        logger.debug(
            p(depth)
                + "OR: depth: "
                + (depth + 1)
                + " rel - "
                + subtaskRel.getParent().getFullName()
                + " : "
                + subtaskRel.getDeclaration());

      // Skip rels that are directly self-nested, not ready (unknown inputs),
      // already fully produced, or (when repetition is forbidden) already on the path.
      if (subtaskRel.equals(subtaskRelsInPath.peekLast())
          || (!context.isRelReadyToUse(subtaskRel))
          || context.getFoundVars().containsAll(subtaskRel.getOutputs())
          || (!isSubtaskRepetitionAllowed && subtaskRelsInPath.contains(subtaskRel))) {

        if (isSubtaskLoggingOn()) {
          logger.debug(p(depth) + "skipped");
          if (!context.isRelReadyToUse(subtaskRel)) {
            logger.debug(p(depth) + "because it has unknown inputs"); // TODO print unknown
          } else if (context.getFoundVars().containsAll(subtaskRel.getOutputs())) {
            logger.debug(p(depth) + "because all outputs in FoundVars");
          } else if (subtaskRel.equals(subtaskRelsInPath.peekLast())) {
            logger.debug(p(depth) + "because it is nested in itself");
          } else if (!isSubtaskRepetitionAllowed && subtaskRelsInPath.contains(subtaskRel)) {
            logger.debug(
                p(depth)
                    + "This rel with subtasks is already in use, path: "
                    + subtaskRelsInPath);
          }
        }
        continue OR;
      }

      // Extend the path with this rel for the recursive calls below.
      LinkedList<Rel> newPath = new LinkedList<Rel>(subtaskRelsInPath);
      newPath.add(subtaskRel);

      PlanningResult result = new PlanningResult(subtaskRel, true);

      // this is true if all subtasks are solvable
      boolean allSolved = true;

      // and children — every subtask of the chosen rel must be solved
      AND:
      for (SubtaskRel subtask : subtaskRel.getSubtasks()) {

        if (isSubtaskLoggingOn()) logger.debug(p(depth) + "AND: subtask - " + subtask);

        EvaluationAlgorithm sbtAlgorithm = null;

        ////////////////////// INDEPENDENT SUBTASK////////////////////////////////////////
        if (subtask.isIndependent()) {
          if (isSubtaskLoggingOn()) logger.debug("Independent!!!");

          // isSolvable() is tri-state: null = not yet attempted, TRUE/FALSE = cached verdict.
          if (subtask.isSolvable() == null) {
            if (isSubtaskLoggingOn())
              logger.debug("Start solving independent subtask " + subtask.getDeclaration());
            // independent subtask is solved only once
            Problem problemContext = subtask.getContext();
            DepthFirstPlanner planner = new DepthFirstPlanner();
            planner.indSubtasks = indSubtasks;
            planner.nested = true;
            sbtAlgorithm = planner.invokePlaning(problemContext, isOptDisabled);
            PlanningContext indCntx = problemContext.getCurrentContext();
            boolean solved = indCntx.getFoundVars().containsAll(indCntx.getAllGoals());
            if (solved) {
              subtask.setSolvable(Boolean.TRUE);
              // Cache the algorithm so later encounters reuse it.
              indSubtasks.put(subtask, sbtAlgorithm);
              if (isSubtaskLoggingOn()) logger.debug("Solved " + subtask.getDeclaration());
            } else {
              subtask.setSolvable(Boolean.FALSE);
              if (RuntimeProperties.isLogInfoEnabled()) {
                logger.debug("Unable to solve " + subtask.getDeclaration());
              }
            }
            allSolved &= solved;
          } else if (subtask.isSolvable() == Boolean.TRUE) {
            if (isSubtaskLoggingOn()) logger.debug("Already solved");
            allSolved &= true;
            sbtAlgorithm = indSubtasks.get(subtask);
          } else {
            if (isSubtaskLoggingOn()) logger.debug("Not solvable");
            allSolved &= false;
          }
          if (isSubtaskLoggingOn()) logger.debug("End of independent subtask " + subtask);

          if (!allSolved) {
            continue OR;
          }
          assert sbtAlgorithm != null;
          result.addSubtaskAlgorithm(subtask, sbtAlgorithm);
        }
        ////////////////////// DEPENDENT SUBTASK//////////////////////////////////////
        else {
          // lets clone the environment
          PlanningContext newContext = prepareNewContext(context, subtask);

          sbtAlgorithm = new EvaluationAlgorithm();

          // during linear planning, if some goals are found, they are removed from the set
          // "goals"
          boolean solved =
              linearForwardSearch(
                  newContext,
                  sbtAlgorithm,
                  // do not optimize here, because the solution may require additional rels with
                  // subtasks
                  true);

          if (solved) {
            if (isSubtaskLoggingOn()) logger.debug(p(depth) + "SOLVED subtask: " + subtask);
            if (!isOptDisabled) {
              // if a subtask has been solved, optimize its algorithm
              Optimizer.optimize(newContext, sbtAlgorithm);
            }
            result.addSubtaskAlgorithm(subtask, sbtAlgorithm);
            allSolved &= solved;
            continue AND;
          } else if (!solved && (depth == maxDepth)) {
            // Depth limit reached: this OR alternative cannot be completed.
            if (isSubtaskLoggingOn())
              logger.debug(
                  p(depth) + "NOT SOLVED and cannot go any deeper, subtask: " + subtask);
            continue OR;
          }

          if (isSubtaskLoggingOn()) logger.debug(p(depth) + "Recursing deeper");

          solved =
              subtaskPlanningImpl(newContext, relsWithSubtasks, sbtAlgorithm, newPath, depth + 1);

          if (isSubtaskLoggingOn()) logger.debug(p(depth) + "Back to depth " + (depth + 1));

          // the linear planning has been performed at the end of MLB on the depth+1,
          // if the problem was solved, there is no need to run linear planning again
          if ((solved || (solved = linearForwardSearch(newContext, sbtAlgorithm, true)))
              && !isOptDisabled) {
            // if solved, optimize here with full list of goals in order to get rid of
            // unnecessary subtask instances and other relations
            Optimizer.optimize(newContext, sbtAlgorithm);
          }

          if (isSubtaskLoggingOn())
            logger.debug(p(depth) + (solved ? "" : "NOT") + " SOLVED subtask: " + subtask);

          allSolved &= solved;

          // if at least one subtask is not solvable, try another
          // branch
          if (!allSolved) {
            continue OR;
          }
          result.addSubtaskAlgorithm(subtask, sbtAlgorithm);
        }
      } // AND

      if (allSolved) {
        // All subtasks solved: commit this rel's result and publish its outputs.
        algorithm.add(result);

        Set<Var> newVars = new LinkedHashSet<Var>();
        unfoldVarsToSet(subtaskRel.getOutputs(), newVars);
        context.getKnownVars().addAll(newVars);
        context.getFoundVars().addAll(newVars);
        subtaskRelIterator.remove();

        if (isSubtaskLoggingOn()) {
          logger.debug(
              p(depth)
                  + "SOLVED ALL SUBTASKS for "
                  + subtaskRel.getParent().getFullName()
                  + " : "
                  + subtaskRel.getDeclaration());
          logger.debug(p(depth) + "Updating the problem graph and continuing building new MLB");
        }
        // this is used for incremental dfs
        if (depth == 0) {
          relsWithSubtasksToRemove.add(subtaskRel);
        }
        continue MLB;
      }

      if (isSubtaskLoggingOn())
        logger.debug(
            p(depth)
                + "NOT SOLVED ALL subtasks, removing from path "
                + subtaskRel.getParent().getFullName()
                + " : "
                + subtaskRel.getDeclaration());
      // Backtrack: this rel did not work out on the current path.
      newPath.remove(subtaskRel);
    } // end OR

    // exit loop because there are no more rels with subtasks to be
    // applied
    // (i.e. no more rels can introduce new variables into the
    // algorithm)
    if (isSubtaskLoggingOn()) logger.debug(p(depth) + "No more MLB can be constructed");
    break MLB;
  }

  // incremental dfs, remove solved subtasks
  if (depth == 0) {
    relsWithSubtasks.removeAll(relsWithSubtasksToRemove);
  }

  return false;
}