public DefaultTagDescription clone() { try { final DefaultTagDescription clone = (DefaultTagDescription) super.clone(); clone.tagData = (HashMap<TagDefinitionKey, Boolean>) tagData.clone(); clone.defaultDefinitions = (HashMap<String, Boolean>) defaultDefinitions.clone(); clone.lookupKey = new TagDefinitionKey(null, null); return clone; } catch (CloneNotSupportedException e) { throw new IllegalStateException(e); } }
/* (non-Javadoc) * @see java.lang.Object#clone() */ @SuppressWarnings("unchecked") @Override public Object clone() { FileContextBuildMacroValues cloned = null; try { cloned = (FileContextBuildMacroValues) super.clone(); cloned.fValues = (HashMap<String, String>) fValues.clone(); cloned.fAllValues = (HashMap<String, String>) fAllValues.clone(); } catch (CloneNotSupportedException e) { /* should not happen when Cloneable is implemented; cloned stays null */ } return cloned; }
@Override protected void setValue(Object element, Object value) { HashMap<Integer, String> data = (HashMap<Integer, String>) element; HashMap<Integer, String> oldDataMap = (HashMap<Integer, String>) data.clone(); String oldData = data.get(columnIndex) == null ? "" : data.get(columnIndex); if (oldData.equals(value.toString())) return; // Validate that the input value is well-formed. String colType = tableDataTypeList.get(columnIndex - 1); if (!DataTypeValidate.isValid(editPart.getUserDB(), colType, value.toString())) { MessageDialog.openError( null, Messages.TextViewerEditingSupport_0, Messages.TextViewerEditingSupport_1 + value + Messages.TextViewerEditingSupport_2 + colType + Messages.TextViewerEditingSupport_3); return; } // If this is not an insert if (!TbUtils.isInsert(data.get(0))) { data.put(0, TbUtils.getColumnText(TbUtils.COLUMN_MOD_TYPE.UPDATE)); } // Mark the data as modified data.put(columnIndex, TbUtils.getModifyData(value.toString())); editPart.setModifyButtonControl(); viewer.refresh(); }
public Object clone() { XMLTag ThisClone = new XMLTag(TagName, TagValue, IsTemporary); ThisClone.AttributeList = (HashMap) AttributeList.clone(); for (int i = 0; i < ChildTags.size(); i++) { XMLTag TagToClone = (XMLTag) ChildTags.get(i); ThisClone.addTag((XMLTag) TagToClone.clone()); } return ThisClone; }
/** @deprecated unused */ public Map<String, Long> getTimestamps() { if (_log.shouldLog(Log.INFO)) { synchronized (this) { locked_initTimestamps(); return (Map<String, Long>) _timestamps.clone(); } } return Collections.EMPTY_MAP; }
// Get all metars private HashMap<String, Metar> removeMetarsFromQueue() { HashMap<String, Metar> metars; synchronized (mQueueMetar) { metars = (HashMap<String, Metar>) mQueueMetar.clone(); // Done, queue cleared mQueueMetar.clear(); } return metars; }
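The snippet above copies the pending queue under its monitor and clears it in the same synchronized block, so producers can keep adding entries while the caller works on the snapshot. A minimal standalone sketch of that copy-and-clear drain pattern, assuming nothing about the original classes (all names below are illustrative):

import java.util.HashMap;

public class DrainExample {
    // Pending items, guarded by their own monitor.
    private final HashMap<String, String> queue = new HashMap<>();

    public void enqueue(String key, String value) {
        synchronized (queue) {
            queue.put(key, value);
        }
    }

    // Copy-and-clear under the lock, then work on the snapshot outside it.
    @SuppressWarnings("unchecked")
    public HashMap<String, String> drain() {
        HashMap<String, String> snapshot;
        synchronized (queue) {
            snapshot = (HashMap<String, String>) queue.clone();
            queue.clear();
        }
        return snapshot;
    }

    public static void main(String[] args) {
        DrainExample d = new DrainExample();
        d.enqueue("EDDF", "report ...");
        System.out.println(d.drain()); // {EDDF=report ...}
        System.out.println(d.drain()); // {} - the queue was cleared
    }
}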
/** * Java 5.0 compatible method to get the full map of properties. The properties use the KEY_ keys * defined in this class. */ public Map properties() { Map ret; if (props == null) { ret = new HashMap(0); } else { ret = (HashMap) (props.clone()); } return Collections.unmodifiableMap(ret); }
/** * Obtain an unmodifiable map of properties. The concept of properties is further explained in the * {@link AudioFileFormat class description}. * * @return a <code>Map<String,Object></code> object containing all properties. If no * properties are recognized, an empty map is returned. * @see #getProperty(String) * @since 1.5 */ public Map<String, Object> properties() { Map<String, Object> ret; if (properties == null) { ret = new HashMap<String, Object>(0); } else { ret = (Map<String, Object>) (properties.clone()); } return (Map<String, Object>) Collections.unmodifiableMap(ret); }
/** * Returns a shallow copy of this {@code HashSet} instance: the elements themselves are not * cloned. * * @return a shallow copy of this set */ @SuppressWarnings("unchecked") public Object clone() { try { HashSet<E> newSet = (HashSet<E>) super.clone(); newSet.map = (HashMap<E, Object>) map.clone(); return newSet; } catch (CloneNotSupportedException e) { throw new InternalError(e); } }
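As the Javadoc above says, HashSet.clone() copies the backing HashMap but not the elements. A small demonstration of what "shallow" means here:

import java.util.HashSet;

public class ShallowCloneDemo {
    public static void main(String[] args) {
        HashSet<StringBuilder> original = new HashSet<>();
        StringBuilder shared = new StringBuilder("a");
        original.add(shared);

        @SuppressWarnings("unchecked")
        HashSet<StringBuilder> copy = (HashSet<StringBuilder>) original.clone();

        copy.add(new StringBuilder("b"));   // structural change affects only the copy
        System.out.println(original.size() + " / " + copy.size()); // 1 / 2

        shared.append("!");                 // element mutation is visible through both sets
        System.out.println(original.iterator().next());            // a!
        System.out.println(copy.contains(shared));                 // true - same element object
    }
}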
@SuppressWarnings("unchecked") private void updatePrivilegesInSectionLevel( String sectionName, HashMap<String, Boolean> privileges) { for (String privilegeName : privileges.keySet()) { if (!isPrivilegeNameValidForSectionLevel(privilegeName)) { return; } } sectionLevel.put(sectionName, (HashMap<String, Boolean>) privileges.clone()); }
/** * Returns a shallow clone of the information, as obtained by the {@link Object} implementation of * {@link Object#clone()}. The map is also cloned, but it still references the same objects. * * @return a shallow clone of this {@link TabularDataSupport}. */ @SuppressWarnings("unchecked") public Object clone() { TabularDataSupport clone = null; try { clone = (TabularDataSupport) super.clone(); clone.setMap((HashMap<Object, Object>) dataMap.clone()); } catch (CloneNotSupportedException e) { /* This won't happen as we implement Cloneable */ } return clone; }
public void assignOrphans() { orphanTransmitters = (HashMap<Coord4D, IGridTransmitter>) newOrphanTransmitters.clone(); newOrphanTransmitters.clear(); if (MekanismAPI.debug && !orphanTransmitters.isEmpty()) { logger.info("Dealing with " + orphanTransmitters.size() + " orphan Transmitters"); } for (IGridTransmitter orphanTransmitter : ((Map<Coord4D, IGridTransmitter>) orphanTransmitters.clone()).values()) { DynamicNetwork network = getNetworkFromOrphan(orphanTransmitter); if (network != null) { networksToChange.add(network); network.register(); } } orphanTransmitters.clear(); }
@Test public void testChargeCapture() throws StripeException { HashMap<String, Object> options = (HashMap<String, Object>) defaultChargeParams.clone(); options.put("capture", false); Charge created = Charge.create(options); assertFalse(created.getCaptured()); Charge captured = created.capture(); assertTrue(captured.getCaptured()); }
@Override public void eval( Document doc, int posn, String tokenString, HashMap bindings, PatternApplication patap, PatternNode node) { bindings = (HashMap) bindings.clone(); bindings.put(variable.name, new Integer(posn)); node.eval(doc, posn, bindings, patap); }
/** * Copy constructor. * * @param featureWeights */ @SuppressWarnings("unchecked") protected DPBasicModel(HashMap<Integer, AveragedParameter> featureWeights) { this.featureWeights = (HashMap<Integer, AveragedParameter>) featureWeights.clone(); for (Entry<Integer, AveragedParameter> entry : this.featureWeights.entrySet()) { try { entry.setValue(entry.getValue().clone()); } catch (CloneNotSupportedException e) { // This should never happen. LOG.error("Cloning DP basic model", e); } } }
/** * @return A clone of this Spatial, the scene graph in its entirety is cloned and can be altered * independently of the original scene graph. * <p>Note that meshes of geometries are not cloned explicitly, they are shared if static, or * specially cloned if animated. * <p>All controls will be cloned using the Control.cloneForSpatial method on the clone. * @see Mesh#cloneForAnim() */ public Spatial clone(boolean cloneMaterial) { try { Spatial clone = (Spatial) super.clone(); if (worldBound != null) { clone.worldBound = worldBound.clone(); } clone.worldLights = worldLights.clone(); clone.localLights = localLights.clone(); // Set the new owner of the light lists clone.localLights.setOwner(clone); clone.worldLights.setOwner(clone); // No need to force cloned to update. // This node already has the refresh flags // set below so it will have to update anyway. clone.worldTransform = worldTransform.clone(); clone.localTransform = localTransform.clone(); if (clone instanceof Node) { Node node = (Node) this; Node nodeClone = (Node) clone; nodeClone.children = new SafeArrayList<Spatial>(Spatial.class); for (Spatial child : node.children) { Spatial childClone = child.clone(cloneMaterial); childClone.parent = nodeClone; nodeClone.children.add(childClone); } } clone.parent = null; clone.setBoundRefresh(); clone.setTransformRefresh(); clone.setLightListRefresh(); clone.controls = new SafeArrayList<Control>(Control.class); for (int i = 0; i < controls.size(); i++) { Control newControl = controls.get(i).cloneForSpatial(clone); newControl.setSpatial(clone); clone.controls.add(newControl); } if (userData != null) { clone.userData = (HashMap<String, Savable>) userData.clone(); } return clone; } catch (CloneNotSupportedException ex) { throw new AssertionError(); } }
public GenericContainer parse(String str) { // // Try the naive parse // ParseResult pr = internalParse(str, null, true); if (pr != null && pr.hasData()) { return (GenericContainer) pr.getData(); } // // Otherwise, we need to consider other union-options. // Unfold the candidate decisions into a series of target decisions // Map<String, Set<Integer>> candidateUnionDecisions = findCandidateUnionDecisions(); List<HashMap<String, Integer>> allUnionDecisions = new ArrayList<HashMap<String, Integer>>(); for (Map.Entry<String, Set<Integer>> pair : candidateUnionDecisions.entrySet()) { String k = pair.getKey(); Set<Integer> indices = pair.getValue(); if (allUnionDecisions.size() == 0) { for (Integer index : indices) { HashMap<String, Integer> newMap = new HashMap<String, Integer>(); newMap.put(k, index); allUnionDecisions.add(newMap); } } else { List<HashMap<String, Integer>> newUnionDecisions = new ArrayList<HashMap<String, Integer>>(); for (HashMap<String, Integer> curUnionDecisions : allUnionDecisions) { for (Integer index : indices) { HashMap<String, Integer> newMap = (HashMap<String, Integer>) curUnionDecisions.clone(); newMap.put(k, index); newUnionDecisions.add(newMap); } } allUnionDecisions = newUnionDecisions; } } // // Now execute all possible union decisions // for (Map<String, Integer> targetUnionDecisions : allUnionDecisions) { pr = internalParse(str, targetUnionDecisions, true); if (pr != null && pr.hasData()) { return (GenericContainer) pr.getData(); } } return null; }
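The middle of parse() expands the candidate union decisions into their full Cartesian product by cloning each partially built decision map and extending it with every index of the next key. The same expansion in isolation, with illustrative inputs (nothing below comes from the original parser):

import java.util.*;

public class UnionDecisionExpansion {
    // Expand {key -> {choices}} into every complete assignment {key -> choice}.
    @SuppressWarnings("unchecked")
    static List<HashMap<String, Integer>> expand(Map<String, Set<Integer>> candidates) {
        List<HashMap<String, Integer>> all = new ArrayList<>();
        for (Map.Entry<String, Set<Integer>> e : candidates.entrySet()) {
            if (all.isEmpty()) {
                for (Integer idx : e.getValue()) {
                    HashMap<String, Integer> m = new HashMap<>();
                    m.put(e.getKey(), idx);
                    all.add(m);
                }
            } else {
                List<HashMap<String, Integer>> next = new ArrayList<>();
                for (HashMap<String, Integer> partial : all) {
                    for (Integer idx : e.getValue()) {
                        // Clone the partial assignment so each branch stays independent.
                        HashMap<String, Integer> m = (HashMap<String, Integer>) partial.clone();
                        m.put(e.getKey(), idx);
                        next.add(m);
                    }
                }
                all = next;
            }
        }
        return all;
    }

    public static void main(String[] args) {
        Map<String, Set<Integer>> c = new LinkedHashMap<>();
        c.put("a", new TreeSet<>(Arrays.asList(0, 1)));
        c.put("b", new TreeSet<>(Arrays.asList(0, 1, 2)));
        System.out.println(expand(c)); // 2 * 3 = 6 assignments
    }
}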
public Object clone() { MSProvider prov = null; try { prov = (MSProvider) super.clone(); prov.provCom = (MSProviderComputing) provCom.clone(); prov.provNet = (MSProviderNetwork) provNet.clone(); prov.provSto = (MSProviderStorage) provSto.clone(); prov.characteristic = (HashMap<String, Object>) characteristic.clone(); } catch (CloneNotSupportedException e) { // TODO Auto-generated catch block e.printStackTrace(); } return prov; }
/** * Clones this Element, the datasource and the private stylesheet of this Element. The clone does * no longer have a parent, as the old parent would not recognize that new object anymore. * * @return a clone of this Element. */ public Element clone() { try { final Element e = (Element) super.clone(); e.style = (InternalElementStyleSheet) style.clone(); e.datasource = datasource.clone(); e.parent = null; e.style.updateElementReference(e); e.elementContext = null; if (attributeExpressions != null) { e.attributes = attributes.clone(); e.attributeExpressions = attributeExpressions.clone(); final String[] namespaces = e.attributeExpressions.getNameSpaces(); for (int i = 0; i < namespaces.length; i++) { final String namespace = namespaces[i]; final Map<String, Expression> attrsNs = attributeExpressions.getAttributes(namespace); for (final Map.Entry<String, Expression> entry : attrsNs.entrySet()) { final Expression exp = entry.getValue(); e.attributeExpressions.setAttribute( namespace, entry.getKey(), (Expression) exp.clone()); } } } else { if (e.cachedAttributes != null) { e.attributes = attributes; e.copyOnWrite = true; copyOnWrite = true; } else { e.copyOnWrite = false; e.attributes = attributes.clone(); } } if (styleExpressions != null) { e.styleExpressions = (HashMap<StyleKey, Expression>) styleExpressions.clone(); for (final Map.Entry<StyleKey, Expression> entry : e.styleExpressions.entrySet()) { final Expression exp = entry.getValue(); entry.setValue((Expression) exp.clone()); } } return e; } catch (CloneNotSupportedException cne) { throw new IllegalStateException(cne); } }
/** Cancels all property animations that are currently running or pending. */ public void cancel() { if (mAnimatorMap.size() > 0) { HashMap<Animator, PropertyBundle> mAnimatorMapCopy = (HashMap<Animator, PropertyBundle>) mAnimatorMap.clone(); Set<Animator> animatorSet = mAnimatorMapCopy.keySet(); for (Animator runningAnim : animatorSet) { runningAnim.cancel(); } } mPendingAnimations.clear(); mPendingSetupAction = null; mPendingCleanupAction = null; mPendingOnStartAction = null; mPendingOnEndAction = null; mView.removeCallbacks(mAnimationStarter); if (mRTBackend != null) { mRTBackend.cancelAll(); } }
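mAnimatorMap is cloned before iteration because cancelling an animator fires end callbacks that remove entries from the live map; walking a snapshot avoids a ConcurrentModificationException. A stripped-down sketch of that idea with plain java.util types (the Anim interface and field names are illustrative):

import java.util.HashMap;

public class SnapshotCancelDemo {
    interface Anim { void cancel(); }

    static final HashMap<Anim, String> animators = new HashMap<>();

    public static void main(String[] args) {
        for (int i = 0; i < 3; i++) {
            animators.put(new Anim() {
                public void cancel() { animators.remove(this); } // mutates the live map
            }, "anim-" + i);
        }
        // Iterating the live key set here would throw ConcurrentModificationException,
        // because cancel() removes entries; walk a clone instead, as the snippet above does.
        @SuppressWarnings("unchecked")
        HashMap<Anim, String> snapshot = (HashMap<Anim, String>) animators.clone();
        for (Anim a : snapshot.keySet()) {
            a.cancel();
        }
        System.out.println(animators.size()); // 0
    }
}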
public SelectSubMissionD( java.awt.Frame parent, boolean modal, MissionPlanSpecification parentMSpec, ProjectSpecification pSpec, ArrayList<MissionPlanSpecification> existingSubMSpecs, HashMap<MissionPlanSpecification, Boolean> existingMSpecToIsSharedInstance, HashMap<MissionPlanSpecification, HashMap<TaskSpecification, TaskSpecification>> existingMSpecTaskMap) { super(parent, modal); this.parentMSpec = parentMSpec; this.pSpec = pSpec; if (existingSubMSpecs == null) { this.subMSpecs = new ArrayList<MissionPlanSpecification>(); } else { this.subMSpecs = (ArrayList<MissionPlanSpecification>) existingSubMSpecs.clone(); } if (existingMSpecToIsSharedInstance == null) { this.mSpecToIsSharedInstance = new HashMap<MissionPlanSpecification, Boolean>(); } else { this.mSpecToIsSharedInstance = (HashMap<MissionPlanSpecification, Boolean>) existingMSpecToIsSharedInstance.clone(); } if (existingMSpecTaskMap == null) { this.mSpecTaskMap = new HashMap<MissionPlanSpecification, HashMap<TaskSpecification, TaskSpecification>>(); } else { this.mSpecTaskMap = (HashMap<MissionPlanSpecification, HashMap<TaskSpecification, TaskSpecification>>) existingMSpecTaskMap.clone(); } initComponents(); setTitle("SelectSubMissionD"); }
public MultipartRequest( String url, Response.Listener<String> rListener, Response.ErrorListener eListener, @Nullable HashMap<String, String> head, @NonNull String stringPart, @NonNull File file, @Nullable Map<String, Object> param) { super(Method.POST, url, rListener, eListener); header = null; if (head == null) { header = new HashMap<>(); } else { header = (HashMap) head.clone(); } String boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW"; header.put("Content-Type", "multipart/form-data; boundary=" + boundary); // build multi-part MultipartEntityBuilder entityBuilder = MultipartEntityBuilder.create(); entityBuilder.setBoundary(boundary); entityBuilder.addPart(stringPart, new FileBody(file)); if (param != null) { try { for (String key : param.keySet()) { Object obj = param.get(key); String input = obj.toString(); entityBuilder.addPart(key, new StringBody(input, ContentType.MULTIPART_FORM_DATA)); } } catch (Exception e) { VolleyLog.e("Fail to build multipart. " + e.getMessage()); e.printStackTrace(); } } entity = entityBuilder.build(); }
/** * Create a new TechnicalServicesProperties which is the combination of the properties passed as * argument with the current ones. The ones passed as argument override the current ones. * * @param techServ */ public TechnicalServicesProperties getCombinationWith(TechnicalServicesProperties techServ) { TechnicalServicesProperties res = new TechnicalServicesProperties((HashMap<String, HashMap<String, String>>) data.clone()); if (techServ != null && !techServ.isEmpty()) { for (Map.Entry<String, HashMap<String, String>> entry : techServ) { HashMap<String, String> classProperties = res.data.get(entry.getKey()); if (classProperties != null) { HashMap<String, String> cpClone = (HashMap<String, String>) classProperties.clone(); cpClone.putAll(entry.getValue()); res.data.put(entry.getKey(), cpClone); } else { res.data.put(entry.getKey(), entry.getValue()); } } } return res; }
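getCombinationWith clones the outer map and any inner map it overrides, so the merged result never aliases mutable state from either operand and the argument's entries win on conflict. A self-contained sketch of the same override-merge with plain nested HashMaps (method and variable names are illustrative):

import java.util.HashMap;
import java.util.Map;

public class OverrideMerge {
    @SuppressWarnings("unchecked")
    static HashMap<String, HashMap<String, String>> combine(
            HashMap<String, HashMap<String, String>> base,
            Map<String, HashMap<String, String>> override) {
        // Clone the outer map so the result is independent of 'base'.
        HashMap<String, HashMap<String, String>> result =
                (HashMap<String, HashMap<String, String>>) base.clone();
        for (Map.Entry<String, HashMap<String, String>> e : override.entrySet()) {
            HashMap<String, String> existing = result.get(e.getKey());
            if (existing != null) {
                // Clone the inner map before overriding so 'base' stays untouched.
                HashMap<String, String> merged = (HashMap<String, String>) existing.clone();
                merged.putAll(e.getValue());
                result.put(e.getKey(), merged);
            } else {
                result.put(e.getKey(), e.getValue());
            }
        }
        return result;
    }

    public static void main(String[] args) {
        HashMap<String, HashMap<String, String>> base = new HashMap<>();
        base.put("svc", new HashMap<>(Map.of("timeout", "30", "retries", "2")));
        HashMap<String, HashMap<String, String>> patch = new HashMap<>();
        patch.put("svc", new HashMap<>(Map.of("timeout", "60")));
        System.out.println(combine(base, patch)); // timeout overridden to 60, retries kept
        System.out.println(base);                 // base still maps timeout to 30
    }
}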
// Run RICO to induce our rules! public void runRICO( Set<List<String>> covering, List<String> decision, int minCoverage, boolean dropConditions) { // A hash map to store our results...this seems so unnecessary List<HashMap<List<HashMap<String, String>>, Integer>> results = new ArrayList<HashMap<List<HashMap<String, String>>, Integer>>(); for (List<String> oneCovering : covering) { // Initialize E to the set of all instances (For us, our original data) List<Attribute> E = relation.attributeData; // Make something to hold a single Hash of attributes and counts HashMap<List<HashMap<String, String>>, Integer> resultForSingleCovering = new HashMap<List<HashMap<String, String>>, Integer>(); for (int i = 0; i < E.get(0).instanceValues.size(); i++) { List<HashMap<String, String>> rules = new ArrayList<HashMap<String, String>>(); for (int j = 0; j < oneCovering.size(); j++) { HashMap<String, String> rulePair = new HashMap<String, String>(); rulePair.put( oneCovering.get(j), E.get(attributeIndexMap.get(oneCovering.get(j))).instanceValues.get(i)); rules.add(rulePair); } // Put in the decision as the last one for (int j = 0; j < decision.size(); j++) { HashMap<String, String> rulePair = new HashMap<String, String>(); rulePair.put( decision.get(j), E.get(attributeIndexMap.get(decision.get(j))).instanceValues.get(i)); rules.add(rulePair); } if (!resultForSingleCovering.containsKey(rules)) { resultForSingleCovering.put(rules, 1); } else { int incCount = (resultForSingleCovering.get(rules) + 1); resultForSingleCovering.put(rules, incCount); } } // Print more data to the GUI for the user UserInterface.logData.setText( UserInterface.logData.getText() + "Rules for covering " + oneCovering.toString() + "\n"); try { for (List<HashMap<String, String>> oneResult : resultForSingleCovering .keySet()) { // Something happens around here if the min coverage is higher than the // highest covering if (dropConditions) { // Here we want to prune! // This is gross. It can't be good. 
List<String> goodGuys = new ArrayList<String>(); for (HashMap<String, String> mapOneResult : oneResult) { for (String key : mapOneResult.keySet()) { // Should only be 1 in each of these if (!decision.contains(key) && checkNumOccurences(key, mapOneResult.get(key)) == resultForSingleCovering.get(oneResult)) { goodGuys.add(key); } } } List<HashMap<String, String>> tempForPrint = new ArrayList<HashMap<String, String>>(); for (HashMap<String, String> tired : oneResult) { System.out.println("Trying: " + tired); tempForPrint.add((HashMap<String, String>) tired.clone()); } boolean foundOne = false; for (HashMap<String, String> mapOneResult : oneResult) { for (String key : mapOneResult.keySet()) { if (!foundOne && !goodGuys.contains(key) && !decision.contains(key)) { System.out.println("Jack just got replaced!"); tempForPrint.get(oneResult.indexOf(mapOneResult)).put(key, "_"); foundOne = true; } } } // Only return results that are >= our minCoverage System.out.println( "Was failing: " + resultForSingleCovering.get(oneResult) + " " + minCoverage); if (resultForSingleCovering.get(oneResult) < minCoverage) { resultForSingleCovering.remove(oneResult); } else { UserInterface.logData.setText( UserInterface.logData.getText() + "[" + tempForPrint.toString() + ", " + resultForSingleCovering.get(oneResult) + "]\n"); } } else { // Only return results that are >= our minCoverage System.out.println( "Was failing: " + resultForSingleCovering.get(oneResult) + " " + minCoverage); if (resultForSingleCovering.get(oneResult) < minCoverage) { resultForSingleCovering.remove(oneResult); } else { UserInterface.logData.setText( UserInterface.logData.getText() + "[" + oneResult.toString() + ", " + resultForSingleCovering.get(oneResult) + "]\n"); } } } } catch (ConcurrentModificationException ex) { // Do nothing but fail gracefully } UserInterface.logData.setText(UserInterface.logData.getText() + "\n\n"); results.add(resultForSingleCovering); allCoverings = new HashSet<List<String>>(); } }
/** * This method is designed to clear out classloader file locks in windows. * * @param clazzLdr class loader to clean up */ public void closeClassLoader() { HashSet<String> closedFiles = new HashSet<String>(); try { Object obj = getFieldObject(URLClassLoader.class, "ucp", this); ArrayList<?> loaders = (ArrayList<?>) getFieldObject(obj.getClass(), "loaders", obj); for (Object ldr : loaders) { try { JarFile file = (JarFile) getFieldObject(ldr.getClass(), "jar", ldr); closedFiles.add(file.getName()); file.close(); } catch (Exception e) { // skip } } } catch (Exception e) { // skip } try { Vector<?> nativeLibArr = (Vector<?>) getFieldObject(ClassLoader.class, "nativeLibraries", this); for (Object lib : nativeLibArr) { try { Method fMethod = lib.getClass().getDeclaredMethod("finalize", new Class<?>[0]); fMethod.setAccessible(true); fMethod.invoke(lib, new Object[0]); } catch (Exception e) { // skip } } } catch (Exception e) { // skip } HashMap<?, ?> uCache = null; HashMap<?, ?> fCache = null; try { Class<?> jarUrlConnClass = null; try { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); jarUrlConnClass = contextClassLoader.loadClass("sun.net.www.protocol.jar.JarURLConnection"); } catch (Throwable skip) { // skip } if (jarUrlConnClass == null) { jarUrlConnClass = Class.forName("sun.net.www.protocol.jar.JarURLConnection"); } Class<?> factory = getFieldObject(jarUrlConnClass, "factory", null).getClass(); try { fCache = (HashMap<?, ?>) getFieldObject(factory, "fileCache", null); } catch (Exception e) { // skip } try { uCache = (HashMap<?, ?>) getFieldObject(factory, "urlCache", null); } catch (Exception e) { // skip } if (uCache != null) { Set<?> set = null; while (set == null) { try { set = ((HashMap<?, ?>) uCache.clone()).keySet(); } catch (ConcurrentModificationException e) { // Fix for BACKLOG-2149 - Do nothing - while loop will try again. } } for (Object file : set) { if (file instanceof JarFile) { JarFile jar = (JarFile) file; if (!closedFiles.contains(jar.getName())) { continue; } try { jar.close(); } catch (IOException e) { // skip } if (fCache != null) { fCache.remove(uCache.get(jar)); } uCache.remove(jar); } } } else if (fCache != null) { for (Object key : ((HashMap<?, ?>) fCache.clone()).keySet()) { Object file = fCache.get(key); if (file instanceof JarFile) { JarFile jar = (JarFile) file; if (!closedFiles.contains(jar.getName())) { continue; } try { jar.close(); } catch (IOException e) { // ignore } fCache.remove(key); } } } } catch (Exception e) { // skip e.printStackTrace(); } }
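The while loop over uCache shows a small idiom: when a map may be mutated concurrently and no lock is available, snapshot it via clone() and retry if the copy itself fails with a ConcurrentModificationException. The same idiom in isolation (the shared map below is illustrative, and this is best-effort only, not a substitute for proper synchronization):

import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Set;

public class RetrySnapshot {
    // Take a best-effort snapshot of a map we hold no lock for,
    // retrying if a concurrent writer invalidates the copy mid-clone.
    @SuppressWarnings("unchecked")
    static <K, V> Set<K> snapshotKeys(HashMap<K, V> shared) {
        Set<K> keys = null;
        while (keys == null) {
            try {
                keys = ((HashMap<K, V>) shared.clone()).keySet();
            } catch (ConcurrentModificationException e) {
                // Another thread modified the map during the copy; try again.
            }
        }
        return keys;
    }

    public static void main(String[] args) {
        HashMap<String, Integer> cache = new HashMap<>();
        cache.put("a.jar", 1);
        cache.put("b.jar", 2);
        System.out.println(snapshotKeys(cache)); // [a.jar, b.jar] (order not guaranteed)
    }
}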
@SuppressWarnings("unchecked") private void updatePointList() { for (SmoothPathSegment sps : spsMap.values()) sps.remove(); spsMap.clear(); for (LinearPathSegment lps : lpsMap.values()) lps.remove(); lpsMap.clear(); if (arrow != null) arrow.remove(); Set<PathPoint> pps = ((HashMap<PathPoint, PointVisual>) pvMap.clone()).keySet(); int s = pvList.size(); ActiveArrayList<PathPoint> pp = path.points; int s2 = pp.size(); while (s > s2) pvList.remove(--s); pvList.ensureCapacity(s2); for (int i = 0; i < s2; i++) { PathPoint p = pp.get(i); PointVisual v = pvMap.get(p); if (v == null) { v = new PointVisual(p); pvMap.put(p, v); } else { pps.remove(p); } if (i >= s) pvList.add(v); else pvList.set(i, v); } if (pps.contains(properties.get(PPathEditor.SELECTED_POINT))) properties.put(PPathEditor.SELECTED_POINT, null); for (PathPoint pathPoint : pps) pvMap.remove(pathPoint).remove(); if (path.get(PPath.SMOOTH)) { boolean closed = path.properties.get(PPath.CLOSED); if (s2 >= 3) { PathPoint[] rpp = new PathPoint[4]; for (int i = 0; i < 4; i++) rpp[i] = pp.get(i % s2); int i2 = closed ? s2 + 1 : s2 - 2; for (int i = 1; i < i2; i++) { PathPoint p = rpp[1]; spsMap.put(p, new SmoothPathSegment(rpp)); rpp = Arrays.copyOfRange(rpp, 1, 5); rpp[3] = pp.get((i + 3) % s2); } if (!closed) { PathPoint p = pp.get(0); spsMap.put(p, new SmoothPathSegment(null, p, pp.get(1), pp.get(2))); p = pp.get(s2 - 2); spsMap.put(p, new SmoothPathSegment(pp.get(s2 - 3), p, pp.get(s2 - 1), null)); } arrow = new PathArrow(spsMap.get(pp.get(0))); } } else { if (s2 >= 2) { for (int i = 0; i < s2 - 1; i++) { PathPoint p = path.points.get(i); lpsMap.put(p, new LinearPathSegment(p, path.points.get(i + 1))); } if (path.properties.get(PPath.CLOSED)) { PathPoint p = path.points.get(s2 - 1); lpsMap.put(p, new LinearPathSegment(p, path.points.get(0))); } arrow = new PathArrow(null); } } }
/** * run the learner * * @param opt Command line Options * @throws SQLException */ @SuppressWarnings("unchecked") public void run(CommandOptions opt) throws SQLException { // tally clause Config.learning_mode = true; Config.stop_samplesat_upon_sat = true; Config.track_clause_provenance = true; UIMan.println(">>> Learning clause weight with MC-SAT."); setUp(opt); ground(); Timer.runStat.markGroundingDone(); if (options.maxFlips == 0) { options.maxFlips = 100 * grounding.getNumAtoms(); } if (options.maxTries == 0) { options.maxTries = 3; } Learner.isHardMappings = new HashMap<String, Boolean>(); // String outputStr = ""; // INIT CURRENT WEIGHT, LOAD FROM CBUFFER TABLE Learner.currentWeight = new HashMap<String, Double>(); Learner.oriWeight = new HashMap<String, Double>(); Learner.finalWeight = new HashMap<String, Double>(); // Learner.currentD = new HashMap<String, Double>(); String sql = "SELECT DISTINCT weight, ffcid FROM " + "mln" + mln.getID() + "_cbuffer" + ";"; ResultSet rs = db.query(sql); while (rs.next()) { String ffcid = rs.getString("ffcid"); Double wght = rs.getDouble("weight"); String newCID = ffcid; if (newCID.charAt(0) == '-') { newCID = newCID.substring(1, newCID.length()); wght = -wght; } if (newCID.contains("fixed")) { wght = wght; } Learner.currentWeight.put(newCID, wght); Learner.oriWeight.put(newCID, wght); // Learner.currentD.put(newCID, 0.0); } rs.close(); // INIT TRAINING VIOLATION AND WEIGHT MRF _mcsat = new MRF(mln); dmover.loadMrfFromDb(_mcsat, mln.relAtoms, mln.relClauses); this.loadingTrainingData(_mcsat); // all positive violation for (String s : currentWeight.keySet()) { currentWeight.put(s, 1.0); } _mcsat.updateClauseWeights(currentWeight); _mcsat.updateClauseVoiTallies(); positiveWeightViolation = (HashMap<String, Long>) (_mcsat.clauseVioTallies.clone()); positiveWeightSatisfication = (HashMap<String, Long>) (_mcsat.clauseSatTallies.clone()); // all negative violation for (String s : currentWeight.keySet()) { currentWeight.put(s, -1.0); } _mcsat.updateClauseWeights(currentWeight); _mcsat.clauseVioTallies.clear(); _mcsat.clauseSatTallies.clear(); _mcsat.updateClauseVoiTallies(); negativeWeightViolation = (HashMap<String, Long>) (_mcsat.clauseVioTallies.clone()); negativeWeightSatisfication = (HashMap<String, Long>) (_mcsat.clauseSatTallies.clone()); Learner.currentWeight = (HashMap<String, Double>) Learner.oriWeight.clone(); this.calcCurrentTrainingViolation(); this.fillInCurrentWeight(_mcsat); _mcsat.updateClauseWeights(currentWeight); this.calcCurrentTrainingViolation(); // OUTPUT THE INIT. WEIGHT AND ID/CLAUSE MAPPING Object[] keySet = currentWeight.keySet().toArray(); java.util.Arrays.sort(keySet); UIMan.println("#################INIT. WEIGHT#################");
for (Object ss : keySet) { String s = (String) ss; UIMan.println( s + "\t" + Learner.currentWeight.get(s) + ":" + this.trainingSatisification.get(s) + "/" + this.trainingViolation.get(s) + "\t" + Clause.mappingFromID2Desc.get(s)); } UIMan.println(">>> Iteration Begins..."); _oldWeight = (HashMap<String, Double>) currentWeight.clone(); // options.nDIteration = 1; for (int i = 0; i < options.nDIteration; i++) { _mcsat.mcsat(options.mcsatSamples, options.maxFlips); if (updateWeight(_mcsat) == true) { break; } _mcsat.updateClauseWeights(Learner.currentWeight); if (this.backtracked == true) { UIMan.println("####BACKTRACKED####"); this.calcCurrentTrainingViolation(); i--; continue; } // CALC THE FINAL WEIGHT for (String s : Learner.currentWeight.keySet()) { double fw = Learner.finalWeight.get(s); fw = fw * (i + 1) + Learner.currentWeight.get(s); fw /= (i + 2); Learner.finalWeight.put(s, fw); } Object[] keySet1 = currentWeight.keySet().toArray(); java.util.Arrays.sort(keySet1); UIMan.println("#################ITERATION + " + i + "#################"); for (Object ss : keySet1) { String s = (String) ss; UIMan.println( s + "\t" + Learner.currentWeight.get(s) + "\t" + (_oldWeight.get(s) < currentWeight.get(s) ? "larger\t" : "smaller\t") + Learner.finalWeight.get(s) + "\t" + _mcsat.expectationOfViolation.get(s) + "->" + this.trainingViolation.get(s)); } this.calcCurrentTrainingViolation(); _oldWeight = (HashMap<String, Double>) currentWeight.clone(); } Object[] keySet1 = currentWeight.keySet().toArray(); java.util.Arrays.sort(keySet1); UIMan.println("#################FINAL WEIGHT#################"); for (Object ss : keySet1) { String s = (String) ss; UIMan.println( s + "\t" + Learner.currentWeight.get(s) + "\t" + Learner.finalWeight.get(s) + "\t" + _mcsat.expectationOfViolation.get(s) + "->" + this.trainingViolation.get(s)); } Timer.runStat.markInferDone(); UIMan.println(">>> Writing answer to file: " + options.fout); this.dumpAnswers(options.fout); cleanUp(); }
public SchemeEnvironmentImpl(HashMap m) { map = (HashMap) m.clone(); }
/** * Creates and returns a copy of this object. * * <p>Mutable second-class Objects are required to provide a public clone method in order to allow * for copying PersistenceCapable objects. In contrast to Object.clone(), this method must not * throw a CloneNotSupportedException. * * @return The cloned object */ public Object clone() { return delegate.clone(); }
/** * Creates a deep copy of this element and regenerates all instance-ids. * * @param preserveElementInstanceIds defines whether this call generates new instance-ids for the * derived elements. Instance-IDs are used by the report processor to recognize reoccurring * elements and must not changed within the report run. Outside of the report processors new * instance ids should be generated at all times to separate instances and to make them * uniquely identifiable. * @return the copy of the element. */ public Element derive(final boolean preserveElementInstanceIds) { try { final Element e = (Element) super.clone(); e.elementContext = null; if (preserveElementInstanceIds == false) { e.treeLock = new InstanceID(); } e.style = (InternalElementStyleSheet) style.derive(preserveElementInstanceIds); e.datasource = datasource.clone(); e.parent = null; e.style.updateElementReference(e); e.attributes = attributes.clone(); e.copyOnWrite = false; final ElementMetaData metaData = e.getMetaData(); final String[] namespaces = e.attributes.getNameSpaces(); for (int i = 0; i < namespaces.length; i++) { final String namespace = namespaces[i]; final Map attrsNs = attributes.getAttributes(namespace); final Iterator it = attrsNs.entrySet().iterator(); while (it.hasNext()) { final Map.Entry entry = (Map.Entry) it.next(); final Object value = entry.getValue(); final String name = (String) entry.getKey(); final AttributeMetaData data = metaData.getAttributeDescription(namespace, name); if (data == null) { if (logger.isDebugEnabled()) { logger.debug( getElementTypeName() + ": Attribute " + namespace + "|" + name + " is not listed in the metadata."); } } if (value instanceof Cloneable) { e.attributes.setAttribute(namespace, name, ObjectUtilities.clone(value)); } else if (data == null || data.isComputed() == false || data.isDesignTimeValue()) { e.attributes.setAttribute(namespace, name, value); } else { e.attributes.setAttribute(namespace, name, null); } } } if (e.cachedAttributes != null && e.attributes.getChangeTracker() != e.cachedAttributes.getChangeTracker()) { e.cachedAttributes = null; } if (attributeExpressions != null) { e.attributeExpressions = attributeExpressions.clone(); final String[] attrExprNamespaces = e.attributeExpressions.getNameSpaces(); for (int i = 0; i < attrExprNamespaces.length; i++) { final String namespace = attrExprNamespaces[i]; final Map attrsNs = attributeExpressions.getAttributes(namespace); final Iterator it = attrsNs.entrySet().iterator(); while (it.hasNext()) { final Map.Entry entry = (Map.Entry) it.next(); final Expression exp = (Expression) entry.getValue(); e.attributeExpressions.setAttribute( namespace, (String) entry.getKey(), exp.getInstance()); } } } if (styleExpressions != null) { //noinspection unchecked e.styleExpressions = (HashMap<StyleKey, Expression>) styleExpressions.clone(); final Iterator<Map.Entry<StyleKey, Expression>> styleExpressionsIt = e.styleExpressions.entrySet().iterator(); while (styleExpressionsIt.hasNext()) { final Map.Entry<StyleKey, Expression> entry = styleExpressionsIt.next(); final Expression exp = entry.getValue(); entry.setValue(exp.getInstance()); } } return e; } catch (CloneNotSupportedException cne) { throw new IllegalStateException(cne); } }