private JSONObject convertXMLToJSON(String xmlData) throws JSONException, JAXBException {
    // Unmarshall the xml into a GraphML.
    // Decode explicitly as UTF-8: the no-arg getBytes() uses the platform default
    // charset, which is environment-dependent and corrupts non-ASCII chart data.
    ByteArrayInputStream baiStream =
        new ByteArrayInputStream(xmlData.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    JAXBContext jc = JAXBContext.newInstance(GraphMLUtil.GRAPHML_CLASSES);
    Unmarshaller unmarshaller = jc.createUnmarshaller();
    GraphML graphML = (GraphML) unmarshaller.unmarshal(baiStream);

    // eventually check version for compatibility
    s_logger.info("Importing chart version: {}", graphML.getversion());

    Graph graph = graphML.getGraph();

    JSONObject toReturn = new JSONObject(),
        miscData = null,
        columnJSON,
        nodeJSON = null,
        clusterJSON;
    String cachedClusterJSON = null;
    int columnIdx = -1, rowIdx = -1;
    List<List<JSONObject>> allColumns = new ArrayList<List<JSONObject>>();
    List<JSONObject> currColumn, outColumns = new ArrayList<JSONObject>();

    // first, set up the column and misc JSON data contained in the graph
    for (GraphDataXML data : graph.getdata()) {
      if (data.getkey().startsWith("column")) {
        columnJSON = XML.toJSONObject(data.getvalue());
        // key format is "column<N>"; N is the target column index
        columnIdx = Integer.parseInt(data.getkey().substring(6));
        // pad with placeholders so set(columnIdx, ...) below cannot go out of bounds
        while (columnIdx >= outColumns.size()) {
          outColumns.add(new JSONObject());
        }

        outColumns.set(columnIdx, columnJSON);
        allColumns.add(new ArrayList<JSONObject>());
      } else if (data.getkey().equals("miscData")) {
        miscData = XML.toJSONObject(data.getvalue());
      }
    }

    // next, iterate through the graph nodes and place the JSONObject for each into its proper
    // row/column
    for (GraphNode node : graph.getnode()) {
      // reset per-node state so a node that lacks fileJSON/clusterJSON does not
      // silently reuse values parsed from a previous node
      nodeJSON = null;
      cachedClusterJSON = null;

      for (GraphData data : node.getdata()) { // parse the goodies from each file node
        if (data.getkey().equals("column")) {
          columnIdx = Integer.parseInt(data.getvalue());
        } else if (data.getkey().equals("row")) {
          rowIdx = Integer.parseInt(data.getvalue());
        }
      }

      for (GraphDataXML data : node.getnodedata()) { // parse the goodies from each data node
        if (data.getkey().equals("fileJSON")) {
          nodeJSON = XML.toJSONObject(data.getvalue());
        } else if (data.getkey().equals("clusterJSON")) {
          cachedClusterJSON = data.getvalue();
        }
      }

      if (nodeJSON != null
          && nodeJSON.has("clusterUIObject")
          && !nodeJSON.get("clusterUIObject").toString().equals("null")) {
        clusterJSON = nodeJSON.getJSONObject("clusterUIObject"); // do annoying cleanup
        insertJSONArray(clusterJSON, "children");
        insertJSONArray(clusterJSON.getJSONObject("spec"), "members");
      }

      // nodeJSON must also be present: the context id (xfId) is read from it below
      if (cachedClusterJSON != null && nodeJSON != null) {
        PermitSet permits = new PermitSet();

        try {
          List<String> entityIds = new LinkedList<String>();
          List<FL_Cluster> allClusters = ClusterHelper.listFromJson(cachedClusterJSON);
          for (FL_Cluster cluster : allClusters) {
            entityIds.addAll(cluster.getMembers());
            // un-escape string-valued singleton properties, which were XML-escaped on export
            for (FL_Property property : cluster.getProperties()) {
              if (!(property.getRange() instanceof FL_SingletonRange)) continue;
              FL_SingletonRange range = (FL_SingletonRange) property.getRange();
              if (!range.getType().equals(FL_PropertyType.STRING)) continue;
              property.setRange(
                  new SingletonRangeHelper(
                      StringEscapeUtils.unescapeXml((String) range.getValue())));
            }
          }

          final ContextReadWrite contextRW =
              contextCache.getReadWrite(nodeJSON.getString("xfId"), permits);
          contextRW.getContext().addClusters(allClusters);
          contextRW
              .getContext()
              .addEntities(entityAccess.getEntities(entityIds, FL_LevelOfDetail.SUMMARY));
          contextRW.setSimplifiedContext(allClusters);
        } catch (IOException e) {
          throw new ResourceException(
              Status.CLIENT_ERROR_BAD_REQUEST, "Exception during cluster cache processing.", e);
        } finally {
          // always release context permits, even on failure
          permits.revoke();
        }
      }

      currColumn = allColumns.get(columnIdx);
      // pad the column with placeholders so set(rowIdx, ...) cannot go out of bounds
      while (rowIdx >= currColumn.size()) {
        currColumn.add(new JSONObject());
      }
      currColumn.set(rowIdx, nodeJSON);
    }

    // place the files as children in the outgoing columns array
    for (int i = 0; i < allColumns.size(); i++) {
      columnJSON = outColumns.get(i);
      columnJSON.put("children", new JSONArray(allColumns.get(i)));
    }

    // finally, place the child columns and misc data in the resulting JSON object
    toReturn.put("children", new JSONArray(outColumns));
    if (miscData != null) {
      // getNames returns null for an empty object; guard both cases
      String[] miscKeys = JSONObject.getNames(miscData);
      if (miscKeys != null) {
        for (String dataKey : miscKeys) {
          toReturn.put(dataKey, miscData.get(dataKey));
        }
      }
    }

    return toReturn;
  }
  /**
   * Clusters the given entities and immutable clusters numerically on the configured
   * cluster field, merging results into any pre-existing clusters.
   *
   * <p>Existing clusters (from {@code clusters}) are seeded into the clusterer as
   * single-feature numeric clusters so new instances are folded into them
   * incrementally; otherwise a fresh clustering is performed.
   *
   * @param entities entities to cluster
   * @param immutableClusters clusters that may become sub-clusters but are not modified
   * @param clusters existing mutable clusters to cluster into
   * @param context cluster context used when building the data set
   * @return a new context whose roots and clusters are the modified/created clusters
   */
  @Override
  public ClusterContext clusterEntities(
      Collection<FL_Entity> entities,
      Collection<FL_Cluster> immutableClusters,
      Collection<FL_Cluster> clusters,
      ClusterContext context) {

    Map<String, FL_Entity> entityIndex = createEntityIndex(entities);
    Map<String, FL_Cluster> immutableClusterIndex = createClusterIndex(immutableClusters);
    Map<String, FL_Cluster> clusterIndex = createClusterIndex(clusters);

    DataSet ds = createDataSet(entities, immutableClusters, context);

    BaseClusterer clusterer = createNumericClusterer();

    // seed the clusterer with the existing clusters, each represented by the
    // numeric value of its cluster field (0 when the property is absent)
    List<Cluster> existingClusters = new LinkedList<Cluster>();

    for (FL_Cluster cluster : clusters) {
      double val = 0;
      PropertyHelper prop = getFirstProperty(cluster, toClusterPropertyName(clusterField));
      if (prop != null) {
        val = getDoubleValue(prop);
      }
      Cluster c = clusterer.createCluster();
      c.setId(cluster.getUid());
      NumericVectorFeature feature = new NumericVectorFeature("num");
      feature.setValue(new double[] {val});
      c.addFeature(feature);
      existingClusters.add(c);
    }

    ClusterResult rs =
        existingClusters.isEmpty()
            ? clusterer.doCluster(ds)
            : clusterer.doIncrementalCluster(ds, existingClusters);
    // clean up
    clusterer.terminate();

    Map<String, FL_Cluster> modifiedClusters = new HashMap<String, FL_Cluster>();

    for (Cluster c : rs) {
      List<FL_Cluster> subClusters = new LinkedList<FL_Cluster>();
      List<FL_Entity> members = new LinkedList<FL_Entity>();

      // partition cluster members: known entities vs. known immutable clusters
      for (Instance inst : c.getMembers()) {
        String id = inst.getId();
        if (entityIndex.containsKey(id)) {
          members.add(entityIndex.get(id));
        } else if (immutableClusterIndex.containsKey(id)) {
          subClusters.add(immutableClusterIndex.get(id));
        }
      }

      FL_Cluster cluster = clusterIndex.get(c.getId());
      if (cluster == null) {
        // brand-new cluster produced by the clusterer
        cluster = clusterFactory.toCluster(members, subClusters);
      } else {
        // merge new members/sub-clusters into the pre-existing cluster
        ClusterHelper.addMembers(cluster, members);
        EntityClusterFactory.setEntityCluster(members, cluster);

        for (FL_Cluster subCluster : subClusters) {
          ClusterHelper.addSubCluster(cluster, subCluster);
          subCluster.setParent(cluster.getUid());
        }
      }

      // cache the cluster property (identical for both branches above)
      NumericVectorFeature feature = (NumericVectorFeature) c.getFeature("num");
      double value = feature.getValue()[0];
      addClusterProperty(cluster, clusterField, value);

      modifiedClusters.put(cluster.getUid(), cluster);
    }

    ClusterContext result = new ClusterContext();
    result.roots.putAll(modifiedClusters);
    result.clusters.putAll(modifiedClusters);

    return result;
  }