public boolean isInSet(VirtualFile file) {
    if (CacheUtil.isIgnored(file, myRootManager)) return false;

    for (VirtualFile vf : getRootFiles()) {
      if (VfsUtil.isAncestor(vf, file, true)) return true;
    }
    return false;
  }
 /**
  * @param cu the cache instance
  * @param objectName the index key (the "key" in a key-value lookup)
  * @return the total number of items stored under the given index key, or 0 when absent
  */
 public static int getItemsTotalByCache(CacheUtil cu, String objectName) {
   Long result = (Long) cu.getCacheObject(objectName);
   // A missing entry counts as zero items; otherwise return the cached Long as an int.
   return result == null ? 0 : result.intValue();
 }
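 // A minimal usage sketch for getItemsTotalByCache (the keys and the putCacheObject
 // setter mentioned below are hypothetical; only getCacheObject appears in this code),
 // showing the null-as-zero contract:
 public static void itemsTotalExample(CacheUtil cu) {
   // Some writer is assumed to have stored the item count as a Long, e.g. via a
   // putCacheObject(String, Object) counterpart to getCacheObject.
   int total = getItemsTotalByCache(cu, "newsTotal"); // cached Long, returned as int
   int absent = getItemsTotalByCache(cu, "noSuchKey"); // missing entry -> 0
   System.out.println(total + " / " + absent);
 }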
  private void iterateIndexableFilesIn_internal(VirtualFile file, ContentIterator iterator) {
    if (CacheUtil.isIgnored(file, myRootManager)) return;

    if (file.isDirectory()) {
      for (VirtualFile child : file.getChildren()) {
        iterateIndexableFilesIn_internal(child, iterator);
      }
    } else {
      iterator.processFile(file);
    }
  }
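  // A minimal usage sketch of the recursive walk above: counting files with a
  // ContentIterator. In the IntelliJ API, processFile returns a "keep going" flag;
  // the recursion above ignores it, so this counter simply always returns true.
  private int countIndexableFiles(VirtualFile root) {
    final int[] count = {0};
    iterateIndexableFilesIn_internal(root, new ContentIterator() {
      @Override
      public boolean processFile(VirtualFile fileOrDir) {
        count[0]++;
        return true;
      }
    });
    return count[0];
  }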
 public static Object loadByCacheBasic(CacheUtil cu, String objectName) {
   // Skip the lookup entirely when caching is disabled.
   if (!CacheKey.IS_START_CACHE) {
     return null;
   }
   // getCacheObject already yields null on a miss, so no further branching is needed.
   return cu.getCacheObject(objectName);
 }
  public static List loadByCache(CacheUtil cu, String objectName) {
    // Skip the lookup entirely when caching is disabled.
    if (!CacheKey.IS_START_CACHE) {
      return null;
    }
    // A cache miss yields null, matching the previous back == null behavior.
    return (List) cu.getCacheObject(objectName);
  }
 /**
  * @param cu the cache instance
  * @param objectName the index key (the "key" in a key-value lookup)
  * @return at most the first ten cached news entries, or null on a cache miss
  */
 public static List loadByCacheNews(CacheUtil cu, String objectName) {
   List result = (List) cu.getCacheObject(objectName);
   if (result == null) {
     return null;
   }
   List back = new ArrayList();
   // Copy at most the first ten entries.
   int limit = Math.min(result.size(), 10);
   for (int i = 0; i < limit; i++) {
     back.add(result.get(i));
   }
   return back;
 }
  public static void putProcessTiming(
      String projectId, String userId, String domainId, ParamType paramType) {
    try {
      Cache cache = CacheUtil.getCache();
      Node rootNode = cache.getRoot();
      // Store the flag under a per-user path in the cache tree.
      String roleTree = domainId + "/" + projectId + "/" + userId + "/" + PM_ENABLE_PROCESS_TIMING;
      rootNode.put(roleTree, paramType.getValue());
    } catch (MalformedObjectNameException e) {
      e.printStackTrace();
    } catch (NullPointerException e) {
      e.printStackTrace();
    }
  }
  public static String clearProcessTiming(String projectId, String userId, String domainId) {
    String processTimingPMFlag = "NONE";
    try {
      Node rootNode = CacheUtil.getCache().getRoot();
      // Node.put returns the previous value, so putting null both clears the flag
      // and reports what it was; keep the "NONE" default when nothing was set.
      String previous =
          (String)
              rootNode.put(
                  domainId + "/" + projectId + "/" + userId + "/" + PM_ENABLE_PROCESS_TIMING, null);
      if (previous != null) {
        processTimingPMFlag = previous;
      }
    } catch (MalformedObjectNameException e) {
      e.printStackTrace();
    } catch (NullPointerException e) {
      e.printStackTrace();
    }
    return processTimingPMFlag;
  }
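  // A minimal round-trip sketch (all identifiers hypothetical, including ParamType.ENABLE):
  // both helpers above share one cache-tree path, so a put followed by a clear hands the
  // stored flag back to the caller.
  public static void processTimingExample() {
    putProcessTiming("proj-1", "user-1", "domain-1", ParamType.ENABLE);
    String previous = clearProcessTiming("proj-1", "user-1", "domain-1");
    System.out.println(previous); // the value stored above, or "NONE" if nothing was set
  }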
 /**
  * @param cu the cache instance
  * @param page the pagination object
  * @param objectName the index key (the "key" in a key-value lookup)
  * @return a List holding the requested page of cached objects, or null on a cache miss
  */
 public static List loadByCache(CacheUtil cu, PageBean page, String objectName) {
   List result = (List) cu.getCacheObject(objectName);
   if (result == null) {
     return null;
   }
   List back = new ArrayList();
   int pageSize = Integer.parseInt(page.getPageCount());
   // First index of the requested page (pageIndex is 1-based).
   int start = (Integer.parseInt(page.getPageIndex()) - 1) * pageSize;
   int end = Math.min(start + pageSize, result.size());
   for (int i = start; i < end; i++) {
     back.add(result.get(i));
   }
   return back;
 }
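 // A worked example of the paging arithmetic above (values hypothetical; PageBean's
 // accessors return Strings, hence the parseInt calls):
 //
 //   cached list size = 25, pageIndex = "3", pageCount = "10"
 //   start = (3 - 1) * 10 = 20
 //   end   = min(20 + 10, 25) = 25
 //   -> elements 20..24 come back, i.e. a short final page.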
 public void populate(Iterable<Object> results) {
   CacheUtil.populate(cache, mapper, key, results);
 }
  @Override
  public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext) {
    final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

    final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();

    final List<Pair<Interval, byte[]>> cachedResults = Lists.newArrayList();
    final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

    final boolean useCache =
        BaseQuery.getContextUseCache(query, true)
            && strategy != null
            && cacheConfig.isUseCache()
            && cacheConfig.isQueryCacheable(query);
    final boolean populateCache =
        BaseQuery.getContextPopulateCache(query, true)
            && strategy != null
            && cacheConfig.isPopulateCache()
            && cacheConfig.isQueryCacheable(query);
    final boolean isBySegment = BaseQuery.getContextBySegment(query, false);

    final ImmutableMap.Builder<String, Object> contextBuilder = new ImmutableMap.Builder<>();

    final int priority = BaseQuery.getContextPriority(query, 0);
    contextBuilder.put("priority", priority);

    if (populateCache) {
      // prevent down-stream nodes from caching results as well if we are populating the cache
      contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
      contextBuilder.put("bySegment", true);
    }

    TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource());

    if (timeline == null) {
      return Sequences.empty();
    }

    // build set of segments to query
    Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();

    List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList();

    // Note that enabling this leads to putting uncovered intervals information in the
    // response headers and might blow up in some cases:
    // https://github.com/druid-io/druid/issues/2108
    int uncoveredIntervalsLimit = BaseQuery.getContextUncoveredIntervalsLimit(query, 0);

    if (uncoveredIntervalsLimit > 0) {
      List<Interval> uncoveredIntervals = Lists.newArrayListWithCapacity(uncoveredIntervalsLimit);
      boolean uncoveredIntervalsOverflowed = false;

      for (Interval interval : query.getIntervals()) {
        Iterable<TimelineObjectHolder<String, ServerSelector>> lookup = timeline.lookup(interval);
        long startMillis = interval.getStartMillis();
        long endMillis = interval.getEndMillis();
        for (TimelineObjectHolder<String, ServerSelector> holder : lookup) {
          Interval holderInterval = holder.getInterval();
          long intervalStart = holderInterval.getStartMillis();
          if (!uncoveredIntervalsOverflowed && startMillis != intervalStart) {
            if (uncoveredIntervalsLimit > uncoveredIntervals.size()) {
              uncoveredIntervals.add(new Interval(startMillis, intervalStart));
            } else {
              uncoveredIntervalsOverflowed = true;
            }
          }
          startMillis = holderInterval.getEndMillis();
          serversLookup.add(holder);
        }

        if (!uncoveredIntervalsOverflowed && startMillis < endMillis) {
          if (uncoveredIntervalsLimit > uncoveredIntervals.size()) {
            uncoveredIntervals.add(new Interval(startMillis, endMillis));
          } else {
            uncoveredIntervalsOverflowed = true;
          }
        }
      }

      if (!uncoveredIntervals.isEmpty()) {
        // This returns intervals for which NO segment is present. That is not necessarily
        // an indication that the data doesn't exist or is incomplete; the data could exist
        // and simply not be loaded yet. In either case, though, this query will not
        // include any data from the identified intervals.
        responseContext.put("uncoveredIntervals", uncoveredIntervals);
        responseContext.put("uncoveredIntervalsOverflowed", uncoveredIntervalsOverflowed);
      }
    } else {
      for (Interval interval : query.getIntervals()) {
        Iterables.addAll(serversLookup, timeline.lookup(interval));
      }
    }

    // Let tool chest filter out unneeded segments
    final List<TimelineObjectHolder<String, ServerSelector>> filteredServersLookup =
        toolChest.filterSegments(query, serversLookup);
    Map<String, Optional<RangeSet<String>>> dimensionRangeCache = Maps.newHashMap();

    // Filter unneeded chunks based on partition dimension
    for (TimelineObjectHolder<String, ServerSelector> holder : filteredServersLookup) {
      final Set<PartitionChunk<ServerSelector>> filteredChunks =
          DimFilterUtils.filterShards(
              query.getFilter(),
              holder.getObject(),
              new Function<PartitionChunk<ServerSelector>, ShardSpec>() {
                @Override
                public ShardSpec apply(PartitionChunk<ServerSelector> input) {
                  return input.getObject().getSegment().getShardSpec();
                }
              },
              dimensionRangeCache);
      for (PartitionChunk<ServerSelector> chunk : filteredChunks) {
        ServerSelector selector = chunk.getObject();
        final SegmentDescriptor descriptor =
            new SegmentDescriptor(
                holder.getInterval(), holder.getVersion(), chunk.getChunkNumber());
        segments.add(Pair.of(selector, descriptor));
      }
    }

    final byte[] queryCacheKey;

    if ((populateCache || useCache) // implies strategy != null
        && !isBySegment) // explicit bySegment queries are never cached
    {
      queryCacheKey = strategy.computeCacheKey(query);
    } else {
      queryCacheKey = null;
    }

    if (queryCacheKey != null) {
      // cacheKeys map must preserve segment ordering, in order for shards to always be
      // combined in the same order
      Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys =
          Maps.newLinkedHashMap();
      for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
        final Cache.NamedKey segmentCacheKey =
            CacheUtil.computeSegmentCacheKey(
                segment.lhs.getSegment().getIdentifier(), segment.rhs, queryCacheKey);
        cacheKeys.put(segment, segmentCacheKey);
      }

      // Pull cached segments from cache and remove from set of segments to query
      final Map<Cache.NamedKey, byte[]> cachedValues;
      if (useCache) {
        cachedValues =
            cache.getBulk(
                Iterables.limit(cacheKeys.values(), cacheConfig.getCacheBulkMergeLimit()));
      } else {
        cachedValues = ImmutableMap.of();
      }

      for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry :
          cacheKeys.entrySet()) {
        Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
        Cache.NamedKey segmentCacheKey = entry.getValue();
        final Interval segmentQueryInterval = segment.rhs.getInterval();

        final byte[] cachedValue = cachedValues.get(segmentCacheKey);
        if (cachedValue != null) {
          // remove cached segment from set of segments to query
          segments.remove(segment);
          cachedResults.add(Pair.of(segmentQueryInterval, cachedValue));
        } else if (populateCache) {
          // otherwise, if populating cache, add segment to list of segments to cache
          final String segmentIdentifier = segment.lhs.getSegment().getIdentifier();
          cachePopulatorMap.put(
              String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
              new CachePopulator(cache, objectMapper, segmentCacheKey));
        }
      }
    }

    // Compile list of all segments not pulled from cache
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
      final QueryableDruidServer queryableDruidServer = segment.lhs.pick();

      if (queryableDruidServer == null) {
        log.makeAlert(
                "No servers found for SegmentDescriptor[%s] for DataSource[%s]?! How can this be?!",
                segment.rhs, query.getDataSource())
            .emit();
      } else {
        final DruidServer server = queryableDruidServer.getServer();
        List<SegmentDescriptor> descriptors = serverSegments.get(server);

        if (descriptors == null) {
          descriptors = Lists.newArrayList();
          serverSegments.put(server, descriptors);
        }

        descriptors.add(segment.rhs);
      }
    }

    return new LazySequence<>(
        new Supplier<Sequence<T>>() {
          @Override
          public Sequence<T> get() {
            ArrayList<Sequence<T>> sequencesByInterval = Lists.newArrayList();
            addSequencesFromCache(sequencesByInterval);
            addSequencesFromServer(sequencesByInterval);

            return mergeCachedAndUncachedSequences(query, sequencesByInterval);
          }

          private void addSequencesFromCache(ArrayList<Sequence<T>> listOfSequences) {
            if (strategy == null) {
              return;
            }

            final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
            final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
            for (Pair<Interval, byte[]> cachedResultPair : cachedResults) {
              final byte[] cachedResult = cachedResultPair.rhs;
              Sequence<Object> cachedSequence =
                  new BaseSequence<>(
                      new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
                        @Override
                        public Iterator<Object> make() {
                          try {
                            if (cachedResult.length == 0) {
                              return Iterators.emptyIterator();
                            }

                            return objectMapper.readValues(
                                objectMapper.getFactory().createParser(cachedResult),
                                cacheObjectClazz);
                          } catch (IOException e) {
                            throw Throwables.propagate(e);
                          }
                        }

                        @Override
                        public void cleanup(Iterator<Object> iterFromMake) {}
                      });
              listOfSequences.add(Sequences.map(cachedSequence, pullFromCacheFunction));
            }
          }

          private void addSequencesFromServer(ArrayList<Sequence<T>> listOfSequences) {
            listOfSequences.ensureCapacity(listOfSequences.size() + serverSegments.size());

            final Query<T> rewrittenQuery = query.withOverriddenContext(contextBuilder.build());

            // Loop through each server, setting up the query and initiating it.
            // The data gets handled as a Future and parsed in the long Sequence chain in the
            // resultSeqToAdd setter.
            for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry :
                serverSegments.entrySet()) {
              final DruidServer server = entry.getKey();
              final List<SegmentDescriptor> descriptors = entry.getValue();

              final QueryRunner clientQueryable = serverView.getQueryRunner(server);

              if (clientQueryable == null) {
                log.error("WTF!? server[%s] doesn't have a client Queryable?", server);
                continue;
              }

              final MultipleSpecificSegmentSpec segmentSpec =
                  new MultipleSpecificSegmentSpec(descriptors);

              final Sequence<T> resultSeqToAdd;
              if (!server.isAssignable()
                  || !populateCache
                  || isBySegment) { // Direct server queryable
                if (!isBySegment) {
                  resultSeqToAdd =
                      clientQueryable.run(query.withQuerySegmentSpec(segmentSpec), responseContext);
                } else {
                  // bySegment queries need to be de-serialized, see DirectDruidClient.run()

                  @SuppressWarnings("unchecked")
                  final Query<Result<BySegmentResultValueClass<T>>> bySegmentQuery =
                      (Query<Result<BySegmentResultValueClass<T>>>) ((Query) query);

                  @SuppressWarnings("unchecked")
                  final Sequence<Result<BySegmentResultValueClass<T>>> resultSequence =
                      clientQueryable.run(
                          bySegmentQuery.withQuerySegmentSpec(segmentSpec), responseContext);

                  resultSeqToAdd =
                      (Sequence)
                          Sequences.map(
                              resultSequence,
                              new Function<
                                  Result<BySegmentResultValueClass<T>>,
                                  Result<BySegmentResultValueClass<T>>>() {
                                @Override
                                public Result<BySegmentResultValueClass<T>> apply(
                                    Result<BySegmentResultValueClass<T>> input) {
                                  final BySegmentResultValueClass<T> bySegmentValue =
                                      input.getValue();
                                  return new Result<>(
                                      input.getTimestamp(),
                                      new BySegmentResultValueClass<T>(
                                          Lists.transform(
                                              bySegmentValue.getResults(),
                                              toolChest.makePreComputeManipulatorFn(
                                                  query, MetricManipulatorFns.deserializing())),
                                          bySegmentValue.getSegmentId(),
                                          bySegmentValue.getInterval()));
                                }
                              });
                }
              } else { // Requires some manipulation on broker side
                @SuppressWarnings("unchecked")
                final Sequence<Result<BySegmentResultValueClass<T>>> runningSequence =
                    clientQueryable.run(
                        rewrittenQuery.withQuerySegmentSpec(segmentSpec), responseContext);
                resultSeqToAdd =
                    new MergeSequence(
                        query.getResultOrdering(),
                        Sequences.<Result<BySegmentResultValueClass<T>>, Sequence<T>>map(
                            runningSequence,
                            new Function<Result<BySegmentResultValueClass<T>>, Sequence<T>>() {
                              private final Function<T, Object> cacheFn =
                                  strategy.prepareForCache();

                              // Actually do something with the results
                              @Override
                              public Sequence<T> apply(Result<BySegmentResultValueClass<T>> input) {
                                final BySegmentResultValueClass<T> value = input.getValue();
                                final CachePopulator cachePopulator =
                                    cachePopulatorMap.get(
                                        String.format(
                                            "%s_%s", value.getSegmentId(), value.getInterval()));

                                final Queue<ListenableFuture<Object>> cacheFutures =
                                    new ConcurrentLinkedQueue<>();

                                return Sequences.<T>withEffect(
                                    Sequences.<T, T>map(
                                        Sequences.<T, T>map(
                                            Sequences.<T>simple(value.getResults()),
                                            new Function<T, T>() {
                                              @Override
                                              public T apply(final T input) {
                                                if (cachePopulator != null) {
                                                  // only compute cache data if populating cache
                                                  cacheFutures.add(
                                                      backgroundExecutorService.submit(
                                                          new Callable<Object>() {
                                                            @Override
                                                            public Object call() {
                                                              return cacheFn.apply(input);
                                                            }
                                                          }));
                                                }
                                                return input;
                                              }
                                            }),
                                        toolChest.makePreComputeManipulatorFn(
                                            // Ick... most makePreComputeManipulatorFn directly cast
                                            // to their ToolChest query type of choice
                                            // This casting is sub-optimal, but hasn't caused any
                                            // major problems yet...
                                            (Query) rewrittenQuery,
                                            MetricManipulatorFns.deserializing())),
                                    new Runnable() {
                                      @Override
                                      public void run() {
                                        if (cachePopulator != null) {
                                          Futures.addCallback(
                                              Futures.allAsList(cacheFutures),
                                              new FutureCallback<List<Object>>() {
                                                @Override
                                                public void onSuccess(List<Object> cacheData) {
                                                  cachePopulator.populate(cacheData);
                                                  // Help out GC by making sure all references are
                                                  // gone
                                                  cacheFutures.clear();
                                                }

                                                @Override
                                                public void onFailure(Throwable throwable) {
                                                  log.error(throwable, "Background caching failed");
                                                }
                                              },
                                              backgroundExecutorService);
                                        }
                                      }
                                    },
                                    MoreExecutors.sameThreadExecutor()); // End withEffect
                              }
                            }));
              }

              listOfSequences.add(resultSeqToAdd);
            }
          }
        } // End of Supplier
        );
  }
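  // A condensed sketch of the per-segment cache lookup performed in run() (the helper
  // name is hypothetical; the real key layout lives in CacheUtil.computeSegmentCacheKey).
  // Each entry is addressed by (segment identifier, segment descriptor, query cache key),
  // so a hit removes that segment from the server fan-out while a miss leaves it queued:
  private byte[] lookUpSegmentSketch(
      Cache.NamedKey segmentCacheKey,
      Set<Pair<ServerSelector, SegmentDescriptor>> segments,
      Pair<ServerSelector, SegmentDescriptor> segment) {
    final byte[] cachedValue = cache.get(segmentCacheKey);
    if (cachedValue != null) {
      segments.remove(segment); // served from cache; no server query needed for it
    }
    return cachedValue;
  }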
  @Override
  protected void processFilter(
      HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
      throws Exception {

    request.setAttribute(SKIP_FILTER, Boolean.TRUE);

    String key = getCacheKey(request);

    long companyId = PortalInstances.getCompanyId(request);

    CacheResponseData cacheResponseData = CacheUtil.getCacheResponseData(companyId, key);

    if (cacheResponseData == null) {
      if (!isCacheableData(companyId, request)) {
        if (_log.isDebugEnabled()) {
          _log.debug("Request is not cacheable " + key);
        }

        processFilter(CacheFilter.class, request, response, filterChain);

        return;
      }

      if (_log.isInfoEnabled()) {
        _log.info("Caching request " + key);
      }

      BufferCacheServletResponse bufferCacheServletResponse =
          new BufferCacheServletResponse(response);

      processFilter(CacheFilter.class, request, bufferCacheServletResponse, filterChain);

      cacheResponseData = new CacheResponseData(bufferCacheServletResponse);

      LastPath lastPath = (LastPath) request.getAttribute(WebKeys.LAST_PATH);

      if (lastPath != null) {
        cacheResponseData.setAttribute(WebKeys.LAST_PATH, lastPath);
      }

      // Cache the result if and only if there is a result and the request
      // is cacheable. We have to test the cacheability of a request twice
      // because the user could have been authenticated after the initial
      // test.

      String cacheControl =
          GetterUtil.getString(bufferCacheServletResponse.getHeader(HttpHeaders.CACHE_CONTROL));

      if ((bufferCacheServletResponse.getStatus() == HttpServletResponse.SC_OK)
          && !cacheControl.contains(HttpHeaders.PRAGMA_NO_CACHE_VALUE)
          && isCacheableRequest(request)
          && isCacheableResponse(bufferCacheServletResponse)) {

        CacheUtil.putCacheResponseData(companyId, key, cacheResponseData);
      }
    } else {
      LastPath lastPath = (LastPath) cacheResponseData.getAttribute(WebKeys.LAST_PATH);

      if (lastPath != null) {
        HttpSession session = request.getSession();

        session.setAttribute(WebKeys.LAST_PATH, lastPath);
      }
    }

    CacheResponseUtil.write(response, cacheResponseData);
  }
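  // A condensed sketch of the double cacheability test above (the method name is
  // hypothetical; everything it calls appears in processFilter). The request is checked
  // before running the filter chain and the buffered response is checked again after,
  // because the user could have been authenticated while the chain ran:
  private void cacheIfStillCacheable(
      long companyId, String key, HttpServletRequest request,
      BufferCacheServletResponse bufferCacheServletResponse,
      CacheResponseData cacheResponseData) {

    if ((bufferCacheServletResponse.getStatus() == HttpServletResponse.SC_OK)
        && isCacheableRequest(request)
        && isCacheableResponse(bufferCacheServletResponse)) {

      // Re-checking here prevents caching a personalized page under a shared key.
      CacheUtil.putCacheResponseData(companyId, key, cacheResponseData);
    }
  }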
public class InlineTableUtil {

  private InlineTableUtil() {}

  public static Table<Integer, String, String> getContent(InlineTable inlineTable) {
    return CacheUtil.getValue(inlineTable, InlineTableUtil.contentCache);
  }

  public static Table<Integer, String, String> parse(InlineTable inlineTable) {
    Table<Integer, String, String> result = HashBasedTable.create();

    Integer rowKey = 1;

    List<Row> rows = inlineTable.getRows();
    for (Row row : rows) {
      List<Object> cells = row.getContent();

      for (Object cell : cells) {

        if (cell instanceof Element) {
          Element element = (Element) cell;

          result.put(rowKey, element.getTagName(), element.getTextContent());
        }
      }

      rowKey += 1;
    }

    return result;
  }

  public static Map<String, String> match(
      Table<Integer, String, String> table, Map<String, FieldValue> values) {
    Set<Integer> rowKeys = table.rowKeySet();

    rows:
    for (Integer rowKey : rowKeys) {
      Map<String, String> row = table.row(rowKey);

      // A table row contains a certain number of input columns, plus an output column
      if (values.size() < (row.size() - 1)) {
        continue rows;
      }

      Collection<Map.Entry<String, FieldValue>> entries = values.entrySet();
      for (Map.Entry<String, FieldValue> entry : entries) {
        String key = entry.getKey();
        FieldValue value = entry.getValue();

        String rowValue = row.get(key);
        if (rowValue == null) {
          continue rows;
        }

        boolean equals = value.equalsString(rowValue);
        if (!equals) {
          continue rows;
        }
      }

      return row;
    }

    return null;
  }

  private static final LoadingCache<InlineTable, Table<Integer, String, String>> contentCache =
      CacheUtil.buildLoadingCache(
          new CacheLoader<InlineTable, Table<Integer, String, String>>() {

            @Override
            public Table<Integer, String, String> load(InlineTable inlineTable) {
              return Tables.unmodifiableTable(parse(inlineTable));
            }
          });
}
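// A minimal usage sketch of InlineTableUtil.match. Building a FieldValue is framework-
// specific, so "lowValue" is assumed to be a FieldValue whose equalsString("low") is true;
// the Guava Table calls are real API. The table maps an input column "x" to an output "y":
//
//   Table<Integer, String, String> table = HashBasedTable.create();
//   table.put(1, "x", "low");  table.put(1, "y", "0");
//   table.put(2, "x", "high"); table.put(2, "y", "1");
//
//   Map<String, String> row =
//       InlineTableUtil.match(table, Collections.singletonMap("x", lowValue));
//   // row = {x=low, y=0}; the caller then reads the output column "y".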
public class SupportVectorMachineModelEvaluator extends ModelEvaluator<SupportVectorMachineModel> {

  public SupportVectorMachineModelEvaluator(PMML pmml) {
    super(pmml, SupportVectorMachineModel.class);
  }

  public SupportVectorMachineModelEvaluator(
      PMML pmml, SupportVectorMachineModel supportVectorMachineModel) {
    super(pmml, supportVectorMachineModel);
  }

  @Override
  public String getSummary() {
    return "Support vector machine";
  }

  @Override
  public Map<FieldName, ?> evaluate(ModelEvaluationContext context) {
    SupportVectorMachineModel supportVectorMachineModel = getModel();
    if (!supportVectorMachineModel.isScorable()) {
      throw new InvalidResultException(supportVectorMachineModel);
    }

    SvmRepresentationType svmRepresentation = supportVectorMachineModel.getSvmRepresentation();
    switch (svmRepresentation) {
      case SUPPORT_VECTORS:
        break;
      default:
        throw new UnsupportedFeatureException(supportVectorMachineModel, svmRepresentation);
    }

    Map<FieldName, ?> predictions;

    MiningFunctionType miningFunction = supportVectorMachineModel.getFunctionName();
    switch (miningFunction) {
      case REGRESSION:
        predictions = evaluateRegression(context);
        break;
      case CLASSIFICATION:
        predictions = evaluateClassification(context);
        break;
      default:
        throw new UnsupportedFeatureException(supportVectorMachineModel, miningFunction);
    }

    return OutputUtil.evaluate(predictions, context);
  }

  private Map<FieldName, ?> evaluateRegression(ModelEvaluationContext context) {
    SupportVectorMachineModel supportVectorMachineModel = getModel();

    List<SupportVectorMachine> supportVectorMachines =
        supportVectorMachineModel.getSupportVectorMachines();
    if (supportVectorMachines.size() != 1) {
      throw new InvalidFeatureException(supportVectorMachineModel);
    }

    SupportVectorMachine supportVectorMachine = supportVectorMachines.get(0);

    double[] input = createInput(context);

    Double result = evaluateSupportVectorMachine(supportVectorMachine, input);

    return TargetUtil.evaluateRegression(result, context);
  }

  private Map<FieldName, ? extends Classification> evaluateClassification(
      ModelEvaluationContext context) {
    SupportVectorMachineModel supportVectorMachineModel = getModel();

    List<SupportVectorMachine> supportVectorMachines =
        supportVectorMachineModel.getSupportVectorMachines();
    if (supportVectorMachines.size() < 1) {
      throw new InvalidFeatureException(supportVectorMachineModel);
    }

    String alternateBinaryTargetCategory =
        supportVectorMachineModel.getAlternateBinaryTargetCategory();

    Classification result;

    SvmClassificationMethodType svmClassificationMethod = getClassificationMethod();
    switch (svmClassificationMethod) {
      case ONE_AGAINST_ALL:
        result = new Classification(Classification.Type.DISTANCE);
        break;
      case ONE_AGAINST_ONE:
        result = new VoteDistribution();
        break;
      default:
        throw new UnsupportedFeatureException(supportVectorMachineModel, svmClassificationMethod);
    }

    double[] input = createInput(context);

    for (SupportVectorMachine supportVectorMachine : supportVectorMachines) {
      String targetCategory = supportVectorMachine.getTargetCategory();
      String alternateTargetCategory = supportVectorMachine.getAlternateTargetCategory();

      Double value = evaluateSupportVectorMachine(supportVectorMachine, input);

      switch (svmClassificationMethod) {
        case ONE_AGAINST_ALL:
          {
            if (targetCategory == null || alternateTargetCategory != null) {
              throw new InvalidFeatureException(supportVectorMachine);
            }

            result.put(targetCategory, value);
          }
          break;
        case ONE_AGAINST_ONE:
          if (alternateBinaryTargetCategory != null) {

            if (targetCategory == null || alternateTargetCategory != null) {
              throw new InvalidFeatureException(supportVectorMachine);
            }

            String label;

            long roundedValue = Math.round(value);

            // Per the PMML specification, a rounded value of 1 corresponds to the
            // targetCategory attribute of the SupportVectorMachine element, a rounded
            // value of 0 corresponds to the alternateBinaryTargetCategory attribute of
            // the SupportVectorMachineModel element, and the numeric prediction must
            // otherwise be between 0 and 1.
            if (roundedValue == 1) {
              label = targetCategory;
            } else if (roundedValue == 0) {
              label = alternateBinaryTargetCategory;
            } else {
              throw new EvaluationException("Invalid numeric prediction " + value);
            }

            Double vote = result.get(label);
            if (vote == null) {
              vote = 0d;
            }

            result.put(label, (vote + 1d));
          } else {
            if (targetCategory == null || alternateTargetCategory == null) {
              throw new InvalidFeatureException(supportVectorMachine);
            }

            Double threshold = supportVectorMachine.getThreshold();
            if (threshold == null) {
              threshold = supportVectorMachineModel.getThreshold();
            }

            String label;

            // "If the numeric prediction is smaller than the threshold, it corresponds to the
            // targetCategory attribute"
            if ((value).compareTo(threshold) < 0) {
              label = targetCategory;
            } else {
              label = alternateTargetCategory;
            }

            Double vote = result.get(label);
            if (vote == null) {
              vote = 0d;
            }

            result.put(label, (vote + 1d));
          }
          break;
        default:
          break;
      }
    }

    return TargetUtil.evaluateClassification(result, context);
  }

  private Double evaluateSupportVectorMachine(
      SupportVectorMachine supportVectorMachine, double[] input) {
    SupportVectorMachineModel supportVectorMachineModel = getModel();

    double result = 0d;

    Kernel kernel = supportVectorMachineModel.getKernel();

    Coefficients coefficients = supportVectorMachine.getCoefficients();
    Iterator<Coefficient> coefficientIterator = coefficients.iterator();

    SupportVectors supportVectors = supportVectorMachine.getSupportVectors();
    Iterator<SupportVector> supportVectorIterator = supportVectors.iterator();

    Map<String, double[]> vectorMap = getVectorMap();

    while (coefficientIterator.hasNext() && supportVectorIterator.hasNext()) {
      Coefficient coefficient = coefficientIterator.next();
      SupportVector supportVector = supportVectorIterator.next();

      double[] vector = vectorMap.get(supportVector.getVectorId());
      if (vector == null) {
        throw new InvalidFeatureException(supportVector);
      }

      Double value = KernelUtil.evaluate(kernel, input, vector);

      result += (coefficient.getValue() * value);
    }

    if (coefficientIterator.hasNext() || supportVectorIterator.hasNext()) {
      throw new InvalidFeatureException(supportVectorMachine);
    }

    result += coefficients.getAbsoluteValue();

    return result;
  }

  private SvmClassificationMethodType getClassificationMethod() {
    SupportVectorMachineModel supportVectorMachineModel = getModel();

    // Older versions of several popular PMML producers are known to omit the
    // classificationMethod attribute. The generated getter on SupportVectorMachineModel
    // replaces a missing value with the default "OneAgainstAll", which may lead to
    // incorrect behaviour. The workaround is to bypass the getter via the Java
    // Reflection API and infer the correct classification method from evidence.
    Field field = ReflectionUtil.getField(supportVectorMachineModel, "classificationMethod");

    SvmClassificationMethodType svmClassificationMethod =
        ReflectionUtil.getFieldValue(field, supportVectorMachineModel);
    if (svmClassificationMethod != null) {
      return svmClassificationMethod;
    }

    List<SupportVectorMachine> supportVectorMachines =
        supportVectorMachineModel.getSupportVectorMachines();

    String alternateBinaryTargetCategory =
        supportVectorMachineModel.getAlternateBinaryTargetCategory();
    if (alternateBinaryTargetCategory != null) {

      if (supportVectorMachines.size() == 1) {
        SupportVectorMachine supportVectorMachine = supportVectorMachines.get(0);

        String targetCategory = supportVectorMachine.getTargetCategory();
        if (targetCategory != null) {
          return SvmClassificationMethodType.ONE_AGAINST_ONE;
        }

        throw new InvalidFeatureException(supportVectorMachine);
      }

      throw new InvalidFeatureException(supportVectorMachineModel);
    }

    for (SupportVectorMachine supportVectorMachine : supportVectorMachines) {
      String targetCategory = supportVectorMachine.getTargetCategory();
      String alternateTargetCategory = supportVectorMachine.getAlternateTargetCategory();

      if (targetCategory != null) {

        if (alternateTargetCategory != null) {
          return SvmClassificationMethodType.ONE_AGAINST_ONE;
        }

        return SvmClassificationMethodType.ONE_AGAINST_ALL;
      }

      throw new InvalidFeatureException(supportVectorMachine);
    }

    throw new InvalidFeatureException(supportVectorMachineModel);
  }

  private double[] createInput(EvaluationContext context) {
    SupportVectorMachineModel supportVectorMachineModel = getModel();

    VectorDictionary vectorDictionary = supportVectorMachineModel.getVectorDictionary();

    VectorFields vectorFields = vectorDictionary.getVectorFields();

    List<FieldRef> fieldRefs = vectorFields.getFieldRefs();

    double[] result = new double[fieldRefs.size()];

    for (int i = 0; i < fieldRefs.size(); i++) {
      FieldRef fieldRef = fieldRefs.get(i);

      FieldValue value = ExpressionUtil.evaluate(fieldRef, context);
      if (value == null) {
        throw new MissingValueException(fieldRef.getField(), vectorFields);
      }

      result[i] = (value.asNumber()).doubleValue();
    }

    return result;
  }

  private Map<String, double[]> getVectorMap() {
    return getValue(SupportVectorMachineModelEvaluator.vectorCache);
  }

  private static Map<String, double[]> parseVectorDictionary(
      SupportVectorMachineModel supportVectorMachineModel) {
    VectorDictionary vectorDictionary = supportVectorMachineModel.getVectorDictionary();

    VectorFields vectorFields = vectorDictionary.getVectorFields();

    List<FieldRef> fieldRefs = vectorFields.getFieldRefs();

    Map<String, double[]> result = new LinkedHashMap<>();

    List<VectorInstance> vectorInstances = vectorDictionary.getVectorInstances();
    for (VectorInstance vectorInstance : vectorInstances) {
      String id = vectorInstance.getId();
      if (id == null) {
        throw new InvalidFeatureException(vectorInstance);
      }

      Array array = vectorInstance.getArray();
      RealSparseArray sparseArray = vectorInstance.getREALSparseArray();

      List<? extends Number> values;

      if (array != null && sparseArray == null) {
        values = ArrayUtil.asNumberList(array);
      } else if (array == null && sparseArray != null) {
        values = SparseArrayUtil.asNumberList(sparseArray);
      } else {
        throw new InvalidFeatureException(vectorInstance);
      } // End if

      if (fieldRefs.size() != values.size()) {
        throw new InvalidFeatureException(vectorInstance);
      }

      double[] vector = Doubles.toArray(values);

      result.put(id, vector);
    }

    return result;
  }

  private static final LoadingCache<SupportVectorMachineModel, Map<String, double[]>> vectorCache =
      CacheUtil.buildLoadingCache(
          new CacheLoader<SupportVectorMachineModel, Map<String, double[]>>() {

            @Override
            public Map<String, double[]> load(SupportVectorMachineModel supportVectorMachineModel) {
              return ImmutableMap.copyOf(parseVectorDictionary(supportVectorMachineModel));
            }
          });
}
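// A standalone sketch of the decision function that evaluateSupportVectorMachine
// implements, with the kernel fixed to a plain dot product for illustration (the real
// code delegates to KernelUtil.evaluate with the model's configured kernel):
//
//   f(x) = sum_i coefficient_i * K(x, supportVector_i) + absoluteValue
//
class SvmDecisionSketch {

  static double decide(
      double[] input, double[][] supportVectors, double[] coefficients, double absoluteValue) {
    double result = 0d;
    for (int i = 0; i < supportVectors.length; i++) {
      // A linear kernel stands in for KernelUtil.evaluate (an assumption of this sketch).
      result += coefficients[i] * dot(input, supportVectors[i]);
    }
    // Matches the "result += coefficients.getAbsoluteValue()" step above.
    return result + absoluteValue;
  }

  private static double dot(double[] a, double[] b) {
    double sum = 0d;
    for (int i = 0; i < a.length; i++) {
      sum += a[i] * b[i];
    }
    return sum;
  }
}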