@Override
public void setDocument(int docId) {
    script.setNextDocId(docId);
    // Run the script for this doc and normalize its result: null means missing,
    // arrays and Collections are unpacked element by element, anything else is
    // a single value. The values are then sorted into doc-values order.
    final Object value = script.run();
    if (value == null) {
        count = 0;
    } else if (value.getClass().isArray()) {
        count = Array.getLength(value);
        grow();
        for (int i = 0; i < count; ++i) {
            set(i, Array.get(value, i));
        }
    } else if (value instanceof Collection) {
        final Collection<?> coll = (Collection<?>) value;
        count = coll.size();
        grow();
        int i = 0;
        for (Object v : coll) {
            set(i++, v);
        }
    } else {
        count = 1;
        set(0, value);
    }
    sort();
}
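// A minimal, self-contained sketch of the normalization setDocument performs
// above, outside Elasticsearch. flatten() is a hypothetical helper, not ES API;
// the point is that java.lang.reflect.Array lets one loop handle any array
// type, primitive or object, before the values are sorted.
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class ScriptValueShapes {

    static List<Object> flatten(Object value) {
        List<Object> out = new ArrayList<Object>();
        if (value == null) {
            return out;                       // missing value: count = 0
        }
        if (value.getClass().isArray()) {
            int count = Array.getLength(value);
            for (int i = 0; i < count; i++) {
                out.add(Array.get(value, i)); // boxes primitives automatically
            }
        } else if (value instanceof Collection) {
            out.addAll((Collection<?>) value);
        } else {
            out.add(value);                   // scalar: a single value
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(flatten(null));                    // []
        System.out.println(flatten(new long[] {3L, 1L}));     // [3, 1]
        System.out.println(flatten(Arrays.asList("a", "b"))); // [a, b]
        System.out.println(flatten(42));                      // [42]
    }
}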
@Override
protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
    fieldData = (IntFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName);
    if (script != null) {
        script.setNextReader(reader);
    }
}
@Override
protected boolean matchDoc(int doc) {
    searchScript.setNextDocId(doc);
    Object val = searchScript.run();
    if (val == null) {
        return false;
    }
    if (val instanceof Boolean) {
        return (Boolean) val;
    }
    if (val instanceof Number) {
        return ((Number) val).longValue() != 0;
    }
    throw new ElasticsearchIllegalArgumentException("Can't handle type [" + val + "] in script filter");
}
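// A self-contained rehearsal of the Boolean/Number coercion matchDoc applies
// above. toMatch() is a hypothetical mirror, not Elasticsearch API; note the
// truncation gotcha: fractional results below 1 coerce to long 0 and do not match.
public class ScriptResultCoercion {

    static boolean toMatch(Object val) {
        if (val == null) {
            return false;                           // missing result never matches
        }
        if (val instanceof Boolean) {
            return (Boolean) val;                   // a Boolean is the decision itself
        }
        if (val instanceof Number) {
            return ((Number) val).longValue() != 0; // numbers match when non-zero
        }
        throw new IllegalArgumentException("Can't handle type [" + val + "] in script filter");
    }

    public static void main(String[] args) {
        System.out.println(toMatch(Boolean.TRUE)); // true
        System.out.println(toMatch(2L));           // true
        System.out.println(toMatch(0.4));          // false: longValue() truncates to 0
        System.out.println(toMatch(null));         // false
    }
}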
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
    searchScript.setNextReader(context);
    // LUCENE 4 UPGRADE: we can simply wrap this here since it is not cacheable and
    // if we are not top level we will get a null passed anyway
    return BitsFilteredDocIdSet.wrap(new ScriptDocSet(context.reader().maxDoc(), acceptDocs, searchScript), acceptDocs);
}
@Override
public void onValue(int docId, long value) {
    if (excluded != null && excluded.contains(value)) {
        return;
    }
    if (script != null) {
        script.setNextDocId(docId);
        script.setNextVar("term", value);
        Object scriptValue = script.run();
        if (scriptValue == null) {
            return;
        }
        if (scriptValue instanceof Boolean) {
            // a Boolean result can only veto the term
            if (!((Boolean) scriptValue)) {
                return;
            }
        } else {
            // any other result is treated as a Number and replaces the term
            value = ((Number) scriptValue).longValue();
        }
    }
    super.onValue(docId, value);
}
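// A sketch of the dual role the facet script plays in onValue above: a Boolean
// result only filters the term, any other result is read as a Number and
// replaces it. rewriteTerm() is a hypothetical mirror, not Elasticsearch API;
// null signals "skip this term".
public class TermScriptSketch {

    static Long rewriteTerm(long term, Object scriptValue) {
        if (scriptValue == null) {
            return null;                                  // skip the term
        }
        if (scriptValue instanceof Boolean) {
            return ((Boolean) scriptValue) ? term : null; // filter only, keep the term as-is
        }
        return ((Number) scriptValue).longValue();        // transform the term
    }

    public static void main(String[] args) {
        System.out.println(rewriteTerm(7L, Boolean.FALSE)); // null (filtered out)
        System.out.println(rewriteTerm(7L, Boolean.TRUE));  // 7 (kept)
        System.out.println(rewriteTerm(7L, 700));           // 700 (rewritten)
    }
}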
@Override
public void onValue(int docId, long value) {
    valueScript.setNextDocId(docId);
    // Round the timestamp into its (possibly timezone-aware) bucket, then fold
    // the script value into that bucket's running count/min/max/total stats.
    long time = tzRounding.calc(value);
    double scriptValue = valueScript.runAsDouble();
    InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
    if (entry == null) {
        entry = new InternalFullDateHistogramFacet.FullEntry(time, 1, scriptValue, scriptValue, 1, scriptValue);
        entries.put(time, entry);
    } else {
        entry.count++;
        entry.totalCount++;
        entry.total += scriptValue;
        if (scriptValue < entry.min) {
            entry.min = scriptValue;
        }
        if (scriptValue > entry.max) {
            entry.max = scriptValue;
        }
    }
}
@Override
public void onValue(int docId, double value) {
    valueScript.setNextDocId(docId);
    // Compute the histogram bucket for this value, then fold the script value
    // into that bucket's running count/min/max/total stats.
    long bucket = bucket(value, interval);
    double scriptValue = valueScript.runAsDouble();
    InternalFullHistogramFacet.FullEntry entry = entries.get(bucket);
    if (entry == null) {
        entry = new InternalFullHistogramFacet.FullEntry(bucket, 1, scriptValue, scriptValue, 1, scriptValue);
        entries.put(bucket, entry);
    } else {
        entry.count++;
        entry.totalCount++;
        entry.total += scriptValue;
        if (scriptValue < entry.min) {
            entry.min = scriptValue;
        }
        if (scriptValue > entry.max) {
            entry.max = scriptValue;
        }
    }
}
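// One plausible bucket() behind the histogram above: classic Elasticsearch
// facets keyed buckets by truncating value/interval back onto the interval
// grid. Treat the exact formula as an assumption, not a quote of the source.
public class HistogramBucketSketch {

    static long bucket(double value, long interval) {
        // Truncates toward zero, so this differs from a floor-based
        // rounding for negative values.
        return ((long) (value / interval)) * interval;
    }

    public static void main(String[] args) {
        System.out.println(bucket(7.3, 5));  // 5
        System.out.println(bucket(4.9, 5));  // 0
        System.out.println(bucket(10.0, 5)); // 10
    }
}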
@Override
public float score(int docId, float subQueryScore) {
    script.setNextDocId(docId);
    script.setNextScore(subQueryScore);
    return script.runAsFloat();
}
@Override
public void setNextReader(IndexReader reader) {
    script.setNextReader(reader);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
    keyValues = keyIndexFieldData.load(context).getLongValues();
    valueScript.setNextReader(context);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
    valueScript.setScorer(scorer);
}
@Override
public void setScorer(Scorer scorer) {
    script.setScorer(scorer);
}
@Override
protected GetResponse shardOperation(GetRequest request, int shardId) throws ElasticSearchException {
    IndexService indexService = indicesService.indexServiceSafe(request.index());
    BloomCache bloomCache = indexService.cache().bloomCache();
    IndexShard indexShard = indexService.shardSafe(shardId);
    DocumentMapper docMapper = indexService.mapperService().documentMapper(request.type());
    if (docMapper == null) {
        throw new TypeMissingException(new Index(request.index()), request.type());
    }

    if (request.refresh()) {
        indexShard.refresh(new Engine.Refresh(false));
    }

    Engine.Searcher searcher = indexShard.searcher();
    boolean exists = false;
    byte[] source = null;
    Map<String, GetField> fields = null;
    long version = -1;
    try {
        // resolve the live doc id and current version for this uid
        UidField.DocIdAndVersion docIdAndVersion = loadCurrentVersionFromIndex(bloomCache, searcher, docMapper.uidMapper().term(request.type(), request.id()));
        if (docIdAndVersion != null && docIdAndVersion.docId != Lucene.NO_DOC) {
            if (docIdAndVersion.version > 0) {
                version = docIdAndVersion.version;
            }
            exists = true;
            // load the requested stored fields (and _source) for the hit
            FieldSelector fieldSelector = buildFieldSelectors(docMapper, request.fields());
            if (fieldSelector != null) {
                Document doc = docIdAndVersion.reader.document(docIdAndVersion.docId, fieldSelector);
                source = extractSource(doc, docMapper);

                for (Object oField : doc.getFields()) {
                    Fieldable field = (Fieldable) oField;
                    String name = field.name();
                    Object value = null;
                    FieldMappers fieldMappers = docMapper.mappers().indexName(field.name());
                    if (fieldMappers != null) {
                        FieldMapper mapper = fieldMappers.mapper();
                        if (mapper != null) {
                            name = mapper.names().fullName();
                            value = mapper.valueForSearch(field);
                        }
                    }
                    if (value == null) {
                        if (field.isBinary()) {
                            value = field.getBinaryValue();
                        } else {
                            value = field.stringValue();
                        }
                    }
                    if (fields == null) {
                        fields = newHashMapWithExpectedSize(2);
                    }
                    GetField getField = fields.get(name);
                    if (getField == null) {
                        getField = new GetField(name, new ArrayList<Object>(2));
                        fields.put(name, getField);
                    }
                    getField.values().add(value);
                }
            }

            // now, go and do the script thingy if needed
            if (request.fields() != null && request.fields().length > 0) {
                SearchLookup searchLookup = null;
                for (String field : request.fields()) {
                    String script = null;
                    if (field.contains("_source.") || field.contains("doc[")) {
                        script = field;
                    } else {
                        FieldMappers x = docMapper.mappers().smartName(field);
                        if (x != null && !x.mapper().stored()) {
                            script = "_source." + x.mapper().names().fullName();
                        }
                    }
                    if (script != null) {
                        if (searchLookup == null) {
                            searchLookup = new SearchLookup(indexService.mapperService(), indexService.cache().fieldData());
                        }
                        SearchScript searchScript = scriptService.search(searchLookup, "mvel", script, null);
                        searchScript.setNextReader(docIdAndVersion.reader);
                        searchScript.setNextDocId(docIdAndVersion.docId);
                        try {
                            Object value = searchScript.run();
                            if (fields == null) {
                                fields = newHashMapWithExpectedSize(2);
                            }
                            GetField getField = fields.get(field);
                            if (getField == null) {
                                getField = new GetField(field, new ArrayList<Object>(2));
                                fields.put(field, getField);
                            }
                            getField.values().add(value);
                        } catch (RuntimeException e) {
                            if (logger.isTraceEnabled()) {
                                logger.trace("failed to execute get request script field [{}]", e, script);
                            }
                            // ignore
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new ElasticSearchException("Failed to get type [" + request.type() + "] and id [" + request.id() + "]", e);
    } finally {
        searcher.release();
    }
    return new GetResponse(request.index(), request.type(), request.id(), version, exists, source, fields);
}
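// A standalone rehearsal of the script-field resolution inside shardOperation
// above. resolveScript() is a hypothetical mirror, not Elasticsearch API:
// explicit "_source."/"doc[" syntax passes through as an mvel script, a mapped
// but unstored field falls back to extraction from _source (the real code uses
// the mapper's full name here), and a stored field needs no script at all.
public class ScriptFieldResolution {

    static String resolveScript(String field, boolean mapped, boolean stored) {
        if (field.contains("_source.") || field.contains("doc[")) {
            return field;              // already a script expression
        }
        if (mapped && !stored) {
            return "_source." + field; // unstored field: pull it out of _source
        }
        return null;                   // stored field: served directly from the doc
    }

    public static void main(String[] args) {
        System.out.println(resolveScript("doc['num'].value", true, false)); // passes through
        System.out.println(resolveScript("user.name", true, false));        // _source.user.name
        System.out.println(resolveScript("user.name", true, true));         // null
    }
}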
@Override
public void setScorer(Scorer scorer) throws IOException {
    if (script != null) {
        script.setScorer(scorer);
    }
}