@Override
public Query apply(Function input, Context context) throws IOException {
    Tuple<Reference, Literal> tuple = super.prepare(input);
    if (tuple == null) {
        return null;
    }
    return toQuery(tuple.v1(), tuple.v1().valueType(), tuple.v2().value());
}

public void testIsolatedPlugins() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    // these both share the same FakePlugin class
    Path pluginDir1 = createPluginDir(temp);
    String pluginZip1 = createPlugin("fake1", pluginDir1);
    installPlugin(pluginZip1, env.v1());
    Path pluginDir2 = createPluginDir(temp);
    String pluginZip2 = createPlugin("fake2", pluginDir2);
    installPlugin(pluginZip2, env.v1());
    assertPlugin("fake1", pluginDir1, env.v2());
    assertPlugin("fake2", pluginDir2, env.v2());
}

@Override
public int compare(Tuple<Text, Integer> o1, Tuple<Text, Integer> o2) {
    // order by count descending, then by text descending; fall back to identity
    // hash codes so that distinct tuples never compare as equal
    int cmp = o2.v2() - o1.v2();
    if (cmp != 0) {
        return cmp;
    }
    cmp = o2.v1().compareTo(o1.v1());
    if (cmp != 0) {
        return cmp;
    }
    return System.identityHashCode(o2) - System.identityHashCode(o1);
}

private ParsedDocument parseDocument(String index, String type, BytesReference doc) {
    MapperService mapperService = indexShard.mapperService();
    IndexService indexService = indexShard.indexService();

    // TODO: make parsing not dynamically create fields not in the original mapping
    Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(type);
    ParsedDocument parsedDocument =
        docMapper.v1().parse(source(doc).type(type).flyweight(true)).setMappingsModified(docMapper);
    if (parsedDocument.mappingsModified()) {
        mappingUpdatedAction.updateMappingOnMaster(index, docMapper.v1(), indexService.indexUUID());
    }
    return parsedDocument;
}

@Override
public void collect(int doc) throws IOException {
    BytesWrap parentId = typeCache.parentIdByDoc(doc);
    if (parentId == null) {
        return;
    }
    for (Tuple<IndexReader, IdReaderTypeCache> tuple : readers) {
        IndexReader indexReader = tuple.v1();
        IdReaderTypeCache idReaderTypeCache = tuple.v2();
        if (idReaderTypeCache == null) {
            // might be if we don't have that doc with that type in this reader
            continue;
        }
        int parentDocId = idReaderTypeCache.docById(parentId);
        if (parentDocId != -1 && !indexReader.isDeleted(parentDocId)) {
            OpenBitSet docIdSet = parentDocs().get(indexReader.getCoreCacheKey());
            if (docIdSet == null) {
                docIdSet = new OpenBitSet(indexReader.maxDoc());
                parentDocs.put(indexReader.getCoreCacheKey(), docIdSet);
            }
            docIdSet.fastSet(parentDocId);
            return;
        }
    }
}

public Query toQuery(Reference reference, DataType type, Object value) {
    String columnName = reference.info().ident().columnIdent().fqn();
    QueryBuilderHelper builder = QueryBuilderHelper.forType(type);
    Tuple<?, ?> bounds = boundsFunction.apply(value);
    assert bounds != null;
    return builder.rangeQuery(columnName, bounds.v1(), bounds.v2(), includeLower, includeUpper);
}

public void testPlatformBinPermissions() throws Exception {
    assumeTrue("posix filesystem", isPosix);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path platformDir = pluginDir.resolve("platform");
    Path platformNameDir = platformDir.resolve("linux-x86_64");
    Path platformBinDir = platformNameDir.resolve("bin");
    Files.createDirectories(platformBinDir);
    Path programFile = Files.createFile(platformBinDir.resolve("someprogram"));
    // a file created with Files.createFile() should not have execute permissions
    Set<PosixFilePermission> sourcePerms = Files.getPosixFilePermissions(programFile);
    assertFalse(sourcePerms.contains(PosixFilePermission.OWNER_EXECUTE));
    assertFalse(sourcePerms.contains(PosixFilePermission.GROUP_EXECUTE));
    assertFalse(sourcePerms.contains(PosixFilePermission.OTHERS_EXECUTE));
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
    // check that the installed program has execute permissions,
    // even though the one added to the plugin didn't
    Path installedPlatformBinDir =
        env.v2()
            .pluginsFile()
            .resolve("fake")
            .resolve("platform")
            .resolve("linux-x86_64")
            .resolve("bin");
    assertTrue(Files.isDirectory(installedPlatformBinDir));
    Path installedProgramFile = installedPlatformBinDir.resolve("someprogram");
    assertTrue(Files.isRegularFile(installedProgramFile));
    Set<PosixFilePermission> installedPerms = Files.getPosixFilePermissions(installedProgramFile);
    assertTrue(installedPerms.contains(PosixFilePermission.OWNER_EXECUTE));
    assertTrue(installedPerms.contains(PosixFilePermission.GROUP_EXECUTE));
    assertTrue(installedPerms.contains(PosixFilePermission.OTHERS_EXECUTE));
}

@Override
public Query apply(Function input, Context context) throws IOException {
    Tuple<Reference, Literal> prepare = prepare(input);
    if (prepare == null) {
        return null;
    }
    String fieldName = prepare.v1().info().ident().columnIdent().fqn();
    Object value = prepare.v2().value();

    // FIXME: nobody knows how Strings can arrive here
    if (value instanceof String) {
        if (isPcrePattern(value)) {
            return new RegexQuery(new Term(fieldName, (String) value));
        } else {
            return toLuceneRegexpQuery(fieldName, BytesRefs.toBytesRef(value), context);
        }
    }
    if (value instanceof BytesRef) {
        if (isPcrePattern(value)) {
            return new RegexQuery(new Term(fieldName, (BytesRef) value));
        } else {
            return toLuceneRegexpQuery(fieldName, (BytesRef) value, context);
        }
    }
    throw new IllegalArgumentException("Can only use ~ with patterns of type string");
}

public void testSomethingWorks() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}

private void clearSeenMappings(String index) {
    // clear seen mappings as well; removing through keySet() while iterating
    // assumes seenMappings is a concurrent map (a plain HashMap would throw
    // ConcurrentModificationException here)
    for (Tuple<String, String> tuple : seenMappings.keySet()) {
        if (tuple.v1().equals(index)) {
            seenMappings.remove(tuple);
        }
    }
}

public InternalNode(Settings pSettings, boolean loadConfigSettings) throws ElasticSearchException {
    Tuple<Settings, Environment> tuple =
        InternalSettingsPerparer.prepareSettings(pSettings, loadConfigSettings);

    ESLogger logger = Loggers.getLogger(Node.class, tuple.v1().get("name"));
    logger.info("{{}}[{}]: initializing ...", Version.full(), JvmInfo.jvmInfo().pid());

    this.pluginsService = new PluginsService(tuple.v1(), tuple.v2());
    this.settings = pluginsService.updatedSettings();
    this.environment = tuple.v2();

    ModulesBuilder modules = new ModulesBuilder();
    modules.add(new PluginsModule(settings, pluginsService));
    modules.add(new SettingsModule(settings));
    modules.add(new NodeModule(this));
    modules.add(new NetworkModule());
    modules.add(new NodeCacheModule(settings));
    modules.add(new ScriptModule());
    modules.add(new JmxModule(settings));
    modules.add(new EnvironmentModule(environment));
    modules.add(new NodeEnvironmentModule());
    modules.add(new ClusterNameModule(settings));
    modules.add(new ThreadPoolModule(settings));
    modules.add(new TimerModule());
    modules.add(new DiscoveryModule(settings));
    modules.add(new ClusterModule(settings));
    modules.add(new RestModule(settings));
    modules.add(new TransportModule(settings));
    if (settings.getAsBoolean("http.enabled", true)) {
        modules.add(new HttpServerModule(settings));
    }
    modules.add(new RiversModule(settings));
    modules.add(new IndicesModule(settings));
    modules.add(new SearchModule());
    modules.add(new TransportActionModule());
    modules.add(new MonitorModule(settings));
    modules.add(new GatewayModule(settings));
    modules.add(new NodeClientModule());

    injector = modules.createInjector();
    client = injector.getInstance(Client.class);

    logger.info("{{}}[{}]: initialized", Version.full(), JvmInfo.jvmInfo().pid());
}

@Override
public Query apply(Function input, Context context) {
    Tuple<Reference, Literal> tuple = prepare(input);
    if (tuple == null) {
        return null;
    }
    return toQuery(tuple.v1(), tuple.v2().value(), context);
}

public void testPluginsDirMissing() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Files.delete(env.v2().pluginsFile());
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}

@Override
public List<Query> buildGroupedQueries(
    MultiMatchQueryBuilder.Type type,
    Map<String, Float> fieldNames,
    Object value,
    String minimumShouldMatch)
    throws IOException {
    Map<Analyzer, List<FieldAndFieldType>> groups = new HashMap<>();
    List<Tuple<String, Float>> missing = new ArrayList<>();
    for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
        String name = entry.getKey();
        MappedFieldType fieldType = context.fieldMapper(name);
        if (fieldType != null) {
            Analyzer actualAnalyzer = getAnalyzer(fieldType);
            name = fieldType.name();
            if (!groups.containsKey(actualAnalyzer)) {
                groups.put(actualAnalyzer, new ArrayList<>());
            }
            Float boost = entry.getValue();
            boost = boost == null ? Float.valueOf(1.0f) : boost;
            groups.get(actualAnalyzer).add(new FieldAndFieldType(name, fieldType, boost));
        } else {
            missing.add(new Tuple<>(name, entry.getValue()));
        }
    }
    List<Query> queries = new ArrayList<>();
    for (Tuple<String, Float> tuple : missing) {
        Query q =
            parseGroup(type.matchQueryType(), tuple.v1(), tuple.v2(), value, minimumShouldMatch);
        if (q != null) {
            queries.add(q);
        }
    }
    for (List<FieldAndFieldType> group : groups.values()) {
        if (group.size() > 1) {
            blendedFields = new FieldAndFieldType[group.size()];
            int i = 0;
            for (FieldAndFieldType fieldAndFieldType : group) {
                blendedFields[i++] = fieldAndFieldType;
            }
        } else {
            blendedFields = null;
        }
        /*
         * We have to pick some field to pass through the superclass so
         * we just pick the first field. It shouldn't matter because
         * fields are already grouped by their analyzers/types.
         */
        String representativeField = group.get(0).field;
        Query q =
            parseGroup(type.matchQueryType(), representativeField, 1f, value, minimumShouldMatch);
        if (q != null) {
            queries.add(q);
        }
    }
    return queries.isEmpty() ? null : queries;
}

public void testExistingPlugin() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
    assertInstallCleaned(env.v2());
}

public void testBin() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path binDir = pluginDir.resolve("bin");
    Files.createDirectory(binDir);
    Files.createFile(binDir.resolve("somescript"));
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}

public void remove(ClusterStateListener listener) {
    clusterStateListeners.remove(listener);
    // also cancel and remove any pending notification timeouts registered for this listener
    for (Iterator<Tuple<Timeout, NotifyTimeout>> it = onGoingTimeouts.iterator(); it.hasNext(); ) {
        Tuple<Timeout, NotifyTimeout> tuple = it.next();
        if (tuple.v2().listener.equals(listener)) {
            tuple.v1().cancel();
            it.remove();
        }
    }
}

public void testInstallMisspelledOfficialPlugins() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);

    UserException e = expectThrows(UserException.class, () -> installPlugin("xpack", env.v1()));
    assertThat(e.getMessage(), containsString("Unknown plugin xpack, did you mean [x-pack]?"));

    e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1()));
    assertThat(
        e.getMessage(),
        containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?"));

    e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1()));
    assertThat(
        e.getMessage(),
        containsString(
            "Unknown plugin repository, did you mean any of [repository-s3, repository-gcs]?"));

    e = expectThrows(UserException.class, () -> installPlugin("unknown_plugin", env.v1()));
    assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin"));
}

public void testZipRelativeOutsideEntryName() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path zip = createTempDir().resolve("broken.zip");
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
        stream.putNextEntry(new ZipEntry("elasticsearch/../blah"));
    }
    String pluginZip = zip.toUri().toURL().toString();
    IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory"));
}

public void testMissingDescriptor() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Files.createFile(pluginDir.resolve("fake.yml"));
    String pluginZip = writeZip(pluginDir, "elasticsearch");
    NoSuchFileException e =
        expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties"));
    assertInstallCleaned(env.v2());
}

public void testConfig() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path configDir = pluginDir.resolve("config");
    Files.createDirectory(configDir);
    Files.createFile(configDir.resolve("custom.yaml"));
    String pluginZip = createPlugin("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}

public void testSpaceInUrl() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPlugin("fake", pluginDir);
    Path pluginZipWithSpaces = createTempFile("foo bar", ".zip");
    try (InputStream in = new URL(pluginZip).openStream()) {
        Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING);
    }
    installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1());
    assertPlugin("fake", pluginDir, env.v2());
}

private void registerStaticColumns() {
    for (Tuple<String, DataType> column : staticColumns) {
        ReferenceInfo info =
            new ReferenceInfo(
                new ReferenceIdent(ident(), column.v1(), null), RowGranularity.DOC, column.v2());
        if (info.ident().isColumn()) {
            columns.add(info);
        }
        INFOS.put(info.ident().columnIdent(), info);
    }
}

@Override
public Query apply(Function input, Context context) {
    Tuple<Reference, Literal> tuple = super.prepare(input);
    if (tuple == null) {
        return null;
    }
    Reference reference = tuple.v1();
    Literal literal = tuple.v2();
    String columnName = reference.info().ident().columnIdent().fqn();
    if (DataTypes.isCollectionType(reference.valueType())
            && DataTypes.isCollectionType(literal.valueType())) {

        // create boolean filter with term filters to pre-filter the result
        // before applying the functionQuery.
        BooleanFilter boolTermsFilter = new BooleanFilter();
        DataType type = literal.valueType();
        while (DataTypes.isCollectionType(type)) {
            type = ((CollectionType) type).innerType();
        }
        QueryBuilderHelper builder = QueryBuilderHelper.forType(type);
        Object value = literal.value();
        buildTermsQuery(boolTermsFilter, value, columnName, builder);

        if (boolTermsFilter.clauses().isEmpty()) {
            // all values are null...
            return genericFunctionQuery(input, context);
        }

        // wrap boolTermsFilter and genericFunction filter in an additional BooleanFilter
        // to control the ordering of the filters:
        // termsFilter is applied first, the more expensive genericFunctionFilter afterwards
        BooleanFilter filterClauses = new BooleanFilter();
        filterClauses.add(boolTermsFilter, BooleanClause.Occur.MUST);
        filterClauses.add(genericFunctionFilter(input, context), BooleanClause.Occur.MUST);
        return new FilteredQuery(Queries.newMatchAllQuery(), filterClauses);
    }
    QueryBuilderHelper builder = QueryBuilderHelper.forType(tuple.v1().valueType());
    return builder.eq(columnName, tuple.v2().value());
}

public void testPluginsDirReadOnly() throws Exception {
    assumeTrue("posix and filesystem", isPosix && isReal);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    try (PosixPermissionsResetter pluginsAttrs =
            new PosixPermissionsResetter(env.v2().pluginsFile())) {
        pluginsAttrs.setPermissions(new HashSet<>());
        String pluginZip = createPlugin("fake", pluginDir);
        IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1()));
        assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString()));
    }
    assertInstallCleaned(env.v2());
}

private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    // if batch is enabled, we also want to add a security policy
    String pluginZip = createPlugin("fake", pluginDir, isBatch);

    Map<String, String> settings = new HashMap<>();
    settings.put("path.home", env.v1().toString());
    new InstallPluginCommand() {
        @Override
        void jarHellCheck(Path candidate, Path pluginsDir) throws Exception {}
    }.execute(terminal, pluginZip, isBatch, settings);
}

public void testJarHell() throws Exception {
    // jar hell test needs a real filesystem
    assumeTrue("real filesystem", isReal);
    Tuple<Path, Environment> environment = createEnv(fs, temp);
    Path pluginDirectory = createPluginDir(temp);
    writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin");
    String pluginZip = createPlugin("fake", pluginDirectory); // adds plugin.jar with FakePlugin
    IllegalStateException e =
        expectThrows(
            IllegalStateException.class, () -> installPlugin(pluginZip, environment.v1(), true));
    assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
    assertInstallCleaned(environment.v2());
}

public void testIndexWithShadowReplicasCleansUp() throws Exception {
    Path dataPath = createTempDir();
    Settings nodeSettings = nodeSettings(dataPath);

    final int nodeCount = randomIntBetween(2, 5);
    logger.info("--> starting {} nodes", nodeCount);
    final List<String> nodes = internalCluster().startNodesAsync(nodeCount, nodeSettings).get();
    final String IDX = "test";
    final Tuple<Integer, Integer> numPrimariesAndReplicas = randomPrimariesAndReplicas(nodeCount);
    final int numPrimaries = numPrimariesAndReplicas.v1();
    final int numReplicas = numPrimariesAndReplicas.v2();
    logger.info(
        "--> creating index {} with {} primary shards and {} replicas",
        IDX,
        numPrimaries,
        numReplicas);

    Settings idxSettings =
        Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas)
            .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString())
            .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
            .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
            .build();

    prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
    ensureGreen(IDX);

    client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
    client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
    flushAndRefresh(IDX);

    GetResponse gResp1 = client().prepareGet(IDX, "doc", "1").get();
    GetResponse gResp2 = client().prepareGet(IDX, "doc", "2").get();
    assertThat(gResp1.getSource().get("foo"), equalTo("bar"));
    assertThat(gResp2.getSource().get("foo"), equalTo("bar"));

    logger.info("--> performing query");
    SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).get();
    assertHitCount(resp, 2);

    logger.info("--> deleting index " + IDX);
    assertAcked(client().admin().indices().prepareDelete(IDX));
    assertAllIndicesRemovedAndDeletionCompleted(
        internalCluster().getInstances(IndicesService.class));
    assertPathHasBeenCleared(dataPath);
    // TODO: uncomment the test below when https://github.com/elastic/elasticsearch/issues/17695
    // is resolved.
    // assertIndicesDirsDeleted(nodes);
}

@Override
protected void doStop() throws ElasticSearchException {
    this.reconnectToNodes.cancel(true);
    for (Tuple<Timeout, NotifyTimeout> onGoingTimeout : onGoingTimeouts) {
        onGoingTimeout.v1().cancel();
        onGoingTimeout.v2().listener.onClose();
    }
    updateTasksExecutor.shutdown();
    try {
        updateTasksExecutor.awaitTermination(10, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        // ignore
    }
}

public void testDeletingClosedIndexRemovesFiles() throws Exception {
    Path dataPath = createTempDir();
    Settings nodeSettings = nodeSettings(dataPath.getParent());

    final int numNodes = randomIntBetween(2, 5);
    logger.info("--> starting {} nodes", numNodes);
    final List<String> nodes = internalCluster().startNodesAsync(numNodes, nodeSettings).get();
    final String IDX = "test";
    final Tuple<Integer, Integer> numPrimariesAndReplicas = randomPrimariesAndReplicas(numNodes);
    final int numPrimaries = numPrimariesAndReplicas.v1();
    final int numReplicas = numPrimariesAndReplicas.v2();
    logger.info(
        "--> creating index {} with {} primary shards and {} replicas",
        IDX,
        numPrimaries,
        numReplicas);
    assert numPrimaries > 0;
    assert numReplicas >= 0;

    Settings idxSettings =
        Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas)
            .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString())
            .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
            .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
            .build();

    prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
    ensureGreen(IDX);

    int docCount = randomIntBetween(10, 100);
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (int i = 0; i < docCount; i++) {
        builders.add(client().prepareIndex(IDX, "doc", i + "").setSource("foo", "bar"));
    }
    indexRandom(true, true, true, builders);
    flushAndRefresh(IDX);

    logger.info("--> closing index {}", IDX);
    client().admin().indices().prepareClose(IDX).get();
    ensureGreen(IDX);

    logger.info("--> deleting closed index");
    client().admin().indices().prepareDelete(IDX).get();
    assertAllIndicesRemovedAndDeletionCompleted(
        internalCluster().getInstances(IndicesService.class));
    assertPathHasBeenCleared(dataPath);
    assertIndicesDirsDeleted(nodes);
}