
Commit

Merge branch 'main' into custom-roles-poc-es
elasticmachine authored Feb 13, 2024
2 parents f75d118 + e6e14d7 commit 1a4bd0d
Showing 151 changed files with 2,513 additions and 1,875 deletions.
@@ -78,7 +78,7 @@ public void apply(Project project) {
copy.filter(new Transformer<String, String>() {
@Override
public String transform(String s) {
return s.replaceAll("@@LICENSE_HEADER_TEXT@@", finalLicenseHeader);
return s.replace("@@LICENSE_HEADER_TEXT@@", finalLicenseHeader);
}
});
});
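A note on the pattern that recurs throughout this commit: String.replaceAll compiles its first argument as a regular expression and treats backslashes and '$' in the replacement as escapes and group references, while String.replace substitutes literal text. Since @@LICENSE_HEADER_TEXT@@ is a fixed token, replace is the safer and cheaper call. The minimal sketch below, with an invented header string (not one from the build plugin), shows the difference.

public class ReplaceVsReplaceAll {
    public static void main(String[] args) {
        String template = "// @@LICENSE_HEADER_TEXT@@";
        String header = "Copyright $OWNER 2024"; // hypothetical header containing '$'

        // Literal substitution: '$' in the replacement is copied through untouched.
        System.out.println(template.replace("@@LICENSE_HEADER_TEXT@@", header));

        // Regex substitution: "$OWNER" is parsed as a group reference and fails.
        try {
            System.out.println(template.replaceAll("@@LICENSE_HEADER_TEXT@@", header));
        } catch (IllegalArgumentException e) {
            System.out.println("replaceAll rejected the replacement: " + e.getMessage());
        }
    }
}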
@@ -123,8 +123,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
tasks.register('buildDependencyArtifacts') {
group = 'ide'
description = 'Builds artifacts needed as dependency for IDE modules'
dependsOn([':client:rest-high-level:shadowJar',
':plugins:repository-hdfs:hadoop-client-api:shadowJar',
dependsOn([':plugins:repository-hdfs:hadoop-client-api:shadowJar',
':x-pack:plugin:esql:compute:ann:jar',
':x-pack:plugin:esql:compute:gen:jar',
':server:generateModulesList',
@@ -172,7 +172,7 @@ public void setMappings(LinkedHashMap<String, String> mappings) {
*/
protected String createURL(final String group, final String name, final String version) {
final String baseURL = "https://repo1.maven.org/maven2";
return baseURL + "/" + group.replaceAll("\\.", "/") + "/" + name + "/" + version;
return baseURL + "/" + group.replace('.', '/') + "/" + name + "/" + version;
}

/**
@@ -107,8 +107,7 @@ private void configureDependency(Project project, boolean shadowed, ProjectDependen
// Link to non-shadowed dependant projects
javadoc.dependsOn(upstreamProject.getPath() + ":javadoc");
String externalLinkName = upstreamProject.getExtensions().getByType(BasePluginExtension.class).getArchivesName().get();
String artifactPath = dep.getGroup().replaceAll("\\.", "/") + '/' + externalLinkName.replaceAll("\\.", "/") + '/' + dep
.getVersion();
String artifactPath = dep.getGroup().replace('.', '/') + '/' + externalLinkName.replace('.', '/') + '/' + dep.getVersion();
var options = (StandardJavadocDocletOptions) javadoc.getOptions();
options.linksOffline(
artifactHost(project) + "/javadoc/" + artifactPath,
@@ -330,7 +330,7 @@ private static String generatedAnchor(String input) {
final List<String> excludes = List.of("the", "is", "a", "and", "now", "that");

final String[] words = input.toLowerCase(Locale.ROOT)
.replaceAll("'", "")
.replace("'", "")
.replaceAll("[^\\w]+", "_")
.replaceFirst("^_+", "")
.replaceFirst("_+$", "")
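The generatedAnchor hunk keeps replaceAll where a real pattern is needed ("[^\\w]+", "^_+", "_+$") and switches to replace only for the literal apostrophe. A rough walk-through with a made-up heading (not a title from the repository) is sketched below; the excludes list defined at the top of the method is then applied to the resulting words.

import java.util.Arrays;
import java.util.List;
import java.util.Locale;

public class AnchorSketch {
    public static void main(String[] args) {
        String input = "Now that's a Bug-Fix!";              // hypothetical section title
        String[] words = input.toLowerCase(Locale.ROOT)
            .replace("'", "")                                 // literal: drop apostrophes -> "now thats a bug-fix!"
            .replaceAll("[^\\w]+", "_")                       // regex: collapse non-word runs -> "now_thats_a_bug_fix_"
            .replaceFirst("^_+", "")                          // trim leading underscores
            .replaceFirst("_+$", "")                          // trim trailing underscores -> "now_thats_a_bug_fix"
            .split("_");
        System.out.println(Arrays.asList(words));             // [now, thats, a, bug, fix]

        // Stop words such as "now" and "a" would then be dropped before joining the anchor.
        List<String> excludes = List.of("the", "is", "a", "and", "now", "that");
        System.out.println(Arrays.stream(words).filter(w -> excludes.contains(w) == false).toList()); // [thats, bug, fix]
    }
}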
@@ -186,7 +186,7 @@ public static void extendSourceSet(Project project, String parentSourceSetName,
* task execution time.
*/
public static String getProjectPathFromTask(String taskPath) {
int lastDelimiterIndex = taskPath.lastIndexOf(":");
int lastDelimiterIndex = taskPath.lastIndexOf(':');
return lastDelimiterIndex == 0 ? ":" : taskPath.substring(0, lastDelimiterIndex);
}

@@ -16,16 +16,16 @@ public class TestUtils {

public static String normalizeString(String input, File projectRootDir) {
try {
String cannonicalNormalizedPathPrefix = projectRootDir.getCanonicalPath().replace("\\", "/");
String normalizedPathPrefix = projectRootDir.getAbsolutePath().replace("\\", "/");
String canonicalNormalizedPathPrefix = projectRootDir.getCanonicalPath().replace('\\', '/');
String normalizedPathPrefix = projectRootDir.getAbsolutePath().replace('\\', '/');
return input.lines()
.filter(it -> it.startsWith("Picked up JAVA_TOOL_OPTIONS") == false)
.map(it -> it.replace("\\", "/"))
.map(it -> it.replace('\\', '/'))
.map(it -> it.replaceAll("\\d+\\.\\d\\ds", "0.00s"))
.map(it -> it.replaceAll(cannonicalNormalizedPathPrefix, "."))
.map(it -> it.replaceAll(normalizedPathPrefix, "."))
.map(it -> it.replaceAll("file:/./", "file:./"))
.map(it -> it.replaceAll("Gradle Test Executor \\d", "Gradle Test Executor 1"))
.map(it -> it.replace(canonicalNormalizedPathPrefix, "."))
.map(it -> it.replace(normalizedPathPrefix, "."))
.map(it -> it.replace("file:/./", "file:./"))
.map(it -> it.replaceAll("Gradle Test Executor \\d+", "Gradle Test Executor 1"))
.collect(Collectors.joining("\n"));
} catch (IOException e) {
throw new RuntimeException(e);
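In normalizeString, the calls that genuinely need regular expressions ("\\d+\\.\\d\\ds", "Gradle Test Executor \\d+") stay on replaceAll, while the path-prefix rewrites move to literal replace. That matters because a filesystem path fed to replaceAll is interpreted as a pattern, so any '.' in it matches arbitrary characters. A small sketch with invented paths:

public class PathPrefixSketch {
    public static void main(String[] args) {
        String prefix = "/home/user/elasticsearch.build";           // hypothetical project root
        String line = "/home/user/elasticsearchXbuild/output.txt";  // a different directory

        // As a regex, the '.' in the prefix matches the 'X', so the wrong path gets rewritten.
        System.out.println(line.replaceAll(prefix, "."));           // "./output.txt"

        // As literal text, only an exact occurrence of the prefix would be replaced.
        System.out.println(line.replace(prefix, "."));              // unchanged
    }
}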
68 changes: 0 additions & 68 deletions client/rest-high-level/build.gradle

This file was deleted.

This file was deleted.

@@ -159,10 +159,10 @@ public class RestClient implements Closeable {
public static RestClientBuilder builder(String cloudId) {
// there is an optional first portion of the cloudId that is a human readable string, but it is not used.
if (cloudId.contains(":")) {
if (cloudId.indexOf(":") == cloudId.length() - 1) {
if (cloudId.indexOf(':') == cloudId.length() - 1) {
throw new IllegalStateException("cloudId " + cloudId + " must begin with a human readable identifier followed by a colon");
}
cloudId = cloudId.substring(cloudId.indexOf(":") + 1);
cloudId = cloudId.substring(cloudId.indexOf(':') + 1);
}

String decoded = new String(Base64.getDecoder().decode(cloudId), UTF_8);
3 changes: 0 additions & 3 deletions docs/Versions.asciidoc
@@ -46,9 +46,6 @@ ifeval::["{release-state}"!="unreleased"]
:version_qualified: {bare_version}
endif::[]

:javadoc-license: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/license
:javadoc-watcher: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/watcher

///////
Shared attribute values are pulled from elastic/docs
///////
6 changes: 6 additions & 0 deletions docs/changelog/105196.yaml
@@ -0,0 +1,6 @@
pr: 105196
summary: Adding a custom exception for problems with the graph of pipelines to be
applied to a document
area: Ingest Node
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/105265.yaml
@@ -0,0 +1,5 @@
pr: 105265
summary: Improving the performance of the ingest simulate verbose API
area: "Ingest Node"
type: enhancement
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/105325.yaml
@@ -0,0 +1,6 @@
pr: 105325
summary: "ESQL: Fix Analyzer to not interpret escaped * as a pattern"
area: ES|QL
type: bug
issues:
- 104955
5 changes: 5 additions & 0 deletions docs/changelog/105371.yaml
@@ -0,0 +1,5 @@
pr: 105371
summary: "ESQL: Add plan consistency verification after each optimizer"
area: ES|QL
type: enhancement
issues: []
5 changes: 3 additions & 2 deletions docs/reference/alias.asciidoc
@@ -240,8 +240,9 @@ POST _aliases
include::{es-repo-dir}/indices/aliases.asciidoc[tag=write-index-defaults]

TIP: We recommend using data streams to store append-only time series data. If
you frequently update or delete existing time series data, use an index alias
with a write index instead. See
you need to update or delete existing time series data, you can perform update or delete operations
directly on the data stream backing index. If you frequently send multiple documents using the same
`_id` expecting last-write-wins, you may want to use an index alias with a write index instead. See
<<manage-time-series-data-without-data-streams>>.

[discrete]
10 changes: 5 additions & 5 deletions docs/reference/ilm/ilm-tutorial.asciidoc
@@ -14,11 +14,11 @@ requirements for your newest data, control costs over time, enforce retention po
and still get the most out of your data.

TIP: Data streams are best suited for
<<data-streams-append-only,append-only>> use cases. If you need to frequently
update or delete existing documents across multiple indices, we recommend
using an index alias and index template instead. You can still use ILM to
manage and rollover the alias's indices. Skip to
<<manage-time-series-data-without-data-streams>>.
<<data-streams-append-only,append-only>> use cases. If you need to update or delete existing time
series data, you can perform update or delete operations directly on the data stream backing index.
If you frequently send multiple documents using the same `_id` expecting last-write-wins, you may
want to use an index alias with a write index instead. You can still use ILM to manage and rollover
the alias's indices. Skip to <<manage-time-series-data-without-data-streams>>.

To automate rollover and management of a data stream with {ilm-init}, you:

@@ -167,19 +167,13 @@ static void insertNode(String filter, BuildNode node) {
}

if (splitPosition > 0) {
String field = findEscapes
? filter.substring(0, splitPosition).replaceAll("\\\\.", ".")
: filter.substring(0, splitPosition);
BuildNode child = node.children.get(field);
if (child == null) {
child = new BuildNode(false);
node.children.put(field, child);
}
String field = findEscapes ? filter.substring(0, splitPosition).replace("\\.", ".") : filter.substring(0, splitPosition);
BuildNode child = node.children.computeIfAbsent(field, f -> new BuildNode(false));
if (false == child.isFinalNode) {
insertNode(filter.substring(splitPosition + 1), child);
}
} else {
String field = findEscapes ? filter.replaceAll("\\\\.", ".") : filter;
String field = findEscapes ? filter.replace("\\.", ".") : filter;
node.children.put(field, new BuildNode(true));
}
}
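The insertNode change also folds the get / null-check / put idiom into Map.computeIfAbsent, which creates and stores the child only when the key is absent and otherwise returns the existing value. A minimal illustration follows; the map and value types are placeholders, not the BuildNode tree from the commit.

import java.util.HashMap;
import java.util.Map;

public class ComputeIfAbsentSketch {
    public static void main(String[] args) {
        Map<String, StringBuilder> children = new HashMap<>();

        // Old shape: look up, create on miss, put back.
        StringBuilder child = children.get("field");
        if (child == null) {
            child = new StringBuilder();
            children.put("field", child);
        }

        // New shape: one call; the lambda runs only when the key is missing.
        StringBuilder same = children.computeIfAbsent("field", f -> new StringBuilder());
        System.out.println(child == same); // true -> the existing child is reused
    }
}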
@@ -202,10 +202,8 @@ public Object getProperty(List<String> path) {
return emptyMap();
}
final String field = path.get(0)
.replaceAll("^\"", "") // remove leading "
.replaceAll("^'", "") // remove leading '
.replaceAll("\"$", "") // remove trailing "
.replaceAll("'$", ""); // remove trailing '
.replaceAll("^[\"']+", "") // remove leading " and '
.replaceAll("[\"']+$", ""); // remove trailing " and '
final String element = path.get(1);
return switch (element) {
case "counts" -> results.getFieldCount(field);
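In getProperty, four anchored replaceAll calls that each stripped a single leading or trailing quote are collapsed into two character-class patterns, "^[\"']+" and "[\"']+$", which also handle a mix of quote styles in one pass. A quick example with a made-up field name:

public class QuoteTrimSketch {
    public static void main(String[] args) {
        String raw = "\"'some.field'\"";   // leading and trailing " and '
        String cleaned = raw
            .replaceAll("^[\"']+", "")     // strip any run of leading quotes
            .replaceAll("[\"']+$", "");    // strip any run of trailing quotes
        System.out.println(cleaned);       // some.field
    }
}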
@@ -443,7 +443,7 @@ private static CharacterRunAutomaton buildAutomaton(List<String> includePatterns

private static Automaton patternsToAutomaton(List<String> patterns) {
final List<Automaton> automata = patterns.stream().map(s -> {
final String regex = s.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");
final String regex = s.replace(".", "\\.").replace("*", ".*");
return new RegExp(regex).toAutomaton();
}).toList();
if (automata.isEmpty()) {
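patternsToAutomaton turns simple wildcard patterns into regexes before building automata; escaping the literal dots first and expanding '*' second is what keeps the inserted ".*" unescaped. A sketch with a hypothetical include pattern:

public class WildcardToRegexSketch {
    public static void main(String[] args) {
        String pattern = "metrics.*.count";                      // hypothetical include pattern
        String regex = pattern.replace(".", "\\.")               // escape literal dots first
            .replace("*", ".*");                                 // then expand the wildcard
        System.out.println(regex);                               // metrics\..*\.count
        System.out.println("metrics.cpu.count".matches(regex));  // true
        System.out.println("metricsXcpuXcount".matches(regex));  // false
        // Doing the replacements in the opposite order would escape the '.' that
        // the wildcard expansion just introduced, breaking the pattern.
    }
}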
@@ -39,7 +39,7 @@
* This test is meant to make sure that we can handle ingesting a document with a reasonably large number of nested pipeline processors.
*/
public class ManyNestedPipelinesIT extends ESIntegTestCase {
private final int manyPipelinesCount = randomIntBetween(2, 20);
private final int manyPipelinesCount = randomIntBetween(2, 50);

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
@@ -91,7 +91,7 @@ private DomainInfo getRegisteredDomain(IngestDocument d) {
}
return null;
}
if (registeredDomain.indexOf(".") == -1) {
if (registeredDomain.indexOf('.') == -1) {
// we have domain with no matching public suffix, but "." in it
return null;
}
@@ -117,7 +117,7 @@ private DomainInfo(String eTLD) {
}

private DomainInfo(String registeredDomain, String domain) {
int index = registeredDomain.indexOf(".") + 1;
int index = registeredDomain.indexOf('.') + 1;
if (index > 0 && index < registeredDomain.length()) {
this.domain = domain;
this.eTLD = registeredDomain.substring(index);
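In the registered-domain processor, the eTLD is taken as everything after the first label of the registered domain, which is why a single indexOf('.') + 1 plus a bounds check is enough. Roughly, with an invented domain (not test data from the commit):

public class RegisteredDomainSketch {
    public static void main(String[] args) {
        String registeredDomain = "example.co.uk";                  // hypothetical registered domain
        int index = registeredDomain.indexOf('.') + 1;
        if (index > 0 && index < registeredDomain.length()) {
            System.out.println(registeredDomain.substring(index));  // co.uk -> the public suffix (eTLD)
        }
    }
}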
@@ -155,7 +155,7 @@ private static Map<String, Object> getUriParts(URI uri, URL fallbackUrl) {
if (userInfo != null) {
uriParts.put("user_info", userInfo);
if (userInfo.contains(":")) {
int colonIndex = userInfo.indexOf(":");
int colonIndex = userInfo.indexOf(':');
uriParts.put("username", userInfo.substring(0, colonIndex));
uriParts.put("password", colonIndex < userInfo.length() ? userInfo.substring(colonIndex + 1) : "");
}
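For uri_parts, only the first colon in the user-info component separates username from password, so a password containing further colons survives intact. A simplified sketch with invented credentials (the real code also guards the substring bounds):

public class UserInfoSplitSketch {
    public static void main(String[] args) {
        String userInfo = "elastic:pa:ss:word";                 // hypothetical user-info component
        int colonIndex = userInfo.indexOf(':');
        System.out.println(userInfo.substring(0, colonIndex));  // elastic
        System.out.println(userInfo.substring(colonIndex + 1)); // pa:ss:word -- later colons stay in the password
    }
}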
@@ -67,6 +67,9 @@ teardown:

---
"Test Pipeline Processor with Circular Pipelines":
- skip:
version: " - 8.12.99"
reason: exception class changed in 8.13.0
- do:
ingest.put_pipeline:
id: "outer"
@@ -100,13 +103,13 @@ teardown:
- match: { acknowledged: true }

- do:
catch: /illegal_state_exception/
catch: /graph_structure_exception/
index:
index: test
id: "1"
pipeline: "outer"
body: {}
- match: { error.root_cause.0.type: "illegal_state_exception" }
- match: { error.root_cause.0.type: "graph_structure_exception" }
- match: { error.root_cause.0.reason: "Cycle detected for pipeline: outer" }

---
@@ -121,7 +121,7 @@ public JavaClassFilesystemResolver(Path root, Map<String, Path> pkgRoots) {
public InputStream openClassFile(String className) throws IOException {
// TODO(stu): handle primitives & not stdlib
if (className.contains(".")) {
int dollarPosition = className.indexOf("$");
int dollarPosition = className.indexOf('$');
if (dollarPosition >= 0) {
className = className.substring(0, dollarPosition);
}
@@ -131,10 +131,10 @@ public InputStream openClassFile(String className) throws IOException {
Path classPath = root.resolve(path + ".java");
return new FileInputStream(classPath.toFile());
} else {
String packageName = className.substring(0, className.lastIndexOf("."));
String packageName = className.substring(0, className.lastIndexOf('.'));
Path packageRoot = pkgRoots.get(packageName);
if (packageRoot != null) {
Path classPath = packageRoot.resolve(className.substring(className.lastIndexOf(".") + 1) + ".java");
Path classPath = packageRoot.resolve(className.substring(className.lastIndexOf('.') + 1) + ".java");
return new FileInputStream(classPath.toFile());
}
}
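openClassFile derives a source path from a fully qualified class name: nested classes are cut at the first '$', and lastIndexOf('.') splits the package from the simple name so the file can be resolved under a per-package root. Approximately, with an illustrative class name:

public class ClassNameToPathSketch {
    public static void main(String[] args) {
        String className = "org.example.parser.Walker$InnerVisitor";  // hypothetical class name
        int dollarPosition = className.indexOf('$');
        if (dollarPosition >= 0) {
            className = className.substring(0, dollarPosition);       // nested classes live in the outer class's file
        }
        String packageName = className.substring(0, className.lastIndexOf('.'));
        String simpleName = className.substring(className.lastIndexOf('.') + 1);
        System.out.println(packageName);                               // org.example.parser
        System.out.println(simpleName + ".java");                      // Walker.java, resolved under the package root
    }
}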
@@ -130,7 +130,7 @@ private RatedRequest(
Set<DocumentKey> docKeys = new HashSet<>();
for (RatedDocument doc : ratedDocs) {
if (docKeys.add(doc.getKey()) == false) {
String docKeyToString = doc.getKey().toString().replaceAll("\n", "").replaceAll(" ", " ");
String docKeyToString = doc.getKey().toString().replace("\n", "").replace(" ", " ");
throw new IllegalArgumentException(
"Found duplicate rated document key [" + docKeyToString + "] in evaluation request [" + id + "]"
);
(The remaining changed files in this commit are not shown here.)
