
Commit 8093e81

Merge branch 'main' of github.com:elastic/logstash-filter-elastic_integration into add_eav_header
2 parents: 4c516b5 + 1287a77

File tree

7 files changed (+144, -11 lines)


CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -1,4 +1,6 @@
 ## 0.0.3 (UNRELEASED)
+- Re-syncs with Elasticsearch 8.10 [#78](http://github.com/elastic/logstash-filter-elastic_integration/pull/78)
+- BREAKING: The `redact` processor was removed from upstream IngestCommon, and therefore no longer available here.
 - Documentation added for required privileges and unsupported processors [#72](http://github.com/elastic/logstash-filter-elastic_integration/pull/72)
 - Added request header `Elastic-Api-Version` for serverless [#84](http://github.com/elastic/logstash-filter-elastic_integration/pull/84)
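
The `Elastic-Api-Version` entry refers to work from PR #84; its wiring is not part of this diff. A minimal, hypothetical sketch of how a default header can be attached to the low-level Elasticsearch RestClient (host, scheme, and the version value "2023-10-31" are illustrative assumptions, not taken from this commit):

    import org.apache.http.HttpHost;
    import org.apache.http.message.BasicHeader;
    import org.elasticsearch.client.RestClient;

    class ApiVersionHeaderSketch {
        static RestClient buildClient() {
            // Attach Elastic-Api-Version as a default header so every request carries it.
            // The header value below is an assumption for illustration only.
            return RestClient.builder(new HttpHost("localhost", 9200, "https"))
                    .setDefaultHeaders(new BasicHeader[] {
                            new BasicHeader("Elastic-Api-Version", "2023-10-31")
                    })
                    .build();
        }
    }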

build.gradle

Lines changed: 28 additions & 3 deletions
@@ -235,15 +235,37 @@ task shadeElasticsearchIngestGeoIpModule(type: com.github.jengelman.gradle.plugi
     relocate('com.fasterxml.jackson', "${shadeNamespace}.com.fasterxml.jackson")
     relocate('com.maxmind', "${shadeNamespace}.com.maxmind")
 
-    exclude 'module-info.class'
-    exclude 'META-INF/versions/9/module-info.class'
+    exclude '**/module-info.class'
+}
+
+task shadeElasticsearchGrokImplementation(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
+    description "Shades embedded dependencies of the Elasticsearch Grok implementation"
+
+    dependsOn buildElasticsearchLocalDistro
+
+    from(buildElasticsearchLocalDistro.lib.orElse(objects.fileCollection())) {
+        include jarPackageNamed("elasticsearch-grok")
+        include jarPackageNamed("joni")
+        include jarPackageNamed("jcodings")
+    }
+
+    archiveFileName = "elasticsearch-grok-uber.jar"
+    destinationDirectory = file("${buildDir}/shaded")
+
+    mergeServiceFiles()
+    String shadeNamespace = "org.elasticsearch.grok.shaded"
+    relocate('org.joni', "${shadeNamespace}.org.joni")
+    relocate('org.jcodings', "${shadeNamespace}.org.jcodings")
+
+    exclude '**/module-info.class'
 }
 
 task importMinimalElasticsearch(dependsOn: buildElasticsearchLocalDistro) {
     description "Imports minimal portions of Elasticsearch localDistro"
 
     dependsOn buildElasticsearchLocalDistro
     dependsOn shadeElasticsearchIngestGeoIpModule
+    dependsOn shadeElasticsearchGrokImplementation
 
     ext.jars = "${buildDir}/elasticsearch-minimal-jars"
 
@@ -261,10 +283,10 @@ task importMinimalElasticsearch(dependsOn: buildElasticsearchLocalDistro) {
         include jarPackageNamed("lucene-core")
         include jarPackageNamed("lucene-analysis-common")
     }
+    from(shadeElasticsearchGrokImplementation)
 
     from(buildElasticsearchLocalDistro.module("ingest-common")) {
         include jarPackageNamed("ingest-common")
-        include jarPackageNamed("elasticsearch-grok")
         include jarPackageNamed("elasticsearch-dissect")
     }
 
@@ -306,6 +328,9 @@ tasks.withType(JavaCompile) {
 
 test {
     useJUnitPlatform()
+    testLogging {
+        exceptionFormat "full"
+    }
 }
 
 task generateTestCertificates(type: Exec) {
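
For context on what the relocation in the new task means at runtime: classes from joni and jcodings are only reachable under the `org.elasticsearch.grok.shaded` prefix inside `elasticsearch-grok-uber.jar`. A small, hypothetical sanity-check sketch, not part of this commit; the class name is derived from the relocate rules above:

    class ShadedGrokSanityCheck {
        public static void main(String[] args) throws ClassNotFoundException {
            // With relocate('org.joni', "org.elasticsearch.grok.shaded.org.joni"),
            // joni's Regex class is expected under the shaded package name.
            Class<?> shadedRegex = Class.forName("org.elasticsearch.grok.shaded.org.joni.Regex");
            System.out.println("shaded joni present: " + shadedRegex.getName());
        }
    }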

docs/index.asciidoc

Lines changed: 0 additions & 1 deletion
@@ -166,7 +166,6 @@ It has access to the Painless and Mustache scripting engines where applicable:
 | `lowercase` | _none_
 | `networkdirection` | _none_
 | `pipeline` | resolved pipeline _must_ be wholly-composed of supported processors
-| `redact` | _none_
 | `registereddomain` | _none_
 | `remove` | _none_
 | `rename` | _none_

gradle.properties

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 LOGSTASH_PATH=../../ls
-ELASTICSEARCH_TREEISH=8.7
+ELASTICSEARCH_TREEISH=8.10

src/main/java/co/elastic/logstash/filters/elasticintegration/EventProcessorBuilder.java

Lines changed: 46 additions & 3 deletions
@@ -22,6 +22,7 @@
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.ingest.IngestService;
 import org.elasticsearch.ingest.Processor;
 import org.elasticsearch.ingest.common.IngestCommonPlugin;
 import org.elasticsearch.ingest.useragent.IngestUserAgentPlugin;
@@ -46,10 +47,12 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
 import java.util.function.Supplier;
 
+import static co.elastic.logstash.filters.elasticintegration.ingest.SafeSubsetIngestPlugin.safeSubset;
 import static com.google.common.util.concurrent.AbstractScheduledService.Scheduler.newFixedRateSchedule;
 
 @SuppressWarnings("UnusedReturnValue")
@@ -78,7 +81,41 @@ public static EventProcessorBuilder fromElasticsearch(final RestClient elasticse
     }
 
     public EventProcessorBuilder() {
-        this.addProcessorsFromPlugin(IngestCommonPlugin::new);
+        this.addProcessorsFromPlugin(IngestCommonPlugin::new, Set.of(
+                org.elasticsearch.ingest.common.AppendProcessor.TYPE,
+                org.elasticsearch.ingest.common.BytesProcessor.TYPE,
+                org.elasticsearch.ingest.common.CommunityIdProcessor.TYPE,
+                org.elasticsearch.ingest.common.ConvertProcessor.TYPE,
+                org.elasticsearch.ingest.common.CsvProcessor.TYPE,
+                org.elasticsearch.ingest.common.DateIndexNameProcessor.TYPE,
+                org.elasticsearch.ingest.common.DateProcessor.TYPE,
+                org.elasticsearch.ingest.common.DissectProcessor.TYPE,
+                "dot_expander", // note: upstream constant is package-private
+                org.elasticsearch.ingest.DropProcessor.TYPE, // note: not in ingest-common
+                org.elasticsearch.ingest.common.FailProcessor.TYPE,
+                org.elasticsearch.ingest.common.FingerprintProcessor.TYPE,
+                org.elasticsearch.ingest.common.ForEachProcessor.TYPE,
+                org.elasticsearch.ingest.common.GrokProcessor.TYPE,
+                org.elasticsearch.ingest.common.GsubProcessor.TYPE,
+                org.elasticsearch.ingest.common.HtmlStripProcessor.TYPE,
+                org.elasticsearch.ingest.common.JoinProcessor.TYPE,
+                org.elasticsearch.ingest.common.JsonProcessor.TYPE,
+                org.elasticsearch.ingest.common.KeyValueProcessor.TYPE,
+                org.elasticsearch.ingest.common.LowercaseProcessor.TYPE,
+                org.elasticsearch.ingest.common.NetworkDirectionProcessor.TYPE,
+                // note: no `pipeline` processor, as we provide our own
+                org.elasticsearch.ingest.common.RegisteredDomainProcessor.TYPE,
+                org.elasticsearch.ingest.common.RemoveProcessor.TYPE,
+                org.elasticsearch.ingest.common.RenameProcessor.TYPE,
+                org.elasticsearch.ingest.common.RerouteProcessor.TYPE,
+                org.elasticsearch.ingest.common.ScriptProcessor.TYPE,
+                org.elasticsearch.ingest.common.SetProcessor.TYPE,
+                org.elasticsearch.ingest.common.SortProcessor.TYPE,
+                org.elasticsearch.ingest.common.SplitProcessor.TYPE,
+                org.elasticsearch.ingest.common.TrimProcessor.TYPE,
+                org.elasticsearch.ingest.common.URLDecodeProcessor.TYPE,
+                org.elasticsearch.ingest.common.UppercaseProcessor.TYPE,
+                org.elasticsearch.ingest.common.UriPartsProcessor.TYPE));
         this.addProcessorsFromPlugin(IngestUserAgentPlugin::new);
         this.addProcessor(SetSecurityUserProcessor.TYPE, SetSecurityUserProcessor.Factory::new);
     }
@@ -133,6 +170,10 @@ public EventProcessorBuilder addProcessor(final String type, final Supplier<Proc
         return this.addProcessorsFromPlugin(SingleProcessorIngestPlugin.of(type, processorFactorySupplier));
     }
 
+    public EventProcessorBuilder addProcessorsFromPlugin(Supplier<IngestPlugin> pluginSupplier, Set<String> requiredProcessors) {
+        return this.addProcessorsFromPlugin(safeSubset(pluginSupplier, requiredProcessors));
+    }
+
     public synchronized EventProcessorBuilder addProcessorsFromPlugin(Supplier<IngestPlugin> pluginSupplier) {
         this.ingestPlugins.add(pluginSupplier);
         return this;
@@ -158,16 +199,18 @@ synchronized EventProcessor build(final PluginContext pluginContext) {
         final ScriptService scriptService = initScriptService(settings, threadPool);
         resourcesToClose.add(scriptService);
 
+        final Environment env = new Environment(settings, null);
         final Processor.Parameters processorParameters = new Processor.Parameters(
-                new Environment(settings, null),
+                env,
                 scriptService,
                 null,
                 threadPool.getThreadContext(),
                 threadPool::relativeTimeInMillis,
                 (delay, command) -> threadPool.schedule(command, TimeValue.timeValueMillis(delay), ThreadPool.Names.GENERIC),
                 null,
                 null,
-                threadPool.generic()::execute
+                threadPool.generic()::execute,
+                IngestService.createGrokThreadWatchdog(env, threadPool)
         );
 
         IngestPipelineFactory ingestPipelineFactory = new IngestPipelineFactory(scriptService);
src/main/java/co/elastic/logstash/filters/elasticintegration/ingest/SafeSubsetIngestPlugin.java

Lines changed: 64 additions & 0 deletions
@@ -0,0 +1,64 @@
+package co.elastic.logstash.filters.elasticintegration.ingest;
+
+import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.plugins.IngestPlugin;
+
+import javax.annotation.Nonnull;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Supplier;
+
+public class SafeSubsetIngestPlugin implements IngestPlugin, Closeable {
+    private final IngestPlugin ingestPlugin;
+    private final Set<String> requiredProcessors;
+
+    public static Supplier<IngestPlugin> safeSubset(final @Nonnull Supplier<IngestPlugin> ingestPluginSupplier,
+                                                    final @Nonnull Set<String> requiredProcessors) {
+        return () -> new SafeSubsetIngestPlugin(ingestPluginSupplier, requiredProcessors);
+    }
+
+    private SafeSubsetIngestPlugin(final @Nonnull Supplier<IngestPlugin> ingestPluginSupplier,
+                                   final @Nonnull Set<String> requiredProcessors) {
+        try {
+            this.ingestPlugin = Objects.requireNonNull(ingestPluginSupplier.get(), "an IngestPlugin must be supplied!");
+            this.requiredProcessors = Set.copyOf(requiredProcessors);
+        } catch (Exception e) {
+            IOUtils.closeWhileHandlingException(this);
+            throw e;
+        }
+    }
+
+    @Override
+    public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
+        final Map<String, Processor.Factory> providedProcessors = this.ingestPlugin.getProcessors(parameters);
+
+        final Map<String, Processor.Factory> acceptedProcessors = new HashMap<>();
+        final Set<String> missingProcessors = new HashSet<>();
+
+        for (String requiredProcessor : this.requiredProcessors) {
+            final Processor.Factory processor = providedProcessors.get(requiredProcessor);
+            if (!Objects.nonNull(processor)) {
+                missingProcessors.add(requiredProcessor);
+            } else {
+                acceptedProcessors.put(requiredProcessor, processor);
+            }
+        }
+        if (!missingProcessors.isEmpty()) {
+            throw new IllegalStateException(String.format("Expected IngestPlugin %s to provide processors %s, but they were not provided", this.ingestPlugin, missingProcessors));
+        }
+        return Map.copyOf(acceptedProcessors);
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (ingestPlugin instanceof Closeable) {
+            ((Closeable) ingestPlugin).close();
+        }
+    }
+}
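
A brief usage sketch of the new `safeSubset` helper, not part of this commit; the two processor names below are illustrative. The `EventProcessorBuilder` constructor above uses the same pattern with its full allow-list:

    import co.elastic.logstash.filters.elasticintegration.ingest.SafeSubsetIngestPlugin;
    import org.elasticsearch.ingest.common.IngestCommonPlugin;
    import org.elasticsearch.plugins.IngestPlugin;

    import java.util.Set;
    import java.util.function.Supplier;

    class SafeSubsetUsageSketch {
        static Supplier<IngestPlugin> subsetOfIngestCommon() {
            // Wraps IngestCommonPlugin so that only the named processors are exposed.
            // getProcessors(...) fails fast with IllegalStateException if the wrapped
            // plugin stops providing any processor named in the required set.
            return SafeSubsetIngestPlugin.safeSubset(IngestCommonPlugin::new, Set.of("set", "rename"));
        }
    }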

src/test/resources/co/elastic/logstash/filters/elasticintegration/ssl-test-certs/generate.sh

Lines changed: 3 additions & 3 deletions
@@ -45,9 +45,9 @@ openssl verify -CAfile root.crt client_no_matching_subject.crt
 openssl verify -CAfile client_self_signed.crt client_self_signed.crt
 
 # create encrypted pkcs8 versions of all keys
-openssl pkcs8 -topk8 -inform PEM -outform PEM -passout "pass:12345678" -in client_from_root.key -out client_from_root.key.pkcs8
-openssl pkcs8 -topk8 -inform PEM -outform PEM -passout "pass:12345678" -in server_from_root.key -out server_from_root.key.pkcs8
-openssl pkcs8 -topk8 -inform PEM -outform PEM -passout "pass:12345678" -in client_no_matching_subject.key -out client_no_matching_subject.key.pkcs8
+openssl pkcs8 -topk8 -v1 PBE-SHA1-3DES -inform PEM -outform PEM -passout "pass:12345678" -in client_from_root.key -out client_from_root.key.pkcs8
+openssl pkcs8 -topk8 -v1 PBE-SHA1-3DES -inform PEM -outform PEM -passout "pass:12345678" -in server_from_root.key -out server_from_root.key.pkcs8
+openssl pkcs8 -topk8 -v1 PBE-SHA1-3DES -inform PEM -outform PEM -passout "pass:12345678" -in client_no_matching_subject.key -out client_no_matching_subject.key.pkcs8
 
 # create pkcs12 keystores (pass:12345678)
 openssl pkcs12 -export -in server_from_root.crt -inkey server_from_root.key -out server_from_root.p12 -name "server_from_root" -passout 'pass:12345678'
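
The added `-v1 PBE-SHA1-3DES` pins the PKCS#8 encryption to a legacy PBE scheme, presumably so the encrypted test keys stay readable by the Java-based test code regardless of the local OpenSSL default. Not part of this commit: a minimal Java sketch (assuming one of the `.pkcs8` files produced above is present in the working directory) that reports which encryption algorithm was actually used:

    import javax.crypto.EncryptedPrivateKeyInfo;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Base64;

    class Pkcs8AlgorithmCheck {
        public static void main(String[] args) throws Exception {
            // Read the encrypted PKCS#8 PEM, strip the armor lines, and decode the DER body.
            String pem = Files.readString(Path.of("client_from_root.key.pkcs8"));
            String base64 = pem
                    .replace("-----BEGIN ENCRYPTED PRIVATE KEY-----", "")
                    .replace("-----END ENCRYPTED PRIVATE KEY-----", "")
                    .replaceAll("\\s", "");
            byte[] der = Base64.getDecoder().decode(base64);

            // With -v1 PBE-SHA1-3DES this reports a PBEWithSHA1AndDESede-style algorithm;
            // newer PBES2 defaults from OpenSSL may not be handled by every JDK version.
            EncryptedPrivateKeyInfo info = new EncryptedPrivateKeyInfo(der);
            System.out.println("PKCS#8 encryption algorithm: " + info.getAlgName());
        }
    }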
