diff --git a/docs/static/ea-integration-tutorial.asciidoc b/docs/static/ea-integration-tutorial.asciidoc
index af27849bee0..9d965b20572 100644
--- a/docs/static/ea-integration-tutorial.asciidoc
+++ b/docs/static/ea-integration-tutorial.asciidoc
@@ -1,7 +1,7 @@
[[ea-integrations-tutorial]]
-=== Tutorial: Using the {ls} `elastic_integration filter` to extend Elastic {integrations}
+=== Tutorial: Using the {ls} `elastic_integration` filter to extend Elastic {integrations}
++++
-Tutorial: {ls} `elastic_integration filter`
+Tutorial: {ls} `elastic_integration` filter
++++
You can use {ls} to transform events collected by {agent} and paired with an {integrations-docs}[Elastic integration].
@@ -12,11 +12,10 @@ processing power of {ls}.
This new functionality is made possible by the <> plugin.
When you include the `elastic_integration` filter in your configuration, {ls} reads certain field values generated by the {agent},
and uses them to apply the transformations from Elastic integrations.
-This allows you to to further process events in the Logstash pipeline before sending them to their
+This allows you to further process events in the Logstash pipeline before sending them to their
configured destinations.
-This tutorial walks you through adding the {integrations-docs}/crowdstrike-intro[Crowdstrike integration], using {ls} to
-remove the `_version` field, and then sending the data to {ess} or self-managed {es}.
+This tutorial walks you through adding the {integrations-docs}/crowdstrike-intro[Crowdstrike integration] and then sending the data to {ess} or self-managed {es}.
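+
+At its core, the filter only needs connection details for the {es} cluster that hosts the integration's ingest pipelines.
+As a minimal sketch (the host and API key below are placeholders; complete pipeline examples appear later in this tutorial), the filter section looks like this:
+
+-----
+filter {
+  elastic_integration {
+    hosts   => ["https://your-es-host:9200"]
+    api_key => "your-api-key"
+  }
+}
+-----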
[[ea-integrations-prereqs]]
@@ -83,8 +82,8 @@ This policy should be selected by default.
. Click *Save and continue*.
+
-You have the option to add the {agent} to your hosts.
-If you haven't already, {fleet-guide}/elastic-agent-installation.html[install the {agent}] on the host where you want to collect data.
+You have the option to add the {agent} to your hosts.
+If you haven't already, {fleet-guide}/elastic-agent-installation.html[install the {agent}] on the host where you want to collect data.
[discrete]
@@ -122,16 +121,15 @@ input {
filter {
elastic_integration {
cloud_id => "your-cloud:id"
- api_key => "api-key"
- remove_field => ["_version"]
+ api_key => "your-api-key"
}
}
output {
stdout {}
elasticsearch {
- cloud_auth => "elastic:"
- cloud_id => "your-cloud-id"
+ cloud_id => "your-cloud:id"
+ api_key => "your-api-key"
}
}
-----
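+
+Because the integration's transformations run in the filter stage, you can chain additional filters after `elastic_integration` to further process events before they reach {es}.
+For example (a sketch only; the tag name is arbitrary), a standard `mutate` filter could tag every event that passed through {ls}:
+
+-----
+filter {
+  elastic_integration {
+    cloud_id => "your-cloud:id"
+    api_key  => "your-api-key"
+  }
+  # runs after the integration's transformations have been applied
+  mutate {
+    add_tag => ["processed-by-logstash"]
+  }
+}
+-----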
@@ -156,12 +154,11 @@ input {
filter {
elastic_integration {
- hosts => "{es-host}:9200"
+ hosts => ["{es-host}:9200"]
ssl_enabled => true
ssl_certificate_authorities => "/usr/share/logstash/config/certs/ca-cert.pem"
username => "elastic"
- password => "changeme"
- remove_field => ["_version"]
+ password => "changeme"
}
}
@@ -172,8 +169,8 @@ output {
## add elasticsearch
elasticsearch {
hosts => "{es-host}:9200"
- password => "changeme"
user => "elastic"
+ password => "changeme"
ssl_certificate_authorities => "/usr/share/logstash/config/certs/ca-cert.pem"
}
}
@@ -183,4 +180,4 @@ Note that the user credentials that you specify in the `elastic_integration` fil
If your {agent} and {ls} pipeline are configured correctly, then events go to {ls} for processing before {ls} forwards them on to {es}.
-
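+If you kept the `stdout {}` output from the sample pipeline, the processed events also appear in the {ls} console, which is a quick way to confirm the flow end to end before checking the data in {es}.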
+Check out the <> page for troubleshooting guidance if you run into issues.
\ No newline at end of file