Update IngestPipeline Const to ingest_pipeline (#581)
This constant change went missing during the update to the new ingest pipeline paths.

For now, all the ingest pipelines are copied to both directories (ingest-pipeline and ingest_pipeline); the duplicates will be cleaned up later when Kibana is ready.
ruflin authored Jul 2, 2020
1 parent b608fd7 commit 41c150c
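
The Go constant rename that the commit title refers to is not part of the testdata excerpt shown on this page. As a rough, hypothetical sketch of what such a change looks like in the package-registry source (the package name, constant name, and comments below are assumptions, not taken from this commit):

// Hypothetical sketch only: the real constant and its surrounding code
// live elsewhere in the package-registry source, which this excerpt
// does not show.
package util

// DirIngestPipeline names the dataset sub-directory that holds
// Elasticsearch ingest pipeline definitions. Before this commit the
// constant still used the legacy hyphenated path, while the packages
// had already moved to the underscore form.
const DirIngestPipeline = "ingest_pipeline" // was "ingest-pipeline"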
Showing 17 changed files with 563 additions and 6 deletions.
7 changes: 6 additions & 1 deletion testdata/generated/package.json
@@ -56,7 +56,12 @@
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-http.json",
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-json.json",
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-plaintext.json",
-    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-tcp.json"
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-tcp.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-entry.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-http.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-json.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-plaintext.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-tcp.json"
   ],
   "config_templates": [
     {
3 changes: 2 additions & 1 deletion testdata/generated/package/default_pipeline/0.0.2/index.json
@@ -26,7 +26,8 @@
     "/package/default_pipeline/0.0.2/dataset/foo/manifest.yml",
     "/package/default_pipeline/0.0.2/dataset/foo/fields/base-fields.yml",
     "/package/default_pipeline/0.0.2/dataset/foo/agent/stream/stream.yml.hbs",
-    "/package/default_pipeline/0.0.2/dataset/foo/elasticsearch/ingest-pipeline/default.json"
+    "/package/default_pipeline/0.0.2/dataset/foo/elasticsearch/ingest-pipeline/default.json",
+    "/package/default_pipeline/0.0.2/dataset/foo/elasticsearch/ingest_pipeline/default.json"
   ],
   "config_templates": [
     {
7 changes: 6 additions & 1 deletion testdata/generated/package/example/1.0.0/index.json
@@ -56,7 +56,12 @@
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-http.json",
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-json.json",
     "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-plaintext.json",
-    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-tcp.json"
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest-pipeline/pipeline-tcp.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-entry.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-http.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-json.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-plaintext.json",
+    "/package/example/1.0.0/dataset/foo/elasticsearch/ingest_pipeline/pipeline-tcp.json"
   ],
   "config_templates": [
     {
3 changes: 2 additions & 1 deletion testdata/generated/package/multiple_false/0.0.1/index.json
@@ -25,7 +25,8 @@
     "/package/multiple_false/0.0.1/dataset/foo/manifest.yml",
     "/package/multiple_false/0.0.1/dataset/foo/fields/base-fields.yml",
     "/package/multiple_false/0.0.1/dataset/foo/agent/stream/stream.yml.hbs",
-    "/package/multiple_false/0.0.1/dataset/foo/elasticsearch/ingest-pipeline/default.json"
+    "/package/multiple_false/0.0.1/dataset/foo/elasticsearch/ingest-pipeline/default.json",
+    "/package/multiple_false/0.0.1/dataset/foo/elasticsearch/ingest_pipeline/default.json"
   ],
   "config_templates": [
     {
5 changes: 4 additions & 1 deletion testdata/generated/package/yamlpipeline/1.0.0/index.json
@@ -27,7 +27,10 @@
     "/package/yamlpipeline/1.0.0/dataset/log/agent/stream/stream.yml.hbs",
     "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest-pipeline/pipeline-entry.yml",
     "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest-pipeline/pipeline-json.yml",
-    "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest-pipeline/pipeline-plaintext.yml"
+    "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest-pipeline/pipeline-plaintext.yml",
+    "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest_pipeline/pipeline-entry.yml",
+    "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest_pipeline/pipeline-json.yml",
+    "/package/yamlpipeline/1.0.0/dataset/log/elasticsearch/ingest_pipeline/pipeline-plaintext.yml"
   ],
   "datasets": [
     {
@@ -0,0 +1,4 @@
{
"description": "Pipeline for normalizing envoyproxy logs",
"processors": []
}
@@ -0,0 +1,42 @@
{
"description": "Pipeline for normalizing envoyproxy logs",
"processors": [
{
"pipeline": {
"if": "ctx.message.charAt(0) != (char)(\"{\")",
"name": "{{IngestPipeline 'pipeline-plaintext' }}"
}
},
{
"pipeline": {
"if": "ctx.message.charAt(0) == (char)(\"{\")",
"name": "{{IngestPipeline 'pipeline-json' }}"
}
},
{
"set": {
"field": "event.created",
"value": "{{@timestamp}}"
}
},
{
"set": {
"field": "@timestamp",
"value": "{{timestamp}}",
"if": "ctx.timestamp != null"
}
},
{
"remove": {
"field": ["timestamp"],
"ignore_failure": true
}
}
],
"on_failure" : [{
"set" : {
"field" : "error.message",
"value" : "pipeline-entry: {{ _ingest.on_failure_message }}"
}
}]
}
@@ -0,0 +1,94 @@
{
"description": "Pipeline for normalizing envoy HTTP ACCESS logs",
"processors": [
{
"script": {
"lang": "painless",
"source": "ctx['http'] = new HashMap(); def p = ctx.proto.indexOf ('/'); def l = ctx.proto.length(); ctx.http.version = ctx.proto.substring(p+1, l);",
"ignore_failure" : true
}
},
{
"rename": {
"field": "method",
"target_field": "http.request.method"
}
},
{
"rename": {
"field": "path",
"target_field": "url.path"
}
},
{
"convert" : {
"field" : "response_code",
"type": "long"
}
},
{
"rename": {
"field": "response_code",
"target_field": "http.response.status_code"
}
},
{
"rename": {
"field": "bytes_received",
"target_field": "http.response.body.bytes"
}
},
{
"convert" : {
"field" : "http.response.body.bytes",
"type": "long"
}
},
{
"rename": {
"field": "bytes_sent",
"target_field": "http.request.body.bytes"
}
},
{
"convert" : {
"field" : "http.request.body.bytes",
"type": "long"
}
},
{
"script": {
"lang": "painless",
"source": "ctx.envoyproxy.upstream_service_time = Math.round(Double.parseDouble(ctx.upstream_service_time) * params.scale)",
"params": {
"scale": 1000000
},
"if": "ctx.upstream_service_time != null && ctx.upstream_service_time != '-'"
}
},
{
"set": {
"field": "envoyproxy.proxy_type",
"value": "http"
}
},
{
"set": {
"field": "url.domain",
"value": "{{envoyproxy.authority}}"
}
},
{
"user_agent": {
"field": "user_agent.original",
"ignore_missing": true
}
}
],
"on_failure" : [{
"set" : {
"field" : "error.message",
"value" : "pipeline-http: {{ _ingest.on_failure_message }}"
}
}]
}
@@ -0,0 +1,47 @@
{
"description": "Pipeline for normalizing envoyproxy access logs",
"processors": [
{
"json" : {
"field" : "message",
"target_field" : "json"
}
},
{
"remove": {
"field": ["message"],
"ignore_failure" : true
}
},
{
"rename": {
"field": "json.message",
"target_field": "message",
"ignore_failure" : true
}
},
{
"rename": {
"field": "json.kubernetes",
"target_field": "kubernetes",
"ignore_failure" : true
}
},
{
"remove": {
"field": ["json"]
}
},
{
"pipeline": {
"name": "{{IngestPipeline 'pipeline-plaintext' }}"
}
}
],
"on_failure" : [{
"set" : {
"field" : "error.message",
"value" : "pipeline-json: {{ _ingest.on_failure_message }}"
}
}]
}
@@ -0,0 +1,124 @@
{
"description": "Pipeline for normalizing envoy access logs",
"processors": [
{
"script": {
"lang": "painless",
"source": "if (ctx.message.charAt(0) == (char)(\"[\")) { ctx.temp_message = \"ACCESS \" + ctx.message;} else if (ctx.message.substring(0, 7) == \"ACCESS \") { ctx.temp_message = ctx.message;} else { throw new Exception(\"Not a valid envoyproxy access log\");}"
}
},
{
"dissect": {
"field": "temp_message",
"pattern": "%{envoyproxy.log_type} [%{timestamp}] \"%{method} %{path} %{proto}\" %{response_code} %{envoyproxy.response_flags} %{bytes_received} %{bytes_sent} %{duration} %{upstream_service_time} \"%{source.address}\" \"%{user_agent.original}\" \"%{envoyproxy.request_id}\" \"%{envoyproxy.authority}\" \"%{dest}\"",
"on_failure" : [{
"script": {
"lang": "painless",
"source": "ctx.remove('temp_message'); throw new Exception(\"Dissect error: Not a valid envoyproxy access log\");"
}
}]
}
},
{
"script": {
"lang": "painless",
"source": "if (ctx.dest == \"-\") { ctx.remove('dest');} else { ctx['destination'] = new HashMap(); def p = ctx.dest.indexOf (':'); def l = ctx.dest.length(); ctx.destination.address = ctx.dest.substring(0, p); ctx.destination.port = ctx.dest.substring(p+1, l);} ctx.remove('dest');",
"if": "ctx.dest != null"
}
},
{
"convert" : {
"field" : "destination.port",
"type": "integer",
"if": "ctx.destination?.port != null"
}
},
{
"convert" : {
"field" : "duration",
"type": "double",
"if": "ctx.duration != null"
}
},
{
"script": {
"lang": "painless",
"source": "ctx.event.duration = Math.round(ctx.duration * params.scale)",
"params": {
"scale": 1000000
},
"if": "ctx.duration != null"
}
},
{
"remove": {
"field": ["json", "duration", "time", "temp_message"],
"ignore_missing": true
}
},
{
"pipeline": {
"if": "ctx.proto.charAt(0) != (char)(\"-\")",
"name": "{{IngestPipeline 'pipeline-http' }}"
}
},
{
"pipeline": {
"if": "ctx.proto.charAt(0) == (char)(\"-\")",
"name": "{{IngestPipeline 'pipeline-tcp' }}"
}
},
{
"remove": {
"field": ["proto", "upstream_service_time"],
"ignore_failure": true
}
},
{
"remove": {
"field": "source.address",
"if": "ctx.source.address == '-'"
}
},
{
"remove": {
"field": "envoyproxy.response_flags",
"if": "ctx.envoyproxy.response_flags == '-'"
}
},
{
"split": {
"field": "envoyproxy.response_flags",
"separator": "," ,
"if": "ctx.envoyproxy.response_flags != null"
}
},
{
"set" : {
"field" : "destination.ip",
"value" : "{{destination.address}}",
"if": "ctx.destination?.address != null"
}
},
{
"set" : {
"field" : "source.ip",
"value" : "{{source.address}}",
"if": "ctx.source?.address != null"
}
},
{
"geoip": {
"field": "destination.ip",
"target_field": "destination.geo",
"if": "ctx.destination?.ip != null"
}
}
],
"on_failure" : [{
"set" : {
"field" : "error.message",
"value" : "pipeline-plaintext: {{ _ingest.on_failure_message }}"
}
}]
}