####################### INPUT ############################
# An input enables a specific source of
# events to be read by Logstash.
##########################################################
input {
  file {
    # Set the path to the source file(s)
    type => "sales"
    path => "/data/sales.txt"
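    # Read the file from the beginning and write the sincedb to /dev/null so the
    # read position is never persisted; the whole file is re-ingested on every run.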
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
####################### FILTER ###########################
# A filter performs intermediary processing on an event.
# Filters are often applied conditionally depending on the
# characteristics of the event.
##########################################################
filter {
  # The csv filter takes an event field containing CSV data, parses it, and
  # stores the result as individual fields (the names can optionally be
  # specified). It can parse data with any separator, not just commas.
  csv {
    # Use the tab delimiter. Note that "\t" is only interpreted as a tab
    # character when config.support_escapes is enabled in logstash.yml;
    # otherwise insert a literal tab character here.
    separator => "\t"
    # Exclude the Logstash metadata fields
    remove_field => [
      "message",
      "host",
      "@timestamp",
      "@version",
      "path"
    ]
    # Infer the column layout from the header row
    autodetect_column_names => true
    autogenerate_column_names => true
  }
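  # For example, a hypothetical tab-separated input such as
  #   Sku<TAB>Name<TAB>SearchKeywords<TAB>Main<TAB>Price<TAB>ID<TAB>Brands
  #   123<TAB>Widget<TAB>blue widget<TAB>true<TAB>9.99<TAB>42<TAB>Acme
  # would yield events with fields named Sku, Name, SearchKeywords, Main,
  # Price, ID, and Brands (values and column names here are illustrative only).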
  ##############################
  # Replace non-ASCII characters with an ASCII approximation or, if none
  # exists, with a replacement character (defaults to ?).
  i18n {
    transliterate => [ "Sku","Name","SearchKeywords","Main","Price","ID","Brands" ]
  }
  #############################
  # Create a consistent hash (fingerprint) of the Brands field and store the
  # result back in the same field.
  fingerprint {
    method => "SHA1"
    key => "MySecretKeyForMyEyesOnlyOk?"
    source => "Brands"
    target => "Brands"
  }
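  # For example, a hypothetical Brands value of "acme" would be replaced by its
  # keyed SHA-1 (HMAC) hex digest computed with the key above, so equal brand
  # names always map to the same opaque token.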
  #############################
  # The mutate filter performs general mutations on fields. You can rename,
  # remove, replace, and modify fields in your events.
  # Convert the target column to a string so that find and replace (gsub)
  # can operate on it.
  mutate {
    convert => [ "Sku", "string" ]
  }
  # Strip backslashes, question marks, hashes, and equals signs from the
  # target column.
  mutate {
    gsub => [ "Sku", "[\\?#=]", "" ]
  }
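  # For example, a hypothetical Sku value of "AB\C?12#34=" would become "ABC1234".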
  # Strip extraneous whitespace from records
  mutate {
    strip => [ "Sku","Name","SearchKeywords","Main","Price","ID","Brands" ]
  }
  # Set everything to lowercase
  mutate {
    lowercase => [ "Sku","Name","SearchKeywords","Main","Price","ID","Brands" ]
  }
  #############################
  # Rate-limit the pipeline. Do not change unless you know what you are doing.
  sleep { time => "1" every => 100 }
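  # Sleeping for 1 second after every 100 events caps throughput at roughly
  # 100 events per second.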
}
####################### OUTPUT ###########################
# An output sends event data to a particular
# destination. Outputs are the final stage in the
# event pipeline.
##########################################################
output {
  # Send the contents of the file to the event API
  http {
    # Set the URL of the HTTP endpoint that events are delivered to
    url => "https://myapi.foo-api.us-east-1.amazonaws.com/dev/events/teststash?token=774f77b389154fd2ae7cb5131201777&sign=ujguuuljNjBkFGHyNTNmZTIxYjEzMWE5MjgyNzM1ODQ="
    # Leave the settings below untouched.
    http_method => "post"
    format => "json"
    keepalive => true
    automatic_retries => 1
    validate_after_inactivity => 45
    pool_max => 10
    pool_max_per_route => 5
    codec => plain { charset => "UTF-8" }
  }
}
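
# To test this pipeline locally (a sketch; the exact path to the Logstash
# binary depends on your installation), run Logstash against this file:
#   bin/logstash -f sample-tab-api-header.conf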