I have been trying to configure logstash to read logs which are getting generated in my amazon S3 bucket, but have not been successful. Below are the details :
- I have installed logstash on an ec2 instance
- My logs are all gz files in the s3 bucket
- The conf file looks like below :
input {
s3 {
access_key_id => "MY_ACCESS_KEY_ID"
bucket => "MY_BUCKET"
region => "MY_REGION"
secret_access_key => "MY_SECRET_ACESS_KEY"
prefix => "/"
type => "s3"
add_field => { source => gzfiles }
}
}
filter {
if [type] == "s3" {
csv {
columns => [ "date", "time", "x-edge-location", "sc-bytes", "c-ip", "cs-method", "Host", "cs-uri-stem", "sc-status", "Referer", "User-Agent", "cs-uri-query", "Cookie", "x-edge-result-type", "x-edge-request-id" ]
}
}
if([message] =~ /^#/) {
drop{}
}
}
output {
elasticsearch {
host => "ELASTICSEARCH_URL" protocol => "http"
}
}