How to export Elasticsearch Index to Folder Inside GCS Bucket Using Logstash

236 Views Asked by At

I am using Logstash to copy an index from Elasticsearch and upload it to a GCS bucket with the .conf file below:

# Read documents from the local Elasticsearch "test" index.
input {

  elasticsearch {

    hosts => "localhost:9200"

    index => "test"

    # Fetch only the two fields we need; match_all returns every document.
    # NOTE(review): the original config nested a filter{} section inside this
    # query string — that is invalid Logstash config (filter must be a
    # top-level pipeline section) and also left the JSON object unclosed.
    query => '
    {
      "_source": ["field1", "field2"],
      "query": {
        "match_all": {}
      }
    }
    '

  }

}

# Rename the source fields to match the CSV columns used in the output.
filter {
  mutate {
    rename => {
      "field1" => "test1"
      "field2" => "test2"
    }
  }
}


# Write events as CSV objects into the GCS bucket, under a "folder" prefix.
output {
   google_cloud_storage {
   codec => csv {
    include_headers => true
    columns => [ "test1", "test2" ]
   }
     # "bucket" must be the bare bucket name: GCS bucket names cannot
     # contain "/", so "bucketName/folderName" can never resolve and the
     # upload stalls at "Uploading file...".
     bucket => "bucketName"
     json_key_file => "creds.json"
     temp_directory => "/tmp"
     # GCS has a flat namespace — "folders" are just object-name prefixes.
     # Putting the folder in log_file_prefix stores objects as
     # "folderName/logstash_gcs_...", which the console shows as a folder.
     log_file_prefix => "folderName/logstash_gcs"
     max_file_size_kbytes => 1024
     date_pattern => "%Y-%m-%dT%H:00"
     flush_interval_secs => 600
     gzip => false
     uploader_interval_secs => 600
     include_uuid => true
     include_hostname => true
   }
}


However, I am not able to upload the file from Elasticsearch to a folder inside the GCS bucket. I tried the format below, but it didn't work: bucket => "bucketName/folderName".

When I tried to add the folder, the log just showed "Uploading file...", but the file was never uploaded.

0

There are 0 best solutions below