弹性搜索批量索引超时错误!错误:30000ms后请求超时 [英] Elastic search bulk index timeout err! Error: Request Timeout after 30000ms

查看:323
本文介绍了弹性搜索批量索引超时错误!错误:30000ms后请求超时的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

最近,我想将旧的索引数据滚动到新的按月划分的索引。存储的数据从2015/07至今。每个月几乎有3万条记录。按照 2.2 API 中提供的 scroll 和 bulk 方法,我完成了如下代码。

For recently, I want to scroll through the old index data to new monthly-based indices. The stored data begin from 2015/07 until now. and it is almost 30,000 records for every month. Follow the scroll and bulk methods provided in 2.2 API, I finish the code as follows.

file main.coffee

logger = require 'graceful-logger'
elasticsearch = require 'elasticsearch'
setMonthlyIndices = require './es-test-promise'

# Raise requestTimeout above the client default (30000ms): month-sized
# scroll + bulk batches can legitimately take longer than 30s, which is
# exactly what produces "Error: Request Timeout after 30000ms".
# Tune the value (ms) to your cluster; Infinity disables the timeout.
client = new elasticsearch.Client
  host:
    host: 'localhost'
    port: 9200
    protocol: 'http'
  requestTimeout: 120000

# Reindex type 'messages' from 'talk_messages_v2' into monthly indices
# named 'es_test_messages_YYYY_MM', starting at July 2015 (month is
# 0-based here, JS Date convention).
setMonthlyIndices client, 'es_test_messages', 'talk_messages_v2', 'messages', 2015, 6

文件 es-test-promise.coffee

logger = require 'graceful-logger'
elasticsearch = require 'elasticsearch'
config = require 'config'

# Migrate one calendar month of documents from the source `index` into a
# monthly target index named "#{prefix}_#{year}_#{month}" using the
# Elasticsearch scroll + bulk APIs.
#
# client - an elasticsearch.Client instance
# prefix - prefix for the monthly target index name
# index  - source index to scroll over
# type   - document type used for both the search and the bulk writes
# year   - four-digit year of the month being migrated
# month  - 1-based month number (1 = January); callers pass getMonth()+1
setIndice = (client, prefix, index, type, year, month) ->
  allDocs = []
  count = 0

  prevYear = year + ''
  # with leading '0' for month less than 10
  prevMonth = ("0" + month).slice(-2)
  # Because `month` is 1-based but JS Date months are 0-based,
  # new Date(year, month) lands on the first day of the FOLLOWING month,
  # giving the exclusive upper bound of the date range (handles December
  # rollover to the next year automatically).
  nextDate = new Date(year, month)
  nextYear = nextDate.getFullYear().toString()
  nextMonth = ("0" + (nextDate.getMonth()+1)).slice(-2)

  # Half-open range [minDate, maxDate) covering exactly one month.
  minDate = "#{prevYear}-#{prevMonth}-01"
  maxDate = "#{nextYear}-#{nextMonth}-01"

  indice_name = "#{prefix}_#{prevYear}_#{prevMonth}"

  # ES 2.x filtered query: only documents whose createdAt falls in the month.
  q =
    filtered:
      filter:
        range:
          createdAt:
            gte: minDate
            lt: maxDate
            format: "yyyy-MM-dd"

  # Open a scroll (1-minute keep-alive, 1000 docs/page, sorted by _doc for
  # cheapest scrolling). `callback` is deliberately named so the scroll
  # continuation below can reuse it for every page.
  client.search
    index: index
    type: type
    scroll: '1m'
    body:
      query: q
    sort: ['_doc']
    size: 1000
  , callback = (err, response) ->
    console.log "indice_name 1", indice_name
    return logger.err err.stack if err
    return unless response.hits?.total

    # Rebuild the bulk body for this page only (reset per page).
    allDocs = []

    # Bulk format: one action line ({index: {_id}}) followed by the
    # document source, per hit.
    response.hits.hits.forEach (hit)->
      action =
        index:
          _id: hit._id
      allDocs.push(action)
      allDocs.push(hit._source)

    # `count` accumulates 2 entries per document across all pages, so the
    # completion test below compares it against total * 2.
    count = count + allDocs.length

    client.bulk
      index: indice_name
      type: type
      body: allDocs
    , (err, resp) ->
      console.log "indice_name 2", indice_name
      return logger.err err.stack if err

      # More documents remain for this month: fetch the next scroll page,
      # reusing the same callback; otherwise the month is fully migrated.
      # NOTE(review): if the hit count is not an exact multiple and the
      # final page were empty, bulk would be called with an empty body —
      # presumably total*2 == count always triggers first; verify.
      if response.hits.total *2 !=  count
        client.scroll
          scrollId: response._scroll_id
          scroll: '1m'
        , callback
      else
        logger.info "Finish indicing #{indice_name}"

# Kick off one setIndice migration per month, from (year, month) through
# the current month inclusive.
#
# client - an elasticsearch.Client instance
# prefix - prefix for the monthly target index names
# index  - source index to read from
# type   - document type to migrate
# year   - starting year; falls back to the current year when falsy
# month  - starting month, 0-based (JS Date convention); falls back to 0 (January)
#
# FIXME: the loop fires every setIndice synchronously, so all monthly
# scroll/bulk requests run against the cluster concurrently — a likely
# contributor to request timeouts. Serializing would require setIndice to
# expose a completion callback.
setMonthlyIndices = (client, prefix, index, type, year, month) ->
  current = new Date()
  currentYear = current.getFullYear()
  currentMonth = current.getMonth() + 1

  processYear = year or currentYear
  processMonth = month or 0

  processDate = new Date(processYear, processMonth)
  # currentMonth is getMonth()+1, so this is the first day of NEXT month,
  # making the while-condition include the current (partial) month.
  currentDate = new Date(currentYear, currentMonth)

  # (Removed leftover debug overrides that pinned the range to
  # 2015-07..2015-10 and made the year/month parameters dead.)

  while processDate <= currentDate
    year = processDate.getFullYear()
    # setIndice expects a 1-based month number.
    month = processDate.getMonth() + 1
    setIndice(client, prefix, index, type, year, month)
    processDate.setMonth(processDate.getMonth() + 1)

module.exports = setMonthlyIndices

我想知道是否由于打开太多的客户端请求,因为在文件 es-test-promise.coffee ,所有这些搜索请求都同时运行。这只是一个猜测,然后我也试图用承诺实现,以确保请求可以逐个执行。最后,我无法理解,放弃。

I am wondering whether it is due to open too many client request, because in file es-test-promise.coffee, all these search request is running simultaneously. This is just a guess, and then I have also tried to implement with promise to make sure the request could be executed one by one. Finally, I can't figure it out and give up.

你有什么建议吗?我认为应该是源码的问题,但是我不知道该从哪里检查...

Do you have any suggestion, I think it should be the source issues , but I don't know where to check...

推荐答案

只需将requestTimeout放入您的配置。

Just put the requestTimeout to your config.

例如:

new elasticsearch.Client({host:"localhost", requestTimeout : Infinity});

您可以将 Infinity 替换为您所需的超时上限(以毫秒为单位)。

You can replace Infinity by your desired limit in 'ms' .

这篇关于弹性搜索批量索引超时错误!错误:30000ms后请求超时的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆