Zabbix 4.0.0 Elasticsearch graph not retrieving data

  • joelurtubiaugarte
    Junior Member
    • Jul 2018
    • 7

    #1

    Zabbix 4.0.0 Elasticsearch graph not retrieving data

    Hello,
    I downloaded the latest version of Zabbix yesterday and integrated it with Elasticsearch, mapping each index by date. It was indexing without any problems...

    [root@elastic01 ~]# curl -XGET http://192.168.10.101:9200/_cat/indices?v
    health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
    yellow open apimerakiorganizations chf18A4BTyCX4X0fe_TApA 3 2 319 0 564.6kb 564.6kb
    yellow open apimerakidevices nVfxxavNR8uob1cCBcTaBg 3 2 317 0 596.8kb 596.8kb
    yellow open uint z2jdmK3SQKCKbYvdpXwVBA 5 1 0 0 2.2kb 1.2kb
    green open uint-2018-10-03 IC2hTwSVTxiz-ymQFqXh8A 5 1 2910783 0 369.7mb 188.6mb
    yellow open dbl hnNuz_r4RSWuUC2JE_lVXg 5 1 0 0 1.7kb 1.2kb
    yellow open apimerakiclients YpCkuoBZRPWgD9m-ZHx4qg 3 2 314 0 1.4mb 1.4mb
    yellow open log ggD9PbelRRyVrxs_obOpeA 5 1 0 0 1.7kb 1.2kb
    green open dbl-2018-10-03 BNBhOzP9Tt-A849UvhZcXA 5 1 207 0 165kb 82.5kb
    yellow open str 86T-iQQ8SEWkx5PJzCwl0Q 5 1 0 0 2.2kb 1.2kb
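
    The listing contains both the daily indices (uint-2018-10-03, dbl-2018-10-03) and bare uint/dbl/str/log indices. Since the graph request shown further down searches dbl*, every index matching that pattern presumably needs clock mapped as a date and value as a double, so comparing the mappings of all dbl* indices may help. A diagnostic sketch, using the same host and port as the listing above:

    curl -XGET 'http://192.168.10.101:9200/dbl*/_mapping?pretty'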


    When I retrieve the information per host in "Latest data" it works as I expect, but when I try to view the "Zabbix CPU Load" graph it doesn't work; the graph appears without any data.


    This is the debug information from the Elasticsearch server when the error appears:

    [2018-10-03T15:43:52,387][DEBUG][o.e.a.s.TransportSearchAction] [elastic01] [dbl-2018-10-03][4], node[jJZ9MCUhRpaFVn2w2QPwvg], [P], s[STARTED], a[id=D-NwvMBFQB2MIvPS0LibZA]: Failed to execute [SearchRequest{searchType=QUERY_THEN_FETCH, indices=[dbl*], indicesOptions=IndicesOptions[ignore_unavailable=false, allow_no_indices=true, expand_wildcards_open=true, expand_wildcards_closed=false, allow_aliases_to_multiple_indices=true, forbid_closed_indices=true, ignore_aliases=false], types=[values], routing='null', preference='null', requestCache=null, scroll=null, maxConcurrentShardRequests=10, batchedReduceSize=512, preFilterShardSize=128, allowPartialSearchResults=true, source={"size":0,"query":{"bool":{"must":[{"terms":{"itemid":["23296","23297","23295"],"boost":1.0}},{"range":{"clock":{"from":1538581132,"to":1538581432,"include_lower":true,"include_upper":true,"boost":1.0}}}],"adjust_pure_negative":true,"boost":1.0}},"aggregations":{"group_by_itemid":{"terms":{"field":"itemid","size":3,"min_doc_count":1,"shard_min_doc_count":0,"show_term_doc_count_error":false,"order":[{"_count":"desc"},{"_key":"asc"}]},"aggregations":{"group_by_script":{"terms":{"script":{"source":"Math.floor((params.width*((doc['clock'].date.getMillis()/1000+params.delta)%params.size))/params.size)","lang":"painless","params":{"size":300,"width":1227,"delta":68}},"size":1227,"min_doc_count":1,"shard_min_doc_count":0,"show_term_doc_count_error":false,"order":[{"_count":"desc"},{"_key":"asc"}]},"aggregations":{"max_value":{"max":{"field":"value"}},"avg_value":{"avg":{"field":"value"}},"min_value":{"min":{"field":"value"}},"max_clock":{"max":{"field":"clock"}}}}}}}}}] lastShard [true]
    org.elasticsearch.transport.RemoteTransportException: [elastic02_ElasticSearchDB][192.168.10.12:9300][indices:data/read/search[phase/query]]
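
    To check whether the data itself is retrievable, the query part of that request can be re-run by hand without the aggregations (itemids and the clock range are copied from the log entry above; only a sketch):

    curl -X POST \
      'http://192.168.10.101:9200/dbl*/_search?pretty' \
      -H 'content-type:application/json' \
      -d '{
        "size": 1,
        "query": {
          "bool": {
            "must": [
              { "terms": { "itemid": ["23296", "23297", "23295"] } },
              { "range": { "clock": { "gte": 1538581132, "lte": 1538581432 } } }
            ]
          }
        }
      }'

    If this returns hits but the graph request still fails, the full stack trace on elastic02 (the node named in the RemoteTransportException) should show what the shard rejected.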



    Please, could you help me?
    These are my configuration files:


    ## zabbix_server.conf
    ###############################################

    # HistoryStorageURL=
    HistoryStorageURL=http://192.168.10.101:9200
    ### Option: HistoryStorageTypes
    # Comma separated list of value types to be sent to the history storage.
    #
    # Mandatory: no
    # Default:
    # HistoryStorageTypes=uint,dbl,str,log,text
    HistoryStorageTypes=uint,dbl,str,log,text

    ###############################################



    ## zabbix.conf.php (frontend)
    ###############################################

    // Zabbix GUI configuration file.
    global $DB,$HISTORY;

    $HISTORY['url'] = 'http://192.168.10.101:9200';
    $HISTORY['types'] = ['uint','dbl','str', 'text', 'log'];

    ## mapping
    ###############################################


    curl -X PUT \
      http://127.0.0.1:9200/_template/text_template \
      -H 'content-type:application/json' \
      -d '{
        "template": "text*",
        "index_patterns": ["text*"],
        "settings": {
          "index": {
            "number_of_replicas": 1,
            "number_of_shards": 5
          }
        },
        "mappings": {
          "values": {
            "properties": {
              "itemid": {
                "type": "long"
              },
              "clock": {
                "format": "epoch_second",
                "type": "date"
              },
              "value": {
                "fields": {
                  "analyzed": {
                    "index": true,
                    "type": "text",
                    "analyzer": "standard"
                  }
                },
                "index": false,
                "type": "text"
              }
            }
          }
        }
      }'
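
    After creating it, the template can be checked with a plain GET (this only confirms it is registered and shows the stored mapping, nothing more):

    curl -XGET 'http://127.0.0.1:9200/_template/text_template?pretty'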


    curl -X PUT \
      http://localhost:9200/_ingest/pipeline/text-pipeline \
      -H 'content-type:application/json' \
      -d '{
        "description": "daily text index naming",
        "processors": [
          {
            "date_index_name": {
              "field": "clock",
              "date_formats": ["UNIX"],
              "index_name_prefix": "text-",
              "date_rounding": "d"
            }
          }
        ]
      }'
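
    The pipeline can be tested without writing real history data by simulating a document through it; the itemid/clock/value below are sample values, not real ones:

    curl -X POST \
      http://localhost:9200/_ingest/pipeline/text-pipeline/_simulate?pretty \
      -H 'content-type:application/json' \
      -d '{
        "docs": [
          { "_source": { "itemid": 23296, "clock": 1538581432, "value": "sample" } }
        ]
      }'

    The _index in the simulated result should resolve to the daily text-* index for that clock value.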





    # dbl mapping

    curl -vv -X PUT \
      http://localhost:9200/dbl \
      -H 'content-type:application/json' \
      -d '{
        "template": "dbl*",
        "index_patterns": ["dbl*"],
        "settings": {
          "index": {
            "number_of_replicas": 1,
            "number_of_shards": 5
          }
        },
        "mappings": {
          "values": {
            "properties": {
              "itemid": {
                "type": "long"
              },
              "clock": {
                "format": "epoch_second",
                "type": "date"
              },
              "value": {
                "type": "double"
              }
            }
          }
        }
      }'


    curl -X PUT \
      http://localhost:9200/_ingest/pipeline/dbl-pipeline \
      -H 'content-type:application/json' \
      -d '{
        "description": "daily dbl index naming",
        "processors": [
          {
            "date_index_name": {
              "field": "clock",
              "date_formats": ["UNIX"],
              "index_name_prefix": "dbl-",
              "date_rounding": "d"
            }
          }
        ]
      }'



    # str mapping

    curl -vv -X PUT \
      http://localhost:9200/str \
      -H 'content-type:application/json' \
      -d '{
        "template": "str*",
        "index_patterns": ["str*"],
        "settings": {
          "index": {
            "number_of_replicas": 1,
            "number_of_shards": 5
          }
        },
        "mappings": {
          "values": {
            "properties": {
              "itemid": {
                "type": "long"
              },
              "clock": {
                "format": "epoch_second",
                "type": "date"
              },
              "value": {
                "fields": {
                  "analyzed": {
                    "index": true,
                    "type": "text",
                    "analyzer": "standard"
                  }
                },
                "index": false,
                "type": "text"
              }
            }
          }
        }
      }'


    curl -X PUT \
      http://localhost:9200/_ingest/pipeline/str-pipeline \
      -H 'content-type:application/json' \
      -d '{
        "description": "daily str index naming",
        "processors": [
          {
            "date_index_name": {
              "field": "clock",
              "date_formats": ["UNIX"],
              "index_name_prefix": "str-",
              "date_rounding": "d"
            }
          }
        ]
      }'


    # log mapping

    curl -vv -X PUT \
      http://localhost:9200/log \
      -H 'content-type:application/json' \
      -d '{
        "template": "log*",
        "index_patterns": ["log*"],
        "settings": {
          "index": {
            "number_of_replicas": 1,
            "number_of_shards": 5
          }
        },
        "mappings": {
          "values": {
            "properties": {
              "itemid": {
                "type": "long"
              },
              "clock": {
                "format": "epoch_second",
                "type": "date"
              },
              "value": {
                "fields": {
                  "analyzed": {
                    "index": true,
                    "type": "text",
                    "analyzer": "standard"
                  }
                },
                "index": false,
                "type": "text"
              }
            }
          }
        }
      }'


    curl -X PUT \
      http://localhost:9200/_ingest/pipeline/log-pipeline \
      -H 'content-type:application/json' \
      -d '{
        "description": "daily log index naming",
        "processors": [
          {
            "date_index_name": {
              "field": "clock",
              "date_formats": ["UNIX"],
              "index_name_prefix": "log-",
              "date_rounding": "d"
            }
          }
        ]
      }'



    #############################################################
  • joelurtubiaugarte
    Junior Member
    • Jul 2018
    • 7

    #2
    Sorry, I made a mistake when creating the uint and dbl indices; that was my fault...
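
    For reference, the dbl/str/log commands in the first post PUT the mapping body to the index itself (e.g. http://localhost:9200/dbl) instead of to the _template endpoint, which matches the empty bare indices in the _cat/indices listing. A sketch of the corrected form, modelled on the text_template command above (not necessarily the exact commands that were eventually used):

    # Remove the indices that were created by mistake (they are empty in the listing above)
    curl -X DELETE http://localhost:9200/dbl
    curl -X DELETE http://localhost:9200/uint

    # Re-create the dbl mapping as an index template instead of an index
    curl -X PUT \
      http://localhost:9200/_template/dbl_template \
      -H 'content-type:application/json' \
      -d '{
        "template": "dbl*",
        "index_patterns": ["dbl*"],
        "settings": {
          "index": {
            "number_of_replicas": 1,
            "number_of_shards": 5
          }
        },
        "mappings": {
          "values": {
            "properties": {
              "itemid": { "type": "long" },
              "clock": { "format": "epoch_second", "type": "date" },
              "value": { "type": "double" }
            }
          }
        }
      }'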


    • joelurtubiaugarte
      Junior Member
      • Jul 2018
      • 7

      #3
      Please close this ticket...
