collectors/python.d: format modules code (#7832)

* collectors/python.d/adaptec_raid: format code

* collectors/python.d/am2320: format code

* collectors/python.d/apache: format code

* collectors/python.d/beanstalk: format code

* collectors/python.d/bind_rndc: format code

* collectors/python.d/boinc: format code

* collectors/python.d/ceph: format code

* collectors/python.d/couchdb: format code

* collectors/python.d/dns_query_time: format code

* collectors/python.d/dnsdist: format code

* collectors/python.d/dockerd: format code

* collectors/python.d/dovecot: format code

* collectors/python.d/energid: format code

* collectors/python.d/example: format code

* collectors/python.d/exim: format code

* collectors/python.d/fail2ban: format code

* collectors/python.d/freeradius: format code

* collectors/python.d/gearman: format code

* collectors/python.d/go_expvar: format code

* collectors/python.d/haproxy: format code

* collectors/python.d/hddtemp: format code

* collectors/python.d/hpssa: format code

* collectors/python.d/httpcheck: format code

* collectors/python.d/icecast: format code

* collectors/python.d/ipfs: format code

* collectors/python.d/isc_dhcpd: format code

* collectors/python.d/litespeed: format code

* collectors/python.d/megacli: format code

* collectors/python.d/memcached: format code

* collectors/python.d/mongodb: format code

* collectors/python.d/mysql: format code

* collectors/python.d/nginx: format code

* collectors/python.d/nginx_plus: format code

* collectors/python.d/nsd: format code

* collectors/python.d/ntpd: format code

* collectors/python.d/openldap: format code

* collectors/python.d/oracledb: format code

* collectors/python.d/ovpn_status_log: format code

* collectors/python.d/phpfpm: format code

* collectors/python.d/portcheck: format code

* collectors/python.d/powerdns: format code

* collectors/python.d/proxysql: format code

* collectors/python.d/puppet: format code

* collectors/python.d/redis: format code

* collectors/python.d/rethinkdbs: format code

* collectors/python.d/retroshare: format code

* collectors/python.d/riakkv: format code

* collectors/python.d/samba: format code

* collectors/python.d/sensors: format code

* collectors/python.d/smartd_log: format code

* collectors/python.d/spigotmc: format code

* collectors/python.d/springboot: format code

* collectors/python.d/squid: format code

* collectors/python.d/tomcat: format code

* collectors/python.d/tor: format code

* collectors/python.d/traefik: format code

* collectors/python.d/unbound: format code

* collectors/python.d/uwsgi: format code

* collectors/python.d/varnish: format code

* collectors/python.d/w1sensor: format code

* collectors/python.d/weblog: format code
Author: Ilya Mashchenko
Date: 2020-01-24 12:08:56 +03:00 (committed by GitHub)
Parent: 6a3ad766bd
Commit: f1d133190b
61 changed files with 162 additions and 187 deletions
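All hunks below are mechanical style cleanups with no behavior change. A minimal sketch of the three recurring patterns, assembled from fragments of the diffs below (illustrative only, not itself part of the commit):

# Pattern 1: imports regrouped and sorted, e.g. httpcheck now imports re before
# urllib3, and `from bases.collection import ...` moves below the
# bases.FrameworkServices imports.
import re
import struct

# Pattern 2: PEP 8 whitespace, e.g. energid's 1024*1024 -> 1024 * 1024 and
# (k,v) -> (k, v).
MiB = 1024 * 1024
locked = {'secmem_' + k: v for k, v in {'used': 0}.items()}

# Pattern 3: overlong chart 'options' lines wrapped onto a continuation line,
# e.g. dovecot's context_switches chart definition.
options = [None, 'Dovecot Context Switches', 'switches', 'context switches', 'dovecot.context_switches',
           'line']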

collectors/python.d/adaptec_raid

@@ -5,13 +5,11 @@
import re
from copy import deepcopy
from bases.FrameworkServices.ExecutableService import ExecutableService
from bases.collection import find_binary
disabled_by_default = True
update_every = 5

collectors/python.d/am2320

@@ -7,14 +7,13 @@ try:
import board
import busio
import adafruit_am2320
HAS_AM2320 = True
except ImportError:
HAS_AM2320 = False
from bases.FrameworkServices.SimpleService import SimpleService
ORDER = [
'temperature',
'humidity',
@@ -60,9 +59,9 @@ class Service(SimpleService):
def get_data(self):
try:
return {
-'temperature': self.am.temperature,
-'humidity': self.am.relative_humidity,
-}
+'temperature': self.am.temperature,
+'humidity': self.am.relative_humidity,
+}
except (OSError, RuntimeError) as error:
self.error(error)

collectors/python.d/apache

@@ -5,7 +5,6 @@
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'requests',
'connections',

collectors/python.d/beanstalk

@@ -5,6 +5,7 @@
try:
import beanstalkc
BEANSTALKC = True
except ImportError:
BEANSTALKC = False
@@ -12,7 +13,6 @@ except ImportError:
from bases.FrameworkServices.SimpleService import SimpleService
from bases.loaders import load_yaml
ORDER = [
'cpu_usage',
'jobs_rate',
@@ -109,7 +109,7 @@ CHARTS = {
'options': [None, 'Uptime', 'seconds', 'server statistics', 'beanstalk.uptime', 'line'],
'lines': [
['uptime'],
-]
+]
}
}

collectors/python.d/bind_rndc

@@ -4,13 +4,11 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import os
from collections import defaultdict
from subprocess import Popen
-from bases.collection import find_binary
from bases.FrameworkServices.SimpleService import SimpleService
+from bases.collection import find_binary
update_every = 30

collectors/python.d/boinc

@@ -6,10 +6,8 @@
import socket
from bases.FrameworkServices.SimpleService import SimpleService
from third_party import boinc_client
ORDER = [
'tasks',
'states',

collectors/python.d/ceph

@@ -5,6 +5,7 @@
try:
import rados
CEPH = True
except ImportError:
CEPH = False
@@ -161,7 +162,7 @@ class Service(SimpleService):
:return: None
"""
# Pool lines
-for pool in sorted(self._get_df()['pools'], key=lambda x:sorted(x.keys())):
+for pool in sorted(self._get_df()['pools'], key=lambda x: sorted(x.keys())):
self.definitions['pool_usage']['lines'].append([pool['name'],
pool['name'],
'absolute'])
@@ -169,20 +170,20 @@
pool['name'],
'absolute'])
self.definitions['pool_read_bytes']['lines'].append(['read_{0}'.format(pool['name']),
pool['name'],
'absolute', 1, 1024])
-self.definitions['pool_write_bytes']['lines'].append(['write_{0}'.format(pool['name']),
-pool['name'],
-'absolute', 1, 1024])
+self.definitions['pool_write_bytes']['lines'].append(['write_{0}'.format(pool['name']),
+pool['name'],
+'absolute', 1, 1024])
self.definitions['pool_read_operations']['lines'].append(['read_operations_{0}'.format(pool['name']),
pool['name'],
'absolute'])
-self.definitions['pool_write_operations']['lines'].append(['write_operations_{0}'.format(pool['name']),
-pool['name'],
-'absolute'])
+self.definitions['pool_write_operations']['lines'].append(['write_operations_{0}'.format(pool['name']),
+pool['name'],
+'absolute'])
# OSD lines
-for osd in sorted(self._get_osd_df()['nodes'], key=lambda x:sorted(x.keys())):
+for osd in sorted(self._get_osd_df()['nodes'], key=lambda x: sorted(x.keys())):
self.definitions['osd_usage']['lines'].append([osd['name'],
osd['name'],
'absolute'])

collectors/python.d/couchdb

@@ -6,8 +6,8 @@
from collections import namedtuple, defaultdict
from json import loads
-from threading import Thread
from socket import gethostbyname, gaierror
+from threading import Thread
try:
from queue import Queue
@@ -16,10 +16,8 @@ except ImportError:
from bases.FrameworkServices.UrlService import UrlService
update_every = 1
METHODS = namedtuple('METHODS', ['get_data', 'url', 'stats'])
OVERVIEW_STATS = [
@@ -127,7 +125,7 @@ CHARTS = {
['couchdb_httpd_request_methods_GET', 'GET', 'incremental'],
['couchdb_httpd_request_methods_HEAD', 'HEAD', 'incremental'],
['couchdb_httpd_request_methods_OPTIONS', 'OPTIONS',
-'incremental'],
+'incremental'],
['couchdb_httpd_request_methods_POST', 'POST', 'incremental'],
['couchdb_httpd_request_methods_PUT', 'PUT', 'incremental']
]
@@ -141,13 +139,13 @@ CHARTS = {
['couchdb_httpd_status_codes_201', '201 Created', 'incremental'],
['couchdb_httpd_status_codes_202', '202 Accepted', 'incremental'],
['couchdb_httpd_status_codes_2xx', 'Other 2xx Success',
-'incremental'],
+'incremental'],
['couchdb_httpd_status_codes_3xx', '3xx Redirection',
-'incremental'],
+'incremental'],
['couchdb_httpd_status_codes_4xx', '4xx Client error',
-'incremental'],
+'incremental'],
['couchdb_httpd_status_codes_5xx', '5xx Server error',
-'incremental']
+'incremental']
]
},
'open_files': {
@@ -280,19 +278,19 @@ class Service(UrlService):
if self._get_raw_data(self.url + '/' + db)]
for db in self.dbs:
self.definitions['db_sizes_file']['lines'].append(
-['db_'+db+'_sizes_file', db, 'absolute', 1, 1000]
+['db_' + db + '_sizes_file', db, 'absolute', 1, 1000]
)
self.definitions['db_sizes_external']['lines'].append(
-['db_'+db+'_sizes_external', db, 'absolute', 1, 1000]
+['db_' + db + '_sizes_external', db, 'absolute', 1, 1000]
)
self.definitions['db_sizes_active']['lines'].append(
-['db_'+db+'_sizes_active', db, 'absolute', 1, 1000]
+['db_' + db + '_sizes_active', db, 'absolute', 1, 1000]
)
self.definitions['db_doc_counts']['lines'].append(
-['db_'+db+'_doc_count', db, 'absolute']
+['db_' + db + '_doc_count', db, 'absolute']
)
self.definitions['db_doc_del_counts']['lines'].append(
-['db_'+db+'_doc_del_count', db, 'absolute']
+['db_' + db + '_doc_del_count', db, 'absolute']
)
return UrlService.check(self)

collectors/python.d/dns_query_time

@@ -11,6 +11,7 @@ try:
import dns.message
import dns.query
import dns.name
DNS_PYTHON = True
except ImportError:
DNS_PYTHON = False
@@ -22,7 +23,6 @@ except ImportError:
from bases.FrameworkServices.SimpleService import SimpleService
update_every = 5

collectors/python.d/dnsdist

@@ -5,7 +5,6 @@ from json import loads
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'queries',
'queries_dropped',
@@ -21,7 +20,6 @@ ORDER = [
'query_latency_avg'
]
CHARTS = {
'queries': {
'options': [None, 'Client queries received', 'queries/s', 'queries', 'dnsdist.queries', 'line'],

collectors/python.d/dockerd

@@ -4,14 +4,14 @@
try:
import docker
HAS_DOCKER = True
except ImportError:
HAS_DOCKER = False
-from bases.FrameworkServices.SimpleService import SimpleService
from distutils.version import StrictVersion
+from bases.FrameworkServices.SimpleService import SimpleService
# charts order (can be overridden if you want less charts, or different order)
ORDER = [
@@ -44,7 +44,6 @@ CHARTS = {
}
}
MIN_REQUIRED_VERSION = '3.2.0'

collectors/python.d/dovecot

@@ -5,10 +5,8 @@
from bases.FrameworkServices.SocketService import SocketService
UNIX_SOCKET = '/var/run/dovecot/stats'
ORDER = [
'sessions',
'logins',
@@ -51,7 +49,8 @@ CHARTS = {
]
},
'context_switches': {
-'options': [None, 'Dovecot Context Switches', 'switches', 'context switches', 'dovecot.context_switches', 'line'],
+'options': [None, 'Dovecot Context Switches', 'switches', 'context switches', 'dovecot.context_switches',
+'line'],
'lines': [
['vol_cs', 'voluntary', 'absolute'],
['invol_cs', 'involuntary', 'absolute']

collectors/python.d/energid

@@ -41,9 +41,9 @@ CHARTS = {
'mempool': {
'options': [None, 'MemPool', 'MiB', 'memory', 'energid.mempool', 'area'],
'lines': [
-['mempool_max', 'Max', 'absolute', None, 1024*1024],
-['mempool_current', 'Usage', 'absolute', None, 1024*1024],
-['mempool_txsize', 'TX Size', 'absolute', None, 1024*1024],
+['mempool_max', 'Max', 'absolute', None, 1024 * 1024],
+['mempool_current', 'Usage', 'absolute', None, 1024 * 1024],
+['mempool_txsize', 'TX Size', 'absolute', None, 1024 * 1024],
],
},
'secmem': {
@@ -93,22 +93,23 @@ METHODS = {
'mempool_max': r['maxmempool'],
},
'getmemoryinfo': lambda r: dict([
-('secmem_' + k, v) for (k,v) in r['locked'].items()
+('secmem_' + k, v) for (k, v) in r['locked'].items()
]),
'getnetworkinfo': lambda r: {
-'network_timeoffset' : r['timeoffset'],
+'network_timeoffset': r['timeoffset'],
'network_connections': r['connections'],
},
'gettxoutsetinfo': lambda r: {
-'utxo_count' : r['txouts'],
-'utxo_xfers' : r['transactions'],
-'utxo_size' : r['disk_size'],
-'utxo_amount' : r['total_amount'],
+'utxo_count': r['txouts'],
+'utxo_xfers': r['transactions'],
+'utxo_size': r['disk_size'],
+'utxo_amount': r['total_amount'],
},
}
JSON_RPC_VERSION = '1.1'
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)

collectors/python.d/example

@@ -7,7 +7,6 @@ from random import SystemRandom
from bases.FrameworkServices.SimpleService import SimpleService
priority = 90000
ORDER = [

collectors/python.d/exim

@@ -5,7 +5,6 @@
from bases.FrameworkServices.ExecutableService import ExecutableService
EXIM_COMMAND = 'exim -bpc'
ORDER = [

collectors/python.d/fail2ban

@@ -3,15 +3,13 @@
# Author: ilyam8
# SPDX-License-Identifier: GPL-3.0-or-later
-import re
import os
+import re
from collections import defaultdict
from glob import glob
from bases.FrameworkServices.LogService import LogService
ORDER = [
'jails_bans',
'jails_in_jail',
@@ -25,13 +23,13 @@ def charts(jails):
ch = {
ORDER[0]: {
-'options': [None, 'Jails Ban Rate', 'bans/s', 'bans', 'jail.bans', 'line'],
-'lines': []
+'options': [None, 'Jails Ban Rate', 'bans/s', 'bans', 'jail.bans', 'line'],
+'lines': []
},
ORDER[1]: {
-'options': [None, 'Banned IPs (since the last restart of netdata)', 'IPs', 'in jail',
-'jail.in_jail', 'line'],
-'lines': []
+'options': [None, 'Banned IPs (since the last restart of netdata)', 'IPs', 'in jail',
+'jail.in_jail', 'line'],
+'lines': []
},
}
for jail in jails:

collectors/python.d/freeradius

@@ -6,8 +6,8 @@
import re
from subprocess import Popen, PIPE
-from bases.collection import find_binary
from bases.FrameworkServices.SimpleService import SimpleService
+from bases.collection import find_binary
update_every = 15

collectors/python.d/gearman

@@ -4,9 +4,9 @@
# Gearman Netdata Plugin
-from bases.FrameworkServices.SocketService import SocketService
from copy import deepcopy
+from bases.FrameworkServices.SocketService import SocketService
CHARTS = {
'total_workers': {
@@ -29,6 +29,7 @@ def job_chart_template(job_name):
]
}
def build_result_dict(job):
"""
Get the status for each job
@@ -46,6 +47,7 @@ def build_result_dict(job):
'{0}_running'.format(job['job_name']): running,
}
def parse_worker_data(job):
job_name = job[0]
job_metrics = job[1:]

collectors/python.d/go_expvar

@@ -4,13 +4,12 @@
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import division
-import json
+import json
from collections import namedtuple
from bases.FrameworkServices.UrlService import UrlService
MEMSTATS_ORDER = [
'memstats_heap',
'memstats_stack',

collectors/python.d/haproxy

@@ -179,7 +179,6 @@
}
}
METRICS = {
'bin': {'algorithm': 'incremental', 'divisor': 1024},
'bout': {'algorithm': 'incremental', 'divisor': 1024},
@@ -193,7 +192,6 @@ METRICS = {
'hrsp_other': {'algorithm': 'incremental', 'divisor': 1}
}
BACKEND_METRICS = {
'qtime': {'algorithm': 'absolute', 'divisor': 1},
'ctime': {'algorithm': 'absolute', 'divisor': 1},
@@ -201,7 +199,6 @@ BACKEND_METRICS = {
'ttime': {'algorithm': 'absolute', 'divisor': 1}
}
REGEX = dict(url=re_compile(r'idle = (?P<idle>[0-9]+)'),
socket=re_compile(r'Idle_pct: (?P<idle>[0-9]+)'))
@@ -309,7 +306,7 @@ class Service(UrlService, SocketService):
name, METRICS[metric]['algorithm'], 1,
METRICS[metric]['divisor']])
self.definitions['fhrsp_total']['lines'].append(['_'.join(['frontend', 'hrsp_total', idx]),
-name, 'incremental', 1, 1])
+name, 'incremental', 1, 1])
for back in self.data['backend']:
name, idx = back['# pxname'], back['# pxname'].replace('.', '_')
for metric in METRICS:
@@ -317,7 +314,7 @@ class Service(UrlService, SocketService):
name, METRICS[metric]['algorithm'], 1,
METRICS[metric]['divisor']])
self.definitions['bhrsp_total']['lines'].append(['_'.join(['backend', 'hrsp_total', idx]),
-name, 'incremental', 1, 1])
+name, 'incremental', 1, 1])
for metric in BACKEND_METRICS:
self.definitions['b' + metric]['lines'].append(['_'.join(['backend', metric, idx]),
name, BACKEND_METRICS[metric]['algorithm'], 1,

collectors/python.d/hddtemp

@@ -6,12 +6,10 @@
import re
from copy import deepcopy
from bases.FrameworkServices.SocketService import SocketService
ORDER = [
'temperatures',
]

collectors/python.d/hpssa

@@ -5,10 +5,10 @@
import os
import re
+from copy import deepcopy
from bases.FrameworkServices.ExecutableService import ExecutableService
from bases.collection import find_binary
-from copy import deepcopy
disabled_by_default = True
update_every = 5

collectors/python.d/httpcheck

@@ -3,9 +3,10 @@
# Original Author: ccremer (github.com/ccremer)
# SPDX-License-Identifier: GPL-3.0-or-later
-import urllib3
import re
+import urllib3
try:
from time import monotonic as time
except ImportError:

collectors/python.d/icecast

@@ -7,7 +7,6 @@ import json
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'listeners',
]

collectors/python.d/ipfs

@@ -7,7 +7,6 @@ import json
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'bandwidth',
'peers',
@@ -89,7 +88,7 @@ class Service(UrlService):
if store_max.endswith('b'):
val, units = store_max[:-2], store_max[-2]
if units in SI_zeroes:
-val += '0'*SI_zeroes[units]
+val += '0' * SI_zeroes[units]
store_max = val
try:
store_max = int(store_max)
@@ -117,10 +116,10 @@
[('size', 'RepoSize', int), ('objects', 'NumObjects', int), ('avail', 'StorageMax', self._storagemax)],
}
if self.do_pinapi:
-cfg.update({
-'/api/v0/pin/ls':
-[('pinned', 'Keys', len), ('recursive_pins', 'Keys', self._recursive_pins)]
-})
+cfg.update({
+'/api/v0/pin/ls':
+[('pinned', 'Keys', len), ('recursive_pins', 'Keys', self._recursive_pins)]
+})
r = dict()
for suburl in cfg:
in_json = self._get_json(suburl)

collectors/python.d/isc_dhcpd

@@ -7,9 +7,9 @@ import os
import re
import time
try:
import ipaddress
HAVE_IP_ADDRESS = True
except ImportError:
HAVE_IP_ADDRESS = False
@@ -19,7 +19,6 @@ from copy import deepcopy
from bases.FrameworkServices.SimpleService import SimpleService
ORDER = [
'pools_utilization',
'pools_active_leases',

collectors/python.d/litespeed

@@ -4,27 +4,25 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import glob
-import re
import os
+import re
from collections import namedtuple
from bases.FrameworkServices.SimpleService import SimpleService
update_every = 10
# charts order (can be overridden if you want less charts, or different order)
ORDER = [
-'net_throughput_http', # net throughput
+'net_throughput_http',  # net throughput
'net_throughput_https', # net throughput
-'connections_http', # connections
-'connections_https', # connections
-'requests', # requests
-'requests_processing', # requests
-'pub_cache_hits', # cache
-'private_cache_hits', # cache
-'static_hits', # static
+'connections_http',  # connections
+'connections_https',  # connections
+'requests',  # requests
+'requests_processing',  # requests
+'pub_cache_hits',  # cache
+'private_cache_hits',  # cache
+'static_hits',  # static
]
CHARTS = {

collectors/python.d/megacli

@@ -9,7 +9,6 @@ import re
from bases.FrameworkServices.ExecutableService import ExecutableService
from bases.collection import find_binary
disabled_by_default = True
update_every = 5
@@ -27,7 +26,7 @@ def adapter_charts(ads):
'adapter_degraded': {
'options': [None, 'Adapter State', 'is degraded', 'adapter', 'megacli.adapter_degraded', 'line'],
'lines': dims(ads)
-},
+},
}
return order, charts
@@ -111,7 +110,7 @@ def find_adapters(d):
def find_pds(d):
-keys = ('Slot Number', 'Media Error Count', 'Predictive Failure Count')
+keys = ('Slot Number', 'Media Error Count', 'Predictive Failure Count')
d = ' '.join(v.strip() for v in d if v.startswith(keys))
return [PD(*v) for v in RE_VD.findall(d)]

collectors/python.d/memcached

@@ -5,7 +5,6 @@
from bases.FrameworkServices.SocketService import SocketService
ORDER = [
'cache',
'net',

collectors/python.d/mongodb

@@ -12,13 +12,13 @@ from sys import exc_info
try:
from pymongo import MongoClient, ASCENDING, DESCENDING
from pymongo.errors import PyMongoError
PYMONGO = True
except ImportError:
PYMONGO = False
from bases.FrameworkServices.SimpleService import SimpleService
REPL_SET_STATES = [
('1', 'primary'),
('8', 'down'),

collectors/python.d/mysql

@@ -6,7 +6,6 @@
from bases.FrameworkServices.MySQLService import MySQLService
# query executed on MySQL server
QUERY_GLOBAL = 'SHOW GLOBAL STATUS;'
QUERY_SLAVE = 'SHOW SLAVE STATUS;'

collectors/python.d/nginx

@@ -5,7 +5,6 @@
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'connections',
'requests',

collectors/python.d/nginx_plus

@@ -16,7 +16,6 @@ except ImportError:
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'requests_total',
'requests_current',

collectors/python.d/nsd

@@ -7,7 +7,6 @@ import re
from bases.FrameworkServices.ExecutableService import ExecutableService
update_every = 30
NSD_CONTROL_COMMAND = 'nsd-control stats_noreset'

collectors/python.d/ntpd

@@ -4,12 +4,11 @@
# Author: Ilya Mashchenko (ilyam8)
# SPDX-License-Identifier: GPL-3.0-or-later
-import struct
import re
+import struct
from bases.FrameworkServices.SocketService import SocketService
# NTP Control Message Protocol constants
MODE = 6
HEADER_FORMAT = '!BBHHHHH'

collectors/python.d/openldap

@@ -5,13 +5,13 @@
try:
import ldap
HAS_LDAP = True
except ImportError:
HAS_LDAP = False
from bases.FrameworkServices.SimpleService import SimpleService
DEFAULT_SERVER = 'localhost'
DEFAULT_PORT = '389'
DEFAULT_TLS = False
@@ -110,7 +110,7 @@ SEARCH_LIST = {
'add_operations': (
'cn=Add,cn=Operations,cn=Monitor', 'monitorOpInitiated',
),
-'delete_operations': (
+'delete_operations': (
'cn=Delete,cn=Operations,cn=Monitor', 'monitorOpCompleted',
),
'modify_operations': (
@@ -193,7 +193,7 @@ class Service(SimpleService):
num = self.conn.search(dn, ldap.SCOPE_BASE, 'objectClass=*', [attr, ])
result_type, result_data = self.conn.result(num, 1)
except ldap.LDAPError as error:
self.error("Empty result. Check bind username/password. Message: ",error)
self.error("Empty result. Check bind username/password. Message: ", error)
self.alive = False
return None

collectors/python.d/oracledb

@@ -9,11 +9,11 @@ from bases.FrameworkServices.SimpleService import SimpleService
try:
import cx_Oracle
HAS_ORACLE = True
except ImportError:
HAS_ORACLE = False
ORDER = [
'session_count',
'session_limit_usage',
@@ -172,7 +172,6 @@ CHARTS = {
},
}
CX_CONNECT_STRING = "{0}/{1}@//{2}/{3}"
QUERY_SYSTEM = '''

collectors/python.d/ovpn_status_log

@@ -7,7 +7,6 @@ import re
from bases.FrameworkServices.SimpleService import SimpleService
update_every = 10
ORDER = [

collectors/python.d/phpfpm

@@ -9,7 +9,6 @@ import re
from bases.FrameworkServices.UrlService import UrlService
REGEX = re.compile(r'([a-z][a-z ]+): ([\d.]+)')
POOL_INFO = [

collectors/python.d/portcheck

@@ -12,7 +12,6 @@ except ImportError:
from bases.FrameworkServices.SimpleService import SimpleService
PORT_LATENCY = 'connect'
PORT_SUCCESS = 'success'

collectors/python.d/powerdns

@@ -8,7 +8,6 @@ from json import loads
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'questions',
'cache_usage',

collectors/python.d/proxysql

@@ -189,7 +189,8 @@ CHARTS = {
'lines': []
},
'commands_duration': {
-'options': [None, 'ProxySQL Commands Duration', 'milliseconds', 'commands', 'proxysql.commands_duration', 'line'],
+'options': [None, 'ProxySQL Commands Duration', 'milliseconds', 'commands', 'proxysql.commands_duration',
+'line'],
'lines': []
}
}

collectors/python.d/puppet

@@ -12,14 +12,12 @@
#
import socket
from json import loads
from bases.FrameworkServices.UrlService import UrlService
update_every = 5
MiB = 1 << 20
CPU_SCALE = 1000
@@ -108,8 +106,8 @@ class Service(UrlService):
non_heap_mem = jvm_metrics['non-heap-memory']
for k in ['max', 'committed', 'used', 'init']:
-data['jvm_heap_'+k] = heap_mem[k]
-data['jvm_nonheap_'+k] = non_heap_mem[k]
+data['jvm_heap_' + k] = heap_mem[k]
+data['jvm_nonheap_' + k] = non_heap_mem[k]
fd_open = jvm_metrics['file-descriptors']
data['fd_max'] = fd_open['max']

collectors/python.d/redis

@@ -5,7 +5,6 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import re
from copy import deepcopy
from bases.FrameworkServices.SocketService import SocketService

collectors/python.d/rethinkdbs

@@ -5,6 +5,7 @@
try:
import rethinkdb as rdb
HAS_RETHINKDB = True
except ImportError:
HAS_RETHINKDB = False

collectors/python.d/retroshare

@@ -7,7 +7,6 @@ import json
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'bandwidth',
'peers',

collectors/python.d/riakkv

@@ -67,14 +67,16 @@ ORDER = [
CHARTS = {
# Throughput metrics
"kv.node_operations": {
"options": [None, "Reads & writes coordinated by this node", "operations/s", "throughput", "riak.kv.throughput", "line"],
"options": [None, "Reads & writes coordinated by this node", "operations/s", "throughput", "riak.kv.throughput",
"line"],
"lines": [
["node_gets_total", "gets", "incremental"],
["node_puts_total", "puts", "incremental"]
]
},
"dt.vnode_updates": {
"options": [None, "Update operations coordinated by local vnodes by data type", "operations/s", "throughput", "riak.dt.vnode_updates", "line"],
"options": [None, "Update operations coordinated by local vnodes by data type", "operations/s", "throughput",
"riak.dt.vnode_updates", "line"],
"lines": [
["vnode_counter_update_total", "counters", "incremental"],
["vnode_set_update_total", "sets", "incremental"],
@@ -94,7 +96,8 @@ CHARTS = {
]
},
"consistent.operations": {
"options": [None, "Consistent node operations", "operations/s", "throughput", "riak.consistent.operations", "line"],
"options": [None, "Consistent node operations", "operations/s", "throughput", "riak.consistent.operations",
"line"],
"lines": [
["consistent_gets_total", "gets", "incremental"],
["consistent_puts_total", "puts", "incremental"],
@@ -103,7 +106,8 @@ CHARTS = {
# Latency metrics
"kv.latency.get": {
"options": [None, "Time between reception of a client GET request and subsequent response to client", "ms", "latency", "riak.kv.latency.get", "line"],
"options": [None, "Time between reception of a client GET request and subsequent response to client", "ms",
"latency", "riak.kv.latency.get", "line"],
"lines": [
["node_get_fsm_time_mean", "mean", "absolute", 1, 1000],
["node_get_fsm_time_median", "median", "absolute", 1, 1000],
@@ -113,7 +117,8 @@ CHARTS = {
]
},
"kv.latency.put": {
"options": [None, "Time between reception of a client PUT request and subsequent response to client", "ms", "latency", "riak.kv.latency.put", "line"],
"options": [None, "Time between reception of a client PUT request and subsequent response to client", "ms",
"latency", "riak.kv.latency.put", "line"],
"lines": [
["node_put_fsm_time_mean", "mean", "absolute", 1, 1000],
["node_put_fsm_time_median", "median", "absolute", 1, 1000],
@@ -123,7 +128,8 @@ CHARTS = {
]
},
"dt.latency.counter": {
"options": [None, "Time it takes to perform an Update Counter operation", "ms", "latency", "riak.dt.latency.counter_merge", "line"],
"options": [None, "Time it takes to perform an Update Counter operation", "ms", "latency",
"riak.dt.latency.counter_merge", "line"],
"lines": [
["object_counter_merge_time_mean", "mean", "absolute", 1, 1000],
["object_counter_merge_time_median", "median", "absolute", 1, 1000],
@@ -133,7 +139,8 @@ CHARTS = {
]
},
"dt.latency.set": {
"options": [None, "Time it takes to perform an Update Set operation", "ms", "latency", "riak.dt.latency.set_merge", "line"],
"options": [None, "Time it takes to perform an Update Set operation", "ms", "latency",
"riak.dt.latency.set_merge", "line"],
"lines": [
["object_set_merge_time_mean", "mean", "absolute", 1, 1000],
["object_set_merge_time_median", "median", "absolute", 1, 1000],
@@ -143,7 +150,8 @@ CHARTS = {
]
},
"dt.latency.map": {
"options": [None, "Time it takes to perform an Update Map operation", "ms", "latency", "riak.dt.latency.map_merge", "line"],
"options": [None, "Time it takes to perform an Update Map operation", "ms", "latency",
"riak.dt.latency.map_merge", "line"],
"lines": [
["object_map_merge_time_mean", "mean", "absolute", 1, 1000],
["object_map_merge_time_median", "median", "absolute", 1, 1000],
@@ -164,7 +172,8 @@ CHARTS = {
]
},
"search.latency.index": {
"options": [None, "Time it takes Search to index a new document", "ms", "latency", "riak.search.latency.index", "line"],
"options": [None, "Time it takes Search to index a new document", "ms", "latency", "riak.search.latency.index",
"line"],
"lines": [
["search_index_latency_median", "median", "absolute", 1, 1000],
["search_index_latency_min", "min", "absolute", 1, 1000],
@@ -205,7 +214,8 @@ CHARTS = {
]
},
"vm.memory.processes": {
"options": [None, "Memory allocated & used by Erlang processes", "MB", "vm", "riak.vm.memory.processes", "line"],
"options": [None, "Memory allocated & used by Erlang processes", "MB", "vm", "riak.vm.memory.processes",
"line"],
"lines": [
["memory_processes", "allocated", "absolute", 1, 1024 * 1024],
["memory_processes_used", "used", "absolute", 1, 1024 * 1024]
@@ -214,7 +224,8 @@ CHARTS = {
# General Riak Load/Health metrics
"kv.siblings_encountered.get": {
"options": [None, "Number of siblings encountered during GET operations by this node during the past minute", "siblings", "load", "riak.kv.siblings_encountered.get", "line"],
"options": [None, "Number of siblings encountered during GET operations by this node during the past minute",
"siblings", "load", "riak.kv.siblings_encountered.get", "line"],
"lines": [
["node_get_fsm_siblings_mean", "mean", "absolute"],
["node_get_fsm_siblings_median", "median", "absolute"],
@@ -224,7 +235,8 @@ CHARTS = {
]
},
"kv.objsize.get": {
"options": [None, "Object size encountered by this node during the past minute", "KB", "load", "riak.kv.objsize.get", "line"],
"options": [None, "Object size encountered by this node during the past minute", "KB", "load",
"riak.kv.objsize.get", "line"],
"lines": [
["node_get_fsm_objsize_mean", "mean", "absolute", 1, 1024],
["node_get_fsm_objsize_median", "median", "absolute", 1, 1024],
@@ -234,7 +246,9 @@ CHARTS = {
]
},
"search.vnodeq_size": {
"options": [None, "Number of unprocessed messages in the vnode message queues of Search on this node in the past minute", "messages", "load", "riak.search.vnodeq_size", "line"],
"options": [None,
"Number of unprocessed messages in the vnode message queues of Search on this node in the past minute",
"messages", "load", "riak.search.vnodeq_size", "line"],
"lines": [
["riak_search_vnodeq_mean", "mean", "absolute"],
["riak_search_vnodeq_median", "median", "absolute"],
@@ -244,20 +258,23 @@ CHARTS = {
]
},
"search.index_errors": {
"options": [None, "Number of document index errors encountered by Search", "errors", "load", "riak.search.index", "line"],
"options": [None, "Number of document index errors encountered by Search", "errors", "load",
"riak.search.index", "line"],
"lines": [
["search_index_fail_count", "errors", "absolute"]
]
},
"core.pbc": {
"options": [None, "Protocol buffer connections by status", "connections", "load", "riak.core.protobuf_connections", "line"],
"options": [None, "Protocol buffer connections by status", "connections", "load",
"riak.core.protobuf_connections", "line"],
"lines": [
["pbc_active", "active", "absolute"],
# ["pbc_connects", "established_pastmin", "absolute"]
]
},
"core.repairs": {
"options": [None, "Number of repair operations this node has coordinated", "repairs", "load", "riak.core.repairs", "line"],
"options": [None, "Number of repair operations this node has coordinated", "repairs", "load",
"riak.core.repairs", "line"],
"lines": [
["read_repairs", "read", "absolute"]
]
@@ -275,7 +292,8 @@ CHARTS = {
# Writing "Sidejob's" here seems to cause some weird issues: it results in this chart being rendered in
# its own context and additionally, moves the entire Riak graph all the way up to the top of the Netdata
# dashboard for some reason.
"options": [None, "Finite state machines being rejected by Sidejobs overload protection", "fsms", "load", "riak.core.fsm_rejected", "line"],
"options": [None, "Finite state machines being rejected by Sidejobs overload protection", "fsms", "load",
"riak.core.fsm_rejected", "line"],
"lines": [
["node_get_fsm_rejected", "get", "absolute"],
["node_put_fsm_rejected", "put", "absolute"]
@@ -284,7 +302,8 @@ CHARTS = {
# General Riak Search Load / Health metrics
"search.errors": {
"options": [None, "Number of writes to Search failed due to bad data format by reason", "writes", "load", "riak.search.index", "line"],
"options": [None, "Number of writes to Search failed due to bad data format by reason", "writes", "load",
"riak.search.index", "line"],
"lines": [
["search_index_bad_entry_count", "bad_entry", "absolute"],
["search_index_extract_fail_count", "extract_fail", "absolute"],

collectors/python.d/samba

@@ -18,8 +18,8 @@
import re
-from bases.collection import find_binary
from bases.FrameworkServices.ExecutableService import ExecutableService
+from bases.collection import find_binary
disabled_by_default = True

collectors/python.d/sensors

@@ -3,10 +3,8 @@
# Author: Pawel Krupa (paulfantom)
# SPDX-License-Identifier: GPL-3.0-or-later
-from third_party import lm_sensors as sensors
from bases.FrameworkServices.SimpleService import SimpleService
+from third_party import lm_sensors as sensors
ORDER = [
'temperature',

collectors/python.d/smartd_log

@@ -5,13 +5,11 @@
import os
import re
from copy import deepcopy
from time import time
-from bases.collection import read_last_line
from bases.FrameworkServices.SimpleService import SimpleService
+from bases.collection import read_last_line
INCREMENTAL = 'incremental'
ABSOLUTE = 'absolute'
@@ -59,7 +57,6 @@ ATTR_VERIFY_ERR_COR = 'verify-total-err-corrected'
ATTR_VERIFY_ERR_UNC = 'verify-total-unc-errors'
ATTR_TEMPERATURE = 'temperature'
RE_ATA = re.compile(
'(\d+);' # attribute
'(\d+);' # normalized value
@@ -533,7 +530,9 @@ def handle_error(*errors):
return method(*args)
except errors:
return None
return on_call
return on_method
@@ -653,10 +652,10 @@ class Service(SimpleService):
current_time = time()
for disk in self.disks[:]:
if any(
-[
-not disk.alive,
-not disk.log_file.is_active(current_time, self.age),
-]
+[
+not disk.alive,
+not disk.log_file.is_active(current_time, self.age),
+]
):
self.disks.remove(disk.raw_name)
self.remove_disk_from_charts(disk)
@@ -673,7 +672,7 @@
return len(self.disks)
-def create_disk_from_file(self, full_name, current_time):
+def create_disk_from_file(self, full_name, current_time):
if not full_name.endswith(CSV):
self.debug('skipping {0}: not a csv file'.format(full_name))
return None

collectors/python.d/spigotmc

@@ -3,12 +3,11 @@
# Author: Austin S. Hemmelgarn (Ferroin)
# SPDX-License-Identifier: GPL-3.0-or-later
-import socket
import platform
import re
+import socket
from bases.FrameworkServices.SimpleService import SimpleService
from third_party import mcrcon
# Update only every 5 seconds because collection takes in excess of
@@ -43,9 +42,8 @@ CHARTS = {
}
}
_TPS_REGEX = re.compile(
-r'^.*: .*?' # Message lead-in
+r'^.*: .*?'  # Message lead-in
r'(\d{1,2}.\d+), .*?' # 1-minute TPS value
r'(\d{1,2}.\d+), .*?' # 5-minute TPS value
r'(\d{1,2}\.\d+).*$', # 15-minute TPS value
@@ -107,10 +105,10 @@ class Service(SimpleService):
def is_alive(self):
if any(
-[
-not self.alive,
-self.console.socket.getsockopt(socket.IPPROTO_TCP, socket.TCP_INFO, 0) != 1
-]
+[
+not self.alive,
+self.console.socket.getsockopt(socket.IPPROTO_TCP, socket.TCP_INFO, 0) != 1
+]
):
return self.reconnect()
return True
@@ -131,7 +129,8 @@ class Service(SimpleService):
else:
self.error('Unable to process TPS values.')
if not raw:
self.error("'{0}' command returned no value, make sure you set correct password".format(COMMAND_TPS))
self.error(
"'{0}' command returned no value, make sure you set correct password".format(COMMAND_TPS))
except mcrcon.MCRconException:
self.error('Unable to fetch TPS values.')
except socket.error:

collectors/python.d/springboot

@@ -4,8 +4,8 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import json
-from bases.FrameworkServices.UrlService import UrlService
+from bases.FrameworkServices.UrlService import UrlService
DEFAULT_ORDER = [
'response_code',
@@ -146,7 +146,7 @@ class Service(UrlService):
}
for line in lines:
-dimension = line.get('dimension', None) or self.die('dimension is missing: %s' % chart_id)
+dimension = line.get('dimension', None) or self.die('dimension is missing: %s' % chart_id)
name = line.get('name', dimension)
algorithm = line.get('algorithm', 'absolute')
multiplier = line.get('multiplier', 1)

collectors/python.d/squid

@@ -5,7 +5,6 @@
from bases.FrameworkServices.SocketService import SocketService
ORDER = [
'clients_net',
'clients_requests',

collectors/python.d/tomcat

@@ -4,8 +4,8 @@
# Author: Wei He (Wing924)
# SPDX-License-Identifier: GPL-3.0-or-later
-import xml.etree.ElementTree as ET
import re
+import xml.etree.ElementTree as ET
from bases.FrameworkServices.UrlService import UrlService

collectors/python.d/tor

@@ -11,11 +11,11 @@ try:
import stem
import stem.connection
import stem.control
STEM_AVAILABLE = True
except ImportError:
STEM_AVAILABLE = False
DEF_PORT = 'default'
ORDER = [
@@ -35,6 +35,7 @@ CHARTS = {
class Service(SimpleService):
"""Provide netdata service for Tor"""
def __init__(self, configuration=None, name=None):
super(Service, self).__init__(configuration=configuration, name=name)
self.order = ORDER

collectors/python.d/traefik

@@ -4,12 +4,10 @@
# SPDX-License-Identifier: GPL-3.0-or-later
from collections import defaultdict
from json import loads
from bases.FrameworkServices.UrlService import UrlService
ORDER = [
'response_statuses',
'response_codes',

collectors/python.d/unbound

@@ -5,7 +5,6 @@
import os
import sys
from copy import deepcopy
from bases.FrameworkServices.SocketService import SocketService

collectors/python.d/uwsgi

@@ -5,8 +5,8 @@
import json
from copy import deepcopy
-from bases.FrameworkServices.SocketService import SocketService
+from bases.FrameworkServices.SocketService import SocketService
ORDER = [
'requests',

collectors/python.d/varnish

@@ -281,15 +281,15 @@ class Service(ExecutableService):
chart = {
chart_name:
-{
-'options': [None, title, 'kilobits/s', 'backend response statistics',
-'varnish.backend', 'area'],
-'lines': [
-[hdr_bytes, 'header', 'incremental', 8, 1000],
-[body_bytes, 'body', 'incremental', -8, 1000]
+{
+'options': [None, title, 'kilobits/s', 'backend response statistics',
+'varnish.backend', 'area'],
+'lines': [
+[hdr_bytes, 'header', 'incremental', 8, 1000],
+[body_bytes, 'body', 'incremental', -8, 1000]
]
}
}
}
self.order.insert(0, chart_name)
self.definitions.update(chart)

collectors/python.d/w1sensor

@@ -5,6 +5,7 @@
import os
import re
from bases.FrameworkServices.SimpleService import SimpleService
# default module values (can be overridden per job in `config`)
@@ -40,6 +41,7 @@ THERM_FAMILY = {
class Service(SimpleService):
"""Provide netdata service for 1-Wire sensors"""
def __init__(self, configuration=None, name=None):
SimpleService.__init__(self, configuration=configuration, name=name)
self.order = ORDER

collectors/python.d/weblog

@@ -23,7 +23,6 @@ except ImportError:
from bases.collection import read_last_line
from bases.FrameworkServices.LogService import LogService
ORDER_APACHE_CACHE = [
'apache_cache',
]
@@ -821,8 +820,8 @@ class Web:
dim_id = match_dict['vhost'].replace('.', '_')
if dim_id not in self.data:
self.charts['vhost'].add_dimension([dim_id,
-match_dict['vhost'],
-'incremental'])
+match_dict['vhost'],
+'incremental'])
self.data[dim_id] = 0
self.data[dim_id] += 1
@@ -961,9 +960,9 @@ class Squid:
return False
self.storage['dynamic'] = {
'http_code': {
-'chart': 'squid_detailed_response_codes',
-'func_dim_id': None,
-'func_dim': None
+'chart': 'squid_detailed_response_codes',
+'func_dim_id': None,
+'func_dim': None
},
'hier_code': {
'chart': 'squid_hier_code',
@@ -1105,7 +1104,7 @@ def get_hist(index, buckets, time):
:param time: time
:return: None
"""
-for i in range(len(index)-1, -1, -1):
+for i in range(len(index) - 1, -1, -1):
if time <= index[i]:
buckets[i] += 1
else: