Compare commits: 873e34a3cf...d4597a3571 (10 commits)
| SHA1 |
|---|
| d4597a3571 |
| e4b387613a |
| 86b0283eec |
| 6852885148 |
| d1790aa085 |
| 1f3a8e6f83 |
| 8a00167560 |
| aacfa5052b |
| b4f05f7578 |
| d9cb621811 |
6 changed files with 102 additions and 10 deletions
34 README.md
@@ -96,4 +96,38 @@ There other options how I/O where you can define a file log for check errors if

All done! You can now start your service with **nssm start MonitService** and you are ready to go.

# Developing plugins

You can add simple Python scripts as plugins to add new collections to your data.

The Python scripts need to have the following structure:
```python
def stat(obj_stats):

    obj_stats['test_data']={'pos': 0}

    return obj_stats
```
You define a function named *stat* that takes a single argument, *obj_stats*.

*obj_stats* is a Python dictionary. You can add new data to it or manipulate the originally collected data for your own purposes. The function returns the modified *obj_stats*.
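For example, a plugin can build on the data pastafaristats already collects (the dictionary passed to *stat* includes keys such as *cpu_idle* and *hostname*). The following is only a sketch, assuming *cpu_idle* holds an idle percentage; the *cpu_usage* key is made up for this example and is not part of pastafaristats:

```python
def stat(obj_stats):

    # Derive an overall CPU usage figure from the collected idle percentage.
    # 'cpu_usage' is a hypothetical key used only for illustration.
    cpu_idle = obj_stats.get('cpu_idle', 100.0)
    obj_stats['cpu_usage'] = round(100.0 - cpu_idle, 2)

    return obj_stats
```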
Next, you need to add a *modules* line to your configuration:

```
[DEFAULT]
url_server=http://url_server_collect_data/
group=server-group
modules=path.to.script, path.to.script2
```
"path.to.script" is the Python import path of the plugin script, so that pastafaristats can import it. You can use many scripts, but if you need a large number of stats, it is recommended to use a dedicated stats collector such as collectd or netdata.
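For example, to load the two plugins added in this change (assuming the pastafaristats package is installed, so that *pastafaristats.utils* is importable), the line could look like this:

```
modules=pastafaristats.utils.test, pastafaristats.utils.apache
```

The *pastafaristats.utils.apache* plugin added in this change is a fuller example: it queries an external service (Apache's mod_status) and merges the result into *obj_stats*.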
@@ -17,6 +17,7 @@ import argparse
import datetime
import sched, time
from importlib import import_module

#url="http://url/to/info"

@@ -25,6 +26,8 @@ user_home=str(Path.home())
hostname=getfqdn()

modules_imported={}

def load_config():

    yes_config=False
@@ -74,6 +77,21 @@ def load_config():
    if 'group' in config['DEFAULT']:
        group=config['DEFAULT']['group']

    modules={}

    if 'modules' in config['DEFAULT']:
        arr_modules=config['DEFAULT']['modules'].split(',')

        #load the configured modules (strip spaces so "a, b" in the config works)
        for module in arr_modules:

            if not module in modules_imported:

                modules_imported[module]=import_module(module.strip())

    return url, group

@@ -103,7 +121,12 @@ def run(url, group=''):
    mem_info=psutil.virtual_memory()

    json_info=json.dumps({'net_info': network_info, 'cpu_idle': cpu_idle, 'cpus_idle': cpus_idle, 'cpu_number': cpu_number, 'disks_info': partitions, 'mem_info': mem_info, 'hostname': hostname, 'group': group})
    obj_stats={'net_info': network_info, 'cpu_idle': cpu_idle, 'cpus_idle': cpus_idle, 'cpu_number': cpu_number, 'disks_info': partitions, 'mem_info': mem_info, 'hostname': hostname, 'group': group}

    for module in modules_imported.values():
        obj_stats=module.stat(obj_stats)

    json_info=json.dumps(obj_stats)

    data = urllib.parse.urlencode({'data_json': json_info})

@@ -167,6 +190,8 @@ def start():
    #Wait a couple of seconds until the time collection is synced
    print('Syncing time collection...')

    while True:

        sleep(2)

0 pastafaristats/utils/__init__.py Normal file
30 pastafaristats/utils/apache.py Normal file
@@ -0,0 +1,30 @@
import requests

def stat(obj_stats):

    #new_obj_stats['apache_data']={'status': 1}

    url='http://127.0.0.1/server-status?auto' #Apache mod_status machine-readable endpoint

    try:

        r=requests.get(url)

        data=r.text.split("\n")

        #turn the "Key: value" lines into a dictionary
        final_data={v.split(':')[0].strip():v.split(':')[1].strip() for v in data if v.find(':')!=-1}

        final_data['status']=1

        obj_stats['apache_data']=final_data

    except:
        obj_stats['apache_data']={'status': 0} #create the key here too, so a failed request does not raise KeyError

    return obj_stats

if __name__=='__main__':

    print(stat({}))
6 pastafaristats/utils/test.py Normal file
@@ -0,0 +1,6 @@
def stat(obj_stats):

    obj_stats['test_data']={'pos': 0}

    return obj_stats
15 setup.py
@@ -5,20 +5,16 @@ import os
from setuptools import setup, find_packages

if sys.version_info < (3, 5):
    raise NotImplementedError("Sorry, you need at least Python 3.5 for use pastafaristats.")

#import paramecio
# Pillow should be installed after if you need ImageField
# If you install passlib and bcrypt, the password system will use bcrypt by default, if not, will use native crypt libc
if sys.version_info < (3, 6):
    raise NotImplementedError("Sorry, you need at least Python 3.6 for use pastafaristats.")

setup(name='pastafaristats',
      version='1.0.0',
      version='1.0.2',
      description='Simple scripts for send basic data of a server how complement to other stats solutions more complex.',
      author='Antonio de la Rosa Caballero',
      author_email='antonio.delarosa@coesinfo.com',
      url='https://bitbucket.org/paramecio/pastafaristats/',
      packages=['pastafaristats'],
      packages=['pastafaristats', 'pastafaristats.utils'],
      include_package_data=True,
      install_requires=['psutil'],
      entry_points={'console_scripts': [

@@ -34,6 +30,7 @@ setup(name='pastafaristats',
      'Programming Language :: Python :: 3.6',
      'Programming Language :: Python :: 3.7',
      'Programming Language :: Python :: 3.8',
      'Programming Language :: Python :: 3.9'
      'Programming Language :: Python :: 3.9',
      'Programming Language :: Python :: 3.10'
      ],
      )