diff --git a/Dockerfile b/Dockerfile
index 7a15e7b..3500f26 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,35 +1,11 @@
-FROM centos:7
-# 设置编码
-ENV LANG en_US.UTF-8
-# 同步时间
-ENV TZ=Asia/Shanghai
-RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+FROM registry.cn-hangzhou.aliyuncs.com/sourcegarden/python:centos7-3.6
-# 1. 安装基本依赖
-RUN yum update -y && yum install epel-release -y && yum update -y && yum install wget unzip epel-release nginx xz gcc automake zlib-devel openssl-devel supervisor groupinstall development libxslt-devel libxml2-devel libcurl-devel git -y
-#WORKDIR /var/www/
-
-# 2. 准备python
-RUN wget https://www.python.org/ftp/python/3.6.6/Python-3.6.6.tar.xz
-RUN xz -d Python-3.6.6.tar.xz && tar xvf Python-3.6.6.tar && cd Python-3.6.6 && ./configure && make && make install
-
-# 3. 安装websdk
-RUN pip3 install --upgrade pip
-RUN pip3 install -U git+https://github.com/ss1917/ops_sdk.git
-
-# 4. 复制代码
-RUN mkdir -p /var/www/
 ADD . /var/www/codo-tools/
+RUN pip3 install -r /var/www/codo-tools/requirements.txt
-# 5. 安装pip依赖
-RUN pip3 install -r /var/www/codo-tools/doc/requirements.txt
-
-# 6. 日志
-VOLUME /var/log/
-
-# 7. 准备文件
-COPY doc/nginx_ops.conf /etc/nginx/conf.d/default.conf
-COPY doc/supervisor_ops.conf /etc/supervisord.conf
+COPY docker/nginx_default.conf /etc/nginx/nginx.conf
+COPY docker/nginx_ops.conf /etc/nginx/conf.d/codo-tools.conf
+COPY docker/supervisor_ops.conf /etc/supervisord.conf
 EXPOSE 80
 CMD ["/usr/bin/supervisord"]
\ No newline at end of file
diff --git a/biz/crontab_app.py b/biz/crontab_app.py
index 8cf7055..071f146 100644
--- a/biz/crontab_app.py
+++ b/biz/crontab_app.py
@@ -1,22 +1,22 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# @Time : 2019/3/21 19:52
-# @Author : Fred Yangxiaofei
-# @File : crontab_app.py
-# @Role : Application 放一些定时任务 ,可能会导致阻塞
-
-
-import tornado
-from websdk.application import Application as myApplication
-from biz.tail_data import tail_data
-
-class Application(myApplication):
-    def __init__(self, **settings):
-        urls = []
-        tailed_callback = tornado.ioloop.PeriodicCallback(tail_data, 3600000) # 1小时循环一次
-        tailed_callback.start()
-        super(Application, self).__init__(urls, **settings)
-
-
-if __name__ == '__main__':
-    pass
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2019/3/21 19:52
+# @Author : Fred Yangxiaofei
+# @File : crontab_app.py
+# @Role : Application 放一些定时任务 ,可能会导致阻塞
+
+
+import tornado
+from websdk.application import Application as myApplication
+from biz.tail_data import tail_data
+
+class Application(myApplication):
+    def __init__(self, **settings):
+        urls = []
+        tailed_callback = tornado.ioloop.PeriodicCallback(tail_data, 3600000) # 1小时循环一次
+        tailed_callback.start()
+        super(Application, self).__init__(urls, **settings)
+
+
+if __name__ == '__main__':
+    pass
diff --git a/biz/get_userinfo.py b/biz/get_userinfo.py
index 425205b..efb1b87 100644
--- a/biz/get_userinfo.py
+++ b/biz/get_userinfo.py
@@ -1,27 +1,27 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# @Time : 2019/3/21 15:10
-# @Author : Fred Yangxiaofei
-# @File : get_userinfo.py
-# @Role : 获取CODO平台用户详细信息
-
-
-from libs.redis_connect import redis_conn
-from websdk.consts import const
-
-
-def get_user_info():
-    """
-    从现有redis里面获取用户信息,如:Email,SMS等
-    :return:
-    """
-    # 集合
-    data_set = redis_conn.smembers(const.USERS_INFO)
-    # 集合转list
-    userdata = list(data_set)
-    # PS:这里codo后端会把数据主动写redis里面,假数据类型:user_data:['{"nickname:杨红飞", "email": "test@domain.cn", "tel": "10000000001"}','{"nickname:杨红飞02", "email": "test02@domain.cn", "tel": "10000000002"}']
-    return userdata
-
-
-if __name__ == '__main__':
-    get_user_info()
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time : 2019/3/21 15:10
+# @Author : Fred Yangxiaofei
+# @File : get_userinfo.py
+# @Role : 获取CODO平台用户详细信息
+
+
+from libs.redis_connect import redis_conn
+from websdk.consts import const
+
+
+def get_user_info():
+    """
+    从现有redis里面获取用户信息,如:Email,SMS等
+    :return:
+    """
+    # 集合
+    data_set = redis_conn.smembers(const.USERS_INFO)
+    # 集合转list
+    userdata = list(data_set)
+    # PS:这里codo后端会把数据主动写redis里面,假数据类型:user_data:['{"nickname:杨红飞", "email": "test@domain.cn", "tel": "10000000001"}','{"nickname:杨红飞02", "email": "test02@domain.cn", "tel": "10000000002"}']
+    return userdata
+
+
+if __name__ == '__main__':
+    get_user_info()
diff --git a/biz/handlers/event_mg_handler.py b/biz/handlers/event_mg_handler.py
index 5175b4c..7248ce2 100644
--- a/biz/handlers/event_mg_handler.py
+++ b/biz/handlers/event_mg_handler.py
@@ -1,124 +1,124 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# @Time : 2019/3/20 19:19
-# @Author : Fred Yangxiaofei
-# @File : event_mg_handler.py
-# @Role : 事件记录路由
-
-
-
-import json
-import re
-import datetime
-from libs.database import model_to_dict
-from models.event_record import EventRecord
-from websdk.db_context import DBContext
-from websdk.base_handler import LivenessProbe
-from libs.base_handler import BaseHandler
-
-class EventRecordHandler(BaseHandler):
-    def get(self, *args, **kwargs):
-        key = self.get_argument('key', default=None, strip=True)
-        value = self.get_argument('value', default=None, strip=True)
-        page_size = self.get_argument('page', default=1, strip=True)
-        limit = self.get_argument('limit', default=15, strip=True)
-        limit_start = (int(page_size) - 1) * int(limit)
-        event_record_list = []
-        with DBContext('w') as session:
-            if key and value:
-                count = session.query(EventRecord).filter_by(**{key: value}).count()
-                event_record_data = session.query(EventRecord).filter_by(**{key: value}).order_by(
-                    EventRecord.id).offset(limit_start).limit(int(limit))
-            else:
-                count = session.query(EventRecord).count()
-                event_record_data = session.query(EventRecord).order_by(EventRecord.id).offset(
-                    limit_start).limit(int(limit))
-
-            for data in event_record_data:
-                data_dict = model_to_dict(data)
-                data_dict['event_start_time'] = str(data_dict['event_start_time'])
-                data_dict['event_end_time'] = str(data_dict['event_end_time'])
-                data_dict['create_at'] = str(data_dict['create_at'])
-                data_dict['update_at'] = str(data_dict['update_at'])
-                event_record_list.append(data_dict)
-        return self.write(dict(code=0, msg='获取成功', count=count, data=event_record_list))
-
-    def post(self, *args, **kwargs):
-        data = json.loads(self.request.body.decode("utf-8"))
-        event_name = data.get('event_name')
-        event_status = data.get('event_status')
-        event_level = data.get('event_level')
-        event_processing = data.get('event_processing')
-        event_start_time = data.get('event_start_time')
-        event_end_time = data.get('event_end_time')
-
-        event_start_time = datetime.datetime.strptime(event_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(
-            hours=8)
-        event_end_time = datetime.datetime.strptime(event_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(
-            hours=8)
-
-        if not event_name or not event_status or not event_level or not event_processing or not event_start_time or not event_end_time:
-            return self.write(dict(code=-2, msg='关键参数不能为空'))
-
-        with DBContext('w', None, True) as session:
-            name = 
session.query(EventRecord).filter(EventRecord.event_name == event_name).first() - if name: - return self.write(dict(code=-2, msg='{}已经存在'.format(event_name))) - - session.add( - EventRecord(event_name=event_name, event_status=event_status, event_level=event_level, - event_processing=event_processing, event_start_time=event_start_time, - event_end_time=event_end_time)) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - event_id = data.get('id') - if not event_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(EventRecord).filter(EventRecord.id == event_id).delete(synchronize_session=False) - - self.write(dict(code=0, msg='删除成功')) - - def put(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - event_name = data.get('event_name', None) - event_status = data.get('event_status', None) - event_level = data.get('event_level', None) - event_processing = data.get('event_processing', None) - event_start_time = data.get('event_start_time', None) - event_end_time = data.get('event_end_time', None) - - if not event_name or not event_status or not event_level or not event_processing or not event_start_time or not event_end_time: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - update_info = { - "event_status": event_status, - "event_level": event_level, - "event_processing": event_processing, - "event_start_time": event_start_time, - "event_end_time": event_end_time, - } - - if re.search('000Z', event_start_time): - event_start_time = datetime.datetime.strptime(event_start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) - update_info['event_start_time'] = event_start_time - - if re.search('000Z', event_end_time): - event_end_time = datetime.datetime.strptime(event_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - update_info['event_end_time'] = event_end_time - - with DBContext('w', None, True) as session: - session.query(EventRecord).filter(EventRecord.event_name == event_name).update(update_info) - self.write(dict(code=0, msg='更新成功')) - - -event_urls = [ - (r"/v1/tools/event/", EventRecordHandler), - (r"/are_you_ok/", LivenessProbe), -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 19:19 +# @Author : Fred Yangxiaofei +# @File : event_mg_handler.py +# @Role : 事件记录路由 + + + +import json +import re +import datetime +from libs.database import model_to_dict +from models.event_record import EventRecord +from websdk.db_context import DBContext +from websdk.base_handler import LivenessProbe +from libs.base_handler import BaseHandler + +class EventRecordHandler(BaseHandler): + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + page_size = self.get_argument('page', default=1, strip=True) + limit = self.get_argument('limit', default=15, strip=True) + limit_start = (int(page_size) - 1) * int(limit) + event_record_list = [] + with DBContext('w') as session: + if key and value: + count = session.query(EventRecord).filter_by(**{key: value}).count() + event_record_data = session.query(EventRecord).filter_by(**{key: value}).order_by( + EventRecord.id).offset(limit_start).limit(int(limit)) + else: + count = session.query(EventRecord).count() + event_record_data = session.query(EventRecord).order_by(EventRecord.id).offset( + limit_start).limit(int(limit)) + + for data in 
event_record_data: + data_dict = model_to_dict(data) + data_dict['event_start_time'] = str(data_dict['event_start_time']) + data_dict['event_end_time'] = str(data_dict['event_end_time']) + data_dict['create_at'] = str(data_dict['create_at']) + data_dict['update_at'] = str(data_dict['update_at']) + event_record_list.append(data_dict) + return self.write(dict(code=0, msg='获取成功', count=count, data=event_record_list)) + + def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + event_name = data.get('event_name') + event_status = data.get('event_status') + event_level = data.get('event_level') + event_processing = data.get('event_processing') + event_start_time = data.get('event_start_time') + event_end_time = data.get('event_end_time') + + event_start_time = datetime.datetime.strptime(event_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + event_end_time = datetime.datetime.strptime(event_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + + if not event_name or not event_status or not event_level or not event_processing or not event_start_time or not event_end_time: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + name = session.query(EventRecord).filter(EventRecord.event_name == event_name).first() + if name: + return self.write(dict(code=-2, msg='{}已经存在'.format(event_name))) + + session.add( + EventRecord(event_name=event_name, event_status=event_status, event_level=event_level, + event_processing=event_processing, event_start_time=event_start_time, + event_end_time=event_end_time)) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + event_id = data.get('id') + if not event_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(EventRecord).filter(EventRecord.id == event_id).delete(synchronize_session=False) + + self.write(dict(code=0, msg='删除成功')) + + def put(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + event_name = data.get('event_name', None) + event_status = data.get('event_status', None) + event_level = data.get('event_level', None) + event_processing = data.get('event_processing', None) + event_start_time = data.get('event_start_time', None) + event_end_time = data.get('event_end_time', None) + + if not event_name or not event_status or not event_level or not event_processing or not event_start_time or not event_end_time: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + update_info = { + "event_status": event_status, + "event_level": event_level, + "event_processing": event_processing, + "event_start_time": event_start_time, + "event_end_time": event_end_time, + } + + if re.search('000Z', event_start_time): + event_start_time = datetime.datetime.strptime(event_start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) + update_info['event_start_time'] = event_start_time + + if re.search('000Z', event_end_time): + event_end_time = datetime.datetime.strptime(event_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + update_info['event_end_time'] = event_end_time + + with DBContext('w', None, True) as session: + session.query(EventRecord).filter(EventRecord.event_name == event_name).update(update_info) + self.write(dict(code=0, msg='更新成功')) + + +event_urls = [ + (r"/v1/tools/event/", EventRecordHandler), + (r"/are_you_ok/", LivenessProbe), +] diff 
--git a/biz/handlers/fault_mg_handler.py b/biz/handlers/fault_mg_handler.py index 5970628..a02ffec 100644 --- a/biz/handlers/fault_mg_handler.py +++ b/biz/handlers/fault_mg_handler.py @@ -1,228 +1,228 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/20 13:41 -# @Author : Fred Yangxiaofei -# @File : fault_mg_handler.py -# @Role : 故障管理路由 - - -import json -import re -import datetime -from libs.database import model_to_dict -from models.fault_mg import Fault -from websdk.db_context import DBContext -from websdk.consts import const -from websdk.tools import convert -from biz.promethues_write_redis import redis_conn -from libs.oss import OSSApi -from libs.base_handler import BaseHandler - - -class FaultHandler(BaseHandler): - - def get(self, *args, **kwargs): - key = self.get_argument('key', default=None, strip=True) - value = self.get_argument('value', default=None, strip=True) - page_size = self.get_argument('page', default=1, strip=True) - limit = self.get_argument('limit', default=15, strip=True) - limit_start = (int(page_size) - 1) * int(limit) - fault_list = [] - with DBContext('w') as session: - if key and value: - count = session.query(Fault).filter_by(**{key: value}).count() - - fault_data = session.query(Fault).filter_by(**{key: value}).order_by( - Fault.id).offset(limit_start).limit(int(limit)) - else: - count = session.query(Fault).count() - fault_data = session.query(Fault).order_by(Fault.id).offset( - limit_start).limit(int(limit)) - - for data in fault_data: - data_dict = model_to_dict(data) - data_dict['fault_start_time'] = str(data_dict['fault_start_time']) - data_dict['fault_end_time'] = str(data_dict['fault_end_time']) - data_dict['create_at'] = str(data_dict['create_at']) - data_dict['update_at'] = str(data_dict['update_at']) - fault_list.append(data_dict) - return self.write(dict(code=0, msg='获取成功', count=count, data=fault_list)) - - def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - fault_name = data.get('fault_name', None) - fault_level = data.get('fault_level', None) - fault_state = data.get('fault_state', None) - fault_penson = data.get('fault_penson', None) - processing_penson = data.get('processing_penson', None) - fault_report = data.get('fault_report', None) - fault_start_time = data.get('fault_start_time', None) - fault_end_time = data.get('fault_end_time', None) - fault_issue = data.get('fault_issue', None) - fault_summary = data.get('fault_summary', None) - - if not fault_name or not fault_level or not fault_state or not processing_penson or not fault_start_time or not fault_end_time or not fault_issue or not fault_summary: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - fault_start_time = datetime.datetime.strptime(fault_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - fault_end_time = datetime.datetime.strptime(fault_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - - with DBContext('w', None, True) as session: - name = session.query(Fault).filter(Fault.fault_name == fault_name).first() - if name: - return self.write(dict(code=-2, msg='{}已经存在'.format(fault_name))) - - session.add(Fault(fault_name=fault_name, fault_level=fault_level, fault_state=fault_state, - fault_penson=fault_penson, processing_penson=processing_penson, - fault_report=fault_report, fault_start_time=fault_start_time, - fault_end_time=fault_end_time, fault_issue=fault_issue, fault_summary=fault_summary)) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - 
data = json.loads(self.request.body.decode("utf-8")) - fault_id = data.get('id') - if not fault_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(Fault).filter(Fault.id == fault_id).delete(synchronize_session=False) - - self.write(dict(code=0, msg='删除成功')) - - def put(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - fault_name = data.get('fault_name', None) - fault_level = data.get('fault_level', None) - fault_state = data.get('fault_state', None) - fault_penson = data.get('fault_penson', None) - processing_penson = data.get('processing_penson', None) - fault_report = data.get('fault_report', None) - fault_start_time = data.get('fault_start_time', None) - fault_end_time = data.get('fault_end_time', None) - fault_issue = data.get('fault_issue', None) - fault_summary = data.get('fault_summary', None) - - if not fault_name or not fault_level or not fault_state or not processing_penson or not fault_start_time or not fault_end_time or not fault_issue or not fault_summary: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - update_info = { - # "fault_name": fault_name, - "fault_level": fault_level, - "fault_state": fault_state, - "fault_penson": fault_penson, - "processing_penson": processing_penson, - "fault_report": fault_report, - "fault_start_time": fault_start_time, - "fault_end_time": fault_end_time, - "fault_issue": fault_issue, - "fault_summary": fault_summary, - } - - if re.search('000Z', fault_start_time): - fault_start_time = datetime.datetime.strptime(fault_start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) - update_info['fault_start_time'] = fault_start_time - - if re.search('000Z', fault_end_time): - fault_end_time = datetime.datetime.strptime(fault_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - update_info['fault_end_time'] = fault_end_time - - with DBContext('w', None, True) as session: - session.query(Fault).filter(Fault.fault_name == fault_name).update(update_info) - # raise HTTPError(403, "%s is not a file", self.path) - self.write(dict(code=0, msg='更新成功')) - - -class UpLoadFileHandler(BaseHandler): - def post(self, *args, **kwargs): - ###文件保存到本地 - # Base_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - # upload_path = '{}/static'.format(Base_DIR) - # file_metas = self.request.files.get('file', None) # 提取表单中‘name’为‘file’的文件元数据 - # ret = {'result': 'OK'} - # if not file_metas: - # ret['result'] = 'Invalid Args' - # return ret - # - # for meta in file_metas: - # filename = meta['filename'] - # print('filename---->', filename) - # file_path = os.path.join(upload_path, filename) - # with open(file_path, 'wb') as up: - # up.write(meta['body']) - # - # self.write(json.dumps(ret)) - - ###文件保存到OSS - ###获取OSS的配置 - cache_config_info = redis_conn.hgetall(const.APP_SETTINGS) - if cache_config_info: - config_info = convert(cache_config_info) - else: - return self.write(dict(code=-1, msg='【系统管理】-【系统配置】-【存储配置】中没有检测到OSS配置信息')) - - file_metas = self.request.files.get('file', None) # 提取表单中‘name’为‘file’的文件元数据 - - if not file_metas: - return self.write(dict(code=-2, msg='没有文件数据')) - - for meta in file_metas: - filename = meta['filename'] - # print('filename---->', filename) - file_data = meta['body'] - oss_data = { - 'STORAGE_KEY_ID': config_info.get('STORAGE_KEY_ID'), - 'STORAGE_KEY_SECRET': config_info.get('STORAGE_KEY_SECRET'), - 'STORAGE_REGION': config_info.get('STORAGE_REGION'), - 'STORAGE_NAME': 
config_info.get('STORAGE_NAME'), - 'STORAGE_PATH': 'fault' # https://opendevops.oss-cn-shanghai.aliyuncs.com/fault/xxx.pdf - } - # - # obj = OSSApi( - # oss_data.get('STORAGE_KEY_ID'), 'xxxx', - # oss_data.get('STORAGE_REGION'), - # oss_data.get('STORAGE_NAME'), oss_data.get('STORAGE_PATH')) - # obj.setObj(filename, file_data) - try: - obj = OSSApi( - oss_data.get('STORAGE_KEY_ID'), oss_data.get('STORAGE_KEY_SECRET'), - oss_data.get('STORAGE_REGION'), - oss_data.get('STORAGE_NAME'), oss_data.get('STORAGE_PATH')) - obj.setObj(filename, file_data) - except Exception as e: - return self.write(dict(code=-1, msg='上传失败,请检查OSS配置')) - - - self.write(dict(code=0, msg="上传成功")) - -class GetBucketInfoHandler(BaseHandler): - def get(self, *args, **kwargs): - """从redis获取阿里云OSS基本信息""" - cache_config_info = redis_conn.hgetall(const.APP_SETTINGS) - - if cache_config_info: - config_info = convert(cache_config_info) - - if not config_info.get('STORAGE_REGION') and not config_info.get('STORAGE_REGION'): - return self.write(dict(code=-1, msg='没有发现OSS配置信息')) - - oss_info = { - 'STORAGE_REGION': config_info.get('STORAGE_REGION'), - 'STORAGE_NAME': config_info.get('STORAGE_NAME') - } - self.write(dict(code=0, msg="获取成功", data=oss_info)) - else: - self.write(dict(code=-2, msg="没有在redis缓存发现配置信息")) - - - -fault_urls = [ - (r"/v1/tools/fault/", FaultHandler), - (r"/v1/tools/fault/upload/", UpLoadFileHandler), - (r"/v1/tools/fault/oss/", GetBucketInfoHandler), - -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 13:41 +# @Author : Fred Yangxiaofei +# @File : fault_mg_handler.py +# @Role : 故障管理路由 + + +import json +import re +import datetime +from libs.database import model_to_dict +from models.fault_mg import Fault +from websdk.db_context import DBContext +from websdk.consts import const +from websdk.tools import convert +from biz.promethues_write_redis import redis_conn +from libs.oss import OSSApi +from libs.base_handler import BaseHandler + + +class FaultHandler(BaseHandler): + + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + page_size = self.get_argument('page', default=1, strip=True) + limit = self.get_argument('limit', default=15, strip=True) + limit_start = (int(page_size) - 1) * int(limit) + fault_list = [] + with DBContext('w') as session: + if key and value: + count = session.query(Fault).filter_by(**{key: value}).count() + + fault_data = session.query(Fault).filter_by(**{key: value}).order_by( + Fault.id).offset(limit_start).limit(int(limit)) + else: + count = session.query(Fault).count() + fault_data = session.query(Fault).order_by(Fault.id).offset( + limit_start).limit(int(limit)) + + for data in fault_data: + data_dict = model_to_dict(data) + data_dict['fault_start_time'] = str(data_dict['fault_start_time']) + data_dict['fault_end_time'] = str(data_dict['fault_end_time']) + data_dict['create_at'] = str(data_dict['create_at']) + data_dict['update_at'] = str(data_dict['update_at']) + fault_list.append(data_dict) + return self.write(dict(code=0, msg='获取成功', count=count, data=fault_list)) + + def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + fault_name = data.get('fault_name', None) + fault_level = data.get('fault_level', None) + fault_state = data.get('fault_state', None) + fault_penson = data.get('fault_penson', None) + processing_penson = data.get('processing_penson', None) + fault_report = data.get('fault_report', None) + fault_start_time 
= data.get('fault_start_time', None) + fault_end_time = data.get('fault_end_time', None) + fault_issue = data.get('fault_issue', None) + fault_summary = data.get('fault_summary', None) + + if not fault_name or not fault_level or not fault_state or not processing_penson or not fault_start_time or not fault_end_time or not fault_issue or not fault_summary: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + fault_start_time = datetime.datetime.strptime(fault_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + fault_end_time = datetime.datetime.strptime(fault_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + + with DBContext('w', None, True) as session: + name = session.query(Fault).filter(Fault.fault_name == fault_name).first() + if name: + return self.write(dict(code=-2, msg='{}已经存在'.format(fault_name))) + + session.add(Fault(fault_name=fault_name, fault_level=fault_level, fault_state=fault_state, + fault_penson=fault_penson, processing_penson=processing_penson, + fault_report=fault_report, fault_start_time=fault_start_time, + fault_end_time=fault_end_time, fault_issue=fault_issue, fault_summary=fault_summary)) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + fault_id = data.get('id') + if not fault_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(Fault).filter(Fault.id == fault_id).delete(synchronize_session=False) + + self.write(dict(code=0, msg='删除成功')) + + def put(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + fault_name = data.get('fault_name', None) + fault_level = data.get('fault_level', None) + fault_state = data.get('fault_state', None) + fault_penson = data.get('fault_penson', None) + processing_penson = data.get('processing_penson', None) + fault_report = data.get('fault_report', None) + fault_start_time = data.get('fault_start_time', None) + fault_end_time = data.get('fault_end_time', None) + fault_issue = data.get('fault_issue', None) + fault_summary = data.get('fault_summary', None) + + if not fault_name or not fault_level or not fault_state or not processing_penson or not fault_start_time or not fault_end_time or not fault_issue or not fault_summary: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + update_info = { + # "fault_name": fault_name, + "fault_level": fault_level, + "fault_state": fault_state, + "fault_penson": fault_penson, + "processing_penson": processing_penson, + "fault_report": fault_report, + "fault_start_time": fault_start_time, + "fault_end_time": fault_end_time, + "fault_issue": fault_issue, + "fault_summary": fault_summary, + } + + if re.search('000Z', fault_start_time): + fault_start_time = datetime.datetime.strptime(fault_start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) + update_info['fault_start_time'] = fault_start_time + + if re.search('000Z', fault_end_time): + fault_end_time = datetime.datetime.strptime(fault_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + update_info['fault_end_time'] = fault_end_time + + with DBContext('w', None, True) as session: + session.query(Fault).filter(Fault.fault_name == fault_name).update(update_info) + # raise HTTPError(403, "%s is not a file", self.path) + self.write(dict(code=0, msg='更新成功')) + + +class UpLoadFileHandler(BaseHandler): + def post(self, *args, **kwargs): + ###文件保存到本地 + # Base_DIR = 
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + # upload_path = '{}/static'.format(Base_DIR) + # file_metas = self.request.files.get('file', None) # 提取表单中‘name’为‘file’的文件元数据 + # ret = {'result': 'OK'} + # if not file_metas: + # ret['result'] = 'Invalid Args' + # return ret + # + # for meta in file_metas: + # filename = meta['filename'] + # print('filename---->', filename) + # file_path = os.path.join(upload_path, filename) + # with open(file_path, 'wb') as up: + # up.write(meta['body']) + # + # self.write(json.dumps(ret)) + + ###文件保存到OSS + ###获取OSS的配置 + cache_config_info = redis_conn.hgetall(const.APP_SETTINGS) + if cache_config_info: + config_info = convert(cache_config_info) + else: + return self.write(dict(code=-1, msg='【系统管理】-【系统配置】-【存储配置】中没有检测到OSS配置信息')) + + file_metas = self.request.files.get('file', None) # 提取表单中‘name’为‘file’的文件元数据 + + if not file_metas: + return self.write(dict(code=-2, msg='没有文件数据')) + + for meta in file_metas: + filename = meta['filename'] + # print('filename---->', filename) + file_data = meta['body'] + oss_data = { + 'STORAGE_KEY_ID': config_info.get('STORAGE_KEY_ID'), + 'STORAGE_KEY_SECRET': config_info.get('STORAGE_KEY_SECRET'), + 'STORAGE_REGION': config_info.get('STORAGE_REGION'), + 'STORAGE_NAME': config_info.get('STORAGE_NAME'), + 'STORAGE_PATH': 'fault' # https://opendevops.oss-cn-shanghai.aliyuncs.com/fault/xxx.pdf + } + # + # obj = OSSApi( + # oss_data.get('STORAGE_KEY_ID'), 'xxxx', + # oss_data.get('STORAGE_REGION'), + # oss_data.get('STORAGE_NAME'), oss_data.get('STORAGE_PATH')) + # obj.setObj(filename, file_data) + try: + obj = OSSApi( + oss_data.get('STORAGE_KEY_ID'), oss_data.get('STORAGE_KEY_SECRET'), + oss_data.get('STORAGE_REGION'), + oss_data.get('STORAGE_NAME'), oss_data.get('STORAGE_PATH')) + obj.setObj(filename, file_data) + except Exception as e: + return self.write(dict(code=-1, msg='上传失败,请检查OSS配置')) + + + self.write(dict(code=0, msg="上传成功")) + +class GetBucketInfoHandler(BaseHandler): + def get(self, *args, **kwargs): + """从redis获取阿里云OSS基本信息""" + cache_config_info = redis_conn.hgetall(const.APP_SETTINGS) + + if cache_config_info: + config_info = convert(cache_config_info) + + if not config_info.get('STORAGE_REGION') and not config_info.get('STORAGE_REGION'): + return self.write(dict(code=-1, msg='没有发现OSS配置信息')) + + oss_info = { + 'STORAGE_REGION': config_info.get('STORAGE_REGION'), + 'STORAGE_NAME': config_info.get('STORAGE_NAME') + } + self.write(dict(code=0, msg="获取成功", data=oss_info)) + else: + self.write(dict(code=-2, msg="没有在redis缓存发现配置信息")) + + + +fault_urls = [ + (r"/v1/tools/fault/", FaultHandler), + (r"/v1/tools/fault/upload/", UpLoadFileHandler), + (r"/v1/tools/fault/oss/", GetBucketInfoHandler), + +] diff --git a/biz/handlers/mycrypt_handler.py b/biz/handlers/mycrypt_handler.py index efe8d96..7472c51 100644 --- a/biz/handlers/mycrypt_handler.py +++ b/biz/handlers/mycrypt_handler.py @@ -1,49 +1,49 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/25 14:00 -# @Author : Fred Yangxiaofei -# @File : mycrypt_handler.py -# @Role : 加密解密路由 - - -import tornado.web -from biz.mycrypt import MyCrypt -import binascii -from libs.base_handler import BaseHandler - -class MyCryptHandler(BaseHandler): - - def get(self, *args, **kwargs): - key = self.get_argument('key', default=None, strip=True) - value = self.get_argument('value', default=None, strip=True) - - # text = self.get_argument('text', default=None, strip=True) - # ciphertext = self.get_argument('ciphertext', default=None, strip=True) - 
- if not key and not value: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - # 实例化 - mc = MyCrypt() - # 用户给正常密码,我们就进行加密操作 - try: - - if key == 'text': - # 加密方法 - ciphertext = mc.my_encrypt(value) - return self.write(dict(code=0, msg="加密成功", data=ciphertext)) - - # 用户给加密文本,我们就进行解密操作 - if key == 'ciphertext': - # 解密方法 - text = mc.my_decrypt(value) - return self.write(dict(code=0, msg="解密成功", data=text)) - except binascii.Error: - return self.write(dict(code=-3, msg="解密格式错误")) - - - - -mycrypt_urls = [ - (r"/v1/tools/mycrypt/", MyCryptHandler) -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/25 14:00 +# @Author : Fred Yangxiaofei +# @File : mycrypt_handler.py +# @Role : 加密解密路由 + + +import tornado.web +from biz.mycrypt import MyCrypt +import binascii +from libs.base_handler import BaseHandler + +class MyCryptHandler(BaseHandler): + + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + + # text = self.get_argument('text', default=None, strip=True) + # ciphertext = self.get_argument('ciphertext', default=None, strip=True) + + if not key and not value: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + # 实例化 + mc = MyCrypt() + # 用户给正常密码,我们就进行加密操作 + try: + + if key == 'text': + # 加密方法 + ciphertext = mc.my_encrypt(value) + return self.write(dict(code=0, msg="加密成功", data=ciphertext)) + + # 用户给加密文本,我们就进行解密操作 + if key == 'ciphertext': + # 解密方法 + text = mc.my_decrypt(value) + return self.write(dict(code=0, msg="解密成功", data=text)) + except binascii.Error: + return self.write(dict(code=-3, msg="解密格式错误")) + + + + +mycrypt_urls = [ + (r"/v1/tools/mycrypt/", MyCryptHandler) +] diff --git a/biz/handlers/paid_mg_handler.py b/biz/handlers/paid_mg_handler.py index fecf819..baa10cc 100644 --- a/biz/handlers/paid_mg_handler.py +++ b/biz/handlers/paid_mg_handler.py @@ -1,126 +1,126 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/21 11:13 -# @Author : Fred Yangxiaofei -# @File : paid_mg_handler.py -# @Role : 付费管理路由 - - -import json -import re -import datetime -from libs.database import model_to_dict -from models.paid_mg import PaidMG -from websdk.db_context import DBContext -from libs.base_handler import BaseHandler - - -class PaidMGHandler(BaseHandler): - def get(self, *args, **kwargs): - key = self.get_argument('key', default=None, strip=True) - value = self.get_argument('value', default=None, strip=True) - page_size = self.get_argument('page', default=1, strip=True) - limit = self.get_argument('limit', default=15, strip=True) - limit_start = (int(page_size) - 1) * int(limit) - paid_list = [] - with DBContext('w') as session: - if key and value: - count = session.query(PaidMG).filter_by(**{key: value}).count() - paid_data = session.query(PaidMG).filter_by(**{key: value}).order_by( - PaidMG.id).offset(limit_start).limit(int(limit)) - else: - count = session.query(PaidMG).count() - paid_data = session.query(PaidMG).order_by(PaidMG.id).offset( - limit_start).limit(int(limit)) - - for data in paid_data: - data_dict = model_to_dict(data) - data_dict['paid_start_time'] = str(data_dict['paid_start_time']) - data_dict['paid_end_time'] = str(data_dict['paid_end_time']) - data_dict['create_at'] = str(data_dict['create_at']) - data_dict['update_at'] = str(data_dict['update_at']) - if data_dict['nicknames']: - data_dict['nicknames'] = data_dict['nicknames'].split(',') - paid_list.append(data_dict) - return self.write(dict(code=0, msg='获取成功', count=count, data=paid_list)) - 
- def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - paid_name = data.get('paid_name', None) - paid_start_time = data.get('paid_start_time', None) - paid_end_time = data.get('paid_end_time', None) - reminder_day = data.get('reminder_day', None) - nicknames = data.get('nicknames', '') - - if not paid_name or not paid_start_time or not paid_end_time or not reminder_day or not nicknames: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - if nicknames: - nicknames = ','.join(nicknames) - - paid_start_time = datetime.datetime.strptime(paid_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - paid_end_time = datetime.datetime.strptime(paid_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - - with DBContext('w', None, True) as session: - name = session.query(PaidMG).filter(PaidMG.paid_name == paid_name).first() - if name: - return self.write(dict(code=-2, msg='{}已经存在'.format(paid_name))) - session.add( - PaidMG(paid_name=paid_name, paid_start_time=paid_start_time, paid_end_time=paid_end_time, - reminder_day=reminder_day, nicknames=nicknames)) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - paid_id = data.get('id') - if not paid_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(PaidMG).filter(PaidMG.id == paid_id).delete(synchronize_session=False) - - self.write(dict(code=0, msg='删除成功')) - - def put(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - paid_id = data.get('id') - paid_name = data.get('paid_name', None) - paid_start_time = data.get('paid_start_time', None) - paid_end_time = data.get('paid_end_time', None) - reminder_day = data.get('reminder_day', None) - nicknames = data.get('nicknames', None) - - if not paid_name or not paid_start_time or not paid_end_time or not reminder_day or not nicknames: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - if nicknames: - nicknames = ','.join(nicknames) - - update_info = { - "paid_start_time": paid_start_time, - "paid_end_time": paid_end_time, - "reminder_day": reminder_day, - "nicknames": nicknames - } - - if re.search('000Z', paid_start_time): - paid_start_time = datetime.datetime.strptime(paid_start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) - update_info['paid_start_time'] = paid_start_time - - if re.search('000Z', paid_end_time): - paid_end_time = datetime.datetime.strptime(paid_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - update_info['paid_end_time'] = paid_end_time - - with DBContext('w', None, True) as session: - session.query(PaidMG).filter(PaidMG.paid_name == paid_name).update(update_info) - self.write(dict(code=0, msg='更新成功')) - - -paid_urls = [ - (r"/v1/tools/paid/", PaidMGHandler) -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/21 11:13 +# @Author : Fred Yangxiaofei +# @File : paid_mg_handler.py +# @Role : 付费管理路由 + + +import json +import re +import datetime +from libs.database import model_to_dict +from models.paid_mg import PaidMG +from websdk.db_context import DBContext +from libs.base_handler import BaseHandler + + +class PaidMGHandler(BaseHandler): + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + page_size = self.get_argument('page', default=1, strip=True) + limit = 
self.get_argument('limit', default=15, strip=True) + limit_start = (int(page_size) - 1) * int(limit) + paid_list = [] + with DBContext('w') as session: + if key and value: + count = session.query(PaidMG).filter_by(**{key: value}).count() + paid_data = session.query(PaidMG).filter_by(**{key: value}).order_by( + PaidMG.id).offset(limit_start).limit(int(limit)) + else: + count = session.query(PaidMG).count() + paid_data = session.query(PaidMG).order_by(PaidMG.id).offset( + limit_start).limit(int(limit)) + + for data in paid_data: + data_dict = model_to_dict(data) + data_dict['paid_start_time'] = str(data_dict['paid_start_time']) + data_dict['paid_end_time'] = str(data_dict['paid_end_time']) + data_dict['create_at'] = str(data_dict['create_at']) + data_dict['update_at'] = str(data_dict['update_at']) + if data_dict['nicknames']: + data_dict['nicknames'] = data_dict['nicknames'].split(',') + paid_list.append(data_dict) + return self.write(dict(code=0, msg='获取成功', count=count, data=paid_list)) + + def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + paid_name = data.get('paid_name', None) + paid_start_time = data.get('paid_start_time', None) + paid_end_time = data.get('paid_end_time', None) + reminder_day = data.get('reminder_day', None) + nicknames = data.get('nicknames', '') + + if not paid_name or not paid_start_time or not paid_end_time or not reminder_day or not nicknames: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + if nicknames: + nicknames = ','.join(nicknames) + + paid_start_time = datetime.datetime.strptime(paid_start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + paid_end_time = datetime.datetime.strptime(paid_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + + with DBContext('w', None, True) as session: + name = session.query(PaidMG).filter(PaidMG.paid_name == paid_name).first() + if name: + return self.write(dict(code=-2, msg='{}已经存在'.format(paid_name))) + session.add( + PaidMG(paid_name=paid_name, paid_start_time=paid_start_time, paid_end_time=paid_end_time, + reminder_day=reminder_day, nicknames=nicknames)) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + paid_id = data.get('id') + if not paid_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(PaidMG).filter(PaidMG.id == paid_id).delete(synchronize_session=False) + + self.write(dict(code=0, msg='删除成功')) + + def put(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + paid_id = data.get('id') + paid_name = data.get('paid_name', None) + paid_start_time = data.get('paid_start_time', None) + paid_end_time = data.get('paid_end_time', None) + reminder_day = data.get('reminder_day', None) + nicknames = data.get('nicknames', None) + + if not paid_name or not paid_start_time or not paid_end_time or not reminder_day or not nicknames: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + if nicknames: + nicknames = ','.join(nicknames) + + update_info = { + "paid_start_time": paid_start_time, + "paid_end_time": paid_end_time, + "reminder_day": reminder_day, + "nicknames": nicknames + } + + if re.search('000Z', paid_start_time): + paid_start_time = datetime.datetime.strptime(paid_start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) + update_info['paid_start_time'] = paid_start_time + + if re.search('000Z', paid_end_time): + paid_end_time = 
datetime.datetime.strptime(paid_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + update_info['paid_end_time'] = paid_end_time + + with DBContext('w', None, True) as session: + session.query(PaidMG).filter(PaidMG.paid_name == paid_name).update(update_info) + self.write(dict(code=0, msg='更新成功')) + + +paid_urls = [ + (r"/v1/tools/paid/", PaidMGHandler) +] diff --git a/biz/handlers/password_handler.py b/biz/handlers/password_handler.py index f3792fc..f624b3c 100644 --- a/biz/handlers/password_handler.py +++ b/biz/handlers/password_handler.py @@ -1,32 +1,32 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/22 13:24 -# @Author : Fred Yangxiaofei -# @File : password_handler.py -# @Role : 随机密码生成路由 - - - -import string -import random -from libs.base_handler import BaseHandler - - -class PasswordHandler(BaseHandler): - def get(self, *args, **kwargs): - num = self.get_argument('num', default=None, strip=True) - if not num: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - # if not isinstance(num, int): - # return self.write(dict(code=-3, msg='参数必须是int类型')) - - chars = string.ascii_letters + string.digits - random_password = ''.join([random.choice(chars) for i in range(int(num))]) - - return self.write(dict(code=0, msg='获取成功', data=random_password)) - - -password_urls = [ - (r"/v1/tools/password/", PasswordHandler) -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/22 13:24 +# @Author : Fred Yangxiaofei +# @File : password_handler.py +# @Role : 随机密码生成路由 + + + +import string +import random +from libs.base_handler import BaseHandler + + +class PasswordHandler(BaseHandler): + def get(self, *args, **kwargs): + num = self.get_argument('num', default=None, strip=True) + if not num: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + # if not isinstance(num, int): + # return self.write(dict(code=-3, msg='参数必须是int类型')) + + chars = string.ascii_letters + string.digits + random_password = ''.join([random.choice(chars) for i in range(int(num))]) + + return self.write(dict(code=0, msg='获取成功', data=random_password)) + + +password_urls = [ + (r"/v1/tools/password/", PasswordHandler) +] diff --git a/biz/handlers/project_mg_handler.py b/biz/handlers/project_mg_handler.py index a7cb209..8487ff0 100644 --- a/biz/handlers/project_mg_handler.py +++ b/biz/handlers/project_mg_handler.py @@ -1,122 +1,122 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/20 17:36 -# @Author : Fred Yangxiaofei -# @File : project_mg_handler.py -# @Role : 项目管理信息路由 - - -import json -import re -import datetime -from libs.database import model_to_dict -from models.project_mg import ProjectMG -from websdk.db_context import DBContext -from libs.base_handler import BaseHandler - - -class ProjectMGHandler(BaseHandler): - def get(self, *args, **kwargs): - key = self.get_argument('key', default=None, strip=True) - value = self.get_argument('value', default=None, strip=True) - page_size = self.get_argument('page', default=1, strip=True) - limit = self.get_argument('limit', default=15, strip=True) - limit_start = (int(page_size) - 1) * int(limit) - project_list = [] - with DBContext('w') as session: - if key and value: - count = session.query(ProjectMG).filter_by(**{key: value}).count() - project_data = session.query(ProjectMG).filter_by(**{key: value}).order_by( - ProjectMG.id).offset(limit_start).limit(int(limit)) - else: - count = session.query(ProjectMG).count() - project_data = session.query(ProjectMG).order_by(ProjectMG.id).offset( - limit_start).limit(int(limit)) - 
- for data in project_data: - data_dict = model_to_dict(data) - data_dict['project_start_time'] = str(data_dict['project_start_time']) - data_dict['project_end_time'] = str(data_dict['project_end_time']) - data_dict['create_at'] = str(data_dict['create_at']) - data_dict['update_at'] = str(data_dict['update_at']) - project_list.append(data_dict) - return self.write(dict(code=0, msg='获取成功', count=count, data=project_list)) - - def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - project_name = data.get('project_name', None) - project_status = data.get('project_status', None) - project_requester = data.get('project_requester', None) - project_processing = data.get('project_processing', None) - project_start_time = data.get('project_start_time', None) - project_end_time = data.get('project_end_time', None) - - if not project_name or not project_status or not project_requester or not project_processing or not project_start_time or not project_end_time: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - project_start_time = datetime.datetime.strptime(project_start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) - project_end_time = datetime.datetime.strptime(project_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - - with DBContext('w', None, True) as session: - name = session.query(ProjectMG).filter(ProjectMG.project_name == project_name).first() - if name: - return self.write(dict(code=-2, msg='{}已经存在'.format(project_name))) - session.add( - ProjectMG(project_name=project_name, project_status=project_status, project_requester=project_requester, - project_processing=project_processing, project_start_time=project_start_time, - project_end_time=project_end_time)) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - project_id = data.get('id') - if not project_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(ProjectMG).filter(ProjectMG.id == project_id).delete(synchronize_session=False) - - self.write(dict(code=0, msg='删除成功')) - - def put(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - project_name = data.get('project_name', None) - project_status = data.get('project_status', None) - project_requester = data.get('project_requester', None) - project_processing = data.get('project_processing', None) - project_start_time = data.get('project_start_time', None) - project_end_time = data.get('project_end_time', None) - - if not project_name or not project_status or not project_requester or not project_processing or not project_start_time or not project_end_time: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - update_info = { - "project_status": project_status, - "project_requester": project_requester, - "project_processing": project_processing, - "project_start_time": project_start_time, - "project_end_time": project_end_time, - } - - if re.search('000Z', project_start_time): - project_start_time = datetime.datetime.strptime(project_start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) - update_info['project_start_time'] = project_start_time - - if re.search('000Z', project_end_time): - project_end_time = datetime.datetime.strptime(project_end_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( - hours=8) - update_info['project_end_time'] = project_end_time - - with DBContext('w', None, True) as session: - 
session.query(ProjectMG).filter(ProjectMG.project_name == project_name).update(update_info) - self.write(dict(code=0, msg='更新成功')) - - -project_urls = [ - (r"/v1/tools/project/", ProjectMGHandler), -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 17:36 +# @Author : Fred Yangxiaofei +# @File : project_mg_handler.py +# @Role : 项目管理信息路由 + + +import json +import re +import datetime +from libs.database import model_to_dict +from models.project_mg import ProjectMG +from websdk.db_context import DBContext +from libs.base_handler import BaseHandler + + +class ProjectMGHandler(BaseHandler): + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + page_size = self.get_argument('page', default=1, strip=True) + limit = self.get_argument('limit', default=15, strip=True) + limit_start = (int(page_size) - 1) * int(limit) + project_list = [] + with DBContext('w') as session: + if key and value: + count = session.query(ProjectMG).filter_by(**{key: value}).count() + project_data = session.query(ProjectMG).filter_by(**{key: value}).order_by( + ProjectMG.id).offset(limit_start).limit(int(limit)) + else: + count = session.query(ProjectMG).count() + project_data = session.query(ProjectMG).order_by(ProjectMG.id).offset( + limit_start).limit(int(limit)) + + for data in project_data: + data_dict = model_to_dict(data) + data_dict['project_start_time'] = str(data_dict['project_start_time']) + data_dict['project_end_time'] = str(data_dict['project_end_time']) + data_dict['create_at'] = str(data_dict['create_at']) + data_dict['update_at'] = str(data_dict['update_at']) + project_list.append(data_dict) + return self.write(dict(code=0, msg='获取成功', count=count, data=project_list)) + + def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + project_name = data.get('project_name', None) + project_status = data.get('project_status', None) + project_requester = data.get('project_requester', None) + project_processing = data.get('project_processing', None) + project_start_time = data.get('project_start_time', None) + project_end_time = data.get('project_end_time', None) + + if not project_name or not project_status or not project_requester or not project_processing or not project_start_time or not project_end_time: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + project_start_time = datetime.datetime.strptime(project_start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) + project_end_time = datetime.datetime.strptime(project_end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + + with DBContext('w', None, True) as session: + name = session.query(ProjectMG).filter(ProjectMG.project_name == project_name).first() + if name: + return self.write(dict(code=-2, msg='{}已经存在'.format(project_name))) + session.add( + ProjectMG(project_name=project_name, project_status=project_status, project_requester=project_requester, + project_processing=project_processing, project_start_time=project_start_time, + project_end_time=project_end_time)) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + project_id = data.get('id') + if not project_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(ProjectMG).filter(ProjectMG.id == project_id).delete(synchronize_session=False) + + self.write(dict(code=0, 
msg='删除成功')) + + def put(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + project_name = data.get('project_name', None) + project_status = data.get('project_status', None) + project_requester = data.get('project_requester', None) + project_processing = data.get('project_processing', None) + project_start_time = data.get('project_start_time', None) + project_end_time = data.get('project_end_time', None) + + if not project_name or not project_status or not project_requester or not project_processing or not project_start_time or not project_end_time: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + update_info = { + "project_status": project_status, + "project_requester": project_requester, + "project_processing": project_processing, + "project_start_time": project_start_time, + "project_end_time": project_end_time, + } + + if re.search('000Z', project_start_time): + project_start_time = datetime.datetime.strptime(project_start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta(hours=8) + update_info['project_start_time'] = project_start_time + + if re.search('000Z', project_end_time): + project_end_time = datetime.datetime.strptime(project_end_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.timedelta( + hours=8) + update_info['project_end_time'] = project_end_time + + with DBContext('w', None, True) as session: + session.query(ProjectMG).filter(ProjectMG.project_name == project_name).update(update_info) + self.write(dict(code=0, msg='更新成功')) + + +project_urls = [ + (r"/v1/tools/project/", ProjectMGHandler), +] diff --git a/biz/handlers/zabbix_mg_handler.py b/biz/handlers/zabbix_mg_handler.py index 421b3a3..d1ee0f1 100644 --- a/biz/handlers/zabbix_mg_handler.py +++ b/biz/handlers/zabbix_mg_handler.py @@ -1,549 +1,549 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/4 15:55 -# @Author : Fred Yangxiaofei -# @File : zabbix_mg_handler.py -# @Role : ZABBIX相关路由 - - -import json -import datetime -from tornado import gen -from tornado import httpclient -from concurrent.futures import ThreadPoolExecutor -from tornado.concurrent import run_on_executor -from libs.database import model_to_dict -from libs.zabbix.login import zabbix_login -from libs.zabbix.get_issues import main as zabbix_last_issues -from models.zabbix_mg import ZabbixConfig, ZabbixSubmitTaskConf, ZabbixHosts, ZabbixHookLog -from websdk.db_context import DBContext -from libs.base_handler import BaseHandler -import tornado.web -from sqlalchemy import or_ -from websdk.web_logs import ins_log -from libs.zabbix.get_hosts import main as get_zabbix_hosts - - -class ZabbixTreeHandler(tornado.web.RequestHandler): - def get(self, *args, **kwargs): - hosts_list = [] - with DBContext('w') as session: - hosts_info = session.query(ZabbixHosts).all() - - for msg in hosts_info: - data_dict = model_to_dict(msg) - hosts_list.append(data_dict) - - _tree = [{"expand": True, "title": "ZABBIX", "children": [], "data_type": 'root'}] - - if hosts_list: - tmp_tree = { - "zabbix_url": {}, - "group_name": {}, - } - - for t in hosts_list: - zabbix_url, group_name = t["zabbix_url"], t['group_name'] - - # 因为是第一层所以没有parent - tmp_tree["zabbix_url"][zabbix_url] = { - "expand": True, "title": zabbix_url, "parent": "ZABBIX", "children": [], "data_type": 'zabbix_url' - } - - tmp_tree["group_name"][zabbix_url + "|" + group_name] = { - "expand": False, "title": group_name, "parent": zabbix_url, "zabbix_url": zabbix_url, - "children": [], "data_type": 'group_name' - } - - for tmp_group in 
tmp_tree["group_name"].values(): - tmp_tree["zabbix_url"][tmp_group["parent"]]["children"].append(tmp_group) - - for tmp_zabbix in tmp_tree["zabbix_url"].values(): - _tree[0]["children"].append(tmp_zabbix) - - return self.write(dict(code=0, msg='获取项目Tree成功', data=_tree)) - else: - return self.write(dict(code=0, msg='获取项目Tree失败', data=_tree)) - - -class ZabbixHostsHandler(tornado.web.RequestHandler): - def get(self, *args, **kwargs): - zabbix_url = self.get_argument('zabbix_url', default=None, strip=True) - group_name = self.get_argument('group_name', default=None, strip=True) - search_val = self.get_argument('search_val', default=None, strip=True) - host_list = [] - if search_val: - with DBContext('w') as session: - zabbix_host_info = session.query(ZabbixHosts).filter( - or_(ZabbixHosts.group_name.like('%{}%'.format(search_val)), - ZabbixHosts.host_name.like('%{}%'.format(search_val)), - ZabbixHosts.zabbix_url.like('%{}%'.format(search_val))) - ).order_by(ZabbixHosts.zabbix_url, ZabbixHosts.group_name).all() - - elif zabbix_url and group_name: - with DBContext('w') as session: - zabbix_host_info = session.query(ZabbixHosts).filter(ZabbixHosts.zabbix_url == zabbix_url, - ZabbixHosts.group_name == group_name).order_by( - ZabbixHosts.zabbix_url, ZabbixHosts.group_name).all() - else: - with DBContext('w') as session: - zabbix_host_info = session.query(ZabbixHosts).order_by(ZabbixHosts.zabbix_url, - ZabbixHosts.group_name).all() - - for msg in zabbix_host_info: - data_dict = model_to_dict(msg) - hook_list = [] - if data_dict['zabbix_hooks']: - git_hooks = json.loads(data_dict['zabbix_hooks']) - for k, v in git_hooks.items(): - v['alert_title'] = k - hook_list.append(v) - data_dict['hook_list'] = hook_list - host_list.append(data_dict) - - return self.write(dict(code=0, msg='获取成功', data=host_list)) - - def put(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - print('data--->',data) - alert_title = data.get('alert_title').strip() - temp_id = data.get('temp_id') - schedule = data.get('schedule', 'new') - hook_args = data.get('hook_args') - the_id = data.get('the_id') - exec_host = data.get('exec_host', '127.0.0.1') - if not alert_title or not temp_id or not the_id or not exec_host: - return self.write(dict(code=1, msg='关键参数不能为空')) - - if hook_args: - try: - hook_args_dict = json.loads(hook_args) - except Exception as e: - return self.write(dict(code=2, msg='参数字典格式不正确')) - else: - hook_args_dict = dict() - - with DBContext('w', None, True) as session: - zabbix_hooks_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).first() - hook_dict = zabbix_hooks_info[0] if zabbix_hooks_info else {} - if hook_dict: - try: - hook_dict = json.loads(hook_dict) - except Exception as e: - return self.write(dict(code=2, msg='钩子参数转化为字典的时候出错,请仔细检查相关内容' + str(e))) - - if not hook_dict: - hook_dict = {alert_title: dict(exec_host=exec_host,temp_id=temp_id, schedule=schedule, hook_args=hook_args_dict)} - else: - hook_dict[alert_title] = dict(exec_host=exec_host,temp_id=temp_id, schedule=schedule, hook_args=hook_args_dict) - - hook_dict = json.dumps(hook_dict) - - session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).update( - {ZabbixHosts.zabbix_hooks: hook_dict}) - - self.write(dict(code=0, msg='更新钩子成功')) - - -class ZabbixConfigHandler(BaseHandler): - - def get(self, *args, **kwargs): - key = self.get_argument('key', default=None, strip=True) - value = self.get_argument('value', default=None, strip=True) - page_size = self.get_argument('page', 
default=1, strip=True) - limit = self.get_argument('limit', default=15, strip=True) - limit_start = (int(page_size) - 1) * int(limit) - zabbix_list = [] - with DBContext('w') as session: - if key and value: - count = session.query(ZabbixConfig).filter_by(**{key: value}).count() - zabbix_data = session.query(ZabbixConfig).filter_by(**{key: value}).order_by( - ZabbixConfig.id).offset(limit_start).limit(int(limit)) - else: - count = session.query(ZabbixConfig).count() - zabbix_data = session.query(ZabbixConfig).order_by(ZabbixConfig.id).offset( - limit_start).limit(int(limit)) - - for data in zabbix_data: - data_dict = model_to_dict(data) - zabbix_list.append(data_dict) - return self.write(dict(code=0, msg='获取成功', count=count, data=zabbix_list)) - - '''测试用户填写的信息及认证是否正确,防止主进程卡死,使用异步方法测试''' - _thread_pool = ThreadPoolExecutor(1) - - @run_on_executor(executor='_thread_pool') - def login_auth(self, zabbix_url, zabbix_username, zabbix_password): - """ - 测试ZABBIX验证是否可以通过 - :return: - """ - # 错误信息 - err_msg = '' - - ins_log.read_log('info', 'ZABBIX Login Auth') - - try: - zabbix_login(zabbix_url, zabbix_username, zabbix_password) - - except Exception as e: - err_msg = '测试失败,错误信息:{}'.format(e) - - return err_msg - - @gen.coroutine - def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - zabbix_name = data.get('zabbix_name', None) - zabbix_url = data.get('zabbix_url', None) - zabbix_username = data.get('zabbix_username', None) - zabbix_password = data.get('zabbix_password', None) - - if not zabbix_url or not zabbix_username or not zabbix_password: - return self.write(dict(code=-2, msg="测试必须要包含:地址、用户、密码信息")) - - msg = yield self.login_auth(zabbix_url, zabbix_username, zabbix_password) - if msg: - # 失败 - return self.write(dict(code=-1, msg=msg)) - - if not zabbix_name or not zabbix_url or not zabbix_username or not zabbix_password: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - exist_zabbix_name = session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_name == zabbix_name).first() - exist_zabbix_url = session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_url == zabbix_url).first() - - if exist_zabbix_name or exist_zabbix_url: - - update_info = { - "zabbix_name": zabbix_name, - "zabbix_url": zabbix_url, - "zabbix_username": zabbix_username, - "zabbix_password": zabbix_password, - } - - # 测试下编辑完后的信息是否正确 - msg = yield self.login_auth(zabbix_url, zabbix_username, zabbix_password) - if msg: - # 失败 - return self.write(dict(code=-1, msg=msg)) - - with DBContext('w', None, True) as session: - session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_url == zabbix_url).update(update_info) - - return self.write(dict(code=0, msg='更新成功')) - # return self.write(dict(code=-2, msg='name或zabbix url配置信息已经存在')) - session.add( - ZabbixConfig(zabbix_name=zabbix_name, zabbix_url=zabbix_url, zabbix_username=zabbix_username, - zabbix_password=zabbix_password)) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - zabbix_config_id = data.get('id') - zabbix_url = data.get('zabbix_url') - - if not zabbix_config_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(ZabbixConfig).filter(ZabbixConfig.id == zabbix_config_id).delete(synchronize_session=False) - session.query(ZabbixHosts).filter(ZabbixHosts.zabbix_url == zabbix_url).delete(synchronize_session=False) - - self.write(dict(code=0, 
msg='删除成功')) - - -class ZabbixSyncHandler(tornado.web.RequestHandler): - '''刷新ZABBIX地址,将用户所有配置的ZABBIX信息数据都写入数据库''' - - def post(self, *args, **kwargs): - with DBContext('w', None, True) as session: - zabbix_generator_list = get_zabbix_hosts() - if zabbix_generator_list: - for zabbix_gen in zabbix_generator_list: - for host_info in zabbix_gen: - host_name = host_info.get('host_name') - exist_hostname = session.query(ZabbixHosts).filter(ZabbixHosts.host_name == host_name).first() - if not exist_hostname: - session.add( - ZabbixHosts(zabbix_url=host_info.get('zabbix_url'), group_id=host_info.get('group_id'), - group_name=host_info.get('group_name'), - host_id=host_info.get('host_id'), host_name=host_name)) - else: - session.query(ZabbixHosts).filter(ZabbixHosts.host_name == host_name).update(host_info) - session.commit() - - self.write(dict(code=0, msg='刷新成功')) - - -class ZabbixLastIssuesHandler(tornado.web.RequestHandler): - '''获取多ZABBIX ISSUES信息,前端展示出来''' - - def get(self, *args, **kwargs): - last_issues = zabbix_last_issues() - return self.write(dict(code=0, msg='获取成功', data=last_issues)) - - -class ZabbixhookLogsHandler(tornado.web.RequestHandler): - '''获取webhook告警日志''' - - @gen.coroutine - def get(self, *args, **kwargs): - log_list = [] - - with DBContext('w') as session: - hooks_log_info = session.query(ZabbixHookLog).order_by(-ZabbixHookLog.id).limit(200).all() - - for msg in hooks_log_info: - data_dict = model_to_dict(msg) - data_dict['create_time'] = str(data_dict['create_time']) - log_list.append(data_dict) - - return self.write(dict(code=0, msg='获取成功', data=log_list)) - - -class ZabbixSubmitTaskConfHandler(tornado.web.RequestHandler): - '''ZABBIX钩子向任务平台提交任务,需要一个认证''' - - def get(self, *args, **kwargs): - page_size = self.get_argument('page', default=1, strip=True) - limit = self.get_argument('limit', default=1, strip=True) - limit_start = (int(page_size) - 1) * int(limit) # 只能有一条 - submit_task_conf_list = [] - with DBContext('w') as session: - zabbix_submit_task_conf_data = session.query(ZabbixSubmitTaskConf).order_by(ZabbixSubmitTaskConf.id).offset( - limit_start).limit(int(limit)) - for data in zabbix_submit_task_conf_data: - data_dict = model_to_dict(data) - submit_task_conf_list.append(data_dict) - return self.write(dict(code=0, msg='获取成功', data=submit_task_conf_list)) - - async def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - task_url = data.get('task_url', None) - auth_key = data.get('auth_key', None) - - if not task_url or not auth_key: - return self.write(dict(code=-2, msg="测试必须要包含:task_url、auth_key信息")) - - # 测试下权限 - http_client = httpclient.AsyncHTTPClient() - cookie = {"Cookie": 'auth_key={}'.format(auth_key)} - response = await http_client.fetch(task_url, method="GET", raise_error=False, headers=cookie) - - if response.code != 200: - return self.write(dict(code=-3, msg="错误码:{}".format(response.code))) - - response_data = json.loads(response.body.decode('utf-8')) - if response_data.get('code') != 0: - return self.write(dict(code=-3, msg="权限错误:{}".format(response_data.get('msg')))) - - # - with DBContext('w', None, True) as session: - exist_config = session.query(ZabbixSubmitTaskConf.id).first() - if not exist_config: - session.add(ZabbixSubmitTaskConf(task_url=task_url, auth_key=auth_key)) - else: - return self.write(dict(code=-4, msg="提交任务的认证配置信息只能存在一条")) - - self.write(dict(code=0, msg='添加成功')) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - submit_task_config_id = data.get('id') - 
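The list endpoints in this handler module all page results the same way: take page and limit from the query string, compute limit_start = (page - 1) * limit, then apply .offset(limit_start).limit(limit) to the query. A minimal standalone sketch of that arithmetic (the helper name and defaults here are illustrative, not part of the codebase):

def page_to_offset(page='1', limit='15'):
    """Illustrative helper: turn page/limit query arguments (strings) into offset/limit ints."""
    page, limit = int(page), int(limit)
    limit_start = (page - 1) * limit  # same formula the handlers feed into .offset()/.limit()
    return limit_start, limit

# page=3, limit=15 -> skip the first 30 rows, return the next 15
assert page_to_offset('3', '15') == (30, 15)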
- if not submit_task_config_id: - return self.write(dict(code=-2, msg='关键参数不能为空')) - - with DBContext('w', None, True) as session: - session.query(ZabbixSubmitTaskConf).filter(ZabbixSubmitTaskConf.id == submit_task_config_id).delete( - synchronize_session=False) - self.write(dict(code=0, msg='删除成功')) - - -class ZabbixHookHandler(BaseHandler): - @gen.coroutine - def get(self, *args, **kwargs): - self.write(dict(code=0, msg='获取csrf_key成功', csrf_key=self.new_csrf_key)) - - async def post(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - - print('data----->',data) - ins_log.read_log('info', '接收到数据:{}'.format(data)) - zabbix_url = data.get('zabbix_url') - messages = data.get('messages') - example_messages = 'Zabbix server___127.0.0.1___Zabbix agent on Zabbix server is unreachable for 5 minutes___PROBLEM___Average' - - if not zabbix_url or not messages: - ins_log.read_log('error', '关键参数不能为空') - return self.write(dict(code=-1, msg='Key parameters cannot be empty')) - - with DBContext('w', None, True) as session: - session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='收到告警信息:{}'.format(messages))) - # 要从message里面分析数据,这里必须是强规范:{HOSTNAME}___{HOST.IP}___{TRIGGER.NAME}___{TRIGGER.STATUS}___{TRIGGER.SEVERITY} - # 我们暂时只用到这2个数据,切割后类型依次是:['Zabbix server', '127.0.0.1', 'Zabbix agent on Zabbix server is unreachable for 5 minutes', 'PROBLEM', 'Average'] - try: - messages_list = messages.split('___') - host_name, host_ip, tagger_name, tagger_status, tagger_level = messages_list[0], messages_list[1], \ - messages_list[2], messages_list[3], \ - messages_list[4] - # host_ip = messages.split('___')[1] # hostip - # tagger_name = messages.split('___')[2] # 触发器名字 - # tagger_status = messages.split('___')[3] # 触发报警状态 - # tagger_level = messages.split('___')[4] # 报警级别 - except IndexError as e: - ins_log.read_log('error', '处理告警数据格式出错:{}'.format(e)) - ins_log.read_log('error', '可能是因为你配置的规则不对,请参考模块:{}'.format(example_messages)) - - return self.write(dict(code=-1, msg='处理告警数据格式出错:{}'.format(e))) - - if not host_name or not host_ip or not tagger_name or not tagger_status or not tagger_level: - return self.write(dict(code=-1, msg='你配置的规则格式应该不正常,请参考此模板:{}'.format(example_messages))) - - # 先查询告警的主机有没有 - hook_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.zabbix_url == zabbix_url, - ZabbixHosts.host_name == host_name).first() - - if not hook_info: - # return self.write(dict(code=0, msg='没有匹配到主机信息')) - ins_log.read_log('info', '主机:{}, 没有匹配到信息'.format(host_name)) - return self.write(dict(code=-1, msg='[INFO]: No match to host information')) - - # 匹配到主机后开始查询是否配置钩子 - if hook_info and not hook_info[0]: - # return self.write(dict(code=0, msg='没有匹配到钩子')) - ins_log.read_log('info', '主机:{}没有匹配到钩子'.format(host_name)) - return self.write(dict(code=-1, msg='[INFO]: No match to hook')) - - else: - # 匹配到主机,并且配置了钩子 - try: - # 防止用户给的数据不能json - hook_dict = json.loads(hook_info[0]) - ins_log.read_log('info', '主机:{} 一共配置钩子数据是:{}'.format(host_name, hook_dict)) - except Exception as e: - ins_log.read_log('error', e) - session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='钩子出错:{}'.format(messages))) - return self.write(dict(code=2, msg='There was an error when the hook parameter was converted into ' - 'a dictionary. 
Please check the relevant contents carefully')) - - # 根据你的报警名称匹配你的钩子,这里一个主机你可能配置了多个钩子 - alert_title_mate = None - for i in hook_dict.keys(): - if i == tagger_name: - alert_title_mate = i - - ins_log.read_log('info', '主机:{} 本次告警匹配到的钩子是:{}'.format(host_name, alert_title_mate)) - - if not alert_title_mate: - session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='没有匹配到钩子'.format(messages))) - ins_log.read_log('info', '没有匹配到钩子') - return self.write(dict(code=2, msg='No hook matched')) - else: - # 开始提交任务到平台 - the_hook = hook_dict[alert_title_mate] - print(the_hook) - hook_args = dict(ZABBIX_URL=zabbix_url,HOSTIP=host_ip, HOSTNAME=host_name, TAGGER_NAME=tagger_name, - TAGGER_STATUS=tagger_status, TAGGER_LEVEL=tagger_level) - # old_hook_args = the_hook.get('hook_args') - ### 参数字典 - # hosts_dict = {1: "127.0.0.1", 2: "127.0.0.1"} ### 主机字典 - # if the_hook.get('hook_args'): - # hosts_dict.update(the_hook.get('hook_args')) - exec_host = the_hook.get('exec_host') - if exec_host: - hosts_dict = {1: exec_host} - else: - hosts_dict = {1: "127.0.0.1", 2: "127.0.0.1"} ### 主机字典 - # if old_hook_args.get('hosts_dict') and isinstance(old_hook_args.get('hosts_dict'), dict): - # hosts_dict = old_hook_args.pop('hosts_dict') - - msg = '匹配到钩子:{} 模板ID:{} 执行:{},参数:{}'.format(alert_title_mate, the_hook.get('temp_id'), - the_hook.get('schedule'), str(the_hook.get('hook_args'))) - - ins_log.read_log('info', msg) - if len(msg) > 200: - msg = msg[:200] - - session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info=msg)) - - data_info = dict(exec_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), - task_name='ZABBIX钩子任务', temp_id=the_hook.get('temp_id'), - schedule=the_hook.get('schedule', 'ready'), - submitter=self.get_current_nickname(), args=str(hook_args), hosts=str(hosts_dict)) - - with DBContext('w', None, True) as session: - task_conf = session.query(ZabbixSubmitTaskConf.task_url, ZabbixSubmitTaskConf.auth_key).first() - - task_url = task_conf[0] - auth_key = task_conf[1] - - http_client = httpclient.AsyncHTTPClient() - cookie = {"Cookie": 'auth_key={}'.format(auth_key)} - csrf_response = await http_client.fetch(task_url, method="GET", raise_error=False, headers=cookie) - - if csrf_response.code != 200: - ins_log.read_log('error', '错误码:{}'.format(csrf_response.code)) - return self.write(dict(code=-3, msg="错误码:{}".format(csrf_response.code))) - - csrf_response_data = json.loads(csrf_response.body.decode('utf-8')) - if csrf_response_data.get('code') != 0: - ins_log.read_log('error', '权限错误:{}'.format(csrf_response_data.get('msg'))) - return self.write(dict(code=-3, msg="权限错误:{}".format(csrf_response_data.get('msg')))) - - csrf_key = csrf_response_data.get('csrf_key') - the_body = json.dumps(data_info) - cookie = {"Cookie": 'auth_key={}; csrf_key={}'.format(auth_key, csrf_key)} - response = await http_client.fetch(task_url, method="POST", body=the_body, raise_error=False, headers=cookie) - - if response.error: - ins_log.read_log('error', '请求任务接口失败:{}, 请检查参数字典格式是否正确'.format(response.error)) - return self.write(dict(code=-3, msg='请求任务接口失败:{}请检查参数字典格式是否正确'.format(response.error))) - - response_data = json.loads(response.body.decode('utf-8')) - - if response_data.get('code') != 0: - return self.write(dict(code=-1, msg=response_data.get('msg'))) - - return self.write(dict(code=0, msg=response_data.get('msg'))) - - def delete(self, *args, **kwargs): - data = json.loads(self.request.body.decode("utf-8")) - the_id = data.get('the_id') - alert_title = data.get('alert_title') - - with DBContext('w', None, True) as 
session: - hook_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).first() - if not hook_info: - return self.write(dict(code=-1, msg='No related items were found')) - - if not hook_info[0]: - return self.write(dict(code=-2, msg='No hooks, ignore')) - else: - try: - hook_dict = json.loads(hook_info[0]) - except Exception as e: - session.query(ZabbixHosts).filter(ZabbixHosts.id == the_id).update({ZabbixHosts.zabbix_hooks: ""}) - return self.write(dict(code=2, msg='钩子出错')) - - hook_dict.pop(alert_title) - hook_dict = json.dumps(hook_dict) - - session.query(ZabbixHosts).filter(ZabbixHosts.id == the_id).update({ZabbixHosts.zabbix_hooks: hook_dict}) - self.write(dict(code=0, msg='删除成功')) - - -zabbix_urls = [ - (r"/v1/zabbix/config/", ZabbixConfigHandler), - (r"/v1/zabbix/sync/", ZabbixSyncHandler), - (r"/v1/zabbix/tree/", ZabbixTreeHandler), - (r"/v1/zabbix/hosts/", ZabbixHostsHandler), - (r"/v1/zabbix/hooks/", ZabbixHookHandler), - (r"/v1/zabbix/logs/", ZabbixhookLogsHandler), - (r"/v1/zabbix/issues/", ZabbixLastIssuesHandler), - (r"/v1/zabbix/task_config/", ZabbixSubmitTaskConfHandler), - -] +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/4 15:55 +# @Author : Fred Yangxiaofei +# @File : zabbix_mg_handler.py +# @Role : ZABBIX相关路由 + + +import json +import datetime +from tornado import gen +from tornado import httpclient +from concurrent.futures import ThreadPoolExecutor +from tornado.concurrent import run_on_executor +from libs.database import model_to_dict +from libs.zabbix.login import zabbix_login +from libs.zabbix.get_issues import main as zabbix_last_issues +from models.zabbix_mg import ZabbixConfig, ZabbixSubmitTaskConf, ZabbixHosts, ZabbixHookLog +from websdk.db_context import DBContext +from libs.base_handler import BaseHandler +import tornado.web +from sqlalchemy import or_ +from websdk.web_logs import ins_log +from libs.zabbix.get_hosts import main as get_zabbix_hosts + + +class ZabbixTreeHandler(tornado.web.RequestHandler): + def get(self, *args, **kwargs): + hosts_list = [] + with DBContext('w') as session: + hosts_info = session.query(ZabbixHosts).all() + + for msg in hosts_info: + data_dict = model_to_dict(msg) + hosts_list.append(data_dict) + + _tree = [{"expand": True, "title": "ZABBIX", "children": [], "data_type": 'root'}] + + if hosts_list: + tmp_tree = { + "zabbix_url": {}, + "group_name": {}, + } + + for t in hosts_list: + zabbix_url, group_name = t["zabbix_url"], t['group_name'] + + # 因为是第一层所以没有parent + tmp_tree["zabbix_url"][zabbix_url] = { + "expand": True, "title": zabbix_url, "parent": "ZABBIX", "children": [], "data_type": 'zabbix_url' + } + + tmp_tree["group_name"][zabbix_url + "|" + group_name] = { + "expand": False, "title": group_name, "parent": zabbix_url, "zabbix_url": zabbix_url, + "children": [], "data_type": 'group_name' + } + + for tmp_group in tmp_tree["group_name"].values(): + tmp_tree["zabbix_url"][tmp_group["parent"]]["children"].append(tmp_group) + + for tmp_zabbix in tmp_tree["zabbix_url"].values(): + _tree[0]["children"].append(tmp_zabbix) + + return self.write(dict(code=0, msg='获取项目Tree成功', data=_tree)) + else: + return self.write(dict(code=0, msg='获取项目Tree失败', data=_tree)) + + +class ZabbixHostsHandler(tornado.web.RequestHandler): + def get(self, *args, **kwargs): + zabbix_url = self.get_argument('zabbix_url', default=None, strip=True) + group_name = self.get_argument('group_name', default=None, strip=True) + search_val = self.get_argument('search_val', default=None, strip=True) + host_list = [] + if 
search_val: + with DBContext('w') as session: + zabbix_host_info = session.query(ZabbixHosts).filter( + or_(ZabbixHosts.group_name.like('%{}%'.format(search_val)), + ZabbixHosts.host_name.like('%{}%'.format(search_val)), + ZabbixHosts.zabbix_url.like('%{}%'.format(search_val))) + ).order_by(ZabbixHosts.zabbix_url, ZabbixHosts.group_name).all() + + elif zabbix_url and group_name: + with DBContext('w') as session: + zabbix_host_info = session.query(ZabbixHosts).filter(ZabbixHosts.zabbix_url == zabbix_url, + ZabbixHosts.group_name == group_name).order_by( + ZabbixHosts.zabbix_url, ZabbixHosts.group_name).all() + else: + with DBContext('w') as session: + zabbix_host_info = session.query(ZabbixHosts).order_by(ZabbixHosts.zabbix_url, + ZabbixHosts.group_name).all() + + for msg in zabbix_host_info: + data_dict = model_to_dict(msg) + hook_list = [] + if data_dict['zabbix_hooks']: + git_hooks = json.loads(data_dict['zabbix_hooks']) + for k, v in git_hooks.items(): + v['alert_title'] = k + hook_list.append(v) + data_dict['hook_list'] = hook_list + host_list.append(data_dict) + + return self.write(dict(code=0, msg='获取成功', data=host_list)) + + def put(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + print('data--->',data) + alert_title = data.get('alert_title').strip() + temp_id = data.get('temp_id') + schedule = data.get('schedule', 'new') + hook_args = data.get('hook_args') + the_id = data.get('the_id') + exec_host = data.get('exec_host', '127.0.0.1') + if not alert_title or not temp_id or not the_id or not exec_host: + return self.write(dict(code=1, msg='关键参数不能为空')) + + if hook_args: + try: + hook_args_dict = json.loads(hook_args) + except Exception as e: + return self.write(dict(code=2, msg='参数字典格式不正确')) + else: + hook_args_dict = dict() + + with DBContext('w', None, True) as session: + zabbix_hooks_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).first() + hook_dict = zabbix_hooks_info[0] if zabbix_hooks_info else {} + if hook_dict: + try: + hook_dict = json.loads(hook_dict) + except Exception as e: + return self.write(dict(code=2, msg='钩子参数转化为字典的时候出错,请仔细检查相关内容' + str(e))) + + if not hook_dict: + hook_dict = {alert_title: dict(exec_host=exec_host,temp_id=temp_id, schedule=schedule, hook_args=hook_args_dict)} + else: + hook_dict[alert_title] = dict(exec_host=exec_host,temp_id=temp_id, schedule=schedule, hook_args=hook_args_dict) + + hook_dict = json.dumps(hook_dict) + + session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).update( + {ZabbixHosts.zabbix_hooks: hook_dict}) + + self.write(dict(code=0, msg='更新钩子成功')) + + +class ZabbixConfigHandler(BaseHandler): + + def get(self, *args, **kwargs): + key = self.get_argument('key', default=None, strip=True) + value = self.get_argument('value', default=None, strip=True) + page_size = self.get_argument('page', default=1, strip=True) + limit = self.get_argument('limit', default=15, strip=True) + limit_start = (int(page_size) - 1) * int(limit) + zabbix_list = [] + with DBContext('w') as session: + if key and value: + count = session.query(ZabbixConfig).filter_by(**{key: value}).count() + zabbix_data = session.query(ZabbixConfig).filter_by(**{key: value}).order_by( + ZabbixConfig.id).offset(limit_start).limit(int(limit)) + else: + count = session.query(ZabbixConfig).count() + zabbix_data = session.query(ZabbixConfig).order_by(ZabbixConfig.id).offset( + limit_start).limit(int(limit)) + + for data in zabbix_data: + data_dict = model_to_dict(data) + zabbix_list.append(data_dict) + 
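For reference, ZabbixHostsHandler.put above keeps every hook for a host in one JSON object stored in ZabbixHosts.zabbix_hooks, keyed by alert title, with each entry carrying exec_host, temp_id, schedule and hook_args. A minimal sketch of that merge independent of the ORM (the function name and the example values are illustrative only):

import json

def merge_hook(stored, alert_title, exec_host, temp_id, schedule='new', hook_args=None):
    """Merge one hook definition into the JSON blob kept in ZabbixHosts.zabbix_hooks."""
    hooks = json.loads(stored) if stored else {}
    hooks[alert_title] = dict(exec_host=exec_host, temp_id=temp_id,
                              schedule=schedule, hook_args=hook_args or {})
    return json.dumps(hooks)

# Example: add a hook for one trigger title to a host row whose zabbix_hooks column was empty.
blob = merge_hook('', 'Zabbix agent on Zabbix server is unreachable for 5 minutes',
                  exec_host='127.0.0.1', temp_id=12)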
return self.write(dict(code=0, msg='获取成功', count=count, data=zabbix_list)) + + '''测试用户填写的信息及认证是否正确,防止主进程卡死,使用异步方法测试''' + _thread_pool = ThreadPoolExecutor(1) + + @run_on_executor(executor='_thread_pool') + def login_auth(self, zabbix_url, zabbix_username, zabbix_password): + """ + 测试ZABBIX验证是否可以通过 + :return: + """ + # 错误信息 + err_msg = '' + + ins_log.read_log('info', 'ZABBIX Login Auth') + + try: + zabbix_login(zabbix_url, zabbix_username, zabbix_password) + + except Exception as e: + err_msg = '测试失败,错误信息:{}'.format(e) + + return err_msg + + @gen.coroutine + def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + zabbix_name = data.get('zabbix_name', None) + zabbix_url = data.get('zabbix_url', None) + zabbix_username = data.get('zabbix_username', None) + zabbix_password = data.get('zabbix_password', None) + + if not zabbix_url or not zabbix_username or not zabbix_password: + return self.write(dict(code=-2, msg="测试必须要包含:地址、用户、密码信息")) + + msg = yield self.login_auth(zabbix_url, zabbix_username, zabbix_password) + if msg: + # 失败 + return self.write(dict(code=-1, msg=msg)) + + if not zabbix_name or not zabbix_url or not zabbix_username or not zabbix_password: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + exist_zabbix_name = session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_name == zabbix_name).first() + exist_zabbix_url = session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_url == zabbix_url).first() + + if exist_zabbix_name or exist_zabbix_url: + + update_info = { + "zabbix_name": zabbix_name, + "zabbix_url": zabbix_url, + "zabbix_username": zabbix_username, + "zabbix_password": zabbix_password, + } + + # 测试下编辑完后的信息是否正确 + msg = yield self.login_auth(zabbix_url, zabbix_username, zabbix_password) + if msg: + # 失败 + return self.write(dict(code=-1, msg=msg)) + + with DBContext('w', None, True) as session: + session.query(ZabbixConfig).filter(ZabbixConfig.zabbix_url == zabbix_url).update(update_info) + + return self.write(dict(code=0, msg='更新成功')) + # return self.write(dict(code=-2, msg='name或zabbix url配置信息已经存在')) + session.add( + ZabbixConfig(zabbix_name=zabbix_name, zabbix_url=zabbix_url, zabbix_username=zabbix_username, + zabbix_password=zabbix_password)) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + zabbix_config_id = data.get('id') + zabbix_url = data.get('zabbix_url') + + if not zabbix_config_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(ZabbixConfig).filter(ZabbixConfig.id == zabbix_config_id).delete(synchronize_session=False) + session.query(ZabbixHosts).filter(ZabbixHosts.zabbix_url == zabbix_url).delete(synchronize_session=False) + + self.write(dict(code=0, msg='删除成功')) + + +class ZabbixSyncHandler(tornado.web.RequestHandler): + '''刷新ZABBIX地址,将用户所有配置的ZABBIX信息数据都写入数据库''' + + def post(self, *args, **kwargs): + with DBContext('w', None, True) as session: + zabbix_generator_list = get_zabbix_hosts() + if zabbix_generator_list: + for zabbix_gen in zabbix_generator_list: + for host_info in zabbix_gen: + host_name = host_info.get('host_name') + exist_hostname = session.query(ZabbixHosts).filter(ZabbixHosts.host_name == host_name).first() + if not exist_hostname: + session.add( + ZabbixHosts(zabbix_url=host_info.get('zabbix_url'), group_id=host_info.get('group_id'), + group_name=host_info.get('group_name'), + 
host_id=host_info.get('host_id'), host_name=host_name)) + else: + session.query(ZabbixHosts).filter(ZabbixHosts.host_name == host_name).update(host_info) + session.commit() + + self.write(dict(code=0, msg='刷新成功')) + + +class ZabbixLastIssuesHandler(tornado.web.RequestHandler): + '''获取多ZABBIX ISSUES信息,前端展示出来''' + + def get(self, *args, **kwargs): + last_issues = zabbix_last_issues() + return self.write(dict(code=0, msg='获取成功', data=last_issues)) + + +class ZabbixhookLogsHandler(tornado.web.RequestHandler): + '''获取webhook告警日志''' + + @gen.coroutine + def get(self, *args, **kwargs): + log_list = [] + + with DBContext('w') as session: + hooks_log_info = session.query(ZabbixHookLog).order_by(-ZabbixHookLog.id).limit(200).all() + + for msg in hooks_log_info: + data_dict = model_to_dict(msg) + data_dict['create_time'] = str(data_dict['create_time']) + log_list.append(data_dict) + + return self.write(dict(code=0, msg='获取成功', data=log_list)) + + +class ZabbixSubmitTaskConfHandler(tornado.web.RequestHandler): + '''ZABBIX钩子向任务平台提交任务,需要一个认证''' + + def get(self, *args, **kwargs): + page_size = self.get_argument('page', default=1, strip=True) + limit = self.get_argument('limit', default=1, strip=True) + limit_start = (int(page_size) - 1) * int(limit) # 只能有一条 + submit_task_conf_list = [] + with DBContext('w') as session: + zabbix_submit_task_conf_data = session.query(ZabbixSubmitTaskConf).order_by(ZabbixSubmitTaskConf.id).offset( + limit_start).limit(int(limit)) + for data in zabbix_submit_task_conf_data: + data_dict = model_to_dict(data) + submit_task_conf_list.append(data_dict) + return self.write(dict(code=0, msg='获取成功', data=submit_task_conf_list)) + + async def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + task_url = data.get('task_url', None) + auth_key = data.get('auth_key', None) + + if not task_url or not auth_key: + return self.write(dict(code=-2, msg="测试必须要包含:task_url、auth_key信息")) + + # 测试下权限 + http_client = httpclient.AsyncHTTPClient() + cookie = {"Cookie": 'auth_key={}'.format(auth_key)} + response = await http_client.fetch(task_url, method="GET", raise_error=False, headers=cookie) + + if response.code != 200: + return self.write(dict(code=-3, msg="错误码:{}".format(response.code))) + + response_data = json.loads(response.body.decode('utf-8')) + if response_data.get('code') != 0: + return self.write(dict(code=-3, msg="权限错误:{}".format(response_data.get('msg')))) + + # + with DBContext('w', None, True) as session: + exist_config = session.query(ZabbixSubmitTaskConf.id).first() + if not exist_config: + session.add(ZabbixSubmitTaskConf(task_url=task_url, auth_key=auth_key)) + else: + return self.write(dict(code=-4, msg="提交任务的认证配置信息只能存在一条")) + + self.write(dict(code=0, msg='添加成功')) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + submit_task_config_id = data.get('id') + + if not submit_task_config_id: + return self.write(dict(code=-2, msg='关键参数不能为空')) + + with DBContext('w', None, True) as session: + session.query(ZabbixSubmitTaskConf).filter(ZabbixSubmitTaskConf.id == submit_task_config_id).delete( + synchronize_session=False) + self.write(dict(code=0, msg='删除成功')) + + +class ZabbixHookHandler(BaseHandler): + @gen.coroutine + def get(self, *args, **kwargs): + self.write(dict(code=0, msg='获取csrf_key成功', csrf_key=self.new_csrf_key)) + + async def post(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + + print('data----->',data) + ins_log.read_log('info', '接收到数据:{}'.format(data)) + 
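ZabbixHookHandler.post below depends on a fixed alert-script message format, {HOSTNAME}___{HOST.IP}___{TRIGGER.NAME}___{TRIGGER.STATUS}___{TRIGGER.SEVERITY}, joined with triple underscores. A small sketch of that parsing contract (the function name is illustrative; the handler itself unpacks the split list inline):

def parse_zabbix_message(messages):
    """Split the '___'-joined alert string into its five fields, raising on malformed input."""
    fields = messages.split('___')
    if len(fields) != 5:
        raise ValueError('expected HOSTNAME___HOST.IP___TRIGGER.NAME___TRIGGER.STATUS___TRIGGER.SEVERITY')
    host_name, host_ip, trigger_name, trigger_status, trigger_severity = fields
    return dict(host_name=host_name, host_ip=host_ip, trigger_name=trigger_name,
                trigger_status=trigger_status, trigger_severity=trigger_severity)

# Matches the example string the handler logs when the configured format is wrong:
parse_zabbix_message('Zabbix server___127.0.0.1___Zabbix agent on Zabbix server is unreachable for 5 minutes___PROBLEM___Average')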
zabbix_url = data.get('zabbix_url') + messages = data.get('messages') + example_messages = 'Zabbix server___127.0.0.1___Zabbix agent on Zabbix server is unreachable for 5 minutes___PROBLEM___Average' + + if not zabbix_url or not messages: + ins_log.read_log('error', '关键参数不能为空') + return self.write(dict(code=-1, msg='Key parameters cannot be empty')) + + with DBContext('w', None, True) as session: + session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='收到告警信息:{}'.format(messages))) + # 要从message里面分析数据,这里必须是强规范:{HOSTNAME}___{HOST.IP}___{TRIGGER.NAME}___{TRIGGER.STATUS}___{TRIGGER.SEVERITY} + # 我们暂时只用到这2个数据,切割后类型依次是:['Zabbix server', '127.0.0.1', 'Zabbix agent on Zabbix server is unreachable for 5 minutes', 'PROBLEM', 'Average'] + try: + messages_list = messages.split('___') + host_name, host_ip, tagger_name, tagger_status, tagger_level = messages_list[0], messages_list[1], \ + messages_list[2], messages_list[3], \ + messages_list[4] + # host_ip = messages.split('___')[1] # hostip + # tagger_name = messages.split('___')[2] # 触发器名字 + # tagger_status = messages.split('___')[3] # 触发报警状态 + # tagger_level = messages.split('___')[4] # 报警级别 + except IndexError as e: + ins_log.read_log('error', '处理告警数据格式出错:{}'.format(e)) + ins_log.read_log('error', '可能是因为你配置的规则不对,请参考模块:{}'.format(example_messages)) + + return self.write(dict(code=-1, msg='处理告警数据格式出错:{}'.format(e))) + + if not host_name or not host_ip or not tagger_name or not tagger_status or not tagger_level: + return self.write(dict(code=-1, msg='你配置的规则格式应该不正常,请参考此模板:{}'.format(example_messages))) + + # 先查询告警的主机有没有 + hook_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.zabbix_url == zabbix_url, + ZabbixHosts.host_name == host_name).first() + + if not hook_info: + # return self.write(dict(code=0, msg='没有匹配到主机信息')) + ins_log.read_log('info', '主机:{}, 没有匹配到信息'.format(host_name)) + return self.write(dict(code=-1, msg='[INFO]: No match to host information')) + + # 匹配到主机后开始查询是否配置钩子 + if hook_info and not hook_info[0]: + # return self.write(dict(code=0, msg='没有匹配到钩子')) + ins_log.read_log('info', '主机:{}没有匹配到钩子'.format(host_name)) + return self.write(dict(code=-1, msg='[INFO]: No match to hook')) + + else: + # 匹配到主机,并且配置了钩子 + try: + # 防止用户给的数据不能json + hook_dict = json.loads(hook_info[0]) + ins_log.read_log('info', '主机:{} 一共配置钩子数据是:{}'.format(host_name, hook_dict)) + except Exception as e: + ins_log.read_log('error', e) + session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='钩子出错:{}'.format(messages))) + return self.write(dict(code=2, msg='There was an error when the hook parameter was converted into ' + 'a dictionary. 
Please check the relevant contents carefully')) + + # 根据你的报警名称匹配你的钩子,这里一个主机你可能配置了多个钩子 + alert_title_mate = None + for i in hook_dict.keys(): + if i == tagger_name: + alert_title_mate = i + + ins_log.read_log('info', '主机:{} 本次告警匹配到的钩子是:{}'.format(host_name, alert_title_mate)) + + if not alert_title_mate: + session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info='没有匹配到钩子'.format(messages))) + ins_log.read_log('info', '没有匹配到钩子') + return self.write(dict(code=2, msg='No hook matched')) + else: + # 开始提交任务到平台 + the_hook = hook_dict[alert_title_mate] + print(the_hook) + hook_args = dict(ZABBIX_URL=zabbix_url,HOSTIP=host_ip, HOSTNAME=host_name, TAGGER_NAME=tagger_name, + TAGGER_STATUS=tagger_status, TAGGER_LEVEL=tagger_level) + # old_hook_args = the_hook.get('hook_args') + ### 参数字典 + # hosts_dict = {1: "127.0.0.1", 2: "127.0.0.1"} ### 主机字典 + # if the_hook.get('hook_args'): + # hosts_dict.update(the_hook.get('hook_args')) + exec_host = the_hook.get('exec_host') + if exec_host: + hosts_dict = {1: exec_host} + else: + hosts_dict = {1: "127.0.0.1", 2: "127.0.0.1"} ### 主机字典 + # if old_hook_args.get('hosts_dict') and isinstance(old_hook_args.get('hosts_dict'), dict): + # hosts_dict = old_hook_args.pop('hosts_dict') + + msg = '匹配到钩子:{} 模板ID:{} 执行:{},参数:{}'.format(alert_title_mate, the_hook.get('temp_id'), + the_hook.get('schedule'), str(the_hook.get('hook_args'))) + + ins_log.read_log('info', msg) + if len(msg) > 200: + msg = msg[:200] + + session.add(ZabbixHookLog(zabbix_url=zabbix_url, logs_info=msg)) + + data_info = dict(exec_time=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), + task_name='ZABBIX钩子任务', temp_id=the_hook.get('temp_id'), + schedule=the_hook.get('schedule', 'ready'), + submitter=self.get_current_nickname(), args=str(hook_args), hosts=str(hosts_dict)) + + with DBContext('w', None, True) as session: + task_conf = session.query(ZabbixSubmitTaskConf.task_url, ZabbixSubmitTaskConf.auth_key).first() + + task_url = task_conf[0] + auth_key = task_conf[1] + + http_client = httpclient.AsyncHTTPClient() + cookie = {"Cookie": 'auth_key={}'.format(auth_key)} + csrf_response = await http_client.fetch(task_url, method="GET", raise_error=False, headers=cookie) + + if csrf_response.code != 200: + ins_log.read_log('error', '错误码:{}'.format(csrf_response.code)) + return self.write(dict(code=-3, msg="错误码:{}".format(csrf_response.code))) + + csrf_response_data = json.loads(csrf_response.body.decode('utf-8')) + if csrf_response_data.get('code') != 0: + ins_log.read_log('error', '权限错误:{}'.format(csrf_response_data.get('msg'))) + return self.write(dict(code=-3, msg="权限错误:{}".format(csrf_response_data.get('msg')))) + + csrf_key = csrf_response_data.get('csrf_key') + the_body = json.dumps(data_info) + cookie = {"Cookie": 'auth_key={}; csrf_key={}'.format(auth_key, csrf_key)} + response = await http_client.fetch(task_url, method="POST", body=the_body, raise_error=False, headers=cookie) + + if response.error: + ins_log.read_log('error', '请求任务接口失败:{}, 请检查参数字典格式是否正确'.format(response.error)) + return self.write(dict(code=-3, msg='请求任务接口失败:{}请检查参数字典格式是否正确'.format(response.error))) + + response_data = json.loads(response.body.decode('utf-8')) + + if response_data.get('code') != 0: + return self.write(dict(code=-1, msg=response_data.get('msg'))) + + return self.write(dict(code=0, msg=response_data.get('msg'))) + + def delete(self, *args, **kwargs): + data = json.loads(self.request.body.decode("utf-8")) + the_id = data.get('the_id') + alert_title = data.get('alert_title') + + with DBContext('w', None, True) as 
session: + hook_info = session.query(ZabbixHosts.zabbix_hooks).filter(ZabbixHosts.id == the_id).first() + if not hook_info: + return self.write(dict(code=-1, msg='No related items were found')) + + if not hook_info[0]: + return self.write(dict(code=-2, msg='No hooks, ignore')) + else: + try: + hook_dict = json.loads(hook_info[0]) + except Exception as e: + session.query(ZabbixHosts).filter(ZabbixHosts.id == the_id).update({ZabbixHosts.zabbix_hooks: ""}) + return self.write(dict(code=2, msg='钩子出错')) + + hook_dict.pop(alert_title) + hook_dict = json.dumps(hook_dict) + + session.query(ZabbixHosts).filter(ZabbixHosts.id == the_id).update({ZabbixHosts.zabbix_hooks: hook_dict}) + self.write(dict(code=0, msg='删除成功')) + + +zabbix_urls = [ + (r"/v1/zabbix/config/", ZabbixConfigHandler), + (r"/v1/zabbix/sync/", ZabbixSyncHandler), + (r"/v1/zabbix/tree/", ZabbixTreeHandler), + (r"/v1/zabbix/hosts/", ZabbixHostsHandler), + (r"/v1/zabbix/hooks/", ZabbixHookHandler), + (r"/v1/zabbix/logs/", ZabbixhookLogsHandler), + (r"/v1/zabbix/issues/", ZabbixLastIssuesHandler), + (r"/v1/zabbix/task_config/", ZabbixSubmitTaskConfHandler), + +] diff --git a/biz/paid_write_redis.py b/biz/paid_write_redis.py index 922c528..6488500 100644 --- a/biz/paid_write_redis.py +++ b/biz/paid_write_redis.py @@ -1,194 +1,194 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/21 12:56 -# @Author : Fred Yangxiaofei -# @File : paid_write_redis.py -# @Role : 用于提醒,如:将要过期的电信线路 - - -import json -from libs.database import model_to_dict -from libs.database import db_session -from libs.redis_connect import redis_conn -from biz.get_userinfo import get_user_info -from models.paid_mg import PaidMG -import datetime -from websdk.consts import const -from websdk.utils import SendMail - - -def get_paid_info(): - """ - 获取付费管理信息 - :return: - """ - paid_list = [] - paid_data = db_session.query(PaidMG).all() - db_session.close() - - - for data in paid_data: - data_dict = model_to_dict(data) - data_dict['paid_start_time'] = str(data_dict['paid_start_time']) - data_dict['paid_end_time'] = str(data_dict['paid_end_time']) - data_dict['create_at'] = str(data_dict['create_at']) - data_dict['update_at'] = str(data_dict['update_at']) - paid_list.append(data_dict) - return paid_list - - -def save_data(): - """ - 提醒内容写入redis - :return: - """ - - # 付费信息 - paid_data = get_paid_info() - # CODO用户信息 - user_data = get_user_info() - userdata = [json.loads(x) for x in user_data] - with redis_conn.pipeline(transaction=False) as p: - for remind in paid_data: - # print(remind) - for u in userdata: - if remind.get('nicknames'): - if u.get('nickname') in remind.get('nicknames').split(','): - #print(remind.get('paid_name'), {u.get('tel'): u.get('email')}) - save_data = {u.get('tel'): u.get('email')} - p.hmset(remind.get('paid_name'), save_data) - p.execute() - - - -def check_reminder(): - """ - 用途: - 检查哪些事件需要进行邮件提醒 - 逻辑: - 这里逻辑简单说明下如下: - 01. 先获取到所有事件的到期时间 - 02. 获取所有事件中每条事件都需要提前多少天进行提醒 - 03. 计算从哪天开始进行提醒(过期时间 - 提前提醒天数 = 开始提醒的日期) - 04. 
计算出来的·开始提醒日期· <= 现在时间 都进行报警 - :return: - """ - # 邮箱配置信息 - config_info = redis_conn.hgetall(const.APP_SETTINGS) - sm = SendMail(mail_host=config_info.get(const.EMAIL_HOST), mail_port=config_info.get(const.EMAIL_PORT), - mail_user=config_info.get(const.EMAIL_HOST_USER), - mail_password=config_info.get(const.EMAIL_HOST_PASSWORD), - mail_ssl=True if config_info.get(const.EMAIL_USE_SSL) == '1' else False) - - for msg in db_session.query(PaidMG).all(): - if msg.paid_end_time < datetime.datetime.now(): - email_content = '{}已过期,请删除该提醒'.format(msg.paid_name) - exp_paid_name = msg.paid_name - emails_list = redis_conn.hvals(msg.paid_name) - sm.send_mail(",".join(emails_list),'运维提醒信息',email_content) - reminder_time = msg.paid_end_time - datetime.timedelta(days=int(msg.reminder_day)) - if reminder_time <= datetime.datetime.now(): - if msg.paid_name != exp_paid_name: - remainder_time = msg.paid_end_time - datetime.datetime.now() - email_content = ('{}还有{}天到期,请留意'.format(msg.paid_name, remainder_time.days)) - emails_list = redis_conn.hvals(msg.paid_name) - sm.send_mail(",".join(emails_list), '运维提醒信息', email_content) - # print('msg_name---->',msg.paid_name) - # print('email_list---->',emails_list) - # content = """ - # - # - # - # - # OpenDevOps运维提醒邮件 - # - # - # - # """ - # content += """ - #
- #   Hi, Ops:
- #   你有以下事项提醒需要关注
- #   名称        过期时间        提前通知天数
- # """
- #
- # content += """
- #   {}        {}        {}
- # """.format(msg.paid_name, msg.paid_end_time, msg.reminder_day)
- #
- # content += """
- # - # - # - # """ - # send_msg = msg.paid_name + "\n到期时间:" + str(msg.paid_end_time) - #sm.send_mail("yanghongfei@shinezone.com", "运维信息提醒", send_msg) - # sm.send_mail(",".join(emails_list), '运维提醒信息', content, subtype='html') - - -def main(): - """ - 数据写redis+提醒将要过期事件 - :return: - """ - save_data() - check_reminder() - - -if __name__ == '__main__': - main() - #pass +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/21 12:56 +# @Author : Fred Yangxiaofei +# @File : paid_write_redis.py +# @Role : 用于提醒,如:将要过期的电信线路 + + +import json +from libs.database import model_to_dict +from libs.database import db_session +from libs.redis_connect import redis_conn +from biz.get_userinfo import get_user_info +from models.paid_mg import PaidMG +import datetime +from websdk.consts import const +from websdk.utils import SendMail + + +def get_paid_info(): + """ + 获取付费管理信息 + :return: + """ + paid_list = [] + paid_data = db_session.query(PaidMG).all() + db_session.close() + + + for data in paid_data: + data_dict = model_to_dict(data) + data_dict['paid_start_time'] = str(data_dict['paid_start_time']) + data_dict['paid_end_time'] = str(data_dict['paid_end_time']) + data_dict['create_at'] = str(data_dict['create_at']) + data_dict['update_at'] = str(data_dict['update_at']) + paid_list.append(data_dict) + return paid_list + + +def save_data(): + """ + 提醒内容写入redis + :return: + """ + + # 付费信息 + paid_data = get_paid_info() + # CODO用户信息 + user_data = get_user_info() + userdata = [json.loads(x) for x in user_data] + with redis_conn.pipeline(transaction=False) as p: + for remind in paid_data: + # print(remind) + for u in userdata: + if remind.get('nicknames'): + if u.get('nickname') in remind.get('nicknames').split(','): + #print(remind.get('paid_name'), {u.get('tel'): u.get('email')}) + save_data = {u.get('tel'): u.get('email')} + p.hmset(remind.get('paid_name'), save_data) + p.execute() + + + +def check_reminder(): + """ + 用途: + 检查哪些事件需要进行邮件提醒 + 逻辑: + 这里逻辑简单说明下如下: + 01. 先获取到所有事件的到期时间 + 02. 获取所有事件中每条事件都需要提前多少天进行提醒 + 03. 计算从哪天开始进行提醒(过期时间 - 提前提醒天数 = 开始提醒的日期) + 04. 计算出来的·开始提醒日期· <= 现在时间 都进行报警 + :return: + """ + # 邮箱配置信息 + config_info = redis_conn.hgetall(const.APP_SETTINGS) + sm = SendMail(mail_host=config_info.get(const.EMAIL_HOST), mail_port=config_info.get(const.EMAIL_PORT), + mail_user=config_info.get(const.EMAIL_HOST_USER), + mail_password=config_info.get(const.EMAIL_HOST_PASSWORD), + mail_ssl=True if config_info.get(const.EMAIL_USE_SSL) == '1' else False) + + for msg in db_session.query(PaidMG).all(): + if msg.paid_end_time < datetime.datetime.now(): + email_content = '{}已过期,请删除该提醒'.format(msg.paid_name) + exp_paid_name = msg.paid_name + emails_list = redis_conn.hvals(msg.paid_name) + sm.send_mail(",".join(emails_list),'运维提醒信息',email_content) + reminder_time = msg.paid_end_time - datetime.timedelta(days=int(msg.reminder_day)) + if reminder_time <= datetime.datetime.now(): + if msg.paid_name != exp_paid_name: + remainder_time = msg.paid_end_time - datetime.datetime.now() + email_content = ('{}还有{}天到期,请留意'.format(msg.paid_name, remainder_time.days)) + emails_list = redis_conn.hvals(msg.paid_name) + sm.send_mail(",".join(emails_list), '运维提醒信息', email_content) + # print('msg_name---->',msg.paid_name) + # print('email_list---->',emails_list) + # content = """ + # + # + # + # + # OpenDevOps运维提醒邮件 + # + # + # + # """ + # content += """ + #
+ #   Hi, Ops:
+ #   你有以下事项提醒需要关注
+ #   名称        过期时间        提前通知天数
+ # """
+ #
+ # content += """
+ #   {}        {}        {}
+ # """.format(msg.paid_name, msg.paid_end_time, msg.reminder_day)
+ #
+ # content += """
+ # + # + # + # """ + # send_msg = msg.paid_name + "\n到期时间:" + str(msg.paid_end_time) + #sm.send_mail("yanghongfei@shinezone.com", "运维信息提醒", send_msg) + # sm.send_mail(",".join(emails_list), '运维提醒信息', content, subtype='html') + + +def main(): + """ + 数据写redis+提醒将要过期事件 + :return: + """ + save_data() + check_reminder() + + +if __name__ == '__main__': + main() + #pass diff --git a/biz/tail_data.py b/biz/tail_data.py index 1988507..4f22378 100644 --- a/biz/tail_data.py +++ b/biz/tail_data.py @@ -1,30 +1,30 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/21 14:16 -# @Author : Fred Yangxiaofei -# @File : tail_data.py -# @Role : Tornado PeriodicCallback 定时执行 - -import datetime -from libs.remind.remind_tail_data import main as remind_tail_data - - -# from biz.paid_write_redis import save_data as paid_save_data -# from biz.paid_write_redis import check_reminder as paid_reminder -# from biz.promethues_write_redis import save_data as promethues_tail_data - - -def tail_data(): - """ - :return: - """ - # Promethues报警信息 - # promethues_tail_data() - d_time = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '9:00', '%Y-%m-%d%H:%M') - d_time1 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '10:10', '%Y-%m-%d%H:%M') - n_time = datetime.datetime.now() - if n_time > d_time and n_time < d_time1: remind_tail_data() - - -if __name__ == '__main__': - tail_data() +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/21 14:16 +# @Author : Fred Yangxiaofei +# @File : tail_data.py +# @Role : Tornado PeriodicCallback 定时执行 + +import datetime +from libs.remind.remind_tail_data import main as remind_tail_data + + +# from biz.paid_write_redis import save_data as paid_save_data +# from biz.paid_write_redis import check_reminder as paid_reminder +# from biz.promethues_write_redis import save_data as promethues_tail_data + + +def tail_data(): + """ + :return: + """ + # Promethues报警信息 + # promethues_tail_data() + d_time = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '9:00', '%Y-%m-%d%H:%M') + d_time1 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '10:10', '%Y-%m-%d%H:%M') + n_time = datetime.datetime.now() + if n_time > d_time and n_time < d_time1: remind_tail_data() + + +if __name__ == '__main__': + tail_data() diff --git a/doc/__init__.py b/doc/__init__.py index 4a23ee5..8c871f8 100644 --- a/doc/__init__.py +++ b/doc/__init__.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/4/9 10:44 -# @Author : Fred Yangxiaofei -# @File : __init__.py.py +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/4/9 10:44 +# @Author : Fred Yangxiaofei +# @File : __init__.py.py # @Role : 说明脚本功能 \ No newline at end of file diff --git a/doc/codo_tools.sql b/docker/codo_tools.sql similarity index 100% rename from doc/codo_tools.sql rename to docker/codo_tools.sql diff --git a/docker-compose.yaml b/docker/docker-compose.yaml similarity index 100% rename from docker-compose.yaml rename to docker/docker-compose.yaml diff --git a/docker/nginx_default.conf b/docker/nginx_default.conf new file mode 100644 index 0000000..e5709d3 --- /dev/null +++ b/docker/nginx_default.conf @@ -0,0 +1,64 @@ +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log; +pid /run/nginx.pid; + +# Load dynamic modules. See /usr/share/doc/nginx/README.dynamic. 
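The tail_data.py hunk above calls remind_tail_data only when the current time falls between 09:00 and 10:10, so the periodic job stays quiet outside that morning window. A standalone sketch of the same check, assuming naive local datetimes as in the original (the function name is illustrative):

import datetime

def in_reminder_window(now=None):
    """Return True when 'now' falls inside the 09:00-10:10 daily window used by tail_data()."""
    now = now or datetime.datetime.now()
    start = now.replace(hour=9, minute=0, second=0, microsecond=0)
    end = now.replace(hour=10, minute=10, second=0, microsecond=0)
    return start < now < end

# tail_data() would then run remind_tail_data() only when in_reminder_window() is True.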
+include /usr/share/nginx/modules/*.conf; + +events { + worker_connections 1024; +} + +http { + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + access_log /var/log/nginx/access.log main; + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + # Load modular configuration files from the /etc/nginx/conf.d directory. + # See http://nginx.org/en/docs/ngx_core_module.html#include + # for more information. + include /etc/nginx/conf.d/*.conf; + +# Settings for a TLS enabled server. +# +# server { +# listen 443 ssl http2 default_server; +# listen [::]:443 ssl http2 default_server; +# server_name _; +# root /usr/share/nginx/html; +# +# ssl_certificate "/etc/pki/nginx/server.crt"; +# ssl_certificate_key "/etc/pki/nginx/private/server.key"; +# ssl_session_cache shared:SSL:1m; +# ssl_session_timeout 10m; +# ssl_ciphers HIGH:!aNULL:!MD5; +# ssl_prefer_server_ciphers on; +# +# # Load configuration files for the default server block. +# include /etc/nginx/default.d/*.conf; +# +# location / { +# } +# +# error_page 404 /404.html; +# location = /40x.html { +# } +# +# error_page 500 502 503 504 /50x.html; +# location = /50x.html { +# } +# } + +} \ No newline at end of file diff --git a/doc/nginx_ops.conf b/docker/nginx_ops.conf similarity index 68% rename from doc/nginx_ops.conf rename to docker/nginx_ops.conf index b507f6e..075570a 100644 --- a/doc/nginx_ops.conf +++ b/docker/nginx_ops.conf @@ -1,22 +1,21 @@ -upstream codo-tools{ - server 127.0.0.1:9800; - server 127.0.0.1:9801; - server 127.0.0.1:9802; -} - - -server -{ - listen 80; - server_name tools.opendevops.cn; - access_log /var/log/nginx/codo-tools_access.log; - error_log /var/log/nginx/codo-tools_error.log; - location / { - proxy_set_header Host $http_host; - proxy_redirect off; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Scheme $scheme; - proxy_set_header Cookie $http_cookie; - proxy_pass http://codo-tools; - } +upstream codo-tools-nginx{ + server 127.0.0.1:9800; + server 127.0.0.1:9801; + server 127.0.0.1:9802; +} + +server +{ + listen 80; + server_name tools.opendevops.cn; + access_log /var/log/nginx/codo-tools-access.log; + error_log /var/log/nginx/codo-tools-error.log; + location / { + proxy_set_header Host $http_host; + proxy_redirect off; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Scheme $scheme; + proxy_set_header Cookie $http_cookie; + proxy_pass http://codo-tools-nginx; + } } \ No newline at end of file diff --git a/docker/prestart.sh b/docker/prestart.sh new file mode 100644 index 0000000..2729f92 --- /dev/null +++ b/docker/prestart.sh @@ -0,0 +1,29 @@ +#!/bin/sh +cd /var/www/codo-tools/ + +#运维工具 +sed -i "s#cookie_secret = .*#cookie_secret = '${cookie_secret}'#g" settings.py +DEFAULT_DB_DBNAME='codo_tools' +sed -i "s#DEFAULT_DB_DBHOST = .*#DEFAULT_DB_DBHOST = os.getenv('DEFAULT_DB_DBHOST', '${DEFAULT_DB_DBHOST}')#g" settings.py +sed -i "s#DEFAULT_DB_DBPORT = .*#DEFAULT_DB_DBPORT = os.getenv('DEFAULT_DB_DBPORT', '${DEFAULT_DB_DBPORT}')#g" settings.py +sed -i "s#DEFAULT_DB_DBUSER = .*#DEFAULT_DB_DBUSER = os.getenv('DEFAULT_DB_DBUSER', '${DEFAULT_DB_DBUSER}')#g" settings.py +sed -i "s#DEFAULT_DB_DBPWD = .*#DEFAULT_DB_DBPWD = os.getenv('DEFAULT_DB_DBPWD', '${DEFAULT_DB_DBPWD}')#g" settings.py +sed -i "s#DEFAULT_DB_DBNAME = .*#DEFAULT_DB_DBNAME = 
os.getenv('DEFAULT_DB_DBNAME', '${DEFAULT_DB_DBNAME}')#g" settings.py +sed -i "s#DEFAULT_REDIS_HOST = .*#DEFAULT_REDIS_HOST = os.getenv('DEFAULT_REDIS_HOST', '${DEFAULT_REDIS_HOST}')#g" settings.py +sed -i "s#DEFAULT_REDIS_PORT = .*#DEFAULT_REDIS_PORT = os.getenv('DEFAULT_REDIS_PORT', '${DEFAULT_REDIS_PORT}')#g" settings.py +sed -i "s#DEFAULT_REDIS_PASSWORD = .*#DEFAULT_REDIS_PASSWORD = os.getenv('DEFAULT_REDIS_PASSWORD', '${DEFAULT_REDIS_PASSWORD}')#g" settings.py + + +try_num=0 + +while [[ $try_num -le 100 ]]; +do + if $(curl -s ${DEFAULT_DB_DBHOST}:${DEFAULT_DB_DBPORT} > /dev/null);then + python3 db_sync.py + exit 0 + else + echo 'wait mysql start to do db_sync.py' + fi + let try_num+=1 + sleep 6 +done \ No newline at end of file diff --git a/doc/supervisor_ops.conf b/docker/supervisor_ops.conf similarity index 66% rename from doc/supervisor_ops.conf rename to docker/supervisor_ops.conf index d1c9b3d..6738260 100644 --- a/doc/supervisor_ops.conf +++ b/docker/supervisor_ops.conf @@ -1,37 +1,49 @@ -[supervisord] -nodaemon=true - -[group:codo_tools,] -programs=tools,cron_jobs - -[program:tools] -command=python3 startup.py --service=tools --port=98%(process_num)02d -process_name=%(program_name)s_%(process_num)02d -numprocs=3 -directory=/var/www/codo-tools -user=root -autostart = true -autorestart=true -redirect_stderr=true -stdout_logfile=/var/log/supervisor/tools.log -loglevel=info -logfile_maxbytes=100MB -logfile_backups=3 - -[program:cron_jobs] -command=python3 startup.py --service=cron_jobs -process_name=%(program_name)s_%(process_num)02d -numprocs=1 -directory=/var/www/codo-tools -user=root -autostart = true -autorestart=true -redirect_stderr=true -stdout_logfile=/var/log/supervisor/cron_jobs.log -loglevel=info -logfile_maxbytes=100MB - -[program:nginx] -command=/usr/sbin/nginx -g "daemon off;" -autostart=true +[supervisord] +nodaemon=true + +[group:codo_tools,] +programs=tools,cron_jobs,tools-prestart + +[program:tools-prestart] +command=sh docker/prestart.sh +directory=/var/www/codo-tools +user=root +autostart = true +autorestart=false +redirect_stderr=true +stdout_logfile=/var/log/supervisor/codo-tools-prestart.log +loglevel=info +logfile_maxbytes=10MB +logfile_backups=1 + +[program:tools] +command=python3 startup.py --service=tools --port=98%(process_num)02d +process_name=%(program_name)s_%(process_num)02d +numprocs=3 +directory=/var/www/codo-tools +user=root +autostart = true +autorestart=true +redirect_stderr=true +stdout_logfile=/var/log/supervisor/codo-tools.log +loglevel=info +logfile_maxbytes=100MB +logfile_backups=3 + +[program:cron_jobs] +command=python3 startup.py --service=cron_jobs +process_name=%(program_name)s_%(process_num)02d +numprocs=1 +directory=/var/www/codo-tools +user=root +autostart = true +autorestart=true +redirect_stderr=true +stdout_logfile=/var/log/supervisor/cron_jobs.log +loglevel=info +logfile_maxbytes=100MB + +[program:nginx] +command=/usr/sbin/nginx -g "daemon off;" +autostart=true autorestart=true \ No newline at end of file diff --git a/libs/base_handler.py b/libs/base_handler.py index 2709ffa..b8e74d0 100644 --- a/libs/base_handler.py +++ b/libs/base_handler.py @@ -1,38 +1,37 @@ -#!/usr/bin/env python -# -*-coding:utf-8-*- - -import jwt -from tornado.web import HTTPError -from websdk.base_handler import BaseHandler as SDKBaseHandler - - -class BaseHandler(SDKBaseHandler): - def __init__(self, *args, **kwargs): - super(BaseHandler, self).__init__(*args, **kwargs) - - def prepare(self): - self.xsrf_token - - ### 登陆验证 - auth_key = 
self.get_cookie('auth_key', None) - if not auth_key: - url_auth_key = self.get_argument('auth_key', default=None, strip=True) - if url_auth_key: - auth_key = bytes(url_auth_key, encoding='utf-8') - - if not auth_key: - # 没登录,就让跳到登陆页面 - raise HTTPError(401, 'auth failed 1') - - else: - #user_info = jwt.decode(auth_key, verify=False).get('data') - user_info = jwt.decode(auth_key, options={"verify_signature": False}).get('data') - self.user_id = user_info.get('user_id', None) - self.username = user_info.get('username', None) - self.nickname = user_info.get('nickname', None) - self.is_super = user_info.get('is_superuser', False) - - if not self.user_id: - raise HTTPError(401, 'auth failed 2') - - self.is_superuser = self.is_super +#!/usr/bin/env python +# -*-coding:utf-8-*- + +import jwt +from tornado.web import HTTPError +from websdk.base_handler import BaseHandler as SDKBaseHandler + + +class BaseHandler(SDKBaseHandler): + def __init__(self, *args, **kwargs): + super(BaseHandler, self).__init__(*args, **kwargs) + + def prepare(self): + self.xsrf_token + + ### 登陆验证 + auth_key = self.get_cookie('auth_key', None) + if not auth_key: + url_auth_key = self.get_argument('auth_key', default=None, strip=True) + if url_auth_key: + auth_key = bytes(url_auth_key, encoding='utf-8') + + if not auth_key: + # 没登录,就让跳到登陆页面 + raise HTTPError(401, 'auth failed 1') + + else: + user_info = jwt.decode(auth_key, verify=False).get('data') + self.user_id = user_info.get('user_id', None) + self.username = user_info.get('username', None) + self.nickname = user_info.get('nickname', None) + self.is_super = user_info.get('is_superuser', False) + + if not self.user_id: + raise HTTPError(401, 'auth failed 2') + + self.is_superuser = self.is_super \ No newline at end of file diff --git a/libs/oss.py b/libs/oss.py index 8320aee..255a992 100644 --- a/libs/oss.py +++ b/libs/oss.py @@ -1,75 +1,75 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/4/8 15:49 -# @Author : Fred Yangxiaofei -# @File : oss.py -# @Role : 说明脚本功能 - - -import oss2 -import datetime -import json - - -class OSSApi(): - def __init__(self, key, secret, region, bucket_name, base_dir): - self.key = key - self.secret = secret - self.region = 'http://oss-%s.aliyuncs.com' % region - self.bucket_name = bucket_name - self.base_dir = base_dir - self.date = datetime.datetime.now().strftime('%Y%m%d') - self.conn() - - def conn(self): - auth = oss2.Auth(self.key, self.secret) - self.bucket = oss2.Bucket(auth, self.region, self.bucket_name) - - def setObj(self, filename, data): - '''存储str对象''' - - result = self.bucket.put_object('%s/%s' % (self.base_dir, filename), data) - - if result.status == 200: - # print('[Success] Put obj success!') - return filename - else: - print('[Faild] Put obj Faild!') - # except oss2.exceptions.ServerError as e: - # print('[Error] 服务器拒绝, 请检查[KEY][SECRET][存储桶]是否正确!') - # except oss2.exceptions.AccessDenied as e: - # print('[Error] 操作拒绝,请检查key是否有权限上传!') - # except Exception as e: - # return e - - # def getObj(self, filename): - # '''获取str对象''' - # try: - # object_stream = self.bucket.get_object('%s/%s' % (self.base_dir, filename)) - # # print('[Success] Get obj success!') - # return object_stream.read().decode() - # except oss2.exceptions.NoSuchKey as e: - # return json.dumps({'0.0029790401458740234': '[Error] OSS录像文件不存在!'}) - # except oss2.exceptions.ServerError as e: - # return json.dumps({'0.0029790401458740234': '[Error] 请检查[KEY][SECRET][存储桶]是否正确!'}) - # except oss2.exceptions.AccessDenied as e: - # return 
json.dumps({'0.0029790401458740234': '[Error] 操作拒绝,请检查key是否有权限上传!'}) - # except Exception as e: - # return json.dumps({'0.0029790401458740234': '[Error]--->%s' % e}) - - -if __name__ == '__main__': - oss_config = { - 'STORAGE_REGION': 'cn-shanghai', - 'STORAGE_NAME': 'shinezone-opendevops', - 'STORAGE_PATH': 'ops', - 'STORAGE_KEY_ID': 'LTAIRiWZ3L2W1117NQc', - 'STORAGE_KEY_SECRET': 'xxxxxxxxx', - } - - obj = OSSApi( - oss_config.get('STORAGE_KEY_ID'), oss_config.get('STORAGE_KEY_SECRET'), oss_config.get('STORAGE_REGION'), - oss_config.get('STORAGE_NAME'), oss_config.get('STORAGE_PATH')) - - data = '### 1.md' - obj.setObj('1.md', data) +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/4/8 15:49 +# @Author : Fred Yangxiaofei +# @File : oss.py +# @Role : 说明脚本功能 + + +import oss2 +import datetime +import json + + +class OSSApi(): + def __init__(self, key, secret, region, bucket_name, base_dir): + self.key = key + self.secret = secret + self.region = 'http://oss-%s.aliyuncs.com' % region + self.bucket_name = bucket_name + self.base_dir = base_dir + self.date = datetime.datetime.now().strftime('%Y%m%d') + self.conn() + + def conn(self): + auth = oss2.Auth(self.key, self.secret) + self.bucket = oss2.Bucket(auth, self.region, self.bucket_name) + + def setObj(self, filename, data): + '''存储str对象''' + + result = self.bucket.put_object('%s/%s' % (self.base_dir, filename), data) + + if result.status == 200: + # print('[Success] Put obj success!') + return filename + else: + print('[Faild] Put obj Faild!') + # except oss2.exceptions.ServerError as e: + # print('[Error] 服务器拒绝, 请检查[KEY][SECRET][存储桶]是否正确!') + # except oss2.exceptions.AccessDenied as e: + # print('[Error] 操作拒绝,请检查key是否有权限上传!') + # except Exception as e: + # return e + + # def getObj(self, filename): + # '''获取str对象''' + # try: + # object_stream = self.bucket.get_object('%s/%s' % (self.base_dir, filename)) + # # print('[Success] Get obj success!') + # return object_stream.read().decode() + # except oss2.exceptions.NoSuchKey as e: + # return json.dumps({'0.0029790401458740234': '[Error] OSS录像文件不存在!'}) + # except oss2.exceptions.ServerError as e: + # return json.dumps({'0.0029790401458740234': '[Error] 请检查[KEY][SECRET][存储桶]是否正确!'}) + # except oss2.exceptions.AccessDenied as e: + # return json.dumps({'0.0029790401458740234': '[Error] 操作拒绝,请检查key是否有权限上传!'}) + # except Exception as e: + # return json.dumps({'0.0029790401458740234': '[Error]--->%s' % e}) + + +if __name__ == '__main__': + oss_config = { + 'STORAGE_REGION': 'cn-shanghai', + 'STORAGE_NAME': 'shinezone-opendevops', + 'STORAGE_PATH': 'ops', + 'STORAGE_KEY_ID': 'LTAIRiWZ3L2W1117NQc', + 'STORAGE_KEY_SECRET': 'xxxxxxxxx', + } + + obj = OSSApi( + oss_config.get('STORAGE_KEY_ID'), oss_config.get('STORAGE_KEY_SECRET'), oss_config.get('STORAGE_REGION'), + oss_config.get('STORAGE_NAME'), oss_config.get('STORAGE_PATH')) + + data = '### 1.md' + obj.setObj('1.md', data) diff --git a/libs/public.py b/libs/public.py index e7f48f9..03fdd75 100644 --- a/libs/public.py +++ b/libs/public.py @@ -1,40 +1,40 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/4 15:44 -# @Author : Fred Yangxiaofei -# @File : public.py -# @Role : 公用的方法 - - -import time -from libs.redis_connect import redis_conn -from websdk.consts import const - - -def timestamp_to_datatime(timestamp): - """ - 将时间戳转换成时间 - :param timestamp: 时间戳 int类型 - :return: - """ - if not isinstance(timestamp, int): - return 'Incorrect format' - # 转换成localtime - time_local = time.localtime(timestamp) - # 转换成新的时间格式(2016-05-05 
20:28:54) - data_time = time.strftime("%Y-%m-%d %H:%M:%S", time_local) - return data_time - - -def get_user_info(): - """ - 从现有redis里面获取用户信息,如:Email,SMS等 - :return: - """ - # 集合 - data_set = redis_conn.smembers(const.USERS_INFO) - # 集合转list - userdata = list(data_set) - # PS:这里codo后端会把数据主动写redis里面,假数据类型:user_data:['{"nickname:杨红飞", "email": "test@domain.cn", "tel": "10000000001"}','{"nickname:杨红飞02", "email": "test02@domain.cn", "tel": "10000000002"}'] - return userdata - +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/4 15:44 +# @Author : Fred Yangxiaofei +# @File : public.py +# @Role : 公用的方法 + + +import time +from libs.redis_connect import redis_conn +from websdk.consts import const + + +def timestamp_to_datatime(timestamp): + """ + 将时间戳转换成时间 + :param timestamp: 时间戳 int类型 + :return: + """ + if not isinstance(timestamp, int): + return 'Incorrect format' + # 转换成localtime + time_local = time.localtime(timestamp) + # 转换成新的时间格式(2016-05-05 20:28:54) + data_time = time.strftime("%Y-%m-%d %H:%M:%S", time_local) + return data_time + + +def get_user_info(): + """ + 从现有redis里面获取用户信息,如:Email,SMS等 + :return: + """ + # 集合 + data_set = redis_conn.smembers(const.USERS_INFO) + # 集合转list + userdata = list(data_set) + # PS:这里codo后端会把数据主动写redis里面,假数据类型:user_data:['{"nickname:杨红飞", "email": "test@domain.cn", "tel": "10000000001"}','{"nickname:杨红飞02", "email": "test02@domain.cn", "tel": "10000000002"}'] + return userdata + diff --git a/libs/redis_connect.py b/libs/redis_connect.py index e72534b..4bb9f8a 100644 --- a/libs/redis_connect.py +++ b/libs/redis_connect.py @@ -1,33 +1,33 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/21 14:46 -# @Author : Fred Yangxiaofei -# @File : redis_connect.py -# @Role : redis链接信息 - - -import redis -from settings import settings -from websdk.consts import const - - -def create_redis_pool(): - redis_configs = settings[const.REDIS_CONFIG_ITEM][const.DEFAULT_RD_KEY] - pool = redis.ConnectionPool(host=redis_configs['host'], port=redis_configs['port'], - password=redis_configs['password'], db=redis_configs[const.RD_DB_KEY], - decode_responses=True) - return pool - - -redis_pool = create_redis_pool() - - -def create_redis_connection(): - redis_con = redis.Redis(connection_pool=redis_pool) - return redis_con - - -redis_conn = create_redis_connection() - -if __name__ == '__main__': - pass +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/21 14:46 +# @Author : Fred Yangxiaofei +# @File : redis_connect.py +# @Role : redis链接信息 + + +import redis +from settings import settings +from websdk.consts import const + + +def create_redis_pool(): + redis_configs = settings[const.REDIS_CONFIG_ITEM][const.DEFAULT_RD_KEY] + pool = redis.ConnectionPool(host=redis_configs['host'], port=redis_configs['port'], + password=redis_configs['password'], db=redis_configs[const.RD_DB_KEY], + decode_responses=True) + return pool + + +redis_pool = create_redis_pool() + + +def create_redis_connection(): + redis_con = redis.Redis(connection_pool=redis_pool) + return redis_con + + +redis_conn = create_redis_connection() + +if __name__ == '__main__': + pass diff --git a/libs/zabbix/__init__.py b/libs/zabbix/__init__.py index ba236eb..c9e9933 100644 --- a/libs/zabbix/__init__.py +++ b/libs/zabbix/__init__.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/2 13:39 -# @Author : Fred Yangxiaofei -# @File : __init__.py.py +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/2 13:39 +# @Author : Fred Yangxiaofei +# @File : 
__init__.py.py # @Role : 说明脚本功能 \ No newline at end of file diff --git a/libs/zabbix/get_hosts.py b/libs/zabbix/get_hosts.py index fd5637a..096040d 100644 --- a/libs/zabbix/get_hosts.py +++ b/libs/zabbix/get_hosts.py @@ -1,90 +1,90 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/15 9:48 -# @Author : Fred Yangxiaofei -# @File : get_hosts.py -# @Role : 获取ZABBIX监控的主机组/主机/ID等信息 - -from libs.zabbix.zabbix_api import ZabbixAPI -from libs.db_context import DBContext -from models.zabbix_mg import ZabbixConfig, model_to_dict -import fire - - -class GetZabbixHosts(): - def __init__(self, zabbix_url, zabbix_user, zabbix_password): - self.zabbix_url = zabbix_url - self.zabbix_user = zabbix_user - self.zabbix_password = zabbix_password - self.zapi = self.login() - - def login(self): - zapi = ZabbixAPI(self.zabbix_url) - zapi.login(self.zabbix_user, self.zabbix_password) - return zapi - - def get_all_hostgroups(self): - """ - 获取所有主机组 - :return: - """ - zabbix_all_hostgroups = self.zapi.hostgroup.get(output='extend') - return zabbix_all_hostgroups - - def get_hostgroup_hostinfo(self, all_host_group_info): - """ - 获取单个组下所有的主机信息 - :param all_host_group_info: 所有主机组信息 - :return: - """ - - for g in all_host_group_info: - if g: - group_name = g['name'] - group_id = g['groupid'] - hostid_in_group_list = self.zapi.host.get(output=['hostid', 'name'], groupids=group_id) - if hostid_in_group_list: - for h in hostid_in_group_list: - zabbix_group_data = { - "zabbix_url": self.zabbix_url, - "group_id": group_id, - "group_name": group_name, - "host_id": h['hostid'], - "host_name": h['name'] - } - yield zabbix_group_data - # print(group_name, g['groupid'],h['hostid'], h['name']) - - -def get_zabbix_configs(): - """ - 从数据库里面看下用户有几个监控 - :return: - """ - zabbix_configs_list = [] - - with DBContext('w') as session: - zabbix_config_info = session.query(ZabbixConfig).all() - for data in zabbix_config_info: - data_dict = model_to_dict(data) - zabbix_configs_list.append(data_dict) - return zabbix_configs_list - - -def main(): - zabbix_generator_list = [] # 每一组ZABBIX的信息都返回出来一个generator - zabbix_configs_list = get_zabbix_configs() - for zabbix_data in zabbix_configs_list: - zabbix_url = zabbix_data.get('zabbix_url') - zabbix_username = zabbix_data.get('zabbix_username') - zabbix_password = zabbix_data.get('zabbix_password') - obj = GetZabbixHosts(zabbix_url, zabbix_username, zabbix_password) - all_hostgroups_list = obj.get_all_hostgroups() - zabbix_group_data = obj.get_hostgroup_hostinfo(all_hostgroups_list) - zabbix_generator_list.append(zabbix_group_data) - - return zabbix_generator_list - - -if __name__ == '__main__': - fire.Fire(main) +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/15 9:48 +# @Author : Fred Yangxiaofei +# @File : get_hosts.py +# @Role : 获取ZABBIX监控的主机组/主机/ID等信息 + +from libs.zabbix.zabbix_api import ZabbixAPI +from libs.db_context import DBContext +from models.zabbix_mg import ZabbixConfig, model_to_dict +import fire + + +class GetZabbixHosts(): + def __init__(self, zabbix_url, zabbix_user, zabbix_password): + self.zabbix_url = zabbix_url + self.zabbix_user = zabbix_user + self.zabbix_password = zabbix_password + self.zapi = self.login() + + def login(self): + zapi = ZabbixAPI(self.zabbix_url) + zapi.login(self.zabbix_user, self.zabbix_password) + return zapi + + def get_all_hostgroups(self): + """ + 获取所有主机组 + :return: + """ + zabbix_all_hostgroups = self.zapi.hostgroup.get(output='extend') + return zabbix_all_hostgroups + + def get_hostgroup_hostinfo(self, 
all_host_group_info): + """ + 获取单个组下所有的主机信息 + :param all_host_group_info: 所有主机组信息 + :return: + """ + + for g in all_host_group_info: + if g: + group_name = g['name'] + group_id = g['groupid'] + hostid_in_group_list = self.zapi.host.get(output=['hostid', 'name'], groupids=group_id) + if hostid_in_group_list: + for h in hostid_in_group_list: + zabbix_group_data = { + "zabbix_url": self.zabbix_url, + "group_id": group_id, + "group_name": group_name, + "host_id": h['hostid'], + "host_name": h['name'] + } + yield zabbix_group_data + # print(group_name, g['groupid'],h['hostid'], h['name']) + + +def get_zabbix_configs(): + """ + 从数据库里面看下用户有几个监控 + :return: + """ + zabbix_configs_list = [] + + with DBContext('w') as session: + zabbix_config_info = session.query(ZabbixConfig).all() + for data in zabbix_config_info: + data_dict = model_to_dict(data) + zabbix_configs_list.append(data_dict) + return zabbix_configs_list + + +def main(): + zabbix_generator_list = [] # 每一组ZABBIX的信息都返回出来一个generator + zabbix_configs_list = get_zabbix_configs() + for zabbix_data in zabbix_configs_list: + zabbix_url = zabbix_data.get('zabbix_url') + zabbix_username = zabbix_data.get('zabbix_username') + zabbix_password = zabbix_data.get('zabbix_password') + obj = GetZabbixHosts(zabbix_url, zabbix_username, zabbix_password) + all_hostgroups_list = obj.get_all_hostgroups() + zabbix_group_data = obj.get_hostgroup_hostinfo(all_hostgroups_list) + zabbix_generator_list.append(zabbix_group_data) + + return zabbix_generator_list + + +if __name__ == '__main__': + fire.Fire(main) diff --git a/libs/zabbix/get_issues.py b/libs/zabbix/get_issues.py index e443f73..c12816f 100644 --- a/libs/zabbix/get_issues.py +++ b/libs/zabbix/get_issues.py @@ -1,80 +1,80 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/2 13:53 -# @Author : Fred Yangxiaofei -# @File : get_issues.py -# @Role : 获取Zabbix报警信息 - - -from libs.zabbix.login import zabbix_login -from libs.public import timestamp_to_datatime -from libs.db_context import DBContext -from models.zabbix_mg import ZabbixConfig, model_to_dict -import fire - - -def get_last_issues(zabbix_url, zabbix_user, zabbix_password): - """ - 获取最近的ISSUES 没有确认的报警 - :return: - """ - try: - zapi = zabbix_login(zabbix_url, zabbix_user, zabbix_password) - unack_triggers = zapi.trigger.get( - only_true=1, - skipDependent=1, - monitored=1, - active=1, - output='extend', - expandDescription=1, - selectHosts=['host'], - withLastEventUnacknowledged=1, ) - # print(unack_triggers) - last_issues_list = [] - for t in unack_triggers: - issues_data = dict() - issues_data['host'] = t['hosts'][0].get('host') - issues_data['issue'] = t.get('description') - issues_data['last_change'] = timestamp_to_datatime(int(t.get('lastchange'))) - issues_data['level'] = t.get('priority') - last_issues_list.append(issues_data) - - return last_issues_list - - except Exception as e: - print(e) - # 错误,拿不到数据,返回一个空列表出去 - return [] - - -def get_zabbix_configs(): - """ - 从数据库里面看下用户有几个监控 - :return: - """ - zabbix_configs_list = [] - - with DBContext('w') as session: - zabbix_config_info = session.query(ZabbixConfig).all() - for data in zabbix_config_info: - data_dict = model_to_dict(data) - zabbix_configs_list.append(data_dict) - return zabbix_configs_list - - -def main(): - zabbix_last_issues = [] - zabbix_configs_list = get_zabbix_configs() - - for zabbix_data in zabbix_configs_list: - zabbix_url = zabbix_data.get('zabbix_url') - zabbix_username = zabbix_data.get('zabbix_username') - zabbix_password = zabbix_data.get('zabbix_password') 
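Note on get_hosts.py above: main() returns one generator per configured Zabbix instance (each built by GetZabbixHosts.get_hostgroup_hostinfo), so a caller still has to drain every generator to get concrete host rows. A minimal consumption sketch, using fake stand-in generators because the real ones need a live Zabbix login; the sample dict mirrors the zabbix_group_data shape yielded above, and nothing here is part of the project itself:

```python
# A minimal sketch (not part of the project) of how the list returned by
# libs/zabbix/get_hosts.main() could be drained. Each element of the real
# list is a generator produced by GetZabbixHosts.get_hostgroup_hostinfo();
# fake_instance() below stands in for one of those generators.
from itertools import chain


def fake_instance(url):
    """Yield host rows shaped like the zabbix_group_data dict built above."""
    yield {"zabbix_url": url, "group_id": "2", "group_name": "Linux servers",
           "host_id": "10084", "host_name": "Zabbix server"}


zabbix_generator_list = [fake_instance("http://zbx-a.example.com/zabbix"),
                         fake_instance("http://zbx-b.example.com/zabbix")]

# chain.from_iterable() flattens the per-instance generators into one stream,
# which a caller could then bulk-insert into the zabbix_hosts table.
for row in chain.from_iterable(zabbix_generator_list):
    print(row["zabbix_url"], row["group_name"], row["host_name"])
```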
- zdata = get_last_issues(zabbix_url, zabbix_username, zabbix_password) - # 2个列表合成一个list - zabbix_last_issues += zdata - return zabbix_last_issues - - -if __name__ == '__main__': - fire.Fire(main) +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/2 13:53 +# @Author : Fred Yangxiaofei +# @File : get_issues.py +# @Role : 获取Zabbix报警信息 + + +from libs.zabbix.login import zabbix_login +from libs.public import timestamp_to_datatime +from libs.db_context import DBContext +from models.zabbix_mg import ZabbixConfig, model_to_dict +import fire + + +def get_last_issues(zabbix_url, zabbix_user, zabbix_password): + """ + 获取最近的ISSUES 没有确认的报警 + :return: + """ + try: + zapi = zabbix_login(zabbix_url, zabbix_user, zabbix_password) + unack_triggers = zapi.trigger.get( + only_true=1, + skipDependent=1, + monitored=1, + active=1, + output='extend', + expandDescription=1, + selectHosts=['host'], + withLastEventUnacknowledged=1, ) + # print(unack_triggers) + last_issues_list = [] + for t in unack_triggers: + issues_data = dict() + issues_data['host'] = t['hosts'][0].get('host') + issues_data['issue'] = t.get('description') + issues_data['last_change'] = timestamp_to_datatime(int(t.get('lastchange'))) + issues_data['level'] = t.get('priority') + last_issues_list.append(issues_data) + + return last_issues_list + + except Exception as e: + print(e) + # 错误,拿不到数据,返回一个空列表出去 + return [] + + +def get_zabbix_configs(): + """ + 从数据库里面看下用户有几个监控 + :return: + """ + zabbix_configs_list = [] + + with DBContext('w') as session: + zabbix_config_info = session.query(ZabbixConfig).all() + for data in zabbix_config_info: + data_dict = model_to_dict(data) + zabbix_configs_list.append(data_dict) + return zabbix_configs_list + + +def main(): + zabbix_last_issues = [] + zabbix_configs_list = get_zabbix_configs() + + for zabbix_data in zabbix_configs_list: + zabbix_url = zabbix_data.get('zabbix_url') + zabbix_username = zabbix_data.get('zabbix_username') + zabbix_password = zabbix_data.get('zabbix_password') + zdata = get_last_issues(zabbix_url, zabbix_username, zabbix_password) + # 2个列表合成一个list + zabbix_last_issues += zdata + return zabbix_last_issues + + +if __name__ == '__main__': + fire.Fire(main) diff --git a/libs/zabbix/login.py b/libs/zabbix/login.py index 4b4ba02..16a2138 100644 --- a/libs/zabbix/login.py +++ b/libs/zabbix/login.py @@ -1,19 +1,19 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/8 10:24 -# @Author : Fred Yangxiaofei -# @File : login.py -# @Role : 测试登陆 - - -from libs.zabbix.zabbix_api import ZabbixAPI - - -def zabbix_login(zabbix_url, zabbix_user, zabbix_password): - zapi = ZabbixAPI(zabbix_url) - zapi.login(zabbix_user, zabbix_password) - return zapi - - -if __name__ == '__main__': - pass +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/8 10:24 +# @Author : Fred Yangxiaofei +# @File : login.py +# @Role : 测试登陆 + + +from libs.zabbix.zabbix_api import ZabbixAPI + + +def zabbix_login(zabbix_url, zabbix_user, zabbix_password): + zapi = ZabbixAPI(zabbix_url) + zapi.login(zabbix_user, zabbix_password) + return zapi + + +if __name__ == '__main__': + pass diff --git a/libs/zabbix/send_alert.py b/libs/zabbix/send_alert.py index 9dc35b5..92d09c8 100644 --- a/libs/zabbix/send_alert.py +++ b/libs/zabbix/send_alert.py @@ -1,49 +1,49 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/10 10:09 -# @Author : Fred Yangxiaofei -# @File : send_alert.py -# @Role : Zabbix webhooks 发送告警信息测试 - - -# zabbix3.0+配置 - -import sys -import requests -import json - - -def 
send_alert(zabbix_url, webhook_url, messages): - # 这里就是一个长期Token,管理员可以在用户列表选择一个用户进行生成一个长期Token, 但是需要对/tools/v1/zabbix/hooks/接口有权限 - params = { - "auth_key": "xxxx" - } - # payload = "{\"data\": \"test\"}" - payload = {"zabbix_url": zabbix_url, "messages": messages} - headers = {'content-type': "application/json", 'cache-control': "no-cache"} - response = requests.post(url=webhook_url, data=json.dumps(payload), headers=headers, params=params) - print(response.text) - - -if __name__ == '__main__': - # 你的ZABBIX地址,高版本ZABBIX你也可以从ZABBIX配置中传过来,请确保你和CODO平台上配置的一模一样 - zabbix_url = 'http://172.16.0.225/zabbix' - - # ZABBIX配置webhooks传过来的,地址从CODO平台获取 - webhook_url = sys.argv[1] - - # ZABBIX的告警信息,这个我要用到,所以就要强规范:{HOSTNAME}___{HOST.IP}___{TRIGGER.NAME}___{TRIGGER.STATUS}___{TRIGGER.SEVERITY} - messages = sys.argv[2] - - send_alert(zabbix_url, webhook_url, messages) - -""" -如何使用: - -pip install json -pip install requests - -如何测试: - -python send_alert_to_codo.py 'http://172.16.0.101:8040/v1/zabbix/hooks/' 'Zabbix server___127.0.0.1___Zabbix agent on Zabbix server is unreachable for 5 minutes___PROBLEM___Average' -""" +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/10 10:09 +# @Author : Fred Yangxiaofei +# @File : send_alert.py +# @Role : Zabbix webhooks 发送告警信息测试 + + +# zabbix3.0+配置 + +import sys +import requests +import json + + +def send_alert(zabbix_url, webhook_url, messages): + # 这里就是一个长期Token,管理员可以在用户列表选择一个用户进行生成一个长期Token, 但是需要对/tools/v1/zabbix/hooks/接口有权限 + params = { + "auth_key": "xxxx" + } + # payload = "{\"data\": \"test\"}" + payload = {"zabbix_url": zabbix_url, "messages": messages} + headers = {'content-type': "application/json", 'cache-control': "no-cache"} + response = requests.post(url=webhook_url, data=json.dumps(payload), headers=headers, params=params) + print(response.text) + + +if __name__ == '__main__': + # 你的ZABBIX地址,高版本ZABBIX你也可以从ZABBIX配置中传过来,请确保你和CODO平台上配置的一模一样 + zabbix_url = 'http://172.16.0.225/zabbix' + + # ZABBIX配置webhooks传过来的,地址从CODO平台获取 + webhook_url = sys.argv[1] + + # ZABBIX的告警信息,这个我要用到,所以就要强规范:{HOSTNAME}___{HOST.IP}___{TRIGGER.NAME}___{TRIGGER.STATUS}___{TRIGGER.SEVERITY} + messages = sys.argv[2] + + send_alert(zabbix_url, webhook_url, messages) + +""" +如何使用: + +pip install json +pip install requests + +如何测试: + +python send_alert_to_codo.py 'http://172.16.0.101:8040/v1/zabbix/hooks/' 'Zabbix server___127.0.0.1___Zabbix agent on Zabbix server is unreachable for 5 minutes___PROBLEM___Average' +""" diff --git a/libs/zabbix/zabbix_api.py b/libs/zabbix/zabbix_api.py index ec92032..0762631 100644 --- a/libs/zabbix/zabbix_api.py +++ b/libs/zabbix/zabbix_api.py @@ -1,179 +1,179 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/2 13:40 -# @Author : Fred Yangxiaofei -# @File : zabbix_api.py -# @Role : ZABBIX API -# 原文链接:https://github.com/lukecyca/pyzabbix - - - -import logging -import requests -import json - - -class _NullHandler(logging.Handler): - def emit(self, record): - pass - -logger = logging.getLogger(__name__) -logger.addHandler(_NullHandler()) - - -class ZabbixAPIException(Exception): - """ generic zabbix api exception - code list: - -32602 - Invalid params (eg already exists) - -32500 - no permissions - """ - pass - - -class ZabbixAPI(object): - def __init__(self, - server='http://localhost/zabbix', - session=None, - use_authenticate=False, - timeout=None): - """ - Parameters: - server: Base URI for zabbix web interface (omitting /api_jsonrpc.php) - session: optional pre-configured requests.Session instance - use_authenticate: Use old 
(Zabbix 1.8) style authentication - timeout: optional connect and read timeout in seconds, default: None (if you're using Requests >= 2.4 you can set it as tuple: "(connect, read)" which is used to set individual connect and read timeouts.) - """ - - if session: - self.session = session - else: - self.session = requests.Session() - - # Default headers for all requests - self.session.headers.update({ - 'Content-Type': 'application/json-rpc', - 'User-Agent': 'python/pyzabbix', - 'Cache-Control': 'no-cache' - }) - - self.use_authenticate = use_authenticate - self.auth = '' - self.id = 0 - - self.timeout = timeout - - self.url = server + '/api_jsonrpc.php' - logger.info("JSON-RPC Server Endpoint: %s", self.url) - - def login(self, user='', password=''): - """Convenience method for calling user.authenticate and storing the resulting auth token - for further commands. - If use_authenticate is set, it uses the older (Zabbix 1.8) authentication command - :param password: Password used to login into Zabbix - :param user: Username used to login into Zabbix - """ - - # If we have an invalid auth token, we are not allowed to send a login - # request. Clear it before trying. - self.auth = '' - if self.use_authenticate: - self.auth = self.user.authenticate(user=user, password=password) - else: - self.auth = self.user.login(user=user, password=password) - - def check_authentication(self): - """Convenience method for calling user.checkAuthentication of the current session""" - return self.user.checkAuthentication(sessionid=self.auth) - - def confimport(self, confformat='', source='', rules=''): - """Alias for configuration.import because it clashes with - Python's import reserved keyword - :param rules: - :param source: - :param confformat: - """ - - return self.do_request( - method="configuration.import", - params={"format": confformat, "source": source, "rules": rules} - )['result'] - - def api_version(self): - return self.apiinfo.version() - - def do_request(self, method, params=None): - request_json = { - 'jsonrpc': '2.0', - 'method': method, - 'params': params or {}, - 'id': self.id, - } - - # We don't have to pass the auth token if asking for the apiinfo.version or user.checkAuthentication - if self.auth and method != 'apiinfo.version' and method != 'user.checkAuthentication': - request_json['auth'] = self.auth - - logger.debug("Sending: %s", json.dumps(request_json, - indent=4, - separators=(',', ': '))) - response = self.session.post( - self.url, - data=json.dumps(request_json), - timeout=self.timeout - ) - logger.debug("Response Code: %s", str(response.status_code)) - - # NOTE: Getting a 412 response code means the headers are not in the - # list of allowed headers. 
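For reference, do_request() above wraps every call in a standard Zabbix JSON-RPC envelope posted to <server>/api_jsonrpc.php. A self-contained sketch of the equivalent raw request follows; the URL and auth token are placeholders (a real token comes from user.login), so this is a shape illustration rather than a working call against any particular server:

```python
# Self-contained sketch (placeholder URL and token) of the JSON-RPC envelope
# that ZabbixAPI.do_request() sends to <server>/api_jsonrpc.php.
import json
import requests

ZABBIX_URL = "http://zabbix.example.com/zabbix"    # placeholder frontend URL
AUTH_TOKEN = "0424bd59b807674191e7d77572075f33"    # placeholder; from user.login

payload = {
    "jsonrpc": "2.0",
    "method": "host.get",                  # any "<object>.<action>" API method
    "params": {"output": ["hostid", "name"]},
    "id": 1,
}
# do_request() attaches the token for every method except apiinfo.version
# and user.checkAuthentication.
payload["auth"] = AUTH_TOKEN

resp = requests.post(
    ZABBIX_URL + "/api_jsonrpc.php",
    data=json.dumps(payload),
    headers={"Content-Type": "application/json-rpc"},
    timeout=10,
)
resp.raise_for_status()
print(resp.json().get("result"))
```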
- response.raise_for_status() - - if not len(response.text): - raise ZabbixAPIException("Received empty response") - - try: - response_json = json.loads(response.text) - except ValueError: - raise ZabbixAPIException( - "Unable to parse json: %s" % response.text - ) - logger.debug("Response Body: %s", json.dumps(response_json, - indent=4, - separators=(',', ': '))) - - self.id += 1 - - if 'error' in response_json: # some exception - if 'data' not in response_json['error']: # some errors don't contain 'data': workaround for ZBX-9340 - response_json['error']['data'] = "No data" - msg = u"Error {code}: {message}, {data}".format( - code=response_json['error']['code'], - message=response_json['error']['message'], - data=response_json['error']['data'] - ) - raise ZabbixAPIException(msg, response_json['error']['code']) - - return response_json - - def __getattr__(self, attr): - """Dynamically create an object class (ie: host)""" - return ZabbixAPIObjectClass(attr, self) - - -class ZabbixAPIObjectClass(object): - def __init__(self, name, parent): - self.name = name - self.parent = parent - - def __getattr__(self, attr): - """Dynamically create a method (ie: get)""" - - def fn(*args, **kwargs): - if args and kwargs: - raise TypeError("Found both args and kwargs") - - return self.parent.do_request( - '{0}.{1}'.format(self.name, attr), - args or kwargs - )['result'] - - return fn +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/2 13:40 +# @Author : Fred Yangxiaofei +# @File : zabbix_api.py +# @Role : ZABBIX API +# 原文链接:https://github.com/lukecyca/pyzabbix + + + +import logging +import requests +import json + + +class _NullHandler(logging.Handler): + def emit(self, record): + pass + +logger = logging.getLogger(__name__) +logger.addHandler(_NullHandler()) + + +class ZabbixAPIException(Exception): + """ generic zabbix api exception + code list: + -32602 - Invalid params (eg already exists) + -32500 - no permissions + """ + pass + + +class ZabbixAPI(object): + def __init__(self, + server='http://localhost/zabbix', + session=None, + use_authenticate=False, + timeout=None): + """ + Parameters: + server: Base URI for zabbix web interface (omitting /api_jsonrpc.php) + session: optional pre-configured requests.Session instance + use_authenticate: Use old (Zabbix 1.8) style authentication + timeout: optional connect and read timeout in seconds, default: None (if you're using Requests >= 2.4 you can set it as tuple: "(connect, read)" which is used to set individual connect and read timeouts.) + """ + + if session: + self.session = session + else: + self.session = requests.Session() + + # Default headers for all requests + self.session.headers.update({ + 'Content-Type': 'application/json-rpc', + 'User-Agent': 'python/pyzabbix', + 'Cache-Control': 'no-cache' + }) + + self.use_authenticate = use_authenticate + self.auth = '' + self.id = 0 + + self.timeout = timeout + + self.url = server + '/api_jsonrpc.php' + logger.info("JSON-RPC Server Endpoint: %s", self.url) + + def login(self, user='', password=''): + """Convenience method for calling user.authenticate and storing the resulting auth token + for further commands. + If use_authenticate is set, it uses the older (Zabbix 1.8) authentication command + :param password: Password used to login into Zabbix + :param user: Username used to login into Zabbix + """ + + # If we have an invalid auth token, we are not allowed to send a login + # request. Clear it before trying. 
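The dynamic dispatch at the bottom of this class is what lets the rest of libs/zabbix write zapi.hostgroup.get(...) or zapi.trigger.get(...): ZabbixAPI.__getattr__ hands back a ZabbixAPIObjectClass, whose own __getattr__ turns the call into do_request("<object>.<method>", params). A stand-alone sketch of that proxy pattern with a fake API object, so it runs without a Zabbix server:

```python
# Stand-alone illustration of the attribute-proxy pattern used by ZabbixAPI:
# zapi.host is resolved by __getattr__ into an object class, and calling
# .get(...) on it becomes do_request("host.get", params). FakeAPI below just
# echoes the method name instead of talking to a Zabbix server.
class FakeObjectClass:
    def __init__(self, name, parent):
        self.name, self.parent = name, parent

    def __getattr__(self, attr):
        def fn(*args, **kwargs):
            if args and kwargs:
                raise TypeError("Found both args and kwargs")
            return self.parent.do_request("{0}.{1}".format(self.name, attr),
                                          args or kwargs)
        return fn


class FakeAPI:
    def do_request(self, method, params=None):
        return {"method": method, "params": params or {}}

    def __getattr__(self, attr):
        return FakeObjectClass(attr, self)


api = FakeAPI()
print(api.host.get(output=["hostid", "name"]))
# -> {'method': 'host.get', 'params': {'output': ['hostid', 'name']}}
```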
+ self.auth = '' + if self.use_authenticate: + self.auth = self.user.authenticate(user=user, password=password) + else: + self.auth = self.user.login(user=user, password=password) + + def check_authentication(self): + """Convenience method for calling user.checkAuthentication of the current session""" + return self.user.checkAuthentication(sessionid=self.auth) + + def confimport(self, confformat='', source='', rules=''): + """Alias for configuration.import because it clashes with + Python's import reserved keyword + :param rules: + :param source: + :param confformat: + """ + + return self.do_request( + method="configuration.import", + params={"format": confformat, "source": source, "rules": rules} + )['result'] + + def api_version(self): + return self.apiinfo.version() + + def do_request(self, method, params=None): + request_json = { + 'jsonrpc': '2.0', + 'method': method, + 'params': params or {}, + 'id': self.id, + } + + # We don't have to pass the auth token if asking for the apiinfo.version or user.checkAuthentication + if self.auth and method != 'apiinfo.version' and method != 'user.checkAuthentication': + request_json['auth'] = self.auth + + logger.debug("Sending: %s", json.dumps(request_json, + indent=4, + separators=(',', ': '))) + response = self.session.post( + self.url, + data=json.dumps(request_json), + timeout=self.timeout + ) + logger.debug("Response Code: %s", str(response.status_code)) + + # NOTE: Getting a 412 response code means the headers are not in the + # list of allowed headers. + response.raise_for_status() + + if not len(response.text): + raise ZabbixAPIException("Received empty response") + + try: + response_json = json.loads(response.text) + except ValueError: + raise ZabbixAPIException( + "Unable to parse json: %s" % response.text + ) + logger.debug("Response Body: %s", json.dumps(response_json, + indent=4, + separators=(',', ': '))) + + self.id += 1 + + if 'error' in response_json: # some exception + if 'data' not in response_json['error']: # some errors don't contain 'data': workaround for ZBX-9340 + response_json['error']['data'] = "No data" + msg = u"Error {code}: {message}, {data}".format( + code=response_json['error']['code'], + message=response_json['error']['message'], + data=response_json['error']['data'] + ) + raise ZabbixAPIException(msg, response_json['error']['code']) + + return response_json + + def __getattr__(self, attr): + """Dynamically create an object class (ie: host)""" + return ZabbixAPIObjectClass(attr, self) + + +class ZabbixAPIObjectClass(object): + def __init__(self, name, parent): + self.name = name + self.parent = parent + + def __getattr__(self, attr): + """Dynamically create a method (ie: get)""" + + def fn(*args, **kwargs): + if args and kwargs: + raise TypeError("Found both args and kwargs") + + return self.parent.do_request( + '{0}.{1}'.format(self.name, attr), + args or kwargs + )['result'] + + return fn diff --git a/libs/zabbix/zabbix_operate.py b/libs/zabbix/zabbix_operate.py index 76ec24a..75c1f78 100644 --- a/libs/zabbix/zabbix_operate.py +++ b/libs/zabbix/zabbix_operate.py @@ -1,85 +1,85 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/9 15:24 -# @Author : Fred Yangxiaofei -# @File : zabbix_operate.py -# @Role : ZABBIX操作 - - -from libs.zabbix.zabbix_api import ZabbixAPI -from libs.public import timestamp_to_datatime - - -class ZabbixOperate(): - - def __init__(self, zabbix_url, zabbix_user, zabbix_password): - self.zabbix_url = zabbix_url - self.zabbix_user = zabbix_user - self.zabbix_password = 
zabbix_password - self.zapi = self.login() - - def login(self): - zapi = ZabbixAPI(self.zabbix_url) - zapi.login(self.zabbix_user, self.zabbix_password) - return zapi - - def get_issues(self): - """ - 获取Zabbix last issues - :return: - """ - unack_triggers = self.zapi.trigger.get( - only_true=1, - skipDependent=1, - monitored=1, - active=1, - output='extend', - expandDescription=1, - selectHosts=['host'], - withLastEventUnacknowledged=1, ) - # print(unack_triggers) - last_issues_list = [] - for t in unack_triggers: - issues_data = dict() - issues_data['host'] = t['hosts'][0].get('host') - issues_data['issue'] = t.get('description') - issues_data['last_change'] = timestamp_to_datatime(int(t.get('lastchange'))) - issues_data['level'] = t.get('priority') - last_issues_list.append(issues_data) - - return last_issues_list - - def get_all_hostgroups(self): - """ - 获取所有主机组 - :return: - """ - zabbix_all_hostgroups = self.zapi.hostgroup.get(output='extend') - return zabbix_all_hostgroups - - def get_hostgroup_hostinfo(self, all_host_group_info): - """ - 获取单个组下所有的主机信息 - :param all_host_group_info: 所有主机组信息 - :return: - """ - - for g in all_host_group_info: - if g: - group_name = g['name'] - group_id = g['groupid'] - hostid_in_group_list = self.zapi.host.get(output=['hostid', 'name'], groupids=group_id) - if hostid_in_group_list: - for h in hostid_in_group_list: - zabbix_group_data = { - "group_id": group_id, - "group_name": group_name, - "host_id": h['hostid'], - "host_name": h['name'] - } - yield zabbix_group_data - # print(group_name, g['groupid'],h['hostid'], h['name']) - - -if __name__ == '__main__': - pass +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/9 15:24 +# @Author : Fred Yangxiaofei +# @File : zabbix_operate.py +# @Role : ZABBIX操作 + + +from libs.zabbix.zabbix_api import ZabbixAPI +from libs.public import timestamp_to_datatime + + +class ZabbixOperate(): + + def __init__(self, zabbix_url, zabbix_user, zabbix_password): + self.zabbix_url = zabbix_url + self.zabbix_user = zabbix_user + self.zabbix_password = zabbix_password + self.zapi = self.login() + + def login(self): + zapi = ZabbixAPI(self.zabbix_url) + zapi.login(self.zabbix_user, self.zabbix_password) + return zapi + + def get_issues(self): + """ + 获取Zabbix last issues + :return: + """ + unack_triggers = self.zapi.trigger.get( + only_true=1, + skipDependent=1, + monitored=1, + active=1, + output='extend', + expandDescription=1, + selectHosts=['host'], + withLastEventUnacknowledged=1, ) + # print(unack_triggers) + last_issues_list = [] + for t in unack_triggers: + issues_data = dict() + issues_data['host'] = t['hosts'][0].get('host') + issues_data['issue'] = t.get('description') + issues_data['last_change'] = timestamp_to_datatime(int(t.get('lastchange'))) + issues_data['level'] = t.get('priority') + last_issues_list.append(issues_data) + + return last_issues_list + + def get_all_hostgroups(self): + """ + 获取所有主机组 + :return: + """ + zabbix_all_hostgroups = self.zapi.hostgroup.get(output='extend') + return zabbix_all_hostgroups + + def get_hostgroup_hostinfo(self, all_host_group_info): + """ + 获取单个组下所有的主机信息 + :param all_host_group_info: 所有主机组信息 + :return: + """ + + for g in all_host_group_info: + if g: + group_name = g['name'] + group_id = g['groupid'] + hostid_in_group_list = self.zapi.host.get(output=['hostid', 'name'], groupids=group_id) + if hostid_in_group_list: + for h in hostid_in_group_list: + zabbix_group_data = { + "group_id": group_id, + "group_name": group_name, + "host_id": h['hostid'], + "host_name": 
h['name'] + } + yield zabbix_group_data + # print(group_name, g['groupid'],h['hostid'], h['name']) + + +if __name__ == '__main__': + pass diff --git a/models/event_record.py b/models/event_record.py index c22cdf5..ddceb5a 100644 --- a/models/event_record.py +++ b/models/event_record.py @@ -1,27 +1,27 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/20 17:59 -# @Author : Fred Yangxiaofei -# @File : event_record.py -# @Role : 事件记录ORM - - -from datetime import datetime -from sqlalchemy import Column -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import String, Integer, DateTime, TIMESTAMP - -Base = declarative_base() - - -class EventRecord(Base): - __tablename__ = 'itsm_event_record' - id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 - event_name = Column(String(100), nullable=False) # 事件名称 - event_status = Column(String(100), nullable=False) # 事件状态 - event_level = Column(String(100), nullable=False) # 事件级别 - event_processing = Column(String(100), nullable=False) # 处理人员。接手人员 - event_start_time = Column(DateTime, nullable=False) # 开始时间 - event_end_time = Column(DateTime, nullable=False) # 结束时间 - create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 - update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 17:59 +# @Author : Fred Yangxiaofei +# @File : event_record.py +# @Role : 事件记录ORM + + +from datetime import datetime +from sqlalchemy import Column +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import String, Integer, DateTime, TIMESTAMP + +Base = declarative_base() + + +class EventRecord(Base): + __tablename__ = 'itsm_event_record' + id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 + event_name = Column(String(100), nullable=False) # 事件名称 + event_status = Column(String(100), nullable=False) # 事件状态 + event_level = Column(String(100), nullable=False) # 事件级别 + event_processing = Column(String(100), nullable=False) # 处理人员。接手人员 + event_start_time = Column(DateTime, nullable=False) # 开始时间 + event_end_time = Column(DateTime, nullable=False) # 结束时间 + create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 + update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 diff --git a/models/fault_mg.py b/models/fault_mg.py index a5984b3..11a2d8a 100644 --- a/models/fault_mg.py +++ b/models/fault_mg.py @@ -1,32 +1,32 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/20 13:27 -# @Author : Fred Yangxiaofei -# @File : fault_mg.py -# @Role : 故障管理ORM - - -from datetime import datetime -from sqlalchemy import Column -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import String, Integer, DateTime, TIMESTAMP -from sqlalchemy.dialects.mysql import LONGTEXT - -Base = declarative_base() - - -class Fault(Base): - __tablename__ = 'itsm_fault_info' - id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 - fault_name = Column(String(100), nullable=False) # 故障名称 - fault_level = Column(String(100), nullable=False) # 故障级别 - fault_state = Column(String(100), nullable=False) # 故障状态 - fault_penson = Column(String(100), nullable=False) # 故障责任人 - processing_penson = Column(String(100), nullable=True) # 故障处理人员 - fault_report = Column(LONGTEXT, nullable=True) # 故障报告,附件 - fault_start_time = Column(DateTime, nullable=False) # 故障开始时间 - fault_end_time = Column(DateTime, nullable=False) # 故障结束时间 - fault_issue = 
Column(LONGTEXT, nullable=True) # 故障原因 - fault_summary = Column(LONGTEXT, nullable=True) # 故障总结 - create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 - update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 13:27 +# @Author : Fred Yangxiaofei +# @File : fault_mg.py +# @Role : 故障管理ORM + + +from datetime import datetime +from sqlalchemy import Column +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import String, Integer, DateTime, TIMESTAMP +from sqlalchemy.dialects.mysql import LONGTEXT + +Base = declarative_base() + + +class Fault(Base): + __tablename__ = 'itsm_fault_info' + id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 + fault_name = Column(String(100), nullable=False) # 故障名称 + fault_level = Column(String(100), nullable=False) # 故障级别 + fault_state = Column(String(100), nullable=False) # 故障状态 + fault_penson = Column(String(100), nullable=False) # 故障责任人 + processing_penson = Column(String(100), nullable=True) # 故障处理人员 + fault_report = Column(LONGTEXT, nullable=True) # 故障报告,附件 + fault_start_time = Column(DateTime, nullable=False) # 故障开始时间 + fault_end_time = Column(DateTime, nullable=False) # 故障结束时间 + fault_issue = Column(LONGTEXT, nullable=True) # 故障原因 + fault_summary = Column(LONGTEXT, nullable=True) # 故障总结 + create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 + update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 diff --git a/models/paid_mg.py b/models/paid_mg.py index d941c79..af94edb 100644 --- a/models/paid_mg.py +++ b/models/paid_mg.py @@ -1,26 +1,26 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/21 10:29 -# @Author : Fred Yangxiaofei -# @File : paid_mg.py -# @Role : 付费管理ORM - - -from datetime import datetime -from sqlalchemy import Column -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import String, Integer, DateTime, TIMESTAMP - -Base = declarative_base() - - -class PaidMG(Base): - __tablename__ = 'itsm_paid_mg' - id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 - paid_name = Column(String(100), nullable=False) # 事件名称 - paid_start_time = Column(DateTime, nullable=False) # 上次付费时间 - paid_end_time = Column(DateTime, nullable=False) # 到期时间 - reminder_day = Column(Integer, nullable=False) # 提前多少天提醒 - nicknames = Column(String(200), nullable=True) #提醒人员 - create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 - update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/21 10:29 +# @Author : Fred Yangxiaofei +# @File : paid_mg.py +# @Role : 付费管理ORM + + +from datetime import datetime +from sqlalchemy import Column +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import String, Integer, DateTime, TIMESTAMP + +Base = declarative_base() + + +class PaidMG(Base): + __tablename__ = 'itsm_paid_mg' + id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 + paid_name = Column(String(100), nullable=False) # 事件名称 + paid_start_time = Column(DateTime, nullable=False) # 上次付费时间 + paid_end_time = Column(DateTime, nullable=False) # 到期时间 + reminder_day = Column(Integer, nullable=False) # 提前多少天提醒 + nicknames = Column(String(200), nullable=True) #提醒人员 + create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 + update_at = Column(TIMESTAMP, nullable=False, 
default=datetime.now()) # 记录更新时间 diff --git a/models/project_mg.py b/models/project_mg.py index 620c878..09e76dd 100644 --- a/models/project_mg.py +++ b/models/project_mg.py @@ -1,28 +1,28 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/3/20 17:26 -# @Author : Fred Yangxiaofei -# @File : project_mg.py -# @Role : 项目管理ORM - - -from datetime import datetime -from sqlalchemy import Column -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import String, Integer, DateTime, TIMESTAMP -from sqlalchemy.dialects.mysql import LONGTEXT - -Base = declarative_base() - - -class ProjectMG(Base): - __tablename__ = 'itsm_project_mg' #项目管理表信息 - id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 - project_name = Column(String(100), nullable=False) # 项目名称 - project_status = Column(String(100), nullable=False) # 项目状态 - project_requester = Column(String(100), nullable=False) # 项目需求者。项目发起人 - project_processing = Column(String(100), nullable=False) # 项目处理人员。接手人员 - project_start_time = Column(DateTime, nullable=False) # 项目开始时间 - project_end_time = Column(DateTime, nullable=False) # 项目结束时间 - create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/3/20 17:26 +# @Author : Fred Yangxiaofei +# @File : project_mg.py +# @Role : 项目管理ORM + + +from datetime import datetime +from sqlalchemy import Column +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import String, Integer, DateTime, TIMESTAMP +from sqlalchemy.dialects.mysql import LONGTEXT + +Base = declarative_base() + + +class ProjectMG(Base): + __tablename__ = 'itsm_project_mg' #项目管理表信息 + id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 + project_name = Column(String(100), nullable=False) # 项目名称 + project_status = Column(String(100), nullable=False) # 项目状态 + project_requester = Column(String(100), nullable=False) # 项目需求者。项目发起人 + project_processing = Column(String(100), nullable=False) # 项目处理人员。接手人员 + project_start_time = Column(DateTime, nullable=False) # 项目开始时间 + project_end_time = Column(DateTime, nullable=False) # 项目结束时间 + create_at = Column(DateTime, nullable=False, default=datetime.now()) # 记录创建时间 update_at = Column(TIMESTAMP, nullable=False, default=datetime.now()) # 记录更新时间 \ No newline at end of file diff --git a/models/zabbix_mg.py b/models/zabbix_mg.py index 0be8b9e..557465e 100644 --- a/models/zabbix_mg.py +++ b/models/zabbix_mg.py @@ -1,63 +1,63 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/7/05 17:26 -# @Author : Fred Yangxiaofei -# @File : zabbix_mg.py -# @Role : ZABBIX ORM - - -from sqlalchemy import Column -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import class_mapper -from sqlalchemy import String, Integer, DateTime,Text -from sqlalchemy.dialects.mysql import LONGTEXT -from datetime import datetime - -Base = declarative_base() - - -def model_to_dict(model): - model_dict = {} - for key, column in class_mapper(model.__class__).c.items(): - model_dict[column.name] = getattr(model, key, None) - return model_dict - - -class ZabbixConfig(Base): - __tablename__ = 'zabbix_config' # ZABBIX账户配置信息 - id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 - zabbix_name = Column(String(100), nullable=False) # 名称 - zabbix_url = Column(String(255), nullable=False) # zabbix的URL - zabbix_username = Column(String(50), nullable=False) # zabbix用户 - zabbix_password = Column(String(100), nullable=False) # zabbix密码 - - 
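A detail worth noting in the timestamp columns of these models: default=datetime.now() calls the function once, when the module is imported, so every row receives that same baked-in value, whereas passing the callable datetime.now (as ZabbixHookLog.create_time does a little further down) makes SQLAlchemy evaluate it on every INSERT. A small self-contained sketch with an illustrative table name and an in-memory SQLite engine shows the two forms side by side:

```python
# Self-contained sketch (illustrative table name, in-memory SQLite) showing
# the difference between the two default styles used in these models.
from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class DefaultDemo(Base):
    __tablename__ = 'default_demo'
    id = Column(Integer, primary_key=True, autoincrement=True)
    fixed_at = Column(DateTime, default=datetime.now())  # evaluated once, at import
    fresh_at = Column(DateTime, default=datetime.now)    # evaluated on every INSERT


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(DefaultDemo())
session.commit()
row = session.query(DefaultDemo).first()
# fixed_at holds the module-import timestamp; fresh_at holds the insert time.
print(row.fixed_at, row.fresh_at)
```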
-class ZabbixHosts(Base): - __tablename__ = 'zabbix_hosts' - ### ZABBIX主机信息 - id = Column('id', Integer, primary_key=True, autoincrement=True) - zabbix_url = Column('zabbix_url', String(150)) ### - group_id = Column('group_id', Integer) ### id - group_name = Column('group_name', String(255)) ### 组名称 - host_id = Column('host_id', Integer) ### id - host_name = Column('host_name', String(255)) ### 名称 - zabbix_hooks = Column('zabbix_hooks', Text()) ### 钩子 - # description = Column('description', String(255)) ### 描述、备注 - - -class ZabbixHookLog(Base): - __tablename__ = 'zabbix_hook_logs' - ### ZABBIX log信息 - id = Column('id', Integer, primary_key=True, autoincrement=True) - zabbix_url = Column('zabbix_url', String(150)) ### - logs_info = Column(LONGTEXT, nullable=True) # 报警信息 - create_time = Column('create_time', DateTime(), default=datetime.now) ### 创建时间 - - - -class ZabbixSubmitTaskConf(Base): - __tablename__ = 'zabbix_submit_task' - ### ZABBIX 钩子向任务平台提交任务 需要一个认证 - id = Column('id', Integer, primary_key=True, autoincrement=True) - task_url = Column('task_url', String(150)) ###任务系统接口url - auth_key = Column(LONGTEXT, nullable=True) ###auth_key +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/7/05 17:26 +# @Author : Fred Yangxiaofei +# @File : zabbix_mg.py +# @Role : ZABBIX ORM + + +from sqlalchemy import Column +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import class_mapper +from sqlalchemy import String, Integer, DateTime,Text +from sqlalchemy.dialects.mysql import LONGTEXT +from datetime import datetime + +Base = declarative_base() + + +def model_to_dict(model): + model_dict = {} + for key, column in class_mapper(model.__class__).c.items(): + model_dict[column.name] = getattr(model, key, None) + return model_dict + + +class ZabbixConfig(Base): + __tablename__ = 'zabbix_config' # ZABBIX账户配置信息 + id = Column(Integer, primary_key=True, autoincrement=True) # ID 自增长 + zabbix_name = Column(String(100), nullable=False) # 名称 + zabbix_url = Column(String(255), nullable=False) # zabbix的URL + zabbix_username = Column(String(50), nullable=False) # zabbix用户 + zabbix_password = Column(String(100), nullable=False) # zabbix密码 + + +class ZabbixHosts(Base): + __tablename__ = 'zabbix_hosts' + ### ZABBIX主机信息 + id = Column('id', Integer, primary_key=True, autoincrement=True) + zabbix_url = Column('zabbix_url', String(150)) ### + group_id = Column('group_id', Integer) ### id + group_name = Column('group_name', String(255)) ### 组名称 + host_id = Column('host_id', Integer) ### id + host_name = Column('host_name', String(255)) ### 名称 + zabbix_hooks = Column('zabbix_hooks', Text()) ### 钩子 + # description = Column('description', String(255)) ### 描述、备注 + + +class ZabbixHookLog(Base): + __tablename__ = 'zabbix_hook_logs' + ### ZABBIX log信息 + id = Column('id', Integer, primary_key=True, autoincrement=True) + zabbix_url = Column('zabbix_url', String(150)) ### + logs_info = Column(LONGTEXT, nullable=True) # 报警信息 + create_time = Column('create_time', DateTime(), default=datetime.now) ### 创建时间 + + + +class ZabbixSubmitTaskConf(Base): + __tablename__ = 'zabbix_submit_task' + ### ZABBIX 钩子向任务平台提交任务 需要一个认证 + id = Column('id', Integer, primary_key=True, autoincrement=True) + task_url = Column('task_url', String(150)) ###任务系统接口url + auth_key = Column(LONGTEXT, nullable=True) ###auth_key diff --git a/doc/requirements.txt b/requirements.txt similarity index 100% rename from doc/requirements.txt rename to requirements.txt diff --git a/static/__init__.py b/static/__init__.py index 
7404c1a..f05200e 100644 --- a/static/__init__.py +++ b/static/__init__.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# @Time : 2019/4/8 15:41 -# @Author : Fred Yangxiaofei -# @File : __init__.py.py +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2019/4/8 15:41 +# @Author : Fred Yangxiaofei +# @File : __init__.py.py # @Role : 说明脚本功能 \ No newline at end of file
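Aside from the line-ending normalization, one functional change above sits in libs/base_handler.py: the new file decodes auth_key with jwt.decode(auth_key, verify=False), while the removed side used options={"verify_signature": False}. Both skip signature verification and only unpack the payload; the verify flag is the PyJWT 1.x spelling and was dropped in PyJWT 2.0, where the options form is required (and it also works on 1.x). A small sketch of that decode with a locally generated demo token, not a real CODO auth_key:

```python
# Sketch of the unverified decode done in BaseHandler.prepare(); the token
# below is generated locally for the demo and is not a real CODO auth_key.
import jwt


def read_auth_payload(auth_key):
    # options={"verify_signature": False} skips the signature check and works
    # on PyJWT 1.x as well as 2.x; the bare verify=False flag used in the new
    # libs/base_handler.py was dropped in PyJWT 2.0.
    return jwt.decode(auth_key, options={"verify_signature": False}).get("data")


demo_token = jwt.encode(
    {"data": {"user_id": 1, "username": "demo", "is_superuser": False}},
    "local-demo-secret",
    algorithm="HS256",
)
print(read_auth_payload(demo_token))
```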