通過Python實現對SQL Server 數據文件大小的監控告警功能
1.需求背景
系統程序突然報錯,報錯信息如下:
The transaction log for database ‘@dbname’ is full. To find out why space in the log cannot be reused, see the log_reuse_wait_desc column in sys.databases
此時查看log文件,已達2T。
當時的緊急處理方案是,移除掉鏡像,修改數據庫恢復模式(由full修改為simple),收縮日志。
為了防止類似問題再次發生,需對log文件的大小進行監控,當到達閾值後,觸發告警。
2.主要基礎組件(類)
配置文件qqmssqltest_db_server_conf.ini
通過此配置文件獲取DB Server信息、DB信息、UID信息、郵件服務器信息等。
[sqlserver]
db_user = XXXXXX
db_pwd = XXXXXXX

[sqlserver_qq]
db_host = 110.119.120.114
db_port = 1433

[windows]
user =
pwd =

[mail]
host = zheshiceshidemail.qq.com
port = 25
user =
pwd =
sender = [email protected]
獲取連接串的組件mssql_get_db_connect.py
# -*- coding: utf-8 -*- import sys import os import datetime import configparser import pymssql # pip3 install pymssql-2.1.4-cp37-cp37m-win_amd64.whl # pip3 install pymssql -i https://pypi.doubanio.com/simple # 獲取連接串信息 def mssql_get_db_connect(db_host, db_port): db_host = db_host db_port = db_port db_ps_file = os.path.join(sys.path[0], "qqmssqltest_db_server_conf.ini") config = configparser.ConfigParser() config.read(db_ps_file, encoding="utf-8") db_user = config.get('sqlserver', 'db_user') db_pwd = config.get('sqlserver', 'db_pwd') conn = pymssql.connect(host=db_host, port=db_port, user=db_user, password=db_pwd, charset="utf8", login_timeout=5, timeout=600, autocommit=True) return conn
執行SQL語句的組件mysql_exec_sql.py
# -*- coding: utf-8 -*- import mysql_get_db_connect def mysql_exec_dml_sql(db_host, db_port, exec_sql): conn = mysql_get_db_connect.mysql_get_db_connect(db_host, db_port) with conn.cursor() as cursor_db: cursor_db.execute(exec_sql) conn.commit() def mysql_exec_select_sql(db_host, db_port, exec_sql): conn = mysql_get_db_connect.mysql_get_db_connect(db_host, db_port) with conn.cursor() as cursor_db: cursor_db.execute(exec_sql) sql_rst = cursor_db.fetchall() return sql_rst def mysql_exec_select_sql_include_colnames(db_host, db_port, exec_sql): conn = mysql_get_db_connect.mysql_get_db_connect(db_host, db_port) with conn.cursor() as cursor_db: cursor_db.execute(exec_sql) sql_rst = cursor_db.fetchall() col_names = cursor_db.description return sql_rst, col_names
發郵件的功能send_monitor_mail.py
# -*- coding: utf-8 -*- # pip3 install PyEmail import smtplib from email.mime.text import MIMEText import configparser import os import sys # 發送告警郵件 def send_monitor_mail(mail_subject, mail_body, mail_receivers="[email protected]"): db_ps_file = os.path.join(sys.path[0], "qqmssqltest_db_server_conf.ini") config = configparser.ConfigParser() config.read(db_ps_file, encoding="utf-8") mail_host = config.get('mail', 'host') mail_port = config.get('mail', 'port') # mail_user = config.get('mail', 'user') # mail_pwd = config.get('mail', 'pwd') sender = config.get('mail', 'sender') # receivers = config.get('mail', 'receivers') # 發送HTML格式郵件 message = MIMEText(mail_body, 'html', 'utf-8') # message = MIMEText(mail_body, 'plain', 'utf-8') message['subject'] = mail_subject message['From'] = sender message['To'] = mail_receivers try: smtpObj = smtplib.SMTP() smtpObj.connect(mail_host, mail_port) # 25 為 SMTP 端口號 # SMTP AUTH extension not supported by server. # https://github.com/miguelgrinberg/microblog/issues/76 # smtpObj.ehlo() # smtpObj.starttls() # smtpObj.login(mail_user, mail_pwd) smtpObj.sendmail(sender, mail_receivers, message.as_string()) smtpObj.quit() print("郵件發送成功") except Exception as e: print(e) # except smtplib.SMTPException: # print("Error: 無法發送郵件")
3.主要功能代碼
收集到的DB數據文件的信息保存到表mssql_dblogsize中,其建表的腳本如下:
CREATE TABLE [dbo].[mssql_dblogsize]( [id] [int] IDENTITY(1,1) NOT NULL, [createtime] [datetime] NULL, [vip] [nvarchar](100) NULL, [port] [nvarchar](100) NULL, [Environment] [nvarchar](200) NULL, [Dbname] [varchar](200) NULL, [Logical_Name] [varchar](200) NULL, [Physical_Name] [varchar](1500) NULL, [Size] [bigint] NULL, PRIMARY KEY CLUSTERED ( [id] ASC )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] ) ON [PRIMARY] GO ALTER TABLE [dbo].[mssql_dblogsize] ADD DEFAULT (getdate()) FOR [createtime] GO
為了方便對表mssql_dblogsize的數據進行管理和展示,在其基礎上抽象加工出了一個視圖v_mssql_dblogsize,注意Size大小的轉換(Size/128/1024 as SizeGB)
創建視圖的腳本如下:
CREATE view [dbo].[v_mssql_dblogsize] as SELECT [id] ,[createtime] ,[vip] ,[port] ,[Environment] ,[Dbname] ,[Logical_Name] ,[Physical_Name] ,Size/128/1024 as SizeGB FROM [dbo].[mssql_dblogsize] where size >50*128*1024 and Physical_Name like '%ldf%' GO
本測試實例使用的數據庫為qqDB,監控的各個DB Server保存在了表QQDBServer中,注意Port不一定為標準端口1433。
collect_mssql_dblogsize_info.py
# -*- coding: utf-8 -*- import sys import os import configparser import pymssql import mssql_get_db_connect import mssql_exec_sql from datetime import datetime def collect_mssql_dblogsize_info(): db_ps_file = os.path.join(sys.path[0], "qqmssqltest_db_server_conf.ini") config = configparser.ConfigParser() config.read(db_ps_file, encoding="utf-8") m_db_host = config.get('sqlserver_qq', 'db_host') m_db_port = config.getint('sqlserver_qq', 'db_port') # 獲取需要遍歷的DB列表 exec_sql_1 = """ SELECT IP, case Port when '1444,1433' then '1433' else Port end as Port, Environment FROM qqDB.dbo.QQDBServer where InUse =1 AND ServerType IN ('SQL') and IP=VIP ; """ sql_rst_1 = mssql_exec_sql.mssql_exec_select_sql(m_db_host, m_db_port, exec_sql_1) for j in sql_rst_1: db_host_2 = j[0] db_port_2 = j[1] db_Environment = j[2] exec_sql_2 = """ select '""" + db_host_2 + """' as vip, '""" + db_port_2 + """' as port, '""" + db_Environment + """' as Environment,DB_NAME(database_id) AS DatabaseName, Name AS Logical_Name, Physical_Name, size FROM master.sys.master_files; """ try: sql_rst_2 = mssql_exec_sql.mssql_exec_select_sql(db_host_2, db_port_2, exec_sql_2) except Exception as e: print(e) for k in sql_rst_2: exec_sql_3 = """ insert into qqDB..mssql_dblogsize([vip], [port], [Environment], [Dbname], [Logical_Name], [Physical_Name], [Size]) values('%s', '%s', '%s', '%s', '%s', '%s', '%s'); """ conn = mssql_get_db_connect.mssql_get_db_connect(m_db_host, m_db_port) with conn.cursor() as cursor_db: cursor_db.execute(exec_sql_3 % (k[0], k[1], k[2], k[3], k[4], k[5], k[6] )) conn.commit() collect_mssql_dblogsize_info()
告警郵件的功能實現為mssql_alert_dblogsize.py,此份代碼的告警閾值設置的為50G,數據來自於視圖v_mssql_dblogsize。
# -*- coding: utf-8 -*- import sys import os import configparser import pymssql import mssql_get_db_connect import mssql_exec_sql import datetime import send_monitor_mail import pandas as pd def mssql_alert_dblogsize(): mail_subject = "SQL Server DB Log Size Greater than 50G, please check!!! " mail_receivers = "[email protected]" db_ps_file = os.path.join(sys.path[0], "qqmssqltest_db_server_conf.ini") config = configparser.ConfigParser() config.read(db_ps_file, encoding="utf-8") m_db_host = config.get('sqlserver_qq', 'db_host') m_db_port = config.getint('sqlserver_qq', 'db_port') # 獲取需要遍歷的DB列表 exec_sql_4 = """ SELECT [vip] as IP,[port],[Environment],[Dbname] ,[Logical_Name],[Physical_Name],[SizeGB],[createtime] FROM qqDB.[dbo].[v_mssql_dblogsize] order by VIP,Dbname; """ sql_rst_4, col_name = mssql_exec_sql.mssql_exec_select_sql_include_colnames(m_db_host, m_db_port, exec_sql_4) # print(sql_rst_4) if len(sql_rst_4): mail_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') columns = [] for i in range(len(col_name)): columns.append(col_name[i][0]) df = pd.DataFrame(columns=columns) for i in range(len(sql_rst_4)): df.loc[i] = list(sql_rst_4[i]) mail_body = df.to_html(index=False, justify="left").replace('<th>', '<th style = "color:red; text-align:left; background-color: yellow">') mail_html = "<html><body><h4>" + "Deal All : " + "<br><h4>" + "以下數據庫的db log文件,已大於50G.請及時檢查,謝謝! " + "<br><h4>" + mail_body + "</body></html>" send_monitor_mail.send_monitor_mail(mail_subject=mail_subject, mail_body=mail_html, mail_receivers=mail_receivers) mssql_alert_dblogsize()
4.實現
定時任務是通過windows的計劃任務來實現的,在此不做過多的敘述。告警郵件的部分截圖如下:
5.附錄
1.報錯定位,判斷是不是log文件過大
https://blog.csdn.net/weixin_30785593/article/details/99912405
2.關於為什麼數據庫log文件過大,我們可以參考以下分享的文章
https://blog.csdn.net/chinadm123/article/details/44941275
到此這篇關於通過Python實現對SQL Server 數據文件大小的監控告警的文章就介紹到這裡了,更多相關Python SQL Server 數據監控告警內容請搜索WalkonNet以前的文章或繼續瀏覽下面的相關文章,希望大家以後多多支持WalkonNet!
推薦閱讀:
- None Found