diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..359bb53
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,3 @@
+# 默认忽略的文件
+/shelf/
+/workspace.xml
diff --git a/.idea/AirQuility.iml b/.idea/AirQuility.iml
new file mode 100644
index 0000000..7a85463
--- /dev/null
+++ b/.idea/AirQuility.iml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..5cc6554
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..ed8021f
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..35eb1dd
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/AirQuilitySys/.idea/vcs.xml b/AirQuilitySys/.idea/vcs.xml
new file mode 100644
index 0000000..6c0b863
--- /dev/null
+++ b/AirQuilitySys/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/AirQuilitySys/AirQui.py b/AirQuilitySys/AirQui.py
index 339e28b..96e7552 100644
--- a/AirQuilitySys/AirQui.py
+++ b/AirQuilitySys/AirQui.py
@@ -1,76 +1,50 @@
-import matplotlib.pyplot as plt
-# 创建图形和坐标轴对象,指定尺寸和DPI
-fig, ax = plt.subplots(figsize=(13.33,7.5), dpi = 96)
-# 绘制折线
-for country in top_20_countries:
- data = df[df['Country Name'] == country]
- line = ax.plot(data['Year'], data['GDP'], label=country)
-# 添加图例
-ax.legend(loc="best", fontsize=8)
-# 创建网格
-ax.grid(which="major", axis='x', color='#DAD8D7', alpha=0.5, zorder=1)
-ax.grid(which="major", axis='y', color='#DAD8D7', alpha=0.5, zorder=1)
-# 重新格式化x轴标签和刻度线标签
-ax.set_xlabel('', fontsize=12, labelpad=10)
-# 不需要轴标签
-ax.xaxis.set_label_position("bottom")
-ax.xaxis.set_major_formatter(lambda s, i : f'{s:,.0f}')
-#以防万一我们需要额外的格式设置
-ax.xaxis.set_major_locator(MaxNLocator(integer=True))
-#以防我们需要额外的格式化
-ax.xaxis.set_tick_params(pad=2, labelbottom=True, bottom=True, labelsize=12, labelrotation=0)
-# 重新格式化y轴
-ax.set_ylabel('GDP (Billions USD)', fontsize=12, labelpad=10)
-ax.yaxis.set_label_position("left")
-ax.yaxis.set_major_formatter(lambda s, i : f'{s*10**-9:,.0f}')
-ax.yaxis.set_major_locator(MaxNLocator(integer=True))
-#以防我们需要额外的格式化
-ax.yaxis.set_tick_params(pad=2, labeltop=False, labelbottom=True, bottom=False, labelsize=12)
+import tkinter as tk
+import subprocess
+import sys
-# 颜色和线条样式
-colors_dict = {'United States': '#014f86', 'China': '#DC0000', 'Japan': '#ff4d6d', 'Germany': '#403d39', 'India': '#6a994e'}
def run_DayAir():
    """Launch DayAir.py (daily air-quality line chart) in a child process."""
    try:
        # Raw string: the original literal relied on invalid escape sequences
        # (\P, \A, \S, ...), which are a SyntaxWarning today and will become
        # errors in future Python versions. The runtime value is unchanged.
        # NOTE(review): hard-coded venv interpreter path; sys.executable
        # would be portable — confirm before changing.
        subprocess.run(
            [r"D:\Pycharm\Project\AirQuilitySys\.venv\Scripts\python.exe", "DayAir.py"],
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"执行脚本时发生错误: {e}")
-line_styles_dict = {'United States': '-', 'China': '-', 'Japan': '-', 'Germany': '-', 'India': '-'}
-# 绘制前5条线
-for country in top_20_countries[:5]:
- color = colors_dict.get(country, 'grey')
- # 从字典中获取颜色,如果找不到,默认为灰色
- line_style = line_styles_dict.get(country, '-')
- # 从字典中获取线条样式,如果未找到,默认为实线
- data = df[df['Country Name'] == country]
- line = ax.plot(data['Year'], data['GDP'], color=color, linestyle=line_style, zorder=2, label=country)
- # 添加图例
- ax.legend(loc="best", fontsize=8)
- # 绘制剩余部分
- for country in top_20_countries[5:]:
- data = df[df['Country Name'] == country]
- line = ax.plot(data['Year'], data['GDP'], color='grey', linestyle=':', linewidth=0.5, zorder=2)
-# 移除边框
-ax.spines[['top','right','bottom']].set_visible(False)
-# 加粗左侧边框
-ax.spines['left'].set_linewidth(1.1)
-# 在顶部添加红线和矩形
-ax.plot([0.05, .9], [.98, .98], transform=fig.transFigure, clip_on=False, color='#E3120B', linewidth=.6)
-ax.add_patch(plt.Rectangle((0.05,.98), 0.04, -0.02, facecolor='#E3120B', transform=fig.transFigure, clip_on=False, linewidth = 0))
-# 添加标题和副标题
-ax.text(x=0.05, y=.93, s="Evolution of the 20 Richest Countries GDP over the Past 50 Years", transform=fig.transFigure, ha='left', fontsize=14, weight='bold', alpha=.8)
-ax.text(x=0.05, y=.90, s="Focus on the current 5 richest countries from 1973 to 2022", transform=fig.transFigure, ha='left', fontsize=12, alpha=.8)
-# 设置来源文本
-ax.text(x=0.05, y=0.12, s="Source: World Bank - https://databank.worldbank.org/", transform=fig.transFigure, ha='left', fontsize=10, alpha=.7)
-# 调整绘图区域周围的边距
-plt.subplots_adjust(left=None, bottom=0.2, right=None, top=0.85, wspace=None, hspace=None)
-# 设置白色背景
-fig.patch.set_facecolor('white')
-# 绘制前5条线
-for country in top_20_countries[:5]:
- color = colors_dict.get(country, 'grey')
- # 从字典中获取颜色,如果找不到,默认为黑色
- line_style = line_styles_dict.get(country, '-')
- # 从字典中获取线条样式,如果找不到,默认为实线
- data = df[df['Country Name'] == country]
- line = ax.plot(data['Year'], data['GDP'], color=color, linestyle=line_style, zorder=2, label = country)
- ax.plot(data['Year'].iloc[-1], data['GDP'].iloc[-1], 'o', color=color, markersize=10, alpha=0.3)
- ax.plot(data['Year'].iloc[-1], data['GDP'].iloc[-1], 'o', color=color, markersize=5)
- # 在图表上添加一些文字
- ax.annotate('During the 2000s,\nChina began experiencing rapid economic growth,\noutpacing all other countries.', (data['Year'].iloc[-18], 2000000000000), xytext=(data['Year'].iloc[-28]-timedelta(days=500), 18000000000000), ha='left', fontsize=9, arrowprops=dict(arrowstyle='-|>', facecolor='k', connectionstyle="arc3,rad=-0.15"))
def run_DayAirzhu():
    """Launch DayAirzhu.py (daily air-quality bar chart) in a child process."""
    try:
        # Raw string fixes the invalid escape sequences (\P, \A, \S, ...)
        # in the original path literal; runtime value is unchanged.
        # NOTE(review): hard-coded venv interpreter path; sys.executable
        # would be portable — confirm before changing.
        subprocess.run(
            [r"D:\Pycharm\Project\AirQuilitySys\.venv\Scripts\python.exe", "DayAirzhu.py"],
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"执行脚本时发生错误: {e}")
def run_DayAirbing():
    """Launch DayAirbing.py (monthly PM2.5 averages) in a child process."""
    try:
        # Raw string fixes the invalid escape sequences (\P, \A, \S, ...)
        # in the original path literal; runtime value is unchanged.
        # NOTE(review): hard-coded venv interpreter path; sys.executable
        # would be portable — confirm before changing.
        subprocess.run(
            [r"D:\Pycharm\Project\AirQuilitySys\.venv\Scripts\python.exe", "DayAirbing.py"],
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"执行脚本时发生错误: {e}")
+
def run_time():
    """Launch time.py (future-prediction report) in a child process."""
    try:
        # Raw string fixes the invalid escape sequences (\P, \A, \S, ...)
        # in the original path literal; runtime value is unchanged.
        # NOTE(review): hard-coded venv interpreter path; sys.executable
        # would be portable — confirm before changing.
        subprocess.run(
            [r"D:\Pycharm\Project\AirQuilitySys\.venv\Scripts\python.exe", "time.py"],
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"执行脚本时发生错误: {e}")
+
# Build the main menu window: one button per report script.
window = tk.Tk()
window.title("执行外部程序示例")

btn_line_chart = tk.Button(window, text="查看每日空气折线分布图", command=run_DayAir)
btn_line_chart.pack(padx=20, pady=20)

btn_bar_chart = tk.Button(window, text="查看每日空气柱状图", command=run_DayAirzhu)
btn_bar_chart.pack(padx=20, pady=20)

btn_pm_average = tk.Button(window, text="查看每月pm2.5平均指数", command=run_DayAirbing)
btn_pm_average.pack(padx=20, pady=20)

btn_forecast = tk.Button(window, text="查看未来预测数据", command=run_time)
btn_forecast.pack(padx=20, pady=20)

# Enter the Tk event loop (blocks until the window is closed).
window.mainloop()
\ No newline at end of file
diff --git a/AirQuilitySys/DayAir.py b/AirQuilitySys/DayAir.py
new file mode 100644
index 0000000..7150b68
--- /dev/null
+++ b/AirQuilitySys/DayAir.py
@@ -0,0 +1,123 @@
+import logging
+import pymysql
+import subprocess
+import numpy as np
+from matplotlib import pyplot as plt
+from tkinter import Tk
+from pyecharts import options as _opts
+from pyecharts.charts import Bar
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
def get_data(db_config):
    """Fetch the ptime, ci, so, no and pm columns for the month of May.

    :param db_config: keyword arguments for pymysql.connect; must configure
                      a DictCursor (rows are consumed as dicts below).
    :return: tuple of five lists (ptime, ci, so, no, pm);
             five empty lists on a database error.
    """
    def fetch_column(cursor, table, column):
        # Single-column fetch; replaces five copy-pasted query stanzas.
        # Table/column names are literals from this module, not user input.
        cursor.execute(f"SELECT {column} FROM {table}")
        return [row[column] for row in cursor.fetchall() if column in row]

    try:
        with pymysql.connect(**db_config) as conn:
            logging.info("数据库连接成功")
            with conn.cursor() as cursor:
                # NOTE(review): the original queries "May" for ptime but "may"
                # for every other column; preserved as-is — confirm against
                # the schema (case sensitivity depends on the MySQL host OS).
                ptime_list = fetch_column(cursor, "May", "ptime")
                ci_list = fetch_column(cursor, "may", "ci")
                so_list = fetch_column(cursor, "may", "so")
                no_list = fetch_column(cursor, "may", "no")
                pm_list = fetch_column(cursor, "may", "pm")
        return ptime_list, ci_list, so_list, no_list, pm_list
    except pymysql.err.OperationalError as e:
        logging.error(f"数据库操作失败: {e}")
        # Callers treat empty lists as "no data"; keep that contract.
        return [], [], [], [], []
def visualize_data(ptime_list, ci_list, so_list, no_list, pm_list):
    """Plot CI, SO2, NO2 and PM2.5 as line series over ptime.

    Logs an error and returns without plotting if any series is empty.
    """
    if any(not series for series in
           (ptime_list, ci_list, so_list, no_list, pm_list)):
        logging.error("数据为空,无法进行可视化")
        return

    plt.figure(figsize=(15, 10), dpi=80)
    plt.xlabel('Ptime')
    plt.title('CI over time')
    # One line per pollutant, all sharing the ptime x-axis.
    for values, label in ((ci_list, 'CI'), (so_list, 'SO2'),
                          (no_list, 'NO2'), (pm_list, 'PM2.5')):
        plt.plot(ptime_list, values, label=label)
    plt.legend()
    plt.show()
+
+
+
if __name__ == "__main__":
    # Local MySQL settings; DictCursor so rows come back as dicts.
    # NOTE(review): credentials are hard-coded — prefer environment variables.
    db_config = {
        "host": '127.0.0.1',
        "user": "root",
        "password": 'mysql>hyx123',
        "db": 'airquility',
        "charset": 'utf8',
        "cursorclass": pymysql.cursors.DictCursor,
    }

    columns = get_data(db_config)
    visualize_data(*columns)

    print(pymysql.__version__)
+
diff --git a/AirQuilitySys/DayAirbing.py b/AirQuilitySys/DayAirbing.py
new file mode 100644
index 0000000..4ed7464
--- /dev/null
+++ b/AirQuilitySys/DayAirbing.py
@@ -0,0 +1,154 @@
+import logging
+import pymysql
+import subprocess
+import numpy as np
+from matplotlib import pyplot as plt
+#%matplotlib inline
+from tkinter import Tk
+from pyecharts import options as _opts
+from pyecharts.charts import Bar
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+#db_config={'host':'127.0.0.1','user':'root','password':'mysql>hyx123','db':'airquility','charset':'utf8'}
+#ptimelist=[]
+#cilist=[]
+#data_dict=dict(zip(ptimelist,cilist))
+
+jan_list = []
+feb_list = []
+mar_list = []
+apr_list = []
+may_list = []
+ptime_list = []
+
def get_data(db_config):
    """Fetch day labels from `May` and the pm column from each month table.

    :param db_config: keyword arguments for pymysql.connect; must configure
                      a DictCursor (rows are consumed as dicts below).
    :return: (ptime_list, may_list, feb_list, jan_list, mar_list, apr_list);
             six empty lists on a database error.
    """
    def fetch_column(cursor, table, column):
        # Single-column fetch; replaces six copy-pasted query stanzas.
        # Table/column names are literals from this module, not user input.
        cursor.execute(f"SELECT {column} FROM {table}")
        return [row[column] for row in cursor.fetchall() if column in row]

    try:
        with pymysql.connect(**db_config) as conn:
            logging.info("数据库连接成功")
            with conn.cursor() as cursor:
                # NOTE(review): "May" vs "may"/"jan"/... casing preserved
                # from the original — confirm against the actual schema.
                ptime_list = fetch_column(cursor, "May", "ptime")
                may_list = fetch_column(cursor, "may", "pm")
                feb_list = fetch_column(cursor, "feb", "pm")
                jan_list = fetch_column(cursor, "jan", "pm")
                mar_list = fetch_column(cursor, "mar", "pm")
                apr_list = fetch_column(cursor, "apr", "pm")
        return ptime_list, may_list, feb_list, jan_list, mar_list, apr_list
    except pymysql.err.OperationalError as e:
        logging.error(f"数据库操作失败: {e}")
        # Callers treat empty lists as "no data"; keep that contract.
        return [], [], [], [], [], []
+
def average(str_list):
    """Return the arithmetic mean of a list of numeric strings.

    An empty (or falsy) input yields 0, which callers treat as "no data".
    """
    if not str_list:
        return 0
    values = [float(item) for item in str_list]
    return sum(values) / len(values)
+
+
+
def visualize_data(ptime_list, may_list, feb_list, jan_list, mar_list, apr_list):
    """
    Visualize monthly PM2.5 data as bar charts.

    :param ptime_list: x-axis labels (day values from the `May` table)
    :param may_list: per-day PM2.5 readings for May (likewise feb/jan/mar/apr)
    """
    # Only three of the five monthly series are validated here.
    # NOTE(review): mar_list and apr_list are used below but never checked
    # for emptiness — confirm whether that is intentional.
    if not may_list or not feb_list or not jan_list:
        logging.error("数据为空,无法进行可视化")
        return
    # NOTE(review): 26 is hard-coded; it presumably matches len(ptime_list).
    # If not, the second bar series will be misaligned or raise — verify.
    x = np.arange(26)
    bar_width = 0.2
    # Collapse each month's readings into one mean value.
    jan_value = average(jan_list)
    feb_value = average(feb_list)
    may_value = average(may_list)
    mar_value = average(mar_list)
    apr_value = average(apr_list)
    # Create a named figure — leftover scaffolding for the pie chart that is
    # commented out below; as written this figure is shown empty.
    fig = plt.figure("average_pm2.5")

    # Add one axes spanning the whole figure.
    ax = fig.add_axes([0, 0, 1, 1])
    # Equal aspect so a pie chart would render as a circle.
    ax.axis('equal')
    # NOTE(review): apr_value and may_value are scalars, so every bar within
    # each series has the same height — presumably the per-day lists (or the
    # five monthly means) were intended instead; confirm the intent.
    plt.figure(figsize=(15, 10), dpi=80)
    plt.bar(ptime_list, apr_value, tick_label=ptime_list, width=bar_width)
    plt.bar(x + bar_width, may_value, width=bar_width)
    plt.show()
+
+
+
if __name__ == "__main__":
    # Local MySQL settings; DictCursor so rows come back as dicts.
    # NOTE(review): credentials are hard-coded — prefer environment variables.
    db_config = {
        "host": '127.0.0.1',
        "user": "root",
        "password": 'mysql>hyx123',
        "db": 'airquility',
        "charset": 'utf8',
        "cursorclass": pymysql.cursors.DictCursor,
    }

    monthly_data = get_data(db_config)
    visualize_data(*monthly_data)
+
diff --git a/AirQuilitySys/DayAirzhu.py b/AirQuilitySys/DayAirzhu.py
new file mode 100644
index 0000000..c9f9fc6
--- /dev/null
+++ b/AirQuilitySys/DayAirzhu.py
@@ -0,0 +1,132 @@
+import logging
+import pymysql
+import subprocess
+import numpy as np
+from matplotlib import pyplot as plt
+from tkinter import Tk
+from pyecharts import options as _opts
+from pyecharts.charts import Bar
+
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+#db_config={'host':'127.0.0.1','user':'root','password':'mysql>hyx123','db':'airquility','charset':'utf8'}
+#ptimelist=[]
+#cilist=[]
+#data_dict=dict(zip(ptimelist,cilist))
+
def get_data(db_config):
    """Fetch the ptime, ci, so, no and pm columns for the month of May.

    :param db_config: keyword arguments for pymysql.connect; must configure
                      a DictCursor (rows are consumed as dicts below).
    :return: tuple of five lists (ptime, ci, so, no, pm);
             five empty lists on a database error.
    """
    def fetch_column(cursor, table, column):
        # Single-column fetch; replaces five copy-pasted query stanzas.
        # Table/column names are literals from this module, not user input.
        cursor.execute(f"SELECT {column} FROM {table}")
        return [row[column] for row in cursor.fetchall() if column in row]

    try:
        with pymysql.connect(**db_config) as conn:
            logging.info("数据库连接成功")
            with conn.cursor() as cursor:
                # NOTE(review): "May" vs "may" casing preserved from the
                # original — confirm against the actual schema.
                ptime_list = fetch_column(cursor, "May", "ptime")
                ci_list = fetch_column(cursor, "may", "ci")
                so_list = fetch_column(cursor, "may", "so")
                no_list = fetch_column(cursor, "may", "no")
                pm_list = fetch_column(cursor, "may", "pm")
        return ptime_list, ci_list, so_list, no_list, pm_list
    except pymysql.err.OperationalError as e:
        logging.error(f"数据库操作失败: {e}")
        # Callers treat empty lists as "no data"; keep that contract.
        return [], [], [], [], []
def visualize_data(ptime_list, ci_list, so_list, no_list, pm_list):
    """Draw grouped bars of CI and SO2 per day.

    no_list and pm_list are accepted (and validated) but not plotted — their
    bar series were disabled in the original implementation.
    """
    if not (ptime_list and ci_list and so_list and no_list and pm_list):
        logging.error("数据为空,无法进行可视化")
        return

    plt.figure(figsize=(15, 10), dpi=80)
    offsets = np.arange(26)
    group_width = 0.2
    plt.xlabel('Ptime')
    # CI bars labelled by ptime; SO2 bars shifted right by one bar width.
    plt.bar(ptime_list, ci_list, tick_label=ptime_list, width=group_width)
    plt.bar(offsets + group_width, so_list, width=group_width)
    plt.title('CI over time')
    plt.show()
+
+
+
if __name__ == "__main__":
    # Local MySQL settings; DictCursor so rows come back as dicts.
    # NOTE(review): credentials are hard-coded — prefer environment variables.
    db_config = {
        "host": '127.0.0.1',
        "user": "root",
        "password": 'mysql>hyx123',
        "db": 'airquility',
        "charset": 'utf8',
        "cursorclass": pymysql.cursors.DictCursor,
    }

    columns = get_data(db_config)
    visualize_data(*columns)

    print(pymysql.__version__)
+
diff --git a/AirQuilitySys/LSTM_airqui.py b/AirQuilitySys/LSTM_airqui.py
new file mode 100644
index 0000000..be48538
--- /dev/null
+++ b/AirQuilitySys/LSTM_airqui.py
@@ -0,0 +1,63 @@
+import keras
+import matplotlib.pyplot as plt
+import pandas as pd
+import tensorflow as tf
+import numpy as np
+from sklearn.preprocessing import MinMaxScaler
+from keras.layers import Dense, LSTM, Activation, Dropout
+from keras.optimizers import SGD
+
# Number of feature columns fed to the LSTM and the look-back window length.
colum = 13
step = 50
# NOTE(review): '文件名' is a placeholder ("filename"); it must be replaced
# with the real CSV path before this script can run.
df = pd.read_csv('文件名')
# First 24000 rows train; the remainder tests.
df_for_training = df[:24000]
df_for_testing = df[24000:]

# Scale features to [-1, 1]; the first two columns (metadata) are skipped.
scaler = MinMaxScaler(feature_range=(-1, 1))
df_for_training_scaled = scaler.fit_transform(df_for_training.iloc[:, 2:])
# Bug fix: df_for_testing_scaled is consumed below but was never created,
# which raised NameError. Use transform (not fit_transform) so the test set
# reuses the scale learned from the training data.
df_for_testing_scaled = scaler.transform(df_for_testing.iloc[:, 2:])
print("df_for_training_scaled shape:", df_for_training_scaled.shape)
# Slice a scaled dataset into rolling feature windows and next-step labels.
def createXY(dataset, n_past):
    """Return (X, Y) where X[i] holds the previous n_past rows of columns
    1..end and Y[i] is column 0 of the row immediately after that window."""
    dataX, dataY = [], []
    for end in range(n_past, len(dataset)):
        dataX.append(dataset[end - n_past:end, 1:dataset.shape[1]])
        dataY.append(dataset[end, 0])
    return np.array(dataX), np.array(dataY)
# trainX/testX have shape [samples, steps, features].
trainX, trainY = createXY(df_for_training_scaled, step)
# NOTE(review): confirm df_for_testing_scaled is defined before this point —
# at review time no earlier line created it, so this would raise NameError.
testX, testY = createXY(df_for_testing_scaled, step)
+
# Assemble and compile the two-layer LSTM regressor.
def build_model():
    """Return a compiled Sequential model:
    LSTM(20) -> LSTM(20) -> ReLU -> Dropout(0.2) -> Dense(1), MAE loss."""
    model = tf.keras.models.Sequential()
    # Empirically, return_sequences must be True when the next layer is
    # another LSTM (it needs the full sequence, not just the last state).
    # Bug fix: the original referenced undefined `column`; the module-level
    # constant is spelled `colum`.
    model.add(LSTM(20, input_shape=(step, colum), return_sequences=True))
    model.add(LSTM(20))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(1))
    # `lr` was renamed `learning_rate` in modern Keras optimizers; the old
    # keyword is rejected by current releases.
    model.compile(loss='mae', optimizer=SGD(learning_rate=0.1, momentum=0.3))
    return model
+
lstm_net = build_model()
lstm_net.fit(trainX, trainY, batch_size=8, epochs=100)
predictions = lstm_net.predict(testX)

# Undo the MinMax scaling: the scaler expects colum+1 columns, so tile the
# single predicted column across all of them and keep column 0 afterwards.
# Bug fix: both np.repeat calls referenced undefined `column`; the module
# constant is spelled `colum`.
test_Predict_copy = np.repeat(predictions, colum + 1, axis=-1)
test_Predict_inverse = scaler.inverse_transform(test_Predict_copy)[:, 0]
# Ground-truth series aligned past the 50-step look-back window.
test_label = np.array(df_for_testing['omega_vsg_1'])[50:]
testY_copy = np.repeat(np.reshape(testY, (len(testY), 1)), colum + 1, axis=-1)

# Bug fix: the original chained assignment (`testY_inverse = scaler = ...`)
# overwrote `scaler` with a NumPy array, breaking any later scaler use.
testY_inverse = scaler.inverse_transform(testY_copy)[:, 0]

plt.plot(test_Predict_inverse, color='red')
plt.plot(test_label, color='green')
plt.plot(testY_inverse, color='blue')
plt.savefig('result_test.png')
\ No newline at end of file
diff --git a/AirQuilitySys/Login.py b/AirQuilitySys/Login.py
index 8aef3dd..e5d16ea 100644
--- a/AirQuilitySys/Login.py
+++ b/AirQuilitySys/Login.py
@@ -1,6 +1,19 @@
from tkinter import *
+import subprocess
+import sys
+from tkinter import messagebox
user_login={'aaa':'123456','bbb':'888888','ccc':'333333'}
count=0
+
def run_AirQui():
    """Launch the main menu (AirQui.py), then exit this login process."""
    try:
        # Raw string: the original literal relied on invalid escape sequences
        # (\P, \A, \S, ...), a SyntaxWarning today and an error in future
        # Python versions. The runtime value is unchanged.
        # NOTE(review): hard-coded venv interpreter path; sys.executable
        # would be portable — confirm before changing.
        subprocess.run(
            [r"D:\Pycharm\Project\AirQuilitySys\.venv\Scripts\python.exe", "AirQui.py"],
            check=True,
        )
        # The subprocess.run above blocks; quit once the menu is closed.
        sys.exit(0)
    except subprocess.CalledProcessError as e:
        print(f"执行脚本时发生错误: {e}")
def login():
global count
username=entry_username.get()
@@ -10,13 +23,15 @@ def login():
password=entry_password.get()
if(password==user_login[username]):
label_message.config(text='登录成功!')
+ run_AirQui()
+
else:
label_message.config(text='密码错误!还可以尝试{}次'.format(2-count))
count=count+1
if count == 3:
label_message.config(text='登录失败!')
- btn_login.config(state="disabled")
-
+ button.config(state="disabled")
+# 创建主窗口
window=Tk()
window.title("用户登录")
window.geometry("300x200")
@@ -33,7 +48,13 @@ label_password.pack()
entry_password=Entry(window,show='*')
entry_password.pack()
+# 创建一个按钮,点击时调用run_another_program函数
+button = Button(window, text="登录", command=login)
+button.pack() # 设置按钮布局
+
label_message=Label(window,text="")
label_message.pack()
-window.mainloop()
\ No newline at end of file
+window.mainloop()
+# 运行Tkinter事件循环
+
diff --git a/AirQuilitySys/airquility.db b/AirQuilitySys/airquility.db
new file mode 100644
index 0000000..e69de29
diff --git a/AirQuilitySys/data.py b/AirQuilitySys/data.py
index 4624836..12e9783 100644
--- a/AirQuilitySys/data.py
+++ b/AirQuilitySys/data.py
@@ -2,6 +2,6 @@ import requests
res = requests.get('http://www.daqi110.com/content/environment/index.html')
print(res)
print(type(res))
->>>
-
-
\ No newline at end of file
+#>>>
+#
+#
\ No newline at end of file
diff --git a/AirQuilitySys/model.h5 b/AirQuilitySys/model.h5
new file mode 100644
index 0000000..21db3c3
Binary files /dev/null and b/AirQuilitySys/model.h5 differ
diff --git a/AirQuilitySys/replace.py b/AirQuilitySys/replace.py
index 8c72462..4c35c53 100644
--- a/AirQuilitySys/replace.py
+++ b/AirQuilitySys/replace.py
@@ -1,7 +1,5 @@
import re
-path = '时间数据'+'txt'
-with open(path,'r+',encoding='utf-8')as f1:
- text1 =f1.read()
- pattern=r'{"ci":'
- replacement=''
- text2=re.sub(pattern,'',text1)
+pattern='(","co":)|(,"no2":)|(,"o3":)|(,"pm10":)|(,"pm25":)|(,"ptime":)|(,"so2":)'
+a='4.8/株洲/1.3,"no2":35,"o3":86,"pm10":82,"pm25":62,"ptime":1706889600000,"so2":7},4.79/株洲/1.3,"no2":35,"o3":86,"pm10":81,"pm25":62,"ptime":1706976000000,"so2":7},4.71/株洲/1.3,"no2":34,"o3":86,"pm10":80,"pm25":61,"ptime":1707062400000,"so2":7},4.66/株洲/1.3,"no2":34,"o3":85,"pm10":79,"pm25":60,"ptime":1707148800000,"so2":7},4.69/株洲/1.3,"no2":34,"o3":85,"pm10":79,"pm25":61,"ptime":1707235200000,"so2":7},4.7/株洲/1.3,"no2":33,"o3":87,"pm10":79,"pm25":62,"ptime":1707321600000,"so2":7},4.82/株洲/1.3,"no2":33,"o3":90,"pm10":82,"pm25":64,"ptime":1707408000000,"so2":7},5/株洲/1.3,"no2":33,"o3":89,"pm10":87,"pm25":68,"ptime":1707494400000,"so2":7},5.07/株洲/1.3,"no2":32,"o3":94,"pm10":88,"pm25":69,"ptime":1707580800000,"so2":8},5.06/株洲/1.3,"no2":32,"o3":93,"pm10":88,"pm25":69,"ptime":1707667200000,"so2":8},4.99/株洲/1.3,"no2":31,"o3":92,"pm10":87,"pm25":68,"ptime":1707753600000,"so2":8},4.99/株洲/1.3,"no2":31,"o3":92,"pm10":87,"pm25":68,"ptime":1707840000000,"so2":8},4.95/株洲/1.3,"no2":31,"o3":92,"pm10":86,"pm25":67,"ptime":1707926400000,"so2":8},4.86/株洲/1.3,"no2":30,"o3":91,"pm10":85,"pm25":66,"ptime":1708012800000,"so2":7},4.84/株洲/1.3,"no2":30,"o3":90,"pm10":84,"pm25":66,"ptime":1708099200000,"so2":7},4.77/株洲/1.3,"no2":29,"o3":90,"pm10":83,"pm25":65,"ptime":1708185600000,"so2":7},4.74/株洲/1.3,"no2":29,"o3":90,"pm10":83,"pm25":64,"ptime":1708272000000,"so2":7},4.69/株洲/1.3,"no2":29,"o3":89,"pm10":82,"pm25":63,"ptime":1708358400000,"so2":7},4.63/株洲/1.3,"no2":28,"o3":89,"pm10":81,"pm25":62,"ptime":1708444800000,"so2":7},4.61/株洲/1.3,"no2":28,"o3":89,"pm10":80,"pm25":62,"ptime":1708531200000,"so2":7},4.57/株洲/1.3,"no2":28,"o3":89,"pm10":79,"pm25":61,"ptime":1708617600000,"so2":7},4.55/株洲/1.3,"no2":27,"o3":89,"pm10":79,"pm25":61,"ptime":1708704000000,"so2":7},4.52/株洲/1.3,"no2":27,"o3":88,"pm10":78,"pm25":61,"ptime":1708790400000,"so2":7},4.52/株洲/1.3,"no2":27,"o3":88,"pm10":78,"pm25":61,"ptime":1708876800000,"so2":7},4.52/株洲/1.3,"no2":27,"o3":88,"pm10":78,"pm25":61,"ptime":1708963200000,"so2":7},4
.49/株洲/1.3,"no2":27,"o3":88,"pm10":78,"pm25":60,"ptime":1709049600000,"so2":7},4.5/株洲/1.3,"no2":27,"o3":89,"pm10":78,"pm25":60,"ptime":1709136000000,"so2":7},4.5/株洲/1.3,"no2":27,"o3":89,"pm10":78,"pm25":60,"ptime":1709222400000,"so2":7},4.49/株洲/1.3,"no2":27,"o3":89,"pm10":77,"pm25":60,"ptime":1709308800000,"so2":7},4.49/株洲/1.3,"no2":27,"o3":89,"pm10":77,"pm25":60,"ptime":1709395200000,"so2":7},4.46/株洲/1.3,"no2":27,"o3":89,"pm10":76,"pm25":59,"ptime":1709481600000,"so2":7},4.46/株洲/1.3,"no2":27,"o3":89,"pm10":76,"pm25":59,"ptime":1709568000000,"so2":7},4.48/株洲/1.3,"no2":27,"o3":88,"pm10":77,"pm25":60,"ptime":1709654400000,"so2":7},4.49/株洲/1.3,"no2":27,"o3":89,"pm10":77,"pm25":60,"ptime":1709740800000,"so2":7},4.46/株洲/1.3,"no2":27,"o3":90,"pm10":76,"pm25":59,"ptime":1709827200000,"so2":7},4.48/株洲/1.3,"no2":27,"o3":92,"pm10":76,"pm25":59,"ptime":1709913600000,"so2":7},4.49/株洲/1.3,"no2":27,"o3":92,"pm10":77,"pm25":59,"ptime":1710000000000,"so2":7},4.5/株洲/1.3,"no2":27,"o3":94,"pm10":77,"pm25":59,"ptime":1710086400000,"so2":7},4.51/株洲/1.3,"no2":27,"o3":96,"pm10":77,"pm25":59,"ptime":1710172800000,"so2":7},4.51/株洲","co":1.3,"no2":27,"o3":96,"pm10":77,"pm25":59,"ptime":1710259200000,"so2":7},4.5/株洲","co":1.3,"no2":27,"o3":95,"pm10":77,"pm25":59,"ptime":1710345600000,"so2":7},4.49/株洲","co":1.3,"no2":27,"o3":95,"pm10":76,"pm25":59,"ptime":1710432000000,"so2":7},4.46/株洲","co":1.3,"no2":27,"o3":95,"pm10":76,"pm25":58,"ptime":1710518400000,"so2":7},4.48/株洲","co":1.3,"no2":28,"o3":95,"pm10":76,"pm25":58,"ptime":1710604800000,"so2":7},4.44/株洲","co":1.3,"no2":27,"o3":95,"pm10":75,"pm25":58,"ptime":1710691200000,"so2":7},4.44/株洲","co":1.3,"no2":27,"o3":95,"pm10":75,"pm25":58,"ptime":1710777600000,"so2":7},4.44/株洲","co":1.3,"no2":28,"o3":96,"pm10":75,"pm25":57,"ptime":1710864000000,"so2":7},4.44/株洲","co":1.3,"no2":28,"o3":96,"pm10":75,"pm25":57,"ptime":1710950400000,"so2":7},4.43/株洲","co":1.3,"no2":27,"o3":97,"pm10":75,"pm25":57,"ptime":1711036800000,"so2":7},4.4/株洲","co":1.3,"no2":27
,"o3":98,"pm10":75,"pm25":56,"ptime":1711123200000,"so2":7},4.39/株洲","co":1.3,"no2":27,"o3":98,"pm10":74,"pm25":56,"ptime":1711209600000,"so2":7},4.39/株洲","co":1.3,"no2":27,"o3":98,"pm10":74,"pm25":56,"ptime":1711296000000,"so2":7},4.35/株洲","co":1.3,"no2":27,"o3":99,"pm10":73,"pm25":55,"ptime":1711382400000,"so2":7},4.35/株洲","co":1.3,"no2":27,"o3":100,"pm10":73,"pm25":55,"ptime":1711468800000,"so2":7},4.36/株洲","co":1.3,"no2":27,"o3":101,"pm10":73,"pm25":55,"ptime":1711555200000,"so2":7},4.33/株洲","co":1.3,"no2":27,"o3":102,"pm10":72,"pm25":54,"ptime":1711641600000,"so2":7},4.3/株洲","co":1.3,"no2":26,"o3":102,"pm10":72,"pm25":54,"ptime":1711728000000,"so2":7},4.33/株洲","co":1.3,"no2":26,"o3":107,"pm10":72,"pm25":54,"ptime":1711814400000,"so2":7},4.27/株洲","co":1.3,"no2":26,"o3":106,"pm10":71,"pm25":53,"ptime":1711900800000,"so2":7},4.3/株洲","co":1.3,"no2":26,"o3":111,"pm10":71,"pm25":53,"ptime":1711987200000,"so2":7},4.29/株洲","co":1.3,"no2":26,"o3":110,"pm10":70,"pm25":53,"ptime":1712073600000,"so2":7},4.29/株洲","co":1.3,"no2":26,"o3":110,"pm10":70,"pm25":53,"ptime":1712160000000,"so2":7},4.29/株洲","co":1.3,"no2":26,"o3":110,"pm10":70,"pm25":53,"ptime":1712246400000,"so2":7},4.28/株洲","co":1.3,"no2":26,"o3":109,"pm10":70,"pm25":53,"ptime":1712332800000,"so2":7},4.28/株洲","co":1.3,"no2":26,"o3":108,"pm10":70,"pm25":53,"ptime":1712419200000,"so2":7},4.3/株洲","co":1.3,"no2":26,"o3":112,"pm10":70,"pm25":53,"ptime":1712505600000,"so2":7},4.29/株洲","co":1.3,"no2":26,"o3":112,"pm10":69,"pm25":53,"ptime":1712592000000,"so2":7},4.29/株洲","co":1.3,"no2":26,"o3":112,"pm10":69,"pm25":53,"ptime":1712678400000,"so2":7},4.27/株洲","co":1.3,"no2":26,"o3":112,"pm10":69,"pm25":52,"ptime":1712764800000,"so2":7},4.26/株洲","co":1.3,"no2":26,"o3":111,"pm10":69,"pm25":52,"ptime":1712851200000,"so2":7},4.26/株洲","co":1.3,"no2":26,"o3":110,"pm10":69,"pm25":52,"ptime":1712937600000,"so2":7},4.24/株洲","co":1.3,"no2":26,"o3":110,"pm10":68,"pm25":52,"ptime":1713024000000,"so2":7},4.25/株洲","co":1.3,"no2":26,"o3":
112,"pm10":68,"pm25":52,"ptime":1713110400000,"so2":7},4.22/株洲","co":1.3,"no2":25,"o3":112,"pm10":68,"pm25":52,"ptime":1713196800000,"so2":7},4.22/株洲","co":1.3,"no2":25,"o3":112,"pm10":68,"pm25":52,"ptime":1713283200000,"so2":7},4.19/株洲","co":1.3,"no2":25,"o3":112,"pm10":68,"pm25":51,"ptime":1713369600000,"so2":7},4.18/株洲","co":1.3,"no2":25,"o3":112,"pm10":67,"pm25":51,"ptime":1713456000000,"so2":7},4.16/株洲","co":1.2,"no2":25,"o3":112,"pm10":67,"pm25":51,"ptime":1713542400000,"so2":7},4.16/株洲","co":1.2,"no2":25,"o3":112,"pm10":67,"pm25":51,"ptime":1713628800000,"so2":7},4.13/株洲","co":1.2,"no2":25,"o3":111,"pm10":66,"pm25":51,"ptime":1713715200000,"so2":7},4.12/株洲","co":1.2,"no2":25,"o3":113,"pm10":66,"pm25":50,"ptime":1713801600000,"so2":7},4.12/株洲","co":1.2,"no2":25,"o3":113,"pm10":66,"pm25":50,"ptime":1713888000000,"so2":7},4.12/株洲","co":1.2,"no2":25,"o3":113,"pm10":66,"pm25":50,"ptime":1713974400000,"so2":7},4.13,/株洲","co":1.2,"no2":25,"o3":115,"pm10":66,"pm25":50,"ptime":1714060800000,"so2":7},4.12/株洲","co":1.2,"no2":25,"o3":114,"pm10":66,"pm25":50,"ptime":1714147200000,"so2":7},4.14/株洲","co":1.2,"no2":25,"o3":119,"pm10":65,"pm25":50,"ptime":1714233600000,"so2":7},4.14/株洲","co":1.2,"no2":25,"o3":119,"pm10":65,"pm25":50,"ptime":1714320000000,"so2":7},4.11/株洲","co":1.2,"no2":25,"o3":118,"pm10":65,"pm25":49,"ptime":1714406400000,"so2":7},4.09/株洲","co":1.2,"no2":24,"o3":118,"pm10":65,"pm25":49,"ptime":1714492800000,"so2":7},4.06/株洲","co":1.2,"no2":24,"o3":117,"pm10":64,"pm25":49,"ptime":1714579200000,"so2":7},4.05/株洲","co":1.2,"no2":24,"o3":116,"pm10":64,"pm25":49,"ptime":1714665600000,"so2":7},4.05/株洲","co":1.2,"no2":24,"o3":116,"pm10":64,"pm25":49,"ptime":1714752000000,"so2":7},4.04/株洲","co":1.2,"no2":24,"o3":116,"pm10":63,"pm25":49,"ptime":1714838400000,"so2":7},4.01/株洲","co":1.2,"no2":24,"o3":115,"pm10":63,"pm25":48,"ptime":1714924800000,"so2":7},4/株洲","co":1.2,"no2":24,"o3":114,"pm10":63,"pm25":48,"ptime":1715011200000,"so2":7},4.03/株洲","co":1.2,"no2":24,"o3":1
19,"pm10":63,"pm25":48,"ptime":1715097600000,"so2":7},4.0/株洲/1.2,"no2":24,"o3":124,"pm10":63,"pm25":48,"ptime":1715184000000,"so2":7},4.07/株洲/1.2,"no2":24,"o3":124,"pm10":63,"pm25":48,"ptime":1715270400000,"so2":7},4.06/株洲/1.2,"no2":24,"o3":123,"pm10":63,"pm25":48,"ptime":1715356800000,"so2":7},4.07/株洲/1.2,"no2":24,"o3":125,"pm10":63,"pm25":48,"ptime":1715443200000,"so2":7},4.09/株洲/1.2,"no2":24,"o3":126,"pm10":64,"pm25":48,"ptime":1715529600000,"so2":7},4.09/株洲/1.2,"no2":24,"o3":126,"pm10":64,"pm25":48,"ptime":1715616000000,"so2":7},4.09/株洲/1.2,"no2":24,"o3":127,"pm10":64,"pm25":48,"ptime":1715702400000,"so2":7},4.11/株洲/1.2,"no2":24,"o3":130,"pm10":64,"pm25":48,"ptime":1715788800000,"so2":7},4.12/株洲/1.2,"no2":24,"o3":132,"pm10":64,"pm25":48,"ptime":1715875200000,"so2":7},4.1/株洲1.2,"no2":24,"o3":133,"pm10":64,"pm25":47,"ptime":1715961600000,"so2":7},4.09/株洲1.2,"no2":24,"o3":132,"pm10":64,"pm25":47,"ptime":1716048000000,"so2":7},4.09/株洲1.2,"no2":24,"o3":132,"pm10":64,"pm25":47,"ptime":1716134400000,"so2":7},4.09/株洲1.2,"no2":24,"o3":132,"pm10":64,"pm25":47,"ptime":1716220800000,"so2":7},4.06/株洲1.2,"no2":23,"o3":132,"pm10":63,"pm25":47,"ptime":1716307200000,"so2":7},4.08/株洲1.2,"no2":23,"o3":134,"pm10":63,"pm25":47,"ptime":1716393600000,"so2":7},4.1/株洲1.2,"no2":23,"o3":137,"pm10":63,"pm25":47,"ptime":1716480000000,"so2":7},4.06/株洲1.2,"no2":23,"o3":136,"pm10":63,"pm25":46,"ptime":1716566400000,"so2":7},4.06/株洲1.2,"no2":23,"o3":136,"pm10":63,"pm25":46,"ptime":1716652800000,"so2":7}'
+b=re.sub(pattern,'/',a)
+print(b)
diff --git a/AirQuilitySys/scaler.joblib b/AirQuilitySys/scaler.joblib
new file mode 100644
index 0000000..f2b9322
Binary files /dev/null and b/AirQuilitySys/scaler.joblib differ
diff --git a/AirQuilitySys/test.py b/AirQuilitySys/test.py
new file mode 100644
index 0000000..d820b59
--- /dev/null
+++ b/AirQuilitySys/test.py
@@ -0,0 +1 @@
+import matplotlib.pyplot as plt
diff --git a/AirQuilitySys/time.py b/AirQuilitySys/time.py
index e69de29..a16b77e 100644
--- a/AirQuilitySys/time.py
+++ b/AirQuilitySys/time.py
@@ -0,0 +1,382 @@
+import os
+import pandas as pd
+import numpy as np
+from sqlalchemy import create_engine, text
+from sklearn.preprocessing import MinMaxScaler
+import logging
+from sklearn.model_selection import train_test_split
+from keras.callbacks import EarlyStopping, ModelCheckpoint
+from sklearn.metrics import mean_squared_error
+from keras.models import Sequential
+from keras.layers import LSTM, Dense
+from tensorflow.keras.layers import Input
+from keras.preprocessing.sequence import pad_sequences
+import tensorflow as tf
+from tensorflow.keras.models import Sequential
+from tensorflow.keras.layers import LSTM, Dense
+from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
+from tensorflow.keras.models import Model # 导入Model类
+import matplotlib.pyplot as plt
+import joblib
+import pymysql
+import logging
+from datetime import datetime
+
# Root logging: libraries emit only ERROR and above, with timestamped lines.
logging.basicConfig(level=logging.ERROR, format='%(asctime)s - %(levelname)s - %(message)s')
# This module's own logger is more verbose (INFO) than the root configuration.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def get_db_config():
    """Build the MySQL connection settings from environment variables.

    Each setting falls back to a development default when the corresponding
    environment variable is unset.
    NOTE(review): the fallback password is hard-coded in source — move it to
    a secrets store before production use.

    Returns:
        dict: host / user / password / db / charset, as consumed by
        create_database_engine().

    Raises:
        ValueError: if any resolved setting is None (defensive only — it
            cannot trigger while every key carries a default).
    """
    config = {
        "host": os.getenv('DB_HOST', '127.0.0.1'),
        "user": os.getenv('DB_USER', 'root'),
        "password": os.getenv('DB_PASSWORD', 'mysql>hyx123'),
        "db": os.getenv('DB_NAME', 'airquility'),
        "charset": 'utf8',
    }

    for key, value in config.items():
        if value is None:
            raise ValueError(f"缺少环境变量: {key}")

    return config
+
+
def create_database_engine(config):
    """Create a SQLAlchemy engine for the configured MySQL database.

    Args:
        config: dict produced by get_db_config() (host/user/password/db/charset).

    Returns:
        A SQLAlchemy Engine bound to the mysql+pymysql URL.

    Raises:
        Exception: whatever create_engine raises; logged and re-raised.
    """
    db_string = (
        f'mysql+pymysql://{config["user"]}:{config["password"]}'
        f'@{config["host"]}/{config["db"]}?charset={config["charset"]}'
    )

    try:
        engine = create_engine(db_string)
    except Exception as e:
        # Log (rather than print) so failures reach the configured handlers.
        logger.error(f"创建数据库引擎失败: {e}")
        raise
    # Debug print(type(engine)) removed.
    return engine
+
+
def fetch_data(engine, query):
    """Run a SQL query against the engine and return a DataFrame.

    Args:
        engine: SQLAlchemy engine/connection to execute against.
        query: non-empty SQL string.

    Returns:
        pandas.DataFrame with the query results.

    Raises:
        ValueError: when `query` is not a string or is blank.
        Exception: any error from pd.read_sql, logged and re-raised.
    """
    # Guard clauses: reject anything that is not a usable SQL string.
    if not isinstance(query, str):
        message = "查询字符串类型错误,query应为字符串。"
        logging.error(message)
        raise ValueError(message)
    if not query.strip():
        message = "查询字符串为空。"
        logging.error(message)
        raise ValueError(message)

    try:
        return pd.read_sql(text(query), engine)
    except Exception as e:
        logging.error(f"执行SQL查询失败: {e}")
        raise
+
def preprocess_data(df, target_col, default_year=2024, features=None):
    """Parse the 'ptime' column into datetimes and min-max scale the data.

    Args:
        df: input DataFrame; must contain a 'ptime' column of "M/D" strings.
        target_col: column treated as the prediction target.
        default_year: year prepended when parsing "M/D" values.
            BUG FIX: the argument used to be clobbered by a hard-coded
            `default_year = 2024` line and was effectively ignored.
        features: optional explicit feature columns; derived from `df`
            when None (the original ignored this parameter entirely).

    Returns:
        (scaled_features, scaled_target, target_scaler) on success, or
        (None, None, None) when validation or parsing fails.
    """
    if df.empty:
        logging.error("输入的DataFrame为空")
        return None, None, None
    if 'ptime' not in df.columns:
        logging.error("DataFrame中不存在'ptime'列")
        return None, None, None

    try:
        df['ptime'] = df['ptime'].apply(
            lambda x: datetime.strptime(f"{default_year}/{x}", "%Y/%m/%d"))
    except (TypeError, ValueError):
        logging.error("ptime列转换为datetime类型失败,可能是因为格式不正确。")
        return None, None, None

    # Derive the feature columns only when the caller did not supply them.
    if features is None:
        if target_col in df.columns:
            features = df.columns.drop(target_col)
        else:
            logging.warning(f"目标列 '{target_col}' 在DataFrame中未找到,将不进行列删除操作。")
            features = df.columns

    try:
        df.set_index('ptime', inplace=True)
    except KeyError:
        print("列 'ptime' 不存在,无法设置为索引。")

    # BUG FIX: use one scaler per quantity. The original re-fitted a single
    # MinMaxScaler on the target right after the features, so the returned
    # scaler no longer matched the feature scaling it appeared to describe.
    feature_scaler = MinMaxScaler()
    scaled_features = feature_scaler.fit_transform(df[features])
    target_scaler = MinMaxScaler()
    # NOTE(review): the target is the scaled datetime index, mirroring the
    # original behaviour — confirm this is the intended modelling target.
    scaled_target = target_scaler.fit_transform(df.index.values.reshape(-1, 1))
    return scaled_features, scaled_target, target_scaler
+
+
def split_dataset_into_train_test(features, target, test_size=0.2):
    """Chronologically split arrays into train and test partitions.

    Unlike a shuffled split, the leading (1 - test_size) fraction becomes
    the training data, preserving temporal order for time-series use.

    Args:
        features: numpy array of model inputs.
        target: numpy array of labels, same length as `features`.
        test_size: fraction in (0, 1) reserved for the test partition.

    Returns:
        (train_features, test_features, train_target, test_target)

    Raises:
        TypeError: if features or target is not a numpy array.
        ValueError: on a length mismatch or test_size outside (0, 1).
    """
    if not isinstance(features, np.ndarray) or not isinstance(target, np.ndarray):
        raise TypeError("features and target must be numpy arrays")
    if len(features) != len(target):
        raise ValueError("features and target must have the same length")
    if not 0 < test_size < 1:
        raise ValueError("test_size must be between 0 and 1")

    train_size = int(len(features) * (1 - test_size))

    # numpy slicing returns views (no copies) and keeps chronological order.
    train_features, test_features = features[:train_size], features[train_size:]
    train_target, test_target = target[:train_size], target[train_size:]
    # Debug prints removed — they dumped every array to stdout on each call.
    return train_features, test_features, train_target, test_target
+
+
def validate_data_shapes(train_features, test_features, n_steps):
    """Check that both feature arrays have exactly `n_steps` columns.

    Args:
        train_features: 2-D numpy array of training inputs.
        test_features: 2-D numpy array of test inputs.
        n_steps: required size of the second (time-step) dimension.

    Raises:
        ValueError: when either array's second dimension differs from n_steps.
    """
    # Debug print removed; the function is now a pure validator.
    if train_features.shape[1] != n_steps or test_features.shape[1] != n_steps:
        raise ValueError(f"训练和测试特征的第二维度(时间步长)应为{n_steps}")
+
+
def build_model(n_steps, lstm_units, dense_units, input_shape):
    """Construct and compile a single-layer LSTM regression model.

    Args:
        n_steps: kept for signature compatibility.
            NOTE(review): unused — the time dimension comes from input_shape.
        lstm_units: width of the LSTM layer.
        dense_units: width of the output Dense layer.
        input_shape: (timesteps, features) tuple for the Input layer.

    Returns:
        A compiled tf.keras.Model (Adam optimizer, MSE loss).
    """
    inputs = Input(shape=input_shape)
    x = LSTM(lstm_units)(inputs)
    outputs = Dense(dense_units)(x)
    model = tf.keras.Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer='adam', loss='mse')
    # Debug print removed.
    return model
+
+
def validate_params(epochs, batch_size):
    """Validate that `epochs` and `batch_size` are positive integers.

    Args:
        epochs: number of training epochs.
        batch_size: mini-batch size.

    Raises:
        ValueError: when either value is not a positive int.
    """
    if not isinstance(epochs, int) or epochs <= 0:
        raise ValueError("epochs 应该是一个正整数")
    if not isinstance(batch_size, int) or batch_size <= 0:
        raise ValueError("batch_size 应该是一个正整数")
    # The former duplicate positivity re-check (unreachable after the two
    # guards above) and the debug print were removed.
+
+
def ensure_directory_safety(path: str):
    """Ensure the parent directory of an absolute file path exists.

    Args:
        path: absolute path of the file that is about to be written.

    Raises:
        ValueError: if `path` is not absolute.
        PermissionError: if the directory cannot be created.
        Exception: any other makedirs failure, logged and re-raised.
    """
    if not os.path.isabs(path):
        raise ValueError("路径应该是绝对路径")
    directory = os.path.dirname(path)
    # Debug prints of the directory and a "===" marker were removed.

    try:
        if not os.path.exists(directory):
            logger.info(f"目录 {directory} 不存在,开始创建。")
            # exist_ok=True avoids racing with a concurrent creator.
            os.makedirs(directory, exist_ok=True)
            logger.info(f"目录 {directory} 创建成功。")
    except PermissionError:
        logger.error(f"没有权限在 {directory} 创建目录。")
        raise
    except Exception as e:
        logger.error(f"创建目录 {directory} 时发生未知错误:{e}")
        raise
def train_model(model: Model, train_features, train_target, test_features, test_target, epochs: int, batch_size: int,
                patience: int, save_best_only: bool = True, monitor: str = 'val_loss', mode: str = 'min',
                model_path: str = "best_model.h5") -> dict:
    """Train the model with early stopping, checkpointing the best weights.

    BUG FIX: the body used to overwrite `model_path` with a hard-coded
    "/path/to/your/model.h5", silently ignoring the caller's argument.
    The per-call logging.basicConfig() (a global side effect) was removed.

    :param model: Keras model instance
    :param train_features: training features
    :param train_target: training targets
    :param test_features: validation features
    :param test_target: validation targets
    :param epochs: number of training epochs
    :param batch_size: mini-batch size
    :param patience: patience for the early-stopping callback
    :param save_best_only: whether to keep only the best checkpoint
    :param monitor: metric to monitor
    :param mode: monitoring mode (min/max)
    :param model_path: checkpoint destination.
        NOTE(review): ensure_directory_safety requires an absolute path,
        so callers must pass one; the relative default will raise.
    :return: the Keras History object from model.fit
    """
    validate_params(epochs, batch_size)
    ensure_directory_safety(model_path)

    # Save the best-performing weights seen so far.
    checkpoint = ModelCheckpoint(model_path, monitor=monitor, verbose=1,
                                 save_best_only=save_best_only, mode=mode)
    # Stop once the monitored metric stalls for `patience` epochs.
    early_stopping = EarlyStopping(monitor=monitor, patience=patience, verbose=1)

    try:
        history = model.fit(train_features, train_target, epochs=epochs, batch_size=batch_size,
                            validation_data=(test_features, test_target), verbose=1,
                            callbacks=[early_stopping, checkpoint])
        return history
    except ValueError as ve:
        logging.error(f"参数错误: {ve}")
        raise
    except OSError as oe:
        logging.error(f"文件操作错误: {oe}")
        raise
    except Exception as e:
        logging.error(f"模型训练过程中发生异常: {e}")
        raise
+
def build_and_train_model(n_steps, features, target, train_features, train_target, test_features, test_target, lstm_units=50, dense_units=1, optimizer='adam', loss='mse', epochs=100, batch_size=32, patience=10, model_save_path='model.h5'):
    """Build an LSTM model, train it with checkpointing, and save it.

    Args:
        n_steps: expected time-step dimension of the feature arrays.
        features, target: full (unsplit) arrays; kept for interface parity.
        train_features, train_target: training split (numpy arrays).
        test_features, test_target: validation split (numpy arrays).
        lstm_units, dense_units: layer widths passed to build_model.
        optimizer, loss: kept for interface parity (build_model compiles
            with adam/mse itself — NOTE(review): these args are unused).
        epochs, batch_size: training schedule.
        patience: early-stopping patience.
        model_save_path: where the final model is saved ('.h5' appended
            if missing).

    Returns:
        (model, history) — the trained model and its fit history.

    Raises:
        ValueError: if any split array is not a numpy array, or shapes
            fail validate_data_shapes.
    """
    if not (isinstance(train_features, np.ndarray) and isinstance(train_target, np.ndarray) and isinstance(test_features, np.ndarray) and isinstance(test_target, np.ndarray)):
        raise ValueError("输入数据train_features, train_target, test_features, test_target必须是numpy数组")

    validate_data_shapes(train_features, test_features, n_steps)

    # BUG FIX: `input_shape` used to be read from an undefined global name;
    # derive it from the training data instead (as the caller computed it).
    input_shape = (n_steps, train_features.shape[1])

    # BUG FIX: checkpoint to the caller's save path, not a hard-coded
    # "/path/to/your/model.keras".
    checkpoint = ModelCheckpoint(filepath=model_save_path,
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=True,
                                 mode='min')

    model = build_model(n_steps, lstm_units, dense_units, input_shape)

    # BUG FIX: the early-stopping callback was constructed but never passed
    # to fit(); it is wired into the callbacks list now.
    early_stopping = EarlyStopping(monitor='val_loss', patience=patience, verbose=1)

    history = model.fit(train_features, train_target,
                        validation_data=(test_features, test_target),
                        epochs=epochs,
                        batch_size=batch_size,
                        callbacks=[checkpoint, early_stopping])

    # Normalise the extension before the final save.
    if not model_save_path.endswith('.h5'):
        model_save_path += '.h5'
    model.save(model_save_path)

    return model, history
+
+
def evaluate_model(model, scaler, test_target, predictions):
    """Undo the target scaling and report the mean squared error.

    Args:
        model: trained model (unused here; kept for interface parity).
        scaler: the MinMaxScaler fitted on the target series.
        test_target: scaled ground-truth values (flat array).
        predictions: scaled model outputs (2-D array).

    Returns:
        The MSE between inverse-transformed actuals and predictions.
    """
    predictions_orig = scaler.inverse_transform(predictions)
    actual_orig = scaler.inverse_transform(test_target.reshape(-1, 1))
    mse = mean_squared_error(actual_orig, predictions_orig)
    print(f'Mean Squared Error: {mse}')
    return mse
+
+
+if __name__ == "__main__":
+
+ engine = create_database_engine(get_db_config())
+ query = "SELECT ptime, ci FROM may"
+ df = fetch_data(engine, query)
+ target_col = 'ptime'
+ features, target, scaler = preprocess_data(df, target_col)
+
+ train_features, test_features, train_target, test_target = split_dataset_into_train_test(features, target, test_size=0.2)
+
+ n_steps = 5
+ # 假设train_features和test_features是你的数据,且它们是二维数组
+ # 首先,你需要获取或设定一个maxlen,这里假设我们已知或计算出它应该是5
+ maxlen = 5
+
+ # 对训练数据进行填充或截断
+ train_features_padded = pad_sequences(train_features, maxlen=maxlen, padding='post', truncating='post')
+
+ # 对测试数据进行同样的处理
+ test_features_padded = pad_sequences(test_features, maxlen=maxlen, padding='post', truncating='post')
+
+ input_shape = (n_steps, int(train_features.shape[1]))
+
+
+ model, history = build_and_train_model(n_steps=n_steps,
+ features=features,
+ target=target,
+ train_target=train_target,
+ test_target=test_target,
+ train_features=train_features_padded,
+ test_features=test_features_padded)
+
+ predictions = model.predict(test_features)
+ mse = evaluate_model(model, scaler, test_target, predictions)
+ # 可视化预测结果(可选)
+
+ #plt.plot(test_target, label='Actual')
+ plt.plot(predictions, label='Predicted')
+ plt.legend()
+ plt.xlabel('Ptime')
+ plt.ylabel('CI')
+ #plt.plot(ptime, ci)
+ plt.show()
+
+ # 保存模型
+ model.save('trained_model.h5')
+ joblib.dump(scaler, 'scaler.joblib')
diff --git a/AirQuilitySys/trained_model.h5 b/AirQuilitySys/trained_model.h5
new file mode 100644
index 0000000..21db3c3
Binary files /dev/null and b/AirQuilitySys/trained_model.h5 differ
diff --git a/AirQuilitySys/user.py b/AirQuilitySys/user.py
new file mode 100644
index 0000000..07a108a
--- /dev/null
+++ b/AirQuilitySys/user.py
@@ -0,0 +1,44 @@
+import mysql.connector
+from mysql.connector import Error
def insert_user():
    """Prompt for credentials and insert them into the `users` table.

    BUG FIX: the original `def` was indented one space below module level
    (an IndentationError), and `connection`/`cursor` were referenced in the
    `finally` block even when mysql.connector.connect() had failed
    (UnboundLocalError). NOTE(review): the password is stored in plain
    text — hash it (and avoid echoing it via input()) before production.
    """
    connection = None
    cursor = None
    try:
        # Connect to the local development database.
        connection = mysql.connector.connect(
            host='127.0.0.1',
            user='root',
            password='mysql>hyx123',
            database='airquility'
        )

        if connection.is_connected():
            db_info = connection.get_server_info()
            print(f"Connected to MySQL Server version {db_info}")

            cursor = connection.cursor()

            # Collect the new account interactively.
            user = input("Enter user: ")
            password = input("Enter password: ")

            # Parameterized query — values are bound, never interpolated.
            sql_insert_query = """INSERT INTO users (user,password)
                                   VALUES (%s, %s)"""
            insert_data = (user, password)

            cursor.execute(sql_insert_query, insert_data)
            connection.commit()
            print("Record inserted successfully into users table.")

    except Error as e:
        print(f"Error while connecting to MySQL: {e}")
    finally:
        # Guard against a failed connect before touching the handles.
        if connection is not None and connection.is_connected():
            if cursor is not None:
                cursor.close()
            connection.close()
            print("MySQL connection is closed")


# Run the interactive insert when the module is executed.
insert_user()
\ No newline at end of file
diff --git a/AirQuilitySys/your_database_path.db b/AirQuilitySys/your_database_path.db
new file mode 100644
index 0000000..e69de29
diff --git a/Python程序设计课程设计报告模板 2024年.doc b/Python程序设计课程设计报告模板 2024年.doc
new file mode 100644
index 0000000..34dae3b
Binary files /dev/null and b/Python程序设计课程设计报告模板 2024年.doc differ
diff --git a/~$thon程序设计课程设计报告模板 2024年.doc b/~$thon程序设计课程设计报告模板 2024年.doc
new file mode 100644
index 0000000..109d8f0
Binary files /dev/null and b/~$thon程序设计课程设计报告模板 2024年.doc differ