AccessServer.py

from flask import Flask, request, jsonify
import subprocess
import csv
import pymysql
import mysql.connector
import requests
from datetime import datetime, timedelta
import math
import json
import os
import codecs
import quopri
from werkzeug.utils import secure_filename
from email.header import decode_header

app = Flask(__name__)
SQServerHost = "http://111.4.141.191:18081"


@app.after_request
def add_header(response):
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Headers'] = 'Content-Type'
    return response

# Data model class
class SensorData:
    def __init__(self, sample_time, recv_time, node_name, node_value, node_type, node_unit, device_id, channel_id, project_id):
        self.sample_time = sample_time
        self.recv_time = recv_time
        self.node_name = node_name
        self.node_value = node_value
        self.node_type = node_type
        self.node_unit = node_unit
        self.device_id = device_id
        self.channel_id = channel_id
        self.project_id = project_id

    def __repr__(self):  # Simple string representation so lists of objects print readably
        return f"{self.sample_time} - {self.node_name}: {self.node_value}"

def GetStartEndTime(deltaHour=6):
    # Get the current time
    current_time = datetime.now()
    # Round down to the current hour: keep the hour, zero out minutes and seconds
    rounded_time = datetime(current_time.year, current_time.month, current_time.day, current_time.hour, 0, 0)
    # Go back deltaHour hours
    previous_time = rounded_time - timedelta(hours=deltaHour)
    # Return both times (end time first, then start time)
    return rounded_time.strftime('%Y-%m-%d %H:%M:%S'), previous_time.strftime('%Y-%m-%d %H:%M:%S')
    # return "2024-07-13 16:00:00", "2024-07-10 16:00:00"
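# Usage sketch (illustrative): callers below unpack the result as end time first, start time second, e.g.
#   etm, stm = GetStartEndTime(6)   # etm = top of the current hour, stm = six hours earlier
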
@app.route('/sync_from_access', methods=['POST'])
def sync_from_access():
    sensor_data_list = []
    mdb_file = 'test.mdb'
    table_name = 'AcsData'
    output_file = 'output.csv'
    # Build the command; make sure the full path to the executable is available
    cmd = ['mdb-export', mdb_file, table_name]
    # Run the command and capture its output
    try:
        result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=True)
        # Write the result to the output file
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write(result.stdout)
    except subprocess.CalledProcessError as e:
        print(f"An error occurred: {e}")
        # Write the error message to the file (or handle the error some other way)
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write(e.stderr)  # stderr is already a str because universal_newlines=True
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    # CSV file and target table
    csv_file = 'output.csv'
    table_name = 'sensor_data'
    cursor = None  # defined up front so the finally block is safe even if setup fails
    try:
        # Open the CSV file for reading
        with open(csv_file, mode='r', encoding='utf-8') as file:
            # Create a CSV reader
            reader = csv.reader(file)
            # Skip the header row
            next(reader)
            # Create a cursor
            cursor = conn.cursor()
            index = 0
            # Read the rows one by one
            for row in reader:
                print(index)
                index += 1
                # Convert the date-time strings into a MySQL-friendly format
                SampleTime = datetime.strptime(row[0], '%m/%d/%y %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S')
                RecvTime = datetime.strptime(row[1], '%m/%d/%y %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S')
                NodeName = row[2]
                NodeValue = float(row[3])
                if math.isnan(NodeValue):
                    NodeValue = 0.0
                NodeType = row[4]
                NodeUnit = row[5]
                DeviceID = row[6]
                ChannelID = row[7]
                ProjectID = 0
                sensor_data_list.append(SensorData(SampleTime, RecvTime, NodeName, NodeValue, NodeType, NodeUnit, DeviceID, ChannelID, ProjectID))
        # Sort the list by recv_time, newest first
        sensor_data_list.sort(key=lambda x: x.recv_time, reverse=True)
        index = 0
        sensor_data_list_true = []
        for sd in sensor_data_list:
            index += 1
            sensor_data_list_true.append(sd)
            if index > 100:
                break
        for sd in sensor_data_list_true:
            # Check whether an identical record already exists in the database
            check_sql = "SELECT COUNT(*) FROM {0} WHERE RecvTime=%s AND DeviceID=%s AND ChannelID=%s".format(table_name)
            cursor.execute(check_sql, (sd.recv_time, sd.device_id, sd.channel_id))
            exists = cursor.fetchone()[0]
            print(exists)
            # Insert only if no matching record exists yet
            if exists == 0:
                # Parameterized INSERT statement
                sql = "INSERT INTO {0} (SampleTime, RecvTime, NodeName, NodeValue, NodeType, NodeUnit, DeviceID, ChannelID, ProjectID) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)".format(table_name)
                print(sql)
                # Pass the values as a tuple so character encoding is handled correctly
                cursor.execute(sql, (sd.sample_time, sd.recv_time, sd.node_name, sd.node_value, sd.node_type, sd.node_unit, sd.device_id, sd.channel_id, sd.project_id))
                # Commit the transaction
                conn.commit()
    except mysql.connector.Error as e:
        return jsonify({"error": str(e)}), 500
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    return jsonify({"message": "Data synced successfully"}), 200
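# Example call (illustrative; host/port follow app.run() at the bottom of this file):
#   curl -X POST http://192.168.0.7:15000/sync_from_access
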
@app.route('/get_last_data', methods=['POST'])
def get_last_data():
    # Parse the JSON request body
    data = request.get_json()
    ChannelID = data.get('ChannelID')
    DeviceID = data.get('DeviceID')
    n = data.get('N', 1)  # Defaults to 1 when N is not provided in the request
    if not ChannelID or not DeviceID:
        return jsonify({"error": "Missing ChannelID or DeviceID"}), 400
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    cursor = None
    try:
        cursor = conn.cursor()
        # Fetch the most recent n records matching ChannelID and DeviceID, together with
        # the overall max and average NodeValue for that channel/device
        query = """
            WITH Stats AS (
                SELECT
                    MAX(NodeValue) AS MaxNodeValue,
                    AVG(NodeValue) AS AvgNodeValue
                FROM sensor_data
                WHERE ChannelID=%s AND DeviceID=%s
            )
            SELECT
                sd.*,
                s.MaxNodeValue,
                s.AvgNodeValue
            FROM sensor_data sd
            CROSS JOIN Stats s
            WHERE sd.ChannelID=%s AND sd.DeviceID=%s
            ORDER BY sd.RecvTime DESC
            LIMIT %s;
        """
        cursor.execute(query, (ChannelID, DeviceID, ChannelID, DeviceID, n))
        results = cursor.fetchall()
        if results:
            # Convert the result rows into a list of dicts
            rows = [dict(zip([column[0] for column in cursor.description], row)) for row in results]
            jsonData = {}
            jsonData["datas"] = rows
            return jsonify(jsonData)
        else:
            return jsonify({"message": "No data found"}), 404
    except mysql.connector.Error as e:
        return jsonify({"error": str(e)}), 500
    finally:
        # Close cursor and connection
        if cursor:
            cursor.close()
        if conn:
            conn.close()
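# Example request body (illustrative values):
#   POST /get_last_data
#   {"ChannelID": 1, "DeviceID": 2, "N": 10}
# Returns {"datas": [...]} with each row extended by MaxNodeValue and AvgNodeValue.
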
@app.route('/get_data_by_avg', methods=['POST'])
def get_data_by_avg():
    data = request.get_json()
    ChannelID = data.get('ChannelID')
    DeviceID = data.get('DeviceID')
    AvgType = data.get('AvgType')
    N = data.get('N', 6)  # Defaults to 6 periods when N is not provided in the request
    if not ChannelID or not DeviceID or not AvgType:
        return jsonify({"error": "Missing ChannelID or DeviceID or AvgType"}), 400
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    cursor = None
    try:
        cursor = conn.cursor()
        # Fetch the most recent records matching ChannelID and DeviceID
        query = """
            SELECT * FROM sensor_data
            WHERE ChannelID=%s AND DeviceID=%s
            ORDER BY RecvTime DESC
            LIMIT %s
        """
        if AvgType == "hour":
            cursor.execute(query, (ChannelID, DeviceID, N * 3))
        elif AvgType == "day":
            cursor.execute(query, (ChannelID, DeviceID, N * 3 * 24))
        elif AvgType == "month":
            cursor.execute(query, (ChannelID, DeviceID, N * 3 * 24 * 7 * 31))
        else:
            return jsonify({"message": "AvgType type error!"}), 404
        results = cursor.fetchall()
        if not results:
            return jsonify({"message": "No data found"}), 404
        rows = [dict(zip([column[0] for column in cursor.description], row)) for row in results]
        # Group by hour/day/month and accumulate totals for each bucket
        avg_data = {}
        for data in rows:
            if AvgType == "hour":
                n_key = data["SampleTime"].strftime('%Y-%m-%d %H:00:00')
            elif AvgType == "day":
                n_key = data["SampleTime"].strftime('%Y-%m-%d')
            elif AvgType == "month":
                n_key = data["SampleTime"].strftime('%Y-%m')
            if n_key not in avg_data:
                avg_data[n_key] = {'total_value': 0, 'count': 0}
            avg_data[n_key]['total_value'] += float(data["NodeValue"])
            avg_data[n_key]['count'] += 1
            avg_data[n_key]['ChannelID'] = int(data["ChannelID"])
            avg_data[n_key]['DeviceID'] = int(data["DeviceID"])
            avg_data[n_key]['NodeName'] = str(data["NodeName"])
            avg_data[n_key]['NodeType'] = str(data["NodeType"])
            avg_data[n_key]['NodeUnit'] = str(data["NodeUnit"])
            avg_data[n_key]['ProjectID'] = int(data["ProjectID"])
        # Build a list containing the average value for each bucket, newest first
        average_values = []
        for n_key in reversed(sorted(avg_data.keys())):
            average_values.append({
                'SampleTime': n_key,
                'RecvTime': n_key,
                "ProjectID": avg_data[n_key]['ProjectID'],
                "NodeUnit": avg_data[n_key]['NodeUnit'],
                "NodeName": avg_data[n_key]['NodeName'],
                "NodeType": avg_data[n_key]['NodeType'],
                "ChannelID": avg_data[n_key]['ChannelID'],
                "DeviceID": avg_data[n_key]['DeviceID'],
                'NodeValue': avg_data[n_key]['total_value'] / avg_data[n_key]['count']
            })
            if len(average_values) >= N:
                break
        # If fewer than N buckets were found, pad the list with None
        average_values += [None] * (N - len(average_values))
        jsonData = {"datas": average_values}
        return jsonify(jsonData)
    except mysql.connector.Error as e:
        return jsonify({"error": str(e)}), 500
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
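# Example request body (illustrative values): hourly averages over the last 6 hours
#   POST /get_data_by_avg
#   {"ChannelID": 1, "DeviceID": 2, "AvgType": "hour", "N": 6}
# AvgType may be "hour", "day" or "month"; missing buckets are padded with null.
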
@app.route('/get_last_datas', methods=['POST'])
def get_last_datas():
    # Parse the JSON request body
    data = request.get_json()
    ids_list = data.get('idsList')
    if not ids_list or not isinstance(ids_list, list):
        return jsonify({"error": "Missing or invalid idsList parameter"}), 400
    # List holding the results
    results = []
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    if isinstance(conn, str):
        return jsonify({"error": conn}), 500
    cursor = None
    try:
        cursor = conn.cursor()
        # Create a temporary table holding the max id for each (ChannelID, DeviceID) pair
        ids_temp_table_query = """
            CREATE TEMPORARY TABLE ids_temp AS
            SELECT MAX(id) as max_id
            FROM sensor_data
            WHERE (ChannelID, DeviceID) IN (%s)
            GROUP BY ChannelID, DeviceID
        """
        # Build the list of (ChannelID, DeviceID) values
        channel_device_pairs = [(item['ChannelID'], item['DeviceID']) for item in ids_list if 'ChannelID' in item and 'DeviceID' in item]
        channel_device_pairs_str = ', '.join([f"({channel_id}, {device_id})" for channel_id, device_id in channel_device_pairs])
        # Create the temporary table
        cursor.execute(ids_temp_table_query % channel_device_pairs_str)
        # Use the max ids from the temporary table to fetch the full records
        final_query = """
            SELECT sd.*
            FROM sensor_data sd
            JOIN ids_temp it ON sd.id = it.max_id
        """
        # Run the final query
        cursor.execute(final_query)
        results = cursor.fetchall()
        # Convert the result rows into a list of dicts
        result_dicts = [dict(zip([column[0] for column in cursor.description], result)) for result in results]
        return jsonify(result_dicts)
    except mysql.connector.Error as e:
        return jsonify({"error": str(e)}), 500
    finally:
        # Close cursor and connection
        if cursor:
            cursor.close()
        if conn:
            conn.close()
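# Example request body (illustrative values): latest record per (ChannelID, DeviceID) pair
#   POST /get_last_datas
#   {"idsList": [{"ChannelID": 1, "DeviceID": 2}, {"ChannelID": 3, "DeviceID": 2}]}
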
###################################### "One map" overview endpoints #######################################
@app.route('/get-water-management-projects', methods=['GET'])
def get_water_management_projects():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the WaterManagementProjects table
        query = """
            SELECT * FROM WaterManagementProjects
        """
        cursor.execute(query)
        projects = cursor.fetchall()
        # Convert the result rows into a list of dicts
        projects_json = []
        for project in projects:
            project_json = {
                'id': project[0],             # Project ID
                'special': bool(project[1]),  # Convert tinyint to boolean
                'type': project[2],           # Project type
                'longitude': project[3],      # Longitude
                'latitude': project[4],       # Latitude
                'name': project[5],           # Project name
                'name_pri': project[6],       # Primary project name
                'text1': project[7],          # Text 1
                'text2': project[8]           # Text 2
            }
            projects_json.append(project_json)
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": projects_json
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
@app.route('/get-hot-point-data', methods=['GET'])
def get_hot_point_data():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the WaterHotPoints table
        query = """
            SELECT * FROM WaterHotPoints
        """
        cursor.execute(query)
        hotPoints = cursor.fetchall()
        # Convert the result rows into a list of dicts
        hps_json = []
        for hp in hotPoints:
            hp_json = {
                'type': hp[1],
                'longitude': hp[2],
                'latitude': hp[3],
                'name': hp[4],
                'name_pri': hp[5]
            }
            hps_json.append(hp_json)
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": hps_json
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
@app.route('/get-project-milestones', methods=['GET'])
def get_project_milestones():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the Project_milestones table
        query = """
            SELECT * FROM Project_milestones
        """
        cursor.execute(query)
        projects = cursor.fetchall()
        # Convert the result rows into a list of dicts
        projects_json = []
        for project in projects:
            project_json = {
                'id': project[0],       # Milestone ID
                'year': project[1],     # Year
                'date': project[2],     # Date
                'title': project[3],    # Title
                'pos': project[4],      # Position
                'content': project[5],  # Content
            }
            projects_json.append(project_json)
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": projects_json
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
@app.route('/get-move-plans', methods=['GET'])
def get_move_plans():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the Move_Plans table
        query = """
            SELECT * FROM Move_Plans
        """
        cursor.execute(query)
        plans = cursor.fetchall()
        # Convert the result rows into a list of dicts
        plans_json = []
        for plan in plans:
            plan_json = {
                'id': plan[0],             # Auto-increment primary key
                'isAfter': bool(plan[1]),  # Convert tinyint to boolean
                'isOut': bool(plan[2]),    # Convert tinyint to boolean
                'desc': plan[3],
                'from': plan[4],
                'to': plan[5],
                'manNum': plan[6],
                'homeNum': plan[7],
                'completeNum': plan[8],
                'dateTime': str(plan[9]),
                'villageCount': plan[10],
                'materialPrize': str(plan[11])
            }
            plans_json.append(plan_json)
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": plans_json
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
@app.route('/get-rkzy-base-data', methods=['GET'])
def get_rkzy_base_data():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the SafetyZone table
        query = """
            SELECT * FROM SafetyZone
        """
        cursor.execute(query)
        plans = cursor.fetchall()
        # Take the first row only and convert it into a dict
        plans_json = {}
        for plan in plans:
            plan_json = {
                'population': plan[1],
                'area': plan[2],
                'capacity': plan[3]
            }
            plans_json = plan_json
            break
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": plans_json
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
@app.route('/get-swyj-base-data', methods=['GET'])
def get_swyj_base_data():
    # Open the MySQL connection
    conn = mysql.connector.connect(
        host="localhost",
        user="root",
        password="HelloWorld123",
        database="water"
    )
    # Set the connection character set
    conn.set_charset_collation('utf8')
    try:
        cursor = conn.cursor()
        # Select all rows from the YangtzeRiverWaterLevels table
        query = """
            SELECT * FROM YangtzeRiverWaterLevels
        """
        cursor.execute(query)
        waterLevels = cursor.fetchall()
        # Convert the result rows into a list of dicts
        waterLevel_jsons = []
        for level in waterLevels:
            waterLevel_json = {
                'location': level[0],
                'flood_protection_level': level[1],
                'warning_level': level[2],
                'guarantee_level': level[3]
            }
            waterLevel_jsons.append(waterLevel_json)
        # Return a JSON response containing msg, code and data
        response = {
            "msg": None,
            "code": 200,
            "data": waterLevel_jsons
        }
        return jsonify(response), 200
    except mysql.connector.Error as e:
        # Catch MySQL errors and return the error message with status 500
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
    finally:
        # Close the database connection
        conn.close()
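# The overview GET endpoints above all return the same envelope {"msg": ..., "code": ..., "data": ...};
# an illustrative call: curl http://192.168.0.7:15000/get-move-plans
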
###################### Weather-related endpoints ###################################
# qxData is a module-level global, initialized here
qxData = []


# Fetch rainfall data for the last 6 hours
@app.route('/get-six-hour-qx-data', methods=['GET'])
def get_six_hour_qx_data():
    global qxData
    # Payload and headers for the first API request
    requestData = {
        "eqaddvcd": "421083",
        "pageSize": "80"
    }
    requestHead = {
        "token": "6C60F37D40B48DECF9A3F2CC1A5A50142C4BE52F26D15B171153F20DB63960696A439E435DDCA9AF925F13338470FFBDDC98CEE65AFBC92C2EB4E44C0A757DB5"
    }
    # Connect to the database
    conn = pymysql.connect(host='localhost', user='root', password='HelloWorld123', db='water', charset='utf8mb4')
    try:
        # First request: fetch the station base data
        response = requests.post("{0}/shareddata/api/v1/baseinfo/stbprp/qx".format(SQServerHost), data=requestData, headers=requestHead)
        response.raise_for_status()
        jsonData = response.json()
        qxData = []
        # Map each STCD to its index in qxData
        qxDataDic = {}
        index = 0
        # Work through the stations inside a database cursor
        with conn.cursor() as cursor:
            for jsData in jsonData["data"]["list"]:
                if (114.115926000756 > jsData["LGTD"] and jsData["LGTD"] > 113.410714576698 and 30.2676753765206 > jsData["LTTD"] and jsData["LTTD"] > 29.8114499954776):
                    qxData.append(jsData)
                    qxDataDic[jsData["STCD"]] = index
                    index += 1
                    # Check whether this station is already in the database
                    cursor.execute("SELECT id FROM WaterHotPoints WHERE longitude=%s AND latitude=%s",
                                   (jsData["LGTD"], jsData["LTTD"]))
                    result = cursor.fetchone()
                    if result:
                        pass
                    else:
                        # Insert the station if it does not exist yet
                        cursor.execute("INSERT INTO WaterHotPoints (type, longitude, latitude, name, name_pri) VALUES (%s, %s, %s, %s, %s)",
                                       (1, jsData['LGTD'], jsData['LTTD'], jsData['STNM'], ""))
                        conn.commit()
        # Payload for the second API request
        requestData = {}
        sts = ','.join([qx["STCD"] for qx in qxData])
        # GetStartEndTime returns the tuple (etm, stm)
        etm, stm = GetStartEndTime(6)
        requestData["sts"] = sts
        requestData["etm"] = etm
        requestData["stm"] = stm
        # Second request: fetch the rainfall data
        response = requests.post("{0}/shareddata/api/v1/monitdata/pptn/qx".format(SQServerHost), data=requestData, headers=requestHead)
        response.raise_for_status()
        jsonData = response.json()
        # Accumulate the six-hour rainfall per station
        dropDic = {}
        for jd in jsonData['data']:
            if jd["STCD"] not in dropDic:
                dropDic[jd["STCD"]] = jd["DRP"]
            else:
                dropDic[jd["STCD"]] += jd["DRP"]
        # Attach the rainfall sums to qxData
        for key, value in dropDic.items():
            if key in qxDataDic:
                qxData[qxDataDic[key]]["dropSum6"] = value
        # Return the final qxData as the JSON response
        response = {
            "msg": None,
            "code": 200,
            "data": qxData
        }
        return jsonify(response), 200
    except requests.RequestException as e:
        # Handle any HTTP error
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
# swData is a module-level global, initialized here
swData = []


@app.route('/get-six-hour-sw-data', methods=['GET'])
def get_six_hour_sw_data():
    global swData
    swData = []
    # Payload and headers for the first API request
    requestData = {
        "eqaddvcd": "421083",
        "pageSize": "80"
    }
    requestHead = {
        "token": "6C60F37D40B48DECF9A3F2CC1A5A50142C4BE52F26D15B171153F20DB63960696A439E435DDCA9AF925F13338470FFBDDC98CEE65AFBC92C2EB4E44C0A757DB5"
    }
    # Connect to the database
    conn = pymysql.connect(host='localhost', user='root', password='HelloWorld123', db='water', charset='utf8mb4')
    try:
        # First request: fetch the station base data
        response = requests.post("{0}/shareddata/api/v1/baseinfo/stbprp/sw".format(SQServerHost), data=requestData, headers=requestHead)
        response.raise_for_status()
        jsonData = response.json()
        # Map each STCD to its index in swData
        swDataDic = {}
        index = 0
        # Work through the stations inside a database cursor
        with conn.cursor() as cursor:
            for jsData in jsonData["data"]["list"]:
                if (114.115926000756 > jsData["LGTD"] and jsData["LGTD"] > 113.410714576698 and 30.2676753765206 > jsData["LTTD"] and jsData["LTTD"] > 29.8114499954776):
                    swData.append(jsData)
                    swDataDic[jsData["STCD"]] = index
                    index += 1
                    # Check whether this station is already in the database
                    cursor.execute("SELECT id FROM WaterHotPoints WHERE longitude=%s AND latitude=%s",
                                   (jsData["LGTD"], jsData["LTTD"]))
                    result = cursor.fetchone()
                    if result:
                        pass
                    else:
                        # Insert the station if it does not exist yet
                        cursor.execute("INSERT INTO WaterHotPoints (type, longitude, latitude, name, name_pri) VALUES (%s, %s, %s, %s, %s)",
                                       (2, jsData['LGTD'], jsData['LTTD'], jsData['STNM'], ""))
                        conn.commit()
        # Payload for the second API request
        requestData = {}
        sts = ','.join([sw["STCD"] for sw in swData])
        # GetStartEndTime returns the tuple (etm, stm)
        etm, stm = GetStartEndTime(6)
        requestData["sts"] = sts
        requestData["etm"] = etm
        requestData["stm"] = stm
        # Second request: fetch the rainfall data
        response = requests.post("{0}/shareddata/api/v1/monitdata/pptn/sw".format(SQServerHost), data=requestData, headers=requestHead)
        response.raise_for_status()
        jsonData = response.json()
        # Accumulate the six-hour rainfall and weather per station
        dropDic = {}
        wthDic = {}
        for jd in jsonData['data']:
            if jd["STCD"] not in dropDic:
                dropDic[jd["STCD"]] = jd["DRP"]
            else:
                dropDic[jd["STCD"]] += jd["DRP"]
            wthDic[jd["STCD"]] = jd["WTH"]
        # Attach the rainfall and weather data to swData
        for key in dropDic.keys():
            if key in swDataDic:
                swData[swDataDic[key]]["dropSum6"] = dropDic[key]
                swData[swDataDic[key]]["wth"] = wthDic[key]
        # Keep only the entries that received a 'dropSum6' value
        filtered_swData = [item for item in swData if 'dropSum6' in item]
        # Update the global swData
        swData = filtered_swData
        # Return the final swData as the JSON response
        response = {
            "msg": None,
            "code": 200,
            "data": swData
        }
        return jsonify(response), 200
    except requests.RequestException as e:
        # Handle any HTTP error
        response = {
            "msg": str(e),
            "code": 500,
            "data": []
        }
        return jsonify(response), 500
swHeightData = []


@app.route('/get-sw-height-data', methods=['GET'])
def get_sw_height_data():
    global swHeightData
    swDataDic = {}
    swHeightData = []
    # Request headers
    requestHead = {
        "token": "6C60F37D40B48DECF9A3F2CC1A5A50142C4BE52F26D15B171153F20DB63960696A439E435DDCA9AF925F13338470FFBDDC98CEE65AFBC92C2EB4E44C0A757DB5"
    }
    # If the local cache file does not exist, fetch the data from the API and save it locally
    if not os.path.exists("importZa.json"):
        requestData = {
            "pageSize": "10000"
        }
        response = requests.post("{0}/shareddata/api/v1/baseinfo/stbprp/sw".format(SQServerHost), data=requestData, headers=requestHead)
        response.raise_for_status()
        importZ = ["福田寺", "黄丝南", "下新河", "子贝渊", "沙螺"]
        jsonData = response.json()
        index = 0
        for jsData in jsonData["data"]["list"]:
            exists = False
            for im in importZ:
                if im in jsData["STNM"]:
                    exists = True
            if exists:
                swHeightData.append(jsData)
                swDataDic[jsData["STCD"]] = index
                index += 1
        # Save to the local cache file
        newJ = json.dumps(swHeightData, ensure_ascii=False)
        with codecs.open("importZa.json", "w+", encoding="utf-8") as nj:
            nj.write(newJ)
    else:
        # If the local cache file exists, load the data from it
        index = 0
        with codecs.open("importZa.json", "r", encoding="utf-8") as iz:
            izContent = json.loads(iz.read())
            for izc in izContent:
                swHeightData.append(izc)
                swDataDic[izc["STCD"]] = index
                index += 1
    # Connect to the database
    conn = pymysql.connect(host='localhost', user='root', password='HelloWorld123', db='water', charset='utf8mb4')
    # Work through the stations inside a database cursor
    with conn.cursor() as cursor:
        for swhData in swHeightData:
            # Check whether this station is already in the database
            cursor.execute("SELECT id FROM WaterHotPoints WHERE name=%s",
                           (swhData["STNM"],))
            result = cursor.fetchone()
            if result:
                pass
            else:
                # Insert the station if it does not exist yet
                cursor.execute("INSERT INTO WaterHotPoints (type, longitude, latitude, name, name_pri) VALUES (%s, %s, %s, %s, %s)",
                               (4, swhData['LGTD'], swhData['LTTD'], swhData['STNM'], ""))
                conn.commit()
    # Payload for the second API request
    etm, stm = GetStartEndTime(0)
    requestData = {
        "pageSize": 300,
        "etm": etm,
        "stm": stm
    }
    # Second request: fetch the water-level data
    response = requests.post("{0}/shareddata/api/v1/monitdata/rw/st_was_r".format(SQServerHost), data=requestData, headers=requestHead)
    response.raise_for_status()
    jsonData = response.json()
    # Dictionaries holding the water-level data
    dwzDic = {}
    upzDic = {}
    tgtqDic = {}
    for jd in jsonData['data']["list"]:
        dwzDic[jd["STCD"]] = -1 if jd["DWZ"] is None else jd["DWZ"]
        upzDic[jd["STCD"]] = -1 if jd["UPZ"] is None else jd["UPZ"]
        tgtqDic[jd["STCD"]] = -1 if jd["TGTQ"] is None else jd["TGTQ"]
    # Attach the water-level data to swHeightData
    for key in dwzDic.keys():
        if key in swDataDic:
            swHeightData[swDataDic[key]]["dwz"] = dwzDic[key]
            swHeightData[swDataDic[key]]["upz"] = upzDic[key]
            swHeightData[swDataDic[key]]["tgtq"] = tgtqDic[key]
    # Keep the entries that received a 'dwz' value
    filtered_swData = [item for item in swHeightData if 'dwz' in item]
    # Entries without 'dwz' data get -1 placeholders
    sub_swData = [item for item in swHeightData if 'dwz' not in item]
    for ssw in sub_swData:
        ssw["dwz"] = -1
        ssw["upz"] = -1
        ssw["tgtq"] = -1
        filtered_swData.append(ssw)
    # Update the global swHeightData
    swHeightData = filtered_swData
    # Return the final swHeightData as the JSON response
    response = {
        "msg": None,
        "code": 200,
        "data": swHeightData
    }
    return jsonify(response), 200
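# The three weather/water-level endpoints above proxy the SQServerHost API and insert any
# missing stations into WaterHotPoints; an illustrative call:
#   curl http://192.168.0.7:15000/get-six-hour-qx-data
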
# Configure the file upload directory
UPLOAD_FOLDER = 'uploads/images/'
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER


def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS


def decode_mime(encoded_str):
    """Decode a MIME quoted-printable encoded string."""
    try:
        # Only handle strings that start with `=?utf-8?Q?` and end with `?=`
        if encoded_str.startswith("=?utf-8?Q?") and encoded_str.endswith("?="):
            encoded_str = encoded_str[10:-2]
            decoded_bytes = quopri.decodestring(encoded_str)
            decoded_str = decoded_bytes.decode('utf-8')
            return decoded_str
        else:
            return encoded_str
    except Exception as e:
        print(f"Error decoding MIME: {e}")
        return encoded_str
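# Example (illustrative): decode_mime("=?utf-8?Q?=E6=B5=8B=E8=AF=95.png?=") -> "测试.png",
# while plain filenames are returned unchanged.
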
@app.route('/upload', methods=['POST'])
def upload_file():
    global UPLOAD_FOLDER
    # Create the upload directory if needed
    if not os.path.exists(UPLOAD_FOLDER):
        os.makedirs(UPLOAD_FOLDER)
    # Check that the request actually contains a file part
    if 'file' not in request.files:
        return jsonify({'error': 'No file part'})
    file = request.files['file']
    # If the user selects no file, the browser may submit an empty part without a filename
    if file.filename == '':
        return jsonify({'error': 'No file selected'})
    # Decode the file name
    decoded_filename = decode_mime(file.filename)
    print(decoded_filename)
    if file and allowed_file(decoded_filename):
        filename = decoded_filename
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        return jsonify({'message': 'File uploaded successfully', 'filename': filename})
    return jsonify({'error': 'Unsupported file type'})
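# Example call (illustrative): upload an image into uploads/images/
#   curl -X POST -F "file=@photo.jpg" http://192.168.0.7:15000/upload
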
if __name__ == '__main__':
    app.run(host='192.168.0.7', debug=True, port=15000)