From 1110af9cc42cbf6a3ebbb953f18585cb37ba5b8c Mon Sep 17 00:00:00 2001
From: admin <weikou2014>
Date: Mon, 08 Jan 2024 15:24:35 +0800
Subject: [PATCH] Bug fixes / add request logging

---
 data_server.py |   74 ++++++++++++++++++++++++------------
 1 file changed, 49 insertions(+), 25 deletions(-)

diff --git a/data_server.py b/data_server.py
index 88fa0b2..39ea3d4 100644
--- a/data_server.py
+++ b/data_server.py
@@ -1,24 +1,22 @@
 import http
 import json
+import random
 import socketserver
-import time
 from http.server import BaseHTTPRequestHandler
 
 import dask
 
 from code_attribute import gpcode_manager
+from log import logger_request_debug
 from log_module import log_analyse, log_export
-from output import limit_up_data_filter, output_util, code_info_output
+from output import limit_up_data_filter, output_util
 from output.limit_up_data_filter import IgnoreCodeManager
 from third_data import kpl_util, kpl_data_manager, kpl_api
 from third_data.code_plate_key_manager import KPLPlateForbiddenManager
 from third_data.kpl_data_manager import KPLLimitUpDataRecordManager, KPLDataManager, KPLCodeLimitUpReasonManager
 from third_data.kpl_util import KPLPlatManager, KPLDataType
-from trade import trade_manager
 from trade.l2_trade_util import BlackListCodeManager
-from utils import tool, global_util, kp_client_msg_manager, hosting_api_util
-
-from utils.history_k_data_util import HistoryKDatasUtils
+from utils import tool, global_util, hosting_api_util
 
 import urllib.parse as urlparse
 from urllib.parse import parse_qs
@@ -45,6 +43,17 @@
         if not total_datas:
             KPLLimitUpDataRecordManager.load_total_datas()
             total_datas = KPLLimitUpDataRecordManager.total_datas
+
+        current_datas_results = hosting_api_util.common_request({"ctype":"get_kpl_limit_up_datas"})
+        if type(current_datas_results) == str:
+            current_datas_results = json.loads(current_datas_results)
+        current_datas = current_datas_results.get("data")  # KPLLimitUpDataRecordManager.latest_origin_datas
+        current_block_codes = {}  # plate/reason name -> set of codes currently at limit up
+        for c in current_datas:
+            if c[5] not in current_block_codes:
+                current_block_codes[c[5]] = set()
+            current_block_codes[c[5]].add(c[0])
+
         # Sort by limit-up time
         total_datas = list(total_datas)
 
@@ -64,12 +73,15 @@
                 limit_up_reason_want_count_dict[d[2]] = 0
             if d[3] in want_codes:
                 limit_up_reason_want_count_dict[d[2]] += 1
-        # (plate name, limit-up code count, want-to-buy order count, limit-up time)
+        # (plate name, limit-up code count, broken-board count, want-to-buy order count, limit-up time)
         limit_up_reason_statistic_info = [
-            (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5]) for
+            (k, len(limit_up_reason_dict[k]),
+             len(limit_up_reason_dict[k]) - (len(current_block_codes[k]) if k in current_block_codes else 0),  # broken-board count: recorded limit-ups minus codes still at limit up
+             limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5]) for
             k in limit_up_reason_dict]
 
-        limit_up_reason_statistic_info.sort(key=lambda x: int(x[1]))
+        limit_up_reason_statistic_info.sort(
+            key=lambda x: len(current_block_codes.get(x[0])) if x[0] in current_block_codes else 0)  # after reverse(): plates with the most codes still at limit up come first
         limit_up_reason_statistic_info.reverse()
 
         codes_set = set([d[3] for d in total_datas])
@@ -260,28 +272,40 @@
     def do_GET(self):
         path = self.path
         url = urlparse.urlparse(path)
-        if url.path == "/kpl/get_limit_up_list":
-            response_data = self.__get_limit_up_list()
-            self.send_response(200)
-            # Response data sent back to the requesting client
-            self.send_header('Content-type', 'application/json')
-            self.end_headers()
-            self.wfile.write(response_data.encode())
-        else:
-            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            result = hosting_api_util.get_from_data_server(url.path, ps_dict)
-            self.__send_response(result)
+        thread_id = random.randint(0, 1000000)  # random id to correlate the start/end log lines of one request
+        logger_request_debug.info(f"GET request start({thread_id}): {url.path}")
+        try:
+            if url.path == "/kpl/get_limit_up_list":
+                response_data = self.__get_limit_up_list()
+                self.send_response(200)
+                # Response data sent back to the requesting client
+                self.send_header('Content-type', 'application/json')
+                self.end_headers()
+                self.wfile.write(response_data.encode())
+            else:
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                result = hosting_api_util.get_from_data_server(url.path, ps_dict)
+                self.__send_response(result)
+        finally:
+            logger_request_debug.info(f"GET request end({thread_id}): {url.path}")
+
     def do_POST(self):
+        thread_id = random.randint(0, 1000000)
         path = self.path
         url = urlparse.urlparse(path)
-        if url.path == "/upload_kpl_data":
-            # Receive 开盘啦 (KPL) data
-            params = self.__parse_request()
-            result_str = self.__process_kpl_data(params)
-            self.__send_response(result_str)
+        logger_request_debug.info(f"POST request start({thread_id}): {url.path}")
+        try:
+            if url.path == "/upload_kpl_data":
+                # Receive 开盘啦 (KPL) data
+                params = self.__parse_request()
+                result_str = self.__process_kpl_data(params)
+                self.__send_response(result_str)
+        finally:
+            logger_request_debug.info(f"POST request end({thread_id}): {url.path}")
 
     def __process_kpl_data(self, data):
+        data = json.loads(json.dumps(data).replace("概念", ""))  # strip the "概念" (concept) suffix from plate names
         type_ = data["type"]
         print("开盘啦type:", type_)
         if type_ == KPLDataType.BIDDING.value:
--
Gitblit v1.8.0
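
Note on the logging change above: the GET/POST handlers now tag each request with a random id and log matching start/end lines in a try/finally block. Below is a minimal, self-contained sketch of that pattern, assuming a plain logging.Logger in place of the project's logger_request_debug; the handle_request function and logger name are illustrative only, not part of the patch.

import logging
import random

logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
logger = logging.getLogger("request_debug")  # stand-in for log.logger_request_debug


def handle_request(method: str, path: str) -> None:
    # A random id correlates the start and end lines of one request in the
    # log, even when output from several handler threads interleaves.
    request_id = random.randint(0, 1000000)
    logger.info(f"{method} request start({request_id}): {path}")
    try:
        pass  # actual request handling would go here
    finally:
        # Runs even if handling raises, so failed or hung requests still
        # leave a matching "end" line in the log.
        logger.info(f"{method} request end({request_id}): {path}")


if __name__ == "__main__":
    handle_request("GET", "/kpl/get_limit_up_list")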