From a00da3062c6c825b585f82275823ac45cdeb6502 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Fri, 19 Jan 2024 19:16:20 +0800
Subject: [PATCH] Cancel orders that fill too quickly after L

---
 third_data/data_server.py | 163 ++++++++++++++++++++++++++++++++++++++++++-----------
 1 files changed, 128 insertions(+), 35 deletions(-)

diff --git a/third_data/data_server.py b/third_data/data_server.py
index a0a06ab..b179ff9 100644
--- a/third_data/data_server.py
+++ b/third_data/data_server.py
@@ -7,8 +7,9 @@
 from http.server import BaseHTTPRequestHandler
 
 import dask
-from log_module.log import logger_system, logger_debug
-from utils import global_util, tool, data_export_util
+from code_attribute.gpcode_manager import BlackListCodeManager, CodePrePriceManager
+from log_module.log import logger_system, logger_debug, logger_kpl_limit_up
+from utils import global_util, tool, data_export_util, init_data_util
 from code_attribute import gpcode_manager
 from log_module import log, log_analyse, log_export
 from l2 import code_price_manager, l2_data_util, l2_data_manager_new, cancel_buy_strategy, transaction_progress
@@ -25,7 +26,6 @@
 from output import code_info_output, limit_up_data_filter, output_util, kp_client_msg_manager
 from trade import bidding_money_manager, trade_manager, l2_trade_util, trade_record_log_util
-from trade.l2_trade_util import BlackListCodeManager
 
 import concurrent.futures
 
 # Disable http.server log output
@@ -49,6 +49,8 @@
     __industry_cache_dict = {}
     __latest_limit_up_codes_set = set()
     __data_process_thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=10)
+    # Gain (price change percentage) of each code
+    __code_limit_rate_dict = {}
 
     # Disable log output
    def log_message(self, format, *args):
@@ -62,6 +64,7 @@
         total_datas = KPLLimitUpDataRecordManager.total_datas
         # Sort by limit-up time
         total_datas = list(total_datas)
+        current_reason_codes_dict = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()
 
         # Count limit-up reasons
         limit_up_reason_dict = {}
@@ -84,7 +87,8 @@
             (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k),
              limit_up_reason_dict[k][0][5]) for k in
             limit_up_reason_dict]
-        limit_up_reason_statistic_info.sort(key=lambda x: int(x[1]))
+        limit_up_reason_statistic_info.sort(
+            key=lambda x: len(current_reason_codes_dict[x[0]]) if x[0] in current_reason_codes_dict else 0)
         limit_up_reason_statistic_info.reverse()
 
         codes_set = set([d[3] for d in total_datas])
@@ -317,12 +321,28 @@
             # Get the score info
             pass
 
-        elif url.path == "/get_l2_datas":
-            # Get the L2 data
+        elif url.path == "/get_kpl_block_info":
+            start_time = time.time()
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
-            datas = data_export_util.get_l2_datas(code)
-            response_data = json.dumps({"code": 0, "data": datas})
+            try:
+                data = code_info_output.get_kpl_block_info(code)
+                response_data = json.dumps({"code": 0, "data": data})
+                print("get_kpl_block_info 耗时：", time.time() - start_time)
+            except Exception as e:
+                logger_debug.exception(e)
+                logging.exception(e)
+
+        elif url.path == "/get_l2_datas":
+            try:
+                # Get the L2 data
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                code = ps_dict['code']
+                datas = data_export_util.get_l2_datas(code)
+                code_name = gpcode_manager.get_code_name(code)
+                response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name, "data": datas}})
+            except Exception as e:
+                logger_debug.exception(e)
         elif url.path == "/get_trade_progress":
             # Get the trade progress
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
@@ -428,7 +448,7 @@
         elif url.path == "/kpl/get_plate_codes":
            # Get the codes under a limit-up reason
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            plate = ps_dict["plate"]
+            plate = kpl_util.filter_block(ps_dict["plate"])
             # Get the codes under the plate
             # Count how many codes have hit limit up so far (grouped by limit-up reason)
             now_limit_up_codes_info = self.__kplDataManager.get_data(KPLDataType.LIMIT_UP)
@@ -441,11 +461,23 @@
             codes_info = []
             for d in record_limit_up_datas:
-                if d[2] != plate:
+                if kpl_util.filter_block(d[2]) != plate:
                     continue
                 # code, name, limit-up time, whether the limit broke, whether flagged to buy, whether an order was already placed
                 codes_info.append(
-                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0])
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 1])
+
+            for d in record_limit_up_datas:
+                if kpl_util.filter_block(d[2]) == plate:
+                    continue
+                if plate not in [kpl_util.filter_block(k) for k in d[6].split("、")]:
+                    continue
+                # code, name, limit-up time, whether the limit broke, whether flagged to buy, whether an order was already placed
+                codes_info.append(
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 0])
+            codes_info.sort(key=lambda x: x[2])
 
             # Check whether the codes are flagged as want-to-buy
             want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
@@ -496,29 +528,86 @@
             response_data = json.dumps({"code": 1, "msg": "请上传code"})
 
         elif url.path == "/get_last_trade_day_reasons":
-            # Get info on codes with the same limit-up reason on the previous trading day
-            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            code = ps_dict["code"]
-            day = HistoryKDatasUtils.get_previous_trading_date(tool.get_now_date_str())
-            # Get the limit-up data
-            # Get the code's limit-up reason
-            reasons = kpl_data_manager.KPLLimitUpDataRecordManager.list_by_code(code, day)
-            if reasons:
-                reasons = list(reasons)
-                reasons.sort(key=lambda x: x[9])
-                reason = reasons[-1][2]
-                datas = self.__kplDataManager.get_from_file(kpl_util.KPLDataType.LIMIT_UP, day)
-                # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plates, actual float, main-force net inflow, limit-up reason code, limit-up reason code count)
-                result_list = []
-                if datas:
-                    for d in datas:
-                        if d[5] == reason and d[0] != code:
-                            # (code, name)
-                            result_list.append((d[0], d[1]))
-                response_data = json.dumps({"code": 0, "data": {"reason": reason, "data": result_list}})
-            else:
-                response_data = json.dumps({"code": 1, "msg": "昨日未涨停"})
+            # Compute the average gain
+            def get_limit_rate_list(codes):
+                if not codes:
+                    return []
+                need_request_codes = set()
+                if tool.trade_time_sub(tool.get_now_time_str(), "09:30:00") < 0:
+                    need_request_codes |= set(codes)
+                else:
+                    now_time = time.time()
+                    for c in codes:
+                        if c not in self.__code_limit_rate_dict:
+                            need_request_codes.add(c)
+                        elif now_time - self.__code_limit_rate_dict[c][1] > 60:
+                            need_request_codes.add(c)
+                if need_request_codes:
+                    _limit_rate_list = HistoryKDatasUtils.get_codes_limit_rate(list(need_request_codes))
+                    for d in _limit_rate_list:
+                        self.__code_limit_rate_dict[d[0]] = (d[1], time.time())
+                return [(c_, self.__code_limit_rate_dict[c_][0]) for c_ in codes]
+
+            try:
+                raise Exception("接口暂停使用")
+                # Get info on codes with the same limit-up reason on the previous trading day
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                code = ps_dict["code"]
+                # Get yesterday's limit-up data
+                day = HistoryKDatasUtils.get_previous_trading_date_cache(tool.get_now_date_str())
+
+                limit_up_records = kpl_data_manager.KPLLimitUpDataRecordManager.list_all_cache(day)
+                reasons = []
+                for d in limit_up_records:
+                    if d[3] == code:
+                        reasons.append(d)
+                # Get the code's limit-up reason
+                if reasons:
+                    reasons = list(reasons)
+                    reasons.sort(key=lambda x: x[9])
+                    reason = reasons[-1][2]
+                    # Get the limit-up data
+                    datas = self.__kplDataManager.get_from_file_cache(kpl_util.KPLDataType.LIMIT_UP, day)
+                    # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plates, actual float, main-force net inflow, limit-up reason code, limit-up reason code count)
+                    yesterday_result_list = []
+                    percent_rate = 0
+                    if datas:
+                        yesterday_codes = set()
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                        # Get the gains
+                        limit_rate_list = get_limit_rate_list(yesterday_codes)
+                        limit_rate_dict = {}
+                        if limit_rate_list:
+                            total_rate = 0
+                            for d in limit_rate_list:
+                                limit_rate_dict[d[0]] = d[1]
+                                total_rate += d[1]
+                            percent_rate = round(total_rate / len(limit_rate_list), 2)
+
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                                if d[0] != code:
+                                    # (code, name, gain)
+                                    yesterday_result_list.append((d[0], d[1], limit_rate_dict.get(d[0])))
+
+                    current_limit_up_list = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas
+                    current_result_list = []
+                    if current_limit_up_list:
+                        for c in current_limit_up_list:
+                            if c[5] == reason and c[0] != code:
+                                current_result_list.append((c[0], c[1]))
+                    response_data = json.dumps({"code": 0, "data": {"reason": reason, "reason_rate": percent_rate,
+                                                                    "data": {"yesterday": yesterday_result_list,
+                                                                             "current": current_result_list}}})
+                else:
+                    response_data = json.dumps({"code": 1, "msg": "昨日未涨停"})
+            except Exception as e:
+                logger_debug.exception(e)
+                raise e
 
         elif url.path == "/pull_kp_client_msg":
             # Pull client messages
@@ -550,7 +639,7 @@
                 result_str = self.__process_kpl_data(params)
             self.__send_response(result_str)
 
-    def __process_kpl_data(self, data):
+    def __process_kpl_data(self, data_origin):
         def do_limit_up(result_list_):
             if result_list_:
                 # Save the limit-up times
@@ -607,6 +696,8 @@
             kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list_)
             self.__kplDataManager.save_data(type_, result_list_)
 
+        # Strip the word "概念" (concept)
+        data = data_origin
         type_ = data["type"]
         print("开盘啦type:", type_)
         if type_ == KPLDataType.BIDDING.value:
@@ -623,8 +714,10 @@
             self.__kplDataManager.save_data(type_, result_list)
 
         elif type_ == KPLDataType.LIMIT_UP.value:
-            result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_LIMIT_UP)
+            result_list = kpl_util.parseLimitUpData(data["data"])
             self.__data_process_thread_pool.submit(lambda: do_limit_up(result_list))
+            # Log the limit-up data
+            logger_kpl_limit_up.info(result_list)
         elif type_ == KPLDataType.OPEN_LIMIT_UP.value:
             result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_OPEN_LIMIT_UP)
             if result_list:
--
Gitblit v1.8.0
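
Reviewer note (not part of the patch): the new get_limit_rate_list helper caches each code's gain in __code_limit_rate_dict for 60 seconds and re-requests every code before the 09:30 open. The sketch below isolates that TTL-cache pattern on its own, assuming a hypothetical fetch_limit_rates() stand-in for HistoryKDatasUtils.get_codes_limit_rate and folding the pre-open case into a force_refresh flag; the names and the TTL value are illustrative, not the project's API.

import time

# Minimal sketch of the 60-second TTL cache used by get_limit_rate_list.
_CACHE = {}          # code -> (rate, fetched_at)
TTL_SECONDS = 60     # assumed refresh interval, mirroring the patch's 60s check


def fetch_limit_rates(codes):
    # Hypothetical stand-in for HistoryKDatasUtils.get_codes_limit_rate(codes);
    # returns (code, rate) pairs.
    return [(c, 0.0) for c in codes]


def get_limit_rates(codes, force_refresh=False):
    now = time.time()
    # Re-fetch codes whose cached value is missing or older than the TTL
    # (force_refresh stands in for the patch's "before 09:30, refresh everything").
    stale = [c for c in codes
             if force_refresh or c not in _CACHE or now - _CACHE[c][1] > TTL_SECONDS]
    if stale:
        for code, rate in fetch_limit_rates(stale):
            _CACHE[code] = (rate, time.time())
    # Return (code, rate) pairs in the caller's order, like the patched helper.
    return [(c, _CACHE[c][0]) for c in codes]


if __name__ == "__main__":
    print(get_limit_rates(["000001", "600000"]))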