From f0248f54f90a12f491245f0ee3ccfbe8f477a76b Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Fri, 22 Dec 2023 14:24:20 +0800
Subject: [PATCH] Bug fix

---
 third_data/data_server.py | 96 +++++++++++++++++++++++++++++++++++++++--------
 1 files changed, 79 insertions(+), 17 deletions(-)

diff --git a/third_data/data_server.py b/third_data/data_server.py
index 3a6b5a3..929abce 100644
--- a/third_data/data_server.py
+++ b/third_data/data_server.py
@@ -7,6 +7,7 @@
 from http.server import BaseHTTPRequestHandler
 
 import dask
+from code_attribute.gpcode_manager import BlackListCodeManager
 from log_module.log import logger_system, logger_debug
 from utils import global_util, tool, data_export_util
 from code_attribute import gpcode_manager
@@ -25,7 +26,6 @@
 from output import code_info_output, limit_up_data_filter, output_util, kp_client_msg_manager
 from trade import bidding_money_manager, trade_manager, l2_trade_util, trade_record_log_util
-from trade.l2_trade_util import BlackListCodeManager
 import concurrent.futures
 
 # Disable http.server's log output
@@ -62,6 +62,7 @@
         total_datas = KPLLimitUpDataRecordManager.total_datas
         # Sort by limit-up time
         total_datas = list(total_datas)
+        current_reason_codes_dict = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()
 
         # Count the limit-up reasons
         limit_up_reason_dict = {}
@@ -84,13 +85,15 @@
             (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5])
             for k in limit_up_reason_dict]
-        limit_up_reason_statistic_info.sort(key=lambda x: int(x[3]))
+        limit_up_reason_statistic_info.sort(
+            key=lambda x: len(current_reason_codes_dict[x[0]]) if x[0] in current_reason_codes_dict else 0)
+        limit_up_reason_statistic_info.reverse()
 
         codes_set = set([d[3] for d in total_datas])
         # Determine leader status, whether at limit up, whether the board has opened, and load the scores
         rank_dict = limit_up_data_filter.get_limit_up_time_rank_dict(total_datas)
         limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
+        score_dict = {}
         fresult = []
         ignore_codes = self.__IgnoreCodeManager.list_ignore_codes("1")
@@ -219,7 +222,7 @@
         # -- Data preparation starts --
         codes_set = set([d[0] for d in temps])
         limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
+        score_dict = {}
         want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
         black_codes = BlackListCodeManager().list_codes()
         total_datas = KPLLimitUpDataRecordManager.total_datas
@@ -311,16 +314,30 @@
                 response_data = json.dumps({"code": 0, "data": data})
                 print("get_score_info elapsed:", time.time() - start_time)
             except Exception as e:
+                logger_debug.exception(e)
                 logging.exception(e)
             # Get the score info
             pass
+        elif url.path == "/get_kpl_block_info":
+            start_time = time.time()
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            try:
+                data = code_info_output.get_kpl_block_info(code)
+                response_data = json.dumps({"code": 0, "data": data})
+                print("get_kpl_block_info elapsed:", time.time() - start_time)
+            except Exception as e:
+                logger_debug.exception(e)
+                logging.exception(e)
+
         elif url.path == "/get_l2_datas":
             # Get the L2 data
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
             datas = data_export_util.get_l2_datas(code)
-            response_data = json.dumps({"code": 0, "data": datas})
+            code_name = gpcode_manager.get_code_name(code)
+            response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name, "data": datas}})
         elif url.path == "/get_trade_progress":
             # Get the trade progress
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
@@ -332,16 +349,23 @@
             # Latest L-cancel data
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
+            buy_single_index = ps_dict.get('buy_single_index')
+            if buy_single_index is not None:
+                buy_single_index = int(buy_single_index)
             records = code_info_output.load_trade_record_cancel_watch_indexes(code)
             # Get the latest L-up and L-down watch indexes
             records.reverse()
             up_indexes = []
             down_indexes = []
             for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
                 if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
                     up_indexes = r[2]
                     break
             for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
                 if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
                     down_indexes = r[2]
                     break
@@ -352,16 +376,19 @@
             # Latest H-cancel data
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
+            buy_single_index = ps_dict.get('buy_single_index')
             records = code_info_output.load_trade_record_cancel_watch_indexes(code,
                                                                               trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_H)
             # Get the latest L-up and L-down watch indexes
             records.reverse()
             indexes = []
             for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
                 indexes = r[2]
                 break
             response_data = json.dumps(
-                {"code": 0, "data":indexes})
+                {"code": 0, "data": indexes})
 
         elif url.path == "/kpl/get_limit_up_list":
             response_data = self.__get_limit_up_list()
@@ -433,7 +460,8 @@
                 continue
             # Code, name, limit-up time, whether the board opened, whether wanted, whether an order has already been placed
             codes_info.append(
-                [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0])
+                [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                 output_util.money_desc(d[13])])
         codes_info.sort(key=lambda x: x[2])
         # Check whether it is a want-to-buy order
         want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
@@ -487,24 +515,37 @@
             # Get codes with the same limit-up reason on the previous trading day
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict["code"]
-            day = HistoryKDatasUtils.get_previous_trading_date(tool.get_now_date_str())
-            # Get the limit-up data
+            # Get yesterday's limit-up data
+            day = HistoryKDatasUtils.get_previous_trading_date_cache(tool.get_now_date_str())
+            limit_up_records = kpl_data_manager.KPLLimitUpDataRecordManager.list_all_cache(day)
+            reasons = []
+            for d in limit_up_records:
+                if d[3] == code:
+                    reasons.append(d)
             # Get the limit-up reasons of the code
-            reasons = kpl_data_manager.KPLLimitUpDataRecordManager.list_by_code(code, day)
             if reasons:
                 reasons = list(reasons)
                 reasons.sort(key=lambda x: x[9])
                 reason = reasons[-1][2]
-                datas = self.__kplDataManager.get_from_file(kpl_util.KPLDataType.LIMIT_UP, day)
+                # Get the limit-up data
+                datas = self.__kplDataManager.get_from_file_cache(kpl_util.KPLDataType.LIMIT_UP, day)
                 # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, sector, actual free float, main force net amount, limit-up reason code, limit-up reason code count)
-                result_list = []
+                yesterday_result_list = []
                 if datas:
                     for d in datas:
                         if d[5] == reason and d[0] != code:
                             # (code, name)
-                            result_list.append((d[0], d[1]))
-                response_data = json.dumps({"code": 0, "data": {"reason": reason, "data": result_list}})
+                            yesterday_result_list.append((d[0], d[1]))
+                current_limit_up_list = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas
+                current_result_list = []
+                if current_limit_up_list:
+                    for c in current_limit_up_list:
+                        if c[5] == reason and c[0] != code:
+                            current_result_list.append((c[0], c[1]))
+                response_data = json.dumps({"code": 0, "data": {"reason": reason,
+                                                                "data": {"yesterday": yesterday_result_list,
+                                                                         "current": current_result_list}}})
             else:
                 response_data = json.dumps({"code": 1, "msg": "No limit up yesterday"})
@@ -548,7 +589,7 @@
                 code = d[0]
                 limit_up_reasons[code] = d[5]
                 codes_set.add(code)
-                if code.find("00") == 0 or code.find("60") == 0:
+                if tool.is_shsz_code(code):
                     limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
                     code_price_manager.Buy1PriceManager().set_limit_up_time(code, limit_up_time)
             add_codes = codes_set - self.__latest_limit_up_codes_set
@@ -567,7 +608,7 @@
             if add_codes:
                 for code in add_codes:
                     # Decide whether it can be bought based on the limit-up reason
-                    if code.find("00") == 0 or code.find("60") == 0:
+                    if tool.is_shsz_code(code):
                         try:
                             # Check whether an order has been placed
                             trade_state = trade_manager.CodesTradeStateManager().get_trade_state(code)
@@ -692,4 +733,25 @@
 
 
 if __name__ == "__main__":
-    run("0.0.0.0", 9004)
+    code = "002676"
+    buy_single_index = 716
+    records = code_info_output.load_trade_record_cancel_watch_indexes(code)
+    # Get the latest L-up and L-down watch indexes
+    records.reverse()
+    up_indexes = []
+    down_indexes = []
+    for r in records:
+        if buy_single_index and buy_single_index != r[1]:
+            continue
+        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
+            up_indexes = r[2]
+            break
+    for r in records:
+        if buy_single_index and buy_single_index != r[1]:
+            continue
+        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
+            down_indexes = r[2]
+            break
+
+    response_data = json.dumps(
+        {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})
--
Gitblit v1.8.0
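Note: below is a minimal client sketch for exercising the two endpoints whose responses this patch changes. It is a sketch only: it assumes the server is exposed on port 9004 (the port used by the run("0.0.0.0", 9004) call that this patch replaces in __main__) and reachable on localhost, and call_api is a hypothetical helper that is not part of data_server.py; the timeout and the sample stock code are illustrative.

# Minimal client sketch (assumptions: local server on port 9004; "002676" is just an example code).
import json
from urllib.parse import urlencode
from urllib.request import urlopen

BASE_URL = "http://127.0.0.1:9004"  # assumed deployment address


def call_api(path, **params):
    # The handlers parse the query string with parse_qs, so simple key=value pairs suffice.
    with urlopen(f"{BASE_URL}{path}?{urlencode(params)}", timeout=5) as resp:
        return json.loads(resp.read().decode("utf-8"))


if __name__ == "__main__":
    # New endpoint added by this patch; returns {"code": 0, "data": <block info>} on success.
    print(call_api("/get_kpl_block_info", code="002676"))

    # /get_l2_datas now nests the rows under "data" together with the code and its name.
    result = call_api("/get_l2_datas", code="002676")
    print(result["data"]["code"], result["data"]["code_name"], len(result["data"]["data"]))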