From cbe19ea6066a600cbd0b5110db5d43f8252d14a8 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Thu, 13 Jun 2024 11:23:53 +0800
Subject: [PATCH] Improvements related to L-cancel and transaction progress

---
 third_data/data_server.py | 502 +++++++++++++++++++++++++++++++++++++++++++++----------
 1 files changed, 406 insertions(+), 96 deletions(-)

diff --git a/third_data/data_server.py b/third_data/data_server.py
index 6e25adc..1fba80d 100644
--- a/third_data/data_server.py
+++ b/third_data/data_server.py
@@ -1,18 +1,23 @@
 import http
 import json
+import logging
 import socketserver
+import threading
 import time
 from http.server import BaseHTTPRequestHandler

 import dask

-from utils import global_util, tool
+from code_attribute.gpcode_manager import BlackListCodeManager, CodePrePriceManager
+from l2.l2_transaction_data_manager import HuaXinBuyOrderManager
+from log_module.log import logger_system, logger_debug, logger_kpl_limit_up
+from utils import global_util, tool, data_export_util, init_data_util
 from code_attribute import gpcode_manager
-from logs_ import log, log_analyse
-from l2 import code_price_manager, l2_data_util
-from l2.cancel_buy_strategy import HourCancelBigNumComputer
+from log_module import log, log_analyse, log_export
+from l2 import code_price_manager, l2_data_util, l2_data_manager_new, cancel_buy_strategy, transaction_progress
+from cancel_strategy.s_l_h_cancel_strategy import HourCancelBigNumComputer, LCancelRateManager
 from output.limit_up_data_filter import IgnoreCodeManager
-from third_data import kpl_util, kpl_data_manager, kpl_api
-from third_data.code_plate_key_manager import RealTimeKplMarketData, KPLPlateForbiddenManager
+from third_data import kpl_util, kpl_data_manager, kpl_api, block_info
+from third_data.code_plate_key_manager import RealTimeKplMarketData, KPLPlateForbiddenManager, CodePlateKeyBuyManager
 from third_data.history_k_data_util import HistoryKDatasUtils
 from third_data.kpl_data_manager import KPLDataManager, KPLLimitUpDataRecordManager, \
     KPLCodeLimitUpReasonManager
@@ -21,8 +26,12 @@
 from urllib.parse import parse_qs

 from output import code_info_output, limit_up_data_filter, output_util, kp_client_msg_manager
-from trade import bidding_money_manager, trade_manager
-from trade.l2_trade_util import BlackListCodeManager
+from trade import bidding_money_manager, trade_manager, l2_trade_util, trade_record_log_util, trade_constant
+import concurrent.futures
+
+# Disable http.server's own request logging
+logger = logging.getLogger("http.server")
+logger.setLevel(logging.CRITICAL)


 class DataServer(BaseHTTPRequestHandler):
@@ -39,47 +48,55 @@
     # Cache for featured-plate and industry data
     __jingxuan_cache_dict = {}
     __industry_cache_dict = {}
+    __latest_limit_up_codes_set = set()
+    __data_process_thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=10)
+    # Cached price-change percentage per code
+    __code_limit_rate_dict = {}
+
+    # Suppress per-request log output
+    def log_message(self, format, *args):
+        pass

     def __get_limit_up_list(self):
         # Count the codes that have limited up so far (grouped by limit-up reason)
-        now_limit_up_codes_info = self.__kplDataManager.get_data(KPLDataType.LIMIT_UP)
-        limit_up_reason_dict = {}
-        if now_limit_up_codes_info:
-            for d in now_limit_up_codes_info:
-                if d[5] not in limit_up_reason_dict:
-                    limit_up_reason_dict[d[5]] = [0, 0]
-                limit_up_reason_dict[d[5]][0] += 1
-        # Get the number of want-to-buy codes under each limit-up reason
-        reason_map = self.__KPLCodeLimitUpReasonManager.list_all()
-        want_codes = gpcode_manager.WantBuyCodesManager.list_code()
-        # Want-to-buy codes with other reasons
-        other_count = 0
-        for k in reason_map:
-            reson = reason_map[k]
-            if k in want_codes and reson in limit_up_reason_dict:
-                limit_up_reason_dict[reson][1] += 1
-            elif k in want_codes:
-                other_count += 1
-
-        limit_up_reason_statistic_info = [(k, limit_up_reason_dict[k][0], limit_up_reason_dict[k][1]) for k in
-                                          limit_up_reason_dict]
-        limit_up_reason_statistic_info.sort(key=lambda x: x[1])
-        limit_up_reason_statistic_info.reverse()
-        if other_count > 0:
-            limit_up_reason_statistic_info.insert(0, ('其他', other_count, other_count))
-
         total_datas = KPLLimitUpDataRecordManager.total_datas
         if not total_datas:
             KPLLimitUpDataRecordManager.load_total_datas()
             total_datas = KPLLimitUpDataRecordManager.total_datas
         # Sort by limit-up time
         total_datas = list(total_datas)
+        current_reason_codes_dict = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()
+
+        # Count codes per limit-up reason
+        limit_up_reason_dict = {}
+        for d in total_datas:
+            if d[2] not in limit_up_reason_dict:
+                limit_up_reason_dict[d[2]] = []
+            limit_up_reason_dict[d[2]].append(d)
+        for k in limit_up_reason_dict:
+            limit_up_reason_dict[k].sort(key=lambda x: int(x[5]))
+        # Count want-to-buy orders
+        want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
+        limit_up_reason_want_count_dict = {}
+        for d in total_datas:
+            if d[2] not in limit_up_reason_want_count_dict:
+                limit_up_reason_want_count_dict[d[2]] = 0
+            if d[3] in want_codes:
+                limit_up_reason_want_count_dict[d[2]] += 1
+        # (plate name, limit-up code count, want-to-buy count, limit-up time)
+        limit_up_reason_statistic_info = [
+            (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5]) for
+            k in
+            limit_up_reason_dict]
+        limit_up_reason_statistic_info.sort(
+            key=lambda x: len(current_reason_codes_dict[x[0]]) if x[0] in current_reason_codes_dict else 0)
+        limit_up_reason_statistic_info.reverse()
         codes_set = set([d[3] for d in total_datas])
         # Determine dragon rank, whether still at limit-up, whether the board broke, and load scores
         rank_dict = limit_up_data_filter.get_limit_up_time_rank_dict(total_datas)
         limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
+        score_dict = {}
         fresult = []
         ignore_codes = self.__IgnoreCodeManager.list_ignore_codes("1")
@@ -87,7 +104,7 @@
         total_datas.reverse()

         # Load the limit-up reason change records
-        reason_changes = log.load_kpl_reason_changes()
+        reason_changes = log_export.load_kpl_reason_changes()
         reason_changes.reverse()
         reason_changes_dict = {}
         for r in reason_changes:
@@ -208,9 +225,9 @@
         # -- data preparation starts --
         codes_set = set([d[0] for d in temps])
         limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
-        want_codes = gpcode_manager.WantBuyCodesManager.list_code()
-        black_codes = BlackListCodeManager.list_codes()
+        score_dict = {}
+        want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
+        black_codes = BlackListCodeManager().list_codes()
         total_datas = KPLLimitUpDataRecordManager.total_datas
         code_info_dict = {}
         for val in total_datas:
@@ -288,20 +305,132 @@
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
             name = ps_dict.get('name')
+            date = ps_dict.get('date')
+            try:
+                data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict)
+                if data["code_name"].find("None") > -1 and name:
+                    data["code_name"] = f"{name} {code}"

-            data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict)
-            if data["code_name"].find("None") > -1 and name:
-                data["code_name"] = f"{name} {code}"
+                self.__history_plates_dict[code] = (time.time(),
data["kpl_code_info"]["code_records"]) + if "plate" in data["kpl_code_info"]: + self.__blocks_dict[code] = (time.time(), data["kpl_code_info"]["plate"]) - self.__history_plates_dict[code] = (time.time(), data["kpl_code_info"]["code_records"]) - if "plate" in data["kpl_code_info"]: - self.__blocks_dict[code] = (time.time(), data["kpl_code_info"]["plate"]) + response_data = json.dumps({"code": 0, "data": data}) + print("get_score_info 鑰楁椂锛�", time.time() - start_time) + except Exception as e: + logger_debug.exception(e) + logging.exception(e) - response_data = json.dumps({"code": 0, "data": data}) - print("get_score_info 鑰楁椂锛�", time.time()-start_time) + elif url.path == "/get_trade_records": + # 鑾峰彇鎸傛挙淇℃伅 + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + date = ps_dict.get('date') + local_today_datas = log_export.load_l2_from_log(date) + total_datas = local_today_datas.get(code) + trade_info = code_info_output.load_trade_record(code, total_datas, date) + response_data = json.dumps({"code": 0, "data": {"open_limit_up": trade_info[0], "records": trade_info[2]}}) - # 鑾峰彇璇勫垎淇℃伅 - pass + elif url.path == "/get_l2_cant_buy_reasons": + # 鑾峰彇L2娌′拱鐨勫師鍥� + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + fdatas = log_export.get_l2_cant_buy_reasons(code) + response_data = json.dumps({"code": 0, "data": fdatas}) + + elif url.path == "/get_kpl_block_info": + start_time = time.time() + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + try: + data = code_info_output.get_kpl_block_info(code) + response_data = json.dumps({"code": 0, "data": data}) + print("get_kpl_block_info 鑰楁椂锛�", time.time() - start_time) + except Exception as e: + logger_debug.exception(e) + logging.exception(e) + + elif url.path == "/get_l2_datas": + try: + # 鑾峰彇L2鐨勬暟鎹� + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + date = ps_dict.get('date') + time_str = ps_dict.get('time') + total_datas = l2_data_util.local_today_datas.get(code) + if date or time_str: + total_datas = None + else: + date = tool.get_now_date_str() + delegate_datas = data_export_util.get_l2_datas(code, total_datas, date=date) + transaction_datas = data_export_util.get_l2_transaction_datas(code, date=date) + code_name = gpcode_manager.get_code_name(code) + response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name, + "data": {"delegates": delegate_datas, + "transactions": transaction_datas}}}) + except Exception as e: + logger_debug.exception(e) + elif url.path == "/get_trade_progress": + # 鑾峰彇浜ゆ槗杩涘害 + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + trade_progress, is_default = transaction_progress.TradeBuyQueue().get_traded_index(code) + # 鑾峰彇姝e湪鎴愪氦, 璁$畻鎴愪氦杩涘害 + dealing_info = HuaXinBuyOrderManager.get_dealing_order_info(code) + percent = 100 + if dealing_info: + total_datas = l2_data_util.local_today_datas.get(code) + if str(total_datas[trade_progress]['val']["orderNo"]) == str(dealing_info[0]): + percent = int(dealing_info[1] / total_datas[trade_progress]['val']['num']) + response_data = json.dumps( + {"code": 0, "data": {"trade_progress": trade_progress, "is_default": is_default, "percent": percent}}) + elif url.path == "/get_l_cancel_datas": + # 鏈�鏂扮殑l鎾ゆ暟鎹� + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict['code'] + date = ps_dict.get('date') + if not date: + date = 
+            buy_single_index = ps_dict.get('buy_single_index')
+            if buy_single_index is not None:
+                buy_single_index = int(buy_single_index)
+            records = code_info_output.load_trade_record_cancel_watch_indexes(code, date=date)
+            # Get the latest L-up and L-down watch indexes
+            records.reverse()
+            up_indexes = []
+            down_indexes = []
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
+                    up_indexes = r[2]
+                    break
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
+                    down_indexes = r[2]
+                    break
+
+            response_data = json.dumps(
+                {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})
+        elif url.path == "/get_h_cancel_datas":
+            # Latest H-cancel data
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            buy_single_index = ps_dict.get('buy_single_index')
+            records = code_info_output.load_trade_record_cancel_watch_indexes(code,
+                                                                              trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_H)
+            # Get the latest watch indexes
+            records.reverse()
+            indexes = []
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                indexes = r[2]
+                break
+            response_data = json.dumps(
+                {"code": 0, "data": indexes})
         elif url.path == "/kpl/get_limit_up_list":
             response_data = self.__get_limit_up_list()
@@ -356,20 +485,77 @@
         elif url.path == "/kpl/get_plate_codes":
             # Get the codes under a limit-up reason
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            plate = ps_dict["plate"]
+            plate = kpl_util.filter_block(ps_dict["plate"])
             # Get the codes in this plate
             # Count the codes that have limited up so far (grouped by limit-up reason)
             now_limit_up_codes_info = self.__kplDataManager.get_data(KPLDataType.LIMIT_UP)
+            now_limit_up_codes = set([d[0] for d in now_limit_up_codes_info])
+            # Load the historical limit-up records
+            record_limit_up_datas = KPLLimitUpDataRecordManager.total_datas
+            if not record_limit_up_datas:
+                KPLLimitUpDataRecordManager.load_total_datas()
+                record_limit_up_datas = KPLLimitUpDataRecordManager.total_datas
+
             codes_info = []
-            for d in now_limit_up_codes_info:
-                if d[5] != plate:
-                    continue
-                codes_info.append([d[0], d[1], 0])
+            for d in record_limit_up_datas:
+                if kpl_util.filter_block(d[2]) != plate:
+                    continue
+                # code, name, limit-up time, board broken?, want to buy?, already ordered?, limit-up time, free-float market cap, blacklisted?
+                codes_info.append(
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 1, 1 if l2_trade_util.is_in_forbidden_trade_codes(d[3]) else 0])
+
+            for d in record_limit_up_datas:
+                if kpl_util.filter_block(d[2]) == plate:
+                    continue
+                if plate not in [kpl_util.filter_block(k) for k in d[6].split("、")]:
+                    continue
+                # code, name, limit-up time, board broken?, want to buy?, already ordered?, limit-up time, free-float market cap, blacklisted?
+                codes_info.append(
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 0, 1 if l2_trade_util.is_in_forbidden_trade_codes(d[3]) else 0])
+
+            codes_info.sort(key=lambda x: x[2])
             # Check whether each code is on the want-to-buy list
-            want_codes = gpcode_manager.WantBuyCodesManager.list_code()
+            want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
             for code_info in codes_info:
-                code_info[2] = 1 if code_info[0] in want_codes else 0
+                code_info[4] = 1 if code_info[0] in want_codes else 0
+                # Look up the code's trade state
+                if trade_manager.CodesTradeStateManager().get_trade_state_cache(
+                        code_info[0]) != trade_constant.TRADE_STATE_NOT_TRADE:
+                    code_info[5] = 1
+            response_data = json.dumps({"code": 0, "data": codes_info})
json.dumps({"code": 0, "data": codes_info}) + elif url.path == "/kpl/get_open_limit_up_count_rank": + # 鑾峰彇鐐告澘娆℃暟鎺掕 + ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) + code = ps_dict.get("code") + results = log_export.load_kpl_open_limit_up() + statistic = {} + for result in results: + for c in result[1]: + if not tool.is_can_buy_code(c): + continue + if code and code != c: + continue + if c not in statistic: + statistic[c] = 0 + statistic[c] += 1 + # 鍊掑簭鎺� + statistic_list = [(k, statistic[k]) for k in statistic] + statistic_list.sort(key=lambda x: x[1], reverse=True) + fresults = [] + limit_up_records = KPLLimitUpDataRecordManager.list_all_cache(tool.get_now_date_str()) + limit_up_count_dict = {} + if limit_up_records: + for d in limit_up_records: + limit_up_count_dict[d[3]] = d[12] + + for x in statistic_list: + fresults.append((x[0], gpcode_manager.get_code_name(x[0]), x[1], limit_up_count_dict.get(x[0]))) + + fresults = fresults[:30] + response_data = json.dumps({"code": 0, "data": fresults}) elif url.path == "/get_h_cancel_data": ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) code = ps_dict["code"] @@ -379,11 +565,11 @@ l2_data_util.load_l2_data(code) total_datas = l2_data_util.local_today_datas.get(code) - trade_state = trade_manager.get_trade_state(code) - if trade_state == trade_manager.TRADE_STATE_BUY_PLACE_ORDER or trade_state == trade_manager.TRADE_STATE_BUY_DELEGATED or trade_state == trade_manager.TRADE_STATE_BUY_SUCCESS: - hcancel_datas_dict, cancel_indexes_set = HourCancelBigNumComputer.get_watch_index_dict(code) + trade_state = trade_manager.CodesTradeStateManager().get_trade_state_cache(code) + if trade_state == trade_constant.TRADE_STATE_BUY_PLACE_ORDER or trade_state == trade_constant.TRADE_STATE_BUY_DELEGATED or trade_state == trade_constant.TRADE_STATE_BUY_SUCCESS: + hcancel_datas_dict, cancel_indexes_set = HourCancelBigNumComputer().get_watch_index_dict(code) # 鏍规嵁鏃ュ織璇诲彇瀹炴椂鐨勮绠楁暟鎹� - h_cancel_latest_compute_info = log.get_h_cancel_compute_info(code) + h_cancel_latest_compute_info = log_export.get_h_cancel_compute_info(code) if hcancel_datas_dict: temp_list = [(k, hcancel_datas_dict[k][0]) for k in hcancel_datas_dict] canceled_indexs = set([int(k.split("-")[0]) for k in cancel_indexes_set]) @@ -409,28 +595,86 @@ response_data = json.dumps({"code": 1, "msg": "璇蜂笂浼燾ode"}) elif url.path == "/get_last_trade_day_reasons": - # 鑾峰彇涓婁釜浜ゆ槗鏃ョ殑鐩稿悓娑ㄥ仠鍘熷洜鐨勪唬鐮佷俊鎭� - ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) - code = ps_dict["code"] - day = HistoryKDatasUtils.get_previous_trading_date(tool.get_now_date_str()) - # 鑾峰彇娑ㄥ仠鏁版嵁 - # 鑾峰彇浠g爜鐨勫師鍥� - reasons = kpl_data_manager.KPLLimitUpDataRecordManager.list_by_code(code, day) - if reasons: - reasons = list(reasons) - reasons.sort(key=lambda x: x[9]) - reason = reasons[-1][2] - datas = self.__kplDataManager.get_from_file(kpl_util.KPLDataType.LIMIT_UP, day) - # (浠g爜,鍚嶇О,棣栨娑ㄥ仠鏃堕棿,鏈�杩戞定鍋滄椂闂�,鍑犳澘,娑ㄥ仠鍘熷洜,鏉垮潡,瀹為檯娴侀��,涓诲姏鍑�棰�,娑ㄥ仠鍘熷洜浠g爜,娑ㄥ仠鍘熷洜浠g爜鏁伴噺) - result_list = [] - for d in datas: - if d[5] == reason and d[0] != code: - # (浠g爜,鍚嶇О) - result_list.append((d[0], d[1])) - response_data = json.dumps({"code": 0, "data": {"reason": reason, "data": result_list}}) - else: - response_data = json.dumps({"code": 1, "msg": "鏄ㄦ棩鏈定鍋�"}) + # 璁$畻骞冲潎娑ㄥ箙 + def get_limit_rate_list(codes): + if not codes: + return [] + need_request_codes = set() + if tool.trade_time_sub(tool.get_now_time_str(), "09:30:00") < 0: + need_request_codes |= set(codes) + else: + now_time = time.time() + for c in codes: + 
+                        if c not in self.__code_limit_rate_dict:
+                            need_request_codes.add(c)
+                        elif now_time - self.__code_limit_rate_dict[c][1] > 60:
+                            need_request_codes.add(c)
+                if need_request_codes:
+                    _limit_rate_list = HistoryKDatasUtils.get_codes_limit_rate(list(need_request_codes))
+                    for d in _limit_rate_list:
+                        self.__code_limit_rate_dict[d[0]] = (d[1], time.time())
+                return [(c_, self.__code_limit_rate_dict[c_][0]) for c_ in codes]
+
+            try:
+                raise Exception("接口暂停使用")
+                # Get the codes that limited up for the same reason on the previous trading day
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                code = ps_dict["code"]
+                # Load yesterday's limit-up data
+                day = HistoryKDatasUtils.get_previous_trading_date_cache(tool.get_now_date_str())
+
+                limit_up_records = kpl_data_manager.KPLLimitUpDataRecordManager.list_all_cache(day)
+                reasons = []
+                for d in limit_up_records:
+                    if d[3] == code:
+                        reasons.append(d)
+                # Get the code's limit-up reason
+                if reasons:
+                    reasons = list(reasons)
+                    reasons.sort(key=lambda x: x[9])
+                    reason = reasons[-1][2]
+                    # Load the limit-up data
+                    datas = self.__kplDataManager.get_from_file_cache(kpl_util.KPLDataType.LIMIT_UP, day)
+                    # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plate, actual float, main-force net inflow, reason code, reason code count)
+                    yesterday_result_list = []
+                    percent_rate = 0
+                    if datas:
+                        yesterday_codes = set()
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                        # Get the price gains
+                        limit_rate_list = get_limit_rate_list(yesterday_codes)
+                        limit_rate_dict = {}
+                        if limit_rate_list:
+                            total_rate = 0
+                            for d in limit_rate_list:
+                                limit_rate_dict[d[0]] = d[1]
+                                total_rate += d[1]
+                            percent_rate = round(total_rate / len(limit_rate_list), 2)
+
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                                if d[0] != code:
+                                    # (code, name, gain)
+                                    yesterday_result_list.append((d[0], d[1], limit_rate_dict.get(d[0])))
+
+                    current_limit_up_list = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas
+                    current_result_list = []
+                    if current_limit_up_list:
+                        for c in current_limit_up_list:
+                            if c[5] == reason and c[0] != code:
+                                current_result_list.append((c[0], c[1]))
+                    response_data = json.dumps({"code": 0, "data": {"reason": reason, "reason_rate": percent_rate,
+                                                                    "data": {"yesterday": yesterday_result_list,
+                                                                             "current": current_result_list}}})
+                else:
+                    response_data = json.dumps({"code": 1, "msg": "昨日未涨停"})
+            except Exception as e:
+                logger_debug.exception(e)
+                raise e

         elif url.path == "/pull_kp_client_msg":
             # Pull client messages
@@ -462,7 +706,67 @@
             result_str = self.__process_kpl_data(params)
             self.__send_response(result_str)

-    def __process_kpl_data(self, data):
+    def __process_kpl_data(self, data_origin):
+        def do_limit_up(result_list_):
+            try:
+                if result_list_:
+                    # Save the limit-up times
+                    codes_set = set()
+                    limit_up_reasons = {}
+                    for d in result_list_:
+                        code = d[0]
+                        limit_up_reasons[code] = d[5]
+                        codes_set.add(code)
+                        if tool.is_can_buy_code(code):
+                            limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
+                            code_price_manager.Buy1PriceManager().set_limit_up_time(code, limit_up_time)
+                    add_codes = codes_set - self.__latest_limit_up_codes_set
+                    self.__latest_limit_up_codes_set = codes_set
+
+                    if limit_up_reasons:
+                        # Count the number of stocks per limit-up reason
+                        limit_up_reason_code_dict = {}
+                        for code in limit_up_reasons:
+                            b = limit_up_reasons[code]
+                            if b not in limit_up_reason_code_dict:
+                                limit_up_reason_code_dict[b] = set()
+                            limit_up_reason_code_dict[b].add(code)
+                        LCancelRateManager.set_block_limit_up_count(limit_up_reason_code_dict)
+
+                    if add_codes:
+                        for code in add_codes:
+                            # Decide from the limit-up reason whether the code can be bought
+                            if tool.is_can_buy_code(code):
+                                try:
+                                    # Check whether an order has been placed
+                                    trade_state = trade_manager.CodesTradeStateManager().get_trade_state(code)
+                                    if trade_state == trade_constant.TRADE_STATE_BUY_PLACE_ORDER or trade_state == trade_constant.TRADE_STATE_BUY_DELEGATED:
+                                        # The order is still pending; decide whether it needs to be cancelled
+                                        if not gpcode_manager.WantBuyCodesManager().is_in_cache(code):
+                                            yesterday_codes = kpl_data_manager.get_yesterday_limit_up_codes()
+                                            current_limit_up_datas, limit_up_record_datas, yesterday_current_limit_up_codes, before_blocks_dict = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas, kpl_data_manager.KPLLimitUpDataRecordManager.total_datas, yesterday_codes, block_info.get_before_blocks_dict()
+                                            if not current_limit_up_datas:
+                                                current_limit_up_datas = []
+                                            if not limit_up_record_datas:
+                                                limit_up_record_datas = []
+                                            # Buy only the absolute leader
+                                            # Must not cancel midway
+                                            # if CodePlateKeyBuyManager.is_need_cancel(code, limit_up_reasons.get(code),
+                                            #                                          current_limit_up_datas,
+                                            #                                          limit_up_record_datas,
+                                            #                                          yesterday_current_limit_up_codes,
+                                            #                                          before_blocks_dict):
+                                            #     l2_data_manager_new.L2TradeDataProcessor.cancel_buy(code,
+                                            #                                                         f"涨停原因（{limit_up_reasons.get(code)}）不是老大撤单",
+                                            #                                                         "板块撤")
+                                except Exception as e:
+                                    logger_debug.exception(e)
+                    kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list_)
+                    self.__kplDataManager.save_data(type_, result_list_)
+            except Exception as e:
+                logger_debug.exception(e)
+
+        # Replace the word "概念" (concept) in the data
+        data = data_origin
         type_ = data["type"]
         print("开盘啦type:", type_)
         if type_ == KPLDataType.BIDDING.value:
@@ -479,16 +783,10 @@
             self.__kplDataManager.save_data(type_, result_list)

         elif type_ == KPLDataType.LIMIT_UP.value:
-            result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_LIMIT_UP)
-            if result_list:
-                # Save the limit-up times
-                for d in result_list:
-                    code = d[0]
-                    if code.find("00") == 0 or code.find("60") == 0:
-                        limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
-                        code_price_manager.Buy1PriceManager.set_limit_up_time(code, limit_up_time)
-                self.__kplDataManager.save_data(type_, result_list)
-            kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list)
+            result_list = kpl_util.parseLimitUpData(data["data"])
+            self.__data_process_thread_pool.submit(lambda: do_limit_up(result_list))
+            # Log the limit-up data
+            logger_kpl_limit_up.info(result_list)
         elif type_ == KPLDataType.OPEN_LIMIT_UP.value:
             result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_OPEN_LIMIT_UP)
             if result_list:
@@ -553,13 +851,25 @@
 def run(addr, port):
     # Run the market-watch (KPL client) message capture
-    kp_client_msg_manager.run_capture()
+    # kp_client_msg_manager.run_capture()
+    kpl_data_manager.PullTask.run_pull_task()
+
     handler = DataServer
     # httpd = socketserver.TCPServer((addr, port), handler)
-    httpd = ThreadedHTTPServer((addr, port), handler)
-    print("HTTP server is at: http://%s:%d/" % (addr, port))
-    httpd.serve_forever()
+    try:
+        httpd = ThreadedHTTPServer((addr, port), handler)
+        print("HTTP server is at: http://%s:%d/" % (addr, port))
+        httpd.serve_forever()
+    except Exception as e:
+        logger_system.exception(e)
+        logger_system.error(f"端口服务器：{port} 启动失败")


 if __name__ == "__main__":
-    run("0.0.0.0", 9004)
+    code = "603359"
+    records = code_info_output.load_trade_record_cancel_watch_indexes(code,
+                                                                      trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_H)
+    print(records)
+
+    # data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict,
+    #                                           trade_record_date=date)

--
Gitblit v1.8.0