From a00da3062c6c825b585f82275823ac45cdeb6502 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Fri, 19 Jan 2024 19:16:20 +0800
Subject: [PATCH] Cancel orders filled too quickly after L
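
This patch adds several read-only query endpoints to third_data/data_server.py,
including /get_l_cancel_datas and /get_h_cancel_datas for the latest cancel
watch indexes, and moves limit-up processing onto a thread pool.

A minimal client sketch for the new L-cancel endpoint follows. The host/port
("127.0.0.1", 9004) and the sample code/buy_single_index values are assumptions
taken from the original run() call and the test block at the bottom of this
patch; they may differ in deployment.

    import json
    from urllib.request import urlopen

    # Query the latest L-up/L-down cancel watch indexes for a code.
    # buy_single_index is optional and filters records by buy signal index.
    url = ("http://127.0.0.1:9004/get_l_cancel_datas"
           "?code=002676&buy_single_index=716")
    with urlopen(url, timeout=5) as resp:
        payload = json.loads(resp.read().decode("utf-8"))
    print(payload["data"]["up"], payload["data"]["down"])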

---
 third_data/data_server.py |  363 +++++++++++++++++++++++++++++++++++++++++++--------
 1 files changed, 303 insertions(+), 60 deletions(-)

diff --git a/third_data/data_server.py b/third_data/data_server.py
index 210f7a7..b179ff9 100644
--- a/third_data/data_server.py
+++ b/third_data/data_server.py
@@ -1,18 +1,22 @@
 import http
 import json
+import logging
 import socketserver
+import threading
 import time
 from http.server import BaseHTTPRequestHandler
 import dask
 
-from utils import global_util, tool
+from code_attribute.gpcode_manager import BlackListCodeManager, CodePrePriceManager
+from log_module.log import logger_system, logger_debug, logger_kpl_limit_up
+from utils import global_util, tool, data_export_util, init_data_util
 from code_attribute import gpcode_manager
 from log_module import log, log_analyse, log_export
-from l2 import code_price_manager, l2_data_util
+from l2 import code_price_manager, l2_data_util, l2_data_manager_new, cancel_buy_strategy, transaction_progress
 from l2.cancel_buy_strategy import HourCancelBigNumComputer
 from output.limit_up_data_filter import IgnoreCodeManager
-from third_data import kpl_util, kpl_data_manager, kpl_api
-from third_data.code_plate_key_manager import RealTimeKplMarketData, KPLPlateForbiddenManager
+from third_data import kpl_util, kpl_data_manager, kpl_api, block_info
+from third_data.code_plate_key_manager import RealTimeKplMarketData, KPLPlateForbiddenManager, CodePlateKeyBuyManager
 from third_data.history_k_data_util import HistoryKDatasUtils
 from third_data.kpl_data_manager import KPLDataManager, KPLLimitUpDataRecordManager, \
     KPLCodeLimitUpReasonManager
@@ -21,8 +25,12 @@
 from urllib.parse import parse_qs
 from output import code_info_output, limit_up_data_filter, output_util, kp_client_msg_manager
 
-from trade import bidding_money_manager, trade_manager
-from trade.l2_trade_util import BlackListCodeManager
+from trade import bidding_money_manager, trade_manager, l2_trade_util, trade_record_log_util
+import concurrent.futures
+
+# Disable http.server's request log output
+logger = logging.getLogger("http.server")
+logger.setLevel(logging.CRITICAL)
 
 
 class DataServer(BaseHTTPRequestHandler):
@@ -39,7 +47,14 @@
     # Jingxuan (featured) and industry data caches
     __jingxuan_cache_dict = {}
     __industry_cache_dict = {}
+    __latest_limit_up_codes_set = set()
+    __data_process_thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=10)
+    # Price change rate of each code
+    __code_limit_rate_dict = {}
 
+    # Disable request log output
+    def log_message(self, format, *args):
+        pass
 
     def __get_limit_up_list(self):
         # 缁熻鐩墠涓烘鐨勪唬鐮佹定鍋滄暟閲忥紙鍒嗘定鍋滃師鍥狅級
@@ -49,6 +64,7 @@
             total_datas = KPLLimitUpDataRecordManager.total_datas
         # Sort by limit-up time
         total_datas = list(total_datas)
+        current_reason_codes_dict = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()
 
         # 缁熻娑ㄥ仠鍘熷洜
         limit_up_reason_dict = {}
@@ -71,13 +87,15 @@
             (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5]) for
             k in
             limit_up_reason_dict]
-        limit_up_reason_statistic_info.sort(key=lambda x: int(x[3]))
+        limit_up_reason_statistic_info.sort(
+            key=lambda x: len(current_reason_codes_dict[x[0]]) if x[0] in current_reason_codes_dict else 0)
+        limit_up_reason_statistic_info.reverse()
 
         codes_set = set([d[3] for d in total_datas])
         # Determine dragon rank, whether limit-up, whether the board broke open, and load scores
         rank_dict = limit_up_data_filter.get_limit_up_time_rank_dict(total_datas)
         limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
+        score_dict = {}
         fresult = []
         ignore_codes = self.__IgnoreCodeManager.list_ignore_codes("1")
 
@@ -206,7 +224,7 @@
             # -- Data preparation starts --
             codes_set = set([d[0] for d in temps])
             limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
-            score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
+            score_dict = {}
             want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
             black_codes = BlackListCodeManager().list_codes()
             total_datas = KPLLimitUpDataRecordManager.total_datas
@@ -286,20 +304,96 @@
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
             code = ps_dict['code']
             name = ps_dict.get('name')
+            try:
+                data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict)
+                if data["code_name"].find("None") > -1 and name:
+                    data["code_name"] = f"{name} {code}"
 
-            data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict)
-            if data["code_name"].find("None") > -1 and name:
-                data["code_name"] = f"{name} {code}"
+                self.__history_plates_dict[code] = (time.time(), data["kpl_code_info"]["code_records"])
+                if "plate" in data["kpl_code_info"]:
+                    self.__blocks_dict[code] = (time.time(), data["kpl_code_info"]["plate"])
 
-            self.__history_plates_dict[code] = (time.time(), data["kpl_code_info"]["code_records"])
-            if "plate" in data["kpl_code_info"]:
-                self.__blocks_dict[code] = (time.time(), data["kpl_code_info"]["plate"])
-
-            response_data = json.dumps({"code": 0, "data": data})
-            print("get_score_info 鑰楁椂锛�", time.time() - start_time)
+                response_data = json.dumps({"code": 0, "data": data})
+                print("get_score_info 鑰楁椂锛�", time.time() - start_time)
+            except Exception as e:
+                logger_debug.exception(e)
+                logging.exception(e)
 
             # Get score info
             pass
+        elif url.path == "/get_kpl_block_info":
+            start_time = time.time()
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            try:
+                data = code_info_output.get_kpl_block_info(code)
+                response_data = json.dumps({"code": 0, "data": data})
+                print("get_kpl_block_info 鑰楁椂锛�", time.time() - start_time)
+            except Exception as e:
+                logger_debug.exception(e)
+                logging.exception(e)
+
+        elif url.path == "/get_l2_datas":
+            try:
+                # Get L2 data
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                code = ps_dict['code']
+                datas = data_export_util.get_l2_datas(code)
+                code_name = gpcode_manager.get_code_name(code)
+                response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name, "data": datas}})
+            except Exception as e:
+                logger_debug.exception(e)
+        elif url.path == "/get_trade_progress":
+            # Get trade progress
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            trade_progress, is_default = transaction_progress.TradeBuyQueue().get_traded_index(code)
+            response_data = json.dumps(
+                {"code": 0, "data": {"trade_progress": trade_progress, "is_default": is_default}})
+        elif url.path == "/get_l_cancel_datas":
+            # Latest L-cancel data
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            buy_single_index = ps_dict.get('buy_single_index')
+            if buy_single_index is not None:
+                buy_single_index = int(buy_single_index)
+            records = code_info_output.load_trade_record_cancel_watch_indexes(code)
+            # Get the latest L-up and L-down watch indexes
+            records.reverse()
+            up_indexes = []
+            down_indexes = []
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
+                    up_indexes = r[2]
+                    break
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
+                    down_indexes = r[2]
+                    break
+
+            response_data = json.dumps(
+                {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})
+        elif url.path == "/get_h_cancel_datas":
+            # Latest H-cancel data
+            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+            code = ps_dict['code']
+            buy_single_index = ps_dict.get('buy_single_index')
+            records = code_info_output.load_trade_record_cancel_watch_indexes(code,
+                                                                              trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_H)
+            # Get the latest H-cancel watch indexes
+            records.reverse()
+            indexes = []
+            for r in records:
+                if buy_single_index and buy_single_index != r[1]:
+                    continue
+                indexes = r[2]
+                break
+            response_data = json.dumps(
+                {"code": 0, "data": indexes})
         elif url.path == "/kpl/get_limit_up_list":
             response_data = self.__get_limit_up_list()
 
@@ -354,7 +448,7 @@
         elif url.path == "/kpl/get_plate_codes":
             # Get the codes under a limit-up reason
             ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            plate = ps_dict["plate"]
+            plate = kpl_util.filter_block(ps_dict["plate"])
             # Get the codes under the plate
             # Count the limit-up codes so far (grouped by limit-up reason)
             now_limit_up_codes_info = self.__kplDataManager.get_data(KPLDataType.LIMIT_UP)
@@ -367,18 +461,31 @@
 
             codes_info = []
             for d in record_limit_up_datas:
-                if d[2] != plate:
+                if kpl_util.filter_block(d[2]) != plate:
                     continue
                 # code, name, limit-up time, board broke open, want-to-buy, already ordered
                 codes_info.append(
-                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0])
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 1])
+
+            for d in record_limit_up_datas:
+                if kpl_util.filter_block(d[2]) == plate:
+                    continue
+                if plate not in [kpl_util.filter_block(k) for k in d[6].split("、")]:
+                    continue
+                # code, name, limit-up time, board broke open, want-to-buy, already ordered
+                codes_info.append(
+                    [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12],
+                     output_util.money_desc(d[13]), 0])
+
             codes_info.sort(key=lambda x: x[2])
             # Check whether it is on the want-to-buy list
             want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
             for code_info in codes_info:
                 code_info[4] = 1 if code_info[0] in want_codes else 0
                 # Get the code's trade state
-                if trade_manager.CodesTradeStateManager().get_trade_state_cache(code_info[0]) != trade_manager.TRADE_STATE_NOT_TRADE:
+                if trade_manager.CodesTradeStateManager().get_trade_state_cache(
+                        code_info[0]) != trade_manager.TRADE_STATE_NOT_TRADE:
                     code_info[5] = 1
 
             response_data = json.dumps({"code": 0, "data": codes_info})
@@ -421,29 +528,86 @@
                 response_data = json.dumps({"code": 1, "msg": "璇蜂笂浼燾ode"})
 
         elif url.path == "/get_last_trade_day_reasons":
-            # Get codes with the same limit-up reason on the previous trading day
-            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
-            code = ps_dict["code"]
-            day = HistoryKDatasUtils.get_previous_trading_date(tool.get_now_date_str())
-            # Get limit-up data
 
-            # Get the code's limit-up reason
-            reasons = kpl_data_manager.KPLLimitUpDataRecordManager.list_by_code(code, day)
-            if reasons:
-                reasons = list(reasons)
-                reasons.sort(key=lambda x: x[9])
-                reason = reasons[-1][2]
-                datas = self.__kplDataManager.get_from_file(kpl_util.KPLDataType.LIMIT_UP, day)
-                # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plate, actual float, main-force net inflow, limit-up reason code, limit-up reason code count)
-                result_list = []
-                if datas:
-                    for d in datas:
-                        if d[5] == reason and d[0] != code:
-                            # (code, name)
-                            result_list.append((d[0], d[1]))
-                response_data = json.dumps({"code": 0, "data": {"reason": reason, "data": result_list}})
-            else:
-                response_data = json.dumps({"code": 1, "msg": "No limit-up yesterday"})
+            # Compute the average price change rate
+            def get_limit_rate_list(codes):
+                if not codes:
+                    return []
+                need_request_codes = set()
+                if tool.trade_time_sub(tool.get_now_time_str(), "09:30:00") < 0:
+                    need_request_codes |= set(codes)
+                else:
+                    now_time = time.time()
+                    for c in codes:
+                        if c not in self.__code_limit_rate_dict:
+                            need_request_codes.add(c)
+                        elif now_time - self.__code_limit_rate_dict[c][1] > 60:
+                            need_request_codes.add(c)
+                if need_request_codes:
+                    _limit_rate_list = HistoryKDatasUtils.get_codes_limit_rate(list(need_request_codes))
+                    for d in _limit_rate_list:
+                        self.__code_limit_rate_dict[d[0]] = (d[1], time.time())
+                return [(c_, self.__code_limit_rate_dict[c_][0]) for c_ in codes]
+
+            try:
+                raise Exception("Interface temporarily suspended")
+                # Get codes with the same limit-up reason on the previous trading day
+                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
+                code = ps_dict["code"]
+                # Get yesterday's limit-up data
+                day = HistoryKDatasUtils.get_previous_trading_date_cache(tool.get_now_date_str())
+
+                limit_up_records = kpl_data_manager.KPLLimitUpDataRecordManager.list_all_cache(day)
+                reasons = []
+                for d in limit_up_records:
+                    if d[3] == code:
+                        reasons.append(d)
+                # Get the code's limit-up reason
+                if reasons:
+                    reasons = list(reasons)
+                    reasons.sort(key=lambda x: x[9])
+                    reason = reasons[-1][2]
+                    # Get limit-up data
+                    datas = self.__kplDataManager.get_from_file_cache(kpl_util.KPLDataType.LIMIT_UP, day)
+                    # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plate, actual float, main-force net inflow, limit-up reason code, limit-up reason code count)
+                    yesterday_result_list = []
+                    percent_rate = 0
+                    if datas:
+                        yesterday_codes = set()
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                        # Get price change rates
+                        limit_rate_list = get_limit_rate_list(yesterday_codes)
+                        limit_rate_dict = {}
+                        if limit_rate_list:
+                            total_rate = 0
+                            for d in limit_rate_list:
+                                limit_rate_dict[d[0]] = d[1]
+                                total_rate += d[1]
+                            percent_rate = round(total_rate / len(limit_rate_list), 2)
+
+                        for d in datas:
+                            if d[5] == reason:
+                                yesterday_codes.add(d[0])
+                                if d[0] != code:
+                                    # (code, name, change rate)
+                                    yesterday_result_list.append((d[0], d[1], limit_rate_dict.get(d[0])))
+
+                    current_limit_up_list = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas
+                    current_result_list = []
+                    if current_limit_up_list:
+                        for c in current_limit_up_list:
+                            if c[5] == reason and c[0] != code:
+                                current_result_list.append((c[0], c[1]))
+                    response_data = json.dumps({"code": 0, "data": {"reason": reason, "reason_rate": percent_rate,
+                                                                    "data": {"yesterday": yesterday_result_list,
+                                                                             "current": current_result_list}}})
+                else:
+                    response_data = json.dumps({"code": 1, "msg": "No limit-up yesterday"})
+            except Exception as e:
+                logger_debug.exception(e)
+                raise e
 
         elif url.path == "/pull_kp_client_msg":
             # Pull client messages
@@ -475,7 +639,65 @@
             result_str = self.__process_kpl_data(params)
             self.__send_response(result_str)
 
-    def __process_kpl_data(self, data):
+    def __process_kpl_data(self, data_origin):
+        def do_limit_up(result_list_):
+            if result_list_:
+                # Save limit-up times
+                codes_set = set()
+                limit_up_reasons = {}
+                for d in result_list_:
+                    code = d[0]
+                    limit_up_reasons[code] = d[5]
+                    codes_set.add(code)
+                    if tool.is_shsz_code(code):
+                        limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
+                        code_price_manager.Buy1PriceManager().set_limit_up_time(code, limit_up_time)
+                add_codes = codes_set - self.__latest_limit_up_codes_set
+                self.__latest_limit_up_codes_set = codes_set
+
+                if limit_up_reasons:
+                    # Count the number of codes per limit-up reason
+                    limit_up_reason_code_dict = {}
+                    for code in limit_up_reasons:
+                        b = limit_up_reasons[code]
+                        if b not in limit_up_reason_code_dict:
+                            limit_up_reason_code_dict[b] = set()
+                        limit_up_reason_code_dict[b].add(code)
+                    cancel_buy_strategy.LCancelRateManager.set_block_limit_up_count(limit_up_reason_code_dict)
+
+                if add_codes:
+                    for code in add_codes:
+                        # Decide from the limit-up reason whether the code can be bought
+                        if tool.is_shsz_code(code):
+                            try:
+                                # Check whether an order has been placed
+                                trade_state = trade_manager.CodesTradeStateManager().get_trade_state(code)
+                                if trade_state == trade_manager.TRADE_STATE_BUY_PLACE_ORDER or trade_state == trade_manager.TRADE_STATE_BUY_DELEGATED:
+                                    # For pending (delegated) orders, decide whether to cancel
+                                    if not gpcode_manager.WantBuyCodesManager().is_in_cache(code):
+                                        yesterday_codes = kpl_data_manager.get_yesterday_limit_up_codes()
+                                        current_limit_up_datas, limit_up_record_datas, yesterday_current_limit_up_codes, before_blocks_dict = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas, kpl_data_manager.KPLLimitUpDataRecordManager.total_datas, yesterday_codes, block_info.get_before_blocks_dict()
+                                        if not current_limit_up_datas:
+                                            current_limit_up_datas = []
+                                        if not limit_up_record_datas:
+                                            limit_up_record_datas = []
+                                        # Buy only the absolute leader
+                                        # Must not cancel midway
+                                        # if CodePlateKeyBuyManager.is_need_cancel(code, limit_up_reasons.get(code),
+                                        #                                          current_limit_up_datas,
+                                        #                                          limit_up_record_datas,
+                                        #                                          yesterday_current_limit_up_codes,
+                                        #                                          before_blocks_dict):
+                                        #     l2_data_manager_new.L2TradeDataProcessor.cancel_buy(code,
+                                        #                                                         f"Limit-up reason ({limit_up_reasons.get(code)}) is not the leader, cancel",
+                                        #                                                         "Plate cancel")
+                            except Exception as e:
+                                logger_debug.exception(e)
+                kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list_)
+                self.__kplDataManager.save_data(type_, result_list_)
+
+        # Replace the two characters "概念" (concept)
+        data = data_origin
         type_ = data["type"]
         print("寮�鐩樺暒type:", type_)
         if type_ == KPLDataType.BIDDING.value:
@@ -492,16 +714,10 @@
 
                 self.__kplDataManager.save_data(type_, result_list)
         elif type_ == KPLDataType.LIMIT_UP.value:
-            result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_LIMIT_UP)
-            if result_list:
-                # Save limit-up times
-                for d in result_list:
-                    code = d[0]
-                    if code.find("00") == 0 or code.find("60") == 0:
-                        limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
-                        code_price_manager.Buy1PriceManager().set_limit_up_time(code, limit_up_time)
-                self.__kplDataManager.save_data(type_, result_list)
-                kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list)
+            result_list = kpl_util.parseLimitUpData(data["data"])
+            self.__data_process_thread_pool.submit(lambda: do_limit_up(result_list))
+            # Log the limit-up data
+            logger_kpl_limit_up.info(result_list)
         elif type_ == KPLDataType.OPEN_LIMIT_UP.value:
             result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_OPEN_LIMIT_UP)
             if result_list:
@@ -566,13 +782,40 @@
 
 def run(addr, port):
     # Run market-watch message capture
-    kp_client_msg_manager.run_capture()
+    # kp_client_msg_manager.run_capture()
+    kpl_data_manager.run_pull_task()
+
     handler = DataServer
     # httpd = socketserver.TCPServer((addr, port), handler)
-    httpd = ThreadedHTTPServer((addr, port), handler)
-    print("HTTP server is at: http://%s:%d/" % (addr, port))
-    httpd.serve_forever()
+    try:
+        httpd = ThreadedHTTPServer((addr, port), handler)
+        print("HTTP server is at: http://%s:%d/" % (addr, port))
+        httpd.serve_forever()
+    except Exception as e:
+        logger_system.exception(e)
+        logger_system.error(f"Server on port {port} failed to start")
 
 
 if __name__ == "__main__":
-    run("0.0.0.0", 9004)
+    code = "002676"
+    buy_single_index = 716
+    records = code_info_output.load_trade_record_cancel_watch_indexes(code)
+    # Get the latest L-up and L-down watch indexes
+    records.reverse()
+    up_indexes = []
+    down_indexes = []
+    for r in records:
+        if buy_single_index and buy_single_index != r[1]:
+            continue
+        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
+            up_indexes = r[2]
+            break
+    for r in records:
+        if buy_single_index and buy_single_index != r[1]:
+            continue
+        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
+            down_indexes = r[2]
+            break
+
+    response_data = json.dumps(
+        {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})

--
Gitblit v1.8.0