From f4333824e6fc82eb7e6c0ff536f31baf6c7c8df2 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Thu, 26 Jun 2025 14:21:47 +0800
Subject: [PATCH] Cap the natural maximum cancel rate after L at 80%

---
 third_data/kpl_data_manager.py |  469 +++++++++++++++++++++++++++++++++++++++++++++++++++++-----
 1 files changed, 428 insertions(+), 41 deletions(-)

diff --git a/third_data/kpl_data_manager.py b/third_data/kpl_data_manager.py
index ebcb14a..4c094cd 100644
--- a/third_data/kpl_data_manager.py
+++ b/third_data/kpl_data_manager.py
@@ -1,19 +1,27 @@
+import copy
 import json
+import logging
 import os
+import threading
+import time
+
+import requests
 
 import constant
+from db.redis_manager_delegate import RedisUtils
+from log_module import async_log_util, log
+from third_data.kpl_data_constant import LimitUpDataConstant, TodayLimitUpReasonChangeManager
 from utils import tool
 
 # KaiPanLa historical limit-up data management
-from db import mysql_data, redis_manager
-from log_module.log import logger_kpl_limit_up_reason_change
+from db import mysql_data_delegate as mysql_data, redis_manager_delegate as redis_manager
+from log_module.log import logger_kpl_limit_up_reason_change, logger_debug, logger_kpl_limit_up, \
+    logger_kpl_open_limit_up
 from third_data import kpl_util, kpl_api
 from third_data.code_plate_key_manager import LimitUpCodesPlateKeyManager, CodesHisReasonAndBlocksManager
 
 # Persist the limit-up reason for each code
 from third_data.kpl_util import KPLPlatManager, KPLDataType
-
-
 
 
 class KPLCodeLimitUpReasonManager:
@@ -23,13 +31,13 @@
         return self.__redisManager.getRedis()
 
     def save_reason(self, code, reason):
-        self.__get_redis().setex(f"kpl_limitup_reason-{code}", tool.get_expire(), reason)
+        RedisUtils.setex(self.__get_redis(), f"kpl_limitup_reason-{code}", tool.get_expire(), reason)
 
     def list_all(self):
-        keys = self.__get_redis().keys("kpl_limitup_reason-*")
+        keys = RedisUtils.keys(self.__get_redis(), "kpl_limitup_reason-*")
         dict_ = {}
         for k in keys:
-            val = self.__get_redis().get(k)
+            val = RedisUtils.get(self.__get_redis(), k)
             dict_[k.split("-")[1]] = val
         return dict_
 
@@ -41,6 +49,13 @@
     __kplPlatManager = KPLPlatManager()
     __LimitUpCodesPlateKeyManager = LimitUpCodesPlateKeyManager()
     __CodesPlateKeysManager = CodesHisReasonAndBlocksManager()
+    __current_code_reasons_dict = {}
+    # Codes grouped by current limit-up reason + recommended reasons
+    __current_reason_codes_dict = {}
+    # Codes grouped by current limit-up reason only
+    __current_limit_up_reason_codes_dict = {}
+    __records_cache = {}
+    record_code_dict = {}
 
     @classmethod
     def __load_hist_and_blocks(cls, code):
@@ -57,10 +72,31 @@
             pass
 
     @classmethod
-    def save_record(cls, day, records):
+    def save_record(cls, day, records, set_not_open=False):
+        """
+        @param day:
+        @param records:
+        @param set_not_open: whether to mark these records as not having opened (broken) the limit-up board
+        @return:
+        """
+        # Count opened (broken) limit-up boards
+        try:
+            last_codes = set()
+            if cls.latest_origin_datas:
+                last_codes = set([x[0] for x in cls.latest_origin_datas])
+            now_codes = set()
+            if records:
+                now_codes = set([x[0] for x in records])
+            open_limit_up_codes = last_codes - now_codes
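+            # Codes in the previous snapshot that are missing from the current one have opened their board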
+            if open_limit_up_codes:
+                logger_kpl_open_limit_up.info(f"Opened-board codes: {open_limit_up_codes}")
+        except Exception as e:
+            pass
+
         # 缁熻浠g爜鎵�灞炴澘鍧�
         code_block_dict = {}
         for data in records:
+            cls.record_code_dict[data[0]] = data
             blocks = set(data[5].split("、"))
             code = data[0]
             for b in blocks:
@@ -70,7 +106,30 @@
                 # Set the limit-up data
         if records:
             cls.latest_origin_datas = records
-            cls.__LimitUpCodesPlateKeyManager.set_today_limit_up([(r[0], r[5]) for r in records])
+            cls.__LimitUpCodesPlateKeyManager.set_today_limit_up(
+                [(r[0], r[5], r[6].split('、') if r[6] else []) for r in records])
+            LimitUpDataConstant.set_current_limit_up_datas(records)
+
+        code_reasons_dict = {}
+        reason_codes_dict = {}
+        limit_up_reason_codes_dict = {}
+        for d in records:
+            if d[5] not in limit_up_reason_codes_dict:
+                limit_up_reason_codes_dict[d[5]] = set()
+            limit_up_reason_codes_dict[d[5]].add(d[0])
+
+            # Limit-up reason + recommended reasons
+            bs = {d[5]}
+            if d[6]:
+                bs |= set(d[6].split("、"))
+            code_reasons_dict[d[0]] = bs
+            for b in bs:
+                if b not in reason_codes_dict:
+                    reason_codes_dict[b] = set()
+                reason_codes_dict[b].add(d[0])
+        cls.__current_code_reasons_dict = code_reasons_dict
+        cls.__current_reason_codes_dict = reason_codes_dict
+        cls.__current_limit_up_reason_codes_dict = limit_up_reason_codes_dict
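+        # Illustrative (hypothetical values): for a record d where d[5] is the hot block and d[6] lists
+        # recommended blocks separated by '、', code_reasons_dict maps d[0] to all of those block names,
+        # while reason_codes_dict maps each block name back to the set of codes carrying it.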
 
         # Record the limit-up data
         mysqldb = mysql_data.Mysqldb()
@@ -89,13 +148,17 @@
             result = mysqldb.select_one("select * from kpl_limit_up_record where _id='{}'".format(_id))
             if not result:
                 mysqldb.execute(
-                    f"insert into kpl_limit_up_record(_id,_day,_hot_block_name,_code,_code_name,_limit_up_time,_blocks,_latest_limit_up_time,_update_time,_create_time,_hot_block_code_count,_limit_up_high_info,_zylt_val) values('{_id}','{day}','{d[5]}','{d[0]}','{d[1]}','{d[2]}','{d[6]}','{d[3]}',now(),now(),{d[10]},'{d[4]}',{d[7]})")
+                    f"insert into kpl_limit_up_record(_id,_day,_hot_block_name,_code,_code_name,_limit_up_time,_blocks,_latest_limit_up_time,_update_time,_create_time,_hot_block_code_count,_limit_up_high_info,_zylt_val,_hot_block_code) values('{_id}','{day}','{d[5]}','{d[0]}','{d[1]}','{d[2]}','{d[6]}','{d[3]}',now(),now(),{d[10]},'{d[4]}',{d[7]},{d[9]})")
                 cls.__load_hist_and_blocks(code)
             else:
                 if _id in cls.latest_datas and json.dumps(cls.latest_datas.get(_id)) != json.dumps(d):
                     mysqldb.execute(
                         f"update kpl_limit_up_record set _latest_limit_up_time='{d[3]}',_limit_up_time='{d[2]}',_hot_block_code_count={d[10]},_limit_up_high_info='{d[4]}' ,_update_time=now() where _id='{_id}'")
                     cls.latest_datas[_id] = d
+            if set_not_open:
+                # Mark this record as not having opened the board
+                mysqldb.execute(f"update kpl_limit_up_record set _open = 0, _update_time = now() where _id='{_id}'")
+
             cls.latest_datas[_id] = d
 
             # Get the blocks the code previously belonged to and drop the earlier incorrect blocks
@@ -108,25 +171,38 @@
                         # The block has been changed
                         mysqldb.execute(
                             f"update kpl_limit_up_record set _hot_block_change = f'{dd[2]}' where _day='{dd[1]}' and _code='{code}'")
-
-                        cls.__LimitUpCodesPlateKeyManager.set_today_limit_up_reason_change(code, dd[2],
+                        TodayLimitUpReasonChangeManager().set_today_limit_up_reason_change(code, dd[2],
                                                                                            code_block_dict[code])
 
                         if dd[0] in cls.latest_datas:
                             cls.latest_datas.pop(dd[0])
         cls.total_datas = KPLLimitUpDataRecordManager.list_all(tool.get_now_date_str())
+        LimitUpDataConstant.set_history_limit_up_datas(cls.total_datas)
 
     @classmethod
     def load_total_datas(cls):
         cls.total_datas = KPLLimitUpDataRecordManager.list_all(tool.get_now_date_str())
-        cls.__LimitUpCodesPlateKeyManager.set_today_total_limit_up([(r[3], r[2]) for r in cls.total_datas])
+        cls.__LimitUpCodesPlateKeyManager.set_today_total_limit_up(
+            [(r[3], r[2], r[6].split("銆�") if r[6] else []) for r in cls.total_datas])
         for d in cls.total_datas:
             cls.__load_hist_and_blocks(d[3])
 
     @staticmethod
-    def list_all(day):
+    def list_all(day, max_limit_up_time=None):
         mysqldb = mysql_data.Mysqldb()
-        return mysqldb.select_all(f"select * from kpl_limit_up_record where _day='{day}'")
+        sql = f"select * from kpl_limit_up_record where _day='{day}'"
+        if max_limit_up_time:
+            sql += f" and cast(_limit_up_time as unsigned)<={max_limit_up_time}"
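+            # Assumes _limit_up_time is stored as a numeric string (e.g. '093005'), so the cast allows numeric comparison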
+        return mysqldb.select_all(sql)
+
+    @classmethod
+    def list_all_cache(cls, day):
+        if day in cls.__records_cache:
+            return cls.__records_cache[day]
+        fdata = cls.list_all(day)
+        if fdata:
+            cls.__records_cache[day] = fdata
+        return fdata
 
     @staticmethod
     def list_by_code(code, day):
@@ -158,16 +234,20 @@
         for b in constant.KPL_INVALID_BLOCKS:
             wheres.append(f"hb.`_hot_block_name` != '{b}'")
         wheres = " and ".join(wheres)
-        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
+        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 2) a  GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
         mysqldb = mysql_data.Mysqldb()
         return mysqldb.select_one(sql)
 
     # Get the code's most recent blocks; returns [(block, date)]
     @classmethod
     def get_latest_infos(cls, code, count, contains_today=True):
+        wheres = []
+        for b in constant.KPL_INVALID_BLOCKS:
+            wheres.append(f"hb.`_hot_block_name` != '{b}'")
+        wheres = " and ".join(wheres)
         # 鍙幏鍙栨渶杩�180澶╃殑鏁版嵁
         min_day = tool.date_sub(tool.get_now_date_str(), 180)
-        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
+        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and {wheres} and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
         mysqldb = mysql_data.Mysqldb()
         results = mysqldb.select_all(sql)
         if results and not contains_today and results[0][1] == tool.get_now_date_str():
@@ -175,22 +255,76 @@
         return results
 
     @classmethod
+    def get_latest_block_infos(cls, min_day=tool.date_sub(tool.get_now_date_str(), 180), code=None):
+        """
+
+        @param min_day: defaults to 180 days before today
+        @param code: stock code
+        @return: the most recent limit-up block info
+        """
+        sql = f"SELECT r.`_code`, r.`_day`, r.`_hot_block_name`, r.`_blocks` FROM `kpl_limit_up_record` r WHERE r.`_day`>'{min_day}'"
+        if code:
+            sql += f" AND _code='{code}'"
+        sql += " order by _create_time"
+        mysqldb = mysql_data.Mysqldb()
+        results = mysqldb.select_all(sql)
+        return results
+
+    @classmethod
     def get_latest_blocks_set(cls, code):
-        results = cls.get_latest_infos(code, 10, False)
+        results = cls.get_latest_infos(code, 2, False)
         bs = set([b[0] for b in results])
         return bs
+
+    @classmethod
+    def get_current_blocks(cls, code):
+        return cls.__current_code_reasons_dict.get(code)
+
+    @classmethod
+    def get_current_codes_by_block(cls, block):
+        return cls.__current_reason_codes_dict.get(block)
+
+    @classmethod
+    def get_current_reason_codes_dict(cls):
+        return copy.deepcopy(cls.__current_reason_codes_dict)
+
+    @classmethod
+    def get_current_limit_up_reason_codes_dict(cls):
+        return copy.deepcopy(cls.__current_limit_up_reason_codes_dict)
+
+    @classmethod
+    def get_current_reasons(cls):
+        if cls.__current_reason_codes_dict:
+            return cls.__current_reason_codes_dict.keys()
+        return set()
+
+    @classmethod
+    def get_new_blocks(cls, day):
+        """
+        Get the blocks that first appeared on a given day (new blocks)
+        @param day:
+        @return:
+        """
+        sql = f"SELECT k.`_hot_block_name`, k.`_day` FROM `kpl_limit_up_record` k GROUP BY k.`_hot_block_name` HAVING k.`_day`='{day}' ORDER BY  k.`_day` DESC"
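+        # Assumes MySQL's non-strict GROUP BY keeps the earliest _day per block, so a block whose
+        # first record falls on the given day is reported as new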
+        mysqldb = mysql_data.Mysqldb()
+        results = mysqldb.select_all(sql)
+        return [x[0] for x in results]
 
 
 class KPLDataManager:
     __latest_datas = {}
+    kpl_data_update_info = {}
+    __file_content_cache = {}
 
-    def __save_in_file(self, key, datas):
+    @classmethod
+    def __save_in_file(cls, key, datas):
         name = f"{tool.get_now_date_str()}_{key}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         with open(path, 'w') as f:
             f.write(json.dumps(datas))
 
-    def __get_from_file(self, key, day=tool.get_now_date_str()):
+    @classmethod
+    def __get_from_file(cls, key, day=tool.get_now_date_str()):
         name = f"{day}_{key}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         if not os.path.exists(path):
@@ -201,7 +335,8 @@
                 return json.loads(lines[0])
         return None
 
-    def get_from_file(self, type, day):
+    @classmethod
+    def get_from_file(cls, type, day):
         name = f"{day}_{type.value}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         if not os.path.exists(path):
@@ -212,8 +347,19 @@
                 return json.loads(lines[0])
         return None
 
+    @classmethod
+    def get_from_file_cache(cls, type, day):
+        key = f"{type}-{day}"
+        if key in cls.__file_content_cache:
+            return cls.__file_content_cache.get(key)
+        fdata = cls.get_from_file(type, day)
+        if fdata:
+            cls.__file_content_cache[key] = fdata
+        return fdata
+
+    @classmethod
     # Get data for the most recent days, returned in descending date order
-    def get_latest_from_file(self, type, count):
+    def get_latest_from_file(cls, type, count, max_day=tool.get_now_date_str()):
         files = os.listdir(constant.CACHE_PATH)
         file_name_list = []
         for f in files:
@@ -221,7 +367,6 @@
                 file_name_list.append((f.split("_")[0], f))
 
         file_name_list.sort(key=lambda x: x[0], reverse=True)
-        file_name_list = file_name_list[:count]
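+        # Filtering now happens in the loop below: keep only files dated on or before max_day and
+        # stop once `count` results have been collected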
         fresults = []
         for file in file_name_list:
             path = f"{constant.CACHE_PATH}/{file[1]}"
@@ -230,21 +375,27 @@
             with open(path, 'r') as f:
                 lines = f.readlines()
                 if lines:
-                    fresults.append((file[0], json.loads(lines[0])))
+                    if int(file[0].replace("-", "")) <= int(max_day.replace("-", "")):
+                        fresults.append((file[0], json.loads(lines[0])))
+                if len(fresults) >= count:
+                    break
 
         return fresults
 
-    def save_data(self, type, datas):
-        self.__latest_datas[type] = datas
-        self.__save_in_file(type, datas)
+    @classmethod
+    def save_data(cls, type, datas):
+        cls.kpl_data_update_info[type] = (tool.get_now_time_str(), len(datas))
+        cls.__latest_datas[type] = datas
+        cls.__save_in_file(type, datas)
 
-    def get_data(self, type):
+    @classmethod
+    def get_data(cls, type):
         type = type.value
-        if type in self.__latest_datas:
-            return self.__latest_datas[type]
-        result = self.__get_from_file(type)
+        if type in cls.__latest_datas:
+            return cls.__latest_datas[type]
+        result = cls.__get_from_file(type)
         if result is not None:
-            self.__latest_datas[type] = result
+            cls.__latest_datas[type] = result
         return result
 
 
@@ -272,17 +423,24 @@
 
 # Get real-time limit-up info for the most recent days
 # Return format: [(date, data)]
-def get_current_limit_up_data_records(count):
+def get_current_limit_up_data_records(count, day=tool.get_now_date_str()):
     fresults = []
-    day = tool.get_now_date_str()
     datas = []
     if day in __limit_up_list_records_dict:
         datas = __limit_up_list_records_dict[day]
     else:
-        datas = KPLDataManager().get_latest_from_file(KPLDataType.LIMIT_UP, 10)
-        if datas:
+        logger_debug.info("Loading the last few days' real-time limit-up data from file")
+        datas = KPLDataManager().get_latest_from_file(KPLDataType.LIMIT_UP, count + 2, max_day=day)
+        # Drop data dated later than the given day
+        fdatas = []
+        for d in datas:
+            if int(d[0].replace("-", "")) > int(day.replace("-", "")):
+                continue
+            fdatas.append(d)
+        if fdatas:
             # Cache the data
-            __limit_up_list_records_dict[day] = datas
+            __limit_up_list_records_dict[day] = fdatas
+        datas = fdatas
     for i in range(len(datas)):
         if datas[i][0] == day:
             continue
@@ -292,8 +450,237 @@
     return fresults
 
 
+def get_yesterday_limit_up_codes():
+    yesterday_limit_up_data_records = get_yesterday_current_limit_up_records()
+    yesterday_codes = set([x[0] for x in yesterday_limit_up_data_records])
+    return yesterday_codes
+
+
+def get_yesterday_current_limit_up_records():
+    yesterday_limit_up_data_records = get_current_limit_up_data_records(1)[0][1]
+    return yesterday_limit_up_data_records
+
+
+# Limit-up records for the most recent days
+__latest_current_limit_up_records = {}
+
+
+def get_latest_current_limit_up_records(day=tool.get_now_date_str(), max_day_count=15):
+    if day not in __latest_current_limit_up_records:
+        fdatas = get_current_limit_up_data_records(max_day_count)
+        __latest_current_limit_up_records[day] = fdatas
+    return __latest_current_limit_up_records.get(day)
+
+
+class PullTask:
+    # Latest update time of each data type
+    __latest_update_time_dict = {}
+
+    @classmethod
+    def __upload_data(cls, type, datas):
+        root_data = {
+            "type": type,
+            "data": datas
+        }
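+        # Posts the JSON string as the raw request body to the local collector at 127.0.0.1:9004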
+        requests.post("http://127.0.0.1:9004/upload_kpl_data", json.dumps(root_data))
+
+    @classmethod
+    def repaire_pull_task(cls):
+        """
+        Repair the pull tasks
+        @return:
+        """
+        # Repair the limit-up task
+        logger_debug.info("Task repair - KaiPanLa: starting repair")
+        key = "limit_up"
+        if key not in cls.__latest_update_time_dict or time.time() - cls.__latest_update_time_dict[key] > 20:
+            logger_debug.info("Task repair - KaiPanLa: limit-up list")
+            # No update for more than 20s, needs refreshing
+            threading.Thread(target=cls.run_limit_up_task, daemon=True).start()
+        # key = "jingxuan_rank"
+        # if key not in cls.__latest_update_time_dict or time.time() - cls.__latest_update_time_dict[key] > 20:
+        #     logger_debug.info("Task repair - KaiPanLa: JingXuan inflow list")
+        #     # No update for more than 20s, needs refreshing
+        #     threading.Thread(target=cls.run_market_jingxuan_in, daemon=True).start()
+        #
+        # key = "jingxuan_rank_out"
+        # if key not in cls.__latest_update_time_dict or time.time() - cls.__latest_update_time_dict[key] > 20:
+        #     logger_debug.info("Task repair - KaiPanLa: JingXuan outflow list")
+        #     # No update for more than 20s, needs refreshing
+        #     threading.Thread(target=cls.run_market_jingxuan_out, daemon=True).start()
+
+        key = "market_strong"
+        if key not in cls.__latest_update_time_dict or time.time() - cls.__latest_update_time_dict[key] > 20:
+            logger_debug.info("Task repair - KaiPanLa: market strength")
+            # No update for more than 20s, needs refreshing
+            threading.Thread(target=cls.run_market_strong, daemon=True).start()
+
+    @classmethod
+    def run_limit_up_task(cls):
+        # Disable console log printing
+        log.close_print()
+        while True:
+            try:
+                if (tool.is_trade_time() and int(tool.get_now_time_str().replace(':', '')) > int("092530")):
+                    results = kpl_api.getLimitUpInfoNew()
+                    result = json.loads(results)
+                    start_time = time.time()
+                    cls.__upload_data("limit_up", result)
+            except Exception as e:
+                try:
+                    logging.exception(e)
+                    logger_debug.exception(e)
+                except:
+                    pass
+            except:
+                pass
+            finally:
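+                # Record a heartbeat; repaire_pull_task restarts this task if it is older than 20s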
+                cls.__latest_update_time_dict["limit_up"] = time.time()
+                time.sleep(3)
+
+    @classmethod
+    def run_market_jingxuan_in(cls):
+        """
+        JingXuan (featured) capital inflow
+        @return:
+        """
+        while True:
+            try:
+                if tool.is_trade_time():
+                    results = kpl_api.getMarketJingXuanRealRankingInfo()
+                    result = json.loads(results)
+                    cls.__upload_data("jingxuan_rank", result)
+            except:
+                pass
+            finally:
+                cls.__latest_update_time_dict["jingxuan_rank"] = time.time()
+                time.sleep(3)
+
+    @classmethod
+    def run_market_jingxuan_out(cls):
+        """
+        JingXuan (featured) capital outflow
+        @return:
+        """
+        while True:
+            try:
+                if tool.is_trade_time():
+                    results = kpl_api.getMarketJingXuanRealRankingInfo(False)
+                    result = json.loads(results)
+                    cls.__upload_data("jingxuan_rank_out", result)
+            except:
+                pass
+            finally:
+                cls.__latest_update_time_dict["jingxuan_rank_out"] = time.time()
+                time.sleep(3)
+
+    @classmethod
+    def run_market_strong(cls):
+        """
+        Market strength
+        @return:
+        """
+        while True:
+            try:
+                if tool.is_trade_time():
+                    strong_value = kpl_api.getMarketStrong()
+                    cls.__upload_data("market_strong", strong_value)
+            except:
+                pass
+            finally:
+                cls.__latest_update_time_dict["market_strong"] = time.time()
+                time.sleep(3)
+
+    @classmethod
+    # Run the pull tasks
+    def run_pull_task(cls):
+        def get_bidding_money():
+            # Upload call-auction (bidding) data
+            while True:
+                if int("092600") < int(tool.get_now_time_str().replace(":", "")) < int("092700"):
+                    try:
+                        results = kpl_api.daBanList(kpl_api.DABAN_TYPE_BIDDING)
+                        result = json.loads(results)
+                        cls.__upload_data("biddings", result)
+                    except Exception as e:
+                        pass
+                time.sleep(3)
+
+        def get_market_industry():
+            while True:
+                if tool.is_trade_time():
+                    try:
+                        results = kpl_api.getMarketIndustryRealRankingInfo()
+                        result = json.loads(results)
+                        cls.__upload_data("industry_rank", result)
+                    except:
+                        pass
+                time.sleep(3)
+
+        def get_market_jingxuan():
+            while True:
+                if tool.is_trade_time():
+                    try:
+                        results = kpl_api.getMarketJingXuanRealRankingInfo()
+                        result = json.loads(results)
+                        cls.__upload_data("jingxuan_rank", result)
+                    except:
+                        pass
+                    finally:
+                        cls.__latest_update_time_dict["jingxuan_rank"] = time.time()
+                        time.sleep(3)
+                else:
+                    time.sleep(3)
+
+        threading.Thread(target=cls.run_limit_up_task, daemon=True).start()
+        threading.Thread(target=cls.run_market_strong, daemon=True).start()
+        # threading.Thread(target=get_bidding_money, daemon=True).start()
+        # threading.Thread(target=get_market_industry, daemon=True).start()
+        # threading.Thread(target=cls.run_market_jingxuan_in, daemon=True).start()
+        # threading.Thread(target=cls.run_market_jingxuan_out, daemon=True).start()
+
+
+@tool.singleton
+class CodeHighLevel:
+    """
+    Limit-up height management for codes
+    """
+    __instance = None
+    # Limit-up level recorded per code
+    __code_level_dict = {}
+    __codes = set()
+
+    def __init__(self, day=tool.get_now_date_str()):
+        self.__day = day
+        self.__load_data(day)
+
+    @classmethod
+    def __load_data(cls, day):
+        fdatas = get_current_limit_up_data_records(15, day=day)
+        temp_dict = {d[0]: 2 for d in fdatas[0][1]}
+        break_codes = set()
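+        # A code present on the most recent prior day starts at level 2; each earlier consecutive
+        # day it also appears adds 1, and the first missing day freezes its level (via break_codes)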
+        for i in range(1, len(fdatas)):
+            codes = [d[0] for d in fdatas[i][1]]
+            for k in temp_dict:
+                if k in break_codes:
+                    continue
+                if k in codes:
+                    temp_dict[k] += 1
+                else:
+                    break_codes.add(k)
+        cls.__code_level_dict = temp_dict
+
+    def get_high_level(self, code):
+        """
+        Get the limit-up height; defaults to 1 board
+        @param code:
+        @return:
+        """
+        if code in self.__code_level_dict:
+            return self.__code_level_dict[code]
+        return 1
+
+
 if __name__ == "__main__":
-    fresults = get_current_limit_up_data_records(2)
-    for d in fresults:
-        print(d)
-    get_current_limit_up_data_records(2)
+    print(CodeHighLevel("2024-11-11").get_high_level("000833"))
+    input()

--
Gitblit v1.8.0