From fb47d36048e94b9a506d5c153e3dd19a01e37df1 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Mon, 30 Oct 2023 16:30:27 +0800
Subject: [PATCH] Bug fixes

---
 third_data/kpl_data_manager.py |  239 ++++++++++++++++++++++++++++++++++++++++++++++-------------
 1 file changed, 184 insertions(+), 55 deletions(-)

diff --git a/third_data/kpl_data_manager.py b/third_data/kpl_data_manager.py
index 8904b81..d90a373 100644
--- a/third_data/kpl_data_manager.py
+++ b/third_data/kpl_data_manager.py
@@ -1,18 +1,25 @@
 import json
+import logging
 import os
+import threading
+import time
+
+import requests
 
 import constant
-import tool
+from db.redis_manager_delegate import RedisUtils
+from utils import tool
 
 # KPL (KaiPanLa) historical limit-up data management
-from db import mysql_data, redis_manager
-from l2 import code_price_manager
-from log import logger_kpl_limit_up_reason_change
-from third_data import kpl_util
-from third_data.code_plate_key_manager import LimitUpCodesPlateKeyManager
-
+from db import mysql_data_delegate as mysql_data, redis_manager_delegate as redis_manager
+from log_module.log import logger_kpl_limit_up_reason_change, logger_debug
+from third_data import kpl_util, kpl_api
+from third_data.code_plate_key_manager import LimitUpCodesPlateKeyManager, CodesHisReasonAndBlocksManager
 
 # Save the limit-up reason corresponding to each code
+from third_data.kpl_util import KPLPlatManager, KPLDataType
+
+
 class KPLCodeLimitUpReasonManager:
     __redisManager = redis_manager.RedisManager(3)
 
@@ -20,48 +27,38 @@
         return self.__redisManager.getRedis()
 
     def save_reason(self, code, reason):
-        self.__get_redis().setex(f"kpl_limitup_reason-{code}", tool.get_expire(), reason)
+        RedisUtils.setex(self.__get_redis(), f"kpl_limitup_reason-{code}", tool.get_expire(), reason)
 
     def list_all(self):
-        keys = self.__get_redis().keys("kpl_limitup_reason-*")
+        keys = RedisUtils.keys(self.__get_redis(), "kpl_limitup_reason-*")
         dict_ = {}
         for k in keys:
-            val = self.__get_redis().get(k)
+            val = RedisUtils.get(self.__get_redis(), k)
             dict_[k.split("-")[1]] = val
         return dict_
-
-
-class KPLPlatManager:
-    def save_plat(self, _id, name):
-        if not _id:
-            return
-        mysqldb = mysql_data.Mysqldb()
-        key = f"{_id}-{name}"
-        results = mysqldb.select_one(f"select * from kpl_plate where _name='{name}'")
-        if not results:
-            mysqldb.execute(f"insert into kpl_plate(_id,_name,_key) values({_id},'{name}','{key}')")
-
-    def get_plat(self, name):
-        mysqldb = mysql_data.Mysqldb()
-        results = mysqldb.select_one(f"select * from kpl_plate where _name='{name}'")
-        if results:
-            return results[0]
-        return None
-
-    def get_same_plat_names(self, name):
-        mysqldb = mysql_data.Mysqldb()
-        plate = self.get_plat(name)
-        if not plate:
-            return {name}
-        results = mysqldb.select_all(f"select _name from kpl_plate where _id='{plate}'")
-        return set([r[0] for r in results])
 
 
 class KPLLimitUpDataRecordManager:
     total_datas = None
     latest_datas = {}
+    latest_origin_datas = []
     __kplPlatManager = KPLPlatManager()
     __LimitUpCodesPlateKeyManager = LimitUpCodesPlateKeyManager()
+    __CodesPlateKeysManager = CodesHisReasonAndBlocksManager()
+
+    @classmethod
+    def __load_hist_and_blocks(cls, code):
+        # New data was added; load its historical limit-up reasons and plates
+        his_reasons = cls.get_latest_infos(code, 10, False)
+        his_reasons = set([r[0] for r in his_reasons])
+        cls.__CodesPlateKeysManager.set_history_limit_up_reason(code, his_reasons)
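+        # Fetch the code's plates from the KPL API only when none are cached yet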
+        try:
+            if not cls.__CodesPlateKeysManager.get_blocks(code):
+                results = kpl_api.getStockIDPlate(code)
+                bs = [r[1] for r in results]
+                cls.__CodesPlateKeysManager.set_blocks(code, bs)
+        except Exception:
+            # Plate lookup via the KPL API is best-effort; failures are ignored
+            pass
 
     @classmethod
     def save_record(cls, day, records):
@@ -76,6 +73,7 @@
                 code_block_dict[code].add(b)
                 # Set the limit-up data
         if records:
+            cls.latest_origin_datas = records
             cls.__LimitUpCodesPlateKeyManager.set_today_limit_up([(r[0], r[5]) for r in records])
 
         # Limit-up data records
@@ -96,7 +94,7 @@
             if not result:
                 mysqldb.execute(
                     f"insert into kpl_limit_up_record(_id,_day,_hot_block_name,_code,_code_name,_limit_up_time,_blocks,_latest_limit_up_time,_update_time,_create_time,_hot_block_code_count,_limit_up_high_info,_zylt_val) values('{_id}','{day}','{d[5]}','{d[0]}','{d[1]}','{d[2]}','{d[6]}','{d[3]}',now(),now(),{d[10]},'{d[4]}',{d[7]})")
-
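+                # A new record was inserted; preload its historical reasons and plates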
+                cls.__load_hist_and_blocks(code)
             else:
                 if _id in cls.latest_datas and json.dumps(cls.latest_datas.get(_id)) != json.dumps(d):
                     mysqldb.execute(
@@ -126,6 +124,8 @@
     def load_total_datas(cls):
         cls.total_datas = KPLLimitUpDataRecordManager.list_all(tool.get_now_date_str())
         cls.__LimitUpCodesPlateKeyManager.set_today_total_limit_up([(r[3], r[2]) for r in cls.total_datas])
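+        # Refresh the historical reasons and plates for every code loaded today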
+        for d in cls.total_datas:
+            cls.__load_hist_and_blocks(d[3])
 
     @staticmethod
     def list_all(day):
@@ -162,16 +162,20 @@
         for b in constant.KPL_INVALID_BLOCKS:
             wheres.append(f"hb.`_hot_block_name` != '{b}'")
         wheres = " and ".join(wheres)
-        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
+        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 2) a  GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
         mysqldb = mysql_data.Mysqldb()
         return mysqldb.select_one(sql)
 
     # Get the code's most recent plates; returns [(plates, date)]
     @classmethod
     def get_latest_infos(cls, code, count, contains_today=True):
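+        # Exclude plates listed in constant.KPL_INVALID_BLOCKS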
+        wheres = []
+        for b in constant.KPL_INVALID_BLOCKS:
+            wheres.append(f"hb.`_hot_block_name` != '{b}'")
+        wheres = " and ".join(wheres)
         # Only fetch data from the most recent 180 days
         min_day = tool.date_sub(tool.get_now_date_str(), 180)
-        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
+        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and {wheres} and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a  GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
         mysqldb = mysql_data.Mysqldb()
         results = mysqldb.select_all(sql)
         if results and not contains_today and results[0][1] == tool.get_now_date_str():
@@ -180,21 +184,24 @@
 
     @classmethod
     def get_latest_blocks_set(cls, code):
-        results = cls.get_latest_infos(code, 10, False)
+        results = cls.get_latest_infos(code, 2, False)
         bs = set([b[0] for b in results])
         return bs
 
 
 class KPLDataManager:
     __latest_datas = {}
+    kpl_data_update_info = {}
 
-    def __save_in_file(self, key, datas):
+    @classmethod
+    def __save_in_file(cls, key, datas):
         name = f"{tool.get_now_date_str()}_{key}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         with open(path, 'w') as f:
             f.write(json.dumps(datas))
 
-    def __get_from_file(self, key, day=tool.get_now_date_str()):
+    @classmethod
+    def __get_from_file(cls, key, day=None):
+        # Resolve the date at call time instead of import time so it never goes stale
+        day = day if day else tool.get_now_date_str()
         name = f"{day}_{key}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         if not os.path.exists(path):
@@ -205,7 +212,8 @@
                 return json.loads(lines[0])
         return None
 
-    def get_from_file(self, type, day):
+    @classmethod
+    def get_from_file(cls, type, day):
         name = f"{day}_{type.value}.log"
         path = f"{constant.CACHE_PATH}/{name}"
         if not os.path.exists(path):
@@ -216,26 +224,52 @@
                 return json.loads(lines[0])
         return None
 
-    def save_data(self, type, datas):
-        self.__latest_datas[type] = datas
-        self.__save_in_file(type, datas)
+    # Get the most recent days' data, returned in descending date order
+    @classmethod
+    def get_latest_from_file(cls, type, count):
+        files = os.listdir(constant.CACHE_PATH)
+        file_name_list = []
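+        # Cache file names start with a 10-character date followed by "_<type>.log"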
+        for f in files:
+            if f[10:] == f"_{type.value}.log":
+                file_name_list.append((f.split("_")[0], f))
 
-    def get_data(self, type):
+        file_name_list.sort(key=lambda x: x[0], reverse=True)
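+        # Newest dates first; keep only the most recent `count` files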
+        file_name_list = file_name_list[:count]
+        fresults = []
+        for file in file_name_list:
+            path = f"{constant.CACHE_PATH}/{file[1]}"
+            if not os.path.exists(path):
+                continue
+            with open(path, 'r') as f:
+                lines = f.readlines()
+                if lines:
+                    fresults.append((file[0], json.loads(lines[0])))
+
+        return fresults
+
+    @classmethod
+    def save_data(cls, type, datas):
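+        # Track the update time and entry count for each data type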
+        cls.kpl_data_update_info[type] = (tool.get_now_time_str(), len(datas))
+        cls.__latest_datas[type] = datas
+        cls.__save_in_file(type, datas)
+
+    @classmethod
+    def get_data(cls, type):
         type = type.value
-        if type in self.__latest_datas:
-            return self.__latest_datas[type]
-        result = self.__get_from_file(type)
+        if type in cls.__latest_datas:
+            return cls.__latest_datas[type]
+        result = cls.__get_from_file(type)
         if result is not None:
-            self.__latest_datas[type] = result
+            cls.__latest_datas[type] = result
         return result
 
 
 def load_history_limit_up():
-    for file_name in os.listdir("D:/kpl/his"):
+    for file_name in os.listdir(f"{constant.get_path_prefix()}/kpl/his"):
         if file_name.find("HisDaBanList_1.log") < 0:
             continue
         day = file_name[:10]
-        with open(f"D:/kpl/his/{file_name}", 'r', encoding="utf-16") as f:
+        with open(f"{constant.get_path_prefix()}/kpl/his/{file_name}", 'r', encoding="utf-16") as f:
             lines = f.readlines()
             line = lines[0]
             result = json.loads(line)
@@ -248,6 +282,101 @@
             # print(day, list_)
 
 
+# Historical limit-up lists
+__limit_up_list_records_dict = {}
+
+
+# Get the real-time limit-up data for the most recent days
+# Return format: [(date, data)]
+def get_current_limit_up_data_records(count):
+    fresults = []
+    day = tool.get_now_date_str()
+    datas = []
+    if day in __limit_up_list_records_dict:
+        datas = __limit_up_list_records_dict[day]
+    else:
+        logger_debug.info("Loading the previous days' real-time limit-up data from files")
+        datas = KPLDataManager.get_latest_from_file(KPLDataType.LIMIT_UP, 10)
+        if datas:
+            # Save the data
+            __limit_up_list_records_dict[day] = datas
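+    # Skip today's entry so only previous days' data is returned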
+    for i in range(len(datas)):
+        if datas[i][0] == day:
+            continue
+        fresults.append(datas[i])
+        if len(fresults) >= count:
+            break
+    return fresults
+
+
+def get_yesterday_limit_up_codes():
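+    # The latest non-today record is treated as yesterday's limit-up data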
+    yesterday_limit_up_data_records = get_current_limit_up_data_records(1)[0][1]
+    yesterday_codes = set([x[0] for x in yesterday_limit_up_data_records])
+    return yesterday_codes
+
+
+# Run the data pull tasks
+def run_pull_task():
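+    # Each pull task polls the KPL API and posts the result to the local upload endpoint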
+    def __upload_data(type, datas):
+        root_data = {
+            "type": type,
+            "data": datas
+        }
+        requests.post("http://127.0.0.1:9004/upload_kpl_data", json.dumps(root_data))
+
+    def get_limit_up():
+        while True:
+            if tool.is_trade_time():
+                try:
+                    results = kpl_api.getLimitUpInfo()
+                    result = json.loads(results)
+                    start_time = time.time()
+                    __upload_data("limit_up", result)
+                    logger_kpl_limit_up_reason_change.info("Upload elapsed: {}", time.time() - start_time)
+                except Exception as e:
+                    logging.exception(e)
+            time.sleep(3)
+
+    def get_bidding_money():
+        # Upload call-auction (bidding) data
+        while True:
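+            # Call-auction data is only uploaded in the 09:26:00-09:27:00 window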
+            if int("092600") < int(tool.get_now_time_str().replace(":", "")) < int("092700"):
+                try:
+                    results = kpl_api.daBanList(kpl_api.DABAN_TYPE_BIDDING)
+                    result = json.loads(results)
+                    __upload_data("biddings", result)
+                except Exception as e:
+                    pass
+            time.sleep(3)
+
+    def get_market_industry():
+        while True:
+            if tool.is_trade_time():
+                try:
+                    results = kpl_api.getMarketIndustryRealRankingInfo()
+                    result = json.loads(results)
+                    __upload_data("industry_rank", result)
+                except Exception:
+                    pass
+            time.sleep(3)
+
+    def get_market_jingxuan():
+        while True:
+            if tool.is_trade_time():
+                try:
+                    results = kpl_api.getMarketJingXuanRealRankingInfo()
+                    result = json.loads(results)
+                    __upload_data("jingxuan_rank", result)
+                except Exception:
+                    pass
+            time.sleep(3)
+
+    threading.Thread(target=get_limit_up, daemon=True).start()
+    threading.Thread(target=get_bidding_money, daemon=True).start()
+    # threading.Thread(target=get_market_industry, daemon=True).start()
+    # threading.Thread(target=get_market_jingxuan, daemon=True).start()
+
+
 if __name__ == "__main__":
-    ds = set(["1", "2", "3"])
-    print(ds.pop())
+    run_pull_task()
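+    # Block the main thread so the daemon pull threads keep running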
+    input()

--
Gitblit v1.8.0