From 2f2516749615da866e96d8d24e499b7ecbb63a3e Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Mon, 23 Jun 2025 12:28:52 +0800
Subject: [PATCH] Change the default trade mode / move where the real order placement position is calculated

---
 log_module/log_export.py |  703 +++++++++++++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 615 insertions(+), 88 deletions(-)

diff --git a/log_module/log_export.py b/log_module/log_export.py
index d1bf92d..1d530f1 100644
--- a/log_module/log_export.py
+++ b/log_module/log_export.py
@@ -1,11 +1,35 @@
 import datetime
+import hashlib
+import json
+import logging
 import os
 import shutil
+import time
 
 import constant
 from code_attribute import gpcode_manager
 from log_module.log import logger_l2_process_time
 from utils import tool
+
+__log_cache_data = {}
+
+
+# Log cache
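+# The decorator memoizes results per (function name, args, kwargs) key, but only after 15:00:00
+# (post-close), so intraday calls always re-read the underlying log files.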
+def cache_log(fn):
+    def wrapper(*args, **kwargs):
+        can_cache = tool.get_now_time_as_int() > 150000
+        cache_key = f"{fn.__name__}#{args}#{kwargs}"
+        if can_cache:
+            # Caching is only enabled after 15:00:00 (after the trading session)
+
+            if cache_key in __log_cache_data:
+                return __log_cache_data[cache_key]
+        result = fn(*args, **kwargs)
+        if can_cache:
+            __log_cache_data[cache_key] = result
+        return result
+
+    return wrapper
 
 
 class LogUtil:
@@ -49,7 +73,8 @@
         while line:
             time_ = line.split(":")[-1]
             if int(time_) > 150:
-                print(line)
+                # print(line)
+                pass
             line = f.readline()
 
 
@@ -72,18 +97,20 @@
     return tool.time_seconds_format(s - 2 - cha)
 
 
+@cache_log
 def load_l2_from_log(date=None):
     today_data = {}
     if date is None:
-        date = datetime.datetime.now().strftime("%Y-%m-%d")
+        date = tool.get_now_date_str()
     try:
         with open("{}/logs/gp/l2/l2_data.{}.log".format(constant.get_path_prefix(), date), mode='r') as f:
-            while True:
-                data = f.readline()
+            lines = f.readlines()
+            for data in lines:
                 if not data:
                     break
-                index = data.find('save_l2_data:')
-                index = data.find('-', index)
+                index = data.find(' - ') + 2
+                if data.find('async_log_util') > 0:
+                    index = data.find(']', index) + 1
                 data = data[index + 1:].strip()
                 code = data[0:6]
                 data = data[7:]
@@ -93,9 +120,10 @@
                 else:
                     today_data[code].extend(dict_)
         for key in today_data:
-            news = sorted(today_data[key], key=lambda x: x["index"])
-            today_data[key] = news
-            print(key, len(today_data[key]) - 1, today_data[key][-1]["index"])
+            # news = sorted(today_data[key], key=lambda x: x["index"])
+            # today_data[key] = news
+            # print(key, len(today_data[key]) - 1, today_data[key][-1]["index"])
+            pass
     except:
         pass
     return today_data
@@ -107,41 +135,57 @@
     return time_
 
 
+def __get_async_log_time(line):
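+    # Async log lines look like "<logger prefix> - [HH:MM:SS.ffffff] <payload>";
+    # take the 8-character HH:MM:SS part right after the opening bracket.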
+    line = line.split(" - ")[1]
+    time_str = line[line.find("[") + 1:line.find("[") + 9]
+    return time_str
+
+
 # Get the position ranges of each batch of L2 data processed
+@cache_log
 def get_l2_process_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     pos_list = []
-    with open("{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date), mode='r',
-              encoding="utf-8") as f:
-        while True:
-            line = f.readline()
-            if not line:
-                break
-            if line.find("code:{}".format(code)) < 0:
-                continue
-            time_ = __get_log_time(line)
-            line = line[line.find("处理数据范围") + len("处理数据范围") + 1:line.find("处理时间")].strip()
-            if len(pos_list) == 0 or pos_list[-1][1] < int(line.split("-")[0]):
-                if int("093000") <= int(time_.replace(":", "")) <= int("150000"):
-                    pos_list.append((int(line.split("-")[0]), int(line.split("-")[1])))
+    path_ = "{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date)
+    try:
+        with open(path_, mode='r',
+                  encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if not line:
+                    break
+                if line.find("code:{}".format(code)) < 0:
+                    continue
+                time_ = __get_log_time(line)
+                line = line[line.find("处理数据范围") + len("处理数据范围") + 1:line.find("处理时间")].strip()
+                if len(pos_list) == 0 or pos_list[-1][1] < int(line.split("-")[0]):
+                    if int("093000") <= int(time_.replace(":", "")) <= int("150000"):
+                        try:
+                            pos_list.append((int(line.split("-")[0]), int(line.split("-")[1])))
+                        except Exception as e:
+                            logging.exception(e)
+    except:
+        pass
     return pos_list
 
 
 # Get the L2 buy signal / execution / cancel positions for a code
+@cache_log
 def get_l2_trade_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     pos_list = []
     with open("{}/logs/gp/l2/l2_trade.{}.log".format(constant.get_path_prefix(), date), mode='r',
               encoding="utf-8") as f:
-        while True:
-            line = f.readline()
+        latest_single = [None, None]
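+        # latest_single holds the pending (signal start, execution position) pair for the current
+        # order attempt; it is flushed into pos_list only when a "开始执行买入" line is seen.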
+        lines = f.readlines()
+        for line in lines:
             if not line:
                 break
             if line.find("code={}".format(code)) < 0:
                 continue
-            print(line)
+            # print(line)
             time_ = __get_log_time(line)
             if int("093000") > int(time_.replace(":", "")) or int(time_.replace(":", "")) > int("150000"):
                 continue
@@ -150,14 +194,21 @@
                 str_ = line.split("鑾峰彇鍒颁拱鍏ヤ俊鍙疯捣濮嬬偣锛�")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
                 # print("淇″彿璧峰浣嶇疆锛�", index)
-                pos_list.append((0, int(index), ""))
+                latest_single = [None, None]
+                latest_single[0] = (0, int(index), "")
 
             elif line.find("鑾峰彇鍒颁拱鍏ユ墽琛屼綅缃�") > 0:
                 str_ = line.split("鑾峰彇鍒颁拱鍏ユ墽琛屼綅缃細")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
                 # print("涔板叆鎵ц浣嶇疆锛�", index)
-                pos_list.append((1, int(index), ""))
-            elif line.find("瑙﹀彂鎾ゅ崟") > 0:
+                latest_single[1] = (1, int(index), "")
+            elif line.find("寮�濮嬫墽琛屼拱鍏�") > 0:
+                # 鍙湁鐪熸鎵ц涔板叆鎵嶄細璁板綍浣嶇疆
+                for p in latest_single:
+                    if p:
+                        pos_list.append(p)
+                latest_single = [None, None]
+            elif line.find("瑙﹀彂鎾ゅ崟锛屾挙鍗曚綅缃細") > 0:
                 str_ = line.split("瑙﹀彂鎾ゅ崟锛屾挙鍗曚綅缃細")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
                 # print("鎾ゅ崟浣嶇疆锛�", index)
@@ -168,33 +219,86 @@
     return pos_list
 
 
+# Get the real order placement positions for a code
+@cache_log
+def get_real_place_order_positions(code, date=None):
+    if not date:
+        date = datetime.datetime.now().strftime("%Y-%m-%d")
+    pos_list = []
+    with open("{}/logs/gp/l2/l2_real_place_order_position.{}.log".format(constant.get_path_prefix(), date), mode='r',
+              encoding="utf-8") as f:
+        lines = f.readlines()
+        for line in lines:
+            if not line:
+                break
+            if line.find("{}-".format(code)) < 0:
+                continue
+
+            if line.find("鐪熷疄涓嬪崟浣嶇疆") > 0:
+
+                # print(line)
+                str_ = line.split("锛�")[1].strip()
+                # print(str_)
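+                # The payload after "-" is either a tuple such as "(123, ...)" (take its first
+                # element) or a bare integer index; handle both forms.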
+                try:
+                    pos = int(eval(str_.split("-")[1].strip())[0])
+                except:
+                    pos = int(eval(str_.split("-")[1].strip()))
+                # print("淇″彿璧峰浣嶇疆锛�", index)
+                pos_list.append(pos)
+    return pos_list
+
+
 # Get trade progress
 def get_trade_progress(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     index_list = []
     buy_queues = []
-    with open("{}/logs/gp/l2/l2_trade_buy_queue.{}.log".format(constant.get_path_prefix(), date), mode='r',
-              encoding="utf-8") as f:
-        while True:
-            line = f.readline()
-            if not line:
-                break
-            time_ = __get_log_time(line).strip()
-            if int(time_.replace(":", "")) > int("150000"):
-                continue
+    path_str = "{}/logs/gp/l2/l2_trade_buy_queue.{}.log".format(constant.get_path_prefix(), date)
+    lines = __load_file_content(path_str)
+    for line in lines:
+        if not line:
+            break
+        time_ = __get_log_time(line).strip()
+        if int(time_.replace(":", "")) > int("150000"):
+            continue
 
-            if line.find(f"{code}-[") >= 0:
-                buy_queues.append((eval(line.split(f"{code}-")[1]), time_))
+        if line.find(f"{code}-[") >= 0:
+            buy_queues.append((eval(line.split(f"{code}-")[1]), time_))
 
-            if line.find("鑾峰彇鎴愪氦浣嶇疆鎴愬姛锛� code-{}".format(code)) < 0:
-                continue
-            try:
-                index = int(line.split("index-")[1].split(" ")[0])
-                index_list.append((index, time_))
-            except:
-                pass
+        if line.find("鑾峰彇鎴愪氦浣嶇疆鎴愬姛锛� code-{}".format(code)) < 0:
+            continue
+        try:
+            index = int(line.split("index-")[1].split(" ")[0])
+            index_list.append((index, time_))
+        except:
+            pass
     return index_list, buy_queues
+
+
+# Get the reasons why L2 could not buy
+def get_l2_cant_buy_reasons(code, date=None):
+    if not date:
+        date = datetime.datetime.now().strftime("%Y-%m-%d")
+    fdatas = []
+    path_str = "{}/logs/gp/l2/l2_not_buy_reasons.{}.log".format(constant.get_path_prefix(), date)
+    lines = __load_file_content(path_str)
+    for line in lines:
+        if not line:
+            break
+        if line.find(f"{code}#") < 0:
+            continue
+
+        line = line.split(" - ")[1]
+        time_str = line[line.find("[") + 1:line.find("[") + 9]
+        data = line[line.find("]") + 1:].strip()
+        code_ = data.split("#")[0].strip()
+        data = data.split("#")[1].strip()
+        if code_ != code:
+            continue
+        fdatas.append((time_str, data))
+    fdatas.reverse()
+    return fdatas
 
 
 # Get the H-level cancel calculation result
@@ -202,21 +306,20 @@
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     path_str = f"{constant.get_path_prefix()}/logs/gp/l2/cancel/h_cancel.{date}.log"
+    lines = __load_file_content(path_str)
     latest_info = None
-    if os.path.exists(path_str):
-        with open(path_str, mode='r', encoding="utf-8") as f:
-            while True:
-                line = f.readline()
-                if not line:
-                    break
-                if line.find(f"code-{code}") < 0:
-                    continue
-                if line.find(f"H绾ф挙鍗曡绠楃粨鏋�") < 0:
-                    continue
-                target_rate = line.split("鐩爣姣斾緥锛�")[1].split(" ")[0].strip()
-                cancel_num = line.split("鍙栨秷璁$畻缁撴灉")[1][1:].split("/")[0].strip()
-                total_num = line.split("鍙栨秷璁$畻缁撴灉")[1][1:].split("/")[1].split(" ")[0].strip()
-                latest_info = (target_rate, round(int(cancel_num) / int(total_num), 2), cancel_num, total_num,)
+
+    for line in lines:
+        if not line:
+            break
+        if line.find(f"code-{code}") < 0:
+            continue
+        if line.find(f"H绾ф挙鍗曡绠楃粨鏋�") < 0:
+            continue
+        target_rate = line.split("鐩爣姣斾緥锛�")[1].split(" ")[0].strip()
+        cancel_num = line.split("鍙栨秷璁$畻缁撴灉")[1][1:].split("/")[0].strip()
+        total_num = line.split("鍙栨秷璁$畻缁撴灉")[1][1:].split("/")[1].split(" ")[0].strip()
+        latest_info = (target_rate, round(int(cancel_num) / int(total_num), 2), cancel_num, total_num,)
     return latest_info
 
 
@@ -228,8 +331,8 @@
     msg_list = []
     if os.path.exists(path_str):
         with open(path_str, mode='r', encoding="utf-8") as f:
-            while True:
-                line = f.readline()
+            lines = f.readlines()
+            for line in lines:
                 if not line:
                     break
                 msg_list.append(line)
@@ -273,36 +376,165 @@
     return results
 
 
-# Load buy score records
-def load_buy_score_recod(code):
-    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{tool.get_now_date_str()}.log"
-    fdatas = []
-    if os.path.exists(path):
-        with open(path, 'r', encoding="utf-8") as f:
+__log_file_contents = {}
+
+
+# Load file contents
+def __load_file_content(path_str, expire_timespace=20):
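+    # Contents are cached in memory keyed by the MD5 of the path and re-read from disk
+    # once the cached entry is older than expire_timespace seconds.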
+    md5 = hashlib.md5(path_str.encode(encoding='utf-8')).hexdigest()
+    if md5 in __log_file_contents and time.time() - __log_file_contents[md5][0] < expire_timespace:
+        return __log_file_contents[md5][1]
+    contents = []
+    if os.path.exists(path_str):
+        with open(path_str, 'r', encoding="utf-8") as f:
             lines = f.readlines()
             for line in lines:
-                data_index = line.find(f"code={code}")
-                if data_index > 0:
-                    time_str = line[11:19]
-                    data = line[line.find("data=") + 5:]
-                    type = line[line.find("type=") + 5:line.find(" ", line.find("type="))]
-                    fdatas.append((time_str, type, eval("{" + data + "}")))
+                contents.append(line)
+    __log_file_contents[md5] = (time.time(), contents)
+    return contents
+
+
+# Load trade records
+def load_trade_recod(code, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log"
+    fdatas = []
+    lines = __load_file_content(path)
+    for line in lines:
+        data_index = line.find(f"{code}")
+        if data_index > 0:
+            line = line.split(" - ")[1]
+            time_str = line[line.find("[") + 1:line.find("[") + 9]
+            data = line[line.find("]") + 1:].strip()
+            data_json = json.loads(data)
+            if data_json["code"] != code:
+                continue
+            type = data_json["type"]
+            fdatas.append((time_str, type, data_json["data"]))
+    return fdatas
+
+
+# Load trade records filtered by type
+def load_trade_recod_by_type(type_, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log"
+    fdatas = []
+    lines = __load_file_content(path)
+    for line in lines:
+        data_index = line.find(f"{type_}")
+        if data_index > 0:
+            line = line.split(" - ")[1]
+            time_str = line[line.find("[") + 1:line.find("[") + 9]
+            data = line[line.find("]") + 1:].strip()
+            data_json = json.loads(data)
+            type = data_json["type"]
+            code = data_json["code"]
+            if type != type_:
+                continue
+            fdatas.append((time_str, code, type, data_json["data"]))
+    return fdatas
+
+
+@cache_log
+def load_cancel_buy_reasons(code, date=tool.get_now_date_str()):
+    """
+    Get the cancel reasons
+    @param code:
+    @param date:
+    @return: {real place order index: cancel reason}
+    """
+    fdatas = load_trade_recod(code, date)
+    cancel_reason_dict = {}
+    for data in fdatas:
+        if data[1] != "cancel":
+            continue
+        msg = data[2].get("msg")
+        real_place_order_index = data[2].get("real_place_order_index")
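+        # Keep only the first (earliest) cancel reason recorded for each real place order index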
+        if real_place_order_index not in cancel_reason_dict:
+            cancel_reason_dict[real_place_order_index] = msg
+    return cancel_reason_dict
+
+
+
+def __parse_content(line):
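+    # Parse a line of the form "<logger prefix> - [HH:MM:SS.ffffff] <payload>".
+    # If the payload carries "thread-id=... code=XXXXXX ...", strip everything up to and
+    # including the 6-digit code (len("code=") + 6 = 11 characters).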
+    line = line.split(" - ")[1]
+    time_str = line[line.find("[") + 1:line.find("[") + 9]
+    data = line[line.find("]") + 1:].strip()
+    if data.find("thread-id=")>-1 and data.find("code=")>-1:
+        data = data[data.find("code=")+11:].strip()
+    return time_str, data
+
+
+# Load L2 order deal (transaction) data
+@cache_log
+def load_huaxin_deal_record(code, date=tool.get_now_date_str()):
+    datas_dict = load_huaxin_deal_record_all(date)
+    return datas_dict.get(code)
+
+
+@cache_log
+def load_huaxin_deal_record_all(date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_desc.{date}.log"
+    # Format: [(order no, lots, first deal time, last deal time, placed lots)]
+    fdatas = {}
+    lines = __load_file_content(path)
+    for line in lines:
+        data_index = line.find(f"#")
+        if data_index > 0:
+            time_str, data = __parse_content(line)
+            code = data.split("#")[0]
+            data = data.split("#")[1]
+            data = eval(data)
+            if code not in fdatas:
+                fdatas[code] = []
+            fdatas[code].append(data)
     return fdatas
 
 
 def load_kpl_reason_changes():
     path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_limit_up_reason_change.{tool.get_now_date_str()}.log"
     fdatas = []
-    if os.path.exists(path):
-        with open(path, 'r', encoding="utf-8") as f:
-            lines = f.readlines()
-            for line in lines:
-                data = line[line.find("code-") + 5:]
-                code = data.split(":")[0]
-                from_r = data.split(":")[1].split("-")[0]
-                to_r = eval(data.split(":")[1].split("-")[1])
-                fdatas.append((code, from_r, to_r))
+    lines = __load_file_content(path)
+    for line in lines:
+        if line.find("code-") > 0:
+            data = line[line.find("code-") + 5:]
+            code = data.split(":")[0]
+            from_r = data.split(":")[1].split("-")[0]
+            to_r = eval(data.split(":")[1].split("-")[1])
+            fdatas.append((code, from_r, to_r))
     return fdatas
+
+
+def load_kpl_open_limit_up():
+    path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_open_limit_up.{tool.get_now_date_str()}.log"
+    fdatas = []
+    lines = __load_file_content(path)
+    for line in lines:
+        if line.find("鐐告澘") > 0:
+            time_str = __get_log_time(line)
+            data = line[line.find("锛�") + 1:]
+            codes = eval(data)
+            fdatas.append((time_str, codes))
+    return fdatas
+
+
+@cache_log
+def load_kpl_limit_up_records(current_time_str, date=tool.get_now_date_str()):
+    """
+    Get the most recent limit-up records before the given time
+    @param current_time_str:
+    @param date:
+    @return:
+    """
+    path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_limit_up.{date}.log"
+    lines = __load_file_content(path)
+    lines = lines[::-1]  # reversed copy, so the list cached by __load_file_content is not mutated
+    current_time_str_int = int(current_time_str.replace(":", ""))
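+    # Walk backwards from the newest line and return the first record logged before current_time_str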
+    for line in lines:
+        if line:
+            time_str = __get_log_time(line)
+            if int(time_str.replace(":", "")) < current_time_str_int:
+                line = line.split(" - ")[1]
+                return eval(line)
+    return None
 
 
 # 鍔犺浇鍗庨懌鏈湴涔板叆璁㈠崟鍙�
@@ -315,11 +547,135 @@
             for line in lines:
                 if line:
                     data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
                     code = data.split("#")[0]
                     buy_no = int(data.split("#")[1])
                     if code not in fdatas:
                         fdatas[code] = set()
                     fdatas[code].add(buy_no)
+    return fdatas
+
+
+# Load Huaxin transacted sell orders
+@cache_log
+def load_huaxin_transaction_sell_no(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_sell_order.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    data = data.split("code=")[1]
+                    code_ = data[:6]
+                    if code and code != code_:
+                        continue
+                    data = data[6:].strip()
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append(eval(data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_deal(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("鏈夋定鍋滀富鍔ㄥ崠锛�") < 0:
+                        continue
+                    data = data.split("鏈夋定鍋滀富鍔ㄥ崠锛�")[1]
+                    code_ = data[:6]
+                    if code and code != code_:
+                        continue
+                    data = data[6:].strip()
+                    volume = int(data.split("成交量-")[1].strip())
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, volume))
+    return fdatas
+
+
+
+@cache_log
+def load_huaxin_l2_sell_deal_list(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("娑ㄥ仠涓诲姩涔版垚浜わ細") <0:
+                        continue
+                    data = data.split("娑ㄥ仠涓诲姩涔版垚浜わ細")[1]
+                    data = eval(data)
+                    code_ = data[0][0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_delegate(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_delegate.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    datas = data.split("-")
+                    code_ = datas[0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, datas[1], eval(datas[2])))
+    return fdatas
+
+
+# 鍔犺浇鍗庨懌鏈湴涔板叆璁㈠崟鍙�
+def load_l2_market_data():
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/marketdata.{tool.get_now_date_str()}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    code = data.split("#")[0]
+                    d = data.split("#")[1].strip()
+                    d = eval(d)
+                    if code not in fdatas:
+                        fdatas[code] = []
+                    fdatas[code].append(d)
     return fdatas
 
 
@@ -332,15 +688,186 @@
             lines = f.readlines()
             for line in lines:
                 if line:
-                    time_str = line.split("|")[0].strip()
-                    level = line.split("|")[1].strip()
-                    data = line.split("|")[2].split(" - ")[1].strip()
-                    fdatas.append((time_str, level, data))
+                    try:
+                        time_str = line.split("|")[0].strip()
+                        level = line.split("|")[1].strip()
+                        if level != "INFO" and level != "ERROR":
+                            continue
+                        data = line.split("|")[2].split(" - ")[1].strip()
+                        fdatas.append((time_str, level, data))
+                    except:
+                        pass
     return fdatas
 
 
+# Load Huaxin tick-by-tick transaction data
+def load_huaxin_transaction_map(date=tool.get_now_date_str(), with_time=False):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    try:
+                        data = line.split(" - ")[1].strip()
+                        if data.startswith("["):
+                            time_str = data[data.find("[") + 1:data.find("]")].strip()
+                            data = data[data.find("]") + 1:].strip()
+
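+                        # Payload format: "<code>#<python literal list of L2 transaction records>"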
+                        code = data.split("#")[0]
+                        l2_data = eval(data.split("#")[1])
+                        if code not in fdatas:
+                            fdatas[code] = []
+                        if with_time:
+                            fdatas[code].append((time_str, l2_data))
+                        else:
+                            fdatas[code].append(l2_data)
+                    except:
+                        pass
+    return fdatas
+
+
+@cache_log
+def load_huaxin_active_sell_map(date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/trade/l2_active_sell.{date}.log"
+    fdatas = {}
+    lines = __load_file_content(path)
+    for line in lines:
+        if line:
+            try:
+                data = line.split(" - ")[1].strip()
+                if data.startswith("["):
+                    time_str = data[data.find("[") + 1:data.find("]")].strip()
+                    data = data[data.find("]") + 1:].strip()
+
+                data = data.split("code=")[1].strip()
+                code = data[:data.find(" ")].strip()
+                data = data[data.find(" "):].strip()
+                data = eval(data)
+                if code not in fdatas:
+                    fdatas[code] = set()
+                fdatas[code].add(data[0])
+            except:
+                pass
+    return fdatas
+
+
+def load_huaxin_big_buy_order(date=tool.get_now_date_str()):
+    """
+    Load Huaxin big buy orders
+    @param date:
+    @return:
+    """
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_buy.{date}.log"
+    fdatas = {}
+    lines = __load_file_content(path)
+    for line in lines:
+        if line:
+            try:
+                data = line.split(" - ")[1].strip()
+                if data.startswith("["):
+                    time_str = data[data.find("[") + 1:data.find("]")].strip()
+                    data = data[data.find("]") + 1:].strip()
+
+                data = data.split("code=")[1].strip()
+                code = data[:data.find(" ")].strip()
+                data = data[data.find(" "):].strip()
+                data = eval(data)
+                if code not in fdatas:
+                    fdatas[code] = []
+                fdatas[code].extend(data)
+            except:
+                pass
+    return fdatas
+
+
+def load_huaxin_big_sell_order(date=tool.get_now_date_str()):
+    """
+    Load Huaxin big sell orders
+    @param date:
+    @return:
+    """
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_sell.{date}.log"
+    fdatas = {}
+    lines = __load_file_content(path)
+    for line in lines:
+        if line:
+            try:
+                data = line.split(" - ")[1].strip()
+                if data.startswith("["):
+                    time_str = data[data.find("[") + 1:data.find("]")].strip()
+                    data = data[data.find("]") + 1:].strip()
+
+                data = data.split("code=")[1].strip()
+                code = data[:data.find(" ")].strip()
+                data = data[data.find(" "):].strip()
+                data = eval(data)
+                if code not in fdatas:
+                    fdatas[code] = []
+                fdatas[code].extend(data)
+            except:
+                pass
+    return fdatas
+
+
+def load_huaxin_order_detail(date=tool.get_now_date_str()):
+    """
+    Load L2 tick-by-tick order (delegate) data
+    @param date:
+    @return:
+    """
+    fdatas = []
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/orderdetail.{date}.log"
+    lines = __load_file_content(path)
+    for line in lines:
+        if line:
+            time = __get_async_log_time(line)
+            line = line[line.rfind("#") + 1:]
+            fdatas.append((time, eval(line)))
+    return fdatas
+
+
+def load_pre_close_price(date=tool.get_now_date_str()):
+    """
+    Load previous close prices
+    @param date:
+    @return:
+    """
+    fdatas = {}
+    path = f"{constant.get_path_prefix()}/logs/gp/code_attribute/pre_close_price.{date}.log"
+    lines = __load_file_content(path)
+    for line in lines:
+        if line:
+            data = line.split(" - ")[1]
+            code, price = data.split("-")[0].strip(), data.split("-")[1].strip()
+            fdatas[code] = price
+    return fdatas
+
+
+def load_special_codes(date=tool.get_now_date_str()):
+    """
+    Load special codes
+    @param date:
+    @return:
+    """
+    fdatas = {}
+    path = f"{constant.get_path_prefix()}/logs/gp/plates/special_codes.{date}.log"
+    lines = __load_file_content(path)
+    if lines:
+        line = lines[0]
+        line = line[line.find(" - ") + 3:]
+        return eval(line)
+    return None
+
+
 if __name__ == '__main__':
-    load_huaxin_local_buy_no()
+    line = """
+    2025-03-12 14:49:15.028 | DEBUG    | log_module.async_log_util:run_sync:66 - [14:49:14.899602] thread-id=3048 code=600841  L前监控范围：{1477, 1478, 1479, 1480, 1481, 1482, 1486, 1487, 1488, 1489, 1492, 1493, 1495, 1498, 1500} 计算范围：1477-1503
+    """
+    print(__parse_content(line))
+    # load_huaxin_transaction_sell_no(code='2024-11-10')
+    # load_huaxin_transaction_sell_no(code='2024-11-10')
     # print(get_h_cancel_compute_info("603912"))
 
     # logger_l2_h_cancel.info("test")

--
Gitblit v1.8.0