From 5648819608a812a34a6ec757a2cbed5e5141777d Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Mon, 01 Sep 2025 16:31:38 +0800
Subject: [PATCH] Modify the dynamic total big-order formula

---
 log_module/log_export.py | 225 +++++++++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 212 insertions(+), 13 deletions(-)

diff --git a/log_module/log_export.py b/log_module/log_export.py
index cc587fa..87a0510 100644
--- a/log_module/log_export.py
+++ b/log_module/log_export.py
@@ -11,6 +11,26 @@
 from log_module.log import logger_l2_process_time
 from utils import tool
+__log_cache_data = {}
+
+
+# Log cache decorator
+def cache_log(fn):
+    def wrapper(*args, **kwargs):
+        can_cache = tool.get_now_time_as_int() > 150000
+        cache_key = f"{fn.__name__}#{args}#{kwargs}"
+        if can_cache:
+            # Caching is only enabled after 15:00:00
+
+            if cache_key in __log_cache_data:
+                return __log_cache_data[cache_key]
+        result = fn(*args, **kwargs)
+        if can_cache:
+            __log_cache_data[cache_key] = result
+        return result
+
+    return wrapper
+
 
 class LogUtil:
 
     @classmethod
@@ -77,6 +97,7 @@
     return tool.time_seconds_format(s - 2 - cha)
 
 
+@cache_log
 def load_l2_from_log(date=None):
     today_data = {}
     if date is None:
@@ -121,6 +142,7 @@
 
 
 # Get the position range of each L2 batch-processing pass
+@cache_log
 def get_l2_process_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
@@ -149,6 +171,7 @@
 
 
 # Get the position range of each L2 batch-processing pass
+@cache_log
 def get_l2_trade_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
@@ -197,6 +220,7 @@
 
 
 # Get the position range of each L2 batch-processing pass
+@cache_log
 def get_real_place_order_positions(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
@@ -389,6 +413,27 @@
     return fdatas
 
 
+# Load trade records of the given type
+def load_trade_recod_by_type(type_, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log"
+    fdatas = []
+    lines = __load_file_content(path)
+    for line in lines:
+        data_index = line.find(f"{type_}")
+        if data_index > 0:
+            line = line.split(" - ")[1]
+            time_str = line[line.find("[") + 1:line.find("[") + 9]
+            data = line[line.find("]") + 1:].strip()
+            data_json = json.loads(data)
+            type = data_json["type"]
+            code = data_json["code"]
+            if type != type_:
+                continue
+            fdatas.append((time_str, code, type, data_json["data"]))
+    return fdatas
+
+
+@cache_log
 def load_cancel_buy_reasons(code, date=tool.get_now_date_str()):
     """
     Get the order-cancellation reasons
@@ -408,22 +453,38 @@
     return cancel_reason_dict
 
 
+def __parse_content(line):
+    line = line.split(" - ")[1]
+    time_str = line[line.find("[") + 1:line.find("[") + 9]
+    data = line[line.find("]") + 1:].strip()
+    if data.find("thread-id=") > -1 and data.find("code=") > -1:
+        data = data[data.find("code=") + 11:].strip()
+    return time_str, data
+
+
 # Load L2 order deal data
+@cache_log
 def load_huaxin_deal_record(code, date=tool.get_now_date_str()):
+    datas_dict = load_huaxin_deal_record_all(date)
+    return datas_dict.get(code)
+
+
+@cache_log
+def load_huaxin_deal_record_all(date=tool.get_now_date_str()):
     path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_desc.{date}.log"
     # Format: [(order no, lots, deal start time, deal end time, order lots)]
-    fdatas = []
+    fdatas = {}
     lines = __load_file_content(path)
     for line in lines:
-        data_index = line.find(f"{code}#")
+        data_index = line.find("#")
         if data_index > 0:
-            line = line.split(" - ")[1]
-            time_str = line[line.find("[") + 1:line.find("[") + 9]
-            data = line[line.find("]") + 1:].strip()
+            time_str, data = __parse_content(line)
             code = data.split("#")[0]
             data = data.split("#")[1]
             data = eval(data)
-            fdatas.append(data)
+            if code not in fdatas:
+                fdatas[code] = []
+            fdatas[code].append(data)
     return fdatas
 
 
@@ -454,6 +515,7 @@
     return fdatas
 
 
+@cache_log
 def load_kpl_limit_up_records(current_time_str, date=tool.get_now_date_str()):
     """
     Get the limit-up records closest to the given time
@@ -495,6 +557,7 @@
 
 
 # Load the sell orders from Huaxin deals
+@cache_log
 def load_huaxin_transaction_sell_no(code=None, date=tool.get_now_date_str()):
     path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_sell_order.{date}.log"
     fdatas = {}
@@ -514,6 +577,82 @@
                     if code_ not in fdatas:
                         fdatas[code_] = []
                     fdatas[code_].append(eval(data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_deal(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("有涨停主动卖：") < 0:
+                        continue
+                    data = data.split("有涨停主动卖：")[1]
+                    code_ = data[:6]
+                    if code and code != code_:
+                        continue
+                    data = data[6:].strip()
+                    volume = int(data.split("成交量-")[1].strip())
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, volume))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_deal_list(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("涨停主动买成交：") < 0:
+                        continue
+                    data = data.split("涨停主动买成交：")[1]
+                    data = eval(data)
+                    code_ = data[0][0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_delegate(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_delegate.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    datas = data.split("-")
+                    code_ = datas[0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, datas[1], eval(datas[2])))
     return fdatas
 
 
@@ -587,6 +726,7 @@
     return fdatas
 
 
+@cache_log
 def load_huaxin_active_sell_map(date=tool.get_now_date_str()):
     path = f"{constant.get_path_prefix()}/logs/huaxin/trade/l2_active_sell.{date}.log"
     fdatas = {}
@@ -694,18 +834,77 @@
     """
     fdatas = {}
     path = f"{constant.get_path_prefix()}/logs/gp/code_attribute/pre_close_price.{date}.log"
+    if os.path.exists(path):
+        lines = __load_file_content(path)
+        for line in lines:
+            if line:
+                data = line.split(" - ")[1]
+                code, price = data.split("-")[0].strip(), data.split("-")[1].strip()
+                fdatas[code] = price
+    return fdatas
+
+
+def load_special_codes(date=tool.get_now_date_str()):
+    """
+    Load the special codes
+    @param date:
+    @return:
+    """
+    fdatas = {}
+    path = f"{constant.get_path_prefix()}/logs/gp/plates/special_codes.{date}.log"
f"{constant.get_path_prefix()}/logs/gp/plates/special_codes.{date}.log" lines = __load_file_content(path) - for line in lines: - if line: - data = line.split(" - ")[1] - code, price = data.split("-")[0].strip(), data.split("-")[1].strip() - fdatas[code] = price + if lines: + line = lines[0] + line = line[line.find(" - ") + 3:] + return eval(line) + return None + + +def load_virtual_trade_account(date=tool.get_now_date_str()): + """ + 鍔犺浇铏氭嫙浜ゆ槗鏁版嵁 + :param date: + :return: + """ + path = f"{constant.get_path_prefix()}/logs/gp/virtual_account/virtual_account_money_records.{date}.log" + fdatas = [] + if os.path.exists(path): + with open(path, 'r', encoding="utf-8") as f: + lines = f.readlines() + if lines: + for line in lines: + time_str = __get_async_log_time(line) + data = line[line.find("]") + 1:].strip() + fdatas.append((time_str, eval(data))) + return fdatas + + +def load_deal_list(date=tool.get_now_date_str()): + """ + 鍔犺浇铏氭嫙浜ゆ槗鏁版嵁 + :param date: + :return: + """ + path = f"{constant.get_path_prefix()}/logs/huaxin_local/trade/deal.{date}.log" + fdatas = [] + if os.path.exists(path): + with open(path, 'r', encoding="utf-8") as f: + lines = f.readlines() + if lines: + for line in lines: + # time_str = __get_async_log_time(line) + data = line[line.find("]") + 1:].strip() + fdatas.append(eval(data)) return fdatas if __name__ == '__main__': - fdatas = load_kpl_limit_up_records("10:00:00", "2024-10-21") - print(fdatas) + line = """ + 2025-03-12 14:49:15.028 | DEBUG | log_module.async_log_util:run_sync:66 - [14:49:14.899602] thread-id=3048 code=600841 L鍓嶇洃鎺ц寖鍥达細{1477, 1478, 1479, 1480, 1481, 1482, 1486, 1487, 1488, 1489, 1492, 1493, 1495, 1498, 1500} 璁$畻鑼冨洿锛�1477-1503 + """ + print(__parse_content(line)) + # load_huaxin_transaction_sell_no(code='2024-11-10') + # load_huaxin_transaction_sell_no(code='2024-11-10') # print(get_h_cancel_compute_info("603912")) # logger_l2_h_cancel.info("test") -- Gitblit v1.8.0