From 2f2516749615da866e96d8d24e499b7ecbb63a3e Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Mon, 23 Jun 2025 12:28:52 +0800
Subject: [PATCH] Change the default trading mode / adjust where the real order placement position is calculated

---
 log_module/log_export.py | 503 ++++++++++++++++++++++++++++++++++++++++++++++++++-----
 1 files changed, 456 insertions(+), 47 deletions(-)

diff --git a/log_module/log_export.py b/log_module/log_export.py
index f7c51cb..1d530f1 100644
--- a/log_module/log_export.py
+++ b/log_module/log_export.py
@@ -11,6 +11,26 @@
 from log_module.log import logger_l2_process_time
 from utils import tool
 
+__log_cache_data = {}
+
+
+# Log result cache
+def cache_log(fn):
+    def wrapper(*args, **kwargs):
+        can_cache = tool.get_now_time_as_int() > 150000
+        cache_key = f"{fn.__name__}#{args}#{kwargs}"
+        if can_cache:
+            # Caching is only allowed after 15:00:00
+
+            if cache_key in __log_cache_data:
+                return __log_cache_data[cache_key]
+        result = fn(*args, **kwargs)
+        if can_cache:
+            __log_cache_data[cache_key] = result
+        return result
+
+    return wrapper
+
 
 class LogUtil:
     @classmethod
@@ -53,7 +73,8 @@
             while line:
                 time_ = line.split(":")[-1]
                 if int(time_) > 150:
-                    print(line)
+                    # print(line)
+                    pass
                 line = f.readline()
@@ -76,14 +97,15 @@
     return tool.time_seconds_format(s - 2 - cha)
 
 
+@cache_log
 def load_l2_from_log(date=None):
     today_data = {}
     if date is None:
-        date = datetime.datetime.now().strftime("%Y-%m-%d")
+        date = tool.get_now_date_str()
     try:
         with open("{}/logs/gp/l2/l2_data.{}.log".format(constant.get_path_prefix(), date), mode='r') as f:
-            while True:
-                data = f.readline()
+            lines = f.readlines()
+            for data in lines:
                 if not data:
                     break
                 index = data.find(' - ') + 2
@@ -100,7 +122,8 @@
             for key in today_data:
                 # news = sorted(today_data[key], key=lambda x: x["index"])
                 # today_data[key] = news
-                print(key, len(today_data[key]) - 1, today_data[key][-1]["index"])
+                # print(key, len(today_data[key]) - 1, today_data[key][-1]["index"])
+                pass
     except:
         pass
     return today_data
@@ -112,44 +135,57 @@
     return time_
 
 
+def __get_async_log_time(line):
+    line = line.split(" - ")[1]
+    time_str = line[line.find("[") + 1:line.find("[") + 9]
+    return time_str
+
+
 # Get the position range of each batch of processed L2 data
+@cache_log
 def get_l2_process_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     pos_list = []
-    with open("{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date), mode='r',
-              encoding="utf-8") as f:
-        while True:
-            line = f.readline()
-            if not line:
-                break
-            if line.find("code:{}".format(code)) < 0:
-                continue
-            time_ = __get_log_time(line)
-            line = line[line.find("处理数据范围") + len("处理数据范围") + 1:line.find("处理时间")].strip()
-            if len(pos_list) == 0 or pos_list[-1][1] < int(line.split("-")[0]):
-                if int("093000") <= int(time_.replace(":", "")) <= int("150000"):
-                    try:
-                        pos_list.append((int(line.split("-")[0]), int(line.split("-")[1])))
-                    except Exception as e:
-                        logging.exception(e)
+    path_ = "{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date)
+    try:
+        with open(path_, mode='r',
+                  encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if not line:
+                    break
+                if line.find("code:{}".format(code)) < 0:
+                    continue
+                time_ = __get_log_time(line)
+                line = line[line.find("处理数据范围") + len("处理数据范围") + 1:line.find("处理时间")].strip()
+                if len(pos_list) == 0 or pos_list[-1][1] < int(line.split("-")[0]):
+                    if int("093000") <= int(time_.replace(":", "")) <= int("150000"):
+                        try:
+                            pos_list.append((int(line.split("-")[0]), int(line.split("-")[1])))
+                        except Exception as e:
+                            logging.exception(e)
+    except:
+        pass
     return pos_list
 
 
 # Get the position range of each batch of processed L2 data
+@cache_log
 def get_l2_trade_position(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     pos_list = []
     with open("{}/logs/gp/l2/l2_trade.{}.log".format(constant.get_path_prefix(), date), mode='r',
               encoding="utf-8") as f:
-        while True:
-            line = f.readline()
+        latest_single = [None, None]
+        lines = f.readlines()
+        for line in lines:
             if not line:
                 break
             if line.find("code={}".format(code)) < 0:
                 continue
-            print(line)
+            # print(line)
             time_ = __get_log_time(line)
             if int("093000") > int(time_.replace(":", "")) or int(time_.replace(":", "")) > int("150000"):
                 continue
@@ -158,13 +194,20 @@
                 str_ = line.split("获取到买入信号起始点：")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
                 # print("signal start position:", index)
-                pos_list.append((0, int(index), ""))
+                latest_single = [None, None]
+                latest_single[0] = (0, int(index), "")
             elif line.find("获取到买入执行位置") > 0:
                 str_ = line.split("获取到买入执行位置：")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
                 # print("buy execution position:", index)
-                pos_list.append((1, int(index), ""))
+                latest_single[1] = (1, int(index), "")
+            elif line.find("开始执行买入") > 0:
+                # Only record positions once the buy is actually executed
+                for p in latest_single:
+                    if p:
+                        pos_list.append(p)
+                latest_single = [None, None]
             elif line.find("触发撤单，撤单位置：") > 0:
                 str_ = line.split("触发撤单，撤单位置：")[1].strip()
                 index = str_[0:str_.find(" ")].strip()
@@ -177,22 +220,29 @@
 
 
 # Get the position range of each batch of processed L2 data
+@cache_log
 def get_real_place_order_positions(code, date=None):
     if not date:
         date = datetime.datetime.now().strftime("%Y-%m-%d")
     pos_list = []
     with open("{}/logs/gp/l2/l2_real_place_order_position.{}.log".format(constant.get_path_prefix(), date), mode='r',
               encoding="utf-8") as f:
-        while True:
-            line = f.readline()
+        lines = f.readlines()
+        for line in lines:
             if not line:
                 break
             if line.find("{}-".format(code)) < 0:
                 continue
             if line.find("真实下单位置") > 0:
-                str_ = line.split("真实下单位置：")[1].strip()
-                pos = int(str_.split("-")[1].strip())
+
+                # print(line)
+                str_ = line.split("：")[1].strip()
+                # print(str_)
+                try:
+                    pos = int(eval(str_.split("-")[1].strip())[0])
+                except:
+                    pos = int(eval(str_.split("-")[1].strip()))
                 # print("signal start position:", index)
                 pos_list.append(pos)
     return pos_list
@@ -226,6 +276,31 @@
     return index_list, buy_queues
 
 
+# Get the reasons why an L2 buy could not be placed
+def get_l2_cant_buy_reasons(code, date=None):
+    if not date:
+        date = datetime.datetime.now().strftime("%Y-%m-%d")
+    fdatas = []
+    path_str = "{}/logs/gp/l2/l2_not_buy_reasons.{}.log".format(constant.get_path_prefix(), date)
+    lines = __load_file_content(path_str)
+    for line in lines:
+        if not line:
+            break
+        if line.find(f"{code}#") < 0:
+            continue
+
+        line = line.split(" - ")[1]
+        time_str = line[line.find("[") + 1:line.find("[") + 9]
+        data = line[line.find("]") + 1:].strip()
+        code_ = data.split("#")[0].strip()
+        data = data.split("#")[1].strip()
+        if code_ != code:
+            continue
+        fdatas.append((time_str, data))
+    fdatas.reverse()
+    return fdatas
+
+
 # Get the H-level cancel computation result
 def get_h_cancel_compute_info(code, date=None):
     if not date:
@@ -256,8 +331,8 @@
     msg_list = []
     if os.path.exists(path_str):
         with open(path_str, mode='r', encoding="utf-8") as f:
-            while True:
-                line = f.readline()
+            lines = f.readlines()
+            for line in lines:
                 if not line:
                     break
                 msg_list.append(line)
@@ -320,8 +395,8 @@
 
 
 # Load buy score records
-def load_trade_recod(code):
-    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{tool.get_now_date_str()}.log"
+def load_trade_recod(code, date=tool.get_now_date_str()):
f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log" fdatas = [] lines = __load_file_content(path) for line in lines: @@ -338,22 +413,79 @@ return fdatas -# 鍔犺浇l2璁㈠崟鎴愪氦鏁版嵁 -def load_huaxin_deal_record(code): - path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_desc.{tool.get_now_date_str()}.log" - # 鏍煎紡:[(璁㈠崟鍙�,鎵嬫暟,寮�濮嬫垚浜ゆ椂闂�,鎴愪氦缁撴潫鏃堕棿,涓嬪崟鎵嬫暟)] +# 鍔犺浇涔板叆寰楀垎璁板綍 +def load_trade_recod_by_type(type_, date=tool.get_now_date_str()): + path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log" fdatas = [] lines = __load_file_content(path) for line in lines: - data_index = line.find(f"{code}#") + data_index = line.find(f"{type_}") if data_index > 0: line = line.split(" - ")[1] time_str = line[line.find("[") + 1:line.find("[") + 9] data = line[line.find("]") + 1:].strip() + data_json = json.loads(data) + type = data_json["type"] + code = data_json["code"] + if type != type_: + continue + fdatas.append((time_str, code, type, data_json["data"])) + return fdatas + + +@cache_log +def load_cancel_buy_reasons(code, date=tool.get_now_date_str()): + """ + 鑾峰彇鎾ゅ崟鍘熷洜 + @param code: + @param date: + @return: {鐪熷疄涓嬪崟浣嶇疆锛氭挙鍗曞師鍥爙 + """ + fdatas = load_trade_recod(code, date) + cancel_reason_dict = {} + for data in fdatas: + if data[1] != "cancel": + continue + msg = data[2].get("msg") + real_place_order_index = data[2].get("real_place_order_index") + if real_place_order_index not in cancel_reason_dict: + cancel_reason_dict[real_place_order_index] = msg + return cancel_reason_dict + + + +def __parse_content(line): + line = line.split(" - ")[1] + time_str = line[line.find("[") + 1:line.find("[") + 9] + data = line[line.find("]") + 1:].strip() + if data.find("thread-id=")>-1 and data.find("code=")>-1: + data = data[data.find("code=")+11:].strip() + return time_str, data + + +# 鍔犺浇l2璁㈠崟鎴愪氦鏁版嵁 +@cache_log +def load_huaxin_deal_record(code, date=tool.get_now_date_str()): + datas_dict = load_huaxin_deal_record_all(date) + return datas_dict.get(code) + + +@cache_log +def load_huaxin_deal_record_all(date=tool.get_now_date_str()): + path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_desc.{date}.log" + # 鏍煎紡:[(璁㈠崟鍙�,鎵嬫暟,寮�濮嬫垚浜ゆ椂闂�,鎴愪氦缁撴潫鏃堕棿,涓嬪崟鎵嬫暟)] + fdatas = {} + lines = __load_file_content(path) + for line in lines: + data_index = line.find(f"#") + if data_index > 0: + time_str, data = __parse_content(line) code = data.split("#")[0] data = data.split("#")[1] data = eval(data) - fdatas.append(data) + if code not in fdatas: + fdatas[code] = [] + fdatas[code].append(data) return fdatas @@ -369,6 +501,40 @@ to_r = eval(data.split(":")[1].split("-")[1]) fdatas.append((code, from_r, to_r)) return fdatas + + +def load_kpl_open_limit_up(): + path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_open_limit_up.{tool.get_now_date_str()}.log" + fdatas = [] + lines = __load_file_content(path) + for line in lines: + if line.find("鐐告澘") > 0: + time_str = __get_log_time(line) + data = line[line.find("锛�") + 1:] + codes = eval(data) + fdatas.append((time_str, codes)) + return fdatas + + +@cache_log +def load_kpl_limit_up_records(current_time_str, date=tool.get_now_date_str()): + """ + 鑾峰彇绂荤粰瀹氭椂闂存渶杩戠殑娑ㄥ仠鏁版嵁 + @param current_time_str: + @param date: + @return: + """ + path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_limit_up.{date}.log" + lines = __load_file_content(path) + lines.reverse() + current_time_str_int = int(current_time_str.replace(":", "")) + for line in lines: + if line: + time_str = __get_log_time(line) + if int(time_str.replace(":", "")) < 
+                line = line.split(" - ")[1]
+                return eval(line)
+    return None
 
 
 # Load Huaxin local buy order numbers
@@ -388,6 +554,107 @@
             if code not in fdatas:
                 fdatas[code] = set()
             fdatas[code].add(buy_no)
+    return fdatas
+
+
+# Load the sell orders of Huaxin deals
+@cache_log
+def load_huaxin_transaction_sell_no(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_sell_order.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    data = data.split("code=")[1]
+                    code_ = data[:6]
+                    if code and code != code_:
+                        continue
+                    data = data[6:].strip()
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append(eval(data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_deal(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("有涨停主动卖：") < 0:
+                        continue
+                    data = data.split("有涨停主动卖：")[1]
+                    code_ = data[:6]
+                    if code and code != code_:
+                        continue
+                    data = data[6:].strip()
+                    volume = int(data.split("成交量-")[1].strip())
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, volume))
+    return fdatas
+
+
+
+@cache_log
+def load_huaxin_l2_sell_deal_list(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    if data.find("涨停主动买成交：") < 0:
+                        continue
+                    data = data.split("涨停主动买成交：")[1]
+                    data = eval(data)
+                    code_ = data[0][0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, data))
+    return fdatas
+
+
+@cache_log
+def load_huaxin_l2_sell_delegate(code=None, date=tool.get_now_date_str()):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_delegate.{date}.log"
+    fdatas = {}
+    if os.path.exists(path):
+        with open(path, 'r', encoding="utf-8") as f:
+            lines = f.readlines()
+            for line in lines:
+                if line:
+                    time_str = __get_async_log_time(line)
+                    data = line.split(" - ")[1].strip()
+                    if data.startswith("["):
+                        data = data[data.find("]") + 1:].strip()
+                    datas = data.split("-")
+                    code_ = datas[0]
+                    if code and code != code_:
+                        continue
+                    if code_ not in fdatas:
+                        fdatas[code_] = []
+                    fdatas[code_].append((time_str, datas[1], eval(datas[2])))
     return fdatas
@@ -434,8 +701,8 @@
 
 
 # Read system logs
-def load_huaxin_transaction_map():
-    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction.{tool.get_now_date_str()}.log"
+def load_huaxin_transaction_map(date=tool.get_now_date_str(), with_time=False):
+    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction.{date}.log"
     fdatas = {}
     if os.path.exists(path):
         with open(path, 'r', encoding="utf-8") as f:
@@ -445,20 +712,162 @@
                 try:
                     data = line.split(" - ")[1].strip()
                     if data.startswith("["):
data.startswith("["): + time_str = data[data.find("[") + 1:data.find("]")].strip() data = data[data.find("]") + 1:].strip() + code = data.split("#")[0] - l2_data = eval( data.split("#")[1]) + l2_data = eval(data.split("#")[1]) if code not in fdatas: - fdatas[code]=[] - fdatas[code].append(l2_data) + fdatas[code] = [] + if with_time: + fdatas[code].append((time_str, l2_data)) + else: + fdatas[code].append(l2_data) except: pass return fdatas +@cache_log +def load_huaxin_active_sell_map(date=tool.get_now_date_str()): + path = f"{constant.get_path_prefix()}/logs/huaxin/trade/l2_active_sell.{date}.log" + fdatas = {} + lines = __load_file_content(path) + for line in lines: + if line: + try: + data = line.split(" - ")[1].strip() + if data.startswith("["): + time_str = data[data.find("[") + 1:data.find("]")].strip() + data = data[data.find("]") + 1:].strip() + + data = data.split("code=")[1].strip() + code = data[:data.find(" ")].strip() + data = data[data.find(" "):].strip() + data = eval(data) + if code not in fdatas: + fdatas[code] = set() + fdatas[code].add(data[0]) + except: + pass + return fdatas + + +def load_huaxin_big_buy_order(date=tool.get_now_date_str()): + """ + 鍔犺浇鍗庨懌澶т拱鍗� + @param date: + @return: + """ + path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_buy.{date}.log" + fdatas = {} + lines = __load_file_content(path) + for line in lines: + if line: + try: + data = line.split(" - ")[1].strip() + if data.startswith("["): + time_str = data[data.find("[") + 1:data.find("]")].strip() + data = data[data.find("]") + 1:].strip() + + data = data.split("code=")[1].strip() + code = data[:data.find(" ")].strip() + data = data[data.find(" "):].strip() + data = eval(data) + if code not in fdatas: + fdatas[code] = [] + fdatas[code].extend(data) + except: + pass + return fdatas + + +def load_huaxin_big_sell_order(date=tool.get_now_date_str()): + """ + 鍔犺浇鍗庨懌澶т拱鍗� + @param date: + @return: + """ + path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_sell.{date}.log" + fdatas = {} + lines = __load_file_content(path) + for line in lines: + if line: + try: + data = line.split(" - ")[1].strip() + if data.startswith("["): + time_str = data[data.find("[") + 1:data.find("]")].strip() + data = data[data.find("]") + 1:].strip() + + data = data.split("code=")[1].strip() + code = data[:data.find(" ")].strip() + data = data[data.find(" "):].strip() + data = eval(data) + if code not in fdatas: + fdatas[code] = [] + fdatas[code].extend(data) + except: + pass + return fdatas + + +def load_huaxin_order_detail(date=tool.get_now_date_str()): + """ + 鍔犺浇L2閫愮瑪濮旀墭鏁版嵁 + @param date: + @return: + """ + fdatas = [] + path = f"{constant.get_path_prefix()}/logs/huaxin/l2/orderdetail.{date}.log" + lines = __load_file_content(path) + for line in lines: + if line: + time = __get_async_log_time(line) + line = line[line.rfind("#") + 1:] + fdatas.append((time, eval(line))) + return fdatas + + +def load_pre_close_price(date=tool.get_now_date_str()): + """ + 鍔犺浇涔嬪墠鐨勬敹鐩樹环 + @param date: + @return: + """ + fdatas = {} + path = f"{constant.get_path_prefix()}/logs/gp/code_attribute/pre_close_price.{date}.log" + lines = __load_file_content(path) + for line in lines: + if line: + data = line.split(" - ")[1] + code, price = data.split("-")[0].strip(), data.split("-")[1].strip() + fdatas[code] = price + return fdatas + + +def load_special_codes(date=tool.get_now_date_str()): + """ + 鍔犺浇涔嬪墠鐨勬敹鐩樹环 + @param date: + @return: + """ + fdatas = {} + path = 
f"{constant.get_path_prefix()}/logs/gp/plates/special_codes.{date}.log" + lines = __load_file_content(path) + if lines: + line = lines[0] + line = line[line.find(" - ") + 3:] + return eval(line) + return None + + if __name__ == '__main__': - fdatas = load_huaxin_deal_record("002528") - print(len(fdatas)) + line = """ + 2025-03-12 14:49:15.028 | DEBUG | log_module.async_log_util:run_sync:66 - [14:49:14.899602] thread-id=3048 code=600841 L鍓嶇洃鎺ц寖鍥达細{1477, 1478, 1479, 1480, 1481, 1482, 1486, 1487, 1488, 1489, 1492, 1493, 1495, 1498, 1500} 璁$畻鑼冨洿锛�1477-1503 + """ + print(__parse_content(line)) + # load_huaxin_transaction_sell_no(code='2024-11-10') + # load_huaxin_transaction_sell_no(code='2024-11-10') # print(get_h_cancel_compute_info("603912")) # logger_l2_h_cancel.info("test") -- Gitblit v1.8.0