import datetime
import hashlib
import json
import logging
import os
import shutil
import time

import constant
from code_attribute import gpcode_manager
from utils import tool

__log_cache_data = {}


# Log-result cache decorator
def cache_log(fn):
    def wrapper(*args, **kwargs):
        can_cache = tool.get_now_time_as_int() > 150000
        cache_key = f"{fn.__name__}#{args}#{kwargs}"
        if can_cache:
            # Caching is only allowed after 15:00:00
            if cache_key in __log_cache_data:
                return __log_cache_data[cache_key]
        result = fn(*args, **kwargs)
        if can_cache:
            __log_cache_data[cache_key] = result
        return result

    return wrapper


class LogUtil:
    @classmethod
    def extract_log_from_key(cls, key, path, target_path):
        # Write every line of `path` that contains `key` into `target_path`.
        with open(target_path, mode='w', encoding="utf-8") as fw:
            with open(path, 'r', encoding="utf-8") as f:
                lines = f.readlines()
                for line in lines:
                    if line.find("{}".format(key)) > 0:
                        fw.write(line)


# Export the data-processing position-range log
def __export_l2_pos_range(code, date, dir):
    LogUtil.extract_log_from_key("{} 处理数据范围".format(code),
                                 "{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date),
                                 "{}/l2_process_{}.log".format(dir, date))


# Export the trade log
def __export_l2_trade_log(code, date, dir):
    LogUtil.extract_log_from_key(code, "{}/logs/gp/l2/l2_trade.{}.log".format(constant.get_path_prefix(), date),
                                 "{}/l2_trade_{}.log".format(dir, date))


# Export the trade-cancel log
def __export_l2_trade_cancel_log(code, date, dir):
    LogUtil.extract_log_from_key(code, "{}/logs/gp/l2/l2_trade_cancel.{}.log".format(constant.get_path_prefix(), date),
                                 "{}/l2_trade_cancel_{}.log".format(dir, date))


def __analyse_pricess_time():
    # Scan today's l2_process log for entries whose trailing value (the processing time) exceeds 150.
    date = datetime.datetime.now().strftime("%Y-%m-%d")
    file_path = f"{constant.get_path_prefix()}/logs/gp/l2/l2_process.{date}.log"
    with open(file_path, encoding="utf-8") as f:
        line = f.readline()
        while line:
            time_ = line.split(":")[-1]
            if int(time_) > 150:
                # print(line)
                pass
            line = f.readline()


def export_l2_log(code):
    if len(code) < 6:
        return
    date = datetime.datetime.now().strftime("%Y-%m-%d")
    dir_ = "{}/logs/gp/l2/{}".format(constant.get_path_prefix(), code)
    if not os.path.exists(dir_):
        os.mkdir(dir_)
    __export_l2_pos_range(code, date, dir_)
    __export_l2_trade_cancel_log(code, date, dir_)
    __export_l2_trade_log(code, date, dir_)


def compute_buy1_real_time(time_):
    ts = time_.split(":")
    s = int(ts[0]) * 3600 + int(ts[1]) * 60 + int(ts[2])
    cha = (s - 2) % 3
    return tool.time_seconds_format(s - 2 - cha)


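# Worked example for compute_buy1_real_time (a sketch of the arithmetic above; it assumes
# tool.time_seconds_format turns seconds-since-midnight back into an "HH:MM:SS" string):
#   "10:00:05" -> s = 36005; cha = (36005 - 2) % 3 = 0; 36005 - 2 - 0 = 36003 -> "10:00:03"
#   "09:31:07" -> s = 34267; cha = (34267 - 2) % 3 = 2; 34267 - 2 - 2 = 34263 -> "09:31:03"
# i.e. the result is the given time minus 2 seconds, rounded down to a multiple of 3 seconds.

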
@cache_log
def load_l2_from_log(date=None):
    today_data = {}
    if date is None:
        date = tool.get_now_date_str()
    try:
        with open("{}/logs/gp/l2/l2_data.{}.log".format(constant.get_path_prefix(), date), mode='r') as f:
            lines = f.readlines()
            for data in lines:
                if not data:
                    break
                index = data.find(' - ') + 2
                if data.find('async_log_util') > 0:
                    index = data.find(']', index) + 1
                data = data[index + 1:].strip()
                code = data[0:6]
                data = data[7:]
                dict_ = eval(data)
                if code not in today_data:
                    today_data[code] = dict_
                else:
                    today_data[code].extend(dict_)
        for key in today_data:
            # news = sorted(today_data[key], key=lambda x: x["index"])
            # today_data[key] = news
            # print(key, len(today_data[key]) - 1, today_data[key][-1]["index"])
            pass
    except:
        pass
    return today_data


# Get the timestamp from a log line
def __get_log_time(line):
    time_ = line.split("|")[0].split(" ")[1].split(".")[0]
    return time_


def __get_async_log_time(line):
    line = line.split(" - ")[1]
    time_str = line[line.find("[") + 1:line.find("[") + 9]
    return time_str


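# Assumed line formats for the two time helpers above (inferred from the slicing logic,
# not from a documented spec):
#   __get_log_time expects timestamped lines such as
#       "2024-06-03 09:31:05.123 | INFO | ... - message"   -> returns "09:31:05"
#   __get_async_log_time expects the payload after " - " to start with a bracketed
#   timestamp written by the async logger, e.g.
#       "... - [09:31:05.123456] message"                  -> returns "09:31:05"

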
# Get the position range of each batch of L2 data that was processed
@cache_log
def get_l2_process_position(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    pos_list = []
    path_ = "{}/logs/gp/l2/l2_process.{}.log".format(constant.get_path_prefix(), date)
    try:
        with open(path_, mode='r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if not line:
                    break
                if line.find("code:{}".format(code)) < 0:
                    continue
                time_ = __get_log_time(line)
                line = line[line.find("处理数据范围") + len("处理数据范围") + 1:line.find("处理时间")].strip()
                if len(pos_list) == 0 or pos_list[-1][1] < int(line.split("-")[0]):
                    if int("093000") <= int(time_.replace(":", "")) <= int("150000"):
                        try:
                            pos_list.append((int(line.split("-")[0]), int(line.split("-")[1])))
                        except Exception as e:
                            logging.exception(e)
    except:
        pass
    return pos_list


# Get the buy-signal start, buy-execution and cancel positions recorded in the trade log
@cache_log
def get_l2_trade_position(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    pos_list = []
    with open("{}/logs/gp/l2/l2_trade.{}.log".format(constant.get_path_prefix(), date), mode='r',
              encoding="utf-8") as f:
        latest_single = [None, None]
        lines = f.readlines()
        for line in lines:
            if not line:
                break
            if line.find("code={}".format(code)) < 0:
                continue
            # print(line)
            time_ = __get_log_time(line)
            if int("093000") > int(time_.replace(":", "")) or int(time_.replace(":", "")) > int("150000"):
                continue
            if line.find("获取到买入信号起始点") > 0:
                str_ = line.split("获取到买入信号起始点:")[1].strip()
                index = str_[0:str_.find(" ")].strip()
                # print("signal start position:", index)
                latest_single = [None, None]
                latest_single[0] = (0, int(index), "")
            elif line.find("获取到买入执行位置") > 0:
                str_ = line.split("获取到买入执行位置:")[1].strip()
                index = str_[0:str_.find(" ")].strip()
                # print("buy execution position:", index)
                latest_single[1] = (1, int(index), "")
            elif line.find("开始执行买入") > 0:
                # Positions are only recorded once the buy is actually executed
                for p in latest_single:
                    if p:
                        pos_list.append(p)
                latest_single = [None, None]
            elif line.find("触发撤单,撤单位置:") > 0:
                str_ = line.split("触发撤单,撤单位置:")[1].strip()
                index = str_[0:str_.find(" ")].strip()
                # print("cancel position:", index)
                pos_list.append((2, int(index), line.split("撤单原因:")[1]))
            else:
                continue
    return pos_list


# Get the real order placement positions
@cache_log
def get_real_place_order_positions(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    pos_list = []
    with open("{}/logs/gp/l2/l2_real_place_order_position.{}.log".format(constant.get_path_prefix(), date), mode='r',
              encoding="utf-8") as f:
        lines = f.readlines()
        for line in lines:
            if not line:
                break
            if line.find("{}-".format(code)) < 0:
                continue
            if line.find("真实下单位置") > 0:
                # print(line)
                str_ = line.split(":")[1].strip()
                # print(str_)
                try:
                    pos = int(eval(str_.split("-")[1].strip())[0])
                except:
                    pos = int(eval(str_.split("-")[1].strip()))
                # print("real place-order position:", pos)
                pos_list.append(pos)
    return pos_list


# Get the trade (deal) progress
def get_trade_progress(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    index_list = []
    buy_queues = []
    path_str = "{}/logs/gp/l2/l2_trade_buy_queue.{}.log".format(constant.get_path_prefix(), date)
    lines = __load_file_content(path_str)
    for line in lines:
        if not line:
            break
        time_ = __get_log_time(line).strip()
        if int(time_.replace(":", "")) > int("150000"):
            continue
        if line.find(f"{code}-[") >= 0:
            buy_queues.append((eval(line.split(f"{code}-")[1]), time_))
        if line.find("获取成交位置成功: code-{}".format(code)) < 0:
            continue
        try:
            index = int(line.split("index-")[1].split(" ")[0])
            index_list.append((index, time_))
        except:
            pass
    return index_list, buy_queues


# Get the reasons why an L2 buy was not allowed
def get_l2_cant_buy_reasons(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    fdatas = []
    path_str = "{}/logs/gp/l2/l2_not_buy_reasons.{}.log".format(constant.get_path_prefix(), date)
    lines = __load_file_content(path_str)
    for line in lines:
        if not line:
            break
        if line.find(f"{code}#") < 0:
            continue
        line = line.split(" - ")[1]
        time_str = line[line.find("[") + 1:line.find("[") + 9]
        data = line[line.find("]") + 1:].strip()
        code_ = data.split("#")[0].strip()
        data = data.split("#")[1].strip()
        if code_ != code:
            continue
        fdatas.append((time_str, data))
    fdatas.reverse()
    return fdatas


# Get the latest H-level cancel computation result
def get_h_cancel_compute_info(code, date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    path_str = f"{constant.get_path_prefix()}/logs/gp/l2/cancel/h_cancel.{date}.log"
    lines = __load_file_content(path_str)
    latest_info = None
    for line in lines:
        if not line:
            break
        if line.find(f"code-{code}") < 0:
            continue
        if line.find("H级撤单计算结果") < 0:
            continue
        target_rate = line.split("目标比例:")[1].split(" ")[0].strip()
        cancel_num = line.split("取消计算结果")[1][1:].split("/")[0].strip()
        total_num = line.split("取消计算结果")[1][1:].split("/")[1].split(" ")[0].strip()
        latest_info = (target_rate, round(int(cancel_num) / int(total_num), 2), cancel_num, total_num,)
    return latest_info


# Read the market-watch messages
def get_kp_msg_list(date=None):
    if not date:
        date = datetime.datetime.now().strftime("%Y-%m-%d")
    path_str = f"{constant.get_path_prefix()}/logs/gp/kp/kp_msg.{date}.log"
    msg_list = []
    if os.path.exists(path_str):
        with open(path_str, mode='r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if not line:
                    break
                msg_list.append(line)
    return msg_list


def export_logs(code):
    code_name = gpcode_manager.get_code_name(code)
    date = datetime.datetime.now().strftime("%Y-%m-%d")
    target_dir = f"{constant.get_path_prefix()}/logs/gp/l2/export/{code}_{code_name}_{date}"
    if os.path.exists(target_dir):
        shutil.rmtree(target_dir)
    os.makedirs(target_dir)
    log_names = ["l2_process", "l2_trade", "l2_trade_cancel", "l2_process_time", "l2_trade_buy",
                 "l2_trade_buy_progress", "cancel/h_cancel"]
    # Export the trade-related logs
    for log_name in log_names:
        key = f"code={code}"
        if log_name in ("l2_process", "l2_process_time", "cancel/h_cancel", "l2_trade_buy_progress"):
            key = code
        target_path = f"{target_dir}/{log_name}.{code}_{code_name}.{date}.log"
        # Create the target directory if necessary
        dir_path = "/".join(target_path.split("/")[:-1])
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        LogUtil.extract_log_from_key(key, f"{constant.get_path_prefix()}/logs/gp/l2/{log_name}.{date}.log",
                                     target_path)


def export_trade_progress(code):
    path = f"{constant.get_path_prefix()}/logs/gp/l2/l2_trade_buy_progress.{tool.get_now_date_str()}.log"
    index_set = set()
    with open(path, mode='r', encoding="utf-8") as f:
        lines = f.readlines()
        for line in lines:
            if line.find(f"code-{code}") > -1 and line.find("确定交易进度成功") > -1:
                index = line.split("index-")[1].split(" ")[0]
                index_set.add(int(index))
    results = list(index_set)
    results.sort()
    return results


__log_file_contents = {}


# Load a file's content, caching it for a short time
def __load_file_content(path_str, expire_timespace=20):
    # The cache key is the MD5 of the path; entries expire after `expire_timespace` seconds.
    md5 = hashlib.md5(path_str.encode(encoding='utf-8')).hexdigest()
    if md5 in __log_file_contents and time.time() - __log_file_contents[md5][0] < expire_timespace:
        return __log_file_contents[md5][1]
    contents = []
    if os.path.exists(path_str):
        with open(path_str, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                contents.append(line)
    __log_file_contents[md5] = (time.time(), contents)
    return contents


# Load the buy-score (trade) records for a code
def load_trade_recod(code, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log"
    fdatas = []
    lines = __load_file_content(path)
    for line in lines:
        data_index = line.find(f"{code}")
        if data_index > 0:
            line = line.split(" - ")[1]
            time_str = line[line.find("[") + 1:line.find("[") + 9]
            data = line[line.find("]") + 1:].strip()
            data_json = json.loads(data)
            if data_json["code"] != code:
                continue
            fdatas.append((time_str, data_json["type"], data_json["data"]))
    return fdatas


# Load trade records of the given type (across all codes)
def load_trade_recod_by_type(type_, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/gp/trade/trade_record.{date}.log"
    fdatas = []
    lines = __load_file_content(path)
    for line in lines:
        data_index = line.find(f"{type_}")
        if data_index > 0:
            line = line.split(" - ")[1]
            time_str = line[line.find("[") + 1:line.find("[") + 9]
            data = line[line.find("]") + 1:].strip()
            data_json = json.loads(data)
            code = data_json["code"]
            if data_json["type"] != type_:
                continue
            fdatas.append((time_str, code, data_json["type"], data_json["data"]))
    return fdatas


@cache_log
def load_cancel_buy_reasons(code, date=tool.get_now_date_str()):
    """
    Get the cancel reasons for a code
    @param code:
    @param date:
    @return: {real place-order index: cancel reason}
    """
    fdatas = load_trade_recod(code, date)
    cancel_reason_dict = {}
    for data in fdatas:
        if data[1] != "cancel":
            continue
        msg = data[2].get("msg")
        real_place_order_index = data[2].get("real_place_order_index")
        if real_place_order_index not in cancel_reason_dict:
            cancel_reason_dict[real_place_order_index] = msg
    return cancel_reason_dict


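# Usage sketch (illustrative only; "000001" is a placeholder code and today's log files
# are assumed to exist):
#   cancel_reasons = load_cancel_buy_reasons("000001")
#   for real_index in get_real_place_order_positions("000001"):
#       print(real_index, cancel_reasons.get(real_index, "not cancelled"))

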
def __parse_content(line):
    # Split a log line into (time, payload); if the payload carries a "thread-id=... code=XXXXXX"
    # prefix, drop everything up to and including the 6-character code.
    line = line.split(" - ")[1]
    time_str = line[line.find("[") + 1:line.find("[") + 9]
    data = line[line.find("]") + 1:].strip()
    if data.find("thread-id=") > -1 and data.find("code=") > -1:
        data = data[data.find("code=") + 11:].strip()
    return time_str, data


# Load L2 order deal (transaction) records
@cache_log
def load_huaxin_deal_record(code, date=tool.get_now_date_str()):
    datas_dict = load_huaxin_deal_record_all(date)
    return datas_dict.get(code)


@cache_log
def load_huaxin_deal_record_all(date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_desc.{date}.log"
    # Format: [(order no, lots dealt, first deal time, last deal time, lots ordered)]
    fdatas = {}
    lines = __load_file_content(path)
    for line in lines:
        data_index = line.find(f"#")
        if data_index > 0:
            time_str, data = __parse_content(line)
            code = data.split("#")[0]
            data = data.split("#")[1]
            data = eval(data)
            if code not in fdatas:
                fdatas[code] = []
            fdatas[code].append(data)
    return fdatas


def load_kpl_reason_changes():
    path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_limit_up_reason_change.{tool.get_now_date_str()}.log"
    fdatas = []
    lines = __load_file_content(path)
    for line in lines:
        if line.find("code-") > 0:
            data = line[line.find("code-") + 5:]
            code = data.split(":")[0]
            from_r = data.split(":")[1].split("-")[0]
            to_r = eval(data.split(":")[1].split("-")[1])
            fdatas.append((code, from_r, to_r))
    return fdatas


def load_kpl_open_limit_up():
    path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_open_limit_up.{tool.get_now_date_str()}.log"
    fdatas = []
    lines = __load_file_content(path)
    for line in lines:
        if line.find("炸板") > 0:
            time_str = __get_log_time(line)
            data = line[line.find(":") + 1:]
            codes = eval(data)
            fdatas.append((time_str, codes))
    return fdatas


@cache_log
def load_kpl_limit_up_records(current_time_str, date=tool.get_now_date_str()):
    """
    Get the most recent limit-up records logged before the given time
    @param current_time_str:
    @param date:
    @return:
    """
    path = f"{constant.get_path_prefix()}/logs/gp/kpl/kpl_limit_up.{date}.log"
    lines = __load_file_content(path)
    current_time_str_int = int(current_time_str.replace(":", ""))
    # Iterate from the newest line backwards (avoid reversing the shared cached list in place)
    for line in reversed(lines):
        if line:
            time_str = __get_log_time(line)
            if int(time_str.replace(":", "")) < current_time_str_int:
                line = line.split(" - ")[1]
                return eval(line)
    return None


# Load Huaxin local buy order numbers
def load_huaxin_local_buy_no():
    path = f"{constant.get_path_prefix()}/logs/huaxin_local/l2/l2_buy_no.{tool.get_now_date_str()}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    code = data.split("#")[0]
                    buy_no = int(data.split("#")[1])
                    if code not in fdatas:
                        fdatas[code] = set()
                    fdatas[code].add(buy_no)
    return fdatas


# Load Huaxin sell orders that were dealt
@cache_log
def load_huaxin_transaction_sell_no(code=None, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction_sell_order.{date}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    data = data.split("code=")[1]
                    code_ = data[:6]
                    if code and code != code_:
                        continue
                    data = data[6:].strip()
                    if code_ not in fdatas:
                        fdatas[code_] = []
                    fdatas[code_].append(eval(data))
    return fdatas


@cache_log
def load_huaxin_l2_sell_deal(code=None, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    time_str = __get_async_log_time(line)
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    if data.find("有涨停主动卖:") < 0:
                        continue
                    data = data.split("有涨停主动卖:")[1]
                    code_ = data[:6]
                    if code and code != code_:
                        continue
                    data = data[6:].strip()
                    volume = int(data.split("成交量-")[1].strip())
                    if code_ not in fdatas:
                        fdatas[code_] = []
                    fdatas[code_].append((time_str, volume))
    return fdatas


@cache_log
def load_huaxin_l2_sell_deal_list(code=None, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_deal.{date}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    time_str = __get_async_log_time(line)
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    if data.find("涨停主动买成交:") < 0:
                        continue
                    data = data.split("涨停主动买成交:")[1]
                    data = eval(data)
                    code_ = data[0][0]
                    if code and code != code_:
                        continue
                    if code_ not in fdatas:
                        fdatas[code_] = []
                    fdatas[code_].append((time_str, data))
    return fdatas


@cache_log
def load_huaxin_l2_sell_delegate(code=None, date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/sell_l2_delegate.{date}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    time_str = __get_async_log_time(line)
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    datas = data.split("-")
                    code_ = datas[0]
                    if code and code != code_:
                        continue
                    if code_ not in fdatas:
                        fdatas[code_] = []
                    fdatas[code_].append((time_str, datas[1], eval(datas[2])))
    return fdatas


# Load L2 market data snapshots
def load_l2_market_data():
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/marketdata.{tool.get_now_date_str()}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    data = line.split(" - ")[1].strip()
                    if data.startswith("["):
                        data = data[data.find("]") + 1:].strip()
                    code = data.split("#")[0]
                    d = data.split("#")[1].strip()
                    d = eval(d)
                    if code not in fdatas:
                        fdatas[code] = []
                    fdatas[code].append(d)
    return fdatas


# Read the system log
def load_system_log():
    path = f"{constant.get_path_prefix()}/logs/gp/system/system.{tool.get_now_date_str()}.log"
    fdatas = []
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    try:
                        time_str = line.split("|")[0].strip()
                        level = line.split("|")[1].strip()
                        if level != "INFO" and level != "ERROR":
                            continue
                        data = line.split("|")[2].split(" - ")[1].strip()
                        fdatas.append((time_str, level, data))
                    except:
                        pass
    return fdatas


# Load Huaxin L2 transaction data grouped by code
def load_huaxin_transaction_map(date=tool.get_now_date_str(), with_time=False):
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/transaction.{date}.log"
    fdatas = {}
    if os.path.exists(path):
        with open(path, 'r', encoding="utf-8") as f:
            lines = f.readlines()
            for line in lines:
                if line:
                    try:
                        data = line.split(" - ")[1].strip()
                        if data.startswith("["):
                            time_str = data[data.find("[") + 1:data.find("]")].strip()
                            data = data[data.find("]") + 1:].strip()
                        code = data.split("#")[0]
                        l2_data = eval(data.split("#")[1])
                        if code not in fdatas:
                            fdatas[code] = []
                        if with_time:
                            fdatas[code].append((time_str, l2_data))
                        else:
                            fdatas[code].append(l2_data)
                    except:
                        pass
    return fdatas


@cache_log
def load_huaxin_active_sell_map(date=tool.get_now_date_str()):
    path = f"{constant.get_path_prefix()}/logs/huaxin/trade/l2_active_sell.{date}.log"
    fdatas = {}
    lines = __load_file_content(path)
    for line in lines:
        if line:
            try:
                data = line.split(" - ")[1].strip()
                if data.startswith("["):
                    time_str = data[data.find("[") + 1:data.find("]")].strip()
                    data = data[data.find("]") + 1:].strip()
                data = data.split("code=")[1].strip()
                code = data[:data.find(" ")].strip()
                data = data[data.find(" "):].strip()
                data = eval(data)
                if code not in fdatas:
                    fdatas[code] = set()
                fdatas[code].add(data[0])
            except:
                pass
    return fdatas


def load_huaxin_big_buy_order(date=tool.get_now_date_str()):
    """
    Load Huaxin big buy orders
    @param date:
    @return:
    """
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_buy.{date}.log"
    fdatas = {}
    lines = __load_file_content(path)
    for line in lines:
        if line:
            try:
                data = line.split(" - ")[1].strip()
                if data.startswith("["):
                    time_str = data[data.find("[") + 1:data.find("]")].strip()
                    data = data[data.find("]") + 1:].strip()
                data = data.split("code=")[1].strip()
                code = data[:data.find(" ")].strip()
                data = data[data.find(" "):].strip()
                data = eval(data)
                if code not in fdatas:
                    fdatas[code] = []
                fdatas[code].extend(data)
            except:
                pass
    return fdatas


def load_huaxin_big_sell_order(date=tool.get_now_date_str()):
    """
    Load Huaxin big sell orders
    @param date:
    @return:
    """
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/l2_transaction_big_sell.{date}.log"
    fdatas = {}
    lines = __load_file_content(path)
    for line in lines:
        if line:
            try:
                data = line.split(" - ")[1].strip()
                if data.startswith("["):
                    time_str = data[data.find("[") + 1:data.find("]")].strip()
                    data = data[data.find("]") + 1:].strip()
                data = data.split("code=")[1].strip()
                code = data[:data.find(" ")].strip()
                data = data[data.find(" "):].strip()
                data = eval(data)
                if code not in fdatas:
                    fdatas[code] = []
                fdatas[code].extend(data)
            except:
                pass
    return fdatas


def load_huaxin_order_detail(date=tool.get_now_date_str()):
    """
    Load L2 order-by-order (order detail) data
    @param date:
    @return:
    """
    fdatas = []
    path = f"{constant.get_path_prefix()}/logs/huaxin/l2/orderdetail.{date}.log"
    lines = __load_file_content(path)
    for line in lines:
        if line:
            time_ = __get_async_log_time(line)
            line = line[line.rfind("#") + 1:]
            fdatas.append((time_, eval(line)))
    return fdatas


def load_pre_close_price(date=tool.get_now_date_str()):
    """
    Load previous close prices
    @param date:
    @return:
    """
    fdatas = {}
    path = f"{constant.get_path_prefix()}/logs/gp/code_attribute/pre_close_price.{date}.log"
    lines = __load_file_content(path)
    for line in lines:
        if line:
            data = line.split(" - ")[1]
            code, price = data.split("-")[0].strip(), data.split("-")[1].strip()
            fdatas[code] = price
    return fdatas


if __name__ == '__main__':
    pass
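    # Minimal manual smoke test (illustrative only; "000001" is a placeholder code and it
    # assumes today's log files exist under constant.get_path_prefix()):
    # print(get_l2_process_position("000001"))
    # print(load_cancel_buy_reasons("000001"))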