From 32203dcb2d06b93e4b6c81f9121b00531a91395e Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Fri, 06 Jun 2025 18:43:07 +0800
Subject: [PATCH] Bug fix

---
 strategy/time_series_backtest.py | 623 +++++++++++++++++++++++++++++++++++++++++++++++---------
 1 files changed, 518 insertions(+), 105 deletions(-)

diff --git a/strategy/time_series_backtest.py b/strategy/time_series_backtest.py
index ffbe31e..b69de53 100644
--- a/strategy/time_series_backtest.py
+++ b/strategy/time_series_backtest.py
@@ -1,22 +1,48 @@
+import constant
 from code_attribute import gpcode_manager, code_nature_analyse
+from strategy.data_analyzer import KPLLimitUpDataAnalyzer
+from strategy.data_downloader import DataDownloader
 from strategy.low_suction_strategy import LowSuctionOriginDataExportManager
+from strategy.strategy_params_settings import StrategyParamsSettings
 from strategy.strategy_variable import StockVariables
 from strategy.strategy_variable_factory import DataLoader, StrategyVariableFactory
+from third_data import kpl_util
+from third_data.third_blocks_manager import BlockMapManager
 from utils import tool, huaxin_util
 
 
 class BackTest:
 
-    def __init__(self, day):
+    def __init__(self, day, script_name="低吸脚本_辨识度v3.py", settings=StrategyParamsSettings()):
         self.day = day
         scripts = ""
-        with open("低吸脚本_辨识度.py", mode='r', encoding='utf-8') as f:
+        with open(script_name, mode='r', encoding='utf-8') as f:
             lines = f.readlines()
             scripts = "\n".join(lines)
         # Comment out the imports and variable assignments inside the strategy script
-        scripts = scripts.replace("from ", "#from ").replace("sv = ", "#sv = ")
+        scripts = scripts.replace("from ", "#from ").replace("sv = ", "#sv = ").replace("settings = ",
+                                                                                        "#settings = ").replace(
+            "target_code = ", "#target_code = ")
+        self.settings = settings
         self.scripts = scripts
+        self.RANGE_TIMES = ("09:25:00", "11:30:00")
+        self.current_time = '09:25:00'
+        self.stock_variables_dict = {}
+        self.data_loader: DataLoader = None
+        self.timeline_data = None
+        self.current_data = None
+        self.current_tick_data = None
+        self.fcodes = None
+        # Blocks of the leading limit-up codes, {code: {"block": (code, lead count, block's max lead count)}}
+        self.head_rise_code_blocks = {}
+        # Codes that have already been bought
+        self.deal_codes = set()
+        # Codes already bought, grouped by block
+        self.deal_block_codes = {}
+
+    def set_script(self, script):
+        self.scripts = script
 
     def load_before_date_data_by_timeline(self, data_loader: DataLoader):
         """
@@ -28,6 +54,7 @@
         timeline_data = []
         # Load historical data
         kline_data = data_loader.load_kline_data()
+        valid_codes = set(kline_data.keys())
         minute_data = {}  # data_loader.load_minute_data()
         limit_up_record_data = data_loader.load_limit_up_data()
         next_trade_day = data_loader.load_next_trade_day()
@@ -37,14 +64,53 @@
             raise Exception("历史日期获取失败")
         if not kline_data:
             raise Exception("历史涨停获取失败")
+        # Within the last 120 trading days, for each limit-up reason (block) keep the codes whose limit-up count ranks top 3
+        min_day = data_loader.trade_days[120 - 1]
+        block_code_dates = {}
+        for d in limit_up_record_data:
+            # Only count sealed boards
+            if d[3] != 0:
+                continue
+            if d[1] < min_day:
+                continue
+            code, date, block = d[0], d[1], d[2]
+            if block not in block_code_dates:
+                block_code_dates[block] = {}
+            if code not in block_code_dates[block]:
+                block_code_dates[block][code] = set()
+            block_code_dates[block][code].add(date)
+        # Collect each counted code's limit-up reasons
+        code_blocks = {}
+        for b in block_code_dates:
+            if b in constant.KPL_INVALID_BLOCKS:
+                continue
+            # if b == '跨境电商':
+            #     print("")
+            code_limit_up_count_list = [(x, len(block_code_dates[b][x])) for x in block_code_dates[b]]
+            code_limit_up_count_list.sort(key=lambda e: e[1], reverse=True)
+            end_index = 3
+            # code_limit_up_count_list = code_limit_up_count_list[:3]
+            for i in range(end_index, len(code_limit_up_count_list)):
+                if code_limit_up_count_list[end_index - 1][1] == code_limit_up_count_list[i][1]:
+                    end_index = i + 1
+            code_limit_up_count_list = code_limit_up_count_list[:end_index]
+            for x in code_limit_up_count_list:
+                if x[1] < 3:
+                    continue
+                if x[0] not in code_blocks:
+                    code_blocks[x[0]] = set()
+                code_blocks[x[0]].add(b)
         return {
             'date': day,
             'kline_data': kline_data,
+            'valid_codes': valid_codes,
             'minute_data': minute_data,
             'limit_up_record_data': limit_up_record_data,
+            'limit_up_record_data_list': limit_up_record_data,
             "trade_days": trade_days,
-            "next_trade_day": next_trade_day
+            "next_trade_day": next_trade_day,
+            "code_blocks": code_blocks
         }
 
     def load_current_date_data_by_timeline(self):
@@ -53,14 +119,18 @@
         :param day: date, in "YYYY-MM-DD" format
         :return: data list sorted by time
         """
+        if self.day >= '2025-05-26':
+            IS_BY_BIG_ORDER = True
+        else:
+            IS_BY_BIG_ORDER = False
         day = self.day
         fdata = {}
         __LowSuctionOriginDataExportManager = LowSuctionOriginDataExportManager(day)
         all_limit_up_list = __LowSuctionOriginDataExportManager.export_limit_up_list()
         fdata["limit_up_list"] = {d[0][:8]: d[1] for d in all_limit_up_list}
-        big_order_deals = __LowSuctionOriginDataExportManager.export_big_order_deal()
-        if not big_order_deals:
-            big_order_deals = __LowSuctionOriginDataExportManager.export_big_order_deal_by()
+        big_order_deals = __LowSuctionOriginDataExportManager.export_big_order_deal(BIG_ORDER_MONEY_THRESHOLD)
+        if not big_order_deals or IS_BY_BIG_ORDER:
+            big_order_deals = __LowSuctionOriginDataExportManager.export_big_order_deal_by(BIG_ORDER_MONEY_THRESHOLD)
         # Convert to format: {time: [("code", (buy order no., volume, amount, time, final deal price))]}
         big_order_deals_dict = {}
         for code in big_order_deals:
@@ -74,9 +144,10 @@
             datas = big_order_deals_dict[k]
             datas.sort(key=lambda x: huaxin_util.convert_time(x[1][3], True))
         fdata["big_order"] = big_order_deals_dict
-        big_sell_order_deals = __LowSuctionOriginDataExportManager.export_big_sell_order_deal()
-        if not big_sell_order_deals:
-            big_sell_order_deals = __LowSuctionOriginDataExportManager.export_big_sell_order_deal_by()
+        big_sell_order_deals = __LowSuctionOriginDataExportManager.export_big_sell_order_deal(BIG_ORDER_MONEY_THRESHOLD)
+        if not big_sell_order_deals or IS_BY_BIG_ORDER:
+            big_sell_order_deals = __LowSuctionOriginDataExportManager.export_big_sell_order_deal_by(
+                BIG_ORDER_MONEY_THRESHOLD)
         big_sell_order_deals_dict = {}
         for code in big_sell_order_deals:
             for order in big_sell_order_deals[code]:
@@ -94,8 +165,25 @@
 
         zylt_volume_dict = __LowSuctionOriginDataExportManager.export_zylt_volume()
         fdata["zylt_volume"] = zylt_volume_dict
+        # Load each code's blocks
         code_plates_dict = __LowSuctionOriginDataExportManager.export_code_plates()
+
+        code_plates_dict_for_refer = self.data_loader.load_code_plates_for_refer()
+
+        plate_codes = self.data_loader.load_target_plate_and_codes()
+        code_plates_dict_for_buy = {}
+        for p in plate_codes:
+            for code in plate_codes.get(p):
+                if code not in code_plates_dict_for_buy:
+                    code_plates_dict_for_buy[code] = set()
+                code_plates_dict_for_buy[code].add(p)
+        fdata["code_plates_for_buy"] = code_plates_dict_for_buy
+        fdata["code_plates_for_refer"] = code_plates_dict_for_refer
+
         fdata["code_plates"] = code_plates_dict
+        # Load block capital inflow (positive = inflow)
+        block_in_datas = __LowSuctionOriginDataExportManager.export_block_in_datas()
+        fdata["block_in"] = {d[0][:8]: d[1] for d in block_in_datas}
 
         special_codes = __LowSuctionOriginDataExportManager.export_special_codes()
         temp_code_plates = {}
@@ -106,6 +194,12 @@
                 temp_code_plates[code].add(plate)
         for code in temp_code_plates:
             code_plates_dict[code] = temp_code_plates[code]
+        # Get the leading stocks under every limit-up reason; resulting format: {"code": {block names}}
+        refer_plates_of_codes = self.data_loader.load_all_refer_plates_of_codes()
+        fdata["limit_up_plate_names_of_refer_code"] = refer_plates_of_codes
+
+        fdata["all_buy_plates_of_codes"] = self.data_loader.load_all_buy_plates_of_codes()
+
         # print("*****", plate_names_of_code.get("600774"))
 
         if not fdata["zylt_volume"]:
             raise Exception("无自由流通数据")
@@ -115,16 +209,18 @@
             raise Exception("无大单数据")
         if not fdata["limit_up_list"]:
             raise Exception("无涨停数据")
+        if not fdata["limit_up_plate_names_of_refer_code"]:
+            raise Exception("无涨停领涨原因数据")
 
         return fdata
 
     def load_current_tick_datas(self, data_loader: DataLoader):
         """
         Load tick data
-        :param day: date, in "YYYY-MM-DD" format
-        :return: tick data
+        @param data_loader:
+        @return: tick data
         """
-        code_tick_datas = data_loader.load_tick_data()
+        code_tick_datas = data_loader.load_tick_data(target_codes=self.fcodes)
         # Group by time
         fdata = {}
         for code in code_tick_datas:
@@ -146,19 +242,21 @@
         """
         global_dict = {
             "sv": stock_variables,
-            "target_code": code
+            "target_code": code,
+            "settings": self.settings
         }
         exec(self.scripts, global_dict)
         return global_dict["compute_result"]
 
     def __filter_codes(self, current_data, timeline_data):
         code_plates = current_data["code_plates"]
-        start_time, end_time = "09:25:00", "11:30:00"
+        start_time, end_time = self.RANGE_TIMES[0], self.RANGE_TIMES[1]
         fplates = set()
         for i in range(60 * 60 * 5):
             time_str = tool.trade_time_add_second(start_time, i)
             if time_str > end_time:
                 break
+            self.current_time = time_str
             # Collect the current limit-up data
             current_limit_up_list = current_data["limit_up_list"].get(time_str)
             if current_limit_up_list:
@@ -187,12 +285,9 @@
 
         return fcodes
 
-    def __get_target_codes(self):
-        special_codes = LowSuctionOriginDataExportManager(self.day).export_special_codes()
-        fcodes = set()
-        for codes in [special_codes[p] for p in special_codes]:
-            fcodes |= codes
-        return fcodes
+    def __get_target_codes_v4(self):
+        valid_codes = self.timeline_data["valid_codes"]
+        return set(self.current_data["code_plates_for_buy"].keys()) & valid_codes
 
     def init_stock_variables(self, code_, timeline_data, current_data):
         """
@@ -202,73 +297,244 @@
         """
         if code_ in self.stock_variables_dict:
             return
+
         stock_variables = StrategyVariableFactory.create_from_history_data(
             timeline_data["kline_data"].get(code_),
             timeline_data["minute_data"].get(code_),
             timeline_data["limit_up_record_data"].get(code_),
             timeline_data["trade_days"])
+
         # Load today's limit-up price
         pre_close = timeline_data["kline_data"].get(code_)[0]["close"]
         stock_variables.今日涨停价 = round(float(gpcode_manager.get_limit_up_price_by_preprice(code_, pre_close)), 2)
         stock_variables.自由流通市值 = current_data["zylt_volume"].get(code_) * pre_close
         # Get the code's blocks
-        stock_variables.代码板块 = current_data["code_plates"].get(code_)
+        stock_variables.代码板块 = current_data["code_plates_for_buy"].get(code_)
         is_price_too_high = code_nature_analyse.is_price_too_high_in_days(code_,
                                                                           timeline_data["kline_data"].get(code_),
                                                                           stock_variables.今日涨停价)
         # if is_price_too_high[0]:
         #     print("六个交易日涨幅过高", code_)
         stock_variables.六个交易日涨幅过高 = is_price_too_high[0]
+        stock_variables.新代码板块 = timeline_data["code_blocks"].get(code_)
+        stock_variables.辨识度代码 = self.fcodes
+        stock_variables.领涨板块信息 = self.head_rise_code_blocks.get(code_)
+        if code_ in DEBUG_CODES:
+            print(code_, stock_variables.领涨板块信息)
+
+        for day in [2, 5, 10, 30, 60, 120]:
+            days = timeline_data["trade_days"][:day]
+            stock_variables.__setattr__(f"日出现的板块_{day}",
+                                        KPLLimitUpDataAnalyzer.get_limit_up_reasons(
+                                            timeline_data["limit_up_record_data_list"], min_day=days[-1],
+                                            max_day=days[0]))
+        stock_variables.连续老题材 = KPLLimitUpDataAnalyzer.get_continuous_limit_up_reasons(
+            timeline_data["limit_up_record_data_list"], self.data_loader.trade_days[:2])
+
         self.stock_variables_dict[code_] = stock_variables
 
-    def run(self):
-        data_loader = DataLoader(self.day)
-        current_data = self.load_current_date_data_by_timeline()
+    def load_data(self):
+        """
+        Load all required data
+        @return: historical data, today's data, tick data
+        """
+        # Download data in advance
+        __DataLoader = DataLoader(self.day)
+        plates = __DataLoader.get_limit_up_reasons_with_plate_code()
+        for p in plates:
+            __DataLoader.load_plate_codes(p[0], p[1])
+
+        if not self.data_loader:
+            self.data_loader = DataLoader(self.day)
+        if not self.current_data:
+            self.current_data = self.load_current_date_data_by_timeline()
         # Load data along the timeline
-        timeline_data = self.load_before_date_data_by_timeline(data_loader)
+        if not self.timeline_data:
+            self.timeline_data = self.load_before_date_data_by_timeline(self.data_loader)
         # TODO Output the target codes
-        fcodes = self.__get_target_codes()  # __filter_codes(current_data, timeline_data)
-        # print(len(fcodes), fcodes)
-        current_tick_data = self.load_current_tick_datas(data_loader)
+        if not self.fcodes:
+            # self.fcodes, self.head_rise_code_blocks = self.__get_target_codes_v3()  # __filter_codes(current_data, timeline_data)
+            self.fcodes, self.head_rise_code_blocks = self.__get_target_codes_v4(), {}
+
+        print(len(self.fcodes), self.fcodes)
+        if not self.current_tick_data:
+            try:
+                self.current_tick_data = self.load_current_tick_datas(self.data_loader)
+            except:
+                pass
+
+        __DataDownloader = DataDownloader(self.day, self.data_loader.trade_days)
+        __DataDownloader.download_tick_data(self.fcodes)
+
+    def __statistic_big_order_info(self, stock_variables: StockVariables):
+        """
+        Aggregate big-order statistics
+        @param stock_variables:
+        @return:
+        """
+        infos = []
+        thresholds = [50, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 10000]
+        for i in range(len(thresholds)):
+            if i >= len(thresholds) - 1:
+                break
+            start, end = thresholds[i], thresholds[i + 1]
+            info = [f"{start}w-{end}w", 0, None, None]
+            # Aggregate buy orders
+            total_buy_count = 0
+            total_buy_volume = 0
+            total_buy_money = 0
+            if stock_variables.今日大单数据:
+                order_ids = set()
+                for d in reversed(stock_variables.今日大单数据):
+                    if d[0] in order_ids:
+                        continue
+                    order_ids.add(d[0])
+                    if start * 10000 <= d[2] < end * 10000:
+                        total_buy_count += 1
+                        total_buy_money += d[2]
+                        total_buy_volume += d[1]
+            total_sell_count = 0
+            total_sell_money = 0
+            total_sell_volume = 0
+            if stock_variables.今日卖大单数据:
+                order_ids = set()
+                for d in reversed(stock_variables.今日卖大单数据):
+                    if d[0] in order_ids:
+                        continue
+                    order_ids.add(d[0])
+                    if start * 10000 <= d[2] < end * 10000:
+                        total_sell_count += 1
+                        total_sell_money += d[2]
+                        total_sell_volume += d[1]
+            info[1] = f"{round((total_buy_volume - total_sell_volume) * 100 / stock_variables.今日成交量, 2)}%"
+            info[2] = (total_buy_count, total_buy_money, total_buy_volume)
+            info[3] = (total_sell_count, total_sell_money, total_sell_volume)
+            if info[2][0] > 0 or info[3][0] > 0:
+                infos.append(info)
+        return ";".join([f"{x[0]}==净额:{x[1]},买单：{x[2]},卖单：{x[3]}" for x in infos])
+
+    def run(self):
+        self.load_data()
+        # print(self.fcodes)
         limit_up_record_data_dict = {}
-        for limit_up_item in timeline_data["limit_up_record_data"]:
+        for limit_up_item in self.timeline_data["limit_up_record_data"]:
             if limit_up_item[0] not in limit_up_record_data_dict:
                 limit_up_record_data_dict[limit_up_item[0]] = []
             limit_up_record_data_dict[limit_up_item[0]].append(limit_up_item)
-        timeline_data["limit_up_record_data"] = limit_up_record_data_dict
-        next_trade_day = timeline_data["next_trade_day"]
+        self.timeline_data["limit_up_record_data"] = limit_up_record_data_dict
+        next_trade_day = self.timeline_data["next_trade_day"]
         start_time, end_time = "09:25:00", "12:00:00"
         # Minute K-line bars
         minute_bars_dict = {}
-        code_plates = current_data["code_plates"]
-        # Blocks and the codes already bought in them: {"block": {"000333"}}
-        deal_block_codes = {}
-        deal_codes = set()
-        print("======", self.day)
-        # Build the backtest timeline
+        code_plates = self.current_data["code_plates"]
+        code_plates_for_refer = self.current_data["code_plates_for_refer"]
+
+        # Limit-up code info per block
+        kpl_plate_limit_up_codes_info = None
+        plate_limit_up_codes_info = None
+        kpl_head_plate_limit_up_codes_info = None
+
+        latest_current_limit_up_list = None
+
+        latest_block_in_datas = None
+
+        # Get target stocks by block
+        target_plate_codes_infos = {}
+        for code in self.head_rise_code_blocks:
+            for p in self.head_rise_code_blocks[code]:
+                if p not in target_plate_codes_infos:
+                    target_plate_codes_infos[p] = []
+                target_plate_codes_infos[p].append(self.head_rise_code_blocks[code][p])
+        for p in target_plate_codes_infos:
+            target_plate_codes_infos[p].sort(key=lambda x: x[1], reverse=True)
+
+        all_new_plates = set()
+
         for i in range(60 * 60 * 5):
             time_str = tool.trade_time_add_second(start_time, i)
+            # print(f"[{tool.get_now_time_str()}]", time_str)
             if time_str > end_time:
                 break
-            ticks = current_tick_data.get(time_str)
-            # Collect the current limit-up data
-            current_limit_up_list = current_data["limit_up_list"].get(time_str)
+            ticks = self.current_tick_data.get(time_str) if self.current_tick_data else None
+            # =============== Collect the current limit-up data
+            origin_current_limit_up_list = self.current_data["limit_up_list"].get(time_str, [])
+            current_limit_up_list = [x for x in origin_current_limit_up_list if kpl_util.get_high_level_count(x[4]) < 3]
+
             if current_limit_up_list:
-                # Count limit-ups per block
+                latest_current_limit_up_list = current_limit_up_list
+
+            if current_limit_up_list:
                 plate_codes_info = {}
+                # Count limit-ups per block
                 for x in current_limit_up_list:
+                    # Count the codes of limit-up blocks by each code's own blocks
+                    # A limit-up only counts as valid after it has lasted 1 minute
+                    if tool.trade_time_sub(time_str, tool.timestamp_format(x[2], "%H:%M:%S")) < 60:
+                        continue
                     plates = code_plates.get(x[0])
                     if plates:
                         for p in plates:
                             if p not in plate_codes_info:
                                 plate_codes_info[p] = []
                             plate_codes_info[p].append((x[0], x[2]))
-            else:
-                plate_codes_info = None
+                plate_limit_up_codes_info = plate_codes_info
 
-            # Big orders at the current moment
-            current_big_orders = current_data["big_order"].get(time_str)
+                plate_codes_info = {}
+                for x in current_limit_up_list:
+                    # Count by the KPL (开盘啦) limit-up reason
+                    p = x[5]
+                    if p in constant.KPL_INVALID_BLOCKS:
+                        continue
+                    if p not in plate_codes_info:
+                        plate_codes_info[p] = []
+                    # If the leading codes do not include the current stock, do not count this block's limit-up reason
+                    # Fetch the leading-stock data
+                    # head_plate_codes_info = self.data_loader.load_plate_codes(x[9], p)
+                    # if head_plate_codes_info:
+                    #     plate_codes = set([x[0] for x in head_plate_codes_info])
+                    # else:
+                    #     plate_codes = set()
+                    # if x[0] not in plate_codes:
+                    #     continue
+                    plate_codes_info[p].append((x[0], x[2], x[4]))
+                kpl_plate_limit_up_codes_info = plate_codes_info
+
+                # {"code": [(block code, block name)]}
+                limit_up_plate_names_of_refer_code = self.current_data["limit_up_plate_names_of_refer_code"]
+                plate_codes_info = {}
+                for x in current_limit_up_list:
+                    # Count by the KPL limit-up reason
+                    code = x[0]
+                    # if code not in limit_up_plate_names_of_refer_code:
+                    #     continue
+                    # If the recorded limit-up time is more than 20 minutes ago, use the limit-up queue's reason
+                    if tool.trade_time_sub(time_str, tool.timestamp_format(x[2], "%H:%M:%S")) < 60 * 20 or True:
+                        plates_infos = limit_up_plate_names_of_refer_code.get(code)
+                        plates = set([d[1] for d in plates_infos if d[1] == x[5]]) if plates_infos else set()
+                    else:
+                        plates = {x[5]}
+
+                    new_plates = set()
+                    for p in plates:
+                        if p in constant.KPL_INVALID_BLOCKS:
+                            continue
+                        new_plates.add(p)
+                    for p in new_plates:
+                        if p not in plate_codes_info:
+                            plate_codes_info[p] = []
+                        plate_codes_info[p].append((x[0], x[2]))
+                kpl_head_plate_limit_up_codes_info = plate_codes_info
+
+            # ================== Inject block capital inflow
+            block_in_datas = self.current_data["block_in"].get(time_str)
+            if block_in_datas:
+                blocks = [x[0] for x in block_in_datas if x[1] > 0]
+                block_in_datas = blocks[:20]
+                latest_block_in_datas = block_in_datas
+
+            # ================ Big orders at the current moment
+            current_big_orders = self.current_data["big_order"].get(time_str)
             if current_big_orders:
                 for big_order in current_big_orders:
                     # Format: ("code", (buy order no., volume, amount, time, final deal price))
-                    self.init_stock_variables(big_order[0], timeline_data, current_data)
+                    self.init_stock_variables(big_order[0], self.timeline_data, self.current_data)
                     stock_variables: StockVariables = self.stock_variables_dict.get(big_order[0])
                     if stock_variables.今日大单数据 is None:
                         stock_variables.今日大单数据 = []
@@ -287,40 +553,74 @@
                         stock_variables.今日大单均价 = round(total_money / total_volume, 2)
                     else:
                         stock_variables.今日大单均价 = 0
-
-            current_big_sell_orders = current_data["big_sell_order"].get(time_str)
+            current_big_sell_orders = self.current_data["big_sell_order"].get(time_str)
             if current_big_sell_orders:
                 for big_order in current_big_sell_orders:
                     # Format: ("code", (buy order no., volume, amount, time, final deal price))
-                    self.init_stock_variables(big_order[0], timeline_data, current_data)
+                    self.init_stock_variables(big_order[0], self.timeline_data, self.current_data)
                     stock_variables: StockVariables = self.stock_variables_dict.get(big_order[0])
                     if stock_variables.今日卖大单数据 is None:
                         stock_variables.今日卖大单数据 = []
                     stock_variables.今日卖大单数据.append(big_order[1])
+
+            # The most genuine KPL limit-up reasons
+            most_real_kpl_plate_limit_up_codes_info = {}
+            # Get the target stocks of these blocks
+            if kpl_plate_limit_up_codes_info:
+                current_limit_up_dict = {x[0]: x for x in latest_current_limit_up_list}
+                codes = set()
+                for plate in kpl_plate_limit_up_codes_info:
+                    kpl_plate_codes = kpl_plate_limit_up_codes_info.get(plate)
+                    codes |= set([x[0] for x in kpl_plate_codes])
+                for code in codes:
+                    plates = code_plates.get(code)
+                    if not plates:
+                        plates = {current_limit_up_dict.get(code)[5]}
+                    plates -= constant.KPL_INVALID_BLOCKS
+                    if plates:
+                        for p in plates:
+                            if p not in most_real_kpl_plate_limit_up_codes_info:
+                                most_real_kpl_plate_limit_up_codes_info[p] = []
+                            most_real_kpl_plate_limit_up_codes_info[p].append(code)
+
             if ticks:
                 for tick in ticks:
                     code = tick["symbol"][-6:]
-                    if code not in fcodes:
+                    # if code not in self.fcodes:
+                    #     continue
+                    if DEBUG_CODES and code not in DEBUG_CODES:
                         continue
+
                     if code not in self.stock_variables_dict:
                         # Load the base data
-                        self.init_stock_variables(code, timeline_data, current_data)
+                        self.init_stock_variables(code, self.timeline_data, self.current_data)
                     stock_variables: StockVariables = self.stock_variables_dict.get(code)
-                    stock_variables.板块成交代码 = deal_block_codes
-                    # Set the limit-up data
-                    if plate_codes_info is not None:
-                        stock_variables.板块涨停 = plate_codes_info
-                    if code not in minute_bars_dict:
-                        minute_bars_dict[code] = [tick]
-                    if minute_bars_dict[code][-1]["created_at"][:-2] == tick["created_at"][:-2]:
-                        # Build the minute K-line bar
-                        minute_bars_dict[code][-1] = tick
-                    else:
-                        # Keep the minute bar's highest price
-                        if not stock_variables.今日最高价:
-                            stock_variables.今日最高价 = minute_bars_dict[code][-1]["price"]
-                        if minute_bars_dict[code][-1]["price"] > stock_variables.今日最高价:
-                            stock_variables.今日最高价 = minute_bars_dict[code][-1]["price"]
+                    if plate_limit_up_codes_info is not None:
+                        stock_variables.板块涨停 = plate_limit_up_codes_info
+
+                    if kpl_plate_limit_up_codes_info is not None:
+                        stock_variables.开盘啦板块涨停 = kpl_plate_limit_up_codes_info
+
+                    if kpl_head_plate_limit_up_codes_info is not None:
+                        stock_variables.开盘啦领涨板块涨停 = kpl_head_plate_limit_up_codes_info
+
+                    stock_variables.板块成交代码 = self.deal_block_codes
+                    # Block capital-inflow data
+                    if latest_block_in_datas:
+                        stock_variables.资金流入板块 = latest_block_in_datas
+                    # Minute K-line bars are not used for now
+                    # if code not in minute_bars_dict:
+                    #     minute_bars_dict[code] = [tick]
+                    # if minute_bars_dict[code][-1]["created_at"][:-2] == tick["created_at"][:-2]:
+                    #     # Build the minute K-line bar
+                    #     minute_bars_dict[code][-1] = tick
+                    # else:
+                    #     # Keep the minute bar's highest price
+                    #     if not stock_variables.今日最高价:
+                    #         stock_variables.今日最高价 = minute_bars_dict[code][-1]["price"]
+                    #     if minute_bars_dict[code][-1]["price"] > stock_variables.今日最高价:
+                    #         stock_variables.今日最高价 = minute_bars_dict[code][-1]["price"]
+
+                    # Save the opening price
                     if tick["created_at"][-8:] < '09:30:00':
                         stock_variables.今日开盘价 = tick["price"]
@@ -328,54 +628,167 @@
                         stock_variables.今日开盘涨幅 = round((tick["price"] - stock_variables.昨日收盘价) / stock_variables.昨日收盘价, 4)
                     stock_variables.今日成交量 = tick["cum_volume"]
+                    stock_variables.今日成交额 = tick["cum_amount"]
                     stock_variables.当前价 = tick["price"]
-                    # Decide whether to buy according to the expression
-                    # compute_result = __run_backtest(code, stock_variables)
-                    # # print("回测结果：",code, compute_result)
-                    # if compute_result[0] and code not in deal_codes:
-                    #     # TODO place the order
-                    #     deal_codes.add(code)
-                    #     print("======回测结果：", code, tick["created_at"], tick["price"], compute_result[2])
-                    #     for b in compute_result[1]:
-                    #         if b not in deal_block_codes:
-                    #             deal_block_codes[b] = set()
-                    #         deal_block_codes[b].add(code)
+                    if not stock_variables.今日量够信息:
+                        if stock_variables.今日成交量 > stock_variables.昨日成交量 * 0.8:
+                            stock_variables.今日量够信息 = (time_str, stock_variables.当前价, round(
+                                (stock_variables.当前价 - stock_variables.昨日收盘价) * 100 / stock_variables.昨日收盘价, 2),
+                                                    self.__statistic_big_order_info(stock_variables))
+                            if VOLUME_LOG_ENABLE:
+                                # Big-order net amount stats: (above 50w, net amount, buy count/total buy amount, sell count/total sell amount)
+                                print("****量够", code, stock_variables.今日量够信息)
+
+                    # Track today's highest price
+                    # if stock_variables.今日最高价 and tick["price"] > stock_variables.今日最高价:
+                    #     print(code, "====突破分时最高价：", tick["created_at"], tick["price"])
+
+                    if not stock_variables.今日最高价信息 or tick["price"] > stock_variables.今日最高价信息[0]:
+                        stock_variables.今日最高价信息 = (tick["price"], time_str)
+
+                    if not stock_variables.今日最低价 or tick["price"] < stock_variables.今日最低价:
+                        stock_variables.今日最低价 = tick["price"]
+
+                    stock_variables.开盘啦最正板块涨停 = most_real_kpl_plate_limit_up_codes_info
+
+                    # compute_result = self.__run_backtest(code, stock_variables)
+                    # self.__process_test_result(code, stock_variables, next_trade_day, stock_variables.当前价,
+                    #                            time_str, compute_result)
+
+            # if len(real_codes) >= 2 and time_str > '09:30:00':
+            #     # print(time_str, plate)
+            #     # Find the stock with the most leading limit-ups in this block
+            #     codes_infos = target_plate_codes_infos.get(plate)
+            #     if codes_infos:
+            #         for code_info in codes_infos:
+            #             code = code_info[0]
+            #             self.init_stock_variables(code, self.timeline_data, self.current_data)
+            #             stock_variables: StockVariables = self.stock_variables_dict.get(code)
+            #             compute_result = self.__run_backtest(code, stock_variables)
+            #             if compute_result[0] and plate not in all_new_plates:
+            #                 all_new_plates.add(plate)
+            #                 print(plate, time_str, code_info, real_codes)
+            # else:
+            #     pass
 
             # Big-order driven
-            if current_big_orders:
+            if current_big_orders and time_str >= '09:30:00':
                 for big_order in current_big_orders:
                     code = big_order[0]
-                    self.init_stock_variables(code, timeline_data, current_data)
+                    if code not in self.fcodes:
+                        continue
+                    self.init_stock_variables(code, self.timeline_data, self.current_data)
                     stock_variables: StockVariables = self.stock_variables_dict.get(code)
+                    if plate_limit_up_codes_info is not None:
+                        stock_variables.板块涨停 = plate_limit_up_codes_info
+
+                    if kpl_plate_limit_up_codes_info is not None:
+                        stock_variables.开盘啦板块涨停 = kpl_plate_limit_up_codes_info
+
+                    if kpl_head_plate_limit_up_codes_info is not None:
+                        stock_variables.开盘啦领涨板块涨停 = kpl_head_plate_limit_up_codes_info
+
+                    if most_real_kpl_plate_limit_up_codes_info is not None:
+                        stock_variables.开盘啦最正板块涨停 = most_real_kpl_plate_limit_up_codes_info
+
+                    if block_in_datas:
+                        stock_variables.资金流入板块 = block_in_datas
                     compute_result = self.__run_backtest(code, stock_variables)
-                    # print("回测结果：",code, compute_result)
-                    # if code == '002640':
-                    #     print(code, big_order, compute_result)
-                    if compute_result[0] and code not in deal_codes:
-                        # TODO place the order
-                        deal_codes.add(code)
-                        next_k_bars = data_loader.load_kline_data_by_day_and_code(next_trade_day, code)
-                        current_k_bars = data_loader.load_kline_data_by_day_and_code(data_loader.now_day, code)
-                        if next_k_bars:
-                            t_rate = round((next_k_bars[0]["open"] - big_order[1][4]) * 100 / big_order[1][4], 2)
-                            t_rate = f"{t_rate}%"
-                        else:
-                            t_rate = "未知"
-                        if current_k_bars:
-                            c_rate = round((current_k_bars[0]["close"] - big_order[1][4]) * 100 / big_order[1][4], 2)
-                            c_rate = f"{c_rate}%"
-                        else:
-                            c_rate = "未知"
+                    # print(compute_result)
+                    self.__process_test_result(code, stock_variables, next_trade_day, big_order[1][4],
+                                               huaxin_util.convert_time(big_order[1][3]), compute_result)
 
-                        print("======回测结果：", code, f"溢价率：{t_rate},当日盈亏：{c_rate}", compute_result[2])
-                        for b in compute_result[1]:
-                            if b not in deal_block_codes:
-                                deal_block_codes[b] = set()
-                            deal_block_codes[b].add(code)
+        print("可买题材：", all_new_plates)
 
+    def __process_test_result(self, code, stock_variables: StockVariables, next_trade_day, buy_price, time_str,
+                              compute_result):
+
+        # if code == '000628':
+        #     print(time_str, code, compute_result)
+
+        if not compute_result[0]:
+            if code in DEBUG_CODES:
+                print(time_str, code, compute_result[1])
+            # if compute_result[1].find("大单") >= 0 or compute_result[1].find("价格超过昨日最低价") >= 0:
+            pass
+
+        # print(code, time_str, stock_variables.代码板块, compute_result)
+
+        if compute_result[0] and code not in self.deal_codes:
+            # Buy at most 5
+            if len(self.deal_codes) >= 100:
+                return
+            # if huaxin_util.convert_time(big_order[1][3]) >= "10:30:00" and len(deal_codes) > 0:
+            #     break
+            self.deal_codes.add(code)
+            next_k_bars = self.data_loader.load_kline_data_by_day_and_code(next_trade_day, code)
+            current_k_bars = self.data_loader.load_kline_data_by_day_and_code(self.data_loader.now_day,
+                                                                              code)
+            if next_k_bars and buy_price:
+                t_rate = round((next_k_bars[0]["open"] - buy_price) * 100 / stock_variables.昨日收盘价, 2)
+                t_rate = f"{t_rate}%"
+            else:
+                # Get the current tick line
+                if self.data_loader.now_day >= next_trade_day:
+                    ticks = self.data_loader.jueJinLocalApi.get_history_tick_n(code, 1, frequency='tick',
+                                                                               end_date=f"{next_trade_day} 09:30:03")
+                else:
+                    ticks = None
+                if ticks:
+                    t_rate = round((ticks[-1]["price"] - buy_price) * 100 / stock_variables.昨日收盘价, 2)
+                    t_rate = f"{t_rate}%"
+                else:
+                    t_rate = "未知"
+            if current_k_bars and buy_price:
+                c_rate = round((current_k_bars[0]["close"] - buy_price) * 100 / current_k_bars[0]["pre_close"], 2)
+                c_rate = f"{c_rate}%"
+            else:
+                # Pull the current day's K-line
+                if tool.get_now_date_str() == self.data_loader.now_day and buy_price:
+                    tick = self.data_loader.jueJinLocalApi.get_history_tick_n(code, 1, frequency='tick',
+                                                                              end_date=f"{self.data_loader.now_day} {tool.get_now_time_str()}")
+                    c_rate = round((tick[0]["price"] - buy_price) * 100 / stock_variables.昨日收盘价, 2)
+                else:
+                    bar = self.data_loader.jueJinLocalApi.get_history_tick_n(code, 1,
+                                                                             end_date=f"{self.data_loader.now_day} 15:00:00")
+                    if bar:
+                        c_rate = round((bar[0]["close"] - buy_price) * 100 / bar[0]["pre_close"], 2)
+                    else:
+                        c_rate = "未知"
+            print(f"{len(self.deal_codes)}==回测结果：", code, gpcode_manager.CodesNameManager().get_code_name(code),
+                  f"溢价率：{t_rate},当日盈亏：{c_rate}，下单时间：{time_str}，涨幅：{round((buy_price - stock_variables.昨日收盘价) * 100 / stock_variables.昨日收盘价, 2)}",
+                  compute_result[1],
+                  compute_result[2])
+            for b in compute_result[3]:
+                if b not in self.deal_block_codes:
+                    self.deal_block_codes[b] = set()
+                self.deal_block_codes[b].add(code)
+            stock_variables.板块成交代码 = self.deal_block_codes
+
+
+# DEBUG_CODES = ['002194', '002583', '603083', '002130', '002436']
+DEBUG_CODES = []
+
+VOLUME_LOG_ENABLE = False
+# Backup big orders
+
+
+DEBUG_BLOCKS = []
+
+BIG_ORDER_MONEY_THRESHOLD = 200e4
 
 if __name__ == "__main__":
-    days = ["2025-05-06", "2025-05-07", "2025-05-08", "2025-05-09", "2025-05-12"]
+    back_test_dict = {}
+    # days = ["2025-05-06", "2025-05-07", "2025-05-08", "2025-05-09", "2025-05-12", "2025-05-13", "2025-05-14",
+    #         "2025-05-15", "2025-05-16"]
+    days = ["2025-05-12", "2025-05-13", "2025-05-14", "2025-05-15", "2025-05-16", "2025-05-19", "2025-05-20",
+            "2025-05-21", "2025-05-22", "2025-05-23", "2025-05-26", "2025-05-27", "2025-05-28", "2025-05-29",
+            "2025-05-30", "2025-06-03", "2025-06-04", "2025-06-05", "2025-06-06"]
     days.reverse()
     for day in days:
-        BackTest(day).run()
+        if day not in back_test_dict:
+            # back_test_dict[day] = BackTest(day, "今日量是否足够.py")
+            back_test_dict[day] = BackTest(day, "低吸脚本_辨识度v6.py")
        print("=========================", day)
+        # back_test_dict[day].run_volume()
+        back_test_dict[day].run()
-- 
Gitblit v1.8.0