from db import redis_manager_delegate as redis_manager
from db.redis_manager_delegate import RedisUtils
from l2.code_price_manager import Buy1PriceManager
from l2.huaxin import l2_huaxin_util
from l2.l2_data_manager import OrderBeginPosInfo
from l2.l2_sell_manager import L2LimitUpSellManager
from log_module import async_log_util

    LCancelBigNumComputer().set_real_place_order_index(code, index, buy_single_index=buy_single_index)
    HourCancelBigNumComputer().set_real_place_order_index(code, index, buy_single_index)
    GCancelBigNumComputer().set_real_place_order_index(code, index, buy_single_index)
    FCancelBigNumComputer().set_real_order_index(code, index)

class SecondCancelBigNumComputer:

        self.__cancel_watch_index_info_cache[code] = (buy_single_index, re_compute, indexes)
        RedisUtils.delete_async(self.__db, f"l_cancel_watch_index_info-{code}")
        RedisUtils.setex_async(self.__db, f"l_cancel_watch_index_info-{code}", tool.get_expire(),
                               json.dumps((buy_single_index, re_compute, list(indexes))))
        if indexes:
            trade_record_log_util.add_cancel_watch_indexes_log(code,
                                                               trade_record_log_util.CancelWatchIndexesInfo(

        self.clear(code)


# F-cancel
class FastCancelBigNumComputer:
    __db = 0
    __redis_manager = redis_manager.RedisManager(0)
    __cancel_real_order_index_cache = {}
    __watch_indexes_cache = {}
    __last_trade_progress_dict = {}

    __instance = None

    def __new__(cls, *args, **kwargs):
        if not cls.__instance:
            cls.__instance = super(FastCancelBigNumComputer, cls).__new__(cls, *args, **kwargs)
            cls.__load_datas()
        return cls.__instance

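    # Restore the per-code caches (real order index and watch indexes) from Redis
    # when the singleton is first created.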
    @classmethod
    def __load_datas(cls):
        __redis = cls.__get_redis()
        try:
            keys = RedisUtils.keys(__redis, "f_cancel_real_order_index-*")
            for k in keys:
                code = k.split("-")[-1]
                val = RedisUtils.get(__redis, k)
                CodeDataCacheUtil.set_cache(cls.__cancel_real_order_index_cache, code, int(val))
            keys = RedisUtils.keys(__redis, "f_cancel_watch_index-*")
            for k in keys:
                code = k.split("-")[-1]
                val = RedisUtils.get(__redis, k)
                val = set(json.loads(val))
                CodeDataCacheUtil.set_cache(cls.__watch_indexes_cache, code, val)
        finally:
            RedisUtils.realse(__redis)

    @classmethod
    def __get_redis(cls):
        return cls.__redis_manager.getRedis()

    def __set_real_order_index(self, code, index):
        CodeDataCacheUtil.set_cache(self.__cancel_real_order_index_cache, code, index)
        RedisUtils.setex_async(self.__db, f"f_cancel_real_order_index-{code}", tool.get_expire(), f"{index}")

    def __del_real_order_index(self, code):
        CodeDataCacheUtil.clear_cache(self.__cancel_real_order_index_cache, code)
        RedisUtils.delete_async(self.__db, f"f_cancel_real_order_index-{code}")

    def __get_real_order_index(self, code):
        val = RedisUtils.get(self.__db, f"f_cancel_real_order_index-{code}")
        if val:
            return int(val)
        return None

    def __get_real_order_index_cache(self, code):
        cache_result = CodeDataCacheUtil.get_cache(self.__cancel_real_order_index_cache, code)
        if cache_result[0]:
            return cache_result[1]
        return None

    def __set_watch_indexes(self, code, indexes):
        CodeDataCacheUtil.set_cache(self.__watch_indexes_cache, code, indexes)
        RedisUtils.setex_async(self.__db, f"f_cancel_watch_index-{code}", tool.get_expire(),
                               f"{json.dumps(list(indexes))}")

    def __get_watch_indexes(self, code):
        watch_indexes = self.__watch_indexes_cache.get(code)
        if watch_indexes:
            return watch_indexes
        return set()

    # Compute the limit-up buy orders to watch for cancellation
    def __compute_watch_indexes(self, code, begin_pos_info: OrderBeginPosInfo, total_datas):
        MAX_COUNT = constant.F_CANCEL_WATCH_COUNT
        watch_indexes = self.__get_watch_indexes(code)
        if watch_indexes and len(watch_indexes) >= MAX_COUNT:
            return
        # Work out the position from which to start collecting
        total_money = 0
        c_start_index = begin_pos_info.buy_exec_index
        for i in range(begin_pos_info.buy_single_index, total_datas[-1]["index"] + 1):
            val = total_datas[i]["val"]
            if not L2DataUtil.is_limit_up_price_buy(val):
                continue
            money = val["num"] * float(val["price"])
            if money < 5000:
                continue
            total_money += int(money * 100)
            if total_money >= begin_pos_info.sell_info[1]:
                c_start_index = i
                break
        for i in range(c_start_index + 1, total_datas[-1]["index"] + 1):
            val = total_datas[i]["val"]
            if not L2DataUtil.is_limit_up_price_buy(val):
                continue
            if val["num"] * float(val["price"]) < 5000:
                continue
            watch_indexes.add(i)
            if len(watch_indexes) >= MAX_COUNT:
                break
        # Save the result
        l2_log.f_cancel_debug(code, f"监听范围:{watch_indexes} 计算起始点:{c_start_index}")
        self.__set_watch_indexes(code, watch_indexes)

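    # Record the latest trade progress index for a code; it is later used to measure how many
    # uncanceled limit-up buys still sit between the trade progress and the placed order.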
    def set_trade_progress(self, code, index):
        if self.__last_trade_progress_dict.get(code) == index:
            return
        self.__last_trade_progress_dict[code] = index

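    # Decide whether a fast-mode order should be canceled: while the real order position is still
    # unknown, watch the limit-up buys queued behind the computed start position and request a
    # cancel once the canceled fraction reaches constant.F_CANCEL_CACEL_RATE.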
    def need_cancel(self, code, start_index, end_index, begin_pos_info: OrderBeginPosInfo):
        if begin_pos_info.mode != OrderBeginPosInfo.MODE_FAST:
            return False, None
        if code in self.__cancel_real_order_index_cache:
            # The real order position has been obtained; no guarding needed
            return False, None
        # Compute the 3 limit-up buys after the buy execution position
        total_datas = local_today_datas.get(code)
        self.__compute_watch_indexes(code, begin_pos_info, total_datas)
        watch_indexes = self.__get_watch_indexes(code)
        cancel_indexes = []
        if watch_indexes:
            # Check whether any of the watched orders has been canceled
            for i in watch_indexes:
                cancel_data = l2_data_source_util.L2DataSourceUtils.get_limit_up_buy_canceled_data_v2(code,
                                                                                                      i,
                                                                                                      total_datas,
                                                                                                      local_today_canceled_buyno_map.get(
                                                                                                          code))
                if cancel_data and cancel_data["index"] <= end_index:
                    cancel_indexes.append(cancel_data["index"])
            if len(cancel_indexes) / len(watch_indexes) >= constant.F_CANCEL_CACEL_RATE:
                cancel_indexes.sort()
                return True, total_datas[cancel_indexes[-1]]
        return False, None

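    # Drop the cached real order index for one code, or for every code found in Redis.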
    def clear(self, code=None):
        if code:
            self.__del_real_order_index(code)
        else:
            keys = RedisUtils.keys(self.__get_redis(), "f_cancel_real_order_index-*")
            if keys:
                for k in keys:
                    code = k.split("-")[1]
                    self.__del_real_order_index(code)

    # Set the real order position; returns whether a cancel is needed
    def set_real_order_index(self, code, index):
        self.__set_real_order_index(code, index)
        l2_log.f_cancel_debug(code, f"下单位置设置:{index}")
        trade_index = self.__last_trade_progress_dict.get(code)
        l2_log.f_cancel_debug(code, f"计算范围:{trade_index}-{index}")
        if trade_index:
            total_datas = local_today_datas.get(code)
            # Count the uncanceled limit-up buys between the trade progress and the real order position
            total_count = 0
            for i in range(trade_index + 1, index):
                data = total_datas[i]
                val = data["val"]
                if not L2DataUtil.is_limit_up_price_buy(val):
                    continue
                if val["num"] * float(val["price"]) < 5000:
                    continue
                left_count = l2_data_source_util.L2DataSourceUtils.get_limit_up_buy_no_canceled_count_v2(code, i,
                                                                                                          total_datas,
                                                                                                          local_today_canceled_buyno_map.get(
                                                                                                              code))
                if left_count > 0:
                    total_count += left_count
                if total_count >= 2:
                    l2_log.f_cancel_debug(code, f"交易进度距离下单位置纯买单数超过2单")
                    return False
        return True

    def place_order_success(self, code):
        self.clear(code)

    def cancel_success(self, code):
        self.clear(code)


# New F-cancel: cancel decisions driven by transaction (deal) data
class FCancelBigNumComputer:
    __db = 0
    __redis_manager = redis_manager.RedisManager(0)
    __cancel_real_order_index_cache = {}
    __real_order_index_cache = {}

    __instance = None

    def __new__(cls, *args, **kwargs):
        if not cls.__instance:
            cls.__instance = super(FCancelBigNumComputer, cls).__new__(cls, *args, **kwargs)
            cls.__load_datas()
        return cls.__instance

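    # Restore the real-order-index caches from Redis on first instantiation
    # (mirrors FastCancelBigNumComputer.__load_datas above).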
    @classmethod
    def __load_datas(cls):
        __redis = cls.__get_redis()
        try:
            keys = RedisUtils.keys(__redis, "f_cancel_real_order_index-*")
            for k in keys:
                code = k.split("-")[-1]
                val = RedisUtils.get(__redis, k)
                CodeDataCacheUtil.set_cache(cls.__cancel_real_order_index_cache, code, int(val))
                CodeDataCacheUtil.set_cache(cls.__real_order_index_cache, code, int(val))
        finally:
            RedisUtils.realse(__redis)

    @classmethod
    def __get_redis(cls):
        return cls.__redis_manager.getRedis()

    def __set_real_order_index(self, code, index):
        CodeDataCacheUtil.set_cache(self.__cancel_real_order_index_cache, code, index)
        CodeDataCacheUtil.set_cache(self.__real_order_index_cache, code, index)
        RedisUtils.setex_async(self.__db, f"f_cancel_real_order_index-{code}", tool.get_expire(), f"{index}")

    def __del_real_order_index(self, code):
        CodeDataCacheUtil.clear_cache(self.__cancel_real_order_index_cache, code)
        CodeDataCacheUtil.clear_cache(self.__real_order_index_cache, code)
        RedisUtils.delete_async(self.__db, f"f_cancel_real_order_index-{code}")

    def __get_real_order_index(self, code):
        val = RedisUtils.get(self.__db, f"f_cancel_real_order_index-{code}")
        if val:
            return int(val)
        return None

    def __get_real_order_index_cache(self, code):
        cache_result = CodeDataCacheUtil.get_cache(self.__real_order_index_cache, code)
        if cache_result[0]:
            return cache_result[1]
        return None

                    code = k.split("-")[1]
                    self.__del_real_order_index(code)

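    # transaction_data layout (inferred from the usage below): [0] = code, [1] = deal price,
    # [2] = deal volume, [3] = deal time, [6] = buy order number.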
    # Whether a cancel is needed
    def need_cancel(self, transaction_data):
        if not transaction_data:
            return False, "成交数据为空"
        if transaction_data[2] < 10000:
            return False, "成交量小于10000"
        # Only deals of 1,000,000 or more count
        if transaction_data[1] * transaction_data[2] < 1000000:
            return False, "金额不满足要求"
        code = transaction_data[0]
        real_order_index = self.__real_order_index_cache.get(code)
        if not real_order_index:
            return False, "真实下单位置没找到"
        # Only guard for 15 seconds after the order was placed
        now_time = l2_huaxin_util.convert_time(transaction_data[3])
        total_datas = local_today_datas.get(code)
        if not total_datas:
            return False, "L2数据为空"
        order_time = total_datas[real_order_index]["val"]["time"]
        if tool.trade_time_sub(now_time, order_time) > 15:
            return False, "只守护15s"
        buyno_map = local_today_buyno_map.get(code)
        if not buyno_map:
            return False, "没找到买单字典"
        buy_data = buyno_map.get(str(transaction_data[6]))
        if not buy_data:
            return False, f"没有找到对应买单({transaction_data[6]})"
        if not l2_data_util.is_big_money(buy_data["val"]):
            return False, f"不为大单"
        # Compute the filled proportion of the big buy order
        if transaction_data[2] > buy_data["val"]["num"] * 100 * 0.5:
            return True, "快速成交了50%以上"
        else:
            return False, ""

    # Set the real order position
    def set_real_order_index(self, code, index):
        self.__set_real_order_index(code, index)

    def cancel_success(self, code):
        self.clear(code)


# ---------------------------------G-cancel-------------------------------

        buy_index = l2_data_source_util.L2DataSourceUtils.get_buy_index_with_cancel_data_v2(data,
                                                                                             local_today_buyno_map.get(
                                                                                                 code))
        if buy_index is not None and buy_index < real_place_order_index and (
                buy_index in watch_indexes or buy_index in watch_indexes_by):
            return True, data, ""
        return False, None, ""