import constant
import inited_data
import outside_api_command_manager
from cancel_strategy.s_l_h_cancel_strategy import SCancelBigNumComputer, LCancelRateManager, \
    CancelRateHumanSettingManager
from code_attribute import gpcode_manager, code_volumn_manager, zyltgb_util, code_nature_analyse
from code_attribute.code_data_util import ZYLTGBUtil
from code_attribute.code_l1_data_manager import L1DataManager

    "l_down_cancel_rate": l_down_cancel_rate,
    "expire_rate": expire_rate
}
# A manually configured L-down cancel rate overrides the default value for this code
l_down_cancel_rate = CancelRateHumanSettingManager().get_l_down(code)
if l_down_cancel_rate is not None:
    fdata["l_down_cancel_rate"] = l_down_cancel_rate

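# Illustrative sketch only: this excerpt shows just two calls on CancelRateHumanSettingManager,
# get_l_down(code) and set_l_down(code, rate). A minimal in-memory stand-in consistent with
# those calls is outlined below; the real class presumably persists the settings elsewhere
# (e.g. redis or a config file), which is not shown here.
class CancelRateHumanSettingManagerSketch:
    _l_down_rates = {}  # code -> manually configured L-down cancel rate

    def set_l_down(self, code, rate):
        self._l_down_rates[code] = rate

    def get_l_down(self, code):
        # None means no manual rate has been configured for the code
        return self._l_down_rates.get(code)
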
limit_up_data = kpl_data_manager.KPLLimitUpDataRecordManager.record_code_dict.get(code)
# Get the code's current sector (plate)
try:

    self.send_response({"code": 0, "data": {}},
                       client_id,
                       request_id)
elif ctype == "set_l_down_rate":
    # Set the L-down (post-L) cancel rate for the given code
    code = data.get("code")
    rate = data.get("rate")
    if rate < 0 or rate > 1:
        self.send_response({"code": 1, "msg": "比例范围不在0-1之间"},
                           client_id,
                           request_id)
        return
    CancelRateHumanSettingManager().set_l_down(code, rate)
    self.send_response({"code": 0, "data": {}},
                       client_id,
                       request_id)

except Exception as e:
    logging.exception(e)
    logger_debug.exception(e)

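# Illustrative request payload for the "set_l_down_rate" command handled above.
# Only "code" and "rate" are confirmed by the handler; the envelope field names
# ("type", "data") are assumptions inferred from the ctype/data variables.
example_set_l_down_rate_request = {
    "type": "set_l_down_rate",
    "data": {
        "code": "000001",  # example stock code
        "rate": 0.5,       # must lie within [0, 1], otherwise code=1 is returned
    },
}
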
date = ps_dict.get('date')
time_str = ps_dict.get('time')
end_index = ps_dict.get('end_index')
fast = ps_dict.get('fast', False)
if end_index:
    end_index = int(end_index)
total_datas = l2_data_util.local_today_datas.get(code)

    total_datas = None
else:
    date = tool.get_now_date_str()
delegate_datas = data_export_util.get_l2_datas(code, total_datas, date=date, end_index=end_index, fast_mode=fast)
code_name = gpcode_manager.get_code_name(code)
response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name,
                                                "data": {"delegates": delegate_datas,

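# Illustrative ps_dict for the export request above; the keys mirror the ps_dict.get(...)
# lookups, while the concrete values and the date format are examples only.
example_ps_dict = {
    "date": "2024-08-01",  # optional; today's date is used when omitted
    "end_index": "600",    # optional; the export stops at this L2 data index
    "fast": True,          # fast mode skips the heavier log parsing in get_l2_datas
}
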
# Export the L2 (level-2) order data of a code
def get_l2_datas(code, today_datas=None, date=None, end_index=None, fast_mode=False):
    """

    @param code: stock code
    @param today_datas: today's L2 data, if already loaded
    @param date: trading date; defaults to today
    @param end_index: index of the last L2 entry to export
    @param fast_mode: whether fast mode is enabled (skip the heavier log parsing)
    @return:
    """
    __start_time = time.time()
    if date is None:
        date = tool.get_now_date_str()

    if not fast_mode:
        tasks = [dask.delayed(log_export.get_l2_process_position)(code, date),
                 dask.delayed(log_export.get_l2_trade_position)(code, date),
                 dask.delayed(log_export.get_real_place_order_positions)(code, date),
                 dask.delayed(log_export.load_huaxin_deal_record)(code, date),
                 dask.delayed(log_export.load_cancel_buy_reasons)(code, date),
                 dask.delayed(log_export.load_huaxin_transaction_sell_no)(code, date),
                 dask.delayed(log_export.load_huaxin_active_sell_map)(date),
                 ]
        # Evaluate all log-parsing tasks in parallel
        results = dask.delayed(tasks).compute()
        process_indexs = results[0]
        trade_indexs = results[1]
        real_position_indexes = results[2]
        deal_list = results[3]
        cancel_reasons = results[4]
        sell_no_dict = results[5]
        active_sell_map = results[6]
    else:
        # Fast mode: skip the heavier log parsing and load only what is strictly needed
        process_indexs = []
        trade_indexs = []
        deal_list = []
        sell_no_dict = {}
        active_sell_map = {}
        tasks = [dask.delayed(log_export.get_real_place_order_positions)(code, date),
                 dask.delayed(log_export.load_cancel_buy_reasons)(code, date),
                 ]
        results = dask.delayed(tasks).compute()
        real_position_indexes = results[0]
        cancel_reasons = results[1]

    # Index deal records by order number for quick lookup
    deal_list_dict = {}
    for d in deal_list:
        deal_list_dict[str(d[0])] = d

    if deal_big_buy_order_list:
        for d in deal_big_buy_order_list:
            deal_big_buy_order_no_dict[d[0]] = d

    fdatas = export_l2_data(code, datas, process_indexs, trade_indexs, real_position_indexes, deal_list_dict, sell_nos,
                            active_sell_set, cancel_reasons, deal_big_buy_order_no_dict)
    return fdatas
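
# Illustrative usage (argument values are examples only): a full export parses all
# log-derived data for the code and date, while fast_mode=True loads only the real
# order placement positions and the cancel reasons.
if __name__ == "__main__":
    full_datas = get_l2_datas("000001", date="2024-08-01", end_index=600)
    fast_datas = get_l2_datas("000001", date="2024-08-01", end_index=600, fast_mode=True)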