Changed files:
_qcvalueaddproapi.pyd
_qcvalueaddproapi.so
huaxin_client/l1_api_client.py
l2/cancel_buy_strategy.py
libqcvalueaddproapi.so
qcvalueaddproapi.dll
qcvalueaddproapi.py
servers/data_server.py
utils/buy_condition_util.py
_qcvalueaddproapi.pyd: Binary files differ
_qcvalueaddproapi.so: Binary files differ
huaxin_client/l1_api_client.py
New file @@ -0,0 +1,117 @@

import logging
import threading
import time
import qcvalueaddproapi
import sys

global g_userid, g_passwd, g_address, g_port, g_seqnum
g_seqnum = 100000


def new_seqnum():
    # Monotonically increasing request sequence number
    global g_seqnum
    g_seqnum = g_seqnum + 1
    return g_seqnum


class sampleSpi(qcvalueaddproapi.CQCValueAddProSpi):
    __result_cache = {}
    __temp_cache = {}

    def __init__(self, t_tapi):
        qcvalueaddproapi.CQCValueAddProSpi.__init__(self)
        self.m_api = t_tapi

    def __create_request_id(self):
        return new_seqnum()

    def queryTradeCalendar(self):
        # Request the trading calendar and poll until the response callback
        # has published the data for this request id
        try:
            queryField = qcvalueaddproapi.CQCVDReqQryShareCalendarField()
            queryField.BegDate = "20240701"
            queryField.EndDate = "20240801"
            queryField.PageCount = 100
            queryField.PageLocate = 1
            request_id = self.__create_request_id()
            results = self.m_api.ReqReqQryShareCalendar(queryField, request_id)
            for i in range(0, 1000):
                if request_id in self.__result_cache:
                    return self.__result_cache  # note: returns the whole cache dict, keyed by request id
                time.sleep(0.002)
            print("ReqReqQryShareCalendar:", results)
        except Exception as e:
            logging.exception(e)

    def OnFrontConnected(self):
        print("OnFrontConnected")
        # Log in once the connection is established
        loginfield = qcvalueaddproapi.CQCVDReqUserLoginField()
        loginfield.LogInAccount = g_userid
        loginfield.AuthMode = qcvalueaddproapi.QCVD_AM_Password
        loginfield.Password = g_passwd
        self.m_api.ReqUserLogin(loginfield, new_seqnum())

    def OnFrontDisconnected(self, nReason):
        print("OnFrontDisconnected Reason[%d]" % (nReason))

    # Login response
    def OnRspUserLogin(self, pRspUserLoginField, pRspInfo, nRequestID, bIsLast):
        print("OnRspUserLogin LogInAccount[%s] RequestID[%d] ErrorID[%d] ErrorMsg[%s] " % (
            pRspUserLoginField.LogInAccount, nRequestID, pRspInfo.ErrorID, pRspInfo.ErrorMsg))
        if (pRspInfo.ErrorID == 0):
            # Query right after a successful login
            # self.ReqInquiryHistoryDelivery()
            threading.Thread(target=lambda: print("交易日历:", self.queryTradeCalendar())).start()

    def ReqQryGGTEODPrices(self):
        QryField = qcvalueaddproapi.CQCVDQryGGTEODPricesField()
        self.m_api.ReqQryGGTEODPrices(QryField, new_seqnum())

    def ReqQryInvestor(self):
        QryField = qcvalueaddproapi.CQCVDQryInvestorField()
        self.m_api.ReqQryInvestor(QryField, new_seqnum())

    def OnRspInquiryShareCalendar(self, pShareCalendar, pRspInfo, nRequestID, bIsPageLast, bIsTotalLast):
        # Accumulate paged rows; an empty record marks the end of the data,
        # at which point the buffered trading days are published to the result cache
        if nRequestID not in self.__temp_cache:
            self.__temp_cache[nRequestID] = []
        if pShareCalendar:
            self.__temp_cache[nRequestID].append(pShareCalendar.TradingDay)
        else:
            self.__result_cache[nRequestID] = self.__temp_cache[nRequestID]
            self.__temp_cache.pop(nRequestID)
            print("OnRspInquiryShareCalendar:", self.__result_cache[nRequestID])


def main():
    # if (len(sys.argv) < 5):
    #     # Command line usage:
    #     # ip address, port, user id, password
    #     print("usage: ipaddress port userid passwd")
    #     return
    global g_userid, g_passwd, g_address, g_port
    g_address = "101.230.90.99"
    g_port = 25556
    g_userid = "388000013942"
    g_passwd = "110808"

    # Back-test trades are matched against historical market data:
    # traderapi and mdapi must therefore be used together (traderapi cannot be used alone),
    # and mdapi must subscribe to at least one instrument.
    # Use the back-test traderapi's RegisterFront function to register this address and connect to the back-test server.
    print("GetApiVersion():", qcvalueaddproapi.CQCValueAddProApi_GetApiVersion())
    theapi = qcvalueaddproapi.CQCValueAddProApi_CreateInfoQryApi()
    thespi = sampleSpi(theapi)
    theapi.RegisterSpi(thespi)
    theapi.RegisterFront(g_address, g_port)
    theapi.Run()
    return


if __name__ == '__main__':
    main()
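The client above follows a request/callback handshake: each query gets a fresh sequence number, the SPI callback buffers paged rows under that request id, and the caller polls the result cache until the terminating callback publishes the full list. Below is a minimal, self-contained sketch of that handshake with a simulated callback thread standing in for the qcvalueaddproapi connection; all helper names and the sample data are illustrative, not part of the commit or the vendor API.

import threading
import time

_result_cache = {}   # finished results, keyed by request id (like sampleSpi.__result_cache)
_temp_cache = {}     # partially received pages, keyed by request id
_seq_lock = threading.Lock()
_seq = 100000


def new_seqnum():
    # Monotonically increasing request id
    global _seq
    with _seq_lock:
        _seq += 1
        return _seq


def on_rsp(request_id, row, is_last):
    # Stand-in for OnRspInquiryShareCalendar: buffer rows, publish when the data ends
    _temp_cache.setdefault(request_id, [])
    if row is not None:
        _temp_cache[request_id].append(row)
    if is_last:
        _result_cache[request_id] = _temp_cache.pop(request_id)


def query_trade_calendar(timeout=2.0, poll=0.002):
    request_id = new_seqnum()
    # Simulated API: deliver two rows, then the terminating callback
    threading.Thread(target=lambda: (on_rsp(request_id, "20240701", False),
                                     on_rsp(request_id, "20240702", False),
                                     on_rsp(request_id, None, True))).start()
    deadline = time.time() + timeout
    while time.time() < deadline:
        if request_id in _result_cache:
            return _result_cache.pop(request_id)
        time.sleep(poll)
    return None  # timed out


if __name__ == "__main__":
    print("trading days:", query_trade_calendar())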
l2/cancel_buy_strategy.py

@@ -500,6 +500,7 @@
 # ---------------------------------G撤-------------------------------
+# No longer in effect
 class GCancelBigNumComputer:
     __real_place_order_index_dict = {}
     __trade_progress_index_dict = {}

@@ -962,14 +963,18 @@
         start_index = traded_index + 1
         total_datas = local_today_datas.get(code)
         watch_indexes = set()
-        for i in range(start_index, real_order_index):
-            # Collect every index from the traded-progress position to the real order position
+        watch_indexes_info = []
+        limit_up_price = gpcode_manager.get_limit_up_price_as_num(code)
+        big_num = int(l2_data_util.get_big_money_val(limit_up_price, tool.is_ge_code(code)) / (limit_up_price * 100))
+        for i in range(start_index, real_order_index + 1):
+            # Check for un-canceled big orders
             data = total_datas[i]
             val = data["val"]
             if not L2DataUtil.is_limit_up_price_buy(val):
                 continue
-            if val["num"] * float(val["price"]) < 29900:
+            if val["num"] < big_num:
                 continue
             # Has the order already been canceled?
             left_count = l2_data_source_util.L2DataSourceUtils.get_limit_up_buy_no_canceled_count_v2(code, i,

@@ -977,14 +982,28 @@
                                                                 local_today_canceled_buyno_map.get(
                                                                     code))
             if left_count > 0:
-                watch_indexes.add(i)
+                watch_indexes_info.append((i, val["num"]))
+        # When there are at most 6 big orders, G撤 watches all of them
+        # When there are more than 6, it watches one third of them
+        if len(watch_indexes_info) > 6:
+            watch_indexes = set([x[0] for x in watch_indexes_info[:int(round(len(watch_indexes_info) / 3, 0))]])
+            # Find the single largest order
+            max_info = watch_indexes_info[0]
+            for mi in watch_indexes_info:
+                if mi[1] > max_info[1]:
+                    max_info = mi
+            watch_indexes.add(max_info[0])
+        else:
+            watch_indexes = set([x[0] for x in watch_indexes_info])
         if watch_indexes:
             # There are still big orders above 3,000,000 that have not been canceled
             if from_real_order_index_changed or recompute:
                 # Only updated after the real order position changes
                 final_watch_indexes = origin_watch_index | watch_indexes
                 self.__set_watch_index(code, final_watch_indexes)
-                l2_log.g_cancel_debug(code, f"大单监听:{final_watch_indexes} 是否重新计算:{recompute}")
+                l2_log.g_cancel_debug(code, f"大单监听:{final_watch_indexes} 是否重新计算:{recompute} 计算范围:{start_index}-{real_order_index}")

     def set_trade_progress(self, code, buy_single_index, index):
         # if self.__trade_progress_index_dict.get(code) != index:
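The second and third hunks change the watch-list rule: big orders are now sized via get_big_money_val instead of a flat 29,900-yuan cutoff, and when more than 6 un-canceled big orders remain, only the first third of them plus the single largest one are watched. Below is a standalone sketch of just that selection step; the function name and the sample data are illustrative, not taken from the repository.

def pick_watch_indexes(watch_indexes_info):
    # watch_indexes_info: (order_index, volume) pairs for un-canceled big buy orders,
    # ordered from the traded-progress position to the real order position
    if len(watch_indexes_info) > 6:
        # More than 6 big orders: watch the first third ...
        keep = watch_indexes_info[:int(round(len(watch_indexes_info) / 3, 0))]
        watch_indexes = {idx for idx, _ in keep}
        # ... plus the single largest order
        max_info = max(watch_indexes_info, key=lambda info: info[1])
        watch_indexes.add(max_info[0])
    else:
        # 6 or fewer: watch them all
        watch_indexes = {idx for idx, _ in watch_indexes_info}
    return watch_indexes


# 8 orders -> the first round(8/3) = 3 indexes plus the largest (index 17): [10, 11, 12, 17]
print(sorted(pick_watch_indexes([(10, 400), (11, 350), (12, 500), (13, 320),
                                 (14, 310), (15, 330), (16, 300), (17, 900)])))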
libqcvalueaddproapi.so: Binary files differ
qcvalueaddproapi.dll: Binary files differ
qcvalueaddproapi.py
New file (diff too large to display)

servers/data_server.py
@@ -699,9 +699,12 @@
             msg_list = [f"{msg.split('|')[0]}{msg.split('|')[-1].split('-')[1].strip()}" for msg in msg_list]
             response_data = json.dumps({"code": 0, "data": msg_list})
         elif url.path == "/statistic_latest_limit_up_block":
-            # Aggregate the most recent limit-up sectors
-            datas = LatestLimitUpBlockManager().statistics_limit_up_block_infos()
-            response_data = json.dumps({"code": 0, "data": datas})
+            try:
+                # Aggregate the most recent limit-up sectors
+                datas = LatestLimitUpBlockManager().statistics_limit_up_block_infos()
+                response_data = json.dumps({"code": 0, "data": datas})
+            except Exception as e:
+                logger_debug.exception(e)

         async_log_util.info(logger_request_api, f"结束请求{tool.get_thread_id()}-{url}")
         self.send_response(200)
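The hunk above guards the /statistic_latest_limit_up_block branch so that an exception in the statistics call is logged instead of aborting the request handler. A minimal sketch of the same guard in isolation follows; the stub manager and logger are stand-ins, and the explicit fallback payload is added here only to keep the sketch self-contained (the patch itself leaves response_data as whatever value it held before the branch if the call raises).

import json
import logging

logger_debug = logging.getLogger("debug")


def handle_statistic_latest_limit_up_block(manager):
    # Default payload so the caller still receives a well-formed response on failure
    response_data = json.dumps({"code": 1, "msg": "statistics failed"})
    try:
        datas = manager.statistics_limit_up_block_infos()
        response_data = json.dumps({"code": 0, "data": datas})
    except Exception as e:
        logger_debug.exception(e)
    return response_data


class _StubManager:
    def statistics_limit_up_block_infos(self):
        return [{"block": "demo", "days": 3}]


print(handle_statistic_latest_limit_up_block(_StubManager()))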
utils/buy_condition_util.py

@@ -12,7 +12,7 @@
     if market_sitation == MarketSituationManager.SITUATION_GOOD:
         return 31, 100, 40, 100, 40, 80, 100
     # return 5.9, 41, 8.9, 25, 8.9, 19, 80
-    return 5.9, 200, 8.9, 25, 8.9, 19, 10000
+    return 5.9, 10000, 8.9, 25, 8.9, 19, 10000


 # 获取量比的等级获取量