import os
import sys
import time
from multiprocessing import Pool

import pandas as pd

from data_parser import transaction_big_order_parser
from db import mysql_data_delegate as mysql_data
from huaxin_client.l2_client_test import L2TransactionDataManager
from log_module import log_export

# Write the rows out to a text file
writer.writerow(row)
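
# NOTE: `writer` and `row` above come from code that is not part of this excerpt.
# A minimal sketch of the kind of CSV-writing helper such a fragment typically sits in
# (hypothetical name `write_rows_to_csv`; assumes each row is a plain iterable):
import csv


def write_rows_to_csv(rows, path):
    # Append every row of an iterable to a CSV/text file on disk.
    with open(path, "a", newline="", encoding="utf-8") as f:
        writer = csv.writer(f)
        for row in rows:
            writer.writerow(row)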


def test1(args):
    # Worker for Pool.map: receives a (chunk_index, DataFrame) pair and just prints the index.
    index, df = args
    print(index)


def pre_process_transactions(csv_path="E:/测试数据/Transaction_Test.csv"):
    def str_to_float(s):

            'EndPrice': group['TradePrice'].iloc[-1]
        })



    dtype = {
        'SecurityID': 'category',  # low-cardinality categorical column
    }
    chunk_size = 10000
    # Create the DataFrame reader in chunks
    chunks = pd.read_csv(csv_path, chunksize=chunk_size, dtype=dtype)
    indexed_data = list(enumerate(chunks))
    # New approach: fan the (index, chunk) pairs out to a worker pool
    with Pool(processes=4) as pool:
        pool.map(test1, indexed_data)
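
    # Note: list(enumerate(chunks)) materialises every chunk in memory and exhausts the
    # TextFileReader, so the sequential pass below iterates over indexed_data rather
    # than over chunks.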
    result_list = []
    for chunk_index, chunk in indexed_data:
        df = chunk.copy()
        index = chunk_index + 1
        child_path = csv_path.replace(".csv", f"_{index}.csv")
        if os.path.exists(child_path):
            continue
        print(f"Processing batch {index}")
        df["TradePrice"] = df["TradePrice"].apply(str_to_float)
        df["SecurityID"] = df["SecurityID"].apply(code_format)
        # Keep only main-board and ChiNext codes (prefixes 00, 60, 30)
        df = df[df["SecurityID"].str.startswith(("30", "00", "60"), na=False)]
        # Compute the trade amount for each record
        df['TradeAmount'] = df['TradePrice'] * df['TradeVolume']
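        # The existence check on child_path above suggests each processed chunk is saved
        # to that file; a minimal sketch of one way that step could look, assuming plain
        # CSV output (the actual write step is not shown in this excerpt):
        df.to_csv(child_path, index=False)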



    # [ExchangeID,SecurityID,MainSeq,SubSeq,TickTime,TickType,BuyNo,SellNo,Price,Volume,TradeMoney,Side,TradeBSFlag,MDSecurityStat,Info1,Info2,Info3,LocalTimeStamp]
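    # SecurityID values such as "000555" lose their leading zeros if pandas infers an
    # integer dtype, which is presumably why code_format is reapplied below; passing an
    # explicit dtype is one way to avoid that (sketch, not the original call):
    #     pd.read_csv(csv_path, chunksize=chunk_size, dtype={"SecurityID": str})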

    chunk_size = 10000
    # Create the DataFrame reader in chunks
    chunks = pd.read_csv(csv_path, chunksize=chunk_size)
    result_list = []

        df = df[df["TickType"] == 'T']
        df["Price"] = df["Price"].apply(str_to_float)
        df["SecurityID"] = df["SecurityID"].apply(code_format)

        # Keep only main-board and ChiNext codes (prefixes 00, 60, 30)
        df = df[df["SecurityID"].str.startswith(("30", "00", "60"), na=False)]

        # Trade amount: convert TradeMoney to float
        df['TradeMoney'] = df["TradeMoney"].apply(str_to_float)
        # Group by SecurityID and BuyNo

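        # A minimal sketch of the kind of per-buy-order aggregation the comment above refers
        # to (hypothetical field names, modelled on the 'EndPrice' fragment earlier in this
        # file; the real aggregation is not shown in this excerpt):
        for (security_id, buy_no), group in df.groupby(["SecurityID", "BuyNo"]):
            result_list.append({
                "SecurityID": security_id,
                "BuyNo": buy_no,
                "TotalMoney": group["TradeMoney"].sum(),  # total traded money for this buy order
                "TotalVolume": group["Volume"].sum(),     # total traded volume for this buy order
                "StartPrice": group["Price"].iloc[0],     # price of the first fill
                "EndPrice": group["Price"].iloc[-1],      # price of the last fill
            })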


if __name__ == '__main__1':
    # Note: '__main__1' never matches __name__, so this test block is effectively disabled.
    # df = pd.read_csv(f"E:/测试数据/Transaction_Test.csv")
    pre_process_ngtstick()
    pre_process_transactions()

# Command mode: /home/userzjj/app/gp-server/l2_data_parser Transaction 2025-05-08
# Parse big orders: /home/userzjj/app/gp-server/l2_data_parser ExtractDealBigOrder 2025-05-09 /home/userzjj/最终成交数据20250509.txt 000555
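
# Context: `_type`, `day` and `params` used in the branches below are populated by
# argument-handling code that is not part of this excerpt; judging from the command
# lines above it presumably starts from sys.argv (e.g. `_type = sys.argv[1]`, with the
# date and any extra arguments following it).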

    elif _type == 'MarketData':
        parse_market_data(day)
    elif _type == 'Transaction_New':
        transaction_big_order_parser.pre_process_transactions(f"/home/userzjj/ftp/{day}/Transaction.csv")
        transaction_big_order_parser.concat_pre_transactions(f"/home/userzjj/ftp/{day}/Transaction")
    elif _type == 'NGTSTick_New':
        transaction_big_order_parser.pre_process_ngtsticks(f"/home/userzjj/ftp/{day}/NGTSTick.csv")
        transaction_big_order_parser.concat_pre_ngtsticks(f"/home/userzjj/ftp/{day}/NGTSTick")
    elif _type == 'Transaction_Concat':
        transaction_big_order_parser.concat_pre_transactions(f"/home/userzjj/ftp/{day}/Transaction")
    elif _type == 'NGTSTick_Concat':
        transaction_big_order_parser.concat_pre_ngtsticks(f"/home/userzjj/ftp/{day}/NGTSTick")


    elif _type == 'ExtractDealBigOrder':
        # Extract all executed big orders
        if len(params) > 2: