# exchange_monitor_sync/utils/batch_account_sync.py

import time
from typing import Dict, List, Tuple

from loguru import logger
from sqlalchemy import text


class BatchAccountSync:
    """Batch synchronization helper for account information."""

    def __init__(self, db_manager):
        self.db_manager = db_manager

    def sync_accounts_batch(self, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Batch-sync account records (most efficient variant).

        Returns (updated_count, inserted_count).
        """
        if not all_account_data:
            return 0, 0
        session = self.db_manager.get_session()
        try:
            start_time = time.time()
            # Approach 1: temp-table based batch sync -- the faster of the
            # two strategies implemented in this module.
            updated_count, inserted_count = self._sync_using_temp_table(session, all_account_data)
            elapsed = time.time() - start_time
            logger.info(
                f"Batch account sync done: {updated_count} updated, "
                f"{inserted_count} inserted, took {elapsed:.2f}s"
            )
            return updated_count, inserted_count
        except Exception as e:
            logger.error(f"Batch account sync failed: {e}")
            return 0, 0
        finally:
            session.close()
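
    # Expected shape of each record in all_account_data, inferred from the
    # key accesses in the sync strategies below: asset is hardcoded to
    # 'USDT', and time is stored in an INT column (presumably a unix
    # timestamp). Sample values are illustrative only:
    #
    #   {
    #       "st_id": 1, "k_id": 42, "balance": 1000.5, "withdrawal": 0.0,
    #       "deposit": 500.0, "other": 0.0, "profit": 12.5, "time": 1733200000,
    #   }
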
    def _sync_using_temp_table(self, session, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Batch sync via a temporary staging table."""
        try:
            # 1. Create the temporary staging table.
            session.execute(text("""
                CREATE TEMPORARY TABLE IF NOT EXISTS temp_account_info (
                    st_id INT,
                    k_id INT,
                    asset VARCHAR(32),
                    balance DECIMAL(20, 8),
                    withdrawal DECIMAL(20, 8),
                    deposit DECIMAL(20, 8),
                    other DECIMAL(20, 8),
                    profit DECIMAL(20, 8),
                    time INT,
                    PRIMARY KEY (k_id, st_id, time)
                )
            """))
            # 2. Empty it, in case this session reused an existing temp table.
            session.execute(text("TRUNCATE TABLE temp_account_info"))
            # 3. Stage the data in chunks. Bound parameters (executemany)
            #    replace the original string-built VALUES lists, avoiding
            #    SQL injection and over-long statements.
            insert_sql = text("""
                INSERT INTO temp_account_info
                    (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time)
                VALUES (:st_id, :k_id, 'USDT', :balance, :withdrawal, :deposit,
                        :other, :profit, :time)
            """)
            keys = ("st_id", "k_id", "balance", "withdrawal", "deposit",
                    "other", "profit", "time")
            chunk_size = 1000
            total_staged = 0
            for i in range(0, len(all_account_data), chunk_size):
                chunk = all_account_data[i:i + chunk_size]
                session.execute(insert_sql, [{k: row[k] for k in keys} for row in chunk])
                total_staged += len(chunk)
            logger.debug(f"Staged {total_staged} rows into temp_account_info")
            # 4. Apply the staged rows to the main table.
            # 4a. Update rows that already exist (matched on the composite key).
            update_result = session.execute(text("""
                UPDATE deh_strategy_kx_new main
                INNER JOIN temp_account_info temp
                    ON main.k_id = temp.k_id
                    AND main.st_id = temp.st_id
                    AND main.time = temp.time
                SET main.balance = temp.balance,
                    main.withdrawal = temp.withdrawal,
                    main.deposit = temp.deposit,
                    main.other = temp.other,
                    main.profit = temp.profit,
                    main.up_time = NOW()
            """))
            updated_count = update_result.rowcount
            # 4b. Insert rows that do not exist yet.
            insert_result = session.execute(text("""
                INSERT INTO deh_strategy_kx_new
                    (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time, up_time)
                SELECT
                    st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time, NOW()
                FROM temp_account_info temp
                WHERE NOT EXISTS (
                    SELECT 1 FROM deh_strategy_kx_new main
                    WHERE main.k_id = temp.k_id
                    AND main.st_id = temp.st_id
                    AND main.time = temp.time
                )
            """))
            inserted_count = insert_result.rowcount
            # 5. Drop the temp table and commit.
            session.execute(text("DROP TEMPORARY TABLE IF EXISTS temp_account_info"))
            session.commit()
            return updated_count, inserted_count
        except Exception as e:
            session.rollback()
            logger.error(f"Temp-table sync failed: {e}")
            raise
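
    # Design note: the temp-table strategy costs extra statements (DDL,
    # TRUNCATE, DROP) on top of the staging inserts, but splitting the
    # apply step into UPDATE and INSERT gives exact per-kind rowcounts.
    # The ON DUPLICATE variant below is one statement per chunk and thus
    # simpler, at the price of losing that breakdown.
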
    def _sync_using_on_duplicate(self, session, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Batch sync via INSERT ... ON DUPLICATE KEY UPDATE (simplified variant)."""
        try:
            # Bound parameters again; chunking keeps each round trip bounded.
            # Note: the VALUES() function is deprecated as of MySQL 8.0.20,
            # but is kept here because it still works on current servers.
            sql = text("""
                INSERT INTO deh_strategy_kx_new
                    (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time)
                VALUES (:st_id, :k_id, 'USDT', :balance, :withdrawal, :deposit,
                        :other, :profit, :time)
                ON DUPLICATE KEY UPDATE
                    balance = VALUES(balance),
                    withdrawal = VALUES(withdrawal),
                    deposit = VALUES(deposit),
                    other = VALUES(other),
                    profit = VALUES(profit),
                    up_time = NOW()
            """)
            keys = ("st_id", "k_id", "balance", "withdrawal", "deposit",
                    "other", "profit", "time")
            chunk_size = 1000
            total_processed = 0
            for i in range(0, len(all_account_data), chunk_size):
                chunk = all_account_data[i:i + chunk_size]
                session.execute(sql, [{k: row[k] for k in keys} for row in chunk])
                total_processed += len(chunk)
            session.commit()
            # Note: MySQL reports 1 affected row per inserted row and 2 per
            # updated row (0 when nothing changed), so updates and inserts
            # cannot be separated cleanly here; the second element of the
            # returned tuple is therefore always 0.
            return total_processed, 0
        except Exception as e:
            session.rollback()
            logger.error(f"ON DUPLICATE KEY sync failed: {e}")
            raise
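

# ---------------------------------------------------------------------------
# Minimal usage sketch, assuming only that db_manager exposes get_session().
# The real db_manager used by this project is not shown in this file; the
# stand-in below, the DSN, and the sample record values are all hypothetical.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    class _DemoDBManager:
        """Throwaway stand-in providing the one method BatchAccountSync uses."""

        def __init__(self, dsn: str):
            self._session_factory = sessionmaker(bind=create_engine(dsn))

        def get_session(self):
            return self._session_factory()

    # Hypothetical MySQL DSN; adjust driver and credentials to the environment.
    db = _DemoDBManager("mysql+pymysql://user:pass@localhost/exchange_monitor")
    syncer = BatchAccountSync(db)
    updated, inserted = syncer.sync_accounts_batch([
        {
            "st_id": 1, "k_id": 42, "balance": 1000.5, "withdrawal": 0.0,
            "deposit": 500.0, "other": 0.0, "profit": 12.5, "time": 1733200000,
        },
    ])
    logger.info(f"demo result: updated={updated}, inserted={inserted}")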