This commit is contained in:
2025-07-27 23:42:23 +08:00
parent 1676dc7de0
commit ba62e91cbb

59
main.py
View File

@@ -5,6 +5,8 @@ import time
import subprocess
from datetime import datetime, timezone
from collections import defaultdict
import logging
from pathlib import Path
import requests
@@ -12,11 +14,32 @@ import requests
API_URL = "https://backend.dexrp.io/vending/last" # DEXRP API endpoint
JSON_FILE = "price.json" # file where aggregated price data is stored
HASH_FILE = "processed.json" # file recording already-processed transaction hashes
LOG_DIR = "logs"
LOG_FILE_FORMAT = "%Y-%m-%d.log"
# Global mutable state
SEEN_TXHASHES = set() # in-memory set of already-processed transaction hashes
GIT_INTERVAL = 300 # seconds between git commit/push cycles
FETCH_INTERVAL = 10 # seconds between API polls
LOGGER = None # module-level logger, assigned by setup_logging()
def setup_logging():
    """Configure logging to both a dated file and the console.

    Creates ``LOG_DIR`` if it does not exist, opens a log file named after
    the current date (``LOG_FILE_FORMAT``), and stores the configured
    logger in the module-level ``LOGGER``.

    Returns:
        logging.Logger: the module logger (also assigned to ``LOGGER``).
    """
    global LOGGER  # publish the logger for the rest of the module
    Path(LOG_DIR).mkdir(exist_ok=True)
    # NOTE(review): the file name is fixed at call time — the handler does
    # not roll over at midnight on its own; confirm callers re-invoke this
    # if daily rotation is expected.
    log_file = Path(LOG_DIR) / datetime.now().strftime(LOG_FILE_FORMAT)
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
        handlers=[
            # Explicit UTF-8: log messages contain non-ASCII text, and the
            # platform's locale encoding (FileHandler's default) may not
            # be able to represent it.
            logging.FileHandler(log_file, encoding="utf-8"),
            logging.StreamHandler(),
        ],
    )
    LOGGER = logging.getLogger(__name__)
    return LOGGER
def fetch_latest_transactions():
@@ -30,7 +53,7 @@ def fetch_latest_transactions():
# )
return [tx for tx in transactions if tx["transactionHash"] not in SEEN_TXHASHES]
except requests.RequestException as e:
print(f"API请求失败: {e}")
LOGGER.error("API请求失败: %s", e)
return None
@@ -47,7 +70,9 @@ def calculate_daily_stats(transactions):
)
for tx in transactions:
date = datetime.fromtimestamp(tx["blockTimestamp"], timezone.utc).strftime("%Y-%m-%d")
date = datetime.fromtimestamp(tx["blockTimestamp"], timezone.utc).strftime(
"%Y-%m-%d"
)
price = tx.get("price", 0)
tokens_sold = tx.get("tokensSold", 0)
@@ -74,7 +99,7 @@ def calculate_daily_stats(transactions):
def update_json_file(new_data):
"""Update JSON file with new transaction data."""
try:
print(f"开始更新JSON文件收到{len(new_data)}条新交易数据")
LOGGER.info("开始更新JSON文件收到%d条新交易数据", len(new_data))
# 读取现有数据,如果文件不存在则初始化为空数组
try:
with open(JSON_FILE, "r", encoding="utf-8") as file:
@@ -108,11 +133,14 @@ def update_json_file(new_data):
with open(JSON_FILE, "w", encoding="utf-8") as file:
json.dump(existing_data, file, indent=4, ensure_ascii=False)
print(
f"成功更新{JSON_FILE},合并{len(processed_data)}条记录,总计{len(existing_data)}条记录"
LOGGER.info(
"成功更新%s,合并%d条记录,总计%d条记录",
JSON_FILE,
len(processed_data),
len(existing_data)
)
except (FileNotFoundError, json.JSONDecodeError, IOError) as e:
print(f"更新JSON文件时发生错误: {e},跳过本次更新")
LOGGER.error("更新JSON文件时发生错误: %s,跳过本次更新", e)
def git_commit_and_push():
@@ -131,9 +159,9 @@ def git_commit_and_push():
check=True,
)
subprocess.run(["git", "push"], check=True)
print("Git提交成功")
LOGGER.info("Git提交成功")
except subprocess.CalledProcessError as e:
print(f"Git操作失败: {e}")
LOGGER.error("Git操作失败: %s", e)
def load_processed_hashes():
@@ -153,30 +181,33 @@ def save_processed_hashes():
def main():
    """Main loop: periodically poll the API for new transactions,
    persist them, and commit/push the data files to git at a fixed
    interval.

    Runs forever; any per-iteration error is logged (with traceback)
    and the loop continues after a short back-off.
    """
    global SEEN_TXHASHES, LOGGER
    setup_logging()  # initialize the module-level LOGGER
    LOGGER.info("程序启动")
    SEEN_TXHASHES = load_processed_hashes()
    last_git_time = 0  # timestamp of the last successful git cycle
    while True:
        try:
            LOGGER.info("获取最新交易数据...")
            if new_transactions := fetch_latest_transactions():
                LOGGER.info("获取到%d条新交易", len(new_transactions))
                SEEN_TXHASHES.update(tx["transactionHash"] for tx in new_transactions)
                save_processed_hashes()
                update_json_file(new_transactions)
            # Commit at most once per GIT_INTERVAL regardless of how many
            # fetch cycles have run in between.
            current_time = time.time()
            if current_time - last_git_time >= GIT_INTERVAL:
                git_commit_and_push()
                last_git_time = current_time
            time.sleep(FETCH_INTERVAL)
        except Exception as e:
            # exception() keeps the traceback so unexpected failures are
            # diagnosable; sleep before retrying to avoid a tight error
            # loop hammering the API.
            LOGGER.exception("错误: %s,继续运行...", e)
            time.sleep(FETCH_INTERVAL)
            continue


if __name__ == "__main__":
    main()