Compare commits: dbdf8af9aa ... 6cb8917b1b

3 commits:
  6cb8917b1b
  ec9176e29e
  1676dc7de0

main.py (63 lines changed)
@@ -5,8 +5,6 @@ import time
 import subprocess
 from datetime import datetime, timezone
 from collections import defaultdict
-import logging
-from pathlib import Path
 
 import requests
 
@@ -14,32 +12,11 @@ import requests
 API_URL = "https://backend.dexrp.io/vending/last"  # DEXRP API endpoint
 JSON_FILE = "price.json"  # price data file
 HASH_FILE = "processed.json"  # record of processed transaction hashes
-LOG_DIR = "logs"
-LOG_FILE_FORMAT = "%Y-%m-%d.log"
 
 # Global variables
 SEEN_TXHASHES = set()  # in-memory set of processed transaction hashes
 GIT_INTERVAL = 300  # git commit interval (seconds)
 FETCH_INTERVAL = 10  # API polling interval (seconds)
-LOGGER = None  # global logger variable
-
-
-def setup_logging():
-    """Configure logging."""
-    global LOGGER  # use the global logger
-    Path(LOG_DIR).mkdir(exist_ok=True)
-    log_file = Path(LOG_DIR) / datetime.now().strftime(LOG_FILE_FORMAT)
-
-    logging.basicConfig(
-        level=logging.INFO,
-        format="%(asctime)s - %(levelname)s - %(message)s",
-        handlers=[
-            logging.FileHandler(log_file, encoding="utf-8"),
-            logging.StreamHandler(),
-        ],
-    )
-    LOGGER = logging.getLogger(__name__)  # assign to the global logger
-    return LOGGER
 
 
 def fetch_latest_transactions():
@@ -53,7 +30,7 @@ def fetch_latest_transactions():
         # )
         return [tx for tx in transactions if tx["transactionHash"] not in SEEN_TXHASHES]
     except requests.RequestException as e:
-        LOGGER.error("API request failed: %s", e)
+        print(f"API request failed: {e}")
         return None
 
 
@@ -70,9 +47,7 @@ def calculate_daily_stats(transactions):
     )
 
     for tx in transactions:
-        date = datetime.fromtimestamp(tx["blockTimestamp"], timezone.utc).strftime(
-            "%Y-%m-%d"
-        )
+        date = datetime.fromtimestamp(tx["blockTimestamp"], timezone.utc).strftime("%Y-%m-%d")
         price = tx.get("price", 0)
         tokens_sold = tx.get("tokensSold", 0)
 
@@ -99,7 +74,7 @@ def calculate_daily_stats(transactions):
 def update_json_file(new_data):
     """Update JSON file with new transaction data."""
     try:
-        LOGGER.info("Updating JSON file: received %d new transactions", len(new_data))
+        print(f"Updating JSON file: received {len(new_data)} new transactions")
         # Read existing data; initialize to an empty array if the file does not exist
         try:
             with open(JSON_FILE, "r", encoding="utf-8") as file:
@@ -133,14 +108,11 @@ def update_json_file(new_data):
         with open(JSON_FILE, "w", encoding="utf-8") as file:
             json.dump(existing_data, file, indent=4, ensure_ascii=False)
 
-        LOGGER.info(
-            "Updated %s: merged %d records, %d records total",
-            JSON_FILE,
-            len(processed_data),
-            len(existing_data)
+        print(
+            f"Updated {JSON_FILE}: merged {len(processed_data)} records, {len(existing_data)} records total"
         )
     except (FileNotFoundError, json.JSONDecodeError, IOError) as e:
-        LOGGER.error("Error updating JSON file: %s, skipping this update", e)
+        print(f"Error updating JSON file: {e}, skipping this update")
 
 
 def git_commit_and_push():
@@ -159,9 +131,9 @@ def git_commit_and_push():
             check=True,
         )
         subprocess.run(["git", "push"], check=True)
-        LOGGER.info("Git commit succeeded")
+        print("Git commit succeeded")
     except subprocess.CalledProcessError as e:
-        LOGGER.error("Git operation failed: %s", e)
+        print(f"Git operation failed: {e}")
 
 
 def load_processed_hashes():
@@ -175,41 +147,34 @@ def load_processed_hashes():
 
 def save_processed_hashes():
     """Save processed transaction hashes to file."""
-    try:
-        with open(HASH_FILE, "w", encoding="utf-8") as file:
-            json.dump(list(SEEN_TXHASHES), file, indent=4, ensure_ascii=False)
-        LOGGER.info("Saved processed transaction hashes to %s", HASH_FILE)
-    except IOError as e:
-        LOGGER.error("Error saving processed transaction hashes: %s", e)
+    with open(HASH_FILE, "w", encoding="utf-8") as file:
+        json.dump(list(SEEN_TXHASHES), file)
 
 
 def main():
     """Main loop: periodically fetch data and apply updates."""
-    global SEEN_TXHASHES, LOGGER  # logger added to the global declaration
-    setup_logging()  # initialize the logger
-    LOGGER.info("Program started")
+    global SEEN_TXHASHES
     SEEN_TXHASHES = load_processed_hashes()
    last_git_time = 0
 
     while True:
-        setup_logging()
         try:
-            LOGGER.info("Fetching latest transactions...")
+            current_time = time.time()
+            print(f"{datetime.now()}: fetching latest transactions...")
 
             if new_transactions := fetch_latest_transactions():
-                LOGGER.info("Fetched %d new transactions", len(new_transactions))
+                print(f"Fetched {len(new_transactions)} new transactions")
                 SEEN_TXHASHES.update(tx["transactionHash"] for tx in new_transactions)
                 save_processed_hashes()
                 update_json_file(new_transactions)
 
-            current_time = time.time()
             if current_time - last_git_time >= GIT_INTERVAL:
                 git_commit_and_push()
                 last_git_time = current_time
 
             time.sleep(FETCH_INTERVAL)
         except Exception as e:
-            LOGGER.error("Error: %s, continuing...", e)
+            print(f"Error: {e}, continuing...")
             continue
 
 
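Note on the loop above: fetch_latest_transactions() returns None after a request error and a (possibly empty) list otherwise, and the walrus operator binds and tests that result in one step, so both None and an empty list skip the file updates. A minimal sketch of the pattern, reusing names from the diff, with a hypothetical stub standing in for the real API call:

from typing import Optional

SEEN_TXHASHES: set = set()

def fetch_latest_transactions() -> Optional[list]:
    # Hypothetical stub: the real function calls the DEXRP API and
    # returns None on requests.RequestException.
    return [{"transactionHash": "0xabc", "price": 0.03515, "tokensSold": 100}]

if new_transactions := fetch_latest_transactions():
    # Reached only for a non-empty list; None (error) and [] (nothing new)
    # both fall through without touching SEEN_TXHASHES or the JSON files.
    SEEN_TXHASHES.update(tx["transactionHash"] for tx in new_transactions)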
price.json (10 lines changed)
@@ -5,14 +5,6 @@
         "close": 0.03515,
         "high": 0.03515,
         "low": 0.03515,
-        "tokensSold": 0
-    },
-    {
-        "date": "2025-07-28",
-        "open": 0.03515,
-        "close": 0.03515,
-        "high": 0.03515,
-        "low": 0.03515,
-        "tokensSold": 0
+        "tokensSold": 223670
     }
 ]
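This hunk collapses a duplicated 2025-07-28 entry into a single record and fills in the day's actual token sales. For orientation, here is a sketch of how per-day OHLC rows of this shape can be folded from raw transactions; it assumes only the blockTimestamp, price, and tokensSold fields visible in the main.py hunks, since the actual calculate_daily_stats body is outside this compare:

from collections import defaultdict
from datetime import datetime, timezone

def daily_ohlc(transactions):
    """Fold raw transactions into per-day OHLC rows (illustrative sketch)."""
    days = defaultdict(lambda: {
        "open": None, "close": None,
        "high": float("-inf"), "low": float("inf"),
        "tokensSold": 0,
    })
    for tx in sorted(transactions, key=lambda t: t["blockTimestamp"]):
        date = datetime.fromtimestamp(tx["blockTimestamp"], timezone.utc).strftime("%Y-%m-%d")
        price = tx.get("price", 0)
        day = days[date]
        if day["open"] is None:
            day["open"] = price  # first trade of the UTC day
        day["close"] = price     # last trade seen so far
        day["high"] = max(day["high"], price)
        day["low"] = min(day["low"], price)
        day["tokensSold"] += tx.get("tokensSold", 0)
    return [{"date": d, **v} for d, v in sorted(days.items())]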
processed.json (1 line changed, new file)

@@ -0,0 +1 @@
+["0x06bcece7631751b356156581777b1bbbeb71aff346ad185d0afbe608b5ab9ba1", "0xd07efb43aca72173e081820afb58458356e77b2043b161d05c9f74764189b84d", "5GR3o3UHcym1QTDgJyMrBAoQkWYrWsMgETRwcobZc7cEWGq5fTywU8ZpTx6QD8WrMGWfMf1JWyMjSAe9UghBeZkd", "0x7432d15902aa52c699bc89fe68e37d54720adc57b49fa4eaaa9a272aa2d96b1f", "5851416353", "4595614752", "0x506c7c88f4ded169c9ebfa4b2cc4c8c4f5ebbe578b64549061a6f9a00c0327b8", "0x448a196d16e910ab1f1c4c37ac8899b8093f82dec6e2e8cb7a45196dd0eff5a5", "0xfcaf513488610c401cc481ec7a00e68f22df1e6fca0902561d0f0e79367bbe3a", "0x3775a273d70973b20554e491b3fbdc1c5d46e0f6b13e6cb19d3882f8aaa32b6b", "3KBwkXnhAJLrmREyCRa2TXvAomoZTTGrNtX3Rkz5eBEYp9gcsfA9ZETYZ8Dw6araUguN1CRcUYQa1Q6ENabePSWr", "3jGVF5mrioUYoSPNkAUAHiCJniD93jZfRkwGNp251VDTCZZ52jwHzWv9XeuqmFf5cNKxgWZCc5fKZgSnFopBHpG2", "42rUh8pAgdtqqWy5o5bQBZvzBJbkQvZLH6LtKYiXDrDAYVYdyPQuYddL2dx6fqxoJ93ASJ4zwdV8Ense3QNU2LRm", "0xa5e02de9082c820a26a119e9cf445f66060dd9027656a35b877af888773cbc3c"]
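processed.json is the flat array written by save_processed_hashes(); it holds 0x-prefixed hashes alongside base58-looking signatures and plain numeric IDs, all treated as opaque strings. The loading counterpart is not part of this compare; a minimal reader consistent with the save format might look like:

import json

def load_processed_hashes(path="processed.json"):
    # Sketch only: load_processed_hashes() exists in main.py, but its body
    # is not shown in this compare. Rebuild the in-memory set, starting
    # empty if the file is missing or unparseable.
    try:
        with open(path, "r", encoding="utf-8") as file:
            return set(json.load(file))
    except (FileNotFoundError, json.JSONDecodeError):
        return set()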