优化保存已处理交易哈希的逻辑,合并新哈希并更新总记录数
This commit is contained in:
23
main.py
23
main.py
@@ -136,7 +136,7 @@ def update_json_file(new_data):
|
|||||||
"成功更新%s,合并%d条记录,总计%d条记录",
|
"成功更新%s,合并%d条记录,总计%d条记录",
|
||||||
JSON_FILE,
|
JSON_FILE,
|
||||||
len(processed_data),
|
len(processed_data),
|
||||||
len(existing_data)
|
len(existing_data),
|
||||||
)
|
)
|
||||||
except (FileNotFoundError, json.JSONDecodeError, IOError) as e:
|
except (FileNotFoundError, json.JSONDecodeError, IOError) as e:
|
||||||
LOGGER.error("更新JSON文件时发生错误: %s,跳过本次更新", e)
|
LOGGER.error("更新JSON文件时发生错误: %s,跳过本次更新", e)
|
||||||
@@ -175,9 +175,26 @@ def load_processed_hashes():
|
|||||||
def save_processed_hashes():
    """Persist processed transaction hashes, merging with any already on disk.

    Reads the existing hash list from HASH_FILE (treating a missing or
    corrupt file as empty), unions it with the in-memory SEEN_TXHASHES
    set, and writes the deduplicated merged list back.  I/O errors are
    logged and swallowed so a failed save never crashes the caller.
    """
    try:
        # Load previously saved hashes; a missing or unparseable file
        # simply means we start from an empty history.
        try:
            with open(HASH_FILE, "r", encoding="utf-8") as file:
                existing_data = json.load(file)
        except (FileNotFoundError, json.JSONDecodeError):
            existing_data = []
        # Guard against a file whose top-level JSON value is not a list
        # (e.g. a dict or null): treat it as no prior history instead of
        # iterating dict keys or raising a TypeError the IOError handler
        # below would not catch.
        if not isinstance(existing_data, list):
            existing_data = []

        # Merge new hashes with those already persisted (deduplicated).
        # Build the set once and reuse it for both the union and the
        # new-hash count below.
        known_hashes = set(existing_data)
        updated_data = list(known_hashes | SEEN_TXHASHES)

        # Write the merged list back to disk.
        with open(HASH_FILE, "w", encoding="utf-8") as file:
            json.dump(updated_data, file, indent=4, ensure_ascii=False)
        LOGGER.info(
            "成功更新%s,新增%d条哈希,总计%d条记录",
            HASH_FILE,
            len(SEEN_TXHASHES - known_hashes),
            len(updated_data),
        )
    except IOError as e:
        LOGGER.error("保存已处理交易哈希时发生错误: %s", e)
|
|
||||||
|
Reference in New Issue
Block a user