添加数据抓取和保存功能,定时每10分钟更新phase和general数据
This commit is contained in:
65
general.py
Normal file
65
general.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import csv
|
||||
from datetime import datetime
|
||||
import time
|
||||
import schedule
|
||||
import requests
|
||||
|
||||
# DEXRP backend endpoints polled by this script.
# PHASE_URL returns the current presale phase (raised/goal amounts);
# GENERAL_URL returns overall round statistics.
PHASE_URL = "https://backend.dexrp.io/general/phase"
GENERAL_URL = "https://backend.dexrp.io/general"
|
||||
|
||||
|
||||
def fetch_data(url, timeout=10):
    """Fetch JSON from *url* and return the parsed payload.

    Args:
        url: Endpoint to GET.
        timeout: Seconds before the request is aborted. A timeout is
            required here — without one a stalled connection would hang
            the scheduler loop forever.

    Returns:
        The decoded JSON object, or None on any network, HTTP-status,
        or JSON-decode error (errors are printed, not raised, so the
        periodic job keeps running).
    """
    try:
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()
        return response.json()
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection/timeout/HTTP errors;
        # ValueError covers a 200 response whose body is not valid JSON.
        print(f"Error fetching data: {e}")
        return None
|
||||
|
||||
|
||||
def save_to_csv(filename, data):
    """Append *data* (a dict) as one row to the CSV file *filename*.

    On the first write to an empty/new file, the dict's keys are written
    as a header row first. Errors are printed rather than raised so the
    periodic job keeps running.

    Args:
        filename: Path of the CSV file to append to.
        data: Mapping whose keys form the header and whose values form
            the row; insertion order is preserved.
    """
    try:
        with open(filename, "a", newline="") as f:
            writer = csv.writer(f)
            if f.tell() == 0:  # file is empty: write the header first
                writer.writerow(data.keys())
            writer.writerow(data.values())
    except (OSError, csv.Error) as e:
        # Bug fix: the message previously printed a literal "(unknown)"
        # instead of interpolating the target filename.
        print(f"Error saving to {filename}: {e}")
|
||||
|
||||
|
||||
def job():
    """Fetch phase and general data once and append each to its CSV file.

    Runs the phase endpoint first, then the general endpoint, skipping
    the save step for any endpoint whose fetch failed (returned None).
    """
    targets = (
        ("phase.csv", PHASE_URL, ("raised", "goal")),
        (
            "general.csv",
            GENERAL_URL,
            (
                "roundIndex",
                "raisedAmount",
                "raiseGoalAmount",
                "tokensSoldAmount",
                "tokensGoalAmount",
            ),
        ),
    )
    for filename, url, keys in targets:
        payload = fetch_data(url)
        if payload:
            # Keep only the fields we track; missing keys become None.
            save_to_csv(filename, {key: payload.get(key) for key in keys})
|
||||
|
||||
|
||||
def main():
    """Entry point: run `job` immediately, then every 10 minutes forever.

    The recurring job is registered before the first manual run so the
    10-minute interval is measured from startup, matching the original
    scheduling behavior.
    """
    schedule.every(10).minutes.do(job)
    job()  # immediate first run before the scheduler's first tick

    while True:
        schedule.run_pending()
        time.sleep(1)  # poll the scheduler once per second


if __name__ == "__main__":
    main()
|
Reference in New Issue
Block a user