Mirror of https://github.com/earthjasonlin/zzz-signal-search-export.git
Synced 2025-04-21 16:00:17 +08:00

Compare commits: v1.1.0 ... a660c03bb5 (7 commits)
Commits:
- a660c03bb5
- d7457f2bfb
- 223ab899e0
- f62ca1d7e7
- c034b2e70a
- a2faa86f0c
- 510bfdab7a
@@ -6,7 +6,7 @@
A small tool built with Electron; it needs to run on the Windows operating system.

-It reads the game log, or uses proxy mode, to obtain the authKey required to access the game's warp record API, and then uses that authKey to read the warp records.
+It reads the game log, or uses proxy mode, to obtain the authKey required to access the game's signal search record API, and then uses that authKey to read the signal search records.

## Other languages
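The description changed in the hunk above says the authKey comes either from the game log or from a local proxy. The log-scanning half might look roughly like the sketch below; the file name, URL pattern, and overall approach are illustrative assumptions, not the tool's actual code.

```js
// Illustrative only: scan game log text for a record URL that already carries an authkey.
// The log path and URL pattern here are placeholders, not the repository's real values.
const fs = require('fs')

const logText = fs.readFileSync('Player.log', 'utf8') // hypothetical log file
const urls = logText.match(/https:\/\/[^\s"']*authkey=[^\s"'&]+[^\s"']*/g) || []

if (urls.length) {
  const latest = new URL(urls[urls.length - 1])
  const authkey = latest.searchParams.get('authkey')
  console.log('found authkey of length', authkey.length)
}
```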
@@ -15,7 +15,7 @@
## Usage

1. Download the tool and unzip it - Download links: [GitHub](https://github.com/earthjasonlin/zzz-signal-search-export/releases/latest/download/ZzzSignalSearchExport.zip) / [123云盘](https://www.123pan.com/s/Vs9uVv-ShhE.html) / [蓝奏云 (password: zzzz)](https://www.lanzouh.com/b00eewtvxa)
-2. Open the warp details page in the game
+2. Open the signal search details page in the game


@@ -34,7 +34,7 @@
To export data for more than one account, click the plus button next to it.

-Then switch to the new account in the game, open the warp history again, and click the "Load Data" button in the tool.
+Then switch to the new account in the game, open the signal search history again, and click the "Load Data" button in the tool.

## Development
@@ -6,7 +6,7 @@ This project is modified from the [star-rail-warp-export](https://github.com/biu
A tool built with Electron that runs on the Windows operating system.

-Read the game log or use proxy mode to get the authKey needed to access the game warp history API, and then use that authKey to read the game wish history.
+Read the game log or use proxy mode to get the authKey needed to access the game signal search history API, and then use that authKey to read the game wish history.

## Other languages

@@ -18,7 +18,7 @@ If you feel that the existing translation is inappropriate, you can send a pull
1. Unzip after downloading the tool - [GitHub](https://github.com/earthjasonlin/zzz-signal-search-export/releases/latest/download/ZzzSignalSearchExport.zip)
-2. Open the warp details page of the game
+2. Open the signal search details page of the game


@@ -1,7 +1,7 @@
{
  "name": "zzz-signal-search-export",
-  "version": "1.1.0",
-  "autoUpdateActive": false,
+  "version": "1.1.3",
+  "autoUpdateActive": true,
+  "autoUpdateFrom": "1.1.0",
  "main": "./dist/electron/main/main.js",
  "author": "earthjasonlin <https://git.loliquq.cn/earthjasonlin>",
src/idJson.json (new file, 2487 lines)
File diff suppressed because it is too large.
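The suppressed file's layout can be inferred from tools/getIdMap.py at the end of this diff: a per-language map from item id to localized name, item type, and rank. The ids and names below are placeholders for illustration, not real entries.

```js
// Assumed shape of src/idJson.json (placeholder ids and names, for illustration only)
const idJsonExample = {
  "en-us": {
    "11001": { "name": "ExampleAgent", "item_type": "Agents", "rank_type": 4 },
    "12001": { "name": "ExampleEngine", "item_type": "W-Engines", "rank_type": 3 }
  },
  "zh-cn": {
    "11001": { "name": "示例代理人", "item_type": "代理人", "rank_type": 4 }
  }
}
```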
@@ -7,6 +7,7 @@ const { name, version } = require('../../package.json')
const i18n = require('./i18n')
const { mergeData } = require('./utils/mergeData')
const { sendMsg } = require('./utils')
+const idJson = require('../idJson.json')

const getTimeString = () => {
  return new Date().toLocaleString('sv').replace(/[- :]/g, '').slice(0, -2)
@@ -104,23 +105,35 @@ const importUIGF = async () => {
    jsonData.nap.forEach(uidData => {
      const resultTemp = []
      const isNew = !Boolean(dataMap.has(uidData.uid))

      let region_time_zone
      if (!isNew) region_time_zone = dataMap.get(uidData.uid).region_time_zone
      else region_time_zone = uidData.timezone

+     let targetLang
+     if (!isNew) targetLang = dataMap.get(uidData.uid).lang
+     else targetLang = uidData.lang
+     if (!idJson[targetLang] && (!uidData.list[0].name || !uidData.list[0].item_type || !uidData.list[0].rank_type)) targetLang = config.lang
+
+     let idTargetLangJson = idJson[targetLang]

      uidData.list.forEach(recordEntry => {
+       let rank_type
+       if (idTargetLangJson?.[recordEntry.item_id].rank_type) rank_type = String(idTargetLangJson[recordEntry.item_id].rank_type)
+       else rank_type = recordEntry.rank_type
        resultTemp.push({
          gacha_id: recordEntry.gacha_id,
          gacha_type: recordEntry.gacha_type,
          item_id: recordEntry.item_id,
-         count: recordEntry.count,
+         count: recordEntry.count ?? "1",
          time: convertTimeZone(recordEntry.time, uidData.timezone, region_time_zone),
-         name: recordEntry.name,
-         item_type: recordEntry.item_type,
-         rank_type: recordEntry.rank_type,
+         name: idTargetLangJson?.[recordEntry.item_id].name ?? recordEntry.name,
+         item_type: idTargetLangJson?.[recordEntry.item_id].item_type ?? recordEntry.item_type,
+         rank_type: rank_type,
          id: recordEntry.id
        })
      })

      const resultTempGrouped = resultTemp.reduce((acc, curr) => {
        if (!acc[curr.gacha_type]) {
          acc[curr.gacha_type] = []
@@ -133,9 +146,9 @@ const importUIGF = async () => {
      let data
      const mergedData = mergeData(dataMap.get(uidData.uid), resultMap)
      if (isNew) {
-       data = { result: mergedData, time: Date.now(), uid: uidData.uid, lang: uidData.lang, region_time_zone: uidData.timezone, deleted: false }
+       data = { result: mergedData, time: Date.now(), uid: uidData.uid, lang: targetLang, region_time_zone: uidData.timezone, deleted: false }
      } else {
-       data = { result: mergedData, time: Date.now(), uid: dataMap.get(uidData.uid).uid, lang: dataMap.get(uidData.uid).lang, region_time_zone: dataMap.get(uidData.uid).region_time_zone, deleted: dataMap.get(uidData.uid).deleted }
+       data = { result: mergedData, time: Date.now(), uid: dataMap.get(uidData.uid).uid, lang: targetLang, region_time_zone: dataMap.get(uidData.uid).region_time_zone, deleted: dataMap.get(uidData.uid).deleted }
      }

      saveData(data, '')
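The pattern these import hunks implement is: prefer the bundled id table for name, item type, and rank, and fall back to whatever the imported UIGF record already carries. A reduced illustration is below; it is a sketch, not code from the repository, and `idJson` stands in for the bundled src/idJson.json table.

```js
// Illustrative sketch of the lookup-with-fallback used when importing UIGF records.
// `idJson` is assumed to have the shape { [lang]: { [item_id]: { name, item_type, rank_type } } }.
function localizeRecord(record, idJson, targetLang) {
  const table = idJson[targetLang]
  const entry = table?.[record.item_id]
  return {
    ...record,
    count: record.count ?? '1',                       // count may be omitted in the imported data
    name: entry?.name ?? record.name,                 // prefer the bundled table, keep the imported value otherwise
    item_type: entry?.item_type ?? record.item_type,
    rank_type: entry?.rank_type != null ? String(entry.rank_type) : record.rank_type
  }
}
```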
@@ -31,7 +31,10 @@ const defaultTypeMap = new Map([

const serverTimeZone = new Map([
  ["prod_gf_cn", 8],
-  ["prod_gf_jp", 8]
+  ["prod_gf_jp", 8],
+  ["prod_gf_us", -5],
+  ["prod_gf_eu", 1],
+  ["prod_gf_sg", 8]
])

const convertTimeZone = (dateTimeStr, fromTimeZoneOffset, toTimeZoneOffset) => {
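convertTimeZone itself is unchanged in this diff, so only its signature is visible. Assuming it shifts a "YYYY-MM-DD HH:mm:ss" string between two fixed hour offsets, which is what the serverTimeZone values suggest, a minimal equivalent would look like this sketch (not the repository's implementation):

```js
// Minimal sketch matching the signature above: shift a wall-clock timestamp
// from one fixed UTC offset (in hours) to another.
const convertTimeZoneSketch = (dateTimeStr, fromTimeZoneOffset, toTimeZoneOffset) => {
  const utcMs = Date.parse(dateTimeStr.replace(' ', 'T') + 'Z') - fromTimeZoneOffset * 3600 * 1000
  const shifted = new Date(utcMs + toTimeZoneOffset * 3600 * 1000)
  return shifted.toISOString().slice(0, 19).replace('T', ' ')
}

// e.g. convertTimeZoneSketch('2024-07-04 12:00:00', 8, -5) === '2024-07-03 23:00:00'
```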
tools/getIdMap.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import requests
import json
from opencc import OpenCC

# Initialize the OpenCC converter (Simplified -> Traditional Chinese)
cc = OpenCC('s2t')

# JSON data sources
weapon_url = 'https://api.hakush.in/zzz/data/weapon.json'
character_url = 'https://api.hakush.in/zzz/data/character.json'
bangboo_url = 'https://api.hakush.in/zzz/data/bangboo.json'

# Language mapping configuration
language_map = {
    "zh-cn": "CHS",
    "zh-tw": "CHS",  # converted from Simplified to Traditional
    "en-us": "EN",
    "ja-jp": "JA",
    "ko-kr": "KO"
}

# Item type mapping configuration
type_map = {
    "weapon": {"zh-cn": "音擎", "zh-tw": "音擎", "en-us": "W-Engines", "ja-jp": "音動機", "ko-kr": "W-엔진"},
    "character": {"zh-cn": "代理人", "zh-tw": "代理人", "en-us": "Agents", "ja-jp": "エージェント", "ko-kr": "에이전트"},
    "bangboo": {"zh-cn": "邦布", "zh-tw": "邦布", "en-us": "Bangboo", "ja-jp": "ボンプ", "ko-kr": "「Bangboo」"}
}

def fetch_json(url):
    response = requests.get(url)
    response.raise_for_status()
    return response.json()

def transform_data(data, item_type):
    transformed = {lang: {} for lang in language_map.keys()}
    for id, item in data.items():
        for lang, key in language_map.items():
            name = item[key] if lang != 'zh-tw' else cc.convert(item['CHS'])
            transformed[lang][id] = {
                "name": name,
                "item_type": type_map[item_type][lang],
                "rank_type": item['rank']
            }
    return transformed

def main():
    try:
        weapon_data = fetch_json(weapon_url)
        character_data = fetch_json(character_url)
        bangboo_data = fetch_json(bangboo_url)

        transformed_data = {lang: {} for lang in language_map.keys()}

        weapon_transformed = transform_data(weapon_data, "weapon")
        character_transformed = transform_data(character_data, "character")
        bangboo_transformed = transform_data(bangboo_data, "bangboo")

        for lang in language_map.keys():
            transformed_data[lang].update(weapon_transformed[lang])
            transformed_data[lang].update(character_transformed[lang])
            transformed_data[lang].update(bangboo_transformed[lang])

        with open('./src/idJson.json', 'w', encoding='utf-8') as f:
            json.dump(transformed_data, f, ensure_ascii=False, indent=2)

        print("Data successfully transformed and saved")

    except requests.RequestException as e:
        print(f"Error fetching data: {e}")

if __name__ == "__main__":
    main()
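Assuming the script is run from the repository root (so that the relative path ./src/idJson.json resolves into the project's src directory), it regenerates the bundled id table that the UIGF importer above falls back on.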