Compare commits

...

48 Commits

Author SHA1 Message Date
e74f561ce5 chore: bump version to 1.1.11 2025-01-26 07:40:03 +08:00
9738b41372 chore: update idJson 2025-01-26 07:39:15 +08:00
2d0a5d38bb docs: Stargazers over time 2025-01-05 10:41:22 +08:00
16e01b7a13 chore: bump version to 1.1.10 2024-12-07 16:02:00 +08:00
8f492376a0 chore: update idJson 2024-12-07 16:01:20 +08:00
0642c52db2 chore: bump version to 1.1.9 2024-09-27 21:15:19 +08:00
af256fba7d chore: update idJson 2024-09-27 21:14:44 +08:00
6599fbe6d3 chore: bump version to 1.1.8 2024-09-08 12:23:13 +08:00
a99959e6e5 chore: update idJson 2024-09-08 12:22:27 +08:00
c9c92da926 chore: bump version to 1.1.7 2024-08-21 12:27:56 +08:00
fcff120657 chore: update idJson and add display for idJson version 2024-08-21 12:27:19 +08:00
0ec7cb7c4f fix: winreg compatibility issues
sync with upstream
2024-08-07 17:17:54 +08:00
5a3159d4cb chore: bump version to 1.1.6 2024-08-07 17:08:15 +08:00
38b99bf4dc feat: import export ui layout 2024-08-07 17:07:18 +08:00
0cd9c071d7 chore: bump version to 1.1.5 2024-08-03 23:23:33 +08:00
bf582d0194 fix: update color 2024-08-03 23:19:49 +08:00
5dec6a8273 chore: bump version to 1.1.4 2024-07-29 15:44:10 +08:00
0e429a4762 fix: OS servers log path 2024-07-29 15:43:25 +08:00
5164a17dca style: pylint 2024-07-28 10:13:21 +08:00
a660c03bb5 docs: fix old project name 2024-07-27 15:56:13 +08:00
d7457f2bfb chore: bump version to 1.1.3 2024-07-27 15:50:31 +08:00
223ab899e0 fix(uigf): add bangboo to ID table
close: #5
2024-07-27 15:50:02 +08:00
f62ca1d7e7 chore: bump version to 1.1.2 2024-07-26 10:06:44 +08:00
c034b2e70a feat(uigf): metadata from local db 2024-07-26 10:05:02 +08:00
a2faa86f0c chore: bump version to 1.1.1 2024-07-25 21:45:45 +08:00
510bfdab7a feat: add support for Europe, America, TW,HK,MO servers 2024-07-25 21:44:38 +08:00
f616944755 chore: bump version to 1.1.0 2024-07-25 13:47:37 +08:00
7300c6e719 refactor(i18n): UIGF v4.0 is no longer in beta 2024-07-25 13:26:57 +08:00
6fe12da9be feat(uigf): import UIGFv4.0
BREAKING CHANGES: `region` is no longer stored in/read from local file
2024-07-25 13:25:26 +08:00
14cfda3986 chore: bump version to 1.0.10 2024-07-24 21:02:40 +08:00
8156b5a9b7 fix: UIGF prod_gf_jp timezone 2024-07-24 21:00:50 +08:00
dd098fcd08 chore: bump version to 1.0.9 2024-07-23 15:03:52 +08:00
0cdc7662f7 feat(uigf): support multiple account export 2024-07-23 15:03:30 +08:00
2814ed211b fix(uigf): incorrect timezone 2024-07-23 12:51:22 +08:00
2f14a4d320 chore: bump version to 1.0.8 2024-07-23 11:11:10 +08:00
0e4f3599c9 feat(uigf): support UIGFv4.0(Beta)
Users don't have to delete their data. New data structure is used automatically when new data is fetched

- Set `count` to `"1"` as default for old data
- Record `count` from API
- Link to the UIGF website

BREAKING CHANGE: SRGF is no longer supported, use UIGFv4.0(Beta) instead
2024-07-23 11:10:27 +08:00
f1e3b76d85 chore: bump version to 1.0.7 2024-07-23 07:47:20 +08:00
2736ee0398 fix(excel): fix excel output style 2024-07-23 07:46:30 +08:00
66188231bc chore: bump version to 1.0.6 2024-07-22 21:59:23 +08:00
5624af3fb2 fix(build): fix icon 2024-07-22 21:58:17 +08:00
3c848a97f8 docs(readme): update project name in Chinese title 2024-07-21 23:25:56 +08:00
6ddc29af5a docs(readme): update readme links 2024-07-21 23:13:30 +08:00
e83fe42268 fix(utils): update hash keys 2024-07-21 21:42:19 +08:00
f9e74b4fb8 chore: bump version to 1.0.5 2024-07-21 20:46:26 +08:00
84179ccc8d ci: fix yarn lock for actions 2024-07-21 20:46:05 +08:00
8b725053ce feat(update): re-implement auto update 2024-07-21 19:45:51 +08:00
bebb14b63d chore: bump version to 1.0.4 2024-07-21 14:30:40 +08:00
2adf56d062 fix(gachaDetail): reset countMio when get S-Rank 2024-07-21 14:26:14 +08:00
25 changed files with 4547 additions and 744 deletions

51
.electron-vite/update.js Normal file

@ -0,0 +1,51 @@
const fs = require('fs-extra')
const path = require('path')
const crypto = require('crypto')
const AdmZip = require('adm-zip')
const { version, autoUpdateActive, autoUpdateFrom } = require('../package.json')
const hash = (data, type = 'sha256') => {
const hmac = crypto.createHmac(type, 'nap')
hmac.update(data)
return hmac.digest('hex')
}
const createZip = (filePath, dest) => {
const zip = new AdmZip()
zip.addLocalFolder(filePath)
zip.toBuffer()
zip.writeZip(dest)
}
const start = async () => {
copyAppZip()
const appPath = './build/win-ia32-unpacked/resources/app'
const name = 'app.zip'
const outputPath = path.resolve('./build/update/update/')
const zipPath = path.resolve(outputPath, name)
await fs.ensureDir(outputPath)
await fs.emptyDir(outputPath)
createZip(appPath, zipPath)
const buffer = await fs.readFile(zipPath)
const sha256 = hash(buffer)
const hashName = sha256.slice(7, 12)
await fs.copy(zipPath, path.resolve(outputPath, `${hashName}.zip`))
await fs.remove(zipPath)
await fs.outputJSON(path.join(outputPath, 'manifest.json'), {
active: autoUpdateActive,
version: version,
from: autoUpdateFrom,
name: `${hashName}.zip`,
hash: sha256
})
}
const copyAppZip = () => {
try {
const dir = path.resolve('./build')
const filePath = path.resolve(dir, `ZzzSignalSearchExport-${version}-ia32-win.zip`)
fs.copySync(filePath, path.join(dir, 'app.zip'))
} catch (e) {}
}
start()
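For reference, the manifest this script writes to `build/update/update/manifest.json` has roughly the following shape; the values below are illustrative, derived from the logic above (the zip is renamed to a 5-character slice of its HMAC-SHA256 digest).

```js
// Illustrative sketch of the manifest written by the build step above (values are examples, not real output)
const manifest = {
  active: true,                 // package.json "autoUpdateActive"
  version: '1.1.11',            // package.json "version"
  from: '1.1.0',                // package.json "autoUpdateFrom": oldest version allowed to auto-update
  name: 'a1b2c.zip',            // sha256.slice(7, 12) + '.zip' (hypothetical digest slice)
  hash: '<full hex HMAC-SHA256 digest of the zip>'
}
```

The in-app updater (`src/main/update/index.js`, further down in this diff) fetches this manifest and compares `hash` against its own digest of the downloaded archive.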

55
.github/workflows/release.yml vendored Normal file
View File

@ -0,0 +1,55 @@
on:
workflow_dispatch:
push:
# Sequence of patterns matched against refs/tag
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
name: Release
jobs:
build:
name: Release
runs-on: windows-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Build App
run: |
yarn --frozen-lockfile
yarn build:win32
yarn build-update
- name: Create Release
if: success()
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: ZzzSignalSearchExport ${{ github.ref }}
draft: false
prerelease: false
- name: Upload Release Asset
if: success()
id: upload-release-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
asset_path: ./build/app.zip
asset_name: ZzzSignalSearchExport.zip
asset_content_type: application/zip
- name: Deploy update
if: success()
uses: crazy-max/ghaction-github-pages@v2
with:
commit_message: Update app
build_dir: ./build/update
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}

README.md

@ -1,12 +1,12 @@
# 绝区零记录导出工具
# 绝区零调频记录导出工具
中文 | [English](https://git.loliquq.cn/earthjasonlin/zzz-signal-search-export/blob/main/docs/README_EN.md)
中文 | [English](https://github.com/earthjasonlin/zzz-signal-search-export/blob/main/docs/README_EN.md)
这个项目由[star-rail-warp-export](https://github.com/biuuu/star-rail-warp-export/)修改而来,功能基本一致。
一个使用 Electron 制作的小工具,需要在 Windows 操作系统上运行。
通过读取游戏日志或者代理模式获取访问游戏跃迁记录 API 所需的 authKey，然后再使用获取到的 authKey 来读取游戏跃迁记录。
通过读取游戏日志或者代理模式获取访问游戏调频记录 API 所需的 authKey，然后再使用获取到的 authKey 来读取游戏调频记录。
## 其它语言
@ -15,7 +15,7 @@
## 使用说明
1. 下载工具后解压 - 下载地址: [GitHub](https://github.com/earthjasonlin/zzz-signal-search-export/releases/latest/download/ZzzSignalSearchExport.zip) / [123云盘](https://www.123pan.com/s/Vs9uVv-ShhE.html) / [蓝奏云(密码:zzzz)](https://www.lanzouh.com/b00eewtvxa)
2. 打开游戏的跃迁详情页面
2. 打开游戏的调频详情页面
![详情页面](/docs/wish-history.jpg)
@ -34,7 +34,11 @@
如果需要导出多个账号的数据,可以点击旁边的加号按钮。
然后游戏切换的新账号,再打开跃迁历史记录,工具再点击“加载数据”按钮。
然后游戏切换的新账号,再打开调频历史记录,工具再点击“加载数据”按钮。
## Stargazers over time
[![Stargazers over time](https://starchart.cc/earthjasonlin/zzz-signal-search-export.svg)](https://starchart.cc/earthjasonlin/zzz-signal-search-export)
## Development

Binary file not shown. Before: 264 KiB, After: 42 KiB.

docs/README_EN.md

@ -1,12 +1,12 @@
# Zenless Zone Zero Signal Search History Exporter
[中文](https://git.loliquq.cn/earthjasonlin/zzz-signal-search-export/blob/main/README.md) | English
[中文](https://github.com/earthjasonlin/zzz-signal-search-export) | English
This project is modified from the [star-rail-warp-export](https://github.com/biuuu/star-rail-warp-export/) repository, and its functions are basically the same.
A tool made from Electron that runs on the Windows operating system.
Read the game log or proxy to get the authKey needed to access the game warp history API, and then use the authKey to read the game wish history.
Read the game log or proxy to get the authKey needed to access the game signal search history API, and then use the authKey to read the game signal search history.
## Other languages
@ -18,7 +18,7 @@ If you feel that the existing translation is inappropriate, you can send a pull
1. Unzip after downloading the tool - [GitHub](https://github.com/earthjasonlin/zzz-signal-search-export/releases/latest/download/ZzzSignalSearchExport.zip)
2. Open the warp details page of the game
2. Open the signal search details page of the game
![warp details](/docs/wish-history-en.jpg)

package.json

@ -1,6 +1,8 @@
{
"name": "zzz-signal-search-export",
"version": "1.0.3",
"version": "1.1.11",
"autoUpdateActive": true,
"autoUpdateFrom": "1.1.0",
"main": "./dist/electron/main/main.js",
"author": "earthjasonlin <https://git.loliquq.cn/earthjasonlin>",
"homepage": "https://github.com/earthjasonlin/zzz-signal-search-export",
@ -16,6 +18,7 @@
"build:dir": "cross-env BUILD_TARGET=clean node .electron-vite/build.js && electron-builder --dir",
"build:clean": "cross-env BUILD_TARGET=onlyClean node .electron-vite/build.js",
"build:web": "cross-env BUILD_TARGET=web node .electron-vite/build.js",
"build-update": "node .electron-vite/update.js",
"dev:web": "cross-env TARGET=web node .electron-vite/dev-runner.js",
"start": "electron ./src/main/main.js",
"dep:upgrade": "yarn upgrade-interactive --latest",
@ -107,7 +110,7 @@
"tailwindcss": "^3.0.16",
"vite": "2.7.13",
"vue": "^3.2.29",
"winreg": "^1.2.4",
"winreg": "1.2.4",
"yauzl": "^2.10.0"
},
"keywords": [

View File

@ -3,9 +3,10 @@
"ui.button.load": "Load data",
"ui.button.update": "Update",
"ui.button.directUpdate": "Direct update",
"ui.button.files": "Export Files",
"ui.button.files": "Import/Export",
"ui.button.excel": "Export Excel",
"ui.button.srgf": "Export JSON",
"ui.button.uigf": "Export UIGF",
"ui.button.import": "Import UIGF",
"ui.button.url": "Input URL",
"ui.button.setting": "Settings",
"ui.button.option": "Option",
@ -56,6 +57,7 @@
"ui.setting.fetchFullHistoryHint": "When this option is enabled, click the \"Update Data\" button to get all the card draw records within 6 months. When there are incorrect data within 6 months, this function can be used to repair.",
"ui.setting.closeProxy": "Disable system proxy",
"ui.setting.closeProxyHint": "When you choose proxy mode, if the program crashes it can cause unwanted results that may affect your system. You can click this button to clear the system proxy settings.",
"ui.setting.idVersion": "ID database version",
"ui.about.title": "About",
"ui.about.license": "This software is opensource using MIT license.",
"ui.urlDialog.title": "Input URL manually",
@ -87,6 +89,7 @@
"log.proxy.hint": "Using proxy mode [${ip}:${port}] to get URLplease reopen warp history inside the game client.",
"log.url.notFound2": "Unable to find URL, please make sure you already opened warp history inside the game client",
"log.url.incorrect": "Unable to get URL parameters",
"log.autoUpdate.success": "Auto update successfulplease restart the program",
"excel.header.time": "time",
"excel.header.name": "name",
"excel.header.type": "type",
@ -98,8 +101,9 @@
"excel.customFont": "Arial",
"excel.filePrefix": "Zenless Zone Zero Signal Search Log",
"excel.fileType": "Excel file",
"srgf.fileType": "Zenless Zone Zero Gacha Log Format file (SRGF)",
"uigf.fileType": "Uniformed Interchangeable GachaLog Format v4.0 (Beta)",
"ui.extra.cacheClean": "1. Confirm whether the search history in the game has been opened, and if the error \"User authentication expired\" still appears, try the following steps \n2. Close the game window of Zenless Zone Zero \n3. Click the \"Open Web Cache Folder\" button above to open the \"Cache\" folder \n4. Delete the \"Cache_Data\" folder \n5. Start the Zenless Zone Zero game and open the search history page in the game \n6. Close this dialog and click the \"Update Data\" button",
"ui.extra.findCacheFolder": "If the \"Open cache folder\" button does not respond, you can manually find the game's web cache folder. The directory is \"Your game installation path/ZenlessZoneZero_Data/webCaches/Cache/\"",
"ui.extra.urlCopied": "URL Copied"
"ui.extra.urlCopied": "URL Copied",
"ui.uigf.title": "Please select the UID(s) you want to export"
}

View File

@ -3,9 +3,10 @@
"ui.button.load": "加载数据",
"ui.button.update": "更新数据",
"ui.button.directUpdate": "直接更新",
"ui.button.files": "导出文件",
"ui.button.files": "导入/导出",
"ui.button.excel": "导出Excel",
"ui.button.srgf":"导出JSON",
"ui.button.uigf":"导出UIGF",
"ui.button.import":"导入UIGF",
"ui.button.url": "输入URL",
"ui.button.setting": "设置",
"ui.button.option": "选项",
@ -55,6 +56,7 @@
"ui.setting.fetchFullHistoryHint": "开启时点击“更新数据”按钮会完整获取6个月内所有的抽卡记录当记录里有6个月范围以内的错误数据时可以通过这个功能修复。",
"ui.setting.closeProxy": "关闭系统代理",
"ui.setting.closeProxyHint": "如果使用过代理模式时工具非正常关闭,可能导致系统代理设置没能清除,可以通过这个按钮来清除设置过的系统代理。",
"ui.setting.idVersion": "ID 数据库版本",
"ui.about.title": "关于",
"ui.about.license": "本工具为开源软件,源代码使用 MIT 协议授权",
"ui.urlDialog.title": "手动输入URL",
@ -86,6 +88,7 @@
"log.proxy.hint": "正在使用代理模式[${ip}:${port}]获取URL请重新打开游戏抽卡记录。",
"log.url.notFound2": "未找到URL请确认是否已打开游戏抽卡记录",
"log.url.incorrect": "获取URL参数失败",
"log.autoUpdate.success": "自动更新已完成,重启工具后生效",
"excel.header.time": "时间",
"excel.header.name": "名称",
"excel.header.type": "类别",
@ -97,8 +100,9 @@
"excel.customFont": "微软雅黑",
"excel.filePrefix": "绝区零调频记录",
"excel.fileType": "Excel文件",
"srgf.fileType":"绝区零调频记录格式文件SRGF",
"uigf.fileType":"统一可交换抽卡记录标准 v4.0Beta",
"ui.extra.cacheClean": "1. 确认是否已经打开游戏内的抽卡历史记录,如果仍然出现“身份认证已过期”的错误,再尝试下面的步骤\n2. 关闭绝区零的游戏窗口\n3. 点击上方的“打开缓存文件夹”按钮打开Cache文件夹\n4. 删除Cache_Data文件夹\n5. 启动绝区零游戏,打开游戏内抽卡历史记录页面\n6. 关闭这个对话框,再点击“更新数据”按钮",
"ui.extra.findCacheFolder": "如果点“打开缓存文件夹”按钮没有反应,可以手动找到游戏的网页缓存文件夹,目录为“你的游戏安装路径/ZenlessZoneZero_Data/webCaches/Cache/”",
"ui.extra.urlCopied": "URL已复制"
"ui.extra.urlCopied": "URL已复制",
"ui.uigf.title": "请选择要导出的UID"
}

View File

@ -3,9 +3,10 @@
"ui.button.load": "加載數據",
"ui.button.update": "更新數據",
"ui.button.directUpdate": "直接更新",
"ui.button.files": "導出文件",
"ui.button.files": "導入/匯出",
"ui.button.excel": "導出Excel",
"ui.button.srgf":"導出JSON",
"ui.button.uigf":"導出UIGF",
"ui.button.import":"導入UIGF",
"ui.button.url": "輸入URL",
"ui.button.setting": "設置",
"ui.button.option": "選項",
@ -54,6 +55,7 @@
"ui.setting.fetchFullHistoryHint": "開啟時點擊「更新數據」按鈕會完整獲取6個月內所有的抽卡記錄當記錄裏有6個月範圍以內的錯誤數據時可以通過這個功能修復。",
"ui.setting.closeProxy": "關閉系統代理",
"ui.setting.closeProxyHint": "如果使用過代理模式時工具非正常關閉,可能導致系統代理設置沒能清除,可以通過這個按鈕來清除設置過的系統代理。",
"ui.setting.idVersion": "ID 數據庫版本",
"ui.about.title": "關於",
"ui.about.license": "本工具為開源軟件,源代碼使用 MIT 協議授權",
"ui.urlDialog.title": "手動輸入URL",
@ -97,8 +99,9 @@
"excel.customFont": "微軟雅黑",
"excel.filePrefix": "絕區零調頻記錄",
"excel.fileType": "Excel文件",
"srgf.fileType":"絕區零調頻記錄格式文件SRGF",
"uigf.fileType":"統一可交換抽卡記錄標準 v4.0Beta",
"ui.extra.cacheClean": "1. 確認是否已經打開遊戲內的抽卡歷史記錄,如果仍然出現「身份認證已過期」的錯誤,再嘗試下面的步驟\n2. 關閉絕區零的遊戲窗口\n3. 點擊上方的「打開緩存文件夾」按鈕打開Cache文件夾\n4. 刪除Cache_Data文件夾\n5. 啟動絕區零遊戲,打開遊戲內抽卡歷史記錄頁面\n6. 關閉這個對話框,再點擊「更新數據」按鈕",
"ui.extra.findCacheFolder": "如果點「打開緩存文件夾」按鈕沒有反應,可以手動找到遊戲的網頁緩存文件夾,目錄為「你的遊戲安裝路徑/ZenlessZoneZero_Data/webCaches/Cache/」",
"ui.extra.urlCopied": "URL已復製"
"ui.extra.urlCopied": "URL已復製",
"ui.uigf.title": "請選擇要導出的UID"
}

3138
src/idJson.json Normal file

File diff suppressed because it is too large

src/main/SRGFJson.js

@ -1,74 +0,0 @@
const { app, ipcMain, dialog } = require('electron')
const fs = require('fs-extra')
const path = require('path')
const getData = require('./getData').getData
const { version } = require('../../package.json')
const i18n = require('./i18n')
const getTimeString = () => {
return new Date().toLocaleString('sv').replace(/[- :]/g, '').slice(0, -2)
}
const formatDate = (date) => {
let y = date.getFullYear()
let m = `${date.getMonth()+1}`.padStart(2, '0')
let d = `${date.getDate()}`.padStart(2, '0')
return `${y}-${m}-${d} ${date.toLocaleString('zh-cn', { hour12: false }).slice(-8)}`
}
const start = async () => {
const { dataMap, current } = await getData()
const data = dataMap.get(current)
if (!data.result.size) {
throw new Error('数据为空')
}
const result = {
info: {
uid: data.uid,
lang: data.lang,
export_time: formatDate(new Date()),
export_timestamp: Math.ceil(Date.now() / 1000),
export_app: 'zzz-signal-search-export',
export_app_version: `v${version}`,
region_time_zone: data.region_time_zone,
srgf_version: 'v1.0'
},
list: []
}
const listTemp = []
for (let [type, arr] of data.result) {
arr.forEach(log => {
listTemp.push({
gacha_id: log.gacha_id,
gacha_type: log.gacha_type,
item_id: log.item_id,
count: '1',
time: log.time,
name: log.name,
item_type: log.item_type,
rank_type: log.rank_type,
id: log.id
})
})
}
listTemp.sort((a, b) => Number(BigInt(a.id) - BigInt(b.id)))
listTemp.forEach(item => {
result.list.push({
...item
})
})
const filePath = dialog.showSaveDialogSync({
defaultPath: path.join(app.getPath('downloads'), `SRGF_${data.uid}_${getTimeString()}`),
filters: [
{ name: i18n.srgf.fileType, extensions: ['json'] }
]
})
if (filePath) {
await fs.ensureFile(filePath)
await fs.writeFile(filePath, JSON.stringify(result))
}
}
ipcMain.handle('EXPORT_SRGF_JSON', async () => {
await start()
})

174
src/main/UIGFJson.js Normal file

@ -0,0 +1,174 @@
const { app, ipcMain, dialog } = require('electron')
const fs = require('fs-extra')
const path = require('path')
const { getData, saveData, changeCurrent, convertTimeZone } = require('./getData')
const config = require('./config')
const { name, version } = require('../../package.json')
const i18n = require('./i18n')
const { mergeData } = require('./utils/mergeData')
const { sendMsg } = require('./utils')
const idJson = require('../idJson.json')
const getTimeString = () => {
return new Date().toLocaleString('sv').replace(/[- :]/g, '').slice(0, -2)
}
const formatDate = (date) => {
let y = date.getFullYear()
let m = `${date.getMonth()+1}`.padStart(2, '0')
let d = `${date.getDate()}`.padStart(2, '0')
return `${y}-${m}-${d} ${date.toLocaleString('zh-cn', { hour12: false }).slice(-8)}`
}
const exportUIGF = async (uids) => {
const result = {
info: {
export_timestamp: Math.ceil(Date.now() / 1000),
export_app: `${name}`,
export_app_version: `v${version}`,
version: "v4.0"
},
nap: []
}
const { dataMap, current } = await getData()
let fulldata = []
uids.forEach(uid => {
fulldata.push(dataMap.get(uid))
})
if (!fulldata.length) {
throw new Error('数据为空')
}
fulldata.forEach(data => {
const listTemp = []
for (let [type, arr] of data.result) {
arr.forEach(log => {
listTemp.push({
gacha_id: log.gacha_id,
gacha_type: log.gacha_type,
item_id: log.item_id,
count: log.count,
time: log.time,
name: log.name,
item_type: log.item_type,
rank_type: log.rank_type,
id: log.id
})
})
}
listTemp.sort((a, b) => Number(BigInt(a.id) - BigInt(b.id)))
let dataTemp = {
uid: data.uid,
timezone: data.region_time_zone,
lang: data.lang,
list: []
}
listTemp.forEach(item => {
dataTemp.list.push({
...item
})
})
result.nap.push(dataTemp)
})
const filePath = dialog.showSaveDialogSync({
defaultPath: path.join(app.getPath('downloads'), fulldata.length > 1 ? `UIGF_${getTimeString()}` : `UIGF_${fulldata[0].uid}_${getTimeString()}`),
filters: [
{ name: i18n.uigf.fileType, extensions: ['json'] }
]
})
if (filePath) {
await fs.ensureFile(filePath)
await fs.writeFile(filePath, JSON.stringify(result))
}
}
const importUIGF = async () => {
const filepath = await dialog.showOpenDialogSync({
properties: ['openFile'],
filters: [
{ name: i18n.uigf.fileType, extensions: ['json'] }
]
})
if (!filepath) return
const { dataMap, current } = await getData()
try {
const jsonData = fs.readJsonSync(filepath[0])
if('info' in jsonData && 'version' in jsonData.info) {
if (jsonData.info.version !== 'v4.0') {
sendMsg('不支持此版本UIGF')
console.error('不支持此版本UIGF')
return
}
} else {
sendMsg('UIGF格式错误')
console.error('UIGF格式错误')
return
}
jsonData.nap.forEach(uidData => {
const resultTemp = []
const isNew = !Boolean(dataMap.has(uidData.uid))
let region_time_zone
if (!isNew) region_time_zone = dataMap.get(uidData.uid).region_time_zone
else region_time_zone = uidData.timezone
let targetLang
if (!isNew) targetLang = dataMap.get(uidData.uid).lang
else targetLang = uidData.lang
if(!idJson[targetLang] && (!uidData.list[0].name || !uidData.list[0].item_type || !uidData.list[0].rank_type)) targetLang = config.lang
let idTargetLangJson = idJson[targetLang]
uidData.list.forEach(recordEntry => {
let rank_type
if (idTargetLangJson?.[recordEntry.item_id]?.rank_type) rank_type = String(idTargetLangJson[recordEntry.item_id].rank_type)
else rank_type = recordEntry.rank_type
resultTemp.push({
gacha_id: recordEntry.gacha_id,
gacha_type: recordEntry.gacha_type,
item_id: recordEntry.item_id,
count: recordEntry.count ?? "1",
time: convertTimeZone(recordEntry.time, uidData.timezone, region_time_zone),
name: idTargetLangJson?.[recordEntry.item_id]?.name ?? recordEntry.name,
item_type: idTargetLangJson?.[recordEntry.item_id]?.item_type ?? recordEntry.item_type,
rank_type: rank_type,
id: recordEntry.id
})
})
const resultTempGrouped = resultTemp.reduce((acc, curr) => {
if (!acc[curr.gacha_type]) {
acc[curr.gacha_type] = []
}
acc[curr.gacha_type].push(curr)
return acc;
}, {})
const resultTempMap = new Map(Object.entries(resultTempGrouped))
const resultMap = { result: resultTempMap, uid: uidData.uid}
let data
const mergedData = mergeData(dataMap.get(uidData.uid), resultMap)
if (isNew) {
data = { result: mergedData, time: Date.now(), uid: uidData.uid, lang: targetLang, region_time_zone: uidData.timezone, deleted: false }
} else {
data = { result: mergedData, time: Date.now(), uid: dataMap.get(uidData.uid).uid, lang: targetLang, region_time_zone: dataMap.get(uidData.uid).region_time_zone, deleted: dataMap.get(uidData.uid).deleted }
}
saveData(data, '')
changeCurrent(uidData.uid)
dataMap.set(uidData.uid, data)
})
return {
dataMap,
current: config.current
}
} catch (error) {
sendMsg(error, 'ERROR')
console.error(error)
}
}
ipcMain.handle('EXPORT_UIGF_JSON', async (event, uids) => {
await exportUIGF(uids)
})
ipcMain.handle('IMPORT_UIGF_JSON', async () => {
return await importUIGF()
})
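For orientation, a file produced by `exportUIGF` above has roughly the shape sketched below; the UID, timestamps and record fields are illustrative placeholders, while the key names mirror the code.

```js
// Rough shape of an exported UIGF v4.0 file (all values illustrative)
const uigfExample = {
  info: {
    export_timestamp: 1721880000,              // Math.ceil(Date.now() / 1000)
    export_app: 'zzz-signal-search-export',    // package.json "name"
    export_app_version: 'v1.1.11',
    version: 'v4.0'
  },
  nap: [
    {
      uid: '10000001',
      timezone: 8,                             // region_time_zone stored for that account
      lang: 'zh-cn',
      list: [
        {
          gacha_id: '...', gacha_type: '...', item_id: '...',
          count: '1', time: '2024-07-23 12:34:56',
          name: '...', item_type: '...', rank_type: '4', id: '...'
        }
      ]
    }
  ]
}
```

`importUIGF` accepts the same layout: it converts `time` into the timezone already stored for that UID via `convertTimeZone`, and backfills `name`, `item_type` and `rank_type` from `idJson` when the item ID is known.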

src/main/excel.js

@ -82,11 +82,17 @@ const start = async () => {
arr.push(log.time)
arr.push(log.name)
arr.push(log.item_type)
arr.push(log.rank_type)
if(log.rank_type === '2') {
arr.push(i18n.ui.data.star2)
} else if(log.rank_type === '3') {
arr.push(i18n.ui.data.star3)
} else {
arr.push(i18n.ui.data.star4)
}
arr.push(total)
arr.push(pity)
temp.push(arr)
if (log.rank_type === '5') {
if (log.rank_type === '4') {
pity = 0
}
// if (key === '301') {
@ -133,14 +139,14 @@ const start = async () => {
}
// rare rank background color
const rankColor = {
3: "ff8e8e8e",
4: "ffa256e1",
5: "ffbd6932",
2: "ff8e8e8e",
3: "ffa256e1",
4: "ffbd6932",
}
sheet.getCell(`${c}${i + 2}`).font = {
name: customFont,
color: { argb: rankColor[v.rank_type] },
bold : v.rank_type != "3"
bold : v.rank_type != "2"
}
})
})
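The hunks above are the Zenless Zone Zero rank remapping: where the Star Rail code used `rank_type` values 3/4/5, ZZZ records use 2/3/4. The sketch below spells out the mapping this implies; the B/A/S labels are an assumption based on the pity reset at `'4'` and the `star2`/`star3`/`star4` i18n labels, not something stated in this diff.

```js
// Assumed ZZZ rank_type mapping (old Star Rail values were '3'/'4'/'5')
const rankLabel = { '2': 'B', '3': 'A', '4': 'S' }
const isTopRank = log => log.rank_type === '4'   // the pity counter above resets on this value
```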

src/main/getData.js

@ -29,6 +29,27 @@ const defaultTypeMap = new Map([
['5', '邦布频段']
])
const serverTimeZone = new Map([
["prod_gf_cn", 8],
["prod_gf_jp", 8],
["prod_gf_us", -5],
["prod_gf_eu", 1],
["prod_gf_sg", 8]
])
const convertTimeZone = (dateTimeStr, fromTimeZoneOffset, toTimeZoneOffset) => {
let date = new Date(dateTimeStr.replace(' ', 'T') + 'Z');
let utcDate = new Date(date.getTime() - fromTimeZoneOffset * 60 * 60 * 1000);
let targetDate = new Date(utcDate.getTime() + toTimeZoneOffset * 60 * 60 * 1000);
let year = targetDate.getUTCFullYear();
let month = String(targetDate.getUTCMonth() + 1).padStart(2, '0');
let day = String(targetDate.getUTCDate()).padStart(2, '0');
let hours = String(targetDate.getUTCHours()).padStart(2, '0');
let minutes = String(targetDate.getUTCMinutes()).padStart(2, '0');
let seconds = String(targetDate.getUTCSeconds()).padStart(2, '0');
return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}`;
}
const findDataFiles = async (dataPath, fileMap) => {
const files = await readdir(dataPath)
if (files?.length) {
@ -58,6 +79,13 @@ const readData = async () => {
const data = await readJSON(dataPath, name)
data.typeMap = new Map(data.typeMap) || defaultTypeMap
data.result = new Map(data.result)
data.result.forEach((value, key) => {
value.forEach(item => {
if (!('count' in item)) {
item.count = "1";
}
});
});
if (data.uid) {
dataMap.set(data.uid, data)
}
@ -86,7 +114,7 @@ const changeCurrent = async (uid) => {
const detectGameLocale = async (userPath) => {
let list = []
const lang = app.getLocale()
const arr = ['/miHoYo/绝区零/', '/Cognosphere/Zenless Zone Zero/']
const arr = ['/miHoYo/绝区零/', '/miHoYo/ZenlessZoneZero/']
arr.forEach(str => {
try {
const pathname = path.join(userPath, '/AppData/LocalLow/', str, 'Player.log')
@ -196,7 +224,6 @@ const getGachaLogs = async ({ name, key }, queryString) => {
let logs = []
let uid = ''
let region = ''
let region_time_zone = ''
let endId = '0'
const url = `${apiDomain}/common/gacha_record/api/getGachaLog?${queryString}`
do {
@ -214,9 +241,6 @@ const getGachaLogs = async ({ name, key }, queryString) => {
if (!region) {
region = res.region
}
if (!region_time_zone) {
region_time_zone = res.region_time_zone
}
list.push(...logs)
page += 1
@ -245,7 +269,7 @@ const getGachaLogs = async ({ name, key }, queryString) => {
}
}
} while (logs.length > 0)
return { list, uid, region, region_time_zone }
return { list, uid, region }
}
const checkResStatus = (res) => {
@ -418,13 +442,28 @@ const fetchData = async (urlOverride) => {
const typeMap = new Map()
const lang = searchParams.get('lang')
let originUid = ''
let originRegion = ''
let originTimeZone = ''
let localTimeZone
for (const type of gachaType) {
const { list, uid, region, region_time_zone } = await getGachaLogs(type, queryString)
const { list, uid, region} = await getGachaLogs(type, queryString)
const region_time_zone = serverTimeZone.get(region)
if(!region_time_zone) {
sendMsg('不支持此服务器')
console.error('不支持此服务器')
return
}
if (localTimeZone === undefined) {
localTimeZone = dataMap.get(uid)?.region_time_zone
if (localTimeZone === undefined) {
localTimeZone = region_time_zone
}
}
localTimeZone = Number(localTimeZone)
list.forEach(item => {
item.time = convertTimeZone(item.time, region_time_zone, localTimeZone)
})
const logs = list.map((item) => {
const { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type } = item
return { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type }
const { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type, count} = item
return { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type, count }
})
logs.reverse()
typeMap.set(type.key, type.name)
@ -432,14 +471,8 @@ const fetchData = async (urlOverride) => {
if (!originUid) {
originUid = uid
}
if (!originRegion) {
originRegion = region
}
if (!originTimeZone) {
originTimeZone = region_time_zone
}
}
const data = { result, typeMap, time: Date.now(), uid: originUid, lang, region: originRegion, region_time_zone: originTimeZone }
const data = { result, typeMap, time: Date.now(), uid: originUid, lang, region_time_zone: localTimeZone }
const localData = dataMap.get(originUid)
const mergedResult = mergeData(localData, data)
data.result = mergedResult
@ -520,5 +553,9 @@ exports.getData = () => {
}
}
exports.serverTimeZone = serverTimeZone
exports.getUrl = getUrl
exports.deleteData = deleteData
exports.saveData = saveData
exports.changeCurrent = changeCurrent
exports.convertTimeZone = convertTimeZone
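A quick worked example of `convertTimeZone` as defined above: a record fetched from a UTC+8 server, re-expressed in the America server's UTC-5 local time (input values illustrative).

```js
// '2024-07-23 12:00:00' at UTC+8 is 04:00 UTC, i.e. 23:00 on the previous day at UTC-5
convertTimeZone('2024-07-23 12:00:00', 8, -5)   // => '2024-07-22 23:00:00'
```

This is what lets `fetchData` keep every record of a UID in a single `localTimeZone`, and what `importUIGF` relies on to reconcile imported records with locally stored ones.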

View File

@ -66,7 +66,7 @@ const parseText = (text, data) => {
}
const mainProps = [
'symbol', 'ui', 'log', 'excel',"srgf"
'symbol', 'ui', 'log', 'excel',"uigf"
]
const i18n = new Proxy(raw, {

src/main/main.js

@ -4,7 +4,8 @@ const { disableProxy, proxyStatus } = require('./module/system-proxy')
require('./getData')
require('./bridge')
require('./excel')
require('./SRGFJson')
require('./UIGFJson')
const { getUpdateInfo } = require('./update/index')
const isDev = !app.isPackaged
let win = null
@ -53,6 +54,12 @@ if (!isFirstInstance) {
if (proxyStatus.started) {
disableProxy()
}
if (getUpdateInfo().status === 'moving') {
e.preventDefault()
setTimeout(() => {
app.quit()
}, 3000)
}
})
app.on('quit', () => {

View File

@ -0,0 +1,185 @@
// Copyright (c) 2014 Max Ogden and other contributors
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// https://github.com/maxogden/extract-zip
// eslint-disable-next-line node/no-unsupported-features/node-builtins
const { createWriteStream, promises: fs } = require('original-fs')
const getStream = require('get-stream')
const path = require('path')
const { promisify } = require('util')
const stream = require('stream')
const yauzl = require('yauzl')
const openZip = promisify(yauzl.open)
const pipeline = promisify(stream.pipeline)
class Extractor {
constructor (zipPath, opts) {
this.zipPath = zipPath
this.opts = opts
}
async extract () {
this.zipfile = await openZip(this.zipPath, { lazyEntries: true })
this.canceled = false
return new Promise((resolve, reject) => {
this.zipfile.on('error', err => {
this.canceled = true
reject(err)
})
this.zipfile.readEntry()
this.zipfile.on('close', () => {
if (!this.canceled) {
resolve()
}
})
this.zipfile.on('entry', async entry => {
/* istanbul ignore if */
if (this.canceled) {
return
}
if (entry.fileName.startsWith('__MACOSX/')) {
this.zipfile.readEntry()
return
}
const destDir = path.dirname(path.join(this.opts.dir, entry.fileName))
try {
await fs.mkdir(destDir, { recursive: true })
const canonicalDestDir = await fs.realpath(destDir)
const relativeDestDir = path.relative(this.opts.dir, canonicalDestDir)
if (relativeDestDir.split(path.sep).includes('..')) {
throw new Error(`Out of bound path "${canonicalDestDir}" found while processing file ${entry.fileName}`)
}
await this.extractEntry(entry)
this.zipfile.readEntry()
} catch (err) {
this.canceled = true
this.zipfile.close()
reject(err)
}
})
})
}
async extractEntry (entry) {
/* istanbul ignore if */
if (this.canceled) {
return
}
if (this.opts.onEntry) {
this.opts.onEntry(entry, this.zipfile)
}
const dest = path.join(this.opts.dir, entry.fileName)
// convert external file attr int into a fs stat mode int
const mode = (entry.externalFileAttributes >> 16) & 0xFFFF
// check if it's a symlink or dir (using stat mode constants)
const IFMT = 61440
const IFDIR = 16384
const IFLNK = 40960
const symlink = (mode & IFMT) === IFLNK
let isDir = (mode & IFMT) === IFDIR
// Failsafe, borrowed from jsZip
if (!isDir && entry.fileName.endsWith('/')) {
isDir = true
}
// check for windows weird way of specifying a directory
// https://github.com/maxogden/extract-zip/issues/13#issuecomment-154494566
const madeBy = entry.versionMadeBy >> 8
if (!isDir) isDir = (madeBy === 0 && entry.externalFileAttributes === 16)
const procMode = this.getExtractedMode(mode, isDir) & 0o777
// always ensure folders are created
const destDir = isDir ? dest : path.dirname(dest)
const mkdirOptions = { recursive: true }
if (isDir) {
mkdirOptions.mode = procMode
}
await fs.mkdir(destDir, mkdirOptions)
if (isDir) return
const readStream = await promisify(this.zipfile.openReadStream.bind(this.zipfile))(entry)
if (symlink) {
const link = await getStream(readStream)
await fs.symlink(link, dest)
} else {
await pipeline(readStream, createWriteStream(dest, { mode: procMode }))
}
}
getExtractedMode (entryMode, isDir) {
let mode = entryMode
// Set defaults, if necessary
if (mode === 0) {
if (isDir) {
if (this.opts.defaultDirMode) {
mode = parseInt(this.opts.defaultDirMode, 10)
}
if (!mode) {
mode = 0o755
}
} else {
if (this.opts.defaultFileMode) {
mode = parseInt(this.opts.defaultFileMode, 10)
}
if (!mode) {
mode = 0o644
}
}
}
return mode
}
}
module.exports = async function (zipPath, opts) {
if (!path.isAbsolute(opts.dir)) {
throw new Error('Target directory is expected to be absolute')
}
await fs.mkdir(opts.dir, { recursive: true })
opts.dir = await fs.realpath(opts.dir)
return new Extractor(zipPath, opts).extract()
}

65
src/main/update/index.js Normal file

@ -0,0 +1,65 @@
const { app } = require('electron')
const fetch = require('electron-fetch').default
const semver = require('semver')
const util = require('util')
const path = require('path')
const fs = require('fs-extra')
const extract = require('../module/extract-zip')
const { version } = require('../../../package.json')
const { hash, sendMsg } = require('../utils')
const config = require('../config')
const i18n = require('../i18n')
const streamPipeline = util.promisify(require('stream').pipeline)
async function download(url, filePath) {
const response = await fetch(url)
if (!response.ok) throw new Error(`unexpected response ${response.statusText}`)
await streamPipeline(response.body, fs.createWriteStream(filePath))
}
const updateInfo = {
status: 'init'
}
const isDev = !app.isPackaged
const appPath = isDev ? path.resolve(__dirname, '../../', 'update-dev/app'): app.getAppPath()
const updatePath = isDev ? path.resolve(__dirname, '../../', 'update-dev/download') : path.resolve(appPath, '..', '..', 'update')
const update = async () => {
if (isDev) return
try {
const url = 'https://earthjasonlin.github.io/zzz-signal-search-export/update'
const res = await fetch(`${url}/manifest.json?t=${Math.floor(Date.now() / (1000 * 60 * 10))}`)
const data = await res.json()
if (!data.active) return
if (semver.gt(data.version, version) && semver.gte(version, data.from)) {
await fs.emptyDir(updatePath)
const filePath = path.join(updatePath, data.name)
if (!config.autoUpdate) {
sendMsg(data.version, 'NEW_VERSION')
return
}
updateInfo.status = 'downloading'
await download(`${url}/${data.name}`, filePath)
const buffer = await fs.readFile(filePath)
const sha256 = hash(buffer)
if (sha256 !== data.hash) return
const appPathTemp = path.join(updatePath, 'app')
await extract(filePath, { dir: appPathTemp })
updateInfo.status = 'moving'
await fs.emptyDir(appPath)
await fs.copy(appPathTemp, appPath)
updateInfo.status = 'finished'
sendMsg(i18n.log.autoUpdate.success, 'UPDATE_HINT')
}
} catch (e) {
updateInfo.status = 'failed'
sendMsg(e, 'ERROR')
}
}
const getUpdateInfo = () => updateInfo
setTimeout(update, 1000)
exports.getUpdateInfo = getUpdateInfo
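One small detail in the manifest request above: the `t` query parameter is a coarse cache-buster rather than a timestamp.

```js
// Number of whole 10-minute windows since the Unix epoch; the manifest URL therefore changes
// at most once every 10 minutes, which is enough to sidestep GitHub Pages response caching.
const t = Math.floor(Date.now() / (1000 * 60 * 10))
```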

View File

@ -142,12 +142,12 @@ const readJSON = async (dataPath, name) => {
}
const hash = (data, type = 'sha256') => {
const hmac = crypto.createHmac(type, 'hk4e')
const hmac = crypto.createHmac(type, 'nap')
hmac.update(data)
return hmac.digest('hex')
}
const scryptKey = crypto.scryptSync(userPath, 'hk4e', 24)
const scryptKey = crypto.scryptSync(userPath, 'nap', 24)
const cipherAes = (data) => {
const algorithm = 'aes-192-cbc'
const iv = Buffer.alloc(16, 0)
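The HMAC key change here ('hk4e' to 'nap') has to stay in sync with the key hard-coded in `.electron-vite/update.js`, because `update/index.js` compares the digest it computes locally against the `hash` field of the build-time manifest; a mismatch would make every downloaded update fail verification. A minimal illustration using the `hash` helper above:

```js
// Both build and runtime sides compute HMAC-SHA256 keyed with 'nap'.
// update/index.js then checks: if (sha256 !== data.hash) return   (download rejected)
const digest = hash(Buffer.from('contents of the downloaded app.zip'))   // hex digest string
```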

View File

@ -11,7 +11,8 @@
<template #dropdown>
<el-dropdown-menu>
<el-dropdown-item command="excel">{{ui.button.excel}}</el-dropdown-item>
<el-dropdown-item command="srgf-json">{{ui.button.srgf}}</el-dropdown-item>
<el-dropdown-item command="uigf-json">{{ui.button.uigf}}</el-dropdown-item>
<el-dropdown-item command="import-json" divided>{{ui.button.import}}</el-dropdown-item>
</el-dropdown-menu>
</template>
</el-dropdown>
@ -91,7 +92,7 @@ import Setting from './components/Setting.vue'
import gachaDetail from './gachaDetail'
import { version } from '../../package.json'
import gachaType from '../gachaType.json'
import { ElMessage } from 'element-plus'
import { ElMessage, ElMessageBox } from 'element-plus'
const state = reactive({
status: 'init',
@ -238,18 +239,61 @@ const saveExcel = async () => {
await ipcRenderer.invoke('SAVE_EXCEL')
}
const exportSRGFJSON = () => {
ipcRenderer.invoke('EXPORT_SRGF_JSON')
const exportUIGFJSON = () => {
let uidList = []
dataMap.value.forEach(item => {
uidList.push(item.uid)
})
ElMessageBox({
title: state.i18n.ui.uigf.title,
message: `
<div>
${uidList.map(uid => `
<div>
<input type="checkbox" id="${uid}" value="${uid}" />
<label for="${uid}">${uid}</label>
</div>
`).join('')}
</div>
`,
dangerouslyUseHTMLString: true,
showCancelButton: true,
confirmButtonText: state.i18n.ui.common.ok,
cancelButtonText: state.i18n.ui.common.cancel,
beforeClose: (action, instance, done) => {
if (action === 'confirm') {
const selected_uids = uidList.filter(uid => document.getElementById(uid).checked);
ipcRenderer.invoke('EXPORT_UIGF_JSON', selected_uids);
}
done();
}
}).then(() => {
}).catch(() => {
});
}
const importData = async () => {
state.status = 'loading'
const data = await ipcRenderer.invoke('IMPORT_UIGF_JSON')
if (data) {
state.dataMap = data.dataMap
state.current = data.current
state.status = 'loaded'
} else {
state.status = 'failed'
}
}
const exportCommand = (type) => {
if (type === 'excel') {
saveExcel()
} else if (type === 'srgf-json') {
exportSRGFJSON()
} else if (type === 'uigf-json') {
exportUIGFJSON()
} else if (type === 'import-json') {
importData()
}
}
const openCacheFolder = async () => {
await ipcRenderer.invoke('OPEN_CACHE_FOLDER')
}
@ -327,6 +371,11 @@ onMounted(async () => {
console.error(err)
})
ipcRenderer.on('UPDATE_HINT', (event, message) => {
state.log = message
state.status = 'updated'
})
ipcRenderer.on('AUTHKEY_TIMEOUT', (event, message) => {
state.authkeyTimeout = message
})

View File

@ -32,7 +32,7 @@ const props = defineProps({
const chart = ref(null);
const colors = ["#eeaa66", "#fac858", "#ee6666", "#5470c6", "#ba66ee", "#91cc75", "#73c0de"];
const colors = ["#fac858", "#fac858", "#ee6666", "#5470c6", "#5470c6", "#91cc75", "#73c0de"];
const parseData = (detail, type) => {
const text = props.i18n.ui.data;

View File

@ -23,6 +23,12 @@
<el-button type="primary" plain @click="state.showDataDialog = true">{{common.dataManage}}</el-button>
<p class="text-gray-400 text-xs m-1.5">{{text.dataManagerHint}}</p>
</el-form-item>
<el-form-item :label="text.autoUpdate">
<el-switch
@change="saveSetting"
v-model="settingForm.autoUpdate">
</el-switch>
</el-form-item>
<el-form-item :label="text.fetchFullHistory">
<el-switch
@change="saveSetting"
@ -41,8 +47,10 @@
</el-form-item>
</el-form>
<h3 class="text-lg my-4">{{about.title}}</h3>
<p class="text-gray-600 text-xs mt-1">{{text.idVersion}} {{idJson.version}}</p>
<p class="text-gray-600 text-xs mt-1">{{about.license}}</p>
<p class="text-gray-600 text-xs mt-1 pb-6">Github: <a @click="openGithub" class="cursor-pointer text-blue-400">https://github.com/earthjasonlin/zzz-signal-search-export</a></p>
<p class="text-gray-600 text-xs mt-1">GitHub: <a @click="openGithub" class="cursor-pointer text-blue-400">https://github.com/earthjasonlin/zzz-signal-search-export</a></p>
<p class="text-gray-600 text-xs mt-1 pb-6">UIGF: <a @click="openUIGF" class="cursor-pointer text-blue-400">https://uigf.org/</a></p>
<el-dialog v-model="state.showDataDialog" :title="common.dataManage" width="90%">
<div class="">
<el-table :data="gachaDataInfo" border stripe>
@ -75,6 +83,7 @@
<script setup>
const { ipcRenderer, shell } = require('electron')
import idJson from '../../idJson.json'
import { reactive, onMounted, computed } from 'vue'
const emit = defineEmits(['close', 'changeLang', 'refreshData'])
@ -92,6 +101,7 @@ const settingForm = reactive({
lang: 'zh-cn',
logType: 1,
proxyMode: true,
autoUpdate: true,
fetchFullHistory: false,
})
@ -105,7 +115,7 @@ const text = computed(() => props.i18n.ui.setting)
const about = computed(() => props.i18n.ui.about)
const saveSetting = async () => {
const keys = ['lang', 'logType', 'proxyMode', 'fetchFullHistory']
const keys = ['lang', 'logType', 'proxyMode', 'autoUpdate', 'fetchFullHistory']
for (let key of keys) {
await ipcRenderer.invoke('SAVE_CONFIG', [key, settingForm[key]])
}
@ -123,6 +133,7 @@ const disableProxy = async () => {
}
const openGithub = () => shell.openExternal('https://github.com/earthjasonlin/zzz-signal-search-export')
const openUIGF = () => shell.openExternal('https://uigf.org/')
const openLink = (link) => shell.openExternal(link)
const deleteData = async (uid, action) => {

View File

@ -54,7 +54,7 @@ const gachaDetail = (data) => {
detail.ssrPos.push([name, index + 1 - lastSSR, time, key])
lastSSR = index + 1
detail.count4++
detail.countMio++
detail.countMio = 0
if (isWeapon(type)) {
detail.count4w++
itemCount(detail.weapon4, name)
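This one-line change is the fix referenced in commit 2adf56d062: `countMio` tracks the running pull count toward the next S-rank, so it should restart at zero when an S-rank (`rank_type === '4'`) drops rather than keep incrementing. The sketch below is an interpretation of the surrounding counter logic, not code copied from the file.

```js
// Interpretation of the fix above: countMio counts pulls since the last S-rank
if (log.rank_type === '4') {
  detail.countMio = 0     // S-rank obtained, pity counter restarts
} else {
  detail.countMio++       // still counting toward the next S-rank
}
```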

81
tools/getIdMap.py Normal file

@ -0,0 +1,81 @@
# pylint: disable=C0116, C0103, C0201
"""Download and process data from the Hakushin API"""
import json
import requests
from opencc import OpenCC
# 初始化 OpenCC 转换器
cc = OpenCC('s2t')
# 获取 JSON 数据
weapon_url = 'https://api.hakush.in/zzz/data/weapon.json'
character_url = 'https://api.hakush.in/zzz/data/character.json'
bangboo_url = 'https://api.hakush.in/zzz/data/bangboo.json'
version_url = 'https://api.hakush.in/zzz/new.json'
# 语言映射配置
language_map = {
"zh-cn": "CHS",
"zh-tw": "CHS", # 简体转繁体
"en-us": "EN",
"ja-jp": "JA",
"ko-kr": "KO"
}
# 类型映射配置
type_map = {
"weapon": {"zh-cn": "音擎", "zh-tw": "音擎", "en-us": "W-Engines", "ja-jp": "音動機", "ko-kr": "W-엔진"},
"character": {"zh-cn": "代理人", "zh-tw": "代理人", "en-us": "Agents",
"ja-jp": "エージェント", "ko-kr": "에이전트"},
"bangboo": {"zh-cn": "邦布", "zh-tw": "邦布", "en-us": "Bangboo",
"ja-jp": "ボンプ", "ko-kr": "「Bangboo」"}
}
def fetch_json(url):
response = requests.get(url, timeout=10)
response.raise_for_status()
return response.json()
def transform_data(data, item_type):
transformed = {lang: {} for lang in language_map.keys()}
for id_, item in data.items():
for lang, key in language_map.items():
name = item[key] if lang != 'zh-tw' else cc.convert(item['CHS'])
transformed[lang][id_] = {
"name": name,
"item_type": type_map[item_type][lang],
"rank_type": item['rank']
}
return transformed
def main():
try:
weapon_data = fetch_json(weapon_url)
character_data = fetch_json(character_url)
bangboo_data = fetch_json(bangboo_url)
version_data = fetch_json(version_url)
transformed_data = {lang: {} for lang in language_map.keys()}
transformed_data["version"] = version_data["version"]
weapon_transformed = transform_data(weapon_data, "weapon")
character_transformed = transform_data(character_data, "character")
bangboo_transformed = transform_data(bangboo_data, "bangboo")
for lang in language_map.keys():
transformed_data[lang].update(weapon_transformed[lang])
transformed_data[lang].update(character_transformed[lang])
transformed_data[lang].update(bangboo_transformed[lang])
with open('./src/idJson.json', 'w', encoding='utf-8') as f:
json.dump(transformed_data, f, ensure_ascii=False, indent=2)
print("Data successfully transformed and saved")
except requests.RequestException as e:
print(f"Error fetching data: {e}")
if __name__ == "__main__":
main()
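This script produces `src/idJson.json`, the ID table consumed by `UIGFJson.js` and displayed in `Setting.vue` elsewhere in this diff. Its rough shape is sketched below; the item ID, names and version string are illustrative placeholders.

```js
// Rough shape of the generated src/idJson.json (entries illustrative)
const idJsonShape = {
  version: '<version string from https://api.hakush.in/zzz/new.json>',
  'zh-cn': { '12345': { name: '...', item_type: '音擎', rank_type: 4 } },
  'zh-tw': { '12345': { name: '...', item_type: '音擎', rank_type: 4 } },   // names converted via OpenCC s2t
  'en-us': { '12345': { name: '...', item_type: 'W-Engines', rank_type: 4 } },
  'ja-jp': { /* same layout */ },
  'ko-kr': { /* same layout */ }
}
```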

1218
yarn.lock

File diff suppressed because it is too large