Mirror of https://github.com/earthjasonlin/zzz-signal-search-export.git, synced 2025-04-21 16:00:17 +08:00

Commit: Departure commit
89 src/main/UIGFJson.js Normal file
@@ -0,0 +1,89 @@
const { app, ipcMain, dialog } = require('electron')
const fs = require('fs-extra')
const path = require('path')
const getData = require('./getData').getData
const { version } = require('../../package.json')

const getTimeString = () => {
  return new Date().toLocaleString('sv').replace(/[- :]/g, '').slice(0, -2)
}

const formatDate = (date) => {
  let y = date.getFullYear()
  let m = `${date.getMonth() + 1}`.padStart(2, '0')
  let d = `${date.getDate()}`.padStart(2, '0')
  return `${y}-${m}-${d} ${date.toLocaleString('zh-cn', { hour12: false }).slice(-8)}`
}

const fakeIdFn = () => {
  let id = 1000000000000000000n
  return () => {
    id = id + 1n
    return id.toString()
  }
}

const shouldBeString = (value) => {
  if (typeof value !== 'string') {
    return ''
  }
  return value
}

const start = async () => {
  const { dataMap, current } = await getData()
  const data = dataMap.get(current)
  if (!data.result.size) {
    throw new Error('数据为空')
  }
  const fakeId = fakeIdFn()
  const result = {
    info: {
      uid: data.uid,
      lang: data.lang,
      export_time: formatDate(new Date()),
      export_timestamp: Date.now(),
      export_app: 'genshin-wish-export',
      export_app_version: `v${version}`,
      uigf_version: 'v2.2'
    },
    list: []
  }
  const listTemp = []
  for (let [type, arr] of data.result) {
    arr.forEach(item => {
      listTemp.push({
        gacha_type: shouldBeString(item[4]) || type,
        time: item[0],
        timestamp: new Date(item[0]).getTime(),
        name: item[1],
        item_type: item[2],
        rank_type: `${item[3]}`,
        id: shouldBeString(item[5]) || '',
        uigf_gacha_type: type
      })
    })
  }
  listTemp.sort((a, b) => a.timestamp - b.timestamp)
  listTemp.forEach(item => {
    delete item.timestamp
    result.list.push({
      ...item,
      id: item.id || fakeId()
    })
  })
  const filePath = dialog.showSaveDialogSync({
    defaultPath: path.join(app.getPath('downloads'), `UIGF_${data.uid}_${getTimeString()}`),
    filters: [
      { name: 'JSON文件', extensions: ['json'] }
    ]
  })
  if (filePath) {
    await fs.ensureFile(filePath)
    await fs.writeFile(filePath, JSON.stringify(result))
  }
}

ipcMain.handle('EXPORT_UIGF_JSON', async () => {
  await start()
})
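Below is a small illustrative sketch (not part of the commit) of how the fakeIdFn generator above backfills missing ids after the timestamp sort; the record values are made up.

// Illustrative only: record values are invented; fakeIdFn is copied from above.
const fakeIdFn = () => {
  let id = 1000000000000000000n
  return () => {
    id = id + 1n
    return id.toString()
  }
}
const fakeId = fakeIdFn()
const records = [
  { time: '2023-05-01 12:00:00', name: 'ExampleItem', id: '' },
  { time: '2023-05-02 08:30:00', name: 'AnotherItem', id: '1716842880000000001' }
]
records.forEach(r => { r.id = r.id || fakeId() })
console.log(records.map(r => r.id))
// => [ '1000000000000000001', '1716842880000000001' ]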
77 src/main/config.js Normal file
@@ -0,0 +1,77 @@
const { readJSON, saveJSON, decipherAes, cipherAes, detectLocale } = require('./utils')

const config = {
  urls: [],
  logType: 0,
  lang: detectLocale(),
  current: 0,
  proxyPort: 8325,
  proxyMode: false,
  autoUpdate: true,
  fetchFullHistory: false,
  hideNovice: false
}

const getLocalConfig = async () => {
  const localConfig = await readJSON('config.json')
  if (!localConfig) return
  const configTemp = {}
  for (let key in localConfig) {
    if (typeof config[key] !== 'undefined') {
      configTemp[key] = localConfig[key]
    }
  }
  configTemp.urls.forEach(item => {
    try {
      item[1] = decipherAes(item[1])
    } catch (e) {
      item[1] = ''
    }
  })
  Object.assign(config, configTemp)
}

getLocalConfig()

let urlsMap = null
const setConfig = (key, value) => {
  Reflect.set(config, key, value)
}

const saveConfig = async () => {
  let configTemp = config
  if (urlsMap) {
    const urls = [...urlsMap]
    urls.forEach(item => {
      try {
        item[1] = cipherAes(item[1])
      } catch (e) {
        item[1] = ''
      }
    })
    configTemp = Object.assign({}, config, { urls })
  }
  await saveJSON('config.json', configTemp)
}

const getPlainConfig = () => config

const configProxy = new Proxy(config, {
  get: function (obj, prop) {
    if (prop === 'urls') {
      if (!urlsMap) {
        urlsMap = new Map(obj[prop])
      }
      return urlsMap
    } else if (prop === 'set') {
      return setConfig
    } else if (prop === 'save') {
      return saveConfig
    } else if (prop === 'value') {
      return getPlainConfig
    }
    return obj[prop]
  }
})

module.exports = configProxy
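A short usage sketch (not part of the commit) of the proxy exported above, as seen from another main-process module; the UID and URL are placeholders.

// Hypothetical usage of the config proxy exported above.
const config = require('./config')

config.urls.set('100000001', 'https://example.invalid/getGachaLog?authkey=***') // 'urls' resolves to a Map
config.set('lang', 'en-us')                                                     // 'set' resolves to setConfig
config.save()                                                                   // 'save' returns a Promise; urls are AES-encrypted before hitting config.json
console.log(config.value().lang)                                                // 'value' returns the plain backing object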
167 src/main/excel.js Normal file
@@ -0,0 +1,167 @@
const ExcelJS = require('./module/exceljs.min.js')
const getData = require('./getData').getData
const { app, ipcMain, dialog } = require('electron')
const fs = require('fs-extra')
const path = require('path')
const i18n = require('./i18n')
const cloneDeep = require('lodash-es/cloneDeep').default

function pad(num) {
  return `${num}`.padStart(2, "0");
}

function getTimeString() {
  const d = new Date();
  const YYYY = d.getFullYear();
  const MM = pad(d.getMonth() + 1);
  const DD = pad(d.getDate());
  const HH = pad(d.getHours());
  const mm = pad(d.getMinutes());
  const ss = pad(d.getSeconds());
  return `${YYYY}${MM}${DD}_${HH}${mm}${ss}`;
}

const addRawSheet = (workbook, data) => {
  const sheet = workbook.addWorksheet('rawData', { views: [{ state: 'frozen', ySplit: 1 }] })
  const excelKeys = ['gacha_id', 'gacha_type', 'id', 'item_id', 'item_type', 'lang', 'name', 'rank_type', 'time', 'uid']
  sheet.columns = excelKeys.map((key, index) => {
    return {
      header: key,
      key,
    }
  })
  const temp = []
  for (let [key, value] of data.result) {
    for (let log of value) {
      const arr = []
      arr.push(log.gacha_id)
      arr.push(log.gacha_type)
      arr.push(log.id)
      arr.push(log.item_id)
      arr.push(log.item_type)
      arr.push(data.lang)
      arr.push(log.name)
      arr.push(log.rank_type)
      arr.push(log.time)
      arr.push(data.uid)
      temp.push(arr)
    }
  }
  sheet.addRows(temp)
}

const start = async () => {
  const { header, customFont, filePrefix, fileType, wish2 } = i18n.excel
  const { dataMap, current } = await getData()
  const data = dataMap.get(current)
  // https://github.com/sunfkny/genshin-gacha-export-js/blob/main/index.js
  const workbook = new ExcelJS.Workbook()
  for (let [key, value] of data.result) {
    const name = data.typeMap.get(key)
    const sheet = workbook.addWorksheet(name, { views: [{ state: 'frozen', ySplit: 1 }] })
    let width = [24, 14, 8, 8, 8, 8, 8]
    if (!data.lang.includes('zh-')) {
      width = [24, 32, 16, 12, 12, 12, 8]
    }
    const excelKeys = ['time', 'name', 'type', 'rank', 'total', 'pity', 'remark']
    sheet.columns = excelKeys.map((key, index) => {
      return {
        header: header[key],
        key,
        width: width[index]
      }
    })
    // get gacha logs
    const logs = value
    let total = 0
    let pity = 0
    const temp = []
    for (let log of logs) {
      const arr = []
      total += 1
      pity += 1
      arr.push(log.time)
      arr.push(log.name)
      arr.push(log.item_type)
      arr.push(log.rank_type)
      arr.push(total)
      arr.push(pity)
      temp.push(arr)
      if (log.rank_type === 5) {
        pity = 0
      }
      // if (key === '301') {
      //   if (log.gacha_type === '400') {
      //     log.push(wish2)
      //   }
      // }
    }

    sheet.addRows(temp)
    // set xlsx header style
    ;(["A", "B", "C", "D", "E", "F", "G"]).forEach((v) => {
      sheet.getCell(`${v}1`).border = {
        top: { style: 'thin', color: { argb: 'ffc4c2bf' } },
        left: { style: 'thin', color: { argb: 'ffc4c2bf' } },
        bottom: { style: 'thin', color: { argb: 'ffc4c2bf' } },
        right: { style: 'thin', color: { argb: 'ffc4c2bf' } }
      }
      sheet.getCell(`${v}1`).fill = {
        type: 'pattern',
        pattern: 'solid',
        fgColor: { argb: 'ffdbd7d3' },
      }
      sheet.getCell(`${v}1`).font = {
        name: customFont,
        color: { argb: "ff757575" },
        bold: true
      }

    })
    // set xlsx cell style
    logs.forEach((v, i) => {
      ;(["A", "B", "C", "D", "E", "F", "G"]).forEach((c) => {
        sheet.getCell(`${c}${i + 2}`).border = {
          top: { style: 'thin', color: { argb: 'ffc4c2bf' } },
          left: { style: 'thin', color: { argb: 'ffc4c2bf' } },
          bottom: { style: 'thin', color: { argb: 'ffc4c2bf' } },
          right: { style: 'thin', color: { argb: 'ffc4c2bf' } }
        }
        sheet.getCell(`${c}${i + 2}`).fill = {
          type: 'pattern',
          pattern: 'solid',
          fgColor: { argb: 'ffebebeb' },
        }
        // rarity-based font color
        const rankColor = {
          3: "ff8e8e8e",
          4: "ffa256e1",
          5: "ffbd6932",
        }
        sheet.getCell(`${c}${i + 2}`).font = {
          name: customFont,
          color: { argb: rankColor[v.rank_type] },
          bold: v.rank_type != "3"
        }
      })
    })
  }

  addRawSheet(workbook, data)

  const buffer = await workbook.xlsx.writeBuffer()
  const filePath = dialog.showSaveDialogSync({
    defaultPath: path.join(app.getPath('downloads'), `${filePrefix}_${getTimeString()}`),
    filters: [
      { name: fileType, extensions: ['xlsx'] }
    ]
  })
  if (filePath) {
    await fs.ensureFile(filePath)
    await fs.writeFile(filePath, buffer)
  }
}

ipcMain.handle('SAVE_EXCEL', async () => {
  await start()
})
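A toy walk-through (not part of the commit) of the total/pity bookkeeping in the per-banner loop above; the log entries are invented.

// Invented data, illustrative only; mirrors the counter logic above.
const logs = [{ rank_type: 3 }, { rank_type: 4 }, { rank_type: 5 }, { rank_type: 3 }]
let total = 0
let pity = 0
const rows = logs.map(log => {
  total += 1
  pity += 1
  const row = [total, pity]
  if (log.rank_type === 5) pity = 0 // pity column resets after a 5-star pull
  return row
})
console.log(rows) // => [ [1,1], [2,2], [3,3], [4,1] ]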
495 src/main/getData.js Normal file
@@ -0,0 +1,495 @@
const fs = require('fs-extra')
const util = require('util')
const path = require('path')
const { URL } = require('url')
const { app, ipcMain, shell } = require('electron')
const { sleep, request, sendMsg, readJSON, saveJSON, detectLocale, userDataPath, userPath, localIp, langMap } = require('./utils')
const config = require('./config')
const i18n = require('./i18n')
const { enableProxy, disableProxy } = require('./module/system-proxy')
const mitmproxy = require('./module/node-mitmproxy')
const { mergeData } = require('./utils/mergeData')

const dataMap = new Map()
const order = ['11', '12', '1', '2']
let apiDomain = 'https://api-takumi.mihoyo.com'

const saveData = async (data, url) => {
  const obj = Object.assign({}, data)
  obj.result = [...obj.result]
  obj.typeMap = [...obj.typeMap]
  config.urls.set(data.uid, url)
  await config.save()
  await saveJSON(`gacha-list-${data.uid}.json`, obj)
}

const defaultTypeMap = new Map([
  ['11', '角色活动跃迁'],
  ['12', '光锥活动跃迁'],
  ['1', '群星跃迁'],
  ['2', '始发跃迁']
])

let localDataReaded = false
const readdir = util.promisify(fs.readdir)
const readData = async () => {
  if (localDataReaded) return
  localDataReaded = true
  await fs.ensureDir(userDataPath)
  const files = await readdir(userDataPath)
  for (let name of files) {
    if (/^gacha-list-\d+\.json$/.test(name)) {
      try {
        const data = await readJSON(name)
        data.typeMap = new Map(data.typeMap) || defaultTypeMap
        data.result = new Map(data.result)
        if (data.uid) {
          dataMap.set(data.uid, data)
        }
      } catch (e) {
        sendMsg(e, 'ERROR')
      }
    }
  }
  if ((!config.current && dataMap.size) || (config.current && dataMap.size && !dataMap.has(config.current))) {
    await changeCurrent(dataMap.keys().next().value)
  }
}

const changeCurrent = async (uid) => {
  config.current = uid
  await config.save()
}

const detectGameLocale = async (userPath) => {
  let list = []
  const lang = app.getLocale()
  const arr = ['/miHoYo/崩坏:星穹铁道/', '/Cognosphere/Star Rail/']
  arr.forEach(str => {
    try {
      const pathname = path.join(userPath, '/AppData/LocalLow/', str, 'Player-prev.log')
      fs.accessSync(pathname, fs.constants.F_OK)
      list.push(pathname)
    } catch (e) {}
  })
  if (config.logType) {
    if (config.logType === 2) {
      list.reverse()
    }
    list = list.slice(0, 1)
  } else if (lang !== 'zh-CN') {
    list.reverse()
  }
  return list
}

const getLatestUrl = (list) => {
  let result = list[list.length - 1]
  let time = 0
  for (let i = 0; i < list.length; i++) {
    const tsMch = list[i].match(/timestamp=(\d+)/)
    if (tsMch?.[1]) {
      const ts = parseInt(tsMch[1])
      if (time < parseInt(tsMch[1])) {
        time = ts
        result = list[i]
      }
    }
  }
  return result
}

let cacheFolder = null
const readLog = async () => {
  const text = i18n.log
  try {
    let userPath
    if (!process.env.WINEPREFIX) {
      userPath = app.getPath('home')
    } else {
      userPath = path.join(process.env.WINEPREFIX, 'drive_c/users', process.env.USER)
    }
    const logPaths = await detectGameLocale(userPath)
    if (!logPaths.length) {
      sendMsg(text.file.notFound)
      return false
    }
    const promises = logPaths.map(async logpath => {
      const logText = await fs.readFile(logpath, 'utf8')
      const gamePathMch = logText.match(/\w:\/.+(Star\sRail\/Game\/StarRail_Data)/)
      if (gamePathMch) {
        const cacheText = await fs.readFile(path.join(gamePathMch[0], '/webCaches/Cache/Cache_Data/data_2'), 'utf8')
        const urlMch = cacheText.match(/https.+?&auth_appid=webview_gacha&.+?authkey=.+?&game_biz=hkrpg_.+/g)
        if (urlMch) {
          cacheFolder = path.join(gamePathMch[0], '/webCaches/Cache/')
          return getLatestUrl(urlMch)
        }
      }
    })
    const result = await Promise.all(promises)
    for (let url of result) {
      if (url) {
        return url
      }
    }
    sendMsg(text.url.notFound)
    return false
  } catch (e) {
    sendMsg(text.file.readFailed)
    return false
  }
}

const getGachaLog = async ({ key, page, name, retryCount, url, endId }) => {
  const text = i18n.log
  try {
    const res = await request(`${url}&gacha_type=${key}&page=${page}&size=${20}${endId ? '&end_id=' + endId : ''}`)
    return res.data.list
  } catch (e) {
    if (retryCount) {
      sendMsg(i18n.parse(text.fetch.retry, { name, page, count: 6 - retryCount }))
      await sleep(5)
      retryCount--
      return await getGachaLog({ key, page, name, retryCount, url, endId })
    } else {
      sendMsg(i18n.parse(text.fetch.retryFailed, { name, page }))
      throw e
    }
  }
}

const getGachaLogs = async ({ name, key }, queryString) => {
  const text = i18n.log
  let page = 1
  let list = []
  let res = []
  let uid = ''
  let region = ''
  let region_time_zone = ''
  let endId = '0'
  const url = `${apiDomain}/common/gacha_record/api/getGachaLog?${queryString}`
  do {
    if (page % 10 === 0) {
      sendMsg(i18n.parse(text.fetch.interval, { name, page }))
      await sleep(1)
    }
    sendMsg(i18n.parse(text.fetch.current, { name, page }))
    res = await getGachaLog({ key, page, name, url, endId, retryCount: 5 })
    await sleep(0.3)
    if (!uid && res.length) {
      uid = res[0].uid
    }
    if (!region) {
      region = res.region
    }
    if (!region_time_zone) {
      region_time_zone = res.region_time_zone
    }
    list.push(...res)
    page += 1

    if (res.length) {
      endId = res[res.length - 1].id
    }

    if (!config.fetchFullHistory && res.length && uid && dataMap.has(uid)) {
      const result = dataMap.get(uid).result
      if (result.has(key)) {
        const arr = result.get(key)
        if (arr.length) {
          const localLatestId = arr[arr.length - 1].id
          if (localLatestId) {
            let shouldBreak = false
            res.forEach(item => {
              if (item.id === localLatestId) {
                shouldBreak = true
              }
            })
            if (shouldBreak) {
              break
            }
          }
        }
      }
    }
  } while (res.length > 0)
  return { list, uid, region, region_time_zone }
}

const checkResStatus = (res) => {
  const text = i18n.log
  if (res.retcode !== 0) {
    let message = res.message
    if (res.message === 'authkey timeout') {
      message = text.fetch.authTimeout
      sendMsg(true, 'AUTHKEY_TIMEOUT')
    }
    sendMsg(message)
    throw new Error(message)
  }
  sendMsg(false, 'AUTHKEY_TIMEOUT')
  return res
}

const tryGetUid = async (queryString) => {
  const url = `${apiDomain}/common/gacha_record/api/getGachaLog?${queryString}`
  try {
    for (let [key] of defaultTypeMap) {
      const res = await request(`${url}&gacha_type=${key}&page=1&size=6`)
      checkResStatus(res)
      if (res.data.list && res.data.list.length) {
        return res.data.list[0].uid
      }
    }
  } catch (e) {}
  return config.current
}

const gachaTypeMap = new Map(JSON.parse('[["de-de",[{"key":"11","name":"Figuren-Aktionswarp"},{"key":"12","name":"Lichtkegel-Aktionswarp"},{"key":"1","name":"Stellarwarp"},{"key":"2","name":"Startwarp"}]],["ru-ru",[{"key":"11","name":"Прыжок события: Персонаж"},{"key":"12","name":"Прыжок события: Световой конус"},{"key":"1","name":"Звёздный Прыжок"},{"key":"2","name":"Отправной Прыжок"}]],["th-th",[{"key":"11","name":"กิจกรรมวาร์ปตัวละคร"},{"key":"12","name":"กิจกรรมวาร์ป Light Cone"},{"key":"1","name":"วาร์ปสู่ดวงดาว"},{"key":"2","name":"ก้าวแรกแห่งการวาร์ป"}]],["zh-cn",[{"key":"11","name":"角色活动跃迁"},{"key":"12","name":"光锥活动跃迁"},{"key":"1","name":"群星跃迁"},{"key":"2","name":"始发跃迁"}]],["zh-tw",[{"key":"11","name":"角色活動躍遷"},{"key":"12","name":"光錐活動躍遷"},{"key":"1","name":"群星躍遷"},{"key":"2","name":"始發躍遷"}]],["en-us",[{"key":"11","name":"Character Event Warp"},{"key":"12","name":"Light Cone Event Warp"},{"key":"1","name":"Stellar Warp"},{"key":"2","name":"Departure Warp"}]],["es-es",[{"key":"11","name":"Salto de evento de personaje"},{"key":"12","name":"Salto de evento de cono de luz"},{"key":"1","name":"Salto estelar"},{"key":"2","name":"Salto de partida"}]],["fr-fr",[{"key":"11","name":"Saut hyperespace événement de personnage"},{"key":"12","name":"Saut hyperespace événement de cônes de lumière"},{"key":"1","name":"Saut stellaire"},{"key":"2","name":"Saut hyperespace de départ"}]],["id-id",[{"key":"11","name":"Event Warp Karakter"},{"key":"12","name":"Event Warp Light Cone"},{"key":"1","name":"Warp Bintang-Bintang"},{"key":"2","name":"Warp Keberangkatan"}]],["ja-jp",[{"key":"11","name":"イベント跳躍・キャラクター"},{"key":"12","name":"イベント跳躍・光円錐"},{"key":"1","name":"群星跳躍"},{"key":"2","name":"始発跳躍"}]],["ko-kr",[{"key":"11","name":"캐릭터 이벤트 워프"},{"key":"12","name":"광추 이벤트 워프"},{"key":"1","name":"뭇별의 워프"},{"key":"2","name":"초행길 워프"}]],["pt-pt",[{"key":"11","name":"Salto Hiperespacial de Evento de Personagem"},{"key":"12","name":"Salto Hiperespacial de Evento de Cone de Luz"},{"key":"1","name":"Salto Hiperespacial Estelar"},{"key":"2","name":"Salto Hiperespacial de Novatos"}]],["vi-vn",[{"key":"11","name":"Bước Nhảy Sự Kiện Nhân Vật"},{"key":"12","name":"Bước Nhảy Sự Kiện Nón Ánh Sáng"},{"key":"1","name":"Bước Nhảy Chòm Sao"},{"key":"2","name":"Bước Nhảy Đầu Tiên"}]]]'))
const getGachaType = (lang) => {
  const locale = detectLocale(lang)
  return gachaTypeMap.get(locale || lang)
}

const fixAuthkey = (url) => {
  const mr = url.match(/authkey=([^&]+)/)
  if (mr && mr[1] && mr[1].includes('=') && !mr[1].includes('%')) {
    return url.replace(/authkey=([^&]+)/, `authkey=${encodeURIComponent(mr[1])}`)
  }
  return url
}

const getQuerystring = (url) => {
  const text = i18n.log
  const { searchParams, host } = new URL(fixAuthkey(url))
  if (host.includes('webstatic-sea') || host.includes('hkrpg-api-os') || host.includes("api-os-takumi")) {
    apiDomain = 'https://api-os-takumi.mihoyo.com'
  } else {
    apiDomain = 'https://api-takumi.mihoyo.com'
  }
  const authkey = searchParams.get('authkey')
  if (!authkey) {
    sendMsg(text.url.lackAuth)
    return false
  }
  searchParams.delete('page')
  searchParams.delete('size')
  searchParams.delete('gacha_type')
  searchParams.delete('end_id')
  return searchParams
}

const proxyServer = (port) => {
  return new Promise((rev) => {
    mitmproxy.createProxy({
      sslConnectInterceptor: (req, cltSocket, head) => {
        if (/webstatic([^\.]{2,10})?\.(mihoyo|hoyoverse)\.com/.test(req.url)) {
          return true
        }
      },
      requestInterceptor: (rOptions, req, res, ssl, next) => {
        next()
        if (/webstatic([^\.]{2,10})?\.(mihoyo|hoyoverse)\.com/.test(rOptions.hostname)) {
          if (/authkey=[^&]+/.test(rOptions.path)) {
            rev(`${rOptions.protocol}//${rOptions.hostname}${rOptions.path}`)
          }
        }
      },
      responseInterceptor: (req, res, proxyReq, proxyRes, ssl, next) => {
        next()
      },
      getPath: () => path.join(userPath, 'node-mitmproxy'),
      port
    })
  })
}

let proxyServerPromise
const useProxy = async () => {
  const text = i18n.log
  const ip = localIp()
  const port = config.proxyPort
  sendMsg(i18n.parse(text.proxy.hint, { ip, port }))
  await enableProxy('127.0.0.1', port)
  if (!proxyServerPromise) {
    proxyServerPromise = proxyServer(port)
  }
  const url = await proxyServerPromise
  await disableProxy()
  return url
}

const getUrlFromConfig = () => {
  if (config.urls.size) {
    if (config.current && config.urls.has(config.current)) {
      const url = config.urls.get(config.current)
      return url
    }
  }
}

const tryRequest = async (url, retry = false) => {
  const queryString = getQuerystring(url)
  if (!queryString) return false
  const gachaTypeUrl = `${apiDomain}/common/gacha_record/api/getGachaLog?${queryString}&page=1&size=5&gacha_type=1&end_id=0`
  try {
    const res = await request(gachaTypeUrl)
    if (res.retcode !== 0) {
      return false
    }
    return true
  } catch (e) {
    if (e.code === 'ERR_PROXY_CONNECTION_FAILED' && !retry) {
      await disableProxy()
      return await tryRequest(url, true)
    }
    sendMsg(e.message.replace(url, '***'), 'ERROR')
    throw e
  }
}

const getUrl = async () => {
  let url = await readLog()
  if (!url && config.proxyMode) {
    url = await useProxy()
  } else if (url) {
    const result = await tryRequest(url)
    if (!result && config.proxyMode) {
      url = await useProxy()
    }
  }
  return url
}

const fetchData = async (urlOverride) => {
  const text = i18n.log
  await readData()
  let url = urlOverride
  if (!url) {
    url = await getUrl()
  }
  if (!url) {
    const message = text.url.notFound2
    sendMsg(message)
    throw new Error(message)
  }
  const searchParams = getQuerystring(url)
  if (!searchParams) {
    const message = text.url.incorrect
    sendMsg(message)
    throw new Error(message)
  }
  let queryString = searchParams.toString()
  const vUid = await tryGetUid(queryString)
  const localLang = dataMap.has(vUid) ? dataMap.get(vUid).lang : ''
  if (localLang) {
    searchParams.set('lang', localLang)
  }
  queryString = searchParams.toString()
  const gachaType = await getGachaType(searchParams.get('lang'))

  const result = new Map()
  const typeMap = new Map()
  const lang = searchParams.get('lang')
  let originUid = ''
  let originRegion = ''
  let originTimeZone = ''
  for (const type of gachaType) {
    const { list, uid, region, region_time_zone } = await getGachaLogs(type, queryString)
    const logs = list.map((item) => {
      const { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type } = item
      return { id, item_id, item_type, name, rank_type, time, gacha_id, gacha_type }
    })
    logs.reverse()
    typeMap.set(type.key, type.name)
    result.set(type.key, logs)
    if (!originUid) {
      originUid = uid
    }
    if (!originRegion) {
      originRegion = region
    }
    if (!originTimeZone) {
      originTimeZone = region_time_zone
    }
  }
  const data = { result, time: Date.now(), typeMap, uid: originUid, lang, region: originRegion, region_time_zone: originTimeZone }
  const localData = dataMap.get(originUid)
  const mergedResult = mergeData(localData, data)
  data.result = mergedResult
  dataMap.set(originUid, data)
  await changeCurrent(originUid)
  await saveData(data, url)
}

let proxyStarted = false
const fetchDataByProxy = async () => {
  if (proxyStarted) return
  proxyStarted = true
  const url = await useProxy()
  await fetchData(url)
}

ipcMain.handle('FETCH_DATA', async (event, param) => {
  try {
    if (param === 'proxy') {
      await fetchDataByProxy()
    } else {
      await fetchData(param)
    }
    return {
      dataMap,
      current: config.current
    }
  } catch (e) {
    sendMsg(e, 'ERROR')
    console.error(e)
  }
  return false
})

ipcMain.handle('READ_DATA', async () => {
  await readData()
  return {
    dataMap,
    current: config.current
  }
})

ipcMain.handle('CHANGE_UID', (event, uid) => {
  config.current = uid
})

ipcMain.handle('GET_CONFIG', () => {
  return config.value()
})

ipcMain.handle('LANG_MAP', () => {
  return langMap
})

ipcMain.handle('SAVE_CONFIG', (event, [key, value]) => {
  config[key] = value
  config.save()
})

ipcMain.handle('DISABLE_PROXY', async () => {
  await disableProxy()
})

ipcMain.handle('I18N_DATA', () => {
  return i18n.data
})

ipcMain.handle('OPEN_CACHE_FOLDER', () => {
  if (cacheFolder) {
    shell.openPath(cacheFolder)
  }
})

exports.getData = () => {
  return {
    dataMap,
    current: config.current
  }
}
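An illustrative note (not part of the commit) on the two URL helpers above: fixAuthkey re-encodes an authkey that still contains raw '=' characters, and getLatestUrl picks the cache entry with the largest timestamp= value. The URLs below are fabricated placeholders.

// Illustrative only: fixAuthkey is copied from above, URLs are made up.
const fixAuthkey = (url) => {
  const mr = url.match(/authkey=([^&]+)/)
  if (mr && mr[1] && mr[1].includes('=') && !mr[1].includes('%')) {
    return url.replace(/authkey=([^&]+)/, `authkey=${encodeURIComponent(mr[1])}`)
  }
  return url
}

console.log(fixAuthkey('https://example.invalid/api?authkey=abc+def==&game_biz=hkrpg_cn'))
// => https://example.invalid/api?authkey=abc%2Bdef%3D%3D&game_biz=hkrpg_cn

const cachedUrls = [
  'https://example.invalid/a?authkey=x&timestamp=1700000000',
  'https://example.invalid/b?authkey=y&timestamp=1700009999'
]
// getLatestUrl(cachedUrls) would return the second entry (largest timestamp=)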
96 src/main/i18n.js Normal file
@@ -0,0 +1,96 @@
const raw = {
  'zh-cn': require('../i18n/简体中文.json'),
  'zh-tw': require('../i18n/繁體中文.json'),
  'de-de': require('../i18n/Deutsch.json'),
  'en-us': require('../i18n/English.json'),
  'es-es': require('../i18n/Español.json'),
  'fr-fr': require('../i18n/Français.json'),
  'id-id': require('../i18n/Indonesia.json'),
  'ja-jp': require('../i18n/日本語.json'),
  'ko-kr': require('../i18n/한국어.json'),
  'pt-pt': require('../i18n/Português.json'),
  'ru-ru': require('../i18n/Pусский.json'),
  'th-th': require('../i18n/ภาษาไทย.json'),
  'vi-vn': require('../i18n/Tiếng Việt.json')
}
const config = require('./config')
const isPlainObject = require('lodash/isPlainObject')

const addProp = (obj, key) => {
  if (isPlainObject(obj[key])) {
    return obj[key]
  } else if (typeof obj[key] === 'undefined') {
    let temp = {}
    obj[key] = temp
    return temp
  }
}

const parseData = (data) => {
  const result = {}
  for (let key in data) {
    let temp = result
    const arr = key.split('.')
    arr.forEach((prop, index) => {
      if (index === arr.length - 1) {
        temp[prop] = data[key]
      } else {
        temp = addProp(temp, prop)
      }
    })
  }
  return result
}

const i18nMap = new Map()
const prepareData = () => {
  for (let key in raw) {
    let temp = {}
    if (key === 'zh-tw') {
      Object.assign(temp, raw['zh-cn'], raw[key])
    } else {
      Object.assign(temp, raw['zh-cn'], raw['en-us'], raw[key])
    }
    i18nMap.set(key, parseData(temp))
  }
}

prepareData()

const parseText = (text, data) => {
  return text.replace(/(\${.+?})/g, function (...args) {
    const key = args[0].slice(2, args[0].length - 1)
    if (data[key]) return data[key]
    return args[0]
  })
}

const mainProps = [
  'symbol', 'ui', 'log', 'excel'
]

const i18n = new Proxy(raw, {
  get (obj, prop) {
    if (prop === 'data') {
      return i18nMap.get(config.lang)
    } else if (mainProps.includes(prop)) {
      return i18nMap.get(config.lang)[prop]
    } else if (prop === 'parse') {
      return parseText
    }
    return obj[prop]
  }
})

module.exports = i18n
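A quick sketch (not part of the commit) of how parseData expands dotted keys and how the proxy's parse (parseText) fills ${...} placeholders; the keys and message text are invented, not taken from the real language files.

// Illustrative only: assumes parseData from above is in scope; values are made up.
const flat = { 'fetch.retry': 'Page ${page} failed, retry ${count}', 'ui.ok': 'OK' }
const nested = parseData(flat)
// nested => { fetch: { retry: 'Page ${page} failed, retry ${count}' }, ui: { ok: 'OK' } }

const i18n = require('./i18n')
console.log(i18n.parse(nested.fetch.retry, { page: 3, count: 1 }))
// => 'Page 3 failed, retry 1'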
69 src/main/main.js Normal file
@@ -0,0 +1,69 @@
const { app, BrowserWindow, ipcMain } = require('electron')
const { initWindow } = require('./utils')
const { disableProxy, proxyStatus } = require('./module/system-proxy')
require('./getData')
require('./excel')
require('./UIGFJson')
const { getUpdateInfo } = require('./update/index')

const isDev = !app.isPackaged
let win = null

function createWindow() {
  win = initWindow()
  win.setMenuBarVisibility(false)
  isDev ? win.loadURL(`http://localhost:${process.env.PORT}`) : win.loadFile('dist/electron/renderer/index.html')
  if (isDev) {
    win.webContents.openDevTools({ mode: 'undocked', activate: true })
  }
}

const isFirstInstance = app.requestSingleInstanceLock()

if (!isFirstInstance) {
  app.quit()
} else {
  app.on('second-instance', () => {
    if (win) {
      if (win.isMinimized()) win.restore()
      win.focus()
    }
  })

  app.whenReady().then(createWindow)

  ipcMain.handle('RELAUNCH', async () => {
    app.relaunch()
    app.exit(0)
  })

  app.on('window-all-closed', () => {
    if (process.platform !== 'darwin') {
      app.quit()
    }
  })

  app.on('activate', () => {
    if (BrowserWindow.getAllWindows().length === 0) {
      createWindow()
    }
  })

  app.on('will-quit', (e) => {
    if (proxyStatus.started) {
      disableProxy()
    }
    if (getUpdateInfo().status === 'moving') {
      e.preventDefault()
      setTimeout(() => {
        app.quit()
      }, 3000)
    }
  })

  app.on('quit', () => {
    if (proxyStatus.started) {
      disableProxy()
    }
  })
}
39 src/main/module/exceljs.min.js (vendored) Normal file
File diff suppressed because one or more lines are too long
185 src/main/module/extract-zip.js Normal file
@@ -0,0 +1,185 @@
// Copyright (c) 2014 Max Ogden and other contributors
// All rights reserved.

// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:

// * Redistributions of source code must retain the above copyright notice, this
//   list of conditions and the following disclaimer.

// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.

// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// https://github.com/maxogden/extract-zip
// eslint-disable-next-line node/no-unsupported-features/node-builtins
const { createWriteStream, promises: fs } = require('original-fs')
const getStream = require('get-stream')
const path = require('path')
const { promisify } = require('util')
const stream = require('stream')
const yauzl = require('yauzl')

const openZip = promisify(yauzl.open)
const pipeline = promisify(stream.pipeline)

class Extractor {
  constructor (zipPath, opts) {
    this.zipPath = zipPath
    this.opts = opts
  }

  async extract () {
    this.zipfile = await openZip(this.zipPath, { lazyEntries: true })
    this.canceled = false

    return new Promise((resolve, reject) => {
      this.zipfile.on('error', err => {
        this.canceled = true
        reject(err)
      })
      this.zipfile.readEntry()

      this.zipfile.on('close', () => {
        if (!this.canceled) {
          resolve()
        }
      })

      this.zipfile.on('entry', async entry => {
        /* istanbul ignore if */
        if (this.canceled) {
          return
        }

        if (entry.fileName.startsWith('__MACOSX/')) {
          this.zipfile.readEntry()
          return
        }

        const destDir = path.dirname(path.join(this.opts.dir, entry.fileName))

        try {
          await fs.mkdir(destDir, { recursive: true })

          const canonicalDestDir = await fs.realpath(destDir)
          const relativeDestDir = path.relative(this.opts.dir, canonicalDestDir)

          if (relativeDestDir.split(path.sep).includes('..')) {
            throw new Error(`Out of bound path "${canonicalDestDir}" found while processing file ${entry.fileName}`)
          }

          await this.extractEntry(entry)
          this.zipfile.readEntry()
        } catch (err) {
          this.canceled = true
          this.zipfile.close()
          reject(err)
        }
      })
    })
  }

  async extractEntry (entry) {
    /* istanbul ignore if */
    if (this.canceled) {
      return
    }

    if (this.opts.onEntry) {
      this.opts.onEntry(entry, this.zipfile)
    }

    const dest = path.join(this.opts.dir, entry.fileName)

    // convert external file attr int into a fs stat mode int
    const mode = (entry.externalFileAttributes >> 16) & 0xFFFF
    // check if it's a symlink or dir (using stat mode constants)
    const IFMT = 61440
    const IFDIR = 16384
    const IFLNK = 40960
    const symlink = (mode & IFMT) === IFLNK
    let isDir = (mode & IFMT) === IFDIR

    // Failsafe, borrowed from jsZip
    if (!isDir && entry.fileName.endsWith('/')) {
      isDir = true
    }

    // check for windows weird way of specifying a directory
    // https://github.com/maxogden/extract-zip/issues/13#issuecomment-154494566
    const madeBy = entry.versionMadeBy >> 8
    if (!isDir) isDir = (madeBy === 0 && entry.externalFileAttributes === 16)

    const procMode = this.getExtractedMode(mode, isDir) & 0o777

    // always ensure folders are created
    const destDir = isDir ? dest : path.dirname(dest)

    const mkdirOptions = { recursive: true }
    if (isDir) {
      mkdirOptions.mode = procMode
    }
    await fs.mkdir(destDir, mkdirOptions)
    if (isDir) return

    const readStream = await promisify(this.zipfile.openReadStream.bind(this.zipfile))(entry)

    if (symlink) {
      const link = await getStream(readStream)
      await fs.symlink(link, dest)
    } else {
      await pipeline(readStream, createWriteStream(dest, { mode: procMode }))
    }
  }

  getExtractedMode (entryMode, isDir) {
    let mode = entryMode
    // Set defaults, if necessary
    if (mode === 0) {
      if (isDir) {
        if (this.opts.defaultDirMode) {
          mode = parseInt(this.opts.defaultDirMode, 10)
        }

        if (!mode) {
          mode = 0o755
        }
      } else {
        if (this.opts.defaultFileMode) {
          mode = parseInt(this.opts.defaultFileMode, 10)
        }

        if (!mode) {
          mode = 0o644
        }
      }
    }

    return mode
  }
}

module.exports = async function (zipPath, opts) {
  if (!path.isAbsolute(opts.dir)) {
    throw new Error('Target directory is expected to be absolute')
  }

  await fs.mkdir(opts.dir, { recursive: true })
  opts.dir = await fs.realpath(opts.dir)
  return new Extractor(zipPath, opts).extract()
}
31 src/main/module/node-mitmproxy.js Normal file
File diff suppressed because one or more lines are too long
39 src/main/module/system-proxy.js Normal file
@@ -0,0 +1,39 @@
const Registry = require('winreg')

const proxyStatus = {
  started: false
}
const setProxy = async (enable, proxyIp = '', ignoreIp = '') => {
  const regKey = new Registry({
    hive: Registry.HKCU,
    key: '\\Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings'
  })

  const regSet = function (key, type, value) {
    return new Promise((rev, rej) => {
      regKey.set(key, type, value, function (err) {
        if (err) rej(err)
        rev()
      })
    })
  }
  await regSet('ProxyEnable', Registry.REG_DWORD, enable)
  await regSet('ProxyServer', Registry.REG_SZ, proxyIp)
  await regSet('ProxyOverride', Registry.REG_SZ, ignoreIp)
}

const enableProxy = async (ip, port) => {
  const proxyIp = `${ip}:${port}`
  const ignoreIp = 'localhost;127.*;10.*;172.16.*;172.17.*;172.18.*;172.19.*;172.20.*;172.21.*;172.22.*;172.23.*;172.24.*;172.25.*;172.26.*;172.27.*;172.28.*;172.29.*;172.30.*;172.31.*;192.168.*;<local>'
  await setProxy('1', proxyIp, ignoreIp)
  proxyStatus.started = true
}

const disableProxy = async () => {
  await setProxy('0')
  proxyStatus.started = false
}

module.exports = {
  enableProxy, disableProxy, proxyStatus
}
65 src/main/update/index.js Normal file
@@ -0,0 +1,65 @@
const { app } = require('electron')
const fetch = require('electron-fetch').default
const semver = require('semver')
const util = require('util')
const path = require('path')
const fs = require('fs-extra')
const extract = require('../module/extract-zip')
const { version } = require('../../../package.json')
const { hash, sendMsg } = require('../utils')
const config = require('../config')
const i18n = require('../i18n')
const streamPipeline = util.promisify(require('stream').pipeline)

async function download(url, filePath) {
  const response = await fetch(url)
  if (!response.ok) throw new Error(`unexpected response ${response.statusText}`)
  await streamPipeline(response.body, fs.createWriteStream(filePath))
}

const updateInfo = {
  status: 'init'
}

const isDev = !app.isPackaged
const appPath = isDev ? path.resolve(__dirname, '../../', 'update-dev/app') : app.getAppPath()
const updatePath = isDev ? path.resolve(__dirname, '../../', 'update-dev/download') : path.resolve(appPath, '..', '..', 'update')

const update = async () => {
  if (isDev) return
  try {
    const url = 'https://star-rail-warp-export.css.moe/update'
    const res = await fetch(`${url}/manifest.json?t=${Math.floor(Date.now() / (1000 * 60 * 10))}`)
    const data = await res.json()
    if (!data.active) return
    if (semver.gt(data.version, version) && semver.gte(version, data.from)) {
      await fs.emptyDir(updatePath)
      const filePath = path.join(updatePath, data.name)
      if (!config.autoUpdate) {
        sendMsg(data.version, 'NEW_VERSION')
        return
      }
      updateInfo.status = 'downloading'
      await download(`${url}/${data.name}`, filePath)
      const buffer = await fs.readFile(filePath)
      const sha256 = hash(buffer)
      if (sha256 !== data.hash) return
      const appPathTemp = path.join(updatePath, 'app')
      await extract(filePath, { dir: appPathTemp })
      updateInfo.status = 'moving'
      await fs.emptyDir(appPath)
      await fs.copy(appPathTemp, appPath)
      updateInfo.status = 'finished'
      sendMsg(i18n.log.autoUpdate.success, 'UPDATE_HINT')
    }
  } catch (e) {
    updateInfo.status = 'failed'
    sendMsg(e, 'ERROR')
  }
}

const getUpdateInfo = () => updateInfo

setTimeout(update, 1000)

exports.getUpdateInfo = getUpdateInfo
207 src/main/utils.js Normal file
@@ -0,0 +1,207 @@
const fs = require('fs-extra')
const path = require('path')
const fetch = require('electron-fetch').default
const { BrowserWindow, app } = require('electron')
const crypto = require('crypto')
const unhandled = require('electron-unhandled')
const windowStateKeeper = require('electron-window-state')
const debounce = require('lodash/debounce')
const Registry = require('winreg')

const isDev = !app.isPackaged

const userPath = app.getPath('userData')
const appRoot = isDev ? path.resolve(__dirname, '..', '..') : userPath
const userDataPath = path.resolve(appRoot, 'userData')

let win = null
const initWindow = () => {
  let mainWindowState = windowStateKeeper({
    defaultWidth: 888,
    defaultHeight: 550
  })
  win = new BrowserWindow({
    x: mainWindowState.x,
    y: mainWindowState.y,
    width: mainWindowState.width,
    height: mainWindowState.height,
    webPreferences: {
      contextIsolation: false,
      nodeIntegration: true
    }
  })
  const saveState = debounce(mainWindowState.saveState, 500)
  win.on('resize', () => saveState(win))
  win.on('move', () => saveState(win))
  return win
}

const getWin = () => win

const log = []
const sendMsg = (text, type = 'LOAD_DATA_STATUS') => {
  if (win) {
    win.webContents.send(type, text)
  }
  if (type !== 'LOAD_DATA_STATUS') {
    log.push([Date.now(), type, text])
    saveLog()
  }
}

const saveLog = () => {
  const text = log.map(item => {
    const time = new Date(item[0]).toLocaleString()
    const type = item[1] === 'LOAD_DATA_STATUS' ? 'INFO' : item[1]
    const text = item[2]
    return `[${type}][${time}]${text}`
  }).join('\r\n')
  fs.outputFileSync(path.join(userDataPath, 'log.txt'), text)
}

const authkeyMask = (text = '') => {
  return text.replace(/authkey=[^&]+&/g, 'authkey=***&')
}

unhandled({
  showDialog: false,
  logger: function (err) {
    log.push([Date.now(), 'ERROR', authkeyMask(err.stack)])
    saveLog()
  }
})

const request = async (url) => {
  const res = await fetch(url, {
    timeout: 15 * 1000
  })
  return await res.json()
}

const sleep = (sec = 1) => {
  return new Promise(rev => {
    setTimeout(rev, sec * 1000)
  })
}

const sortData = (data) => {
  return data.map(item => {
    const [time, name, type, rank] = item
    return {
      time, name, type, rank,
      timestamp: new Date(time)
    }
  }).sort((a, b) => a.timestamp - b.timestamp)
    .map(item => {
      const { time, name, type, rank } = item
      return [time, name, type, rank]
    })
}

const langMap = new Map([
  ['zh-cn', '简体中文'],
  ['zh-tw', '繁體中文'],
  ['de-de', 'Deutsch'],
  ['en-us', 'English'],
  ['es-es', 'Español'],
  ['fr-fr', 'Français'],
  ['id-id', 'Indonesia'],
  ['ja-jp', '日本語'],
  ['ko-kr', '한국어'],
  ['pt-pt', 'Português'],
  ['ru-ru', 'Pусский'],
  ['th-th', 'ภาษาไทย'],
  ['vi-vn', 'Tiếng Việt']
])

const localeMap = new Map([
  ['zh-cn', ['zh', 'zh-CN']],
  ['zh-tw', ['zh-TW']],
  ['de-de', ['de-AT', 'de-CH', 'de-DE', 'de']],
  ['en-us', ['en-AU', 'en-CA', 'en-GB', 'en-NZ', 'en-US', 'en-ZA', 'en']],
  ['es-es', ['es', 'es-419']],
  ['fr-fr', ['fr-CA', 'fr-CH', 'fr-FR', 'fr']],
  ['id-id', ['id']],
  ['ja-jp', ['ja']],
  ['ko-kr', ['ko']],
  ['pt-pt', ['pt-BR', 'pt-PT', 'pt']],
  ['ru-ru', ['ru']],
  ['th-th', ['th']],
  ['vi-vn', ['vi']]
])

const detectLocale = (value) => {
  const locale = value || app.getLocale()
  let result = 'zh-cn'
  for (let [key, list] of localeMap) {
    if (list.includes(locale)) {
      result = key
      break
    }
  }
  return result
}

const saveJSON = async (name, data) => {
  try {
    await fs.outputJSON(path.join(userDataPath, name), data, {
      spaces: 2
    })
  } catch (e) {
    sendMsg(e, 'ERROR')
    await sleep(3)
  }
}

const readJSON = async (name) => {
  let data = null
  try {
    data = await fs.readJSON(path.join(userDataPath, name))
  } catch (e) {}
  return data
}

const hash = (data, type = 'sha256') => {
  const hmac = crypto.createHmac(type, 'hk4e')
  hmac.update(data)
  return hmac.digest('hex')
}

const scryptKey = crypto.scryptSync(userPath, 'hk4e', 24)
const cipherAes = (data) => {
  const algorithm = 'aes-192-cbc'
  const iv = Buffer.alloc(16, 0)
  const cipher = crypto.createCipheriv(algorithm, scryptKey, iv)
  let encrypted = cipher.update(data, 'utf8', 'hex')
  encrypted += cipher.final('hex')
  return encrypted
}

const decipherAes = (encrypted) => {
  const algorithm = 'aes-192-cbc'
  const iv = Buffer.alloc(16, 0)
  const decipher = crypto.createDecipheriv(algorithm, scryptKey, iv)
  let decrypted = decipher.update(encrypted, 'hex', 'utf8')
  decrypted += decipher.final('utf8')
  return decrypted
}

const interfaces = require('os').networkInterfaces()
const localIp = () => {
  for (var devName in interfaces) {
    var iface = interfaces[devName]

    for (var i = 0; i < iface.length; i++) {
      var alias = iface[i]
      if (alias.family === 'IPv4' && alias.address !== '127.0.0.1' && !alias.internal)
        return alias.address
    }
  }
  return '127.0.0.1'
}

module.exports = {
  sleep, request, hash, cipherAes, decipherAes, saveLog,
  sendMsg, readJSON, saveJSON, initWindow, getWin, localIp, userPath, detectLocale, langMap,
  appRoot, userDataPath
}
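An illustrative round trip (not part of the commit) of the AES helpers above; the plaintext is a placeholder, and since the key is derived from the Electron userData path via scryptSync, the ciphertext differs per machine.

// Hypothetical usage of cipherAes/decipherAes exported above.
const { cipherAes, decipherAes } = require('./utils')

const secretUrl = 'https://example.invalid/getGachaLog?authkey=placeholder'
const encrypted = cipherAes(secretUrl)      // hex string, machine-specific
console.log(decipherAes(encrypted) === secretUrl) // => true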
40 src/main/utils/mergeData.js Normal file
@@ -0,0 +1,40 @@
const mergeList = (a, b) => {
  if (!a || !a.length) return b || []
  if (!b || !b.length) return a
  const list = [...b, ...a]
  const result = []
  const idSet = new Set()
  list.forEach(item => {
    if (!idSet.has(item.id)) {
      result.push(item)
    }
    idSet.add(item.id)
  })
  return result.sort((m, n) => {
    const num = BigInt(m.id) - BigInt(n.id)
    if (num > 0) {
      return 1
    } else if (num < 0) {
      return -1
    }
    return 0
  })
}

const mergeData = (local, origin) => {
  if (local && local.result) {
    const localResult = local.result
    const localUid = local.uid
    const originUid = origin.uid
    if (localUid !== originUid) return origin.result
    const originResult = new Map()
    for (let [key, value] of origin.result) {
      const newVal = mergeList(localResult.get(key), value)
      originResult.set(key, newVal)
    }
    return originResult
  }
  return origin.result
}

module.exports = { mergeData, mergeList }
72 src/main/utils/mergeData.test.js Normal file
@@ -0,0 +1,72 @@
const { mergeList } = require('./mergeData')

test('mergeList succeeds', () => {
  const listA = [
    { "id": "1682521800010412850" },
    { "id": "1682521800010412950" }
  ]

  const listB = [
    { "id": "1682521800010412900" }
  ]

  expect(mergeList(listA, listB)).toEqual([
    { "id": "1682521800010412850" },
    { "id": "1682521800010412900" },
    { "id": "1682521800010412950" }
  ])
})

test('mergeList with repeated data succeeds', () => {
  const listA = [
    { "id": "1682521800010412850" },
    { "id": "1682521800010412950" }
  ]

  const listB = [
    { "id": "1682521800010412950" }
  ]

  expect(mergeList(listA, listB)).toEqual([
    { "id": "1682521800010412850" },
    { "id": "1682521800010412950" }
  ])
})

test('mergeList with empty second list succeeds', () => {
  const listA = []
  const listB = [
    { "id": "1682521800010412900" }
  ]
  expect(mergeList(listA, listB)).toEqual([
    { "id": "1682521800010412900" }
  ])
})

test('mergeList with empty first list succeeds', () => {
  const listA = [
    { "id": "1682521800010412900" }
  ]
  const listB = []
  expect(mergeList(listA, listB)).toEqual([
    { "id": "1682521800010412900" }
  ])
})