Initial Commit

This commit is contained in:
amizing25 2025-06-04 22:52:37 +07:00
commit 207f3f4c96
42 changed files with 5009 additions and 0 deletions

6
.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
/target
/scripts
.vscode
out
/testout
/z_scripts

2138
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

47
Cargo.toml Normal file
View File

@ -0,0 +1,47 @@
[workspace]
resolver = "3"
members = [
"crates/bytes-util",
"crates/asset-meta",
"crates/common",
"crates/parser",
"crates/program",
]
[workspace.package]
version = "0.1.0"
[workspace.dependencies]
# binary utils
byteorder = "1.5.0"
varint-rs = { version = "2.2.0", features = ["signed"] }
xxhash-rust = { version = "0.8.15", features = ["const_xxh64"] }
# net utils
reqwest = { version = "0.12.15", features = ["blocking"] }
# parallelization
dashmap = { version = "6.1.0", features = ["rayon"] }
rayon = "1.10.0"
# serialization
base64 = "0.22.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["preserve_order"] }
# error handling
anyhow = "1.0.98"
# logging utils
indicatif = "0.17.11"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
# cli
clap = { version = "4.5.39", features = ["derive"] }
# local crates
common = { path = "crates/common" }
tg-asset-meta = { path = "crates/asset-meta" }
tg-bytes-util = { path = "crates/bytes-util" }
tg-parser = { path = "crates/parser" }

21
LICENSE Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 amizing25
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

108
README.md Normal file
View File

@ -0,0 +1,108 @@
# Traingame parser
A tool to parse textmap (and maybe other res too) for a certain anime game.
---
## Usage
### `textmap`
Processes **only** the textmap files.
```bash
./program.exe textmap <Persistent Path / Design Data URL> <OUTPUT_DIR> [OPTIONS]
```
**Arguments:**
- `input_url` — URL or path to the persistent data or design bundle
- `output_dir` — Directory where the parsed output will be stored
**Options:**
- `--full-textmap` — Parse the entire textmap structure as an array instead of just key-value pairs
- `--save-bytes-file` — Save the `.bytes` files after download
**Examples:**
```bash
./program.exe textmap "https://autopatchcn.bhsr.com/design_data/BetaLive/output_10494861_2ed49bac2846_b7f8d02fced269" output/
```
```bash
./program.exe textmap "D:/Star Rail/StarRail_Data/Persistent/DesignData/Windows" output/
```
<details>
<summary><strong><code>excels</code></strong></summary>
### `excels`
Processes the Excel & Textmap files
```bash
./program.exe excels <DATA_JSON> <EXCEL_PATH_JSON> <Persistent Path / Design Data URL> <OUTPUT_DIR> [OPTIONS]
```
**Arguments:**
- `data_json` — Path to `data.json` schema
- `excel_path_json` — JSON file that maps Excel types to file paths
- `input_url` — URL or path to the persistent data
- `output_dir` — Output folder for processed files
**Options:**
- `--full-textmap` — Enable full textmap parsing if needed for linked data
- `--save-bytes-file` — Save original `.bytes` files
- `--log-error` — Output all encountered errors to the console
- `--config-paths <PATH>` — Optional extra config files (in JSON) for parsing additional types
**Examples:**
```bash
./program.exe excels data.json excels_path.json https://autopatchcn.bhsr.com/design_data/BetaLive/output_10494861_2ed49bac2846_b7f8d02fced269 output/ --log-error --save-bytes-file
```
```bash
./program.exe excels data.json excels_path.json "D:/Star Rail/StarRail_Data/Persistent/DesignData/Windows" output/ --log-error --save-bytes-file
```
</details>
<details>
<summary><strong><code>all</code></strong></summary>
### `all`
Processes Textmap, Excels, and Config files
```bash
./program.exe all <DATA_JSON> <EXCEL_PATH_JSON> <Persistent Path / Design Data URL> <OUTPUT_DIR> [OPTIONS]
```
Accepts the **same arguments and options** as the `excels` command.
**Examples:**
```bash
./program.exe all data.json excels_path.json "https://autopatchcn.bhsr.com/design_data/BetaLive/output_10494861_2ed49bac2846_b7f8d02fced269" output/ --full-textmap --log-error
```
```bash
./program.exe all data.json excels_path.json "D:/Star Rail/StarRail_Data/Persistent/DesignData/Windows" output/ --full-textmap --log-error
```
</details>
## Notes
- For parsing anything other than textmap (i.e., `excels` or `all`), **you must generate `data.json` and `excels_path.json` yourself**.
---
## Credits / References
- https://arca.live/b/starrailleaks/76183295
- https://github.com/Hiro420/HSR_Downloader

View File

@ -0,0 +1,10 @@
[package]
name = "tg-asset-meta"
edition = "2024"
version.workspace = true
[dependencies]
varint-rs.workspace = true
byteorder.workspace = true
tg-bytes-util.workspace = true

View File

@ -0,0 +1,116 @@
use byteorder::{BE, ReadBytesExt};
use std::fmt::Write;
use tg_bytes_util::FromBytes;
/// Top-level index of the design-data bundle: a small header plus one entry
/// per downloadable `.bytes` archive.
#[derive(Debug)]
pub struct DesignIndex {
    pub unk_i64: i64,           // meaning unknown
    pub file_count: i32,        // number of entries in `file_list`
    pub design_data_count: i32, // presumably total data entries across all files — TODO confirm
    pub file_list: Vec<FileEntry>,
}
impl FromBytes for DesignIndex {
    /// Reads the big-endian header, then `file_count` file entries in order.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        let unk_i64 = r.read_i64::<BE>()?;
        let file_count = r.read_i32::<BE>()?;
        let design_data_count = r.read_i32::<BE>()?;
        // `collect` over `io::Result` short-circuits on the first read error,
        // exactly like the push loop it replaces.
        let file_list = (0..file_count)
            .map(|_| FileEntry::from_bytes(r))
            .collect::<std::io::Result<Vec<_>>>()?;
        Ok(DesignIndex {
            unk_i64,
            file_count,
            design_data_count,
            file_list,
        })
    }
}
/// One archive file listed in the index: a hex file name, total size, and the
/// data entries packed inside it.
#[derive(Debug)]
pub struct FileEntry {
    pub name_hash: i32,
    pub file_byte_name: String, // 16 raw bytes rendered as 32 lowercase hex chars
    pub size: i64,
    pub data_count: i32, // number of entries in `data_entries`
    pub data_entries: Vec<DataEntry>,
    pub unk: u8, // trailing byte, meaning unknown
}
impl FromBytes for FileEntry {
    /// Reads one file entry: header fields, `data_count` data entries, then a
    /// trailing byte of unknown meaning.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        let mut result = Self {
            name_hash: r.read_i32::<BE>()?,
            file_byte_name: {
                // 16 raw bytes rendered as a 32-char lowercase hex string.
                // Stack array instead of a heap Vec for the fixed-size read.
                let mut buf = [0u8; 16];
                r.read_exact(&mut buf)?;
                // Capacity 32 (two hex digits per byte) — the original
                // reserved only 16 and always reallocated. `write!` appends
                // in place, avoiding one `format!` allocation per byte.
                let mut hex = String::with_capacity(32);
                for b in buf {
                    let _ = write!(hex, "{b:02x}");
                }
                hex
            },
            size: r.read_i64::<BE>()?,
            data_count: r.read_i32::<BE>()?,
            data_entries: vec![],
            unk: 0,
        };
        for _ in 0..result.data_count {
            result.data_entries.push(DataEntry::from_bytes(r)?);
        }
        result.unk = r.read_u8()?;
        Ok(result)
    }
}
/// One blob inside an archive, located by byte offset and size.
#[derive(Debug)]
pub struct DataEntry {
    pub name_hash: i32,
    pub size: u32,
    pub offset: u32, // offset within the containing archive file
}
impl FromBytes for DataEntry {
    /// Reads one fixed-size (12-byte) big-endian data-entry record.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        let name_hash = r.read_i32::<BE>()?;
        let size = r.read_u32::<BE>()?;
        let offset = r.read_u32::<BE>()?;
        Ok(Self {
            name_hash,
            size,
            offset,
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::design_index::DesignIndex;
    use std::io::Cursor;
    use tg_bytes_util::FromBytes;

    /// Parses the checked-in `DesignV.bytes` fixture and spot-checks the
    /// header, the first file entry, and its first data entry.
    #[test]
    fn test_parse_design_index() {
        const BYTES: &[u8] = include_bytes!("../tests/DesignV.bytes");
        let mut r = Cursor::new(BYTES);
        let parsed = DesignIndex::from_bytes(&mut r).unwrap();
        assert_eq!(11, parsed.file_count);
        assert_eq!(100102, parsed.design_data_count);
        // file_count must agree with the number of entries actually read.
        assert_eq!(11, parsed.file_list.len());
        // FileEntry
        assert_eq!(-1703948225, parsed.file_list[0].name_hash);
        assert_eq!(
            "7e3fc08e24890ba15f9c3a8ec1454025",
            parsed.file_list[0].file_byte_name.to_string()
        );
        assert_eq!(89899, parsed.file_list[0].size);
        assert_eq!(1, parsed.file_list[0].data_count);
        // DataEntry
        assert_eq!(-1703948225, parsed.file_list[0].data_entries[0].name_hash);
        assert_eq!(89899, parsed.file_list[0].data_entries[0].size);
        assert_eq!(0, parsed.file_list[0].data_entries[0].offset);
    }
}

View File

@ -0,0 +1,2 @@
pub mod design_index;
pub mod mini_asset;

View File

@ -0,0 +1,42 @@
use std::io::SeekFrom;
use byteorder::{LE, ReadBytesExt};
use tg_bytes_util::{ByteHash16, FromBytes};
/// Parsed header of `M_DesignV.bytes`: just enough to locate the versioned
/// design index file.
#[derive(Debug)]
pub struct MiniAsset {
    pub revision_id: u32,
    pub design_index_hash: ByteHash16, // names the `DesignV_<hash>.bytes` index file
}
impl FromBytes for MiniAsset {
    /// Skips the first six 32-bit words of the header (magic/version fields,
    /// presumably — meaning unconfirmed), then reads the revision id and the
    /// hash that names the design index file.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        r.seek(SeekFrom::Current(6 * 4))?;
        Ok(Self {
            revision_id: r.read_u32::<LE>()?,
            design_index_hash: ByteHash16::from_bytes(r)?,
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::mini_asset::MiniAsset;
    use std::io::Cursor;
    use tg_bytes_util::FromBytes;

    /// Parses an inline M_DesignV header dump and checks the extracted
    /// design-index hash string.
    #[test]
    fn test() {
        const BYTES: &[u8] = &[
            83, 82, 77, 73, 0, 3, 0, 1, 66, 0, 0, 0, 0, 0, 12, 0, 3, 0, 0, 0, 2, 0, 0, 0, 234, 255,
            151, 0, 202, 110, 28, 223, 138, 63, 212, 4, 63, 130, 138, 178, 68, 22, 219, 131, 234,
            55, 0, 0, 0, 0, 0, 0, 210, 249, 237, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        ];
        let mut r = Cursor::new(BYTES);
        let parsed = MiniAsset::from_bytes(&mut r).unwrap();
        assert_eq!(
            parsed.design_index_hash.to_string(),
            "df1c6eca04d43f8ab28a823f83db1644"
        )
    }
}

Binary file not shown.

View File

@ -0,0 +1,8 @@
[package]
name = "tg-bytes-util"
edition = "2024"
version.workspace = true
[dependencies]
varint-rs.workspace = true
byteorder.workspace = true

View File

@ -0,0 +1,166 @@
use std::{
collections::HashMap,
hash::Hash,
io::{self, Read, Seek},
};
use varint_rs::VarintReader;
/// Deserialization from the game's binary stream format: implementors decode
/// one value of `Self` starting at the current position of `r`.
pub trait FromBytes: Send + Sync + Sized {
    fn from_bytes<T: io::Seek + io::Read>(r: &mut T) -> io::Result<Self>;
}
// All integer types — and bool — are varint-encoded on the wire, not fixed
// width; these impls simply delegate to the `varint_rs` reader methods.
impl FromBytes for u8 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_u8_varint()
    }
}
impl FromBytes for u16 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_u16_varint()
    }
}
impl FromBytes for u32 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_u32_varint()
    }
}
impl FromBytes for u64 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_u64_varint()
    }
}
impl FromBytes for usize {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_usize_varint()
    }
}
impl FromBytes for i8 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_i8_varint()
    }
}
impl FromBytes for i16 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_i16_varint()
    }
}
impl FromBytes for i32 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_i32_varint()
    }
}
impl FromBytes for i64 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_i64_varint()
    }
}
impl FromBytes for isize {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        r.read_isize_varint()
    }
}
impl FromBytes for bool {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        // Any non-zero byte counts as true.
        Ok(r.read_i8_varint()? != 0)
    }
}
// Floats are NOT varints: they are raw 4/8-byte little-endian IEEE values.
impl FromBytes for f32 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        let mut byte = [0; 4];
        r.read_exact(&mut byte)?;
        Ok(f32::from_le_bytes(byte))
    }
}
impl FromBytes for f64 {
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        let mut byte = [0; 8];
        r.read_exact(&mut byte)?;
        Ok(f64::from_le_bytes(byte))
    }
}
impl FromBytes for String {
    /// Reads a varint length prefix followed by that many raw bytes, decoded
    /// as UTF-8 with lossy replacement (never rejected). Lengths above 1 MB
    /// are refused so a corrupt prefix cannot trigger a huge allocation.
    #[inline]
    fn from_bytes<T: Seek + Read>(r: &mut T) -> io::Result<Self> {
        let length = r.read_usize_varint()?;
        if length > 1_000_000 {
            return Err(io::Error::new(
                io::ErrorKind::OutOfMemory,
                "attempting to allocate large memory!",
            ));
        }
        let mut raw = vec![0u8; length];
        r.read_exact(&mut raw)?;
        Ok(String::from_utf8_lossy(&raw).into_owned())
    }
}
impl<T> FromBytes for Vec<T>
where
T: FromBytes,
{
#[inline]
fn from_bytes<R: Read + Seek>(r: &mut R) -> std::io::Result<Self> {
let length = r.read_i32_varint()? as usize;
let mut out = Vec::with_capacity(length);
for _ in 0..length {
out.push(T::from_bytes(r)?);
}
Ok(out)
}
}
impl<K, V> FromBytes for HashMap<K, V>
where
    K: FromBytes + Eq + Hash,
    V: FromBytes,
{
    /// Reads a varint entry count followed by `length` key/value pairs
    /// (key first, then value).
    ///
    /// The count is validated before allocating: the original cast a raw
    /// `i32` to `usize`, so a negative prefix became a huge `with_capacity`.
    /// Now out-of-range counts return the same 1 MB guard error the `String`
    /// impl uses.
    #[inline]
    fn from_bytes<T: io::Seek + io::Read>(r: &mut T) -> io::Result<Self> {
        let length = r.read_i32_varint()?;
        if !(0..=1_000_000).contains(&length) {
            return Err(io::Error::new(
                io::ErrorKind::OutOfMemory,
                "attempting to allocate large memory!",
            ));
        }
        let length = length as usize;
        let mut out = HashMap::with_capacity(length);
        for _ in 0..length {
            out.insert(K::from_bytes(r)?, V::from_bytes(r)?);
        }
        Ok(out)
    }
}
impl<T> FromBytes for Box<T>
where
    T: FromBytes,
{
    /// Boxes whatever `T::from_bytes` produces; errors pass through.
    #[inline]
    fn from_bytes<R: Read + Seek>(r: &mut R) -> std::io::Result<Self> {
        T::from_bytes(r).map(Box::new)
    }
}

View File

@ -0,0 +1,5 @@
mod from_bytes;
mod util;
pub use from_bytes::FromBytes;
pub use util::*;

View File

@ -0,0 +1,83 @@
use std::{
fmt::{Display, Write as _},
io::Write,
};
use varint_rs::{VarintReader, VarintWriter};
use crate::FromBytes;
/// Presence bitmask preceding class fields: bit `i` records whether field `i`
/// was serialized. Packed 64 fields per u64, one varint per segment.
#[derive(Debug)]
pub struct ExistFlag {
    data: Vec<u64>,       // bitmask segments, low bit = lowest field index
    field_length: usize,  // number of fields covered by the mask
}
impl ExistFlag {
    /// Reads the presence bitmask for a class with `field_length` fields:
    /// one u64 varint per 64 fields, with a minimum of one segment.
    pub fn new<R: std::io::Read + std::io::Seek>(
        r: &mut R,
        field_length: usize,
    ) -> std::io::Result<Self> {
        let segments = field_length.max(1usize).div_ceil(64);
        let data = (0..segments)
            .map(|_| r.read_u64_varint())
            .collect::<std::io::Result<Vec<u64>>>()?;
        Ok(Self { data, field_length })
    }
    /// Returns whether the field at `index` was serialized.
    ///
    /// # Panics
    /// Panics when `index >= field_length`.
    pub fn exists(&self, index: usize) -> bool {
        if index >= self.field_length {
            panic!("out of bound field index: {index}")
        }
        (self.data[index / 64] >> (index % 64)) & 1 != 0
    }
    /// Writes the bitmask for `exist_flags` using the same u64-varint layout
    /// that `new` reads back.
    pub fn write<W: Write>(writer: &mut W, exist_flags: &[bool]) -> std::io::Result<()> {
        let segments = exist_flags.len().max(1).div_ceil(64);
        let mut data = vec![0u64; segments];
        for (i, &exists) in exist_flags.iter().enumerate() {
            if exists {
                data[i / 64] |= 1u64 << (i % 64);
            }
        }
        for segment in data {
            writer.write_u64_varint(segment)?;
        }
        Ok(())
    }
}
/// 16-byte content hash; `Display` renders it as 32 lowercase hex chars.
#[derive(Debug)]
pub struct ByteHash16(Vec<u8>);
impl FromBytes for ByteHash16 {
    /// Reads a 16-byte hash stored as four 4-byte words: each word is
    /// byte-reversed into the output (on-disk words are the opposite
    /// endianness of the displayed hash).
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        let mut full_hash = [0u8; 16];
        // One stack buffer reused for all four chunks — the original
        // allocated a fresh heap Vec on every loop iteration.
        let mut chunk = [0u8; 4];
        for i in 0..4 {
            r.read_exact(&mut chunk)?;
            for j in 0..4 {
                full_hash[i * 4 + j] = chunk[3 - j];
            }
        }
        Ok(Self(full_hash.to_vec()))
    }
}
impl Display for ByteHash16 {
    /// Formats the hash as a lowercase hex string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for b in &self.0 {
            // Write digits straight into the formatter: no intermediate
            // String and no per-byte `format!` allocation as before.
            write!(f, "{b:02x}")?;
        }
        Ok(())
    }
}

15
crates/common/Cargo.toml Normal file
View File

@ -0,0 +1,15 @@
[package]
name = "common"
edition = "2024"
version.workspace = true
[dependencies]
xxhash-rust.workspace = true
reqwest.workspace = true
anyhow.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
indicatif.workspace = true
tg-asset-meta.workspace = true
tg-bytes-util.workspace = true

View File

@ -0,0 +1,230 @@
use super::hash;
use anyhow::{Context, Result};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use reqwest::{StatusCode, blocking::Client};
use std::{
collections::HashMap,
io::{Cursor, Read as _},
path::PathBuf,
thread,
time::Duration,
};
use tg_asset_meta::{
design_index::{DesignIndex, FileEntry},
mini_asset::MiniAsset,
};
use tg_bytes_util::FromBytes;
const MAX_RETRIES: usize = 3;
const RETRY_DELAY: Duration = Duration::from_millis(100);
/// Downloads every design-data archive listed in the remote index and returns
/// a map from per-entry name hash to that entry's raw bytes.
///
/// * `design_data_url` — base URL, or a local folder path (see
///   `download_bytes` for the non-HTTP branch).
/// * `output_folder` — when `Some`, the raw `.bytes` files are also saved
///   under `<folder>/DesignData`.
/// * `filter_hashes` — when non-empty, only archives containing at least one
///   of these name hashes are downloaded.
///
/// Each archive is fetched on its own OS thread; individual failures are
/// logged and skipped rather than aborting the whole batch.
pub fn download_all_design_data(
    design_data_url: String,
    output_folder: Option<PathBuf>,
    filter_hashes: Vec<i32>,
) -> Result<HashMap<i32, Vec<u8>>> {
    let client = Client::new();
    let mp = MultiProgress::new();
    let mini_asset = download_mini_asset(&client, &design_data_url, &mp, &output_folder)
        .context("Failed to download mini asset")?;
    let design_index =
        download_design_index(&client, &design_data_url, &mp, &mini_asset, &output_folder)
            .context("Failed to download design index")?;
    let mut handles = HashMap::with_capacity(design_index.file_list.len());
    for file_entry in design_index.file_list {
        // Cheap clones so each worker thread owns its inputs.
        let client = client.clone();
        let design_data_url = design_data_url.clone();
        let mp = mp.clone();
        let output_folder = output_folder.clone();
        let byte_name = file_entry.file_byte_name.clone();
        // Skip archives that contain none of the requested hashes.
        if !filter_hashes.is_empty()
            && !file_entry
                .data_entries
                .iter()
                .any(|e| filter_hashes.contains(&e.name_hash))
        {
            continue;
        }
        handles.insert(
            byte_name,
            thread::spawn(move || {
                let data = download_design_bytes(
                    &client,
                    &design_data_url,
                    &mp,
                    &file_entry,
                    &output_folder,
                )?;
                // Special handling for ConfigManifest, since they are in JSON format.
                if file_entry.name_hash
                    == hash::get_32bit_hash_const("BakedConfig/ConfigManifest.json")
                {
                    return Result::<HashMap<i32, Vec<u8>>>::Ok(HashMap::from([(
                        file_entry.name_hash,
                        data,
                    )]));
                };
                // Slice each contained entry out of the archive blob.
                // NOTE(review): offset + size beyond the blob length panics in
                // this worker; the panic is caught by `join` below and logged,
                // not propagated.
                Ok(file_entry
                    .data_entries
                    .iter()
                    .map(|data_entry| {
                        let slice = &data[data_entry.offset as usize
                            ..(data_entry.offset + data_entry.size) as usize];
                        (data_entry.name_hash, slice.to_vec())
                    })
                    .collect::<HashMap<i32, Vec<u8>>>())
            }),
        );
    }
    // Join all workers; a failed or panicked worker only drops that archive.
    let results: HashMap<String, HashMap<i32, Vec<u8>>> = handles
        .into_iter()
        .filter_map(|(byte_name, handle)| match handle.join() {
            Ok(Ok(data)) => Some((byte_name, data)),
            Ok(Err(e)) => {
                tracing::error!("Download error: {:?}", e);
                None
            }
            Err(e) => {
                tracing::error!("Thread panicked: {:?}", e);
                None
            }
        })
        .collect();
    // Flatten the per-archive maps into one hash -> bytes map.
    Ok(results
        .into_iter()
        .flat_map(|(_, inner)| inner.into_iter())
        .collect())
}
#[inline]
fn download_mini_asset(
    client: &Client,
    design_data_url: &str,
    mp: &MultiProgress,
    output_folder: &Option<PathBuf>,
) -> anyhow::Result<MiniAsset> {
    // Fetch the fixed-name bootstrap file, parse it, then optionally keep the
    // raw bytes on disk.
    let url = format!("{design_data_url}/client/Windows/M_DesignV.bytes");
    let bytes = download_bytes(client, &url, mp)?;
    let parsed = MiniAsset::from_bytes(&mut Cursor::new(&bytes))?;
    save_file(output_folder, &bytes, "M_DesignV.bytes");
    Ok(parsed)
}
#[inline]
fn download_design_index(
    client: &Client,
    design_data_url: &str,
    mp: &MultiProgress,
    mini_asset: &MiniAsset,
    output_folder: &Option<PathBuf>,
) -> Result<DesignIndex> {
    // The index file name embeds the hash taken from the mini asset.
    let file_name = format!("DesignV_{}.bytes", mini_asset.design_index_hash);
    let url = format!("{design_data_url}/client/Windows/{file_name}");
    let bytes = download_bytes(client, &url, mp)?;
    let parsed = DesignIndex::from_bytes(&mut Cursor::new(&bytes))?;
    save_file(output_folder, &bytes, &file_name);
    Ok(parsed)
}
#[inline]
fn download_design_bytes(
    client: &Client,
    design_data_url: &str,
    mp: &MultiProgress,
    file_entry: &FileEntry,
    output_folder: &Option<PathBuf>,
) -> Result<Vec<u8>> {
    // Archives are addressed by their hex byte-name from the index.
    let file_name = format!("{}.bytes", file_entry.file_byte_name);
    let url = format!("{design_data_url}/client/Windows/{file_name}");
    let bytes = download_bytes(client, &url, mp)?;
    save_file(output_folder, &bytes, &file_name);
    Ok(bytes)
}
/// Best-effort write of `bytes` to `<output_folder>/DesignData/<file_name>`.
///
/// A `None` folder disables saving entirely. Directory-creation and write
/// errors are deliberately ignored: saving is only a side channel, the
/// downloaded bytes are still returned to the caller.
///
/// Takes `&[u8]` instead of the original `&Vec<u8>` (clippy `ptr_arg`);
/// existing `&Vec<u8>` call sites coerce automatically.
#[inline]
fn save_file(output_folder: &Option<PathBuf>, bytes: &[u8], file_name: &str) {
    if let Some(output_folder) = output_folder {
        let output_folder = output_folder.join("DesignData");
        if !output_folder.is_dir() {
            let _ = std::fs::create_dir_all(&output_folder);
        }
        let _ = std::fs::write(output_folder.join(file_name), bytes);
    }
}
/// Fetches `design_data_url` with a progress bar, retrying up to
/// `MAX_RETRIES` times with a short delay between attempts.
///
/// Inputs that do not start with `http` are treated as local paths: the
/// `client/Windows` segment appended by the callers is stripped so the file
/// is read from the folder the user supplied directly.
fn download_bytes(client: &Client, design_data_url: &str, mp: &MultiProgress) -> Result<Vec<u8>> {
    if !design_data_url.starts_with("http") {
        return Ok(std::fs::read(
            design_data_url.replace("client/Windows", ""),
        )?);
    }
    for attempt in 1..=MAX_RETRIES {
        // One attempt, wrapped in a closure so every failure mode funnels
        // into the retry match below.
        let result = (|| -> Result<Vec<u8>> {
            let resp = client.get(design_data_url).send()?;
            let status = resp.status();
            if status != StatusCode::OK {
                return Err(anyhow::format_err!(
                    "Server returned non OK code for {design_data_url} {:?}",
                    status
                ));
            }
            // Progress bar sized from Content-Length (0 when absent).
            let total = resp.content_length().unwrap_or(0);
            let pb = mp.add(ProgressBar::new(total));
            pb.set_style(
                ProgressStyle::with_template("{msg} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, ETA: {eta})")?
                    .progress_chars("##-"),
            );
            let file_name = design_data_url.split('/').next_back().unwrap_or_default();
            pb.set_message(format!("Downloading {file_name}"));
            let mut reader = pb.wrap_read(resp);
            let mut buffer = Vec::with_capacity(total as usize);
            reader.read_to_end(&mut buffer)?;
            pb.finish_with_message(format!("Downloaded {file_name}"));
            Ok(buffer)
        })();
        match result {
            Ok(data) => return Ok(data),
            Err(e) if attempt < MAX_RETRIES => {
                mp.println(format!(
                    "Retry {attempt}/{MAX_RETRIES} for {design_data_url} due to error: {e}"
                ))?;
                std::thread::sleep(RETRY_DELAY);
            }
            // Final attempt: propagate the error instead of retrying.
            Err(e) => return Err(e),
        }
    }
    // The loop always returns on the last attempt (MAX_RETRIES >= 1).
    unreachable!()
}

25
crates/common/src/hash.rs Normal file
View File

@ -0,0 +1,25 @@
/// 32-bit string hash: two interleaved shift-xor lanes seeded with 5381
/// (even-indexed bytes feed one lane, odd-indexed bytes the other), combined
/// as `lane1 + lane2 * 1566083941` with wrapping arithmetic.
///
/// `const` so file-path hashes can be computed at compile time.
pub const fn get_32bit_hash_const(s: &str) -> i32 {
    let bytes = s.as_bytes();
    let mut even_lane: i32 = 5381;
    let mut odd_lane: i32 = 5381;
    let mut i = 0;
    // `while` rather than iterators: const fns cannot use iterator adaptors.
    while i < bytes.len() {
        even_lane = ((even_lane << 5).wrapping_add(even_lane)) ^ (bytes[i] as i32);
        if i + 1 < bytes.len() {
            odd_lane = ((odd_lane << 5).wrapping_add(odd_lane)) ^ (bytes[i + 1] as i32);
        }
        i += 2;
    }
    even_lane.wrapping_add(odd_lane.wrapping_mul(1566083941))
}
/// 64-bit hash of `s`: xxHash64 with seed 0.
///
/// Now a `const fn`, matching both its own name and the sibling
/// `get_32bit_hash_const`; the crate's `const_xxh64` feature exists exactly
/// for compile-time evaluation, and `const_xxh64::xxh64` is itself const.
#[inline]
pub const fn get_64bit_hash_const(s: &str) -> u64 {
    xxhash_rust::const_xxh64::xxh64(s.as_bytes(), 0)
}

3
crates/common/src/lib.rs Normal file
View File

@ -0,0 +1,3 @@
pub mod downloader;
pub mod hash;
pub mod logging;

View File

@ -0,0 +1,30 @@
use tracing::Level;
use tracing_subscriber::{
EnvFilter, Layer, filter, fmt, layer::SubscriberExt as _, util::SubscriberInitExt as _,
};
/// Installs the global subscriber capped at `level`, honouring
/// `RUST_LOG`-style overrides from the environment.
///
/// NOTE(review): the `ureq=error` directive silences a crate that is not in
/// the visible dependency list (HTTP goes through `reqwest` here) — confirm
/// it is still needed.
pub fn init(level: tracing::Level) {
    tracing_subscriber::fmt()
        .with_max_level(level)
        .with_env_filter(
            EnvFilter::builder()
                .with_default_directive(level.into())
                // Panics on a malformed environment filter value.
                .from_env()
                .unwrap()
                .add_directive("ureq=error".parse().unwrap()),
        )
        .without_time()
        .with_target(false)
        .init();
}
/// Installs a minimal subscriber that shows INFO events only. Note the filter
/// is an equality check, not a threshold: WARN and ERROR are suppressed too.
pub fn init_info_only() {
    tracing_subscriber::registry()
        .with(
            fmt::layer()
                .without_time()
                .with_target(false)
                .with_filter(filter::filter_fn(|m| m.level() == &Level::INFO)),
        )
        .init();
}

14
crates/parser/Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[package]
name = "tg-parser"
edition = "2024"
version.workspace = true
[dependencies]
varint-rs.workspace = true
anyhow.workspace = true
tracing.workspace = true
serde.workspace = true
serde_json.workspace = true
base64.workspace = true
tg-bytes-util.workspace = true

View File

@ -0,0 +1,232 @@
use std::{
collections::HashMap,
io::{Read, Seek},
sync::LazyLock,
};
use crate::{DynamicParser, ValueKind};
use base64::Engine;
use serde_json::{Map, Number, Value, json};
use varint_rs::VarintReader;
use tg_bytes_util::FromBytes;
/// Signature shared by all hand-written parsers for types the generic
/// schema-driven parser cannot describe.
type CustomParser =
    HashMap<&'static str, for<'a> fn(&mut DynamicParser<'a>) -> anyhow::Result<Value>>;
/// Lookup table from (sometimes obfuscated) type name to its dedicated parser.
pub static CUSTOM_PARSER: LazyLock<CustomParser> = LazyLock::new(|| {
    // The explicit element type coerces each distinct fn item to the common
    // higher-ranked fn-pointer type before collecting.
    let entries: [(
        &'static str,
        for<'a> fn(&mut DynamicParser<'a>) -> anyhow::Result<Value>,
    ); 7] = [
        ("RPG.GameCore.FixPoint", fix_point_parser),
        ("RPG.GameCore.DynamicValue", dynamic_value_parser),
        ("LAHCFFKCOBC", dynamic_values_parser),
        ("RPG.GameCore.DynamicFloat", dynamic_float_parser),
        ("RPG.GameCore.ReadInfo", read_info_parser),
        ("RPG.GameCore.JsonEnum", json_enum_parser),
        ("RPG.Client.TextID", textid_parser),
    ];
    entries.into_iter().collect()
});
/// Reads exactly `len` raw bytes from `cursor`.
fn read_bytes<R: Read + Seek>(cursor: &mut R, len: usize) -> anyhow::Result<Vec<u8>> {
    let mut buffer = vec![0u8; len];
    cursor.read_exact(&mut buffer)?;
    Ok(buffer)
}
/// Reads a single raw byte (no varint decoding).
fn read_byte<R: Read + Seek>(cursor: &mut R) -> anyhow::Result<u8> {
    // Stack buffer instead of the original one-element heap Vec per call.
    let mut byte = [0u8; 1];
    cursor.read_exact(&mut byte)?;
    Ok(byte[0])
}
/// Reads a raw byte; any non-zero value counts as `true`.
fn read_bool<R: Read + Seek>(cursor: &mut R) -> anyhow::Result<bool> {
    Ok(read_byte(cursor)? != 0)
}
/// Parses `RPG.GameCore.FixPoint`: a varint-encoded integer scaled by 2^-32,
/// emitted as `{"Value": <float>}`.
///
/// NOTE(review): the division happens in f32 before widening to f64, which
/// loses precision for large magnitudes — confirm this matches the reference
/// output before changing it.
fn fix_point_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    let value = parser.cursor.read_i64_varint()? as f32;
    Ok(json!({
        "Value": (value / (2f32).powf(32f32)) as f64
    }))
}
/// Parses `RPG.GameCore.DynamicValue`: a one-byte type tag followed by a
/// tag-dependent payload, emitted as `{"Type": ..., "Value": ...}`.
/// Unknown tags decode as a Null value.
fn dynamic_value_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    let value_type = parser.cursor.read_i8_varint()?;
    let (r#type, value) = match value_type {
        0 => (
            String::from("Int32"),
            Value::Number(i32::from_bytes(&mut parser.cursor)?.into()),
        ),
        1 => (
            String::from("Float"),
            // NOTE(review): `unwrap` panics if the payload is NaN/inf —
            // confirm the format cannot produce such values.
            Value::Number(Number::from_f64(f32::from_bytes(&mut parser.cursor)? as f64).unwrap()),
        ),
        2 => (
            String::from("Boolean"),
            Value::Bool(bool::from_bytes(&mut parser.cursor)?),
        ),
        3 => {
            // Array of nested DynamicValues, varint length prefix.
            let length = parser.cursor.read_i64_varint()? as usize;
            if length > 1_000_000 {
                return Err(anyhow::format_err!("attempting to allocate large memory!"));
            }
            let mut result = Vec::with_capacity(length);
            for _ in 0..length {
                result.push(parser.parse(
                    &ValueKind::Class(String::from("RPG.GameCore.DynamicValue")),
                    false,
                )?);
            }
            (String::from("Array"), serde_json::to_value(result)?)
        }
        4 => {
            // Map variant: two varints per entry are read and discarded
            // (presumably key ids — TODO confirm), only the nested value is
            // kept, so the output is a value list, not key/value pairs.
            let length = parser.cursor.read_i64_varint()? as usize;
            if length > 1_000_000 {
                return Err(anyhow::format_err!("attempting to allocate large memory!"));
            }
            let mut result = Vec::with_capacity(length);
            for _ in 0..length {
                let _ = parser.cursor.read_i64_varint()?;
                let _ = parser.cursor.read_i64_varint()?;
                result.push(parser.parse(
                    &ValueKind::Class(String::from("RPG.GameCore.DynamicValue")),
                    false,
                )?);
            }
            (String::from("Map"), serde_json::to_value(result)?)
        }
        5 => (
            String::from("String"),
            Value::String(String::from_bytes(&mut parser.cursor)?),
        ),
        _ => (String::from("Null"), Value::Null),
    };
    Ok(json!({
        "Type": r#type,
        "Value": value
    }))
}
/// Parses the obfuscated `LAHCFFKCOBC` container: a map from `StringHash`
/// keys to one of two payload layouts selected by a leading flag byte.
/// The `v7`/`v8`/... field names mirror decompiler temporaries; their actual
/// meaning is unknown.
fn dynamic_values_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    let length = parser.cursor.read_u64_varint()? as usize;
    if length > 1_000_000 {
        return Err(anyhow::format_err!("attempting to allocate large memory!"));
    }
    let mut floats = Map::with_capacity(length);
    for _ in 0..length {
        let key = parser.parse(
            &ValueKind::Class(String::from("RPG.GameCore.StringHash")),
            false,
        )?;
        let v12 = bool::from_bytes(&mut parser.cursor)?;
        let value = if v12 {
            // Layout A: three DynamicFloats followed by an optional ReadInfo.
            let v7 = dynamic_float_parser(parser)?;
            let v8 = dynamic_float_parser(parser)?;
            let v9 = dynamic_float_parser(parser)?;
            let read_info = read_info_parser(parser)?;
            json!({
                "v7": v7,
                "v8": v8,
                "v9": v9,
                "ReadInfo": read_info,
            })
        } else {
            // Layout B: one FixPoint, an optional FixPoint pair, ReadInfo.
            let v24 = fix_point_parser(parser)?;
            let v17 = bool::from_bytes(&mut parser.cursor)?;
            let unk = if v17 {
                let v15 = fix_point_parser(parser)?;
                let v16 = fix_point_parser(parser)?;
                json!({
                    "v15": v15,
                    "v16": v16
                })
            } else {
                json!({})
            };
            let read_info = read_info_parser(parser)?;
            json!({
                "ReadInfo": read_info,
                "unk": unk,
                "v24": v24
            })
        };
        // JSON object keys must be strings; non-string keys use their JSON
        // rendering.
        floats.insert(key.to_string(), value);
    }
    Ok(json!({
        "Floats": floats
    }))
}
/// Parses `RPG.GameCore.DynamicFloat`: either a postfix expression (opcode
/// blob + fixed operands + hash operands) or a single fixed value, selected
/// by a leading flag byte.
fn dynamic_float_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    let is_dynamic = read_bool(&mut parser.cursor)?;
    Ok(if is_dynamic {
        // The opcode bytes are opaque here; ship them out base64-encoded.
        let opcode_len = read_byte(&mut parser.cursor)? as usize;
        let opcodes = base64::engine::general_purpose::STANDARD
            .encode(read_bytes(&mut parser.cursor, opcode_len)?);
        // Counts are single bytes, so both operand lists are at most 255 long.
        let fixed_values = (0..read_byte(&mut parser.cursor)?)
            .map(|_| fix_point_parser(parser))
            .collect::<Result<Vec<_>, _>>()?;
        let dynamic_hashes = (0..read_byte(&mut parser.cursor)?)
            .map(|_| parser.cursor.read_i32_varint())
            .collect::<Result<Vec<_>, _>>()?;
        json!({
            "IsDynamic": true,
            "PostfixExpr": {
                "OpCodes": opcodes,
                "FixedValues": fixed_values,
                "DynamicHashes": dynamic_hashes
            }
        })
    } else {
        let fixed_value = fix_point_parser(parser)?;
        json!({
            "IsDynamic": false,
            "FixedValue": fixed_value
        })
    })
}
/// Parses an optional `RPG.GameCore.ReadInfo`: a flag byte, then (when set)
/// a string and a varint id under obfuscated key names. Absent payloads
/// decode as JSON null.
fn read_info_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    if !read_bool(&mut parser.cursor)? {
        return Ok(Value::Null);
    }
    let name = String::from_bytes(&mut parser.cursor)?;
    let id = parser.cursor.read_i64_varint()?;
    Ok(json!({
        "AKFKONMJCEC": name,
        "EGMAFIOOKJJ": id
    }))
}
/// Parses `RPG.GameCore.JsonEnum`: two varints, index first then value.
fn json_enum_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    // Explicit lets keep the read order obvious (index precedes value).
    let enum_index = parser.cursor.read_i32_varint()?;
    let value = parser.cursor.read_i32_varint()?;
    Ok(json!({
        "EnumIndex": enum_index,
        "Value": value
    }))
}
/// Parses `RPG.Client.TextID`: the 32-bit hash is serialized before the
/// 64-bit one.
fn textid_parser<'a>(parser: &mut DynamicParser<'a>) -> anyhow::Result<Value> {
    let hash32 = parser.cursor.read_i32_varint()?;
    let hash64 = parser.cursor.read_u64_varint()?;
    Ok(json!({
        "Hash": hash32,
        "Hash64": hash64
    }))
}

365
crates/parser/src/lib.rs Normal file
View File

@ -0,0 +1,365 @@
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Cursor;
use anyhow::Context;
use serde::Deserialize;
use serde_json::json;
use serde_json::{Map, Value};
use tg_bytes_util::{ExistFlag, FromBytes};
use varint_rs::VarintReader;
mod custom_parser;
/// One schema node loaded from `data.json`; drives the reflection-style
/// decoding in `DynamicParser`.
#[derive(Debug, Deserialize)]
pub enum DataDefine {
    /// Reference type: fields are gated by a leading presence bitmask
    /// (`ExistFlag`), unless `skip_existflag_check` is set.
    Class {
        skip_existflag_check: Option<bool>,
        fields: Vec<DataField>,
        interfaces: Vec<String>,
    },
    /// Value type: every field is always present, no bitmask.
    Struct {
        fields: Vec<DataField>,
        interfaces: Vec<String>,
    },
    /// Polymorphic dispatch: a u64 type index selects one descendant kind.
    Typeindex {
        base: String,
        descendants: BTreeMap<u64, ValueKind>,
    },
    /// Enum definition; the mapping semantics come from the generated
    /// data.json — not visible here, TODO confirm.
    Enum(String, BTreeMap<String, String>),
}
/// One named field inside a `Class`/`Struct` definition.
#[derive(Debug, Deserialize)]
pub struct DataField {
    pub field_name: String,
    pub data_type: ValueKind,
}
/// Wire-type descriptor for a single value, loaded from the schema.
#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
pub enum ValueKind {
    /// C# primitive keyword ("int", "float", "string", ...).
    Primitive(String),
    Array(Box<ValueKind>),
    /// Key kind, then value kind.
    Dictionary(Box<ValueKind>, Box<ValueKind>),
    /// Named type resolved via `DataDefine` or a custom parser.
    Class(String),
    /// Unknown/unsupported kind.
    Other(),
}
/// Streaming schema-driven decoder: walks `cursor` according to the type
/// definitions registered in `types`.
pub struct DynamicParser<'a> {
    pub types: &'a HashMap<String, DataDefine>,
    pub cursor: Cursor<&'a Vec<u8>>,
}
impl<'a> DynamicParser<'a> {
    /// Creates a parser over `data` using `types` as the schema registry.
    ///
    /// NOTE(review): takes `&'a Vec<u8>` because the `cursor` field stores
    /// `Cursor<&'a Vec<u8>>`; `&'a [u8]` would be the more idiomatic shape
    /// but requires changing the struct definition too.
    pub fn new(types: &'a HashMap<String, DataDefine>, data: &'a Vec<u8>) -> Self {
        Self {
            types,
            cursor: Cursor::new(data),
        }
    }
    /// Decodes one value of `kind` from the cursor.
    ///
    /// `include_type` injects a leading `"$type"` member when `kind` is a
    /// class, so polymorphic values keep their concrete type name in the
    /// output.
    ///
    /// An exhausted buffer is not an error: each kind decodes to a neutral
    /// default (0 / [] / {} / null) instead.
    pub fn parse(&mut self, kind: &ValueKind, include_type: bool) -> anyhow::Result<Value> {
        if self.remaining() < 1 {
            tracing::debug!("{:?} buffer is empty", kind);
            return Ok(match kind {
                ValueKind::Primitive(_) => Value::Number(0.into()),
                ValueKind::Array(_) => Value::Array(Vec::with_capacity(0)),
                ValueKind::Dictionary(_, _) | ValueKind::Class(_) => {
                    Value::Object(Map::with_capacity(0))
                }
                ValueKind::Other() => Value::Null,
            });
        }
        Ok(match kind {
            // Primitive names are the C# type keywords from the schema dump;
            // integers are varints, floats are raw little-endian.
            ValueKind::Primitive(cs_type) => match cs_type.as_str() {
                "byte" => Value::Number(self.cursor.read_u8_varint()?.into()),
                "sbyte" => Value::Number(self.cursor.read_i8_varint()?.into()),
                "short" => Value::Number(self.cursor.read_i16_varint()?.into()),
                "ushort" => Value::Number(self.cursor.read_u16_varint()?.into()),
                "int" => Value::Number(self.cursor.read_i32_varint()?.into()),
                "uint" => Value::Number(self.cursor.read_u32_varint()?.into()),
                "long" => Value::Number(self.cursor.read_i64_varint()?.into()),
                "ulong" => Value::Number(serde_json::Number::from(self.cursor.read_u64_varint()?)),
                "float" => {
                    // NaN/inf cannot be represented in JSON; sanitize to 0.
                    let raw = f32::from_bytes(&mut self.cursor)? as f64;
                    let sanitized = if raw.is_finite() { raw } else { 0.0 };
                    let number = serde_json::Number::from_f64(sanitized)
                        .ok_or_else(|| anyhow::anyhow!("float should always be finite"))?;
                    Value::Number(number)
                }
                "double" => Value::Number(
                    serde_json::Number::from_f64(f64::from_bytes(&mut self.cursor)?)
                        .ok_or_else(|| anyhow::anyhow!("invalid double"))?,
                ),
                "bool" => Value::Bool(bool::from_bytes(&mut self.cursor)?),
                "string" => Value::String(String::from_bytes(&mut self.cursor)?),
                other => return Err(anyhow::format_err!("unhandled primitive: {other}")),
            },
            ValueKind::Dictionary(key, value) => {
                tracing::debug!(
                    "ValueKind::Dictionary(cursor_pos: {}) -> Dictionary<{:?}, {:?}>",
                    self.cursor.position(),
                    key,
                    value
                );
                let length = self.cursor.read_i64_varint()? as usize;
                tracing::debug!(
                    "ValueKind::Dictionary(cursor_pos: {}) -> Dictionary length: {}",
                    self.cursor.position(),
                    length
                );
                // Guard against corrupt length prefixes.
                if length > 1_000_000 {
                    return Err(anyhow::format_err!("attempting to allocate large memory!"));
                }
                let mut output = Map::with_capacity(length);
                for _ in 0..length {
                    let key = self.parse(key, false)?;
                    output.insert(
                        // JSON keys must be strings; non-string keys fall back
                        // to their JSON rendering.
                        if let Value::String(s) = key {
                            s
                        } else {
                            key.to_string()
                        },
                        self.parse(value, false)?,
                    );
                }
                Value::Object(output)
            }
            ValueKind::Array(value) => {
                tracing::debug!(
                    "ValueKind::Array(cursor_pos: {}) -> {:?}[]",
                    self.cursor.position(),
                    value
                );
                let length = self.cursor.read_i64_varint()? as usize;
                tracing::debug!(
                    "ValueKind::Array(cursor_pos: {}) -> Array length: {}",
                    self.cursor.position(),
                    length
                );
                // Guard against corrupt length prefixes.
                if length > 1_000_000 {
                    return Err(anyhow::format_err!("attempting to allocate large memory!"));
                }
                let mut output = Vec::with_capacity(length);
                for _ in 0..length {
                    output.push(self.parse(value, false)?);
                }
                Value::Array(output)
            }
            ValueKind::Class(class_name) => {
                tracing::debug!(
                    "ValueKind::Class(cursor_pos: {}) -> {}",
                    self.cursor.position(),
                    class_name
                );
                // Hand-written parsers take precedence over schema decoding.
                if let Some(custom) = custom_parser::CUSTOM_PARSER.get(class_name.as_str()) {
                    return custom(self);
                }
                let Some(define) = self.types.get(class_name) else {
                    return Err(anyhow::format_err!("unhandled type: {}", &class_name));
                };
                let mut result = self.parse_class_kind(define)?;
                if include_type {
                    // shift_insert keeps "$type" as the first member
                    // (serde_json `preserve_order` feature).
                    result.as_object_mut().and_then(|f| {
                        f.shift_insert(
                            0,
                            "$type".into(),
                            Value::String(
                                class_name
                                    .strip_suffix("Inner")
                                    .unwrap_or(class_name)
                                    .to_string(),
                            ),
                        )
                    });
                }
                result
            }
            _ => return Err(anyhow::format_err!("unknown data kind!")),
        })
    }
/// Decodes one value whose schema is the given [`DataDefine`], consuming bytes
/// from `self.cursor`.
///
/// Read order is significant: every branch must consume exactly the bytes the
/// schema describes, or every subsequent read is misaligned.
fn parse_class_kind(&mut self, data_type: &DataDefine) -> anyhow::Result<Value> {
    Ok(match data_type {
        DataDefine::Class {
            skip_existflag_check,
            fields,
            interfaces: _,
        } => {
            // Classes flagged to skip the exist-flag check carry no payload
            // here: emit an empty object without touching the cursor.
            if skip_existflag_check.is_some() {
                return Ok(json!({}));
            }
            // One presence bit per declared field, read up front; only fields
            // whose bit is set have data in the stream.
            let exist_flag = ExistFlag::new(&mut self.cursor, fields.len())?;
            let mut output = Map::with_capacity(fields.len());
            for (i, field) in fields.iter().enumerate() {
                if exist_flag.exists(i) {
                    tracing::debug!(
                        "DataDefine::Class(cursor_pos: {}) -> Key: {}",
                        self.cursor.position(),
                        field.field_name
                    );
                    let value = self.parse(&field.data_type, false)?;
                    tracing::debug!(
                        "DataDefine::Class(cursor_pos: {}) -> Value: {:?}",
                        self.cursor.position(),
                        value
                    );
                    output.insert(field.field_name.to_string(), value);
                } else {
                    tracing::debug!(
                        "DataDefine::Class(cursor_pos: {}) -> Field not exist! key: {}",
                        self.cursor.position(),
                        field.field_name
                    );
                }
            }
            Value::Object(output)
        }
        DataDefine::Struct {
            fields,
            interfaces: _,
        } => {
            // Structs have no exist flag: every declared field is present,
            // in declaration order.
            let mut output = Map::with_capacity(fields.len());
            for field in fields {
                tracing::debug!(
                    "DataDefine::Struct(cursor_pos: {}) -> Key: {}",
                    self.cursor.position(),
                    field.field_name
                );
                let value = self.parse(&field.data_type, false)?;
                tracing::debug!(
                    "DataDefine::Struct(cursor_pos: {}) -> Value: {:?}",
                    self.cursor.position(),
                    value
                );
                output.insert(field.field_name.to_string(), value);
            }
            Value::Object(output)
        }
        DataDefine::Typeindex { base, descendants } => {
            tracing::debug!(
                "DataDefine::Typeindex(cursor_pos: {})",
                self.cursor.position()
            );
            // A varint discriminant selects the concrete descendant type.
            let typeindex = self
                .cursor
                .read_u64_varint()
                .context("typeindex reading failed")?;
            let Some(descendant) = descendants.get(&typeindex) else {
                return Err(anyhow::format_err!(
                    "typeindex not exist! dict: {:?} type index: {}",
                    descendants,
                    typeindex
                ));
            };
            // A descendant may itself be another typeindex table; in that
            // case follow its entry 0 directly.
            if let ValueKind::Class(descendant) = descendant
                && let Some(DataDefine::Typeindex {
                    base: _,
                    descendants,
                }) = self.types.get(descendant)
                && let Some(descendant) = descendants.get(&0)
            {
                return self.parse(descendant, true);
            }
            tracing::debug!(
                "DataDefine::Typeindex(cursor_pos: {}) -> {} typeindex: {typeindex}",
                self.cursor.position(),
                base
            );
            // `include_type = true` so the emitted object carries "$type".
            return self.parse(descendant, true);
        }
        DataDefine::Enum(enum_type, enums) => {
            // Each arm reads the discriminant with the width matching the
            // enum's underlying type, then maps it to its symbolic name,
            // falling back to the raw number as a string.
            // NOTE(review): "ulong"/"ushort" log unknown discriminants at
            // debug level while "int"/"uint" use warn — confirm whether the
            // inconsistency is intentional.
            let enum_value = match enum_type.as_str() {
                "ulong" => {
                    let discriminant = self.cursor.read_u64_varint()?;
                    if let Some(enum_value) = enums.get(&discriminant.to_string()) {
                        enum_value
                    } else {
                        tracing::debug!(
                            "enum_value not exist! enums: {:?} discriminant: {}",
                            enums,
                            discriminant
                        );
                        &discriminant.to_string()
                    }
                }
                "int" => {
                    let discriminant = self.cursor.read_i32_varint()?;
                    if let Some(discriminant) = enums.get(&discriminant.to_string()) {
                        discriminant
                    } else {
                        tracing::warn!(
                            "enum_value not exist! enums: {:?} discriminant: {}",
                            enums,
                            discriminant
                        );
                        &format!("{discriminant}")
                    }
                }
                "uint" => {
                    let discriminant = self.cursor.read_u32_varint()?;
                    if let Some(discriminant) = enums.get(&discriminant.to_string()) {
                        discriminant
                    } else {
                        tracing::warn!(
                            "enum_value not exist! enums: {:?} discriminant: {}",
                            enums,
                            discriminant
                        );
                        &format!("{discriminant}")
                    }
                }
                "ushort" => {
                    let discriminant = self.cursor.read_u16_varint()?;
                    if let Some(discriminant) = enums.get(&discriminant.to_string()) {
                        discriminant
                    } else {
                        tracing::debug!(
                            "enum_value not exist! enums: {:?} discriminant: {}",
                            enums,
                            discriminant
                        );
                        &format!("{discriminant}")
                    }
                }
                _ => return Err(anyhow::format_err!("unsupported enum type: {}", enum_type)),
            };
            Value::String(enum_value.into())
        }
    })
}
#[inline]
fn remaining(&self) -> usize {
    // Bytes left between the current cursor position and the end of the
    // underlying buffer.
    let total = self.cursor.get_ref().len();
    let consumed = self.cursor.position() as usize;
    total - consumed
}
}

17
crates/program/Cargo.toml Normal file
View File

@ -0,0 +1,17 @@
[package]
name = "program"
edition = "2024"
version.workspace = true
[dependencies]
serde.workspace = true
serde_json.workspace = true
tracing.workspace = true
anyhow.workspace = true
rayon.workspace = true
dashmap.workspace = true
common.workspace = true
clap.workspace = true
tg-parser.workspace = true
tg-bytes-util.workspace = true

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.AdventureAbilityConfigList` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.AdventureAbilityConfigList";
    config_manifest
        .adventure_ability_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.AdventureModifierLookupTable` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.AdventureModifierLookupTable";
    config_manifest
        .adventure_modifier_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.ComplexSkillAIGlobalGroupLookup` path listed in
/// the manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.ComplexSkillAIGlobalGroupLookup";
    config_manifest
        .complex_skill_aiglobal_group_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.TurnBasedAbilityConfigList` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.TurnBasedAbilityConfigList";
    config_manifest
        .turn_based_ability_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.GlobalModifierConfig` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.GlobalModifierConfig";
    config_manifest
        .global_modifier_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.GlobalTaskListTemplateConfig` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.GlobalTaskListTemplateConfig";
    config_manifest
        .global_task_template
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,133 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::{Map, Value};
use std::{collections::HashMap, fs, path::Path};
use tg_parser::DataDefine;
fn parse_floor(
assets: &HashMap<i32, Vec<u8>>,
types: &HashMap<String, DataDefine>,
out_folder: &Path,
) -> Result<()> {
let maze_plane: Vec<Map<String, Value>> =
serde_json::from_slice(&fs::read(out_folder.join("ExcelOutput/MazePlane.json"))?)?;
// Flatten plane and floor ID tuples
let paths: Vec<_> = maze_plane
.iter()
.flat_map(|p| {
let list = p
.get("FloorIDList")
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|v| v.as_u64().unwrap() as u32)
.collect::<Vec<_>>();
let plane_id = p.get("PlaneID").unwrap().as_u64().unwrap() as u32;
list.into_iter().map(move |f| (plane_id, f))
})
.collect();
paths.par_iter().for_each(|(plane_id, floor_id)| {
let name = format!("P{plane_id}_F{floor_id}");
let configs = [
(
format!("Config/LevelOutput/RuntimeFloor/{name}.json"),
"RPG.GameCore.RtLevelFloorInfo",
),
(
format!("Config/LevelOutput_Baked/Floor/{name}_Baked.json"),
"RPG.GameCore.LevelFloorBakedInfo",
),
(
format!(
"Config/LevelOutput_Baked/FloorCrossMapBriefInfo/CrossMapBriefInfo_{name}.json"
),
"RPG.GameCore.LevelFloorCrossMapBriefInfo",
),
(
format!("Config/LevelOutput/Region/FloorRegion_{name}.json"),
"RPG.GameCore.LevelRegionInfos",
),
(
format!("Config/LevelOutput/RotatableRegion/RotatableRegion_Floor_{floor_id}.json"),
"RPG.GameCore.MapRotationConfig",
),
(
format!("Config/LevelOutput/EraFlipper/EraFlipper_Floor_{floor_id}.json"),
"RPG.GameCore.EraFlipperConfig",
),
(
format!("Config/LevelOutput/Map/MapInfo_{name}.json"),
"RPG.GameCore.LevelNavmapConfig",
),
];
for (path, type_name) in configs {
parse_and_count!(&path, type_name, assets, types, out_folder)
}
});
Ok(())
}
/// Collects every `GroupPath` referenced by the runtime-floor configs written
/// by `parse_floor` and parses each as `RPG.GameCore.RtLevelGroupInfoBase`.
///
/// # Errors
/// Fails only when the RuntimeFloor directory cannot be listed. Individual
/// floor files that cannot be read *or* parsed are skipped, so one corrupt
/// file no longer aborts the whole group pass (previously a JSON parse error
/// was propagated with `?` while read errors were skipped).
fn parse_group(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
) -> Result<()> {
    let runtime_floor = fs::read_dir(out_folder.join("Config/LevelOutput/RuntimeFloor"))?;
    let mut group_paths = Vec::new();
    for entry in runtime_floor.flatten() {
        let Ok(slice) = fs::read(entry.path()) else {
            continue;
        };
        let Ok(json) = serde_json::from_slice::<Value>(&slice) else {
            continue;
        };
        let Some(groups) = json
            .as_object()
            .and_then(|v| v.get("GroupInstanceList")?.as_array())
        else {
            continue;
        };
        // Extend directly from the iterator; no intermediate Vec needed.
        group_paths.extend(groups.iter().filter_map(|item| {
            item.as_object()
                .and_then(|v| v.get("GroupPath")?.as_str())
                .map(str::to_string)
        }));
    }
    group_paths.par_iter().for_each(|path| {
        parse_and_count!(
            path,
            "RPG.GameCore.RtLevelGroupInfoBase",
            assets,
            types,
            out_folder
        )
    });
    Ok(())
}
/// Entry point for level-output parsing: floors first (their JSON output under
/// `Config/LevelOutput/RuntimeFloor` is what the group pass reads), then groups.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    _: &ConfigManifest,
) -> Result<()> {
    parse_floor(assets, types, out_folder)?;
    parse_group(assets, types, out_folder)?;
    Ok(())
}

View File

@ -0,0 +1,129 @@
use super::{ConfigManifest, parse_config};
use crate::{COUNTER_CONFIGS, parse_and_count};
use anyhow::Result;
use dashmap::DashSet;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::Value;
use std::{
collections::{HashMap, HashSet},
fs,
path::Path,
sync::atomic::Ordering,
};
use tg_parser::DataDefine;
/// Reads `{name}.json` from `base_path` and inserts every entry's
/// `PerformancePath` (falling back to `ActPath`) string into `out`.
fn read_performance(base_path: &Path, name: &str, out: &mut HashSet<String>) -> Result<()> {
    let file = base_path.join(format!("{name}.json"));
    let entries: Vec<Value> = serde_json::from_slice(&fs::read(file)?)?;
    out.extend(entries.iter().filter_map(|item| {
        match item.get("PerformancePath").or_else(|| item.get("ActPath")) {
            Some(Value::String(path)) => Some(path.clone()),
            _ => None,
        }
    }));
    Ok(())
}
/// RPG.GameCore.LevelGraphInfo
///
/// Gathers performance/act paths from the performance excels and parses each
/// as a `RPG.GameCore.LevelGraphConfig`. Missing excel files are ignored.
fn parse_performances(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
) {
    const EXCELS: [&str; 8] = [
        "PerformanceA",
        "PerformanceC",
        "PerformanceCG",
        "PerformanceD",
        "PerformanceDS",
        "PerformanceE",
        "PerformanceVideo",
        "DialogueNPC",
    ];
    let base_path = out_folder.join("ExcelOutput");
    let mut performances = HashSet::new();
    for name in EXCELS {
        // Best effort: an unreadable excel simply contributes no paths.
        let _ = read_performance(&base_path, name, &mut performances);
    }
    performances.par_iter().for_each(|path| {
        parse_and_count!(
            path,
            "RPG.GameCore.LevelGraphConfig",
            assets,
            types,
            out_folder
        )
    });
}
/// RPG.GameCore.MainMissionInfoConfig
fn parse_mission_info(
assets: &HashMap<i32, Vec<u8>>,
types: &HashMap<String, DataDefine>,
out_folder: &Path,
) -> Result<()> {
let entries = serde_json::from_slice::<Vec<Value>>(&fs::read(
out_folder.join("ExcelOutput/MainMission.json"),
)?)?;
let paths = entries
.iter()
.filter_map(|item| {
if let Some(Value::Number(mission_id)) = item.get("MainMissionID") {
Some(format!(
"Config/Level/Mission/{mission_id}/MissionInfo_{mission_id}.json"
))
} else {
None
}
})
.collect::<HashSet<_>>();
let sub_mission_paths = DashSet::new();
paths.par_iter().for_each(|path| {
if let Ok(config) = parse_config(
path,
"RPG.GameCore.MainMissionInfoConfig",
assets,
types,
out_folder,
) {
COUNTER_CONFIGS.fetch_add(1, Ordering::Relaxed);
if let Some(Value::Array(sub_mission_list)) = config.get("SubMissionList") {
for sub_mission in sub_mission_list {
let Some(Value::String(json_path)) = sub_mission.get("MissionJsonPath") else {
continue;
};
sub_mission_paths.insert(json_path.to_string());
}
}
}
});
sub_mission_paths.par_iter().for_each(|path| {
parse_and_count!(
&path,
"RPG.GameCore.LevelGraphConfig",
assets,
types,
out_folder
)
});
Ok(())
}
/// Entry point for mission parsing.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    _: &ConfigManifest,
) -> Result<()> {
    // Best effort: a missing/unreadable MainMission.json must not stop the
    // performance pass below.
    let _ = parse_mission_info(assets, types, out_folder);
    parse_performances(assets, types, out_folder);
    Ok(())
}

View File

@ -0,0 +1,161 @@
use anyhow::{Context, Result};
use common::hash;
use rayon::iter::{IntoParallelRefIterator, ParallelBridge as _, ParallelIterator};
use serde::Deserialize;
use serde_json::{Value, json};
use std::{
collections::HashMap,
fs, panic,
path::{Path, PathBuf},
};
use tg_parser::{DataDefine, DynamicParser, ValueKind};
mod adventure_ability;
mod adventure_modifier;
mod complex_skill_ai_global;
mod config_ability;
mod global_modifier;
mod global_task_template;
mod level_output;
mod mission;
mod rogue_chest_map;
mod rogue_npc;
mod skill_tree_point_preset;
mod summon_unit;
mod video_caption;
/// Decodes one baked config via [`crate::actions::config::parse_config`] and,
/// when it returns `Ok`, bumps the global parsed-config counter.
/// Failures are ignored here (`parse_config` does its own error logging).
#[macro_export]
macro_rules! parse_and_count {
    ($path:expr, $type:expr, $assets:expr, $types:expr, $out_folder:expr) => {
        if $crate::actions::config::parse_config($path, $type, $assets, $types, $out_folder).is_ok()
        {
            $crate::COUNTER_CONFIGS.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        }
    };
}
/// Deserialized shape of `BakedConfig/ConfigManifest.json`: each field lists
/// the baked-config JSON paths of one config category. Commented-out fields
/// are not consumed by any current parse pass.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct ConfigManifest {
    adventure_ability_config: Vec<String>,
    turn_based_ability_config: Vec<String>,
    // battle_lineup_config: Vec<String>,
    // battle_lineup_avatar_config: Vec<String>,
    // battle_lineup_maze_buff_config: Vec<String>,
    battle_lineup_skill_tree_preset_config: Vec<String>,
    // #[serde(rename = "BattleLineupCEPresetConfig")]
    // battle_lineup_cepreset_config: Vec<String>,
    global_modifier_config: Vec<String>,
    adventure_modifier_config: Vec<String>,
    // Explicit rename: the manifest key's "AIGlobal" capitalization is not
    // what PascalCase renaming would derive from the field name.
    #[serde(rename = "ComplexSkillAIGlobalGroupConfig")]
    complex_skill_aiglobal_group_config: Vec<String>,
    global_task_template: Vec<String>,
    // common_skill_pool_config: Vec<String>,
}
#[inline]
/// Splits `"dir/file"` on the last `'/'`. A dir that is only a leading slash
/// becomes `"/"`; a path with no separator at all yields `None`.
fn split_path(path: &str) -> Option<(String, String)> {
    let (dir, file) = path.rsplit_once('/')?;
    let dir = if dir.is_empty() { "/" } else { dir };
    Some((dir.to_string(), file.to_string()))
}
/// Locates the baked asset for `json_path`, decodes it as `type_name`, and
/// writes the pretty-printed JSON to `out_folder`, preserving the original
/// directory layout.
///
/// The asset is looked up by the 32-bit hash of
/// `"BakedConfig/" + json_path` with the `.json` suffix replaced by `.bytes`.
///
/// # Errors
/// Returns `Err` only for a bad path, a missing asset, or an output write
/// failure.
/// NOTE(review): on a parse error or a parser panic this logs and returns
/// `Ok(json!({}))`, so callers such as `parse_and_count!` still count it as a
/// success — confirm that is intended.
fn parse_config(
    json_path: &str,
    type_name: &str,
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
) -> Result<Value> {
    let (folder_path, file_name) = split_path(json_path).context("Invalid path")?;
    let path_hash = hash::get_32bit_hash_const(&format!(
        "BakedConfig/{}",
        json_path.replace(".json", ".bytes")
    ));
    let bytes = assets.get(&path_hash).ok_or_else(|| {
        // tracing::debug!("Asset not found: {json_path} ({path_hash})");
        anyhow::anyhow!("Asset not found")
    })?;
    // catch_unwind: the dynamic parser may panic on malformed data; treat a
    // panic like a parse error instead of tearing down the rayon worker.
    match panic::catch_unwind(|| {
        let mut parser = DynamicParser::new(types, bytes);
        parser.parse(&ValueKind::Class(type_name.to_string()), false)
    }) {
        Ok(Ok(parsed)) => {
            let out_folder = out_folder.join(folder_path);
            fs::create_dir_all(&out_folder)?;
            let out_path = out_folder.join(file_name);
            fs::write(&out_path, serde_json::to_string_pretty(&parsed)?)
                .context(format!("Failed to write to {out_path:?}"))?;
            return Ok(parsed);
        }
        Ok(Err(err)) => tracing::error!("Parse error for {json_path} ({type_name}): {err:?}"),
        Err(err) => tracing::error!("Panic during parsing {json_path} ({type_name}): {err:?}"),
    }
    Ok(json!({}))
}
// Signature shared by every per-category config parser module below.
type ParseFn =
    fn(&HashMap<i32, Vec<u8>>, &HashMap<String, DataDefine>, &Path, &ConfigManifest) -> Result<()>;
/// Runs every per-category config parser (in parallel) against the downloaded
/// assets, then parses any extra user-supplied paths.
///
/// # Errors
/// Fails only when `ConfigManifest.json` is missing or malformed; individual
/// category failures are logged and swallowed.
pub fn parse_configs(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    additional_paths: Option<PathBuf>,
) -> Result<()> {
    tracing::info!("Parsing Configs...");
    let config_manifest_bytes = assets
        .get(&hash::get_32bit_hash_const(
            "BakedConfig/ConfigManifest.json",
        ))
        .ok_or_else(|| anyhow::anyhow!("ConfigManifest.json not found"))?;
    let config_manifest: ConfigManifest =
        serde_json::from_slice(config_manifest_bytes).context("Failed to parse ConfigManifest")?;
    let parses: &[(ParseFn, &str)] = &[
        (adventure_ability::parse, "adventure_ability"),
        (config_ability::parse, "config_ability"),
        (global_modifier::parse, "global_modifier"),
        (skill_tree_point_preset::parse, "skill_tree_point_preset"),
        (adventure_modifier::parse, "adventure_modifier"),
        (complex_skill_ai_global::parse, "complex_skill_ai_global"),
        (global_task_template::parse, "global_task_template"),
        (level_output::parse, "level_output"),
        (summon_unit::parse, "summon_unit"),
        (mission::parse, "mission"),
        (video_caption::parse, "video_caption"),
        (rogue_npc::parse, "rogue_npc"),
        (rogue_chest_map::parse, "rogue_chest_map"),
    ];
    parses.par_iter().for_each(|(parse_fn, name)| {
        // The `.unwrap()` converts a category's Err into a panic, which
        // catch_unwind then turns back into a logged failure — so one bad
        // category (panic or error) cannot abort the others.
        if let Err(err) =
            panic::catch_unwind(|| parse_fn(assets, types, out_folder, &config_manifest).unwrap())
        {
            tracing::error!("Failed to parse {}: {:?}", name, err);
        }
    });
    // Parse additional paths if provided
    // (a JSON object mapping type name -> list of baked paths; unreadable or
    // malformed input is silently treated as empty).
    additional_paths
        .and_then(|path| {
            std::fs::read(path).ok().and_then(|bytes| {
                serde_json::from_slice::<HashMap<String, Vec<String>>>(&bytes).ok()
            })
        })
        .unwrap_or_default()
        .iter()
        .flat_map(|(data_type, paths)| paths.iter().map(move |json_path| (json_path, data_type)))
        .par_bridge()
        .for_each(|(json_path, data_type)| {
            parse_and_count!(json_path, data_type, assets, types, out_folder);
        });
    Ok(())
}

View File

@ -0,0 +1,35 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::Value;
use std::{collections::HashMap, fs, path::Path};
use tg_parser::DataDefine;
/// RPG.GameCore.RogueChestMapConfig
pub fn parse(
assets: &HashMap<i32, Vec<u8>>,
types: &HashMap<String, DataDefine>,
out_folder: &Path,
_: &ConfigManifest,
) -> Result<()> {
let summon_unit: Vec<Value> = serde_json::from_slice(&fs::read(
out_folder.join("ExcelOutput/RogueDLCChessBoard.json"),
)?)?;
summon_unit.par_iter().for_each(|summon_unit| {
parse_and_count!(
summon_unit
.get("ChessBoardConfiguration")
.unwrap()
.as_str()
.unwrap(),
"RPG.GameCore.RogueChestMapConfig",
assets,
types,
out_folder
)
});
Ok(())
}

View File

@ -0,0 +1,84 @@
use super::ConfigManifest;
use crate::{COUNTER_CONFIGS, actions::config::parse_config, parse_and_count};
use anyhow::Result;
use dashmap::DashSet;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::Value;
use std::{collections::HashMap, fs, path::Path, sync::atomic::Ordering};
use tg_parser::DataDefine;
#[inline]
fn extract_npc_json_paths<P: AsRef<Path>>(path: P) -> std::io::Result<Vec<String>> {
let data = fs::read(path)?;
let json: Vec<Value> = serde_json::from_slice(&data)?;
Ok(json
.into_iter()
.filter_map(|item| item.get("NPCJsonPath")?.as_str().map(|s| s.to_string()))
.collect())
}
/// RPG.GameCore.RogueNPCConfig
///
/// Parses every rogue NPC config, then the dialogue / option graphs those
/// configs reference.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    _: &ConfigManifest,
) -> Result<()> {
    // NPC tables from the three rogue variants; unreadable files are ignored.
    let npc_excels = [
        "ExcelOutput/RogueNPC.json",
        "ExcelOutput/RogueTournNPC.json",
        "ExcelOutput/RogueMagicNPC.json",
    ];
    let mut paths = Vec::new();
    for excel in npc_excels {
        if let Ok(found) = extract_npc_json_paths(out_folder.join(excel)) {
            paths.extend(found);
        }
    }
    // Referenced graphs are discovered during the parallel first pass, so
    // they are collected into concurrent sets and parsed afterwards.
    let dialogue_paths = DashSet::new();
    let option_paths = DashSet::new();
    paths.par_iter().for_each(|path| {
        let Ok(config) = parse_config(
            path,
            "RPG.GameCore.RogueNPCConfig",
            assets,
            types,
            out_folder,
        ) else {
            return;
        };
        COUNTER_CONFIGS.fetch_add(1, Ordering::Relaxed);
        let Some(Value::Array(dialogue_list)) = config.get("DialogueList") else {
            return;
        };
        for dialogue in dialogue_list {
            if let Some(Value::String(json_path)) = dialogue.get("DialoguePath") {
                dialogue_paths.insert(json_path.to_string());
            }
            if let Some(Value::String(json_path)) = dialogue.get("OptionPath") {
                option_paths.insert(json_path.to_string());
            }
        }
    });
    dialogue_paths.par_iter().for_each(|path| {
        parse_and_count!(
            &path,
            "RPG.GameCore.LevelGraphConfig",
            assets,
            types,
            out_folder
        )
    });
    option_paths.par_iter().for_each(|path| {
        parse_and_count!(
            &path,
            "RPG.GameCore.RogueDialogueEventConfig",
            assets,
            types,
            out_folder
        )
    });
    Ok(())
}

View File

@ -0,0 +1,29 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{collections::HashMap, path::Path};
use tg_parser::DataDefine;
/// Parses every `RPG.GameCore.SkillTreePointPresetConfig` path listed in the
/// manifest and writes the decoded JSON under `out_folder`.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    config_manifest: &ConfigManifest,
) -> Result<()> {
    const TYPE_NAME: &str = "RPG.GameCore.SkillTreePointPresetConfig";
    config_manifest
        .battle_lineup_skill_tree_preset_config
        .par_iter()
        .for_each(|path| parse_and_count!(path, TYPE_NAME, assets, types, out_folder));
    Ok(())
}

View File

@ -0,0 +1,31 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::Value;
use std::{collections::HashMap, fs, path::Path};
use tg_parser::DataDefine;
/// RPG.GameCore.SummonUnitConfig
pub fn parse(
assets: &HashMap<i32, Vec<u8>>,
types: &HashMap<String, DataDefine>,
out_folder: &Path,
_: &ConfigManifest,
) -> Result<()> {
let summon_unit: Vec<Value> = serde_json::from_slice(&fs::read(
out_folder.join("ExcelOutput/SummonUnitData.json"),
)?)?;
summon_unit.par_iter().for_each(|summon_unit| {
parse_and_count!(
summon_unit.get("JsonPath").unwrap().as_str().unwrap(),
"RPG.GameCore.SummonUnitConfig",
assets,
types,
out_folder
)
});
Ok(())
}

View File

@ -0,0 +1,50 @@
use super::ConfigManifest;
use crate::parse_and_count;
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use serde_json::Value;
use std::{collections::HashMap, fs, path::Path};
use tg_parser::DataDefine;
#[inline]
fn extract_caption_paths<P: AsRef<Path>>(path: P) -> std::io::Result<Vec<String>> {
let data = fs::read(path)?;
let json: Vec<Value> = serde_json::from_slice(&data)?;
Ok(json
.into_iter()
.filter_map(|item| item.get("CaptionPath")?.as_str().map(|s| s.to_string()))
.collect())
}
/// RPG.GameCore.VideoCaptionConfig
///
/// Parses every non-empty caption path referenced by the video-related excels.
pub fn parse(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    _: &ConfigManifest,
) -> Result<()> {
    let caption_excels = [
        "ExcelOutput/VideoConfig.json",
        "ExcelOutput/CutSceneConfig.json",
        "ExcelOutput/LoopCGConfig.json",
    ];
    let mut paths = Vec::new();
    for excel in caption_excels {
        // Unreadable excels simply contribute no caption paths.
        if let Ok(found) = extract_caption_paths(out_folder.join(excel)) {
            paths.extend(found);
        }
    }
    paths
        .par_iter()
        .filter(|path| !path.is_empty())
        .for_each(|path| {
            parse_and_count!(
                path,
                "RPG.GameCore.VideoCaptionConfig",
                assets,
                types,
                out_folder
            )
        });
    Ok(())
}

View File

@ -0,0 +1,59 @@
use anyhow::Context;
use common::hash;
use rayon::iter::{IntoParallelRefIterator as _, ParallelIterator as _};
use std::{collections::HashMap, fs, path::Path, sync::atomic::Ordering};
use tg_parser::{DataDefine, DynamicParser, ValueKind};
use crate::COUNTER_EXCELS;
/// Decodes every excel listed in `excel_paths` (type name -> baked paths) and
/// writes each as pretty JSON under `out_folder/ExcelOutput`.
///
/// Textmap excels are skipped here — they are handled by the dedicated
/// textmap pass — and the skip now happens *before* parsing, so their bytes
/// are no longer decoded just to be thrown away.
///
/// # Errors
/// Fails only when the output directory cannot be created; per-file parse
/// errors are logged and skipped.
pub fn parse_all_excels(
    assets: &HashMap<i32, Vec<u8>>,
    types: &HashMap<String, DataDefine>,
    out_folder: &Path,
    excel_paths: &HashMap<String, Vec<String>>,
) -> anyhow::Result<()> {
    tracing::info!("Parsing Excels...");
    let out_excel = out_folder.join("ExcelOutput");
    if !out_excel.is_dir() {
        fs::create_dir_all(&out_excel).context("Failed create ExcelOutput directory")?;
    }
    excel_paths.par_iter().for_each(|(type_name, paths)| {
        // Every excel of this type is an array of `type_name` records.
        let kind = ValueKind::Array(Box::new(ValueKind::Class(type_name.to_string())));
        for path in paths {
            let Some(bytes) = assets.get(&hash::get_32bit_hash_const(path)) else {
                continue;
            };
            let file_name = path.split('/').next_back().unwrap().replace(".bytes", ".json");
            // Skip textmaps before doing any parse work.
            if file_name.starts_with("Textmap") {
                continue;
            }
            // Skip empty first byte
            // NOTE(review): the copy here could be dropped if
            // DynamicParser::new accepts a plain byte slice — confirm its
            // signature before changing.
            let bytes = if !bytes.is_empty() && bytes[0] == 0 {
                &bytes[1..]
            } else {
                bytes
            }
            .to_vec();
            let mut parser = DynamicParser::new(types, &bytes);
            match parser.parse(&kind, false) {
                Ok(parsed) => {
                    let file_out = out_excel.join(file_name);
                    fs::write(file_out, serde_json::to_string_pretty(&parsed).unwrap()).unwrap();
                    COUNTER_EXCELS.fetch_add(1, Ordering::Relaxed)
                }
                Err(err) => {
                    tracing::error!("failed to parse {:?} {} {err}", kind, path);
                }
            }
        }
    });
    Ok(())
}

View File

@ -0,0 +1,3 @@
pub mod config;
pub mod excel;
pub mod textmap;

View File

@ -0,0 +1,230 @@
use std::{collections::HashMap, io::Cursor, path::Path, sync::atomic::Ordering};
use anyhow::Result;
use common::hash::get_32bit_hash_const;
use serde::Serialize;
use serde_json::{Map, Value};
use tg_bytes_util::{ExistFlag, FromBytes};
use crate::COUNTER_TEXTMAPS;
/// Output file name → 32-bit path hash of the corresponding baked textmap
/// asset. The second half covers the `TextmapMain_*` variants, written as
/// `TextMapMain*.json`.
pub const TEXTMAP_PATHS: [(&str, i32); 28] = [
    (
        "TextMapEN.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_en.bytes"),
    ),
    (
        "TextMapCN.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_cn.bytes"),
    ),
    (
        "TextMapKR.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_kr.bytes"),
    ),
    (
        "TextMapJP.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_jp.bytes"),
    ),
    (
        "TextMapID.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_id.bytes"),
    ),
    (
        "TextMapCHS.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_chs.bytes"),
    ),
    (
        "TextMapCHT.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_cht.bytes"),
    ),
    (
        "TextMapDE.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_de.bytes"),
    ),
    (
        "TextMapES.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_es.bytes"),
    ),
    (
        "TextMapFR.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_fr.bytes"),
    ),
    (
        "TextMapRU.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_ru.bytes"),
    ),
    (
        "TextMapTH.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_th.bytes"),
    ),
    (
        "TextMapVI.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_vi.bytes"),
    ),
    (
        "TextMapPT.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/Textmap_pt.bytes"),
    ),
    (
        "TextMapMainEN.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_en.bytes"),
    ),
    (
        "TextMapMainCN.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_cn.bytes"),
    ),
    (
        "TextMapMainKR.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_kr.bytes"),
    ),
    (
        "TextMapMainJP.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_jp.bytes"),
    ),
    (
        "TextMapMainID.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_id.bytes"),
    ),
    (
        "TextMapMainCHS.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_chs.bytes"),
    ),
    (
        "TextMapMainCHT.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_cht.bytes"),
    ),
    (
        "TextMapMainDE.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_de.bytes"),
    ),
    (
        "TextMapMainES.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_es.bytes"),
    ),
    (
        "TextMapMainFR.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_fr.bytes"),
    ),
    (
        "TextMapMainRU.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_ru.bytes"),
    ),
    (
        "TextMapMainTH.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_th.bytes"),
    ),
    (
        "TextMapMainVI.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_vi.bytes"),
    ),
    (
        "TextMapMainPT.json",
        get_32bit_hash_const("BakedConfig/ExcelOutput/TextmapMain_pt.bytes"),
    ),
];
/// Identifier pair of a textmap row: the 32-bit hash and its 64-bit variant.
#[derive(Serialize)]
#[serde(rename_all = "PascalCase")]
struct TextID {
    // Serialized as "Hash".
    pub hash: i32,
    // Serialized as "Hash64" (PascalCase rename of `hash_64`).
    pub hash_64: u64,
}
impl FromBytes for TextID {
    /// Binary layout: the 32-bit hash first, then the 64-bit hash.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        Ok(Self {
            hash: i32::from_bytes(r)?,
            hash_64: u64::from_bytes(r)?,
        })
    }
}
/// One decoded textmap entry.
#[derive(Serialize)]
#[serde(rename_all = "PascalCase")]
struct TextMapRow {
    // Hash pair identifying this string; absent in some rows (see FromBytes).
    #[serde(rename = "ID")]
    pub id: Option<TextID>,
    // The localized text itself.
    pub text: String,
    // presumably marks strings containing template parameters — name-based
    // inference, confirm against the game data.
    pub has_param: bool,
}
impl FromBytes for TextMapRow {
    /// Reads one row: a 3-field exist flag first, then each field only when
    /// its bit is set. Absent fields fall back to `None` / "" / `false`.
    fn from_bytes<T: std::io::Seek + std::io::Read>(r: &mut T) -> std::io::Result<Self> {
        let exist_flag = ExistFlag::new(r, 3)?;
        Ok(Self {
            id: if exist_flag.exists(0) {
                Some(TextID::from_bytes(r)?)
            } else {
                None
            },
            text: if exist_flag.exists(1) {
                String::from_bytes(r)?
            } else {
                String::with_capacity(0)
            },
            has_param: if exist_flag.exists(2) {
                bool::from_bytes(r)?
            } else {
                false
            },
        })
    }
}
/// Decodes every known textmap asset present in `assets` and writes it under
/// `out_folder/TextMap`.
///
/// With `minimal` set, each file is written as a flat `{ "<hash>": "<text>" }`
/// object; otherwise the full row structure is serialized.
///
/// # Errors
/// Fails when the output directory cannot be created, or when serializing or
/// writing a file fails (previously those were `.unwrap()` panics inside a
/// `Result`-returning function).
pub fn parse_all_textmap(
    assets: &HashMap<i32, Vec<u8>>,
    out_folder: &Path,
    minimal: bool,
) -> Result<()> {
    tracing::info!("Parsing Textmaps...");
    let out_folder = out_folder.join("TextMap");
    if !out_folder.exists() {
        std::fs::create_dir_all(&out_folder)?;
    }
    for (name, hash) in TEXTMAP_PATHS {
        let Some(asset) = assets.get(&hash) else {
            continue;
        };
        // Skip empty first byte
        let asset = if !asset.is_empty() && asset[0] == 0 {
            &asset[1..]
        } else {
            asset
        };
        let out_path = out_folder.join(name);
        let mut cursor = Cursor::new(asset);
        // Undecodable textmaps are skipped rather than failing the batch.
        let Ok(parsed) = Vec::<TextMapRow>::from_bytes(&mut cursor) else {
            continue;
        };
        let json = if minimal {
            // Flatten to hash -> text; a row without an ID keys under "0".
            let map: Map<_, _> = parsed
                .into_iter()
                .map(|row| {
                    (
                        row.id.map(|v| v.hash).unwrap_or_default().to_string(),
                        Value::String(row.text),
                    )
                })
                .collect();
            serde_json::to_string_pretty(&map)?
        } else {
            serde_json::to_string_pretty(&parsed)?
        };
        std::fs::write(out_path, json)?;
        // Count the file in both output modes (the counter previously only
        // tracked minimal output).
        COUNTER_TEXTMAPS.fetch_add(1, Ordering::Relaxed);
    }
    Ok(())
}

67
crates/program/src/cli.rs Normal file
View File

@ -0,0 +1,67 @@
use std::path::PathBuf;
use clap::{Args, Parser, Subcommand};
// Top-level CLI; all behavior dispatches on the chosen subcommand.
// (Plain `//` comments here: clap turns `///` into --help text.)
#[derive(Parser)]
#[command(
    name = "tg-parser",
    version = "1.0",
    about = "Tool for parsing certain anime game resources."
)]
pub struct Cli {
    #[command(subcommand)]
    pub command: Command,
}
// NOTE: the `///` comments below double as clap help text — editing them
// changes the CLI's --help output.
#[derive(Subcommand)]
pub enum Command {
    /// Process textmap only
    Textmap {
        /// Persistent path or design data URL
        input_url: String,
        /// Output directory
        output_dir: PathBuf,
        /// Parse full textmap structure as array, rather than just key-value pair
        #[arg(long)]
        full_textmap: bool,
        /// Save .bytes file after downloading the files
        #[arg(long, name = "save-bytes-file")]
        save_bytes_file: bool,
    },
    /// Process excel only
    Excels(ExcelArgs),
    /// Process excel, config, textmap parse
    All(ExcelArgs),
}
// Shared arguments for the `excels` and `all` subcommands.
// NOTE: field `///` comments are clap help text — do not edit casually.
#[derive(Args)]
pub struct ExcelArgs {
    /// data.json schema file path
    pub data_json: String,
    /// excel_paths.json file path
    pub excel_path_json: String,
    /// Persistent path or design data URL
    pub input_url: String,
    /// Output directory
    pub output_dir: PathBuf,
    /// Parse full textmap structure as array, rather than just key-value pair
    #[arg(long)]
    pub full_textmap: bool,
    /// Save .bytes file after downloading the files
    #[arg(long, name = "save-bytes-file")]
    pub save_bytes_file: bool,
    /// Log all error into console
    #[arg(long, name = "log-error")]
    pub log_error: bool,
    /// Additional configs path to parse, with type as key, and array of paths as values
    #[arg(long, name = "config-paths")]
    pub config_paths: Option<PathBuf>,
}

141
crates/program/src/main.rs Normal file
View File

@ -0,0 +1,141 @@
use crate::{
actions::textmap::TEXTMAP_PATHS,
cli::{Cli, Command},
};
use anyhow::{Context as _, Result};
use clap::Parser;
use common::downloader;
use std::{
collections::HashMap,
fs,
sync::atomic::{AtomicI32, Ordering},
time::Instant,
};
use tg_parser::DataDefine;
use tracing::Level;
mod actions;
mod cli;
// Global progress counters, updated with relaxed atomics by the parse passes
// and read once at the end of `main` for the summary log line.
// NOTE(review): presumably the parse actions run in parallel (rayon is a
// workspace dep), hence atomics rather than plain counters — confirm in actions/.
pub static COUNTER_CONFIGS: AtomicI32 = AtomicI32::new(0);
pub static COUNTER_EXCELS: AtomicI32 = AtomicI32::new(0);
pub static COUNTER_TEXTMAPS: AtomicI32 = AtomicI32::new(0);
/// CLI entry point: parses arguments, downloads the design-data assets, and
/// dispatches to the requested parse passes (textmap only, excels only, or all).
fn main() -> Result<()> {
    let cli = Cli::parse();
    match &cli.command {
        Command::Textmap {
            input_url,
            output_dir,
            full_textmap,
            save_bytes_file,
        } => {
            common::logging::init(Level::INFO);

            let start = Instant::now();
            let assets = downloader::download_all_design_data(
                input_url.clone(),
                // Keep the raw .bytes files on disk only when explicitly requested.
                save_bytes_file.then(|| output_dir.clone()),
                TEXTMAP_PATHS.iter().map(|v| v.1).collect(),
            )?;
            tracing::info!("Download Done! Took {}s", start.elapsed().as_secs());

            let start = Instant::now();
            // `full_textmap` asks for the full array structure, so "minimal" is its inverse.
            actions::textmap::parse_all_textmap(&assets, output_dir, !full_textmap)?;
            tracing::info!("Textmap Parse Done! Took {}ms", start.elapsed().as_millis());
        }
        Command::Excels(args) | Command::All(args) => {
            // With --log-error, per-item errors go to the console; otherwise
            // only info-level progress is shown.
            if args.log_error {
                common::logging::init(Level::INFO)
            } else {
                common::logging::init_info_only();
            }

            let assets = downloader::download_all_design_data(
                args.input_url.clone(),
                args.save_bytes_file.then(|| args.output_dir.clone()),
                // No extra path filter: download everything.
                Vec::new(),
            )?;

            let start = Instant::now();
            let excel_paths: HashMap<String, Vec<String>> = serde_json::from_slice(
                &fs::read(&args.excel_path_json).context("Failed to read excel_paths.json")?,
            )?;
            let types: HashMap<String, DataDefine> = serde_json::from_slice(
                &fs::read(&args.data_json).context("Failed to read data.json")?,
            )?;
            actions::excel::parse_all_excels(&assets, &types, &args.output_dir, &excel_paths)?;

            // `All` additionally runs the config and textmap passes on the
            // same downloaded asset set.
            if matches!(cli.command, Command::All(_)) {
                actions::config::parse_configs(
                    &assets,
                    &types,
                    &args.output_dir,
                    args.config_paths.clone(),
                )?;
                actions::textmap::parse_all_textmap(&assets, &args.output_dir, !args.full_textmap)?;
            }

            tracing::info!(
                "Parsed {} Excels, {} Configs, and {} Textmaps in {}s",
                COUNTER_EXCELS.load(Ordering::Relaxed),
                COUNTER_CONFIGS.load(Ordering::Relaxed),
                COUNTER_TEXTMAPS.load(Ordering::Relaxed),
                start.elapsed().as_secs()
            );
        }
    }
    Ok(())
}
// fn main() {
// use std::collections::HashMap;
// use tg_parser::{DynamicParser, ValueKind};
// let assets = common::downloader::download_all_design_data(
// String::from(
// "C:/Data/hoyoreverse/StarRail_3.3.51/StarRail_Data/Persistent/DesignData/Windows",
// ),
// None,
// )
// .unwrap();
// common::logging::init(tracing::Level::DEBUG);
// let bytes = assets
// .get(&common::hash::get_32bit_hash(
// "BakedConfig/Config/AudioConfig.bytes",
// ))
// .unwrap();
// let bytes = &bytes[12..].to_vec();
// let schema: HashMap<String, DataDefine> =
// serde_json::from_slice(&std::fs::read("data.json").unwrap()).unwrap();
// let mut parser = DynamicParser::new(&schema, &bytes);
// let parsed = parser
// .parse(
// &ValueKind::Class(String::from("RPG.GameCore.AudioConfig")),
// false,
// )
// .unwrap();
// std::fs::write("ss.json", serde_json::to_string_pretty(&parsed).unwrap()).unwrap();
// }