Compare commits
38 Commits
8a06185b98
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| ccc9a894b7 | |||
| cdfab8fd1e | |||
| 99aecaefde | |||
| 642ba643ad | |||
| 3720b6ad80 | |||
| 1072186ff1 | |||
| d99c546499 | |||
| 557ffa7e53 | |||
| 8438dabf0b | |||
| ebee7fd19c | |||
| c7a31ce30e | |||
| 44b9a67800 | |||
| 80ccd375de | |||
| 6d6e56042b | |||
| 30e66d4b04 | |||
| 80f70a1bac | |||
| d2bbd3e5f6 | |||
| db45105d90 | |||
| 06dcfb7a9c | |||
| 185d324efe | |||
| beecbd33c6 | |||
| 2efa1aa86d | |||
| be061cb3a4 | |||
| d294748b88 | |||
| 1488f0398d | |||
| 768188134a | |||
| 0f2cb68fa4 | |||
| dea28f5b9c | |||
| 1867c5559b | |||
| c58488c5fa | |||
| 71a031c3f9 | |||
| 16e83aca67 | |||
| b3f09bb742 | |||
| 31662cda3b | |||
| 37d6a9e6fc | |||
| 708cf77df1 | |||
| c570e2b11a | |||
| 29813a46ef |
@@ -12,7 +12,40 @@
|
||||
"Bash(ls:*)",
|
||||
"Bash(find:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(wc:*)"
|
||||
"Bash(wc:*)",
|
||||
"Bash(pgrep:*)",
|
||||
"Bash(cargo doc:*)",
|
||||
"Bash(xargs dirname:*)",
|
||||
"Bash(xargs -I {} find {} -name \"*.cs\")",
|
||||
"Bash(RUST_LOG=debug cargo run:*)",
|
||||
"WebSearch",
|
||||
"Bash(cargo search:*)",
|
||||
"Bash(cargo install:*)",
|
||||
"Bash(diesel setup:*)",
|
||||
"Bash(diesel migration generate:*)",
|
||||
"Bash(diesel migration run:*)",
|
||||
"Bash(sqlite3:*)",
|
||||
"Bash(diesel migration redo:*)",
|
||||
"Bash(tree:*)",
|
||||
"Bash(timeout 180 cargo build:*)",
|
||||
"Bash(timeout 5 cargo run:*)",
|
||||
"Bash(DATABASE_URL=\"../cursebreaker.db\" timeout 10 cargo run:*)",
|
||||
"Bash(DATABASE_URL=\"../cursebreaker.db\" timeout -s TERM 3 cargo run:*)",
|
||||
"Bash(curl:*)",
|
||||
"Bash(diesel print-schema:*)",
|
||||
"Bash(time cargo run:*)",
|
||||
"Bash(DATABASE_URL=../cursebreaker.db diesel migration:*)",
|
||||
"Bash(DATABASE_URL=cursebreaker.db diesel migration:*)",
|
||||
"Bash(DATABASE_URL=../cursebreaker-parser/cursebreaker.db cargo run:*)",
|
||||
"Bash(identify:*)",
|
||||
"Bash(diesel migration revert:*)",
|
||||
"Bash(xargs:*)",
|
||||
"Bash(ss:*)",
|
||||
"Bash(timeout 10 cargo run:*)",
|
||||
"Bash(timeout 60 cargo run:*)",
|
||||
"Bash(DATABASE_URL=../cursebreaker.db diesel print-schema:*)",
|
||||
"Bash(DATABASE_URL=../cursebreaker.db diesel database:*)",
|
||||
"Bash(DATABASE_URL=cursebreaker.db CB_ASSETS_PATH=/home/connor/repos/CBAssets cargo run:*)"
|
||||
],
|
||||
"additionalDirectories": [
|
||||
"/home/connor/repos/CBAssets/"
|
||||
|
||||
1
.env
Normal file
1
.env
Normal file
@@ -0,0 +1 @@
|
||||
DATABASE_URL=/home/connor/repos/cursebreaker-parser-rust/cursebreaker.db
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -19,3 +19,5 @@ target/
|
||||
|
||||
# Test data (cloned Unity projects for integration tests)
|
||||
test_data/
|
||||
cursebreaker.db
|
||||
**/cursebreaker.db
|
||||
|
||||
2246
Cargo.lock
generated
2246
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
[workspace]
|
||||
members = ["unity-parser", "unity-parser-macros"]
|
||||
members = ["unity-parser", "unity-parser-macros", "cursebreaker-parser", "cursebreaker-map"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
|
||||
81
README.md
81
README.md
@@ -253,78 +253,6 @@ Raw YAML Documents
|
||||
- New: O(1) integer comparison
|
||||
- **Significant speedup** for HashMap lookups
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
cursebreaker-parser-rust/
|
||||
├── unity-parser/ # Main library crate
|
||||
│ ├── src/
|
||||
│ │ ├── ecs/ # ECS world building
|
||||
│ │ │ └── builder.rs
|
||||
│ │ ├── model/ # UnityFile, Scene, Prefab, Asset
|
||||
│ │ │ └── mod.rs
|
||||
│ │ ├── parser/ # YAML parsing & GUID resolution
|
||||
│ │ │ ├── guid_resolver.rs # Script GUID → Class Name
|
||||
│ │ │ ├── prefab_guid_resolver.rs # Prefab GUID → Path
|
||||
│ │ │ ├── meta.rs # .meta file parsing
|
||||
│ │ │ ├── yaml.rs # YAML document splitting
|
||||
│ │ │ └── mod.rs
|
||||
│ │ ├── project/ # ⚠️ OUTDATED - needs refactoring
|
||||
│ │ │ └── mod.rs
|
||||
│ │ ├── types/ # Unity types & components
|
||||
│ │ │ ├── unity_types/
|
||||
│ │ │ │ ├── game_object.rs
|
||||
│ │ │ │ ├── transform.rs
|
||||
│ │ │ │ ├── prefab_instance.rs
|
||||
│ │ │ │ └── mod.rs
|
||||
│ │ │ ├── component.rs # UnityComponent trait & helpers
|
||||
│ │ │ ├── guid.rs # 128-bit GUID type
|
||||
│ │ │ ├── ids.rs # FileID, LocalID
|
||||
│ │ │ ├── reference.rs # UnityReference enum
|
||||
│ │ │ ├── type_filter.rs # TypeFilter for selective parsing
|
||||
│ │ │ ├── values.rs # Vector3, Quaternion, Color, etc.
|
||||
│ │ │ └── mod.rs
|
||||
│ │ ├── error.rs # Error types
|
||||
│ │ ├── macros.rs
|
||||
│ │ ├── property/
|
||||
│ │ └── lib.rs
|
||||
│ ├── examples/
|
||||
│ │ ├── basic_parsing.rs
|
||||
│ │ ├── custom_component.rs
|
||||
│ │ ├── ecs_integration.rs
|
||||
│ │ ├── find_playsfx.rs
|
||||
│ │ ├── parse_resources.rs
|
||||
│ │ └── parse_resource_prefabs.rs
|
||||
│ ├── tests/
|
||||
│ └── Cargo.toml
|
||||
├── unity-parser-macros/ # Proc macro crate (⚠️ has bugs)
|
||||
│ ├── src/
|
||||
│ │ └── lib.rs
|
||||
│ └── Cargo.toml
|
||||
├── Cargo.toml # Workspace config
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
|
||||
### Critical Issues
|
||||
1. **`unity-parser/src/project/mod.rs` is OUTDATED**
|
||||
- Built for old architecture before `UnityFile` enum refactor
|
||||
- References non-existent `UnityDocument` type (should be `RawDocument`)
|
||||
- Module is disabled in lib.rs until refactored
|
||||
|
||||
2. **Derive macro namespace mismatch**
|
||||
- `unity-parser-macros` uses `unity_parser` namespace
|
||||
- Actual crate name is `unity_parser::` (underscore, not hyphen)
|
||||
- Manual `UnityComponent` implementation recommended
|
||||
|
||||
3. **Placeholder values in Cargo.toml**
|
||||
- Author and repository fields need updating
|
||||
|
||||
### Minor Issues
|
||||
1. Disabled example/test files may reference outdated APIs
|
||||
2. Some examples may have incorrect YAML access patterns
|
||||
|
||||
## Running Examples
|
||||
|
||||
```bash
|
||||
@@ -386,15 +314,6 @@ Contributions welcome! Areas needing help:
|
||||
- **Testing**: Integration tests with real Unity projects
|
||||
- **Performance**: Optimize YAML parsing, parallel processing
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of:
|
||||
|
||||
- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
|
||||
- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
- **Unity Technologies**: For the YAML-based file format
|
||||
|
||||
12
cursebreaker-map/.gitignore
vendored
Normal file
12
cursebreaker-map/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# Rust
|
||||
/target/
|
||||
Cargo.lock
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
|
||||
# Database
|
||||
*.db
|
||||
*.db-shm
|
||||
*.db-wal
|
||||
20
cursebreaker-map/Cargo.toml
Normal file
20
cursebreaker-map/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "cursebreaker-map"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
axum = "0.7"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tower = "0.4"
|
||||
tower-http = { version = "0.5", features = ["fs", "cors"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
diesel = { version = "2.1", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
|
||||
dotenvy = "0.15"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = "0.3"
|
||||
base64 = "0.22"
|
||||
|
||||
[dependencies.cursebreaker-parser]
|
||||
path = "../cursebreaker-parser"
|
||||
126
cursebreaker-map/README.md
Normal file
126
cursebreaker-map/README.md
Normal file
@@ -0,0 +1,126 @@
|
||||
# Cursebreaker Interactive Map
|
||||
|
||||
An interactive web-based map viewer for "The Black Grimoire: Cursebreaker" game, built with Rust (Axum) and Leaflet.js.
|
||||
|
||||
## Features
|
||||
|
||||
- **Optimized Tile Loading**: Uses merged tiles to reduce HTTP requests
|
||||
- Zoom level 0: ~31 tiles (4×4 merged)
|
||||
- Zoom level 1: ~105 tiles (2×2 merged)
|
||||
- Zoom level 2: ~345 tiles (original tiles)
|
||||
- **Lossless Compression**: All tiles use lossless WebP for optimal quality
|
||||
- **High-Performance Rendering**: Serves tiles directly from SQLite database
|
||||
- **Interactive Navigation**: Pan and zoom through the game world
|
||||
- **Dark Theme UI**: Game-themed dark interface with collapsible sidebar
|
||||
- **Real-time Coordinates**: Display tile and pixel coordinates while hovering
|
||||
|
||||
## Architecture
|
||||
|
||||
### Backend (Rust + Axum)
|
||||
- **Tile Server**: Serves WebP-compressed map tiles from SQLite database
|
||||
- **API Endpoints**:
|
||||
- `GET /api/tiles/:z/:x/:y` - Retrieve tile at coordinates (x, y) and zoom level z
|
||||
- `GET /api/bounds` - Get map bounds (min/max x/y coordinates)
|
||||
- `GET /` - Serve static frontend files
|
||||
|
||||
### Frontend (Leaflet.js)
|
||||
- **Image Overlay Layer**: Each merged tile is rendered as a positioned image overlay
|
||||
- **Merged Tile System**: Reduces HTTP requests by merging tiles at lower zoom levels:
|
||||
- Zoom 0: 4×4 original tiles merged into 512px images (~31 total requests)
|
||||
- Zoom 1: 2×2 original tiles merged into 512px images (~105 total requests)
|
||||
- Zoom 2: Original 512px tiles (1×1, ~345 total requests)
|
||||
- **Fixed Coordinate System**: Uses Leaflet's CRS.Simple with tiles positioned at their exact pixel coordinates
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Rust (latest stable)
|
||||
- SQLite database at `../cursebreaker.db` with `minimap_tiles` table populated
|
||||
|
||||
## Running the Map Viewer
|
||||
|
||||
### First Time Setup
|
||||
|
||||
1. **Generate all map tiles** (only needed once, or after updating minimap images):
|
||||
```bash
|
||||
cd cursebreaker-parser
|
||||
cargo run --bin image-parser --release
|
||||
```
|
||||
This processes all PNG files and automatically generates all 3 zoom levels (takes ~1.5 minutes)
|
||||
|
||||
2. **Start the map server**:
|
||||
```bash
|
||||
cd ../cursebreaker-map
|
||||
cargo run --release
|
||||
```
|
||||
|
||||
3. **Open in browser**:
|
||||
Navigate to `http://127.0.0.1:3000`
|
||||
|
||||
### Subsequent Runs
|
||||
|
||||
Just start the server (step 2 above). All tiles are stored in the database.
|
||||
|
||||
## Database Configuration
|
||||
|
||||
By default, the server looks for the database at `../cursebreaker.db`. You can override this with the `DATABASE_URL` environment variable:
|
||||
|
||||
```bash
|
||||
DATABASE_URL=/path/to/cursebreaker.db cargo run --release
|
||||
```
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
The sidebar includes placeholders for upcoming features:
|
||||
|
||||
- **Icon Filtering**: Toggle visibility of shops, resources, fast travel points, workbenches, etc.
|
||||
- **Map Markers**: Display game entities (shops, resources, NPCs) with clickable info popups
|
||||
- **Search**: Find locations by name
|
||||
- **Pathfinding**: Calculate routes between points
|
||||
- **Layer Control**: Toggle different map overlays
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
cursebreaker-map/
|
||||
├── Cargo.toml # Rust dependencies
|
||||
├── src/
|
||||
│ └── main.rs # Axum web server
|
||||
├── static/
|
||||
│ ├── index.html # Main HTML page
|
||||
│ ├── style.css # Styling (dark theme)
|
||||
│ └── map.js # Leaflet map initialization
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## Performance Notes
|
||||
|
||||
- **Merged Tiles**: Reduces HTTP requests by up to 91% at lowest zoom (31 vs 345 requests)
|
||||
- **Lossless WebP**: High quality compression without artifacts
|
||||
- **Database Storage**: All tiles served directly from SQLite BLOBs (no file I/O)
|
||||
- **CRS.Simple**: Avoids expensive geographic coordinate projections
|
||||
- **Total Storage**: ~111 MB for all zoom levels combined
|
||||
|
||||
### Load Performance Comparison
|
||||
|
||||
| Zoom Level | Merge Factor | Tiles Loaded | HTTP Requests Saved |
|
||||
|------------|--------------|--------------|---------------------|
|
||||
| 0 (zoomed out) | 4×4 | 31 | 91% fewer requests |
|
||||
| 1 (medium) | 2×2 | 105 | 70% fewer requests |
|
||||
| 2 (zoomed in) | 1×1 | 345 | baseline |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Tiles not loading**:
|
||||
- Verify database path is correct
|
||||
- Check that `minimap_tiles` table is populated
|
||||
- Look for errors in server console output
|
||||
|
||||
**Map appears blank**:
|
||||
- Check browser console for JavaScript errors
|
||||
- Verify `/api/bounds` returns valid coordinates
|
||||
- Ensure tiles exist for the displayed coordinate range
|
||||
|
||||
**Performance issues**:
|
||||
- Try running in release mode: `cargo run --release`
|
||||
- Check database is on fast storage (SSD)
|
||||
- Reduce browser zoom level to load lower-resolution tiles
|
||||
505
cursebreaker-map/src/main.rs
Normal file
505
cursebreaker-map/src/main.rs
Normal file
@@ -0,0 +1,505 @@
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::{header, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
routing::get,
|
||||
Json, Router,
|
||||
};
|
||||
use base64::Engine;
|
||||
use diesel::prelude::*;
|
||||
use serde::Serialize;
|
||||
use std::sync::Arc;
|
||||
use tower_http::{cors::CorsLayer, services::ServeDir};
|
||||
use tracing::info;
|
||||
|
||||
// Database connection
|
||||
type DbConnection = diesel::SqliteConnection;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
database_url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct MapBounds {
|
||||
min_x: i32,
|
||||
min_y: i32,
|
||||
max_x: i32,
|
||||
max_y: i32,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ResourceResponse {
|
||||
resources: Vec<ResourceGroup>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ResourceGroup {
|
||||
item_id: i32,
|
||||
name: String,
|
||||
skill: String,
|
||||
level: i32,
|
||||
icon_base64: String,
|
||||
positions: Vec<Position>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Position {
|
||||
x: f32,
|
||||
y: f32,
|
||||
}
|
||||
|
||||
// Labels response (world_map_icons with icon_type == 16)
|
||||
#[derive(Serialize)]
|
||||
struct LabelsResponse {
|
||||
labels: Vec<Label>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Label {
|
||||
x: f32,
|
||||
y: f32,
|
||||
text: String,
|
||||
font_size: i32,
|
||||
}
|
||||
|
||||
// Entrances response (world_teleporters)
|
||||
#[derive(Serialize)]
|
||||
struct EntrancesResponse {
|
||||
icon_base64: String,
|
||||
entrances: Vec<Entrance>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Entrance {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
tp_x: Option<f32>,
|
||||
tp_y: Option<f32>,
|
||||
}
|
||||
|
||||
// Ground Items response (world_loot)
|
||||
#[derive(Serialize)]
|
||||
struct GroundItemsResponse {
|
||||
icon_base64: String,
|
||||
items: Vec<GroundItem>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct GroundItem {
|
||||
x: f32,
|
||||
y: f32,
|
||||
name: String,
|
||||
amount: i32,
|
||||
respawn_time: i32,
|
||||
}
|
||||
|
||||
// Houses response (player_houses)
|
||||
#[derive(Serialize)]
|
||||
struct HousesResponse {
|
||||
icon_base64: String,
|
||||
houses: Vec<House>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct House {
|
||||
x: f32,
|
||||
y: f32,
|
||||
name: String,
|
||||
description: String,
|
||||
price: i32,
|
||||
}
|
||||
|
||||
// Establish database connection
|
||||
fn establish_connection(database_url: &str) -> Result<DbConnection, diesel::ConnectionError> {
|
||||
SqliteConnection::establish(database_url)
|
||||
}
|
||||
|
||||
// Get map bounds from database (using zoom level 2 tiles)
|
||||
async fn get_bounds(State(state): State<Arc<AppState>>) -> Result<Json<MapBounds>, StatusCode> {
|
||||
use cursebreaker_parser::schema::minimap_tiles::dsl::*;
|
||||
use diesel::dsl::{max, min};
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let (min_x_val, max_x_val): (Option<i32>, Option<i32>) = minimap_tiles
|
||||
.filter(zoom.eq(2)) // Only count zoom level 2 (original) tiles
|
||||
.select((min(x), max(x)))
|
||||
.first(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying min/max x: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let (min_y_val, max_y_val): (Option<i32>, Option<i32>) = minimap_tiles
|
||||
.filter(zoom.eq(2)) // Only count zoom level 2 (original) tiles
|
||||
.select((min(y), max(y)))
|
||||
.first(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying min/max y: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
Ok(Json(MapBounds {
|
||||
min_x: min_x_val.unwrap_or(0),
|
||||
min_y: min_y_val.unwrap_or(0),
|
||||
max_x: max_x_val.unwrap_or(0),
|
||||
max_y: max_y_val.unwrap_or(0),
|
||||
}))
|
||||
}
|
||||
|
||||
// Get tile by coordinates and zoom level
|
||||
async fn get_tile(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path((z, tile_x, tile_y)): Path<(i32, i32, i32)>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
use cursebreaker_parser::schema::minimap_tiles::dsl::*;
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Query minimap_tiles table for the tile at the requested zoom level
|
||||
let tile_data = minimap_tiles
|
||||
.filter(zoom.eq(z))
|
||||
.filter(x.eq(tile_x))
|
||||
.filter(y.eq(tile_y))
|
||||
.select(image)
|
||||
.first::<Vec<u8>>(&mut conn)
|
||||
.optional()
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying tile: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
match tile_data {
|
||||
Some(data) => {
|
||||
info!(
|
||||
"Serving tile at ({}, {}) zoom {} - {} bytes",
|
||||
tile_x,
|
||||
tile_y,
|
||||
z,
|
||||
data.len()
|
||||
);
|
||||
Ok(([(header::CONTENT_TYPE, "image/webp")], data).into_response())
|
||||
}
|
||||
None => {
|
||||
tracing::warn!("Tile not found: ({}, {}) at zoom {}", tile_x, tile_y, z);
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get all resources with icons from database
|
||||
async fn get_resources(
|
||||
State(state): State<Arc<AppState>>,
|
||||
) -> Result<Json<ResourceResponse>, StatusCode> {
|
||||
use cursebreaker_parser::schema::{harvestables, resource_icons, world_resources};
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Query with three-way join
|
||||
let results = world_resources::table
|
||||
.inner_join(
|
||||
resource_icons::table.on(world_resources::item_id.eq(resource_icons::item_id)),
|
||||
)
|
||||
.inner_join(harvestables::table.on(resource_icons::item_id.eq(harvestables::id)))
|
||||
.select((
|
||||
resource_icons::item_id,
|
||||
resource_icons::name,
|
||||
harvestables::skill,
|
||||
harvestables::level,
|
||||
resource_icons::icon_64,
|
||||
world_resources::pos_x,
|
||||
world_resources::pos_y,
|
||||
))
|
||||
.order_by((harvestables::skill, harvestables::level))
|
||||
.load::<(i32, String, String, i32, Vec<u8>, f32, f32)>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying resources: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Group results by item_id
|
||||
use std::collections::HashMap;
|
||||
let mut grouped: HashMap<i32, ResourceGroup> = HashMap::new();
|
||||
|
||||
for (item_id, name, skill, level, icon_bytes, pos_x, pos_y) in results {
|
||||
let entry = grouped.entry(item_id).or_insert_with(|| {
|
||||
// Convert icon to base64 (only once per resource type)
|
||||
let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
|
||||
|
||||
ResourceGroup {
|
||||
item_id,
|
||||
name,
|
||||
skill,
|
||||
level,
|
||||
icon_base64,
|
||||
positions: Vec::new(),
|
||||
}
|
||||
});
|
||||
|
||||
// Add position with multiplier applied
|
||||
entry.positions.push(Position {
|
||||
x: pos_x * 5.12,
|
||||
y: pos_y * 5.12,
|
||||
});
|
||||
}
|
||||
|
||||
// Convert to vec and sort by skill and level
|
||||
let mut resources: Vec<ResourceGroup> = grouped.into_values().collect();
|
||||
resources.sort_by(|a, b| a.skill.cmp(&b.skill).then(a.level.cmp(&b.level)));
|
||||
|
||||
info!("Returning {} resource types", resources.len());
|
||||
|
||||
Ok(Json(ResourceResponse { resources }))
|
||||
}
|
||||
|
||||
// Get labels from world_map_icons where icon_type == 16
|
||||
async fn get_labels(State(state): State<Arc<AppState>>) -> Result<Json<LabelsResponse>, StatusCode> {
|
||||
use cursebreaker_parser::schema::world_map_icons;
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let results = world_map_icons::table
|
||||
.filter(world_map_icons::icon_type.eq(16))
|
||||
.select((
|
||||
world_map_icons::pos_x,
|
||||
world_map_icons::pos_y,
|
||||
world_map_icons::text,
|
||||
world_map_icons::font_size,
|
||||
))
|
||||
.load::<(f32, f32, String, i32)>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying labels: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let labels: Vec<Label> = results
|
||||
.into_iter()
|
||||
.map(|(pos_x, pos_y, text, font_size)| Label {
|
||||
x: pos_x * 5.12,
|
||||
y: pos_y * 5.12,
|
||||
text,
|
||||
font_size,
|
||||
})
|
||||
.collect();
|
||||
|
||||
info!("Returning {} labels", labels.len());
|
||||
|
||||
Ok(Json(LabelsResponse { labels }))
|
||||
}
|
||||
|
||||
// Get entrances from world_teleporters
|
||||
async fn get_entrances(
|
||||
State(state): State<Arc<AppState>>,
|
||||
) -> Result<Json<EntrancesResponse>, StatusCode> {
|
||||
use cursebreaker_parser::schema::{general_icons, world_teleporters};
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Get the Entrance icon
|
||||
let icon_bytes: Vec<u8> = general_icons::table
|
||||
.filter(general_icons::name.eq("Entrance"))
|
||||
.select(general_icons::icon_32)
|
||||
.first::<Option<Vec<u8>>>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying entrance icon: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?
|
||||
.unwrap_or_default();
|
||||
|
||||
let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
|
||||
|
||||
// Get teleporter positions
|
||||
let results = world_teleporters::table
|
||||
.select((
|
||||
world_teleporters::pos_x,
|
||||
world_teleporters::pos_y,
|
||||
world_teleporters::tp_x,
|
||||
world_teleporters::tp_y,
|
||||
))
|
||||
.load::<(f32, f32, Option<f32>, Option<f32>)>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying teleporters: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let entrances: Vec<Entrance> = results
|
||||
.into_iter()
|
||||
.map(|(pos_x, pos_y, tp_x, tp_y)| Entrance {
|
||||
pos_x: pos_x * 5.12,
|
||||
pos_y: pos_y * 5.12,
|
||||
tp_x: tp_x.map(|x| x * 5.12),
|
||||
tp_y: tp_y.map(|y| y * 5.12),
|
||||
})
|
||||
.collect();
|
||||
|
||||
info!("Returning {} entrances", entrances.len());
|
||||
|
||||
Ok(Json(EntrancesResponse {
|
||||
icon_base64,
|
||||
entrances,
|
||||
}))
|
||||
}
|
||||
|
||||
// Get ground items from world_loot
|
||||
async fn get_ground_items(
|
||||
State(state): State<Arc<AppState>>,
|
||||
) -> Result<Json<GroundItemsResponse>, StatusCode> {
|
||||
use cursebreaker_parser::schema::{general_icons, items, world_loot};
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Get the Common_tick icon
|
||||
let icon_bytes: Vec<u8> = general_icons::table
|
||||
.filter(general_icons::name.eq("Common_tick"))
|
||||
.select(general_icons::icon_32)
|
||||
.first::<Option<Vec<u8>>>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying common_tick icon: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?
|
||||
.unwrap_or_default();
|
||||
|
||||
let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
|
||||
|
||||
// Get world loot with item names
|
||||
let results = world_loot::table
|
||||
.inner_join(items::table.on(world_loot::item_id.eq(items::id.assume_not_null())))
|
||||
.select((
|
||||
world_loot::pos_x,
|
||||
world_loot::pos_y,
|
||||
items::name,
|
||||
world_loot::amount,
|
||||
world_loot::respawn_time,
|
||||
))
|
||||
.load::<(f32, f32, String, i32, i32)>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying ground items: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let ground_items: Vec<GroundItem> = results
|
||||
.into_iter()
|
||||
.map(|(pos_x, pos_y, name, amount, respawn_time)| GroundItem {
|
||||
x: pos_x * 5.12,
|
||||
y: pos_y * 5.12,
|
||||
name,
|
||||
amount,
|
||||
respawn_time,
|
||||
})
|
||||
.collect();
|
||||
|
||||
info!("Returning {} ground items", ground_items.len());
|
||||
|
||||
Ok(Json(GroundItemsResponse {
|
||||
icon_base64,
|
||||
items: ground_items,
|
||||
}))
|
||||
}
|
||||
|
||||
// Get player houses
|
||||
async fn get_houses(State(state): State<Arc<AppState>>) -> Result<Json<HousesResponse>, StatusCode> {
|
||||
use cursebreaker_parser::schema::{general_icons, player_houses};
|
||||
|
||||
let mut conn = establish_connection(&state.database_url).map_err(|e| {
|
||||
tracing::error!("Database connection error: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Get the Notifications_House icon (64px)
|
||||
let icon_bytes: Vec<u8> = general_icons::table
|
||||
.filter(general_icons::name.eq("Notifications_House"))
|
||||
.select(general_icons::icon_64)
|
||||
.first::<Option<Vec<u8>>>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying house icon: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?
|
||||
.unwrap_or_default();
|
||||
|
||||
let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
|
||||
|
||||
// Get player houses
|
||||
let results = player_houses::table
|
||||
.select((
|
||||
player_houses::pos_x,
|
||||
player_houses::pos_z,
|
||||
player_houses::name,
|
||||
player_houses::description,
|
||||
player_houses::price,
|
||||
))
|
||||
.load::<(f32, f32, String, String, i32)>(&mut conn)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Error querying player houses: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let houses: Vec<House> = results
|
||||
.into_iter()
|
||||
.map(|(pos_x, pos_z, name, description, price)| House {
|
||||
x: pos_x * 5.12,
|
||||
y: pos_z * 5.12,
|
||||
name,
|
||||
description,
|
||||
price,
|
||||
})
|
||||
.collect();
|
||||
|
||||
info!("Returning {} houses", houses.len());
|
||||
|
||||
Ok(Json(HousesResponse { icon_base64, houses }))
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
// Initialize tracing
|
||||
tracing_subscriber::fmt::init();
|
||||
|
||||
// Get database path
|
||||
let database_url = std::env::var("DATABASE_URL")
|
||||
.unwrap_or_else(|_| "../cursebreaker.db".to_string());
|
||||
|
||||
info!("Using database: {}", database_url);
|
||||
|
||||
let state = Arc::new(AppState { database_url });
|
||||
|
||||
// Build router
|
||||
let app = Router::new()
|
||||
.route("/api/bounds", get(get_bounds))
|
||||
.route("/api/tiles/:z/:x/:y", get(get_tile))
|
||||
.route("/api/resources", get(get_resources))
|
||||
.route("/api/labels", get(get_labels))
|
||||
.route("/api/entrances", get(get_entrances))
|
||||
.route("/api/ground-items", get(get_ground_items))
|
||||
.route("/api/houses", get(get_houses))
|
||||
.nest_service("/", ServeDir::new("static"))
|
||||
.layer(CorsLayer::permissive())
|
||||
.with_state(state);
|
||||
|
||||
// Start server
|
||||
let listener = tokio::net::TcpListener::bind("127.0.0.1:3000")
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
info!("Server running on http://127.0.0.1:3000");
|
||||
|
||||
axum::serve(listener, app).await.unwrap();
|
||||
}
|
||||
44
cursebreaker-map/static/config.js
Normal file
44
cursebreaker-map/static/config.js
Normal file
@@ -0,0 +1,44 @@
|
||||
// Map configuration.
// Adjust these values and reload the page to try different zoom behaviors.

const MapConfig = {
    // Maps Leaflet zoom thresholds to database zoom levels and merge factors.
    // Entries are ordered by ascending leafletZoom; getZoomConfig relies on
    // that ordering.
    zoomLevels: [
        // Leaflet zoom ≥ -2 → database zoom 0 (4x4 merged tiles)
        { leafletZoom: -2, dbZoom: 0, mergeFactor: 4, label: "4x4 merged" },
        // Leaflet zoom ≥ -0.5 → database zoom 1 (2x2 merged tiles)
        { leafletZoom: -0.5, dbZoom: 1, mergeFactor: 2, label: "2x2 merged" },
        // Leaflet zoom ≥ 1 → database zoom 2 (original tiles)
        { leafletZoom: 1, dbZoom: 2, mergeFactor: 1, label: "original" },
    ],

    // Leaflet map zoom limits.
    minZoom: -2,
    maxZoom: 2,

    // Tile edge length in pixels — must match the database tile size.
    tileSize: 512,

    // When true, tile boundaries and coordinates are drawn for debugging.
    debug: true,

    // Resource marker icon size in pixels.
    resourceIconSize: 48,

    // Return the zoom configuration entry for a Leaflet zoom level:
    // the last entry whose threshold is <= the requested zoom
    // (falls back to the first entry when below every threshold).
    getZoomConfig(leafletZoom) {
        let selected = this.zoomLevels[0];
        for (const candidate of this.zoomLevels) {
            if (leafletZoom >= candidate.leafletZoom) {
                selected = candidate;
            }
        }
        return selected;
    }
};

// Expose globally for the other static scripts.
window.MapConfig = MapConfig;
|
||||
104
cursebreaker-map/static/index.html
Normal file
104
cursebreaker-map/static/index.html
Normal file
@@ -0,0 +1,104 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Cursebreaker Interactive Map</title>
|
||||
|
||||
<!-- Leaflet CSS -->
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" />
|
||||
|
||||
<!-- Custom CSS -->
|
||||
<link rel="stylesheet" href="style.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="app">
|
||||
<!-- Sidebar -->
|
||||
<div id="sidebar" class="sidebar collapsed">
|
||||
<div class="sidebar-content">
|
||||
<h2>Cursebreaker Map</h2>
|
||||
<div class="info-section">
|
||||
<p class="subtitle">The Black Grimoire: Cursebreaker</p>
|
||||
</div>
|
||||
|
||||
<div class="filters-section">
|
||||
<h3>Labels</h3>
|
||||
<div class="filter-controls">
|
||||
<label class="filter-label master-toggle">
|
||||
<input type="checkbox" id="labels-toggle" checked>
|
||||
<span>Show Labels</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filters-section">
|
||||
<h3>Entrances</h3>
|
||||
<div class="filter-controls">
|
||||
<label class="filter-label master-toggle">
|
||||
<input type="checkbox" id="entrances-toggle" checked>
|
||||
<span>Show Entrances</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filters-section">
|
||||
<h3>Ground Items</h3>
|
||||
<div class="filter-controls">
|
||||
<label class="filter-label master-toggle">
|
||||
<input type="checkbox" id="ground-items-toggle" checked>
|
||||
<span>Show Ground Items</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filters-section">
|
||||
<h3>Houses</h3>
|
||||
<div class="filter-controls">
|
||||
<label class="filter-label master-toggle">
|
||||
<input type="checkbox" id="houses-toggle" checked>
|
||||
<span>Show Houses</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filters-section">
|
||||
<h3>Resources</h3>
|
||||
<div class="filter-controls">
|
||||
<button id="select-all-resources" class="filter-btn">Show All</button>
|
||||
<button id="deselect-all-resources" class="filter-btn">Hide All</button>
|
||||
</div>
|
||||
<div id="resource-filters" class="filter-group">
|
||||
<p class="loading-text">Loading resources...</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="map-info">
|
||||
<h3>Map Info</h3>
|
||||
<div id="map-stats">
|
||||
<p>Loading...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Map Container -->
|
||||
<div id="map"></div>
|
||||
|
||||
<!-- Coordinates Display -->
|
||||
<div id="coordinates" class="coordinates-display">
|
||||
Coordinates: <span id="coord-text">--</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Leaflet JS -->
|
||||
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
|
||||
|
||||
<!-- Configuration (edit this to adjust zoom levels) -->
|
||||
<script src="config.js"></script>
|
||||
|
||||
<!-- Custom JS -->
|
||||
<script src="map.js"></script>
|
||||
<script src="resources.js"></script>
|
||||
<script src="markers.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
273
cursebreaker-map/static/map.js
Normal file
273
cursebreaker-map/static/map.js
Normal file
@@ -0,0 +1,273 @@
|
||||
// Initialize the map when the page loads
|
||||
let map;
|
||||
let bounds;
|
||||
let tileLayerGroup;
|
||||
let debugLayerGroup;
|
||||
|
||||
async function initMap() {
|
||||
try {
|
||||
// Fetch map bounds from the API
|
||||
const response = await fetch('/api/bounds');
|
||||
bounds = await response.json();
|
||||
|
||||
console.log('Map bounds:', bounds);
|
||||
|
||||
// Update sidebar with map info
|
||||
updateMapInfo(bounds);
|
||||
|
||||
// Calculate map dimensions in tiles
|
||||
const width = bounds.max_x - bounds.min_x + 1;
|
||||
const height = bounds.max_y - bounds.min_y + 1;
|
||||
|
||||
// Get config
|
||||
const config = window.MapConfig;
|
||||
const tileSize = config.tileSize;
|
||||
|
||||
// Create map with simple CRS (not geographic)
|
||||
map = L.map('map', {
|
||||
crs: L.CRS.Simple,
|
||||
minZoom: config.minZoom,
|
||||
maxZoom: config.maxZoom,
|
||||
attributionControl: false,
|
||||
});
|
||||
|
||||
// Calculate bounds for Leaflet (in pixels)
|
||||
// Origin at top-left [0,0], y increases down, x increases right
|
||||
const pixelWidth = width * tileSize;
|
||||
const pixelHeight = height * tileSize;
|
||||
|
||||
const mapBounds = [
|
||||
[0, 0],
|
||||
[pixelHeight, pixelWidth]
|
||||
];
|
||||
|
||||
// Set max bounds to prevent panning outside the map
|
||||
map.setMaxBounds(mapBounds);
|
||||
|
||||
// Fit the map to bounds
|
||||
map.fitBounds(mapBounds);
|
||||
|
||||
// Create layer groups
|
||||
tileLayerGroup = L.layerGroup().addTo(map);
|
||||
|
||||
if (config.debug) {
|
||||
debugLayerGroup = L.layerGroup().addTo(map);
|
||||
}
|
||||
|
||||
// Load tiles for current zoom
|
||||
loadTilesForCurrentZoom();
|
||||
|
||||
// Reload tiles when zoom changes
|
||||
map.on('zoomend', function() {
|
||||
loadTilesForCurrentZoom();
|
||||
});
|
||||
|
||||
// Add coordinate display on mouse move
|
||||
map.on('mousemove', function(e) {
|
||||
const lat = e.latlng.lat;
|
||||
const lng = e.latlng.lng;
|
||||
|
||||
// Convert pixel coordinates to tile coordinates
|
||||
const tileX = Math.floor(lng / tileSize);
|
||||
const tileY = Math.floor(lat / tileSize);
|
||||
|
||||
const leafletZoom = map.getZoom();
|
||||
const zoomConfig = config.getZoomConfig(leafletZoom);
|
||||
|
||||
// Calculate which merged tile this is in
|
||||
const mergedTileX = Math.floor(tileX / zoomConfig.mergeFactor);
|
||||
const mergedTileY = Math.floor(tileY / zoomConfig.mergeFactor);
|
||||
|
||||
document.getElementById('coord-text').textContent =
|
||||
`Tile (${tileX}, ${tileY}) | Merged (${mergedTileX}, ${mergedTileY}) | Zoom ${leafletZoom} (DB ${zoomConfig.dbZoom})`;
|
||||
});
|
||||
|
||||
// Add attribution
|
||||
L.control.attribution({
|
||||
position: 'bottomright',
|
||||
prefix: false
|
||||
}).addAttribution('The Black Grimoire: Cursebreaker').addTo(map);
|
||||
|
||||
// Add sidebar toggle control
|
||||
const SidebarControl = L.Control.extend({
|
||||
options: {
|
||||
position: 'topleft'
|
||||
},
|
||||
|
||||
onAdd: function(map) {
|
||||
const container = L.DomUtil.create('div', 'leaflet-bar leaflet-control');
|
||||
const button = L.DomUtil.create('a', 'leaflet-control-sidebar', container);
|
||||
button.innerHTML = '☰';
|
||||
button.href = '#';
|
||||
button.title = 'Toggle Sidebar';
|
||||
|
||||
L.DomEvent.on(button, 'click', function(e) {
|
||||
L.DomEvent.preventDefault(e);
|
||||
const sidebar = document.getElementById('sidebar');
|
||||
sidebar.classList.toggle('collapsed');
|
||||
});
|
||||
|
||||
return container;
|
||||
}
|
||||
});
|
||||
|
||||
map.addControl(new SidebarControl());
|
||||
|
||||
console.log('Map initialized successfully');
|
||||
|
||||
// Load resources asynchronously
|
||||
loadResources().catch(error => {
|
||||
console.error('Failed to load resources:', error);
|
||||
});
|
||||
|
||||
// Load markers (labels, entrances, ground items, houses)
|
||||
initMarkers();
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error initializing map:', error);
|
||||
document.getElementById('map-stats').innerHTML =
|
||||
'<p style="color: #ff6b6b;">Error loading map data</p>';
|
||||
}
|
||||
}
|
||||
|
||||
function loadTilesForCurrentZoom() {
|
||||
// Clear existing tiles
|
||||
tileLayerGroup.clearLayers();
|
||||
if (debugLayerGroup) {
|
||||
debugLayerGroup.clearLayers();
|
||||
}
|
||||
|
||||
const currentZoom = map.getZoom();
|
||||
const config = window.MapConfig;
|
||||
const tileSize = config.tileSize;
|
||||
|
||||
// Get zoom configuration
|
||||
const zoomConfig = config.getZoomConfig(currentZoom);
|
||||
const dbZoom = zoomConfig.dbZoom;
|
||||
const mergeFactor = zoomConfig.mergeFactor;
|
||||
|
||||
console.log(`\n=== Loading tiles at Leaflet zoom ${currentZoom} ===`);
|
||||
console.log(`Database zoom: ${dbZoom}, Merge factor: ${mergeFactor} (${zoomConfig.label})`);
|
||||
console.log(`Bounds: X [${bounds.min_x}, ${bounds.max_x}], Y [${bounds.min_y}, ${bounds.max_y}]`);
|
||||
|
||||
// Calculate which merged tiles we need to load
|
||||
// The database stores merged tile coordinates starting from 0
|
||||
// For a 2x2 merge of tiles (0,0), (0,1), (1,0), (1,1), the database stores it at (0,0)
|
||||
// For original tiles at min_x=0, with mergeFactor=2, we need tiles starting at x=0/2=0
|
||||
|
||||
const minMergedX = Math.floor(bounds.min_x / mergeFactor);
|
||||
const maxMergedX = Math.floor(bounds.max_x / mergeFactor);
|
||||
const minMergedY = Math.floor(bounds.min_y / mergeFactor);
|
||||
const maxMergedY = Math.floor(bounds.max_y / mergeFactor);
|
||||
|
||||
console.log(`Merged tile range: X [${minMergedX}, ${maxMergedX}], Y [${minMergedY}, ${maxMergedY}]`);
|
||||
|
||||
let tileCount = 0;
|
||||
let loadedCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
// Load each merged tile
|
||||
for (let mergedY = minMergedY; mergedY <= maxMergedY; mergedY++) {
|
||||
for (let mergedX = minMergedX; mergedX <= maxMergedX; mergedX++) {
|
||||
// Calculate the pixel bounds for this merged tile
|
||||
// The merged tile at (mergedX, mergedY) covers original tiles starting at:
|
||||
// (mergedX * mergeFactor, mergedY * mergeFactor)
|
||||
const startTileX = mergedX * mergeFactor;
|
||||
const startTileY = mergedY * mergeFactor;
|
||||
|
||||
const pixelMinX = startTileX * tileSize;
|
||||
const pixelMinY = startTileY * tileSize;
|
||||
const pixelMaxX = (startTileX + mergeFactor) * tileSize;
|
||||
const pixelMaxY = (startTileY + mergeFactor) * tileSize;
|
||||
|
||||
const tileBounds = [
|
||||
[pixelMinY, pixelMinX],
|
||||
[pixelMaxY, pixelMaxX]
|
||||
];
|
||||
|
||||
// Request the merged tile from the API
|
||||
const imageUrl = `/api/tiles/${dbZoom}/${mergedX}/${mergedY}`;
|
||||
|
||||
if (config.debug && tileCount < 5) {
|
||||
console.log(` Tile ${tileCount}: DB(${mergedX},${mergedY}) → Pixels [${pixelMinX},${pixelMinY}] to [${pixelMaxX},${pixelMaxY}]`);
|
||||
console.log(` URL: ${imageUrl}`);
|
||||
}
|
||||
|
||||
const overlay = L.imageOverlay(imageUrl, tileBounds, {
|
||||
opacity: 1,
|
||||
errorOverlayUrl: '',
|
||||
});
|
||||
|
||||
overlay.on('load', function() {
|
||||
loadedCount++;
|
||||
if (config.debug && loadedCount <= 3) {
|
||||
console.log(` ✓ Loaded tile (${mergedX}, ${mergedY})`);
|
||||
}
|
||||
});
|
||||
|
||||
overlay.on('error', function() {
|
||||
errorCount++;
|
||||
console.warn(` ✗ Failed to load tile (${mergedX}, ${mergedY}) from ${imageUrl}`);
|
||||
});
|
||||
|
||||
overlay.addTo(tileLayerGroup);
|
||||
tileCount++;
|
||||
|
||||
// Add debug overlay if enabled
|
||||
if (config.debug && debugLayerGroup) {
|
||||
// Draw rectangle showing tile boundaries
|
||||
const rect = L.rectangle(tileBounds, {
|
||||
color: '#ff0000',
|
||||
weight: 1,
|
||||
fillOpacity: 0,
|
||||
interactive: false
|
||||
}).addTo(debugLayerGroup);
|
||||
|
||||
// Add label showing tile coordinates
|
||||
const center = [
|
||||
(pixelMinY + pixelMaxY) / 2,
|
||||
(pixelMinX + pixelMaxX) / 2
|
||||
];
|
||||
|
||||
const label = L.marker(center, {
|
||||
icon: L.divIcon({
|
||||
className: 'tile-label',
|
||||
html: `<div style="background: rgba(0,0,0,0.7); color: #fff; padding: 2px 5px; border-radius: 3px; font-size: 11px; white-space: nowrap;">
|
||||
DB: (${mergedX},${mergedY})<br/>
|
||||
Z: ${dbZoom}
|
||||
</div>`,
|
||||
iconSize: [60, 30],
|
||||
iconAnchor: [30, 15]
|
||||
}),
|
||||
interactive: false
|
||||
}).addTo(debugLayerGroup);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Requested ${tileCount} tiles (merge factor ${mergeFactor}x${mergeFactor})`);
|
||||
|
||||
// Wait a bit and report results
|
||||
setTimeout(() => {
|
||||
console.log(`Results: ${loadedCount} loaded, ${errorCount} errors, ${tileCount - loadedCount - errorCount} pending`);
|
||||
}, 2000);
|
||||
}
|
||||
|
||||
function updateMapInfo(bounds) {
|
||||
const width = bounds.max_x - bounds.min_x + 1;
|
||||
const height = bounds.max_y - bounds.min_y + 1;
|
||||
const config = window.MapConfig;
|
||||
|
||||
document.getElementById('map-stats').innerHTML = `
|
||||
<p><strong>Bounds:</strong></p>
|
||||
<p>X: ${bounds.min_x} to ${bounds.max_x}</p>
|
||||
<p>Y: ${bounds.min_y} to ${bounds.max_y}</p>
|
||||
<p><strong>Size:</strong> ${width} × ${height} tiles</p>
|
||||
<p><strong>Zoom levels:</strong> ${config.minZoom}-${config.maxZoom}</p>
|
||||
<p><strong>Debug mode:</strong> ${config.debug ? 'ON' : 'OFF'}</p>
|
||||
${config.debug ? '<p style="color: #8b5cf6; font-size: 12px;">Red boxes show tile boundaries</p>' : ''}
|
||||
`;
|
||||
}
|
||||
|
||||
// Initialize map when page loads
|
||||
window.addEventListener('DOMContentLoaded', initMap);
|
||||
377
cursebreaker-map/static/markers.js
Normal file
377
cursebreaker-map/static/markers.js
Normal file
@@ -0,0 +1,377 @@
|
||||
// Markers management for Cursebreaker map (Labels, Entrances, Ground Items, Houses)
|
||||
|
||||
// Layer groups for each marker type
|
||||
let labelsLayerGroup = null;
|
||||
let entrancesLayerGroup = null;
|
||||
let groundItemsLayerGroup = null;
|
||||
let housesLayerGroup = null;
|
||||
|
||||
// Store active teleport lines for entrances
|
||||
let activeTeleportLine = null;
|
||||
|
||||
// Initialize all markers when map is ready
|
||||
function initMarkers() {
|
||||
// Load all marker types in parallel
|
||||
Promise.all([
|
||||
loadLabels(),
|
||||
loadEntrances(),
|
||||
loadGroundItems(),
|
||||
loadHouses(),
|
||||
]).catch(error => {
|
||||
console.error('Error loading markers:', error);
|
||||
});
|
||||
|
||||
// Set up toggle handlers
|
||||
setupMarkerToggles();
|
||||
}
|
||||
|
||||
// Set up toggle event handlers
|
||||
function setupMarkerToggles() {
|
||||
const labelsToggle = document.getElementById('labels-toggle');
|
||||
const entrancesToggle = document.getElementById('entrances-toggle');
|
||||
const groundItemsToggle = document.getElementById('ground-items-toggle');
|
||||
const housesToggle = document.getElementById('houses-toggle');
|
||||
|
||||
if (labelsToggle) {
|
||||
labelsToggle.addEventListener('change', (e) => {
|
||||
toggleLayer(labelsLayerGroup, e.target.checked);
|
||||
saveMarkerState('labels', e.target.checked);
|
||||
});
|
||||
}
|
||||
|
||||
if (entrancesToggle) {
|
||||
entrancesToggle.addEventListener('change', (e) => {
|
||||
toggleLayer(entrancesLayerGroup, e.target.checked);
|
||||
saveMarkerState('entrances', e.target.checked);
|
||||
// Remove active teleport line when hiding entrances
|
||||
if (!e.target.checked && activeTeleportLine) {
|
||||
map.removeLayer(activeTeleportLine);
|
||||
activeTeleportLine = null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (groundItemsToggle) {
|
||||
groundItemsToggle.addEventListener('change', (e) => {
|
||||
toggleLayer(groundItemsLayerGroup, e.target.checked);
|
||||
saveMarkerState('groundItems', e.target.checked);
|
||||
});
|
||||
}
|
||||
|
||||
if (housesToggle) {
|
||||
housesToggle.addEventListener('change', (e) => {
|
||||
toggleLayer(housesLayerGroup, e.target.checked);
|
||||
saveMarkerState('houses', e.target.checked);
|
||||
});
|
||||
}
|
||||
|
||||
// Restore saved state
|
||||
restoreMarkerState();
|
||||
}
|
||||
|
||||
// Toggle layer visibility
|
||||
function toggleLayer(layerGroup, visible) {
|
||||
if (!layerGroup) return;
|
||||
|
||||
if (visible) {
|
||||
layerGroup.addTo(map);
|
||||
} else {
|
||||
map.removeLayer(layerGroup);
|
||||
}
|
||||
}
|
||||
|
||||
// Save marker visibility state
|
||||
function saveMarkerState(type, visible) {
|
||||
try {
|
||||
const state = JSON.parse(localStorage.getItem('cursebreaker_marker_state') || '{}');
|
||||
state[type] = visible;
|
||||
localStorage.setItem('cursebreaker_marker_state', JSON.stringify(state));
|
||||
} catch (error) {
|
||||
console.warn('Failed to save marker state:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Restore marker visibility state
|
||||
function restoreMarkerState() {
|
||||
try {
|
||||
const state = JSON.parse(localStorage.getItem('cursebreaker_marker_state') || '{}');
|
||||
|
||||
// Update checkboxes and layers based on saved state
|
||||
setTimeout(() => {
|
||||
if (state.labels === false) {
|
||||
const toggle = document.getElementById('labels-toggle');
|
||||
if (toggle) {
|
||||
toggle.checked = false;
|
||||
toggleLayer(labelsLayerGroup, false);
|
||||
}
|
||||
}
|
||||
if (state.entrances === false) {
|
||||
const toggle = document.getElementById('entrances-toggle');
|
||||
if (toggle) {
|
||||
toggle.checked = false;
|
||||
toggleLayer(entrancesLayerGroup, false);
|
||||
}
|
||||
}
|
||||
if (state.groundItems === false) {
|
||||
const toggle = document.getElementById('ground-items-toggle');
|
||||
if (toggle) {
|
||||
toggle.checked = false;
|
||||
toggleLayer(groundItemsLayerGroup, false);
|
||||
}
|
||||
}
|
||||
if (state.houses === false) {
|
||||
const toggle = document.getElementById('houses-toggle');
|
||||
if (toggle) {
|
||||
toggle.checked = false;
|
||||
toggleLayer(housesLayerGroup, false);
|
||||
}
|
||||
}
|
||||
}, 200);
|
||||
} catch (error) {
|
||||
console.warn('Failed to restore marker state:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Load labels (text markers on the map)
|
||||
async function loadLabels() {
|
||||
try {
|
||||
console.log('Loading labels...');
|
||||
const response = await fetch('/api/labels');
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log(`Received ${data.labels.length} labels`);
|
||||
|
||||
labelsLayerGroup = L.layerGroup();
|
||||
|
||||
for (const label of data.labels) {
|
||||
// Create a divIcon with the label text
|
||||
const labelIcon = L.divIcon({
|
||||
className: 'map-label',
|
||||
html: `<div class="label-text" style="font-size: ${label.font_size}px;">${label.text}</div>`,
|
||||
iconSize: null, // Let CSS handle sizing
|
||||
iconAnchor: [0, 0],
|
||||
});
|
||||
|
||||
const marker = L.marker([label.y, label.x], {
|
||||
icon: labelIcon,
|
||||
interactive: false, // Labels shouldn't be clickable
|
||||
});
|
||||
|
||||
marker.addTo(labelsLayerGroup);
|
||||
}
|
||||
|
||||
labelsLayerGroup.addTo(map);
|
||||
console.log('Labels loaded successfully');
|
||||
} catch (error) {
|
||||
console.error('Error loading labels:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Load entrances (teleporters with lines)
|
||||
async function loadEntrances() {
|
||||
try {
|
||||
console.log('Loading entrances...');
|
||||
const response = await fetch('/api/entrances');
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log(`Received ${data.entrances.length} entrances`);
|
||||
|
||||
entrancesLayerGroup = L.layerGroup();
|
||||
|
||||
// Create icon from base64
|
||||
const iconUrl = `data:image/webp;base64,${data.icon_base64}`;
|
||||
const entranceIcon = L.icon({
|
||||
iconUrl: iconUrl,
|
||||
iconSize: [32, 32],
|
||||
iconAnchor: [16, 16],
|
||||
popupAnchor: [0, -16],
|
||||
});
|
||||
|
||||
for (const entrance of data.entrances) {
|
||||
const marker = L.marker([entrance.pos_y, entrance.pos_x], {
|
||||
icon: entranceIcon,
|
||||
title: 'Entrance',
|
||||
});
|
||||
|
||||
// Store teleport destination on the marker
|
||||
marker.teleportDest = {
|
||||
x: entrance.tp_x,
|
||||
y: entrance.tp_y,
|
||||
};
|
||||
|
||||
// Handle click to show teleport line
|
||||
marker.on('click', function(e) {
|
||||
// Remove existing line if any
|
||||
if (activeTeleportLine) {
|
||||
map.removeLayer(activeTeleportLine);
|
||||
activeTeleportLine = null;
|
||||
}
|
||||
|
||||
const dest = this.teleportDest;
|
||||
if (dest.x !== null && dest.y !== null) {
|
||||
// Create a line from entrance to destination
|
||||
activeTeleportLine = L.polyline(
|
||||
[
|
||||
[entrance.pos_y, entrance.pos_x],
|
||||
[dest.y, dest.x]
|
||||
],
|
||||
{
|
||||
color: '#00ffff',
|
||||
weight: 3,
|
||||
opacity: 0.8,
|
||||
dashArray: '10, 10',
|
||||
}
|
||||
).addTo(map);
|
||||
|
||||
// Add a destination marker
|
||||
const destMarker = L.circleMarker([dest.y, dest.x], {
|
||||
radius: 8,
|
||||
color: '#00ffff',
|
||||
fillColor: '#00ffff',
|
||||
fillOpacity: 0.5,
|
||||
}).addTo(map);
|
||||
|
||||
// Remove line and destination marker after 5 seconds
|
||||
setTimeout(() => {
|
||||
if (activeTeleportLine) {
|
||||
map.removeLayer(activeTeleportLine);
|
||||
activeTeleportLine = null;
|
||||
}
|
||||
map.removeLayer(destMarker);
|
||||
}, 5000);
|
||||
}
|
||||
});
|
||||
|
||||
marker.addTo(entrancesLayerGroup);
|
||||
}
|
||||
|
||||
entrancesLayerGroup.addTo(map);
|
||||
console.log('Entrances loaded successfully');
|
||||
} catch (error) {
|
||||
console.error('Error loading entrances:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Format respawn time as "XXM XXS"
|
||||
function formatRespawnTime(seconds) {
|
||||
const minutes = Math.floor(seconds / 60);
|
||||
const secs = seconds % 60;
|
||||
|
||||
if (minutes > 0 && secs > 0) {
|
||||
return `${minutes}M ${secs}S`;
|
||||
} else if (minutes > 0) {
|
||||
return `${minutes}M`;
|
||||
} else {
|
||||
return `${secs}S`;
|
||||
}
|
||||
}
|
||||
|
||||
// Load ground items
|
||||
async function loadGroundItems() {
|
||||
try {
|
||||
console.log('Loading ground items...');
|
||||
const response = await fetch('/api/ground-items');
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log(`Received ${data.items.length} ground items`);
|
||||
|
||||
groundItemsLayerGroup = L.layerGroup();
|
||||
|
||||
// Create icon from base64
|
||||
const iconUrl = `data:image/webp;base64,${data.icon_base64}`;
|
||||
const itemIcon = L.icon({
|
||||
iconUrl: iconUrl,
|
||||
iconSize: [24, 24],
|
||||
iconAnchor: [12, 12],
|
||||
popupAnchor: [0, -12],
|
||||
});
|
||||
|
||||
for (const item of data.items) {
|
||||
const marker = L.marker([item.y, item.x], {
|
||||
icon: itemIcon,
|
||||
title: item.name,
|
||||
});
|
||||
|
||||
// Build popup content
|
||||
let popupContent = `<strong>${item.name}</strong>`;
|
||||
if (item.amount > 1) {
|
||||
popupContent += `<br/>Amount: ${item.amount}`;
|
||||
}
|
||||
popupContent += `<br/>Respawn: ${formatRespawnTime(item.respawn_time)}`;
|
||||
|
||||
marker.bindPopup(popupContent);
|
||||
marker.addTo(groundItemsLayerGroup);
|
||||
}
|
||||
|
||||
groundItemsLayerGroup.addTo(map);
|
||||
console.log('Ground items loaded successfully');
|
||||
} catch (error) {
|
||||
console.error('Error loading ground items:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Load player houses
|
||||
async function loadHouses() {
|
||||
try {
|
||||
console.log('Loading houses...');
|
||||
const response = await fetch('/api/houses');
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log(`Received ${data.houses.length} houses`);
|
||||
|
||||
housesLayerGroup = L.layerGroup();
|
||||
|
||||
// Create icon from base64
|
||||
const iconUrl = `data:image/webp;base64,${data.icon_base64}`;
|
||||
const houseIcon = L.icon({
|
||||
iconUrl: iconUrl,
|
||||
iconSize: [64, 64],
|
||||
iconAnchor: [32, 32],
|
||||
popupAnchor: [0, -32],
|
||||
});
|
||||
|
||||
for (const house of data.houses) {
|
||||
const marker = L.marker([house.y, house.x], {
|
||||
icon: houseIcon,
|
||||
title: house.name,
|
||||
});
|
||||
|
||||
// Format price with commas
|
||||
const formattedPrice = house.price.toLocaleString();
|
||||
|
||||
// Build popup content
|
||||
const popupContent = `
|
||||
<strong>${house.name}</strong><br/>
|
||||
<em>${house.description}</em><br/>
|
||||
<span class="house-price">Price: ${formattedPrice} gold</span>
|
||||
`;
|
||||
|
||||
marker.bindPopup(popupContent);
|
||||
marker.addTo(housesLayerGroup);
|
||||
}
|
||||
|
||||
housesLayerGroup.addTo(map);
|
||||
console.log('Houses loaded successfully');
|
||||
} catch (error) {
|
||||
console.error('Error loading houses:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Call initMarkers after map is loaded
|
||||
// This is called from map.js after resources are loaded
|
||||
261
cursebreaker-map/static/resources.js
Normal file
261
cursebreaker-map/static/resources.js
Normal file
@@ -0,0 +1,261 @@
|
||||
// Resource management for Cursebreaker map
|
||||
let resourceLayerGroups = {}; // Map: resource name -> L.layerGroup
|
||||
let resourceIcons = {}; // Map: resource name -> L.icon
|
||||
let resourceData = {}; // Map: resource name -> resource metadata (skill, level, etc.)
|
||||
let filterState = {}; // Map: resource name -> boolean (visible)
|
||||
|
||||
// Load resources from API
|
||||
async function loadResources() {
|
||||
try {
|
||||
console.log('Loading resources from API...');
|
||||
const response = await fetch('/api/resources');
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log(`Received ${data.resources.length} resource types`);
|
||||
|
||||
// Create icons and layer groups for each resource
|
||||
for (const group of data.resources) {
|
||||
createResourceGroup(group);
|
||||
}
|
||||
|
||||
// Initialize filter UI
|
||||
initializeFilterUI();
|
||||
|
||||
// Restore filter state from localStorage
|
||||
restoreFilterState();
|
||||
|
||||
console.log(`Loaded ${data.resources.length} resource types successfully`);
|
||||
} catch (error) {
|
||||
console.error('Error loading resources:', error);
|
||||
const container = document.getElementById('resource-filters');
|
||||
if (container) {
|
||||
container.innerHTML = '<p style="color: #ff6b6b;">Failed to load resources. Check console for details.</p>';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create resource group with icon and markers
|
||||
function createResourceGroup(group) {
|
||||
const config = window.MapConfig;
|
||||
|
||||
// Create icon definition (cached per resource type)
|
||||
const iconUrl = `data:image/webp;base64,${group.icon_base64}`;
|
||||
const icon = L.icon({
|
||||
iconUrl: iconUrl,
|
||||
iconSize: [config.resourceIconSize, config.resourceIconSize],
|
||||
iconAnchor: [config.resourceIconSize / 2, config.resourceIconSize / 2],
|
||||
popupAnchor: [0, -(config.resourceIconSize / 2)],
|
||||
});
|
||||
|
||||
resourceIcons[group.name] = icon;
|
||||
|
||||
// Store metadata
|
||||
resourceData[group.name] = {
|
||||
item_id: group.item_id,
|
||||
skill: group.skill,
|
||||
level: group.level,
|
||||
};
|
||||
|
||||
// Create layer group for this resource type
|
||||
const layerGroup = L.layerGroup();
|
||||
|
||||
// Add markers for all positions
|
||||
for (const pos of group.positions) {
|
||||
const marker = L.marker([pos.y, pos.x], {
|
||||
icon: icon,
|
||||
title: group.name,
|
||||
});
|
||||
|
||||
// Add popup with resource details
|
||||
marker.bindPopup(
|
||||
`<strong>${group.name}</strong><br/>Position: (${pos.x.toFixed(1)}, ${pos.y.toFixed(1)})`
|
||||
);
|
||||
|
||||
marker.addTo(layerGroup);
|
||||
}
|
||||
|
||||
// Add to map (initially visible)
|
||||
layerGroup.addTo(map);
|
||||
resourceLayerGroups[group.name] = layerGroup;
|
||||
filterState[group.name] = true; // Initially visible
|
||||
}
|
||||
|
||||
// Initialize filter UI with skill grouping
|
||||
function initializeFilterUI() {
|
||||
const container = document.getElementById('resource-filters');
|
||||
if (!container) {
|
||||
console.error('resource-filters container not found');
|
||||
return;
|
||||
}
|
||||
|
||||
container.innerHTML = ''; // Clear loading text
|
||||
|
||||
// Group resources by skill
|
||||
const skillGroups = {};
|
||||
for (const name in resourceLayerGroups) {
|
||||
const metadata = resourceData[name];
|
||||
if (!skillGroups[metadata.skill]) {
|
||||
skillGroups[metadata.skill] = [];
|
||||
}
|
||||
skillGroups[metadata.skill].push({
|
||||
name: name,
|
||||
level: metadata.level,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort skills alphabetically
|
||||
const sortedSkills = Object.keys(skillGroups).sort();
|
||||
|
||||
// Create UI for each skill group
|
||||
for (const skill of sortedSkills) {
|
||||
const skillDiv = document.createElement('div');
|
||||
skillDiv.className = 'skill-group';
|
||||
|
||||
const header = document.createElement('div');
|
||||
header.className = 'skill-header';
|
||||
// Capitalize first letter of skill
|
||||
header.textContent = skill.charAt(0).toUpperCase() + skill.slice(1);
|
||||
skillDiv.appendChild(header);
|
||||
|
||||
// Resources are already sorted by level in backend, but sort again to be sure
|
||||
skillGroups[skill].sort((a, b) => a.level - b.level);
|
||||
|
||||
// Create checkbox for each resource
|
||||
for (const resource of skillGroups[skill]) {
|
||||
const label = createFilterLabel(resource.name);
|
||||
skillDiv.appendChild(label);
|
||||
}
|
||||
|
||||
container.appendChild(skillDiv);
|
||||
}
|
||||
|
||||
// Attach bulk filter handlers
|
||||
const selectAllBtn = document.getElementById('select-all-resources');
|
||||
const deselectAllBtn = document.getElementById('deselect-all-resources');
|
||||
|
||||
if (selectAllBtn) {
|
||||
selectAllBtn.addEventListener('click', () => {
|
||||
setAllFilters(true);
|
||||
});
|
||||
}
|
||||
|
||||
if (deselectAllBtn) {
|
||||
deselectAllBtn.addEventListener('click', () => {
|
||||
setAllFilters(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Create filter label with checkbox and icon
|
||||
function createFilterLabel(resourceName) {
|
||||
const label = document.createElement('label');
|
||||
label.className = 'filter-label';
|
||||
|
||||
const checkbox = document.createElement('input');
|
||||
checkbox.type = 'checkbox';
|
||||
checkbox.checked = filterState[resourceName];
|
||||
checkbox.dataset.resource = resourceName;
|
||||
checkbox.addEventListener('change', handleFilterChange);
|
||||
|
||||
const icon = document.createElement('img');
|
||||
icon.src = resourceIcons[resourceName].options.iconUrl;
|
||||
icon.className = 'filter-icon';
|
||||
icon.alt = resourceName;
|
||||
|
||||
const text = document.createElement('span');
|
||||
text.textContent = resourceName;
|
||||
|
||||
label.appendChild(checkbox);
|
||||
label.appendChild(icon);
|
||||
label.appendChild(text);
|
||||
|
||||
return label;
|
||||
}
|
||||
|
||||
// Handle filter checkbox change
|
||||
function handleFilterChange(event) {
|
||||
const resourceName = event.target.dataset.resource;
|
||||
const isVisible = event.target.checked;
|
||||
|
||||
filterState[resourceName] = isVisible;
|
||||
|
||||
// Show/hide layer group
|
||||
const layerGroup = resourceLayerGroups[resourceName];
|
||||
if (isVisible) {
|
||||
layerGroup.addTo(map);
|
||||
} else {
|
||||
map.removeLayer(layerGroup);
|
||||
}
|
||||
|
||||
// Persist state
|
||||
saveFilterState();
|
||||
}
|
||||
|
||||
// Set all filters to visible or hidden
|
||||
function setAllFilters(visible) {
|
||||
for (const name in filterState) {
|
||||
filterState[name] = visible;
|
||||
const layerGroup = resourceLayerGroups[name];
|
||||
|
||||
if (visible) {
|
||||
layerGroup.addTo(map);
|
||||
} else {
|
||||
map.removeLayer(layerGroup);
|
||||
}
|
||||
}
|
||||
|
||||
// Update checkboxes
|
||||
document.querySelectorAll('#resource-filters input[type="checkbox"]').forEach((cb) => {
|
||||
cb.checked = visible;
|
||||
});
|
||||
|
||||
saveFilterState();
|
||||
}
|
||||
|
||||
// Save filter state to localStorage
|
||||
// Persist the current filter visibility map to localStorage.
// Storage can be unavailable or full (private mode, quota), so failures
// are logged as a warning and otherwise ignored — filters still work
// for the current session, they just won't survive a reload.
function saveFilterState() {
    const STORAGE_KEY = 'cursebreaker_resource_filters';
    try {
        localStorage.setItem(STORAGE_KEY, JSON.stringify(filterState));
    } catch (error) {
        console.warn('Failed to save filter state to localStorage:', error);
    }
}
|
||||
|
||||
// Restore filter state from localStorage
|
||||
// Restore filter visibility from localStorage (written by saveFilterState).
// Saved entries whose resource name no longer exists are ignored, so stale
// blobs from older builds cannot break the map. Any storage/parse failure is
// logged and leaves the default (all-visible) state in place.
function restoreFilterState() {
    try {
        // localStorage access itself can throw (e.g. storage disabled by the
        // browser), so the read lives inside the try — mirroring the guarded
        // write path in saveFilterState.
        const saved = localStorage.getItem('cursebreaker_resource_filters');
        if (!saved) return;

        const savedState = JSON.parse(saved);

        for (const name in savedState) {
            // Only apply entries that map to a known layer group.
            if (resourceLayerGroups[name]) {
                filterState[name] = savedState[name];

                // Layers start visible, so only hidden entries need action.
                if (!savedState[name]) {
                    map.removeLayer(resourceLayerGroups[name]);
                }
            }
        }

        // Update checkboxes after the filter UI has been created.
        // NOTE(review): the 100ms delay assumes the filter list is rendered
        // by then — consider invoking this from the UI-build code instead.
        setTimeout(() => {
            document.querySelectorAll('#resource-filters input[type="checkbox"]').forEach((cb) => {
                const name = cb.dataset.resource;
                if (filterState[name] !== undefined) {
                    cb.checked = filterState[name];
                }
            });
        }, 100);

        console.log('Restored filter state from localStorage');
    } catch (error) {
        console.warn('Failed to restore filter state:', error);
    }
}
|
||||
318
cursebreaker-map/static/style.css
Normal file
318
cursebreaker-map/static/style.css
Normal file
@@ -0,0 +1,318 @@
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
overflow: hidden;
|
||||
background: #1a1a1a;
|
||||
color: #e0e0e0;
|
||||
}
|
||||
|
||||
#app {
|
||||
display: flex;
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
}
|
||||
|
||||
/* Sidebar */
|
||||
.sidebar {
|
||||
width: 320px;
|
||||
background: #2a2a2a;
|
||||
box-shadow: 2px 0 10px rgba(0, 0, 0, 0.5);
|
||||
z-index: 1000;
|
||||
transition: margin-left 0.3s ease;
|
||||
position: relative;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.sidebar.collapsed {
|
||||
margin-left: -320px;
|
||||
}
|
||||
|
||||
/* Sidebar toggle control */
|
||||
.leaflet-control-sidebar {
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 18px;
|
||||
line-height: 30px;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.sidebar-content {
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.sidebar h2 {
|
||||
color: #8b5cf6;
|
||||
margin-bottom: 5px;
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
color: #a0a0a0;
|
||||
font-size: 14px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.info-section {
|
||||
border-bottom: 1px solid #3a3a3a;
|
||||
padding-bottom: 20px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.filters-section,
|
||||
.map-info {
|
||||
margin-bottom: 25px;
|
||||
}
|
||||
|
||||
.filters-section h3,
|
||||
.map-info h3 {
|
||||
color: #8b5cf6;
|
||||
margin-bottom: 10px;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.filter-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.filter-group label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
padding: 5px;
|
||||
border-radius: 4px;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.filter-group label:hover {
|
||||
background: #3a3a3a;
|
||||
}
|
||||
|
||||
.filter-group input[type="checkbox"] {
|
||||
margin-right: 8px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.coming-soon {
|
||||
color: #a0a0a0;
|
||||
font-style: italic;
|
||||
font-size: 13px;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
#map-stats {
|
||||
font-size: 14px;
|
||||
color: #c0c0c0;
|
||||
}
|
||||
|
||||
#map-stats p {
|
||||
margin: 5px 0;
|
||||
}
|
||||
|
||||
/* Map */
|
||||
#map {
|
||||
flex: 1;
|
||||
height: 100vh;
|
||||
background: #1a1a1a;
|
||||
}
|
||||
|
||||
/* Leaflet overrides for dark theme */
|
||||
.leaflet-container {
|
||||
background: #1a1a1a;
|
||||
}
|
||||
|
||||
.leaflet-control-zoom a {
|
||||
background: #2a2a2a;
|
||||
color: #e0e0e0;
|
||||
border-color: #3a3a3a;
|
||||
}
|
||||
|
||||
.leaflet-control-zoom a:hover {
|
||||
background: #3a3a3a;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.leaflet-bar {
|
||||
border: 1px solid #3a3a3a;
|
||||
}
|
||||
|
||||
/* Coordinates display */
|
||||
.coordinates-display {
|
||||
position: absolute;
|
||||
bottom: 10px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: rgba(42, 42, 42, 0.95);
|
||||
color: #e0e0e0;
|
||||
padding: 8px 16px;
|
||||
border-radius: 6px;
|
||||
font-size: 14px;
|
||||
font-family: 'Courier New', monospace;
|
||||
z-index: 1000;
|
||||
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.5);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
#coord-text {
|
||||
color: #8b5cf6;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* Filter controls */
|
||||
.filter-controls {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.filter-btn {
|
||||
flex: 1;
|
||||
padding: 6px 12px;
|
||||
background: #3a3a3a;
|
||||
color: #e0e0e0;
|
||||
border: 1px solid #4a4a4a;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-size: 12px;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.filter-btn:hover {
|
||||
background: #4a4a4a;
|
||||
}
|
||||
|
||||
.filter-btn:active {
|
||||
background: #2a2a2a;
|
||||
}
|
||||
|
||||
/* Filter groups by skill */
|
||||
.skill-group {
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.skill-header {
|
||||
color: #8b5cf6;
|
||||
font-size: 13px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 4px;
|
||||
padding: 4px 8px;
|
||||
background: rgba(139, 92, 246, 0.1);
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
/* Filter items */
|
||||
.filter-label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
cursor: pointer;
|
||||
padding: 4px 8px;
|
||||
margin-left: 8px;
|
||||
border-radius: 4px;
|
||||
transition: background 0.2s;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.filter-label:hover {
|
||||
background: #3a3a3a;
|
||||
}
|
||||
|
||||
.filter-icon {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
image-rendering: pixelated;
|
||||
image-rendering: -moz-crisp-edges;
|
||||
image-rendering: crisp-edges;
|
||||
}
|
||||
|
||||
.loading-text {
|
||||
color: #a0a0a0;
|
||||
font-style: italic;
|
||||
font-size: 13px;
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
/* Dark theme popups */
|
||||
.leaflet-popup-content-wrapper {
|
||||
background: #2a2a2a;
|
||||
color: #e0e0e0;
|
||||
border: 1px solid #3a3a3a;
|
||||
}
|
||||
|
||||
.leaflet-popup-content {
|
||||
margin: 8px 12px;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.leaflet-popup-tip {
|
||||
background: #2a2a2a;
|
||||
}
|
||||
|
||||
/* Master toggle for marker categories */
|
||||
.master-toggle {
|
||||
margin-left: 0 !important;
|
||||
}
|
||||
|
||||
.master-toggle input[type="checkbox"] {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
accent-color: #8b5cf6;
|
||||
}
|
||||
|
||||
/* Map labels (text overlays) */
|
||||
.map-label {
|
||||
background: transparent;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.label-text {
|
||||
color: #e0e0e0;
|
||||
text-shadow:
|
||||
-1px -1px 2px #000,
|
||||
1px -1px 2px #000,
|
||||
-1px 1px 2px #000,
|
||||
1px 1px 2px #000,
|
||||
0 0 4px #000;
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
font-weight: bold;
|
||||
white-space: nowrap;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
/* House price styling in popup */
|
||||
.house-price {
|
||||
color: #ffd700;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* Popup styling for various marker types */
|
||||
.leaflet-popup-content strong {
|
||||
color: #8b5cf6;
|
||||
}
|
||||
|
||||
.leaflet-popup-content em {
|
||||
color: #a0a0a0;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
/* Responsive */
|
||||
@media (max-width: 768px) {
|
||||
.sidebar {
|
||||
width: 280px;
|
||||
}
|
||||
|
||||
.sidebar.collapsed {
|
||||
margin-left: -280px;
|
||||
}
|
||||
}
|
||||
48
cursebreaker-parser/Cargo.toml
Normal file
48
cursebreaker-parser/Cargo.toml
Normal file
@@ -0,0 +1,48 @@
|
||||
[package]
|
||||
name = "cursebreaker-parser"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "cursebreaker_parser"
|
||||
path = "src/lib.rs"
|
||||
|
||||
# Main binary - runs all parsers
|
||||
[[bin]]
|
||||
name = "cursebreaker-parser"
|
||||
path = "src/main.rs"
|
||||
|
||||
# XML Parser - loads game data from XML files and populates database
|
||||
[[bin]]
|
||||
name = "xml-parser"
|
||||
path = "src/bin/xml-parser.rs"
|
||||
|
||||
# Scene Parser - parses Unity scenes and extracts game objects
|
||||
[[bin]]
|
||||
name = "scene-parser"
|
||||
path = "src/bin/scene-parser.rs"
|
||||
|
||||
# Image Parser - processes minimap tiles and generates all zoom levels
|
||||
[[bin]]
|
||||
name = "image-parser"
|
||||
path = "src/bin/image-parser.rs"
|
||||
|
||||
[dependencies]
|
||||
unity-parser = { path = "../unity-parser" }
|
||||
serde_yaml = "0.9"
|
||||
inventory = "0.3"
|
||||
sparsey = "0.13"
|
||||
log = { version = "0.4", features = ["std"] }
|
||||
quick-xml = "0.37"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
diesel = { version = "2.2", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
|
||||
libsqlite3-sys = { version = ">=0.17.2", features = ["bundled"] }
|
||||
image = "0.25"
|
||||
webp = "0.3"
|
||||
thiserror = "1.0"
|
||||
chrono = "0.4"
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
diesel_migrations = "2.2"
|
||||
404
cursebreaker-parser/README.md
Normal file
404
cursebreaker-parser/README.md
Normal file
@@ -0,0 +1,404 @@
|
||||
# Cursebreaker Parser
|
||||
|
||||
A Rust library for parsing and managing game data from the Cursebreaker game. This crate provides tools to extract, load, and query game data from Unity scenes and XML files.
|
||||
|
||||
## Overview
|
||||
|
||||
Cursebreaker Parser is designed to:
|
||||
- Parse Unity scenes and extract game objects using the unity-parser library
|
||||
- Load game data from XML files (Items, NPCs, Quests, Harvestables, Loot tables, Maps, Fast Travel, Player Houses, Traits, Shops)
|
||||
- Process and compress minimap tiles and item images
|
||||
- Provide in-memory databases for efficient querying of game data
|
||||
- Serialize game data to SQL format for database storage
|
||||
|
||||
## Features
|
||||
|
||||
- **Item Database**: Load and query items with support for filtering by ID, category, slot, and other attributes
|
||||
- **NPC Database**: Manage NPC data including stats, levels, animations, and quest markers
|
||||
- **Quest Database**: Handle quest definitions, phases, and rewards
|
||||
- **Harvestable Database**: Track harvestable resources and their drop tables
|
||||
- **Loot Database**: Manage loot tables and drop configurations
|
||||
- **Map Database**: Handle map data and navigation
|
||||
- **Fast Travel Database**: Manage fast travel locations and connections
|
||||
- **Player House Database**: Track player houses and their locations
|
||||
- **Trait Database**: Handle character traits and their effects
|
||||
- **Shop Database**: Manage shop inventories and pricing
|
||||
- **Minimap Database**: Process and manage minimap tiles with multiple zoom levels
|
||||
- **XML Parsing**: Robust XML parsing with error handling
|
||||
- **SQL Export**: Prepare data for SQL database insertion
|
||||
- **Image Processing**: Process and compress minimap tiles and item icons
|
||||
- **Unity Scene Parsing**: Extract game objects and world resources from Unity scenes
|
||||
|
||||
## Binaries
|
||||
|
||||
The project provides multiple binaries to handle different parsing tasks. This allows you to run only the parts you need, avoiding long load times for unnecessary operations.
|
||||
|
||||
### Available Binaries
|
||||
|
||||
1. **xml-parser** - Loads game data from XML files and populates the SQLite database
|
||||
- Fast execution
|
||||
- Run this when XML files change
|
||||
```bash
|
||||
cargo run --bin xml-parser
|
||||
```
|
||||
|
||||
2. **scene-parser** - Parses Unity scenes and extracts world objects
|
||||
- Slow execution (Unity project initialization)
|
||||
- Extracts multiple types of interactable components and their positions:
|
||||
- **InteractableResource**: Harvestable resources → `world_resources` table
|
||||
- **InteractableTeleporter**: Teleporters with source/destination positions → `world_teleporters` table
|
||||
- **InteractableWorkbench**: Workbenches with workbench ID → `world_workbenches` table
|
||||
- **LootSpawner**: Loot spawners with item, amount, respawn time → `world_loot` table
|
||||
- **MapIcon**: Map icons with type, size, text, etc. → `world_map_icons` table
|
||||
- **MapNameChanger**: Map name changers → `world_map_name_changers` table
|
||||
- Processes item icons for harvestables:
|
||||
- Looks up the first item drop for each harvestable from `harvestable_drops` table
|
||||
- Loads the icon from `Data/Textures/ItemIcons/{item_id}.png`
|
||||
- Applies white outline (4px) and resizes to 64x64
|
||||
- Converts to WebP and stores in `resource_icons` table
|
||||
- Run this when scene files change
|
||||
```bash
|
||||
cargo run --bin scene-parser
|
||||
```
|
||||
|
||||
3. **image-parser** - Processes minimap tiles
|
||||
- Slow execution (image processing and compression)
|
||||
- Run this when minimap images change
|
||||
```bash
|
||||
cargo run --bin image-parser
|
||||
```
|
||||
|
||||
4. **cursebreaker-parser** - All-in-one binary (runs all parsers)
|
||||
- Slowest execution (runs everything)
|
||||
- Use when you need to regenerate the entire database
|
||||
```bash
|
||||
cargo run --bin cursebreaker-parser
|
||||
# or simply
|
||||
cargo run
|
||||
```
|
||||
|
||||
5. **verify-db** - Verifies database contents and shows basic statistics
|
||||
```bash
|
||||
cargo run --bin verify-db
|
||||
```
|
||||
|
||||
6. **verify-expanded-db** - Verifies expanded database schema with items, recipes, and stats
|
||||
```bash
|
||||
cargo run --bin verify-expanded-db
|
||||
```
|
||||
|
||||
7. **verify-images** - Verifies item images and shows storage statistics
|
||||
```bash
|
||||
cargo run --bin verify-images
|
||||
```
|
||||
|
||||
8. **verify-stats** - Verifies item stats and shows breakdown by type
|
||||
```bash
|
||||
cargo run --bin verify-stats
|
||||
```
|
||||
|
||||
9. **verify-resource-icons** - Verifies resource icons for harvestables
|
||||
```bash
|
||||
cargo run --bin verify-resource-icons
|
||||
```
|
||||
|
||||
### Building for Production
|
||||
|
||||
Build specific binaries for release:
|
||||
```bash
|
||||
cargo build --release --bin xml-parser
|
||||
cargo build --release --bin scene-parser
|
||||
cargo build --release --bin image-parser
|
||||
```
|
||||
|
||||
The compiled binaries will be in `target/release/`.
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Set the `CB_ASSETS_PATH` environment variable to the path of your CurseBreaker assets directory:
|
||||
|
||||
```bash
|
||||
export CB_ASSETS_PATH="/path/to/CBAssets"
|
||||
```
|
||||
|
||||
If not set, the default fallback is `/home/connor/repos/CBAssets`.
|
||||
|
||||
## Usage
|
||||
|
||||
### Loading Items from XML
|
||||
|
||||
```rust
|
||||
use cursebreaker_parser::ItemDatabase;
|
||||
|
||||
// Load all items from XML
|
||||
let item_db = ItemDatabase::load_from_xml("Data/XMLs/Items/Items.xml")?;
|
||||
println!("Loaded {} items", item_db.len());
|
||||
|
||||
// Get item by ID
|
||||
if let Some(item) = item_db.get_by_id(150) {
|
||||
println!("Found: {}", item.name);
|
||||
}
|
||||
|
||||
// Query items by category
|
||||
let weapons = item_db.get_by_category("bow");
|
||||
println!("Found {} bows", weapons.len());
|
||||
|
||||
// Query items by slot
|
||||
let consumables = item_db.get_by_slot("consumable");
|
||||
for item in consumables {
|
||||
println!("Consumable: {}", item.name);
|
||||
}
|
||||
```
|
||||
|
||||
### Preparing Data for SQL
|
||||
|
||||
```rust
|
||||
use cursebreaker_parser::ItemDatabase;
|
||||
|
||||
let item_db = ItemDatabase::load_from_xml("Data/XMLs/Items/Items.xml")?;
|
||||
|
||||
// Prepare data for SQL insertion
|
||||
// Returns Vec<(id, name, json_data)>
|
||||
let sql_data = item_db.prepare_for_sql();
|
||||
|
||||
for (id, name, json) in sql_data.iter().take(5) {
|
||||
println!("INSERT INTO items VALUES ({}, '{}', '{}')", id, name, json);
|
||||
}
|
||||
```
|
||||
|
||||
### Querying World Resources
|
||||
|
||||
```rust
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
|
||||
// Connect to database
|
||||
let mut conn = SqliteConnection::establish("../cursebreaker.db")?;
|
||||
|
||||
// Define the structure
|
||||
#[derive(Queryable, Debug)]
|
||||
struct WorldResource {
|
||||
item_id: i32,
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
}
|
||||
|
||||
// Query resources by item ID
|
||||
use cursebreaker_parser::schema::world_resources::dsl::*;
|
||||
|
||||
let copper_ore = world_resources
|
||||
.filter(item_id.eq(2))
|
||||
.load::<WorldResource>(&mut conn)?;
|
||||
|
||||
println!("Found {} copper ore nodes", copper_ore.len());
|
||||
for resource in copper_ore {
|
||||
println!(" Position: ({:.2}, {:.2})", resource.pos_x, resource.pos_y);
|
||||
}
|
||||
```
|
||||
|
||||
See `examples/query_world_resources.rs` for a complete example.
|
||||
|
||||
### Querying Resource Icons
|
||||
|
||||
```rust
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
|
||||
// Connect to database
|
||||
let mut conn = SqliteConnection::establish("../cursebreaker.db")?;
|
||||
|
||||
// Define the structure
|
||||
#[derive(Queryable, Debug)]
|
||||
struct ResourceIcon {
|
||||
item_id: i32, // Harvestable ID
|
||||
name: String, // Harvestable name
|
||||
icon_64: Vec<u8>, // WebP image data (64x64 with white border)
|
||||
}
|
||||
|
||||
// Query icon for a specific harvestable
|
||||
use cursebreaker_parser::schema::resource_icons::dsl::*;
|
||||
|
||||
let copper_icon = resource_icons
|
||||
.filter(item_id.eq(2)) // Harvestable ID for Copper Ore
|
||||
.first::<ResourceIcon>(&mut conn)?;
|
||||
|
||||
println!("Found icon for: {}", copper_icon.name);
|
||||
println!("Icon size: {} bytes (WebP format)", copper_icon.icon_64.len());
|
||||
|
||||
// Save to file if needed
|
||||
std::fs::write("copper_ore.webp", &copper_icon.icon_64)?;
|
||||
```
|
||||
|
||||
See `examples/resource_icons_example.rs` for a complete example.
|
||||
|
||||
### Additional Databases
|
||||
|
||||
Similar APIs are available for other game data types:
|
||||
|
||||
```rust
|
||||
use cursebreaker_parser::{
|
||||
MapDatabase, FastTravelDatabase, PlayerHouseDatabase,
|
||||
TraitDatabase, ShopDatabase, MinimapDatabase
|
||||
};
|
||||
|
||||
// Load maps, fast travel points, player houses, etc.
|
||||
let map_db = MapDatabase::load_from_xml("Data/XMLs/Maps/Map.xml")?;
|
||||
// ... similar usage patterns
|
||||
```
|
||||
|
||||
See the examples directory for usage of each database type.
|
||||
|
||||
### Database Verification
|
||||
|
||||
After parsing data, you can verify the database contents using the verification binaries:
|
||||
|
||||
```bash
|
||||
# Basic database verification
|
||||
cargo run --bin verify-db
|
||||
|
||||
# Verify expanded schema with recipes and stats
|
||||
cargo run --bin verify-expanded-db
|
||||
|
||||
# Check item images and storage usage
|
||||
cargo run --bin verify-images
|
||||
|
||||
# Analyze item stats breakdown
|
||||
cargo run --bin verify-stats
|
||||
```
|
||||
|
||||
### Examples
|
||||
|
||||
The project includes several example programs demonstrating different aspects of the parser:
|
||||
|
||||
- **game_data_demo.rs** - Comprehensive demo loading and querying all game data types (Items, NPCs, Quests, Harvestables, Loot)
|
||||
- **item_database_demo.rs** - Focused on item database operations
|
||||
- **query_world_resources.rs** - Querying world resource locations from the database
|
||||
- **resource_icons_example.rs** - Querying processed harvestable icons with white borders
|
||||
- **fast_travel_example.rs** - Working with fast travel locations
|
||||
- **maps_example.rs** - Map data handling
|
||||
- **player_houses_example.rs** - Player house management
|
||||
- **shops_example.rs** - Shop inventory and pricing
|
||||
- **traits_example.rs** - Character traits and effects
|
||||
|
||||
Run any example with:
|
||||
```bash
|
||||
cargo run --example <example_name>
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
cursebreaker-parser/
|
||||
├── src/
|
||||
│ ├── lib.rs # Library entry point and public API
|
||||
│ ├── main.rs # Main binary (all-in-one parser)
|
||||
│ ├── bin/ # Separate parser binaries
|
||||
│ │ ├── xml-parser.rs # XML parsing only
|
||||
│ │ ├── scene-parser.rs # Unity scene parsing only
|
||||
│ │ ├── image-parser.rs # Image processing only
|
||||
│ │ ├── verify-db.rs # Database verification
|
||||
│ │ ├── verify-expanded-db.rs # Expanded database verification
|
||||
│ │ ├── verify-images.rs # Image verification
|
||||
│ │ ├── verify-stats.rs # Stats verification
|
||||
│ │ └── verify-resource-icons.rs # Resource icons verification
|
||||
│ ├── xml_parser.rs # XML parsing utilities
|
||||
│ ├── image_processor.rs # Image processing utilities
|
||||
│ ├── item_loader.rs # Item loading logic
|
||||
│ ├── schema.rs # Database schema definitions
|
||||
│ ├── databases/ # Database implementations
|
||||
│ │ ├── item_database.rs
|
||||
│ │ ├── npc_database.rs
|
||||
│ │ ├── quest_database.rs
|
||||
│ │ ├── harvestable_database.rs
|
||||
│ │ ├── loot_database.rs
|
||||
│ │ ├── map_database.rs
|
||||
│ │ ├── fast_travel_database.rs
|
||||
│ │ ├── player_house_database.rs
|
||||
│ │ ├── trait_database.rs
|
||||
│ │ ├── shop_database.rs
|
||||
│ │ └── minimap_database.rs
|
||||
│ └── types/ # Type definitions
|
||||
│ ├── cursebreaker/ # Game-specific types (Items, NPCs, Quests, etc.)
|
||||
│ └── monobehaviours/ # Unity MonoBehaviour types
|
||||
├── examples/ # Example usage
|
||||
│ ├── fast_travel_example.rs
|
||||
│ ├── game_data_demo.rs
|
||||
│ ├── item_database_demo.rs
|
||||
│ ├── maps_example.rs
|
||||
│ ├── player_houses_example.rs
|
||||
│   ├── query_world_resources.rs
│   ├── resource_icons_example.rs
|
||||
│ ├── shops_example.rs
|
||||
│ └── traits_example.rs
|
||||
├── migrations/ # Database migrations
|
||||
├── Cargo.toml # Package configuration
|
||||
├── XML_PARSER.md            # XML parsing documentation
|
||||
└── README.md # This file
|
||||
|
||||
```
|
||||
|
||||
## Database Schema
|
||||
|
||||
The parser uses Diesel for database operations with SQLite. Database migrations are located in the `migrations/` directory and handle:
|
||||
|
||||
- Item data with stats, images, and crafting recipes
|
||||
- NPC information and loot tables
|
||||
- Quest definitions and phases
|
||||
- Harvestable resources and drop tables
|
||||
- World resource locations from Unity scenes
|
||||
- Resource icons for harvestables (64x64 WebP with white borders)
|
||||
- World teleporters with source/destination coordinates
|
||||
- World workbenches with workbench IDs
|
||||
- World loot spawners with item, amount, and respawn time
|
||||
- Map icons with type, size, text, and hover text
|
||||
- Map name changers with location and map name
|
||||
- Minimap tiles and metadata
|
||||
- Shop inventories and pricing
|
||||
- Player houses and locations
|
||||
- Fast travel points
|
||||
- Character traits
|
||||
|
||||
## Dependencies
|
||||
|
||||
- **unity-parser**: For parsing Unity scene files
|
||||
- **quick-xml**: XML parsing
|
||||
- **serde**: Serialization/deserialization
|
||||
- **serde_json**: JSON support
|
||||
- **serde_yaml**: YAML support
|
||||
- **sparsey**: ECS (Entity Component System) support
|
||||
- **diesel**: SQL database support with SQLite
|
||||
- **image**: Image processing and WebP compression
|
||||
- **thiserror**: Error handling
|
||||
|
||||
## Building
|
||||
|
||||
```bash
|
||||
# Build the library
|
||||
cargo build
|
||||
|
||||
# Run tests
|
||||
cargo test
|
||||
|
||||
# Build specific binaries
|
||||
cargo build --bin xml-parser
|
||||
cargo build --bin scene-parser
|
||||
cargo build --bin image-parser
|
||||
cargo build --bin verify-db
|
||||
|
||||
# Run examples
|
||||
cargo run --example game_data_demo
|
||||
cargo run --example item_database_demo
|
||||
|
||||
# Build for release
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For detailed XML parsing information, see [XML_PARSER.md](XML_PARSER.md).
|
||||
|
||||
Generate API documentation:
|
||||
```bash
|
||||
cargo doc --open
|
||||
```
|
||||
278
cursebreaker-parser/XML_PARSER.md
Normal file
278
cursebreaker-parser/XML_PARSER.md
Normal file
@@ -0,0 +1,278 @@
|
||||
# XML Parser Documentation
|
||||
|
||||
This document explains the XML parsing system used to load game data from Cursebreaker's XML files and populate the SQLite database.
|
||||
|
||||
## Overview
|
||||
|
||||
The XML parser system is responsible for:
|
||||
1. Reading game data from XML files (items, NPCs, quests, etc.)
|
||||
2. Parsing the XML into Rust structs
|
||||
3. Storing the parsed data in a SQLite database
|
||||
|
||||
## Architecture
|
||||
|
||||
### File Structure
|
||||
|
||||
```
|
||||
cursebreaker-parser/src/
|
||||
├── xml_parsers/ # XML parsing module
|
||||
│ ├── mod.rs # Shared utilities and re-exports
|
||||
│ ├── items.rs # Item parser
|
||||
│ ├── npcs.rs # NPC parser
|
||||
│ ├── quests.rs # Quest parser
|
||||
│ ├── harvestables.rs # Harvestable resource parser
|
||||
│ ├── loot.rs # Loot table parser
|
||||
│ ├── maps.rs # Map/scene parser
|
||||
│ ├── fast_travel.rs # Fast travel location parser
|
||||
│ ├── player_houses.rs # Player house parser
|
||||
│ ├── traits.rs # Character trait parser
|
||||
│ └── shops.rs # Shop/vendor parser
|
||||
├── databases/ # Database abstraction layer
|
||||
│ ├── item_database.rs
|
||||
│ ├── npc_database.rs
|
||||
│ └── ...
|
||||
├── types/ # Data structures
|
||||
│ └── cursebreaker/
|
||||
│ ├── item.rs
|
||||
│ ├── npc.rs
|
||||
│ └── ...
|
||||
└── bin/
|
||||
└── xml-parser.rs # CLI binary
|
||||
```
|
||||
|
||||
### Data Flow
|
||||
|
||||
```
|
||||
XML Files (CBAssets/Data/XMLs/)
|
||||
│
|
||||
▼
|
||||
XML Parsers (xml_parsers/*.rs)
|
||||
│
|
||||
▼
|
||||
Rust Structs (types/cursebreaker/*.rs)
|
||||
│
|
||||
▼
|
||||
Database Layer (databases/*.rs)
|
||||
│
|
||||
▼
|
||||
SQLite Database (cursebreaker.db)
|
||||
```
|
||||
|
||||
## Parser Components
|
||||
|
||||
### Shared Utilities (`xml_parsers/mod.rs`)
|
||||
|
||||
The module provides common functionality used by all parsers:
|
||||
|
||||
```rust
|
||||
/// Error types for XML parsing
|
||||
pub enum XmlParseError {
|
||||
XmlError(quick_xml::Error), // XML syntax errors
|
||||
IoError(std::io::Error), // File read errors
|
||||
AttrError(AttrError), // Attribute parsing errors
|
||||
MissingAttribute(String), // Required attribute not found
|
||||
InvalidAttribute(String), // Attribute value invalid
|
||||
}
|
||||
|
||||
/// Parse XML element attributes into a HashMap
|
||||
fn parse_attributes(element: &BytesStart) -> Result<HashMap<String, String>, XmlParseError>
|
||||
|
||||
/// Parse health range strings like "3-5" or "3" into (min, max)
|
||||
fn parse_health_range(health_str: &str) -> (i32, i32)
|
||||
```
|
||||
|
||||
### Individual Parsers
|
||||
|
||||
Each parser follows a similar pattern:
|
||||
|
||||
1. **Open and read the XML file** using `quick_xml::Reader`
|
||||
2. **Iterate through XML events** (Start, Empty, End, Text, Eof)
|
||||
3. **Match element names** and extract attributes
|
||||
4. **Build Rust structs** from the parsed data
|
||||
5. **Return a Vec** of parsed objects
|
||||
|
||||
#### Example: Item Parser Flow
|
||||
|
||||
```rust
|
||||
pub fn parse_items_xml<P: AsRef<Path>>(path: P) -> Result<Vec<Item>, XmlParseError> {
|
||||
// 1. Open file and create reader
|
||||
let file = File::open(path)?;
|
||||
let mut reader = Reader::from_reader(BufReader::new(file));
|
||||
|
||||
// 2. Process XML events
|
||||
loop {
|
||||
match reader.read_event_into(&mut buf) {
|
||||
Ok(Event::Start(e)) | Ok(Event::Empty(e)) => {
|
||||
match e.name().as_ref() {
|
||||
b"item" => {
|
||||
// 3. Parse attributes
|
||||
let attrs = parse_attributes(&e)?;
|
||||
let id = attrs.get("id")...;
|
||||
let name = attrs.get("name")...;
|
||||
|
||||
// 4. Create struct
|
||||
let item = Item::new(id, name);
|
||||
current_item = Some(item);
|
||||
}
|
||||
b"stat" => { /* Parse nested stat element */ }
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Ok(Event::End(e)) => {
|
||||
if e.name().as_ref() == b"item" {
|
||||
// 5. Add completed item to results
|
||||
items.push(current_item.take().unwrap());
|
||||
}
|
||||
}
|
||||
Ok(Event::Eof) => break,
|
||||
Err(e) => return Err(XmlParseError::XmlError(e)),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
```
|
||||
|
||||
## Supported Data Types
|
||||
|
||||
| Parser | XML Source | Description |
|
||||
|--------|-----------|-------------|
|
||||
| `items` | `Items/Items.xml` | Game items (weapons, armor, consumables, etc.) |
|
||||
| `npcs` | `Npcs/NPCInfo.xml` | Non-player characters (enemies, vendors, quest givers) |
|
||||
| `quests` | `Quests/Quests.xml` | Quest definitions with phases and rewards |
|
||||
| `harvestables` | `Harvestables/HarvestableInfo.xml` | Gatherable resources (trees, rocks, fishing spots) |
|
||||
| `loot` | `Loot/Loot.xml` | NPC drop tables |
|
||||
| `maps` | `Maps/Maps.xml` | Game scenes/areas with lighting and fog settings |
|
||||
| `fast_travel` | `FastTravel*.xml` | Teleport locations, canoe routes, portals |
|
||||
| `player_houses` | `PlayerHouses/PlayerHouses.xml` | Purchasable player housing |
|
||||
| `traits` | `Traits/Traits.xml` | Character traits/perks |
|
||||
| `shops` | `Shops/Shops.xml` | Vendor inventories and pricing |
|
||||
|
||||
## CLI Usage
|
||||
|
||||
The `xml-parser` binary provides command-line control over which parsers to run:
|
||||
|
||||
```bash
|
||||
# Parse all data types
|
||||
xml-parser --all
|
||||
xml-parser -a
|
||||
|
||||
# Parse specific data types
|
||||
xml-parser --items # or -i
|
||||
xml-parser --npcs # or -n
|
||||
xml-parser --quests # or -q
|
||||
xml-parser --harvestables # or -r
|
||||
xml-parser --loot # or -l
|
||||
xml-parser --maps # or -m
|
||||
xml-parser --fast-travel # or -f
|
||||
xml-parser --houses # or -p
|
||||
xml-parser --traits # or -t
|
||||
xml-parser --shops # or -s
|
||||
|
||||
# Combine multiple parsers
|
||||
xml-parser --items --npcs --quests
|
||||
xml-parser -i -n -q
|
||||
|
||||
# View help
|
||||
xml-parser --help
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `CB_ASSETS_PATH` | `/home/connor/repos/CBAssets` | Path to game assets directory |
|
||||
| `DATABASE_URL` | `cursebreaker.db` | SQLite database file path |
|
||||
|
||||
## Database Integration
|
||||
|
||||
Each parser has a corresponding database module that handles:
|
||||
|
||||
1. **Loading from XML** - Wraps the parser and creates a queryable database
|
||||
2. **Querying** - Methods like `get_by_id()`, `get_by_name()`, `get_all()`
|
||||
3. **Saving to SQLite** - Serializes data and inserts into database tables
|
||||
|
||||
### Example: ItemDatabase
|
||||
|
||||
```rust
|
||||
// Load items from XML
|
||||
let item_db = ItemDatabase::load_from_xml("path/to/Items.xml")?;
|
||||
|
||||
// Query items
|
||||
let sword = item_db.get_by_id(150);
|
||||
let bows = item_db.get_by_category("bow");
|
||||
|
||||
// Save to database (includes icon processing)
|
||||
item_db.save_to_db_with_images(&mut conn, "path/to/icons")?;
|
||||
```
|
||||
|
||||
## XML Format Examples
|
||||
|
||||
### Item XML
|
||||
```xml
|
||||
<item id="150" name="Iron Sword" level="10" price="500" maxstack="1">
|
||||
<stat damagephysical="25" accuracyphysical="5"/>
|
||||
<anim idle="1" walk="2" run="3" weaponattack="4"/>
|
||||
</item>
|
||||
```
|
||||
|
||||
### NPC XML
|
||||
```xml
|
||||
<npc id="45" name="Goblin" level="5" health="100" aggressive="1">
|
||||
<stat damagephysical="10" resistancephysical="5"/>
|
||||
<level swordsmanship="3" defence="2"/>
|
||||
</npc>
|
||||
```
|
||||
|
||||
### Quest XML
|
||||
```xml
|
||||
<quest id="1" name="First Steps" mainquest="1">
|
||||
<phase id="1" trackerdescription="Talk to the Elder"/>
|
||||
<phase id="2" trackerdescription="Collect 5 herbs"/>
|
||||
<rewards>
|
||||
<reward item="100" amount="1"/>
|
||||
<reward skill="swordsmanship" xp="50"/>
|
||||
</rewards>
|
||||
</quest>
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The parser uses a custom `XmlParseError` enum to handle various failure modes:
|
||||
|
||||
- **MissingAttribute**: Required XML attribute not found (e.g., missing `id`)
|
||||
- **InvalidAttribute**: Attribute value cannot be parsed (e.g., non-numeric ID)
|
||||
- **XmlError**: Malformed XML syntax
|
||||
- **IoError**: File not found or permission denied
|
||||
|
||||
Parsers fail fast on required attributes but use defaults for optional ones:
|
||||
|
||||
```rust
|
||||
// Required - returns error if missing
|
||||
let id = attrs.get("id")
|
||||
.ok_or_else(|| XmlParseError::MissingAttribute("id".to_string()))?;
|
||||
|
||||
// Optional - uses default if missing
|
||||
let level = attrs.get("level")
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(1);
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
- **Streaming parser**: Uses `quick_xml` which processes XML as a stream, keeping memory usage low
|
||||
- **Single-pass parsing**: Each file is read once and parsed in a single pass
|
||||
- **Batch database inserts**: Data is collected into vectors before database insertion
|
||||
- **Selective parsing**: CLI allows parsing only needed data types, reducing processing time
|
||||
|
||||
## Adding a New Parser
|
||||
|
||||
To add support for a new XML data type:
|
||||
|
||||
1. **Create the type** in `types/cursebreaker/new_type.rs`
|
||||
2. **Create the parser** in `xml_parsers/new_type.rs`
|
||||
3. **Export from mod.rs**: Add `mod new_type;` and `pub use new_type::parse_new_type_xml;`
|
||||
4. **Create database module** in `databases/new_type_database.rs`
|
||||
5. **Add CLI flag** in `bin/xml-parser.rs`
|
||||
6. **Update this documentation**
|
||||
9
cursebreaker-parser/diesel.toml
Normal file
9
cursebreaker-parser/diesel.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
# For documentation on how to configure this file,
|
||||
# see https://diesel.rs/guides/configuring-diesel-cli
|
||||
|
||||
[print_schema]
|
||||
file = "src/schema.rs"
|
||||
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
|
||||
|
||||
[migrations_directory]
|
||||
dir = "migrations"
|
||||
109
cursebreaker-parser/examples/fast_travel_example.rs
Normal file
109
cursebreaker-parser/examples/fast_travel_example.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
use cursebreaker_parser::{FastTravelDatabase, FastTravelType};
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Load all fast travel types from the directory
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let ft_db = FastTravelDatabase::load_from_directory(&format!("{}/Data/XMLs", cb_assets_path))?;
|
||||
|
||||
println!("=== Fast Travel Database Statistics ===");
|
||||
println!("Total locations: {}", ft_db.len());
|
||||
println!("Regular locations: {}", ft_db.count_by_type(FastTravelType::Location));
|
||||
println!("Canoe locations: {}", ft_db.count_by_type(FastTravelType::Canoe));
|
||||
println!("Portal locations: {}", ft_db.count_by_type(FastTravelType::Portal));
|
||||
println!();
|
||||
|
||||
// Show regular locations
|
||||
println!("=== Regular Fast Travel Locations ===");
|
||||
let locations = ft_db.get_locations();
|
||||
for loc in locations.iter().take(5) {
|
||||
println!(" [{}] {} (unlocked: {})", loc.id, loc.name, loc.unlocked);
|
||||
if let Some(ref connections) = loc.connections {
|
||||
println!(" Connections: {}", connections);
|
||||
}
|
||||
}
|
||||
println!("... and {} more", locations.len().saturating_sub(5));
|
||||
println!();
|
||||
|
||||
// Show canoe locations
|
||||
println!("=== Canoe Fast Travel Locations ===");
|
||||
let canoe_locs = ft_db.get_canoe_locations();
|
||||
for loc in &canoe_locs {
|
||||
println!(" [{}] {}", loc.id, loc.name);
|
||||
if let Some(ref checks) = loc.checks {
|
||||
println!(" Requirements: {}", checks);
|
||||
}
|
||||
}
|
||||
println!("Total: {}", canoe_locs.len());
|
||||
println!();
|
||||
|
||||
// Show portals
|
||||
println!("=== Portal Fast Travel Locations ===");
|
||||
let portals = ft_db.get_portals();
|
||||
for portal in portals.iter().take(5) {
|
||||
println!(" [{}] {}", portal.id, portal.name);
|
||||
if let Some((x, y, z)) = portal.get_position() {
|
||||
println!(" Position: ({:.2}, {:.2}, {:.2})", x, y, z);
|
||||
}
|
||||
}
|
||||
println!("... and {} more", portals.len().saturating_sub(5));
|
||||
println!();
|
||||
|
||||
// Show unlocked locations
|
||||
println!("=== Unlocked Locations ===");
|
||||
let unlocked = ft_db.get_unlocked_locations();
|
||||
for loc in unlocked.iter().take(10) {
|
||||
println!(" [{}] {}", loc.id, loc.name);
|
||||
}
|
||||
println!("Total unlocked: {}", unlocked.len());
|
||||
println!();
|
||||
|
||||
// Show locations with requirements
|
||||
println!("=== Locations with Requirements ===");
|
||||
let with_reqs = ft_db.get_locations_with_requirements();
|
||||
for loc in &with_reqs {
|
||||
println!(" [{}] {} - {}", loc.id, loc.name, loc.checks.as_ref().unwrap());
|
||||
}
|
||||
println!("Total with requirements: {}", with_reqs.len());
|
||||
println!();
|
||||
|
||||
// Show locations requiring specific trait
|
||||
println!("=== Locations requiring Trait 273 ===");
|
||||
let trait_locs = ft_db.get_locations_requiring_trait(273);
|
||||
for loc in &trait_locs {
|
||||
println!(" [{}] {}", loc.id, loc.name);
|
||||
}
|
||||
println!("Total: {}", trait_locs.len());
|
||||
println!();
|
||||
|
||||
// Show connected locations
|
||||
println!("=== Connected Locations (examples) ===");
|
||||
let connected = ft_db.get_connected_locations();
|
||||
for loc in connected.iter().take(5) {
|
||||
println!(
|
||||
" [{}] {} connects to: {}",
|
||||
loc.id,
|
||||
loc.name,
|
||||
loc.connections.as_ref().unwrap()
|
||||
);
|
||||
}
|
||||
println!("Total connected: {}", connected.len());
|
||||
println!();
|
||||
|
||||
// Find a specific location by ID
|
||||
if let Some(loc) = ft_db.get_by_id(4) {
|
||||
println!("=== Location Details (ID 4) ===");
|
||||
println!("Name: {}", loc.name);
|
||||
println!("Type: {}", loc.travel_type);
|
||||
println!("Position: {}", loc.position);
|
||||
if let Some(ref checks) = loc.checks {
|
||||
println!("Requirements: {}", checks);
|
||||
println!("Parsed checks:");
|
||||
for (check_type, value) in loc.parse_checks() {
|
||||
println!(" - {} = {}", check_type, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
303
cursebreaker-parser/examples/game_data_demo.rs
Normal file
303
cursebreaker-parser/examples/game_data_demo.rs
Normal file
@@ -0,0 +1,303 @@
|
||||
//! Example demonstrating combined Items, NPCs, Quests, and Harvestables database usage
|
||||
//!
|
||||
//! Run with: cargo run --example game_data_demo
|
||||
|
||||
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase};
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("🎮 Cursebreaker Game Data Demo\n");
|
||||
|
||||
// Load all game data
|
||||
println!("📚 Loading game data...");
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let item_db = ItemDatabase::load_from_xml(&format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path))?;
|
||||
let npc_db = NpcDatabase::load_from_xml(&format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path))?;
|
||||
let quest_db = QuestDatabase::load_from_xml(&format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path))?;
|
||||
let harvestable_db = HarvestableDatabase::load_from_xml(&format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path))?;
|
||||
let loot_db = LootDatabase::load_from_xml(&format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path))?;
|
||||
|
||||
println!("✅ Loaded {} items", item_db.len());
|
||||
println!("✅ Loaded {} NPCs", npc_db.len());
|
||||
println!("✅ Loaded {} quests", quest_db.len());
|
||||
println!("✅ Loaded {} harvestables", harvestable_db.len());
|
||||
println!("✅ Loaded {} loot tables\n", loot_db.len());
|
||||
|
||||
// =======================================================================
|
||||
// Items
|
||||
// =======================================================================
|
||||
println!("=== Items ===");
|
||||
let weapons = item_db.get_by_slot("weapon");
|
||||
let armor = item_db.get_by_slot("armor");
|
||||
let consumables = item_db.get_by_slot("consumable");
|
||||
|
||||
println!("By slot:");
|
||||
println!(" • Weapons: {}", weapons.len());
|
||||
println!(" • Armor: {}", armor.len());
|
||||
println!(" • Consumables: {}", consumables.len());
|
||||
|
||||
// Find specific item
|
||||
if let Some(sword) = item_db.get_by_id(150) {
|
||||
println!("\nSample item (ID 150):");
|
||||
println!(" Name: {}", sword.name);
|
||||
if let Some(desc) = &sword.description {
|
||||
println!(" Description: {}", desc);
|
||||
}
|
||||
if let Some(skill) = &sword.skill {
|
||||
println!(" Skill: {}", skill);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// =======================================================================
|
||||
// NPCs
|
||||
// =======================================================================
|
||||
println!("=== NPCs ===");
|
||||
let hostile = npc_db.get_hostile();
|
||||
let interactable = npc_db.get_interactable();
|
||||
let shopkeepers = npc_db.get_shopkeepers();
|
||||
|
||||
println!("By type:");
|
||||
println!(" • Hostile NPCs: {}", hostile.len());
|
||||
println!(" • Interactable NPCs: {}", interactable.len());
|
||||
println!(" • Shopkeepers: {}", shopkeepers.len());
|
||||
|
||||
// Find NPCs by tag
|
||||
let undead = npc_db.get_by_tag("Undead");
|
||||
let predators = npc_db.get_by_tag("Predator");
|
||||
println!("\nBy tag:");
|
||||
println!(" • Undead: {}", undead.len());
|
||||
println!(" • Predators: {}", predators.len());
|
||||
|
||||
// Sample hostile NPC
|
||||
if let Some(wolf) = npc_db.get_by_id(1) {
|
||||
println!("\nSample hostile NPC (ID 1):");
|
||||
println!(" Name: {}", wolf.name);
|
||||
if let Some(level) = wolf.level {
|
||||
println!(" Level: {}", level);
|
||||
}
|
||||
if let Some(aggro) = wolf.aggrodistance {
|
||||
println!(" Aggro Distance: {}", aggro);
|
||||
}
|
||||
if let Some(speed) = wolf.movementspeed {
|
||||
println!(" Movement Speed: {}", speed);
|
||||
}
|
||||
println!(" Stats: {} stat entries", wolf.stats.len());
|
||||
}
|
||||
|
||||
// Sample interactable NPC
|
||||
println!("\nSample shopkeepers:");
|
||||
for shopkeeper in shopkeepers.iter().take(3) {
|
||||
println!(" • {} (Shop ID: {:?})", shopkeeper.name, shopkeeper.shop);
|
||||
}
|
||||
println!();
|
||||
|
||||
// =======================================================================
|
||||
// Quests
|
||||
// =======================================================================
|
||||
println!("=== Quests ===");
|
||||
let main_quests = quest_db.get_main_quests();
|
||||
let side_quests = quest_db.get_side_quests();
|
||||
let hidden_quests = quest_db.get_hidden_quests();
|
||||
|
||||
println!("By type:");
|
||||
println!(" • Main quests: {}", main_quests.len());
|
||||
println!(" • Side quests: {}", side_quests.len());
|
||||
println!(" • Hidden quests: {}", hidden_quests.len());
|
||||
|
||||
// Main quest details
|
||||
println!("\nMain quests:");
|
||||
for quest in main_quests.iter().take(5) {
|
||||
println!(" • {} (ID: {}, {} phases)",
|
||||
quest.name, quest.id, quest.phase_count());
|
||||
}
|
||||
|
||||
// Sample quest details
|
||||
if let Some(quest) = quest_db.get_by_id(1) {
|
||||
println!("\nSample quest (ID 1):");
|
||||
println!(" Name: {}", quest.name);
|
||||
println!(" Phases: {}", quest.phases.len());
|
||||
println!(" Rewards: {}", quest.rewards.len());
|
||||
|
||||
if let Some(phase) = quest.get_phase(1) {
|
||||
if let Some(desc) = &phase.trackerdescription {
|
||||
println!(" Phase 1: {}", desc);
|
||||
}
|
||||
}
|
||||
|
||||
if !quest.rewards.is_empty() {
|
||||
println!(" Quest rewards:");
|
||||
for reward in &quest.rewards {
|
||||
if let Some(item_id) = reward.item {
|
||||
if let Some(item) = item_db.get_by_id(item_id) {
|
||||
println!(" - {} x{}", item.name, reward.amount.unwrap_or(1));
|
||||
}
|
||||
} else if let Some(skill) = &reward.skill {
|
||||
println!(" - {} XP: {}", skill, reward.xp.unwrap_or(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// =======================================================================
|
||||
// Harvestables
|
||||
// =======================================================================
|
||||
println!("=== Harvestables ===");
|
||||
let trees = harvestable_db.get_trees();
|
||||
let woodcutting = harvestable_db.get_by_skill("Woodcutting");
|
||||
let mining = harvestable_db.get_by_skill("mining");
|
||||
let fishing = harvestable_db.get_by_skill("Fishing");
|
||||
let alchemy = harvestable_db.get_by_skill("Alchemy");
|
||||
|
||||
println!("By skill:");
|
||||
println!(" • Trees: {}", trees.len());
|
||||
println!(" • Woodcutting: {}", woodcutting.len());
|
||||
println!(" • Mining: {}", mining.len());
|
||||
println!(" • Fishing: {}", fishing.len());
|
||||
println!(" • Alchemy: {}", alchemy.len());
|
||||
|
||||
// Sample harvestable
|
||||
if let Some(spruce) = harvestable_db.get_by_typeid(1) {
|
||||
println!("\nSample harvestable (TypeID 1):");
|
||||
println!(" Name: {}", spruce.name);
|
||||
println!(" Action: {}", spruce.actionname.as_deref().unwrap_or("N/A"));
|
||||
if let Some(level) = spruce.level {
|
||||
println!(" Level: {}", level);
|
||||
}
|
||||
if let Some(skill) = &spruce.skill {
|
||||
println!(" Skill: {}", skill);
|
||||
}
|
||||
if let Some(tool) = &spruce.tool {
|
||||
println!(" Tool: {}", tool);
|
||||
}
|
||||
println!(" Drops: {} different items", spruce.drops.len());
|
||||
|
||||
// Show drops
|
||||
println!(" Item drops:");
|
||||
for drop in &spruce.drops {
|
||||
if let Some(item) = item_db.get_by_id(drop.id) {
|
||||
println!(" - {} ({}x{}, rate: {})",
|
||||
item.name,
|
||||
drop.minamount.unwrap_or(1),
|
||||
drop.maxamount.unwrap_or(1),
|
||||
drop.droprate.unwrap_or(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("\nHarvestables by level:");
|
||||
let low_level = harvestable_db.get_by_level_range(1, 10);
|
||||
let mid_level = harvestable_db.get_by_level_range(11, 50);
|
||||
let high_level = harvestable_db.get_by_level_range(51, 100);
|
||||
println!(" • Level 1-10: {}", low_level.len());
|
||||
println!(" • Level 11-50: {}", mid_level.len());
|
||||
println!(" • Level 51-100: {}", high_level.len());
|
||||
println!();
|
||||
|
||||
// =======================================================================
|
||||
// Loot Tables
|
||||
// =======================================================================
|
||||
println!("=== Loot Tables ===");
|
||||
let all_tables = loot_db.all_tables();
|
||||
let conditional_tables = loot_db.get_conditional_tables();
|
||||
let guaranteed_tables = loot_db.get_tables_with_guaranteed_drops();
|
||||
|
||||
println!("Statistics:");
|
||||
println!(" • Total loot tables: {}", loot_db.len());
|
||||
println!(" • NPCs with loot: {}", loot_db.get_all_npcs_with_loot().len());
|
||||
println!(" • Droppable items: {}", loot_db.get_all_droppable_items().len());
|
||||
println!(" • Tables with conditional drops: {}", conditional_tables.len());
|
||||
println!(" • Tables with guaranteed drops: {}", guaranteed_tables.len());
|
||||
|
||||
// Sample loot table
|
||||
if let Some(table) = all_tables.first() {
|
||||
println!("\nSample loot table:");
|
||||
if let Some(name) = &table.name {
|
||||
println!(" Name: {}", name);
|
||||
}
|
||||
println!(" NPCs: {:?}", table.npc_ids);
|
||||
println!(" Drops: {} items", table.drops.len());
|
||||
|
||||
// Show first few drops
|
||||
println!(" Sample drops:");
|
||||
for drop in table.drops.iter().take(3) {
|
||||
if let Some(item) = item_db.get_by_id(drop.item) {
|
||||
let rate_str = drop.rate.map(|r| r.to_string()).unwrap_or_else(|| "N/A".to_string());
|
||||
let amount_str = if let (Some(min), Some(max)) = (drop.minamount, drop.maxamount) {
|
||||
format!("{}x{}", min, max)
|
||||
} else {
|
||||
"1x1".to_string()
|
||||
};
|
||||
println!(" - {} ({}, rate: {})", item.name, amount_str, rate_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cross-reference: Find what an NPC drops
|
||||
println!("\nSample NPC drops:");
|
||||
if let Some(npc) = npc_db.get_hostile().first() {
|
||||
println!(" NPC: {} (ID: {})", npc.name, npc.id);
|
||||
let drops = loot_db.get_drops_for_npc(npc.id);
|
||||
if !drops.is_empty() {
|
||||
println!(" Drops {} different items:", drops.len());
|
||||
for drop in drops.iter().take(5) {
|
||||
if let Some(item) = item_db.get_by_id(drop.item) {
|
||||
println!(" - {}", item.name);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!(" No drops configured");
|
||||
}
|
||||
}
|
||||
|
||||
// Cross-reference: Find what NPCs drop an item
|
||||
if let Some(item) = item_db.get_by_id(180) {
|
||||
println!("\nItem '{}' drops from:", item.name);
|
||||
let npcs = loot_db.get_npcs_dropping_item(180);
|
||||
for npc_id in npcs.iter().take(5) {
|
||||
if let Some(npc) = npc_db.get_by_id(*npc_id) {
|
||||
println!(" • {}", npc.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// =======================================================================
|
||||
// Cross-referencing data
|
||||
// =======================================================================
|
||||
println!("=== Cross-referencing Data ===");
|
||||
|
||||
// Find NPCs that give quests
|
||||
let mut quest_givers = 0;
|
||||
for npc in npc_db.all_npcs() {
|
||||
if !npc.questmarkers.is_empty() {
|
||||
quest_givers += 1;
|
||||
}
|
||||
}
|
||||
println!("NPCs with quest markers: {}", quest_givers);
|
||||
|
||||
// Find items that are quest rewards
|
||||
let mut quest_reward_items = std::collections::HashSet::new();
|
||||
for quest in quest_db.all_quests() {
|
||||
for reward in &quest.rewards {
|
||||
if let Some(item_id) = reward.item {
|
||||
quest_reward_items.insert(item_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
println!("Unique items used as quest rewards: {}", quest_reward_items.len());
|
||||
|
||||
// Find items that are harvestable drops
|
||||
let mut harvestable_items = std::collections::HashSet::new();
|
||||
for harvestable in harvestable_db.all_harvestables() {
|
||||
for drop in &harvestable.drops {
|
||||
harvestable_items.insert(drop.id);
|
||||
}
|
||||
}
|
||||
println!("Unique items from harvestables: {}", harvestable_items.len());
|
||||
|
||||
println!("\n✨ Demo complete!");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
103
cursebreaker-parser/examples/item_database_demo.rs
Normal file
103
cursebreaker-parser/examples/item_database_demo.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
//! Example demonstrating ItemDatabase usage
|
||||
//!
|
||||
//! Run with: cargo run --example item_database_demo
|
||||
|
||||
use cursebreaker_parser::ItemDatabase;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("🎮 Cursebreaker Item Database Demo\n");
|
||||
|
||||
// Load items from XML
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
|
||||
println!("📚 Loading items from: {}", items_path);
|
||||
|
||||
let item_db = ItemDatabase::load_from_xml(items_path)?;
|
||||
println!("✅ Loaded {} items\n", item_db.len());
|
||||
|
||||
// Example 1: Get item by ID
|
||||
println!("=== Example 1: Get Item by ID ===");
|
||||
if let Some(item) = item_db.get_by_id(150) {
|
||||
println!("Item ID 150:");
|
||||
println!(" Name: {}", item.name);
|
||||
if let Some(desc) = &item.description {
|
||||
println!(" Description: {}", desc);
|
||||
}
|
||||
if let Some(slot) = &item.slot {
|
||||
println!(" Slot: {}", slot);
|
||||
}
|
||||
if let Some(skill) = &item.skill {
|
||||
println!(" Skill: {}", skill);
|
||||
}
|
||||
println!(" Stats: {} stat entries", item.stats.len());
|
||||
}
|
||||
println!();
|
||||
|
||||
// Example 2: Get items by category
|
||||
println!("=== Example 2: Get Items by Category ===");
|
||||
let bows = item_db.get_by_category("bow");
|
||||
println!("Found {} bows:", bows.len());
|
||||
for item in bows.iter().take(5) {
|
||||
println!(" - {} (ID: {})", item.name, item.id);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Example 3: Get items by slot
|
||||
println!("=== Example 3: Get Items by Slot ===");
|
||||
let consumables = item_db.get_by_slot("consumable");
|
||||
println!("Found {} consumables (showing first 10):", consumables.len());
|
||||
for item in consumables.iter().take(10) {
|
||||
let name = &item.name;
|
||||
let id = item.id;
|
||||
if let Some(desc) = &item.description {
|
||||
println!(" - {} (ID: {}) - {}", name, id, desc.chars().take(50).collect::<String>());
|
||||
} else {
|
||||
println!(" - {} (ID: {})", name, id);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Example 4: Get items by skill
|
||||
println!("=== Example 4: Get Items by Skill ===");
|
||||
let magic_items = item_db.get_by_skill("magic");
|
||||
println!("Found {} magic items:", magic_items.len());
|
||||
for item in magic_items.iter().take(5) {
|
||||
println!(" - {} (ID: {}, Level: {:?})",
|
||||
item.name, item.id, item.level);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Example 5: Statistics
|
||||
println!("=== Example 5: Database Statistics ===");
|
||||
let weapons = item_db.get_by_slot("weapon");
|
||||
let armor = item_db.get_by_slot("armor");
|
||||
let consumables = item_db.get_by_slot("consumable");
|
||||
let trinkets = item_db.get_by_slot("trinket");
|
||||
|
||||
println!("Item Distribution by Slot:");
|
||||
println!(" Weapons: {}", weapons.len());
|
||||
println!(" Armor: {}", armor.len());
|
||||
println!(" Consumables: {}", consumables.len());
|
||||
println!(" Trinkets: {}", trinkets.len());
|
||||
println!();
|
||||
|
||||
// Example 6: Prepare for SQL (showing how it would be used)
|
||||
println!("=== Example 6: SQL Serialization ===");
|
||||
let sql_data = item_db.prepare_for_sql();
|
||||
println!("Prepared {} items for SQL insertion", sql_data.len());
|
||||
println!("Sample SQL inserts (first 3):");
|
||||
for (id, name, json) in sql_data.iter().take(3) {
|
||||
let json_preview = if json.len() > 100 {
|
||||
format!("{}...", &json[..100])
|
||||
} else {
|
||||
json.clone()
|
||||
};
|
||||
println!(" INSERT INTO items (id, name, data) VALUES ({}, '{}', '{}');",
|
||||
id, name, json_preview);
|
||||
}
|
||||
|
||||
println!("\n✨ Demo complete!");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
93
cursebreaker-parser/examples/maps_example.rs
Normal file
93
cursebreaker-parser/examples/maps_example.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use cursebreaker_parser::MapDatabase;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Load the Maps.xml file
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let map_db = MapDatabase::load_from_xml(&format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path))?;
|
||||
|
||||
println!("=== Map Database Statistics ===");
|
||||
println!("Total maps loaded: {}", map_db.len());
|
||||
println!("Total named maps: {}", map_db.get_named_maps().len());
|
||||
println!("Total indoor maps: {}", map_db.get_indoor_maps().len());
|
||||
println!("Total isolated maps: {}", map_db.get_isolated_maps().len());
|
||||
println!();
|
||||
|
||||
// Show map bounds
|
||||
if let Some(((min_x, min_y), (max_x, max_y))) = map_db.get_map_bounds() {
|
||||
println!("=== Map Grid Bounds ===");
|
||||
println!("X range: {} to {}", min_x, max_x);
|
||||
println!("Y range: {} to {}", min_y, max_y);
|
||||
println!();
|
||||
}
|
||||
|
||||
// Show some specific maps
|
||||
println!("=== Sample Maps ===");
|
||||
|
||||
if let Some(map) = map_db.get_by_scene_id("3,10") {
|
||||
println!("Map at 3,10:");
|
||||
println!(" Name: {}", if map.name.is_empty() { "(unnamed)" } else { &map.name });
|
||||
println!(" Music: {}", map.music);
|
||||
println!(" Ambience: {}", map.ambience);
|
||||
println!(" Indoor: {}", map.indoors);
|
||||
if let Some(ref fog_color) = map.fog_color {
|
||||
println!(" Fog color: {}", fog_color);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Show Haywind maps
|
||||
println!("=== Maps named 'Haywind' ===");
|
||||
let haywind_maps = map_db.get_by_name("Haywind");
|
||||
for map in &haywind_maps {
|
||||
println!(" Scene ID: {} (Music: {})", map.scene_id, map.music);
|
||||
}
|
||||
println!("Total: {}", haywind_maps.len());
|
||||
println!();
|
||||
|
||||
// Show Thornhill City maps
|
||||
println!("=== Maps named 'Thornhill City' ===");
|
||||
let thornhill_maps = map_db.get_by_name("Thornhill City");
|
||||
for map in þhill_maps {
|
||||
println!(" Scene ID: {} (Music: {})", map.scene_id, map.music);
|
||||
}
|
||||
println!("Total: {}", thornhill_maps.len());
|
||||
println!();
|
||||
|
||||
// Show all unique map names (first 20)
|
||||
println!("=== Unique Map Names (first 20) ===");
|
||||
let mut names = map_db.get_all_map_names();
|
||||
names.sort();
|
||||
for name in names.iter().take(20) {
|
||||
println!(" {}", name);
|
||||
}
|
||||
println!("... and {} more", names.len().saturating_sub(20));
|
||||
println!();
|
||||
|
||||
// Show maps with respawn locations
|
||||
println!("=== Maps with Respawn Locations ===");
|
||||
let respawn_maps = map_db.get_maps_with_respawn();
|
||||
for map in respawn_maps.iter().take(5) {
|
||||
println!(
|
||||
" {} -> respawns at {}",
|
||||
map.scene_id,
|
||||
map.respawn_map.as_ref().unwrap_or(&"?".to_string())
|
||||
);
|
||||
}
|
||||
println!("Total maps with respawn: {}", respawn_maps.len());
|
||||
println!();
|
||||
|
||||
// Show connected maps
|
||||
println!("=== Connected Maps (examples) ===");
|
||||
let connected = map_db.get_connected_maps();
|
||||
for map in connected.iter().take(5) {
|
||||
println!(
|
||||
" {} connects to: {}",
|
||||
map.scene_id,
|
||||
map.connected_maps.as_ref().unwrap_or(&"?".to_string())
|
||||
);
|
||||
}
|
||||
println!("Total connected maps: {}", connected.len());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
100
cursebreaker-parser/examples/player_houses_example.rs
Normal file
100
cursebreaker-parser/examples/player_houses_example.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
use cursebreaker_parser::PlayerHouseDatabase;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Load all player houses from XML
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let ph_db = PlayerHouseDatabase::load_from_xml(
|
||||
&format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path),
|
||||
)?;
|
||||
|
||||
println!("=== Player House Database Statistics ===");
|
||||
println!("Total houses: {}", ph_db.len());
|
||||
println!("Visible houses: {}", ph_db.get_visible_houses().len());
|
||||
println!("Hidden houses: {}", ph_db.get_hidden_houses().len());
|
||||
println!();
|
||||
|
||||
// Show all houses sorted by price
|
||||
println!("=== All Houses (sorted by price) ===");
|
||||
let sorted = ph_db.get_sorted_by_price();
|
||||
for house in &sorted {
|
||||
let visibility = if house.hidden { "(hidden)" } else { "" };
|
||||
println!(
|
||||
" [{}] {} - {} gold {}",
|
||||
house.id, house.name, house.price, visibility
|
||||
);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show price tiers
|
||||
println!("=== Houses by Price Tier ===");
|
||||
println!("Free houses (tier 0):");
|
||||
for house in ph_db.get_by_price_tier(0) {
|
||||
println!(" - {}", house.name);
|
||||
}
|
||||
|
||||
println!("\nCheap houses (tier 1, < 5000 gold):");
|
||||
for house in ph_db.get_by_price_tier(1) {
|
||||
println!(" - {} ({} gold)", house.name, house.price);
|
||||
}
|
||||
|
||||
println!("\nModerate houses (tier 2, 5000-10000 gold):");
|
||||
for house in ph_db.get_by_price_tier(2) {
|
||||
println!(" - {} ({} gold)", house.name, house.price);
|
||||
}
|
||||
|
||||
println!("\nExpensive houses (tier 3, 10000+ gold):");
|
||||
for house in ph_db.get_by_price_tier(3) {
|
||||
println!(" - {} ({} gold)", house.name, house.price);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show cheapest and most expensive
|
||||
println!("=== Price Extremes ===");
|
||||
if let Some(cheapest) = ph_db.get_cheapest() {
|
||||
println!(
|
||||
"Cheapest house: {} - {} gold",
|
||||
cheapest.name, cheapest.price
|
||||
);
|
||||
}
|
||||
if let Some(most_expensive) = ph_db.get_most_expensive() {
|
||||
println!(
|
||||
"Most expensive: {} - {} gold",
|
||||
most_expensive.name, most_expensive.price
|
||||
);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show houses in a specific price range
|
||||
println!("=== Houses between 3000-5000 gold ===");
|
||||
let mid_range = ph_db.get_by_price_range(3000, 5000);
|
||||
for house in mid_range {
|
||||
println!(" - {} ({} gold)", house.name, house.price);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show affordable houses
|
||||
println!("=== Affordable Houses (< 5000 gold) ===");
|
||||
let affordable = ph_db.get_affordable_houses();
|
||||
for house in &affordable {
|
||||
println!(" - {} ({} gold)", house.name, house.price);
|
||||
}
|
||||
println!("Total affordable: {}", affordable.len());
|
||||
println!();
|
||||
|
||||
// Show details of a specific house
|
||||
if let Some(house) = ph_db.get_by_id(8) {
|
||||
println!("=== House Details (ID 8) ===");
|
||||
println!("Name: {}", house.name);
|
||||
println!("Description: {}", house.description);
|
||||
println!("Price: {} gold", house.price);
|
||||
println!("Position: {}", house.position);
|
||||
if let Some((x, y, z)) = house.get_position() {
|
||||
println!("Coordinates: ({:.2}, {:.2}, {:.2})", x, y, z);
|
||||
}
|
||||
println!("Hidden: {}", house.hidden);
|
||||
println!("Price tier: {}", house.get_price_tier());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
59
cursebreaker-parser/examples/query_world_resources.rs
Normal file
59
cursebreaker-parser/examples/query_world_resources.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
//! Example: Query world resources from the database
|
||||
//!
|
||||
//! Run with: cargo run --example query_world_resources
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Connect to database
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
// Use the schema
|
||||
use cursebreaker_parser::schema::world_resources::dsl::*;
|
||||
|
||||
// Query all resources
|
||||
#[derive(Queryable, Debug)]
|
||||
struct WorldResource {
|
||||
item_id: i32,
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
}
|
||||
|
||||
let results = world_resources
|
||||
.limit(10)
|
||||
.load::<WorldResource>(&mut conn)?;
|
||||
|
||||
println!("Found {} resources (showing first 10):", results.len());
|
||||
println!();
|
||||
|
||||
for resource in results {
|
||||
println!("Resource:");
|
||||
println!(" Item ID: {}", resource.item_id);
|
||||
println!(" Position: ({:.2}, {:.2})", resource.pos_x, resource.pos_y);
|
||||
println!();
|
||||
}
|
||||
|
||||
// Query all resources
|
||||
println!("\n--- All world resources ---");
|
||||
let all_results = world_resources
|
||||
.load::<WorldResource>(&mut conn)?;
|
||||
|
||||
println!("Found {} total resources", all_results.len());
|
||||
|
||||
// Group by item_id
|
||||
use std::collections::HashMap;
|
||||
let mut item_counts: HashMap<i32, usize> = HashMap::new();
|
||||
for resource in &all_results {
|
||||
*item_counts.entry(resource.item_id).or_insert(0) += 1;
|
||||
}
|
||||
|
||||
println!("\nResource counts by item ID:");
|
||||
for (i_id, count) in item_counts {
|
||||
println!(" Item {}: {} instances", i_id, count);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
53
cursebreaker-parser/examples/resource_icons_example.rs
Normal file
53
cursebreaker-parser/examples/resource_icons_example.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
//! Example: Query resource icons from the database
|
||||
//!
|
||||
//! This example shows how to retrieve processed resource icons for harvestables.
|
||||
//! Icons are 64x64 WebP images with white borders.
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Connect to database
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
// Define the structure
|
||||
#[derive(Queryable, Debug)]
|
||||
struct ResourceIcon {
|
||||
item_id: i32,
|
||||
name: String,
|
||||
icon_64: Vec<u8>,
|
||||
}
|
||||
|
||||
// Import schema
|
||||
use cursebreaker_parser::schema::resource_icons::dsl::*;
|
||||
|
||||
// Query all resource icons
|
||||
let icons = resource_icons.load::<ResourceIcon>(&mut conn)?;
|
||||
|
||||
println!("📦 Resource Icons Database");
|
||||
println!("========================\n");
|
||||
println!("Total icons: {}\n", icons.len());
|
||||
|
||||
for icon in icons {
|
||||
println!("Harvestable ID: {}", icon.item_id);
|
||||
println!(" Name: {}", icon.name);
|
||||
println!(" Icon size: {} bytes (WebP format, 64x64 with white border)", icon.icon_64.len());
|
||||
println!();
|
||||
}
|
||||
|
||||
// Example: Get icon for a specific harvestable
|
||||
println!("\n🔍 Looking up Copper Ore (harvestable_id = 2):");
|
||||
let copper_icon = resource_icons
|
||||
.filter(item_id.eq(2))
|
||||
.first::<ResourceIcon>(&mut conn)?;
|
||||
|
||||
println!(" Name: {}", copper_icon.name);
|
||||
println!(" Icon size: {} bytes", copper_icon.icon_64.len());
|
||||
|
||||
// You can save the icon to a file for testing:
|
||||
// std::fs::write("copper_ore.webp", &copper_icon.icon_64)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
134
cursebreaker-parser/examples/shops_example.rs
Normal file
134
cursebreaker-parser/examples/shops_example.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
use cursebreaker_parser::ShopDatabase;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Load all shops from XML
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let shop_db = ShopDatabase::load_from_xml(
|
||||
&format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path),
|
||||
)?;
|
||||
|
||||
println!("=== Shop Database Statistics ===");
|
||||
println!("Total shops: {}", shop_db.len());
|
||||
println!("General stores: {}", shop_db.get_general_stores().len());
|
||||
println!("Specialized shops: {}", shop_db.get_specialized_shops().len());
|
||||
println!("Non-empty shops: {}", shop_db.get_non_empty_shops().len());
|
||||
println!("Total items across all shops: {}", shop_db.total_item_count());
|
||||
println!("Unique items sold: {}", shop_db.get_all_item_ids().len());
|
||||
println!();
|
||||
|
||||
// Show all general stores
|
||||
println!("=== General Stores ===");
|
||||
let general_stores = shop_db.get_general_stores();
|
||||
for shop in &general_stores {
|
||||
println!(" [ID {}] {} ({} items)", shop.shop_id, shop.name, shop.item_count());
|
||||
if let Some(ref comment) = shop.comment {
|
||||
println!(" Comment: {}", comment);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show some specialized shops
|
||||
println!("=== Specialized Shops (first 10) ===");
|
||||
let specialized = shop_db.get_specialized_shops();
|
||||
for shop in specialized.iter().take(10) {
|
||||
println!(" [ID {}] {} ({} items)", shop.shop_id, shop.name, shop.item_count());
|
||||
if let Some(ref comment) = shop.comment {
|
||||
println!(" Comment: {}", comment);
|
||||
}
|
||||
}
|
||||
println!("... and {} more", specialized.len().saturating_sub(10));
|
||||
println!();
|
||||
|
||||
// Show details of a specific shop
|
||||
if let Some(shop) = shop_db.get_by_id(3) {
|
||||
println!("=== Shop Details (ID 3) ===");
|
||||
println!("Name: {}", shop.name);
|
||||
println!("Is General Store: {}", shop.is_general_store);
|
||||
println!("Total items: {}", shop.item_count());
|
||||
println!("\nItems:");
|
||||
for (i, item) in shop.items.iter().take(10).enumerate() {
|
||||
print!(" {}) Item ID: {}", i + 1, item.item_id);
|
||||
if let Some(ref name) = item.name {
|
||||
print!(" ({})", name);
|
||||
}
|
||||
if let Some(price) = item.price {
|
||||
print!(" - {} gold", price);
|
||||
}
|
||||
if let Some(stock) = item.max_stock {
|
||||
print!(" - max stock: {}", stock);
|
||||
}
|
||||
if let Some(restock) = item.restock_time {
|
||||
print!(" - restock: {}s", restock);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
if shop.item_count() > 10 {
|
||||
println!(" ... and {} more items", shop.item_count() - 10);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show shops that sell a specific item
|
||||
println!("=== Shops Selling Item '167' (Fishing Rod) ===");
|
||||
let fishing_rod_shops = shop_db.get_shops_selling_item("167");
|
||||
for shop in &fishing_rod_shops {
|
||||
println!(" [ID {}] {}", shop.shop_id, shop.name);
|
||||
if let Some(item) = shop.get_item_by_id("167") {
|
||||
if let Some(ref name) = item.name {
|
||||
print!(" - {}", name);
|
||||
}
|
||||
if let Some(price) = item.price {
|
||||
print!(" (custom price: {} gold)", price);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show shop with most items
|
||||
if let Some(largest_shop) = shop_db.all_shops().iter().max_by_key(|s| s.item_count()) {
|
||||
println!("=== Largest Shop ===");
|
||||
println!("Name: {}", largest_shop.name);
|
||||
println!("Item count: {}", largest_shop.item_count());
|
||||
println!();
|
||||
}
|
||||
|
||||
// Show items with unlimited stock in a shop
|
||||
if let Some(shop) = shop_db.get_by_id(3) {
|
||||
println!("=== Unlimited Stock Items in {} ===", shop.name);
|
||||
let unlimited = shop.get_unlimited_stock_items();
|
||||
for item in unlimited.iter().take(5) {
|
||||
print!(" Item ID: {}", item.item_id);
|
||||
if let Some(ref name) = item.name {
|
||||
print!(" ({})", name);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
if unlimited.len() > 5 {
|
||||
println!(" ... and {} more", unlimited.len() - 5);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show items with limited stock
|
||||
if let Some(shop) = shop_db.get_by_id(8) {
|
||||
println!("=== Limited Stock Items in Shop ID 8 ===");
|
||||
let limited = shop.get_limited_stock_items();
|
||||
for item in &limited {
|
||||
print!(" Item ID: {}", item.item_id);
|
||||
if let Some(ref name) = item.name {
|
||||
print!(" ({})", name);
|
||||
}
|
||||
if let Some(stock) = item.max_stock {
|
||||
print!(" - max stock: {}", stock);
|
||||
}
|
||||
if let Some(minutes) = item.get_restock_minutes() {
|
||||
print!(" - restocks every {:.1} min", minutes);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
108
cursebreaker-parser/examples/traits_example.rs
Normal file
108
cursebreaker-parser/examples/traits_example.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
use cursebreaker_parser::TraitDatabase;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Load all traits from XML
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let trait_db = TraitDatabase::load_from_xml(
|
||||
&format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path),
|
||||
)?;
|
||||
|
||||
println!("=== Trait Database Statistics ===");
|
||||
println!("Total traits: {}", trait_db.len());
|
||||
println!("Trainer traits: {}", trait_db.get_trainer_traits().len());
|
||||
println!("Ability traits: {}", trait_db.get_ability_traits().len());
|
||||
println!("Novice traits: {}", trait_db.get_novice_traits().len());
|
||||
println!("Experienced traits: {}", trait_db.get_experienced_traits().len());
|
||||
println!("Master traits: {}", trait_db.get_master_traits().len());
|
||||
println!();
|
||||
|
||||
// Show all skills
|
||||
println!("=== All Skills ===");
|
||||
let mut skills = trait_db.get_all_skills();
|
||||
skills.sort();
|
||||
for skill in &skills {
|
||||
let count = trait_db.get_by_skill(skill).len();
|
||||
println!(" {} ({} traits)", skill, count);
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show traits for a specific skill
|
||||
println!("=== Woodcutting Traits (sorted by level) ===");
|
||||
let woodcutting = trait_db.get_sorted_by_level("woodcutting");
|
||||
for trait_obj in woodcutting.iter().take(10) {
|
||||
if let Some(level) = trait_obj.get_required_level() {
|
||||
let tier = if trait_obj.is_novice() {
|
||||
" (Novice)"
|
||||
} else if trait_obj.is_experienced() {
|
||||
" (Experienced)"
|
||||
} else if trait_obj.is_master() {
|
||||
" (Master)"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
println!(" [Lvl {}] {}{}", level, trait_obj.name, tier);
|
||||
}
|
||||
}
|
||||
println!("... and {} more", woodcutting.len().saturating_sub(10));
|
||||
println!();
|
||||
|
||||
// Show master tier traits
|
||||
println!("=== Master Tier Traits ===");
|
||||
let masters = trait_db.get_master_traits();
|
||||
for trait_obj in &masters {
|
||||
if let (Some(skill), Some(level)) = (trait_obj.get_required_skill(), trait_obj.get_required_level()) {
|
||||
println!(" {} - {} (Level {})", trait_obj.name, skill, level);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show ability traits
|
||||
println!("=== Traits that Teach Abilities (first 10) ===");
|
||||
let abilities = trait_db.get_ability_traits();
|
||||
for trait_obj in abilities.iter().take(10) {
|
||||
if let Some(ability_id) = trait_obj.learnability {
|
||||
println!(
|
||||
" {} - teaches ability {}",
|
||||
trait_obj.name, ability_id
|
||||
);
|
||||
if let (Some(skill), Some(level)) = (trait_obj.get_required_skill(), trait_obj.get_required_level()) {
|
||||
println!(" Requires: {} level {}", skill, level);
|
||||
}
|
||||
}
|
||||
}
|
||||
println!("... and {} more", abilities.len().saturating_sub(10));
|
||||
println!();
|
||||
|
||||
// Show traits by level range
|
||||
println!("=== Combat Traits (Levels 15-25) ===");
|
||||
let combat_traits = trait_db.get_by_skill_and_level("swordsmanship", 15, 25);
|
||||
for trait_obj in &combat_traits {
|
||||
if let Some(level) = trait_obj.get_required_level() {
|
||||
println!(" [Lvl {}] {}", level, trait_obj.name);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
|
||||
// Show details of a specific trait
|
||||
if let Some(trait_obj) = trait_db.get_by_id(272) {
|
||||
println!("=== Trait Details (ID 272) ===");
|
||||
println!("Name: {}", trait_obj.name);
|
||||
println!("Description (plain): {}", trait_obj.get_plain_description());
|
||||
if let Some(ref trainer) = trait_obj.trainer {
|
||||
println!("Skill: {}", trainer.skill);
|
||||
println!("Level: {}", trainer.level);
|
||||
if let Some(tier) = trainer.tier_icon {
|
||||
println!("Tier: {}", tier);
|
||||
}
|
||||
}
|
||||
if let Some(ability_id) = trait_obj.learnability {
|
||||
println!("Teaches ability: {}", ability_id);
|
||||
}
|
||||
if let Some(ref comment) = trait_obj.comment {
|
||||
println!("Comment: {}", comment);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
140
cursebreaker-parser/examples/verify_world_objects.rs
Normal file
140
cursebreaker-parser/examples/verify_world_objects.rs
Normal file
@@ -0,0 +1,140 @@
|
||||
//! Example: Query world objects from the database
|
||||
//!
|
||||
//! Run with: cargo run --example verify_world_objects
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Connect to database
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
// Query teleporters
|
||||
{
|
||||
use cursebreaker_parser::schema::world_teleporters::dsl::*;
|
||||
|
||||
#[derive(Queryable, Debug)]
|
||||
struct Teleporter {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
tp_x: Option<f32>,
|
||||
tp_y: Option<f32>,
|
||||
}
|
||||
|
||||
let results = world_teleporters.load::<Teleporter>(&mut conn)?;
|
||||
println!("=== World Teleporters ===");
|
||||
println!("Found {} teleporters\n", results.len());
|
||||
for tp in results {
|
||||
print!(" At ({:.2}, {:.2})", tp.pos_x, tp.pos_y);
|
||||
if let (Some(tx), Some(ty)) = (tp.tp_x, tp.tp_y) {
|
||||
println!(" -> teleports to ({:.2}, {:.2})", tx, ty);
|
||||
} else {
|
||||
println!(" -> no destination");
|
||||
}
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Query workbenches
|
||||
{
|
||||
use cursebreaker_parser::schema::world_workbenches::dsl::*;
|
||||
|
||||
#[derive(Queryable, Debug)]
|
||||
struct Workbench {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
workbench_id: i32,
|
||||
}
|
||||
|
||||
let results = world_workbenches.load::<Workbench>(&mut conn)?;
|
||||
println!("=== World Workbenches ===");
|
||||
println!("Found {} workbenches\n", results.len());
|
||||
for wb in results {
|
||||
println!(" Workbench ID {} at ({:.2}, {:.2})", wb.workbench_id, wb.pos_x, wb.pos_y);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Query loot spawners
|
||||
{
|
||||
use cursebreaker_parser::schema::world_loot::dsl::*;
|
||||
|
||||
#[derive(Queryable, Debug)]
|
||||
struct Loot {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
item_id: i32,
|
||||
amount: i32,
|
||||
respawn_time: i32,
|
||||
visibility_checks: String,
|
||||
}
|
||||
|
||||
let results = world_loot.load::<Loot>(&mut conn)?;
|
||||
println!("=== World Loot ===");
|
||||
println!("Found {} loot spawners\n", results.len());
|
||||
for loot in results {
|
||||
println!(" Item {} x{} (respawn: {}s) at ({:.2}, {:.2})",
|
||||
loot.item_id, loot.amount, loot.respawn_time, loot.pos_x, loot.pos_y);
|
||||
if !loot.visibility_checks.is_empty() {
|
||||
println!(" Visibility checks: {}", loot.visibility_checks);
|
||||
}
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Query map icons
|
||||
{
|
||||
use cursebreaker_parser::schema::world_map_icons::dsl::*;
|
||||
|
||||
#[derive(Queryable, Debug)]
|
||||
struct MapIcon {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
icon_type: i32,
|
||||
icon_size: i32,
|
||||
icon: String,
|
||||
text: String,
|
||||
font_size: i32,
|
||||
hover_text: String,
|
||||
}
|
||||
|
||||
let results = world_map_icons.load::<MapIcon>(&mut conn)?;
|
||||
println!("=== World Map Icons ===");
|
||||
println!("Found {} map icons\n", results.len());
|
||||
for map_icon in results {
|
||||
print!(" Type {} at ({:.2}, {:.2})", map_icon.icon_type, map_icon.pos_x, map_icon.pos_y);
|
||||
if !map_icon.text.is_empty() {
|
||||
print!(" - Text: \"{}\"", map_icon.text);
|
||||
}
|
||||
if !map_icon.hover_text.is_empty() {
|
||||
print!(" - Hover: \"{}\"", map_icon.hover_text);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Query map name changers
|
||||
{
|
||||
use cursebreaker_parser::schema::world_map_name_changers::dsl::*;
|
||||
|
||||
#[derive(Queryable, Debug)]
|
||||
struct MapNameChanger {
|
||||
pos_x: f32,
|
||||
pos_y: f32,
|
||||
map_name: String,
|
||||
}
|
||||
|
||||
let results = world_map_name_changers.load::<MapNameChanger>(&mut conn)?;
|
||||
println!("=== World Map Name Changers ===");
|
||||
println!("Found {} map name changers\n", results.len());
|
||||
for changer in results {
|
||||
println!(" \"{}\" at ({:.2}, {:.2})", changer.map_name, changer.pos_x, changer.pos_y);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
0
cursebreaker-parser/migrations/.diesel_lock
Normal file
0
cursebreaker-parser/migrations/.diesel_lock
Normal file
0
cursebreaker-parser/migrations/.keep
Normal file
0
cursebreaker-parser/migrations/.keep
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Rollback migration for minimap_tiles table
|
||||
DROP INDEX IF EXISTS idx_minimap_y;
|
||||
DROP INDEX IF EXISTS idx_minimap_x;
|
||||
DROP INDEX IF EXISTS idx_minimap_coords;
|
||||
DROP TABLE IF EXISTS minimap_tiles;
|
||||
@@ -0,0 +1,39 @@
|
||||
-- Minimap tiles table storing processed WebP images
|
||||
CREATE TABLE minimap_tiles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
|
||||
-- Tile coordinates (matching file naming: x_y.png)
|
||||
x INTEGER NOT NULL,
|
||||
y INTEGER NOT NULL,
|
||||
|
||||
-- Original PNG metadata
|
||||
original_width INTEGER NOT NULL DEFAULT 512,
|
||||
original_height INTEGER NOT NULL DEFAULT 512,
|
||||
original_file_size INTEGER,
|
||||
|
||||
-- WebP blobs at different resolutions
|
||||
webp_512 BLOB NOT NULL, -- 512x512 WebP
|
||||
webp_256 BLOB NOT NULL, -- 256x256 WebP
|
||||
webp_128 BLOB NOT NULL, -- 128x128 WebP
|
||||
webp_64 BLOB NOT NULL, -- 64x64 WebP
|
||||
|
||||
-- Blob sizes for quick reference
|
||||
webp_512_size INTEGER NOT NULL,
|
||||
webp_256_size INTEGER NOT NULL,
|
||||
webp_128_size INTEGER NOT NULL,
|
||||
webp_64_size INTEGER NOT NULL,
|
||||
|
||||
-- Processing metadata
|
||||
processed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
source_path TEXT NOT NULL,
|
||||
|
||||
-- Ensure unique coordinate pairs
|
||||
UNIQUE(x, y)
|
||||
);
|
||||
|
||||
-- Index for fast coordinate lookups
|
||||
CREATE INDEX idx_minimap_coords ON minimap_tiles(x, y);
|
||||
|
||||
-- Index for boundary queries
|
||||
CREATE INDEX idx_minimap_x ON minimap_tiles(x);
|
||||
CREATE INDEX idx_minimap_y ON minimap_tiles(y);
|
||||
@@ -0,0 +1,33 @@
|
||||
-- Drop all game data tables
|
||||
DROP INDEX IF EXISTS idx_shops_name;
|
||||
DROP TABLE IF EXISTS shops;
|
||||
|
||||
DROP INDEX IF EXISTS idx_traits_trainer;
|
||||
DROP INDEX IF EXISTS idx_traits_name;
|
||||
DROP TABLE IF EXISTS traits;
|
||||
|
||||
DROP INDEX IF EXISTS idx_player_houses_map;
|
||||
DROP INDEX IF EXISTS idx_player_houses_name;
|
||||
DROP TABLE IF EXISTS player_houses;
|
||||
|
||||
DROP INDEX IF EXISTS idx_fast_travel_map;
|
||||
DROP INDEX IF EXISTS idx_fast_travel_name;
|
||||
DROP TABLE IF EXISTS fast_travel_locations;
|
||||
|
||||
DROP INDEX IF EXISTS idx_maps_name;
|
||||
DROP TABLE IF EXISTS maps;
|
||||
|
||||
DROP INDEX IF EXISTS idx_loot_npc;
|
||||
DROP TABLE IF EXISTS loot_tables;
|
||||
|
||||
DROP INDEX IF EXISTS idx_harvestables_name;
|
||||
DROP TABLE IF EXISTS harvestables;
|
||||
|
||||
DROP INDEX IF EXISTS idx_quests_name;
|
||||
DROP TABLE IF EXISTS quests;
|
||||
|
||||
DROP INDEX IF EXISTS idx_npcs_name;
|
||||
DROP TABLE IF EXISTS npcs;
|
||||
|
||||
DROP INDEX IF EXISTS idx_items_name;
|
||||
DROP TABLE IF EXISTS items;
|
||||
@@ -0,0 +1,98 @@
|
||||
-- Items table
|
||||
CREATE TABLE items (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_items_name ON items(name);
|
||||
|
||||
-- NPCs table
|
||||
CREATE TABLE npcs (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_npcs_name ON npcs(name);
|
||||
|
||||
-- Quests table
|
||||
CREATE TABLE quests (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_quests_name ON quests(name);
|
||||
|
||||
-- Harvestables table
|
||||
CREATE TABLE harvestables (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_harvestables_name ON harvestables(name);
|
||||
|
||||
-- Loot tables
|
||||
CREATE TABLE loot_tables (
|
||||
table_id TEXT PRIMARY KEY,
|
||||
npc_id TEXT,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_loot_npc ON loot_tables(npc_id);
|
||||
|
||||
-- Maps table
|
||||
CREATE TABLE maps (
|
||||
scene_id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_maps_name ON maps(name);
|
||||
|
||||
-- Fast travel locations table
|
||||
CREATE TABLE fast_travel_locations (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
map_name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_fast_travel_name ON fast_travel_locations(name);
|
||||
CREATE INDEX idx_fast_travel_map ON fast_travel_locations(map_name);
|
||||
|
||||
-- Player houses table
|
||||
CREATE TABLE player_houses (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
map_id INTEGER NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_player_houses_name ON player_houses(name);
|
||||
CREATE INDEX idx_player_houses_map ON player_houses(map_id);
|
||||
|
||||
-- Traits table
|
||||
CREATE TABLE traits (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
trainer_id INTEGER,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_traits_name ON traits(name);
|
||||
CREATE INDEX idx_traits_trainer ON traits(trainer_id);
|
||||
|
||||
-- Shops table
|
||||
CREATE TABLE shops (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
unique_items INTEGER NOT NULL, -- boolean as 0/1
|
||||
item_count INTEGER NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_shops_name ON shops(name);
|
||||
@@ -0,0 +1,2 @@
|
||||
DROP INDEX IF EXISTS idx_merged_tiles_zoom_coords;
|
||||
DROP TABLE IF EXISTS merged_tiles;
|
||||
@@ -0,0 +1,31 @@
|
||||
-- Create merged_tiles table for storing merged map tiles at different zoom levels
|
||||
-- Zoom level 2: 1x1 tiles (512px original tiles)
|
||||
-- Zoom level 1: 2x2 tiles merged into 512px
|
||||
-- Zoom level 0: 4x4 tiles merged into 512px
|
||||
|
||||
CREATE TABLE merged_tiles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
-- Tile coordinates at this zoom level
|
||||
x INTEGER NOT NULL,
|
||||
y INTEGER NOT NULL,
|
||||
-- Zoom level (0 = most zoomed out, 2 = most zoomed in)
|
||||
zoom_level INTEGER NOT NULL,
|
||||
-- Number of original tiles merged (1, 4, or 16)
|
||||
merge_factor INTEGER NOT NULL,
|
||||
-- Dimensions of the merged image
|
||||
width INTEGER NOT NULL,
|
||||
height INTEGER NOT NULL,
|
||||
-- WebP image data (lossless compression)
|
||||
webp_data BLOB NOT NULL,
|
||||
webp_size INTEGER NOT NULL,
|
||||
-- Metadata
|
||||
processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
-- Track which original tiles were merged (for debugging)
|
||||
source_tiles TEXT NOT NULL,
|
||||
|
||||
-- Unique constraint on zoom level + coordinates
|
||||
UNIQUE(zoom_level, x, y)
|
||||
);
|
||||
|
||||
-- Index for fast lookups
|
||||
CREATE INDEX idx_merged_tiles_zoom_coords ON merged_tiles(zoom_level, x, y);
|
||||
@@ -0,0 +1,26 @@
|
||||
-- This migration cannot be rolled back automatically
|
||||
-- You would need to re-run the image-parser to restore data
|
||||
DROP INDEX IF EXISTS idx_minimap_tiles_coords;
|
||||
DROP INDEX IF EXISTS idx_minimap_tiles_zoom_coords;
|
||||
DROP TABLE IF EXISTS minimap_tiles;
|
||||
|
||||
-- Restore old structure (data will be lost)
|
||||
CREATE TABLE minimap_tiles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
x INTEGER NOT NULL,
|
||||
y INTEGER NOT NULL,
|
||||
original_width INTEGER NOT NULL,
|
||||
original_height INTEGER NOT NULL,
|
||||
original_file_size INTEGER,
|
||||
webp_512 BLOB NOT NULL,
|
||||
webp_256 BLOB NOT NULL,
|
||||
webp_128 BLOB NOT NULL,
|
||||
webp_64 BLOB NOT NULL,
|
||||
webp_512_size INTEGER NOT NULL,
|
||||
webp_256_size INTEGER NOT NULL,
|
||||
webp_128_size INTEGER NOT NULL,
|
||||
webp_64_size INTEGER NOT NULL,
|
||||
processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
source_path TEXT NOT NULL,
|
||||
UNIQUE(x, y)
|
||||
);
|
||||
@@ -0,0 +1,34 @@
|
||||
-- Drop merged_tiles table (no longer needed)
|
||||
DROP TABLE IF EXISTS merged_tiles;
|
||||
DROP INDEX IF EXISTS idx_merged_tiles_zoom_coords;
|
||||
|
||||
-- Drop old minimap_tiles table
|
||||
DROP TABLE IF EXISTS minimap_tiles;
|
||||
|
||||
-- Create new minimap_tiles table with simplified structure
|
||||
CREATE TABLE minimap_tiles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
-- Tile coordinates (at zoom level 2, original tile coords)
|
||||
x INTEGER NOT NULL,
|
||||
y INTEGER NOT NULL,
|
||||
-- Zoom level (0 = 4x4 merged, 1 = 2x2 merged, 2 = original)
|
||||
zoom INTEGER NOT NULL,
|
||||
-- Image dimensions (always 512x512 for merged tiles)
|
||||
width INTEGER NOT NULL,
|
||||
height INTEGER NOT NULL,
|
||||
-- Original file size (only for zoom=2)
|
||||
original_file_size INTEGER,
|
||||
-- WebP image data (lossless)
|
||||
image BLOB NOT NULL,
|
||||
image_size INTEGER NOT NULL,
|
||||
-- Metadata
|
||||
processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
source_path TEXT NOT NULL,
|
||||
|
||||
-- Unique constraint on coordinates + zoom
|
||||
UNIQUE(x, y, zoom)
|
||||
);
|
||||
|
||||
-- Index for fast lookups
|
||||
CREATE INDEX idx_minimap_tiles_zoom_coords ON minimap_tiles(zoom, x, y);
|
||||
CREATE INDEX idx_minimap_tiles_coords ON minimap_tiles(x, y);
|
||||
@@ -0,0 +1,26 @@
|
||||
-- Undo the expand_items migration
|
||||
|
||||
-- Drop crafting tables
|
||||
DROP INDEX IF EXISTS idx_crafting_recipe_items_item;
|
||||
DROP TABLE IF EXISTS crafting_recipe_items;
|
||||
|
||||
DROP INDEX IF EXISTS idx_crafting_recipes_workbench;
|
||||
DROP INDEX IF EXISTS idx_crafting_recipes_level;
|
||||
DROP INDEX IF EXISTS idx_crafting_recipes_skill;
|
||||
DROP INDEX IF EXISTS idx_crafting_recipes_product;
|
||||
DROP TABLE IF EXISTS crafting_recipes;
|
||||
|
||||
-- Drop item indexes
|
||||
DROP INDEX IF EXISTS idx_items_skill;
|
||||
DROP INDEX IF EXISTS idx_items_price;
|
||||
DROP INDEX IF EXISTS idx_items_level;
|
||||
DROP INDEX IF EXISTS idx_items_type;
|
||||
|
||||
-- Note: SQLite doesn't support DROP COLUMN in ALTER TABLE
|
||||
-- To truly revert, we'd need to recreate the table without the columns
|
||||
-- For now, we'll leave the columns in place (they won't hurt with defaults)
|
||||
-- If you need a full revert, you'd need to:
|
||||
-- 1. CREATE TABLE items_backup (id, name, data)
|
||||
-- 2. INSERT INTO items_backup SELECT id, name, data FROM items
|
||||
-- 3. DROP TABLE items
|
||||
-- 4. ALTER TABLE items_backup RENAME TO items
|
||||
@@ -0,0 +1,72 @@
|
||||
-- Add core columns to items table for efficient querying
|
||||
|
||||
-- Item classification
|
||||
ALTER TABLE items ADD COLUMN item_type TEXT NOT NULL DEFAULT 'resource';
|
||||
ALTER TABLE items ADD COLUMN level INTEGER NOT NULL DEFAULT 1;
|
||||
|
||||
-- Economy
|
||||
ALTER TABLE items ADD COLUMN price INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- Stacking and storage
|
||||
ALTER TABLE items ADD COLUMN max_stack INTEGER NOT NULL DEFAULT 1;
|
||||
ALTER TABLE items ADD COLUMN storage_size INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- Skills
|
||||
ALTER TABLE items ADD COLUMN skill TEXT NOT NULL DEFAULT 'none';
|
||||
ALTER TABLE items ADD COLUMN tool TEXT NOT NULL DEFAULT 'none';
|
||||
|
||||
-- Visual/UI
|
||||
ALTER TABLE items ADD COLUMN description TEXT NOT NULL DEFAULT '';
|
||||
|
||||
-- Boolean flags (stored as INTEGER: 0=false, 1=true)
|
||||
ALTER TABLE items ADD COLUMN two_handed INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN undroppable INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN undroppable_on_death INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN unequip_destroy INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN generate_icon INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN hide_milestone INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN cannot_craft_exceptional INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN storage_all_items INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- Ability and item IDs
|
||||
ALTER TABLE items ADD COLUMN ability_id INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN special_ability INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN learn_ability_id INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN book_id INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE items ADD COLUMN swap_item INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
-- Create indexes for commonly queried columns
|
||||
CREATE INDEX idx_items_type ON items(item_type);
|
||||
CREATE INDEX idx_items_level ON items(level);
|
||||
CREATE INDEX idx_items_price ON items(price);
|
||||
CREATE INDEX idx_items_skill ON items(skill);
|
||||
|
||||
-- Crafting recipes table
|
||||
CREATE TABLE crafting_recipes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
product_item_id INTEGER NOT NULL,
|
||||
skill TEXT NOT NULL,
|
||||
level INTEGER NOT NULL,
|
||||
workbench_id INTEGER NOT NULL,
|
||||
xp INTEGER NOT NULL DEFAULT 0,
|
||||
unlocked_by_default INTEGER NOT NULL DEFAULT 1,
|
||||
checks TEXT, -- nullable, for conditional recipes
|
||||
FOREIGN KEY (product_item_id) REFERENCES items(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_crafting_recipes_product ON crafting_recipes(product_item_id);
|
||||
CREATE INDEX idx_crafting_recipes_skill ON crafting_recipes(skill);
|
||||
CREATE INDEX idx_crafting_recipes_level ON crafting_recipes(level);
|
||||
CREATE INDEX idx_crafting_recipes_workbench ON crafting_recipes(workbench_id);
|
||||
|
||||
-- Crafting recipe ingredients (many-to-many)
|
||||
CREATE TABLE crafting_recipe_items (
|
||||
recipe_id INTEGER NOT NULL,
|
||||
item_id INTEGER NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
PRIMARY KEY (recipe_id, item_id),
|
||||
FOREIGN KEY (recipe_id) REFERENCES crafting_recipes(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (item_id) REFERENCES items(id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_crafting_recipe_items_item ON crafting_recipe_items(item_id);
|
||||
@@ -0,0 +1,5 @@
|
||||
-- Undo the add_item_images migration
|
||||
|
||||
-- Note: SQLite doesn't support DROP COLUMN in ALTER TABLE
|
||||
-- The icon columns will remain but can be set to NULL
|
||||
-- To truly revert, you would need to recreate the table without the image columns
|
||||
@@ -0,0 +1,6 @@
|
||||
-- Add item icon columns (WebP format)
|
||||
-- These store the processed WebP images at different resolutions
|
||||
|
||||
ALTER TABLE items ADD COLUMN icon_large BLOB; -- 256x256 WebP
|
||||
ALTER TABLE items ADD COLUMN icon_medium BLOB; -- 64x64 WebP
|
||||
ALTER TABLE items ADD COLUMN icon_small BLOB; -- 16x16 WebP
|
||||
@@ -0,0 +1,6 @@
|
||||
-- Undo the add_item_stats migration
|
||||
|
||||
DROP INDEX IF EXISTS idx_item_stats_type_value;
|
||||
DROP INDEX IF EXISTS idx_item_stats_value;
|
||||
DROP INDEX IF EXISTS idx_item_stats_stat_type;
|
||||
DROP TABLE IF EXISTS item_stats;
|
||||
@@ -0,0 +1,15 @@
|
||||
-- Create item_stats table for normalized stat storage
|
||||
CREATE TABLE item_stats (
|
||||
item_id INTEGER NOT NULL,
|
||||
stat_type TEXT NOT NULL,
|
||||
value REAL NOT NULL,
|
||||
PRIMARY KEY (item_id, stat_type),
|
||||
FOREIGN KEY (item_id) REFERENCES items(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Create indexes for querying
|
||||
CREATE INDEX idx_item_stats_stat_type ON item_stats(stat_type);
|
||||
CREATE INDEX idx_item_stats_value ON item_stats(value);
|
||||
|
||||
-- Index for finding items by stat value ranges
|
||||
CREATE INDEX idx_item_stats_type_value ON item_stats(stat_type, value);
|
||||
@@ -0,0 +1 @@
|
||||
DROP TABLE world_resources;
|
||||
@@ -0,0 +1,14 @@
|
||||
-- World resources table - stores harvestable resources from Unity scenes
|
||||
CREATE TABLE world_resources (
|
||||
id INTEGER PRIMARY KEY,
|
||||
item_id INTEGER NOT NULL,
|
||||
scene_name TEXT NOT NULL,
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
pos_z REAL NOT NULL,
|
||||
object_name TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
|
||||
CREATE INDEX idx_world_resources_scene ON world_resources(scene_name);
|
||||
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_z);
|
||||
@@ -0,0 +1,16 @@
|
||||
-- Revert to original structure
|
||||
DROP TABLE world_resources;
|
||||
|
||||
CREATE TABLE world_resources (
|
||||
id INTEGER PRIMARY KEY,
|
||||
item_id INTEGER NOT NULL,
|
||||
scene_name TEXT NOT NULL,
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
pos_z REAL NOT NULL,
|
||||
object_name TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
|
||||
CREATE INDEX idx_world_resources_scene ON world_resources(scene_name);
|
||||
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_z);
|
||||
@@ -0,0 +1,13 @@
|
||||
-- Drop the old table
|
||||
DROP TABLE world_resources;
|
||||
|
||||
-- Recreate with simplified structure - no id, no scene_name, no object_name, only 2D coordinates
|
||||
CREATE TABLE world_resources (
|
||||
item_id INTEGER NOT NULL,
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
PRIMARY KEY (item_id, pos_x, pos_y)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
|
||||
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_y);
|
||||
@@ -0,0 +1,2 @@
|
||||
-- Drop resource_icons table
|
||||
DROP TABLE resource_icons;
|
||||
@@ -0,0 +1,8 @@
|
||||
-- Create resource_icons table to store processed item icons for world resources
|
||||
CREATE TABLE resource_icons (
|
||||
item_id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
icon_64 BLOB NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_resource_icons_name ON resource_icons(name);
|
||||
@@ -0,0 +1,10 @@
|
||||
-- Revert to the simple harvestables table
|
||||
DROP TABLE IF EXISTS harvestable_drops;
|
||||
DROP TABLE IF EXISTS harvestables;
|
||||
|
||||
CREATE TABLE harvestables (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
-- Restructure harvestables table to store expanded data
|
||||
DROP TABLE IF EXISTS harvestables;
|
||||
|
||||
CREATE TABLE harvestables (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
comment TEXT NOT NULL,
|
||||
level INTEGER NOT NULL,
|
||||
skill TEXT NOT NULL,
|
||||
tool TEXT NOT NULL,
|
||||
min_health INTEGER NOT NULL,
|
||||
max_health INTEGER NOT NULL,
|
||||
harvesttime INTEGER NOT NULL,
|
||||
hittime INTEGER NOT NULL,
|
||||
respawntime INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- Create harvestable_drops table
|
||||
CREATE TABLE harvestable_drops (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
harvestable_id INTEGER NOT NULL,
|
||||
item_id INTEGER NOT NULL,
|
||||
minamount INTEGER NOT NULL,
|
||||
maxamount INTEGER NOT NULL,
|
||||
droprate INTEGER NOT NULL,
|
||||
droprateboost INTEGER NOT NULL,
|
||||
amountboost INTEGER NOT NULL,
|
||||
comment TEXT NOT NULL,
|
||||
FOREIGN KEY (harvestable_id) REFERENCES harvestables(id),
|
||||
FOREIGN KEY (item_id) REFERENCES items(id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_harvestable_drops_harvestable_id ON harvestable_drops(harvestable_id);
|
||||
CREATE INDEX idx_harvestable_drops_item_id ON harvestable_drops(item_id);
|
||||
CREATE INDEX idx_harvestables_skill ON harvestables(skill);
|
||||
CREATE INDEX idx_harvestables_tool ON harvestables(tool);
|
||||
CREATE INDEX idx_harvestables_level ON harvestables(level);
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
-- Drop world scene object tables
|
||||
DROP TABLE world_teleporters;
|
||||
DROP TABLE world_workbenches;
|
||||
DROP TABLE world_loot;
|
||||
DROP TABLE world_map_icons;
|
||||
DROP TABLE world_map_name_changers;
|
||||
@@ -0,0 +1,48 @@
|
||||
-- Create world_teleporters table
|
||||
CREATE TABLE world_teleporters (
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
tp_x REAL,
|
||||
tp_y REAL,
|
||||
PRIMARY KEY (pos_x, pos_y)
|
||||
);
|
||||
|
||||
-- Create world_workbenches table
|
||||
CREATE TABLE world_workbenches (
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
workbench_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (pos_x, pos_y)
|
||||
);
|
||||
|
||||
-- Create world_loot table
|
||||
CREATE TABLE world_loot (
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
item_id INTEGER NOT NULL,
|
||||
amount INTEGER NOT NULL,
|
||||
respawn_time INTEGER NOT NULL,
|
||||
visibility_checks TEXT NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (pos_x, pos_y)
|
||||
);
|
||||
|
||||
-- Create world_map_icons table
|
||||
CREATE TABLE world_map_icons (
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
icon_type INTEGER NOT NULL,
|
||||
icon_size INTEGER NOT NULL,
|
||||
icon TEXT NOT NULL,
|
||||
text TEXT NOT NULL DEFAULT '',
|
||||
font_size INTEGER NOT NULL,
|
||||
hover_text TEXT NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (pos_x, pos_y)
|
||||
);
|
||||
|
||||
-- Create world_map_name_changers table
|
||||
CREATE TABLE world_map_name_changers (
|
||||
pos_x REAL NOT NULL,
|
||||
pos_y REAL NOT NULL,
|
||||
map_name TEXT NOT NULL,
|
||||
PRIMARY KEY (pos_x, pos_y)
|
||||
);
|
||||
@@ -0,0 +1,3 @@
|
||||
DROP TABLE IF EXISTS icons;
|
||||
DROP TABLE IF EXISTS achievement_icons;
|
||||
DROP TABLE IF EXISTS general_icons;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- Simple icons table (abilities, buffs, traits, player houses, stat icons)
|
||||
CREATE TABLE IF NOT EXISTS icons (
|
||||
category TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
icon BLOB NOT NULL,
|
||||
PRIMARY KEY (category, name)
|
||||
);
|
||||
|
||||
-- Achievement icons table (filtered, no _0 suffix)
|
||||
CREATE TABLE IF NOT EXISTS achievement_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
|
||||
-- General icons table (multiple sizes)
|
||||
CREATE TABLE IF NOT EXISTS general_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
original_width INTEGER NOT NULL,
|
||||
original_height INTEGER NOT NULL,
|
||||
icon_original BLOB,
|
||||
icon_256 BLOB,
|
||||
icon_64 BLOB,
|
||||
icon_32 BLOB
|
||||
);
|
||||
@@ -0,0 +1,14 @@
|
||||
-- Drop the separate icon tables
|
||||
DROP TABLE IF EXISTS ability_icons;
|
||||
DROP TABLE IF EXISTS buff_icons;
|
||||
DROP TABLE IF EXISTS trait_icons;
|
||||
DROP TABLE IF EXISTS player_house_icons;
|
||||
DROP TABLE IF EXISTS stat_icons;
|
||||
|
||||
-- Recreate the combined icons table
|
||||
CREATE TABLE IF NOT EXISTS icons (
|
||||
category TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
icon BLOB NOT NULL,
|
||||
PRIMARY KEY (category, name)
|
||||
);
|
||||
@@ -0,0 +1,32 @@
|
||||
-- Drop the combined icons table
|
||||
DROP TABLE IF EXISTS icons;
|
||||
|
||||
-- Ability icons table
|
||||
CREATE TABLE IF NOT EXISTS ability_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
|
||||
-- Buff icons table
|
||||
CREATE TABLE IF NOT EXISTS buff_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
|
||||
-- Trait icons table
|
||||
CREATE TABLE IF NOT EXISTS trait_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
|
||||
-- Player house icons table
|
||||
CREATE TABLE IF NOT EXISTS player_house_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
|
||||
-- Stat icons table
|
||||
CREATE TABLE IF NOT EXISTS stat_icons (
|
||||
name TEXT PRIMARY KEY NOT NULL,
|
||||
icon BLOB NOT NULL
|
||||
);
|
||||
@@ -0,0 +1,9 @@
|
||||
-- Revert to original schema with JSON data field
|
||||
DROP TABLE IF EXISTS player_houses;
|
||||
|
||||
CREATE TABLE player_houses (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
map_id INTEGER NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
@@ -0,0 +1,11 @@
|
||||
-- Drop the old table and recreate with direct fields instead of JSON data
|
||||
DROP TABLE IF EXISTS player_houses;
|
||||
|
||||
CREATE TABLE player_houses (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
pos_x REAL NOT NULL,
|
||||
pos_z REAL NOT NULL,
|
||||
price INTEGER NOT NULL
|
||||
);
|
||||
@@ -0,0 +1,9 @@
|
||||
-- Restore old table schema
|
||||
DROP TABLE IF EXISTS fast_travel_locations;
|
||||
|
||||
CREATE TABLE fast_travel_locations (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
map_name TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
@@ -0,0 +1,12 @@
|
||||
-- Drop the old table and create with new schema
|
||||
DROP TABLE IF EXISTS fast_travel_locations;
|
||||
|
||||
CREATE TABLE fast_travel_locations (
|
||||
name TEXT PRIMARY KEY,
|
||||
pos_x REAL NOT NULL,
|
||||
pos_z REAL NOT NULL,
|
||||
travel_type TEXT NOT NULL,
|
||||
unlocked INTEGER NOT NULL DEFAULT 0,
|
||||
connections TEXT,
|
||||
checks TEXT
|
||||
);
|
||||
143
cursebreaker-parser/src/bin/image-parser.rs
Normal file
143
cursebreaker-parser/src/bin/image-parser.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
//! Image Parser - Processes minimap tiles and generates all zoom levels
|
||||
//!
|
||||
//! This binary handles:
|
||||
//! - Loading minimap tile images from PNG files
|
||||
//! - Converting to lossless WebP format (zoom level 2)
|
||||
//! - Generating merged tiles for zoom level 1 (2x2)
|
||||
//! - Generating merged tiles for zoom level 0 (4x4)
|
||||
//! - Storing all tiles in the SQLite database
|
||||
//! - Generating statistics about storage and compression
|
||||
|
||||
use clap::Parser;
|
||||
use cursebreaker_parser::{IconDatabase, MinimapDatabase};
|
||||
use log::{error, info, LevelFilter};
|
||||
use std::env;
|
||||
use unity_parser::log::DedupLogger;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(name = "image-parser")]
|
||||
#[command(about = "Processes minimap tiles and game icons")]
|
||||
struct Args {
|
||||
/// Process minimap tiles
|
||||
#[arg(long)]
|
||||
minimap: bool,
|
||||
|
||||
/// Process game icons
|
||||
#[arg(long)]
|
||||
icons: bool,
|
||||
|
||||
/// Process everything (minimap and icons)
|
||||
#[arg(long)]
|
||||
all: bool,
|
||||
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let args = Args::parse();
|
||||
|
||||
// Validate that at least one option is specified
|
||||
if !args.minimap && !args.icons && !args.all {
|
||||
eprintln!("Error: At least one option must be specified.\n");
|
||||
eprintln!("Usage: image-parser [OPTIONS]\n");
|
||||
eprintln!("Options:");
|
||||
eprintln!(" --minimap Process minimap tiles");
|
||||
eprintln!(" --icons Process game icons");
|
||||
eprintln!(" --all Process everything");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let process_minimap = args.minimap || args.all;
|
||||
let process_icons = args.icons || args.all;
|
||||
|
||||
let logger = DedupLogger::new();
|
||||
log::set_boxed_logger(Box::new(logger))
|
||||
.map(|()| log::set_max_level(LevelFilter::Trace))
|
||||
.unwrap();
|
||||
|
||||
info!("Image Parser");
|
||||
info!("Generates all zoom levels (0, 1, 2) with merged tiles");
|
||||
info!("Will override existing database entries\n");
|
||||
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "cursebreaker.db".to_string());
|
||||
let cb_assets_path =
|
||||
env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
|
||||
// Process minimap tiles
|
||||
if process_minimap {
|
||||
info!("Processing minimap tiles...");
|
||||
let minimap_db = MinimapDatabase::new(database_url.clone());
|
||||
let minimap_path = format!("{}/Data/Textures/MinimapSquares", cb_assets_path);
|
||||
|
||||
match minimap_db.load_from_directory(&minimap_path, &cb_assets_path) {
|
||||
Ok(total_count) => {
|
||||
info!("\nProcessed {} total tiles (all zoom levels)", total_count);
|
||||
|
||||
// Get statistics
|
||||
if let Ok(stats) = minimap_db.get_storage_stats() {
|
||||
info!("\n=== Storage Statistics ===");
|
||||
info!(
|
||||
"Original PNG total: {} MB",
|
||||
stats.total_original_size / 1_048_576
|
||||
);
|
||||
info!("WebP total: {} MB", stats.total_webp_size() / 1_048_576);
|
||||
info!("Compression ratio: {:.2}%\n", stats.compression_ratio());
|
||||
|
||||
info!("=== Tiles Per Zoom Level ===");
|
||||
info!(
|
||||
"Zoom 2 (original): {} tiles ({} MB)",
|
||||
stats.zoom2_count,
|
||||
stats.zoom2_size / 1_048_576
|
||||
);
|
||||
info!(
|
||||
"Zoom 1 (2x2 merged): {} tiles ({} MB)",
|
||||
stats.zoom1_count,
|
||||
stats.zoom1_size / 1_048_576
|
||||
);
|
||||
info!(
|
||||
"Zoom 0 (4x4 merged): {} tiles ({} MB)",
|
||||
stats.zoom0_count,
|
||||
stats.zoom0_size / 1_048_576
|
||||
);
|
||||
}
|
||||
|
||||
if let Ok(bounds) = minimap_db.get_map_bounds() {
|
||||
info!("\n=== Map Bounds ===");
|
||||
info!("Min (x,y): {:?}", bounds.0);
|
||||
info!("Max (x,y): {:?}", bounds.1);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to process minimap tiles: {}", e);
|
||||
return Err(Box::new(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Process game icons
|
||||
if process_icons {
|
||||
info!("\n=== Processing Game Icons ===");
|
||||
let icon_db = IconDatabase::new(database_url);
|
||||
|
||||
match icon_db.load_all_icons(&cb_assets_path) {
|
||||
Ok(stats) => {
|
||||
info!("\n=== Icon Statistics ===");
|
||||
info!("Ability icons: {}", stats.abilities);
|
||||
info!("Buff icons: {}", stats.buffs);
|
||||
info!("Trait icons: {}", stats.traits);
|
||||
info!("Player house icons: {}", stats.player_houses);
|
||||
info!("Stat icons: {}", stats.stat_icons);
|
||||
info!("Achievement icons: {}", stats.achievement_icons);
|
||||
info!("General icons: {}", stats.general_icons);
|
||||
info!("Total icons: {}", stats.total_icons());
|
||||
info!("Total size: {} KB", stats.total_bytes / 1024);
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to process icons: {}", e);
|
||||
return Err(Box::new(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::logger().flush();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
557
cursebreaker-parser/src/bin/scene-parser.rs
Normal file
557
cursebreaker-parser/src/bin/scene-parser.rs
Normal file
@@ -0,0 +1,557 @@
|
||||
//! Scene Parser - Parses Unity scenes and extracts game objects
|
||||
//!
|
||||
//! This binary handles:
|
||||
//! - Initializing the Unity project
|
||||
//! - Parsing Unity scenes with type filtering
|
||||
//! - Extracting Interactable_Resource components only
|
||||
//! - Computing world transforms
|
||||
//! - Saving resource locations to the database
|
||||
//! - Processing and saving item icons for resources
|
||||
//!
|
||||
//! Usage:
|
||||
//! scene-parser [min_x max_x min_y max_y]
|
||||
//!
|
||||
//! Examples:
|
||||
//! scene-parser # Parse all scenes
|
||||
//! scene-parser 0 10 0 10 # Parse scenes from (0,0) to (10,10)
|
||||
|
||||
use cursebreaker_parser::{
|
||||
InteractableResource, InteractableTeleporter, InteractableWorkbench,
|
||||
LootSpawner, MapIcon, MapNameChanger, ImageProcessor, OutlineConfig
|
||||
};
|
||||
use unity_parser::{UnityProject, TypeFilter};
|
||||
use std::path::{Path, PathBuf};
|
||||
use unity_parser::log::DedupLogger;
|
||||
use log::{info, error, warn, LevelFilter};
|
||||
use std::env;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
|
||||
/// Bounds for filtering which scene tiles to parse
|
||||
#[derive(Debug, Clone)]
|
||||
struct Bounds {
|
||||
min_x: i32,
|
||||
max_x: i32,
|
||||
min_y: i32,
|
||||
max_y: i32,
|
||||
}
|
||||
|
||||
impl Bounds {
|
||||
fn contains(&self, x: i32, y: i32) -> bool {
|
||||
x >= self.min_x && x <= self.max_x && y >= self.min_y && y <= self.max_y
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse scene filename to extract tile coordinates (e.g., "10_3.unity" -> (10, 3))
|
||||
fn parse_scene_coords(filename: &str) -> Option<(i32, i32)> {
|
||||
let stem = filename.strip_suffix(".unity")?;
|
||||
let parts: Vec<&str> = stem.split('_').collect();
|
||||
if parts.len() == 2 {
|
||||
let x = parts[0].parse().ok()?;
|
||||
let y = parts[1].parse().ok()?;
|
||||
Some((x, y))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Find all scene files matching the *_*.unity pattern
|
||||
fn find_scene_files(scenes_dir: &Path, bounds: Option<&Bounds>) -> Vec<PathBuf> {
|
||||
let mut scenes = Vec::new();
|
||||
|
||||
if let Ok(entries) = fs::read_dir(scenes_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
|
||||
if filename.ends_with(".unity") {
|
||||
if let Some((x, y)) = parse_scene_coords(filename) {
|
||||
// Check bounds if specified
|
||||
if let Some(b) = bounds {
|
||||
if !b.contains(x, y) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
scenes.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by coordinates for consistent ordering
|
||||
scenes.sort_by(|a, b| {
|
||||
let a_coords = a.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.and_then(parse_scene_coords)
|
||||
.unwrap_or((0, 0));
|
||||
let b_coords = b.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.and_then(parse_scene_coords)
|
||||
.unwrap_or((0, 0));
|
||||
a_coords.cmp(&b_coords)
|
||||
});
|
||||
|
||||
scenes
|
||||
}
|
||||
|
||||
/// Parse command line arguments for bounds
|
||||
fn parse_bounds_args() -> Option<Bounds> {
|
||||
let args: Vec<String> = env::args().collect();
|
||||
|
||||
if args.len() == 5 {
|
||||
let min_x = args[1].parse().ok()?;
|
||||
let max_x = args[2].parse().ok()?;
|
||||
let min_y = args[3].parse().ok()?;
|
||||
let max_y = args[4].parse().ok()?;
|
||||
Some(Bounds { min_x, max_x, min_y, max_y })
|
||||
} else if args.len() == 1 {
|
||||
None // No bounds specified, parse all
|
||||
} else {
|
||||
eprintln!("Usage: {} [min_x max_x min_y max_y]", args[0]);
|
||||
eprintln!(" No arguments: parse all scenes");
|
||||
eprintln!(" 4 arguments: parse scenes within bounds (inclusive)");
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let logger = DedupLogger::new();
|
||||
log::set_boxed_logger(Box::new(logger))
|
||||
.map(|()| log::set_max_level(LevelFilter::Warn))
|
||||
.unwrap();
|
||||
|
||||
info!("🎮 Cursebreaker - Scene Parser");
|
||||
|
||||
// Parse bounds from command line
|
||||
let bounds = parse_bounds_args();
|
||||
if let Some(ref b) = bounds {
|
||||
info!("📐 Bounds: x=[{}, {}], y=[{}, {}]", b.min_x, b.max_x, b.min_y, b.max_y);
|
||||
} else {
|
||||
info!("📐 Bounds: none (parsing all scenes)");
|
||||
}
|
||||
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
|
||||
// Initialize Unity project once - scans entire project for GUID mappings
|
||||
let project_root = Path::new(&cb_assets_path);
|
||||
info!("\n📦 Initializing Unity project from: {}", project_root.display());
|
||||
|
||||
let project = UnityProject::from_path(project_root)?;
|
||||
|
||||
// Find all scene files
|
||||
let scenes_dir = project_root.join("_GameAssets/Scenes/Tiles");
|
||||
let scene_files = find_scene_files(&scenes_dir, bounds.as_ref());
|
||||
info!("🔍 Found {} scene files to parse", scene_files.len());
|
||||
|
||||
if scene_files.is_empty() {
|
||||
warn!("No scene files found matching criteria");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Create type filter to only parse GameObject, Transform, and InteractableResource MonoBehaviour
|
||||
info!("🔍 Setting up type filter:");
|
||||
info!(" • Unity types: GameObject, Transform");
|
||||
info!(" • Custom MonoBehaviours: InteractableResource");
|
||||
let type_filter = TypeFilter::new(
|
||||
vec!["GameObject", "Transform", "PrefabInstance"],
|
||||
vec!["InteractableResource", "InteractableTeleporter", "InteractableWorkbench", "LootSpawner", "MapIcon", "MapNameChanger"]
|
||||
);
|
||||
|
||||
// Setup database connection
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "cursebreaker.db".to_string());
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
// Clear all tables before processing (they're regenerated each run)
|
||||
{
|
||||
use cursebreaker_parser::schema::{
|
||||
world_resources, world_teleporters, world_workbenches,
|
||||
world_loot, world_map_icons, world_map_name_changers, resource_icons
|
||||
};
|
||||
diesel::delete(world_resources::table).execute(&mut conn)?;
|
||||
diesel::delete(world_teleporters::table).execute(&mut conn)?;
|
||||
diesel::delete(world_workbenches::table).execute(&mut conn)?;
|
||||
diesel::delete(world_loot::table).execute(&mut conn)?;
|
||||
diesel::delete(world_map_icons::table).execute(&mut conn)?;
|
||||
diesel::delete(world_map_name_changers::table).execute(&mut conn)?;
|
||||
diesel::delete(resource_icons::table).execute(&mut conn)?;
|
||||
}
|
||||
|
||||
// Collect unique harvestables across all scenes for icon processing
|
||||
let mut all_unique_harvestables: HashMap<i32, String> = HashMap::new();
|
||||
|
||||
// Track totals
|
||||
let mut total_resources = 0;
|
||||
let mut total_teleporters = 0;
|
||||
let mut total_workbenches = 0;
|
||||
let mut total_loot = 0;
|
||||
let mut total_map_icons = 0;
|
||||
let mut total_map_name_changers = 0;
|
||||
let mut scenes_processed = 0;
|
||||
let mut scenes_failed = 0;
|
||||
|
||||
// Process each scene
|
||||
for (idx, scene_path) in scene_files.iter().enumerate() {
|
||||
let relative_path = scene_path.strip_prefix(project_root)
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|_| scene_path.to_string_lossy().to_string());
|
||||
|
||||
print!("\n📁 [{}/{}] Parsing scene: {}", idx + 1, scene_files.len(), relative_path);
|
||||
|
||||
match project.parse_scene_filtered(&relative_path, Some(&type_filter)) {
|
||||
Ok(mut scene) => {
|
||||
info!(" ✓ Parsed ({} entities)", scene.entity_map.len());
|
||||
|
||||
// Post-processing: Compute world transforms
|
||||
unity_parser::compute_world_transforms(&mut scene.world, &scene.entity_map);
|
||||
|
||||
// Save resources
|
||||
let resource_count = save_resources(&mut conn, &scene)?;
|
||||
total_resources += resource_count;
|
||||
|
||||
// Collect unique harvestables for icon processing later
|
||||
scene.world
|
||||
.query_all::<(&InteractableResource, &unity_parser::GameObject)>()
|
||||
.for_each(|(resource, object)| {
|
||||
all_unique_harvestables.entry(resource.type_id as i32)
|
||||
.or_insert_with(|| object.name.to_string());
|
||||
});
|
||||
|
||||
// Save other world objects (append mode - tables already cleared)
|
||||
total_teleporters += save_teleporters_append(&mut conn, &scene)?;
|
||||
total_workbenches += save_workbenches_append(&mut conn, &scene)?;
|
||||
total_loot += save_loot_spawners_append(&mut conn, &scene)?;
|
||||
total_map_icons += save_map_icons_append(&mut conn, &scene)?;
|
||||
total_map_name_changers += save_map_name_changers_append(&mut conn, &scene)?;
|
||||
|
||||
scenes_processed += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
error!(" ✗ Parse error: {}", e);
|
||||
scenes_failed += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::logger().flush();
|
||||
|
||||
// Process icons for all unique harvestables
|
||||
info!("\n🎨 Processing item icons for {} unique harvestable types...", all_unique_harvestables.len());
|
||||
process_item_icons_from_map(&cb_assets_path, &mut conn, &all_unique_harvestables)?;
|
||||
|
||||
// Print summary
|
||||
println!("\n==================================================");
|
||||
println!("📊 SUMMARY");
|
||||
println!("==================================================");
|
||||
println!(" Scenes processed: {} ({} failed)", scenes_processed, scenes_failed);
|
||||
println!(" Resources: {}", total_resources);
|
||||
println!(" Teleporters: {}", total_teleporters);
|
||||
println!(" Workbenches: {}", total_workbenches);
|
||||
println!(" Loot spawners: {}", total_loot);
|
||||
println!(" Map icons: {}", total_map_icons);
|
||||
println!(" Map name changers:{}", total_map_name_changers);
|
||||
println!("==================================================");
|
||||
|
||||
log::logger().flush();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Save resources from a scene (append mode)
|
||||
fn save_resources(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_resources;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
conn.transaction::<_, diesel::result::Error, _>(|conn| {
|
||||
scene.world
|
||||
.query_all::<(&InteractableResource, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(resource, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
let _ = diesel::insert_into(world_resources::table)
|
||||
.values((
|
||||
world_resources::item_id.eq(resource.type_id as i32),
|
||||
world_resources::pos_x.eq(world_pos.x as f32),
|
||||
world_resources::pos_y.eq(world_pos.z as f32),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Process item icons from a pre-collected map of harvestables
|
||||
fn process_item_icons_from_map(
|
||||
cb_assets_path: &str,
|
||||
conn: &mut SqliteConnection,
|
||||
unique_harvestables: &HashMap<i32, String>,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::{resource_icons, items, harvestables, harvestable_drops};
|
||||
|
||||
info!(" Processing {} unique harvestable types", unique_harvestables.len());
|
||||
|
||||
// Create image processor with white outline
|
||||
let processor = ImageProcessor::default();
|
||||
let outline_config = OutlineConfig::white(4);
|
||||
|
||||
let mut processed_count = 0;
|
||||
let mut failed_count = 0;
|
||||
|
||||
// Process each unique harvestable
|
||||
for (harvestable_id, default_name) in unique_harvestables.iter() {
|
||||
// Get the harvestable name
|
||||
let harvestable_name: String = harvestables::table
|
||||
.filter(harvestables::id.eq(harvestable_id))
|
||||
.select(harvestables::name)
|
||||
.first(conn)
|
||||
.unwrap_or_else(|_| default_name.clone());
|
||||
|
||||
// Get the first item drop for this harvestable
|
||||
let item_id_result: Result<i32, _> = harvestable_drops::table
|
||||
.filter(harvestable_drops::harvestable_id.eq(harvestable_id))
|
||||
.select(harvestable_drops::item_id)
|
||||
.order(harvestable_drops::id.asc())
|
||||
.first(conn);
|
||||
|
||||
let item_id = match item_id_result {
|
||||
Ok(id) => id,
|
||||
Err(_) => {
|
||||
warn!(" ⚠️ No drops found for harvestable {} ({})", harvestable_id, harvestable_name);
|
||||
failed_count += 1;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Get the item name
|
||||
let item_name: String = items::table
|
||||
.filter(items::id.eq(&item_id))
|
||||
.select(items::name)
|
||||
.first(conn)
|
||||
.unwrap_or_else(|_| format!("Item {}", item_id));
|
||||
|
||||
// Construct icon path using the item_id from the drop
|
||||
let icon_path = PathBuf::from(cb_assets_path)
|
||||
.join("Data/Textures/ItemIcons")
|
||||
.join(format!("{}.png", item_id));
|
||||
|
||||
if !icon_path.exists() {
|
||||
warn!(" ⚠️ Icon not found for harvestable {} ({}) -> item {} ({}): {}",
|
||||
harvestable_id, harvestable_name, item_id, item_name, icon_path.display());
|
||||
failed_count += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Process the icon: resize to 64px with white outline
|
||||
match processor.process_image(&icon_path, &[64], None, Some(&outline_config)) {
|
||||
Ok(processed) => {
|
||||
if let Some(icon_data) = processed.get(64) {
|
||||
// Insert into database using harvestable_id as the key
|
||||
match diesel::insert_into(resource_icons::table)
|
||||
.values((
|
||||
resource_icons::item_id.eq(harvestable_id),
|
||||
resource_icons::name.eq(&harvestable_name),
|
||||
resource_icons::icon_64.eq(icon_data.as_slice()),
|
||||
))
|
||||
.execute(conn)
|
||||
{
|
||||
Ok(_) => {
|
||||
info!(" ✓ Harvestable {} ({}) -> Item {} ({}): {} bytes",
|
||||
harvestable_id, harvestable_name, item_id, item_name, icon_data.len());
|
||||
processed_count += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
warn!(" ⚠️ Failed to insert icon for harvestable {} ({}): {}",
|
||||
harvestable_id, harvestable_name, e);
|
||||
failed_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!(" ⚠️ Failed to process icon for harvestable {} ({}) -> item {} ({}): {}",
|
||||
harvestable_id, harvestable_name, item_id, item_name, e);
|
||||
failed_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!("✅ Processed {} harvestable icons ({} succeeded, {} failed)",
|
||||
unique_harvestables.len(), processed_count, failed_count);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Save teleporter data to database (append mode - doesn't clear table)
|
||||
fn save_teleporters_append(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_teleporters;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Query all teleporters
|
||||
scene.world
|
||||
.query_all::<(&InteractableTeleporter, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(teleporter, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
// Get the tp_transform position if it exists
|
||||
let (tp_x, tp_y) = if let Some(tp_entity) = teleporter.tp_transform {
|
||||
if let Some(tp_transform) = scene.world.borrow::<unity_parser::WorldTransform>().get(tp_entity) {
|
||||
let tp_pos = tp_transform.position();
|
||||
(Some(tp_pos.x as f32), Some(tp_pos.z as f32))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
let _ = diesel::insert_into(world_teleporters::table)
|
||||
.values((
|
||||
world_teleporters::pos_x.eq(world_pos.x as f32),
|
||||
world_teleporters::pos_y.eq(world_pos.z as f32),
|
||||
world_teleporters::tp_x.eq(tp_x),
|
||||
world_teleporters::tp_y.eq(tp_y),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Save workbench data to database (append mode - doesn't clear table)
|
||||
fn save_workbenches_append(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_workbenches;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Query all workbenches
|
||||
scene.world
|
||||
.query_all::<(&InteractableWorkbench, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(workbench, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
let _ = diesel::insert_into(world_workbenches::table)
|
||||
.values((
|
||||
world_workbenches::pos_x.eq(world_pos.x as f32),
|
||||
world_workbenches::pos_y.eq(world_pos.z as f32),
|
||||
world_workbenches::workbench_id.eq(workbench.workbench_id as i32),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Save loot spawner data to database (append mode - doesn't clear table)
|
||||
fn save_loot_spawners_append(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_loot;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Query all loot spawners
|
||||
scene.world
|
||||
.query_all::<(&LootSpawner, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(loot, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
let _ = diesel::insert_into(world_loot::table)
|
||||
.values((
|
||||
world_loot::pos_x.eq(world_pos.x as f32),
|
||||
world_loot::pos_y.eq(world_pos.z as f32),
|
||||
world_loot::item_id.eq(loot.item_id as i32),
|
||||
world_loot::amount.eq(loot.amount as i32),
|
||||
world_loot::respawn_time.eq(loot.respawn_time as i32),
|
||||
world_loot::visibility_checks.eq(&loot.visibility_checks),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Save map icon data to database (append mode - doesn't clear table)
|
||||
fn save_map_icons_append(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_map_icons;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Query all map icons
|
||||
scene.world
|
||||
.query_all::<(&MapIcon, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(icon, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
let _ = diesel::insert_into(world_map_icons::table)
|
||||
.values((
|
||||
world_map_icons::pos_x.eq(world_pos.x as f32),
|
||||
world_map_icons::pos_y.eq(world_pos.z as f32),
|
||||
world_map_icons::icon_type.eq(icon.icon_type as i32),
|
||||
world_map_icons::icon_size.eq(icon.icon_size as i32),
|
||||
world_map_icons::icon.eq(&icon.icon),
|
||||
world_map_icons::text.eq(&icon.text),
|
||||
world_map_icons::font_size.eq(icon.font_size as i32),
|
||||
world_map_icons::hover_text.eq(&icon.hover_text),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Save map name changer data to database (append mode - doesn't clear table)
|
||||
fn save_map_name_changers_append(
|
||||
conn: &mut SqliteConnection,
|
||||
scene: &unity_parser::UnityScene,
|
||||
) -> Result<usize, Box<dyn std::error::Error>> {
|
||||
use cursebreaker_parser::schema::world_map_name_changers;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Query all map name changers
|
||||
scene.world
|
||||
.query_all::<(&MapNameChanger, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(changer, transform, _object)| {
|
||||
let world_pos = transform.position();
|
||||
|
||||
let _ = diesel::insert_into(world_map_name_changers::table)
|
||||
.values((
|
||||
world_map_name_changers::pos_x.eq(world_pos.x as f32),
|
||||
world_map_name_changers::pos_y.eq(world_pos.z as f32),
|
||||
world_map_name_changers::map_name.eq(&changer.map_name),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
count += 1;
|
||||
});
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
342
cursebreaker-parser/src/bin/xml-parser.rs
Normal file
342
cursebreaker-parser/src/bin/xml-parser.rs
Normal file
@@ -0,0 +1,342 @@
|
||||
//! XML Parser - Loads game data from XML files and populates the SQLite database
|
||||
//!
|
||||
//! Usage:
|
||||
//! xml-parser --all Parse all data types
|
||||
//! xml-parser --items Parse items only
|
||||
//! xml-parser --npcs Parse NPCs only
|
||||
//! xml-parser --quests Parse quests only
|
||||
//! xml-parser --harvestables Parse harvestables only
|
||||
//! xml-parser --loot Parse loot tables only
|
||||
//! xml-parser --maps Parse maps only
|
||||
//! xml-parser --fast-travel Parse fast travel locations only
|
||||
//! xml-parser --houses Parse player houses only
|
||||
//! xml-parser --traits Parse traits only
|
||||
//! xml-parser --shops Parse shops only
|
||||
//!
|
||||
//! Multiple flags can be combined:
|
||||
//! xml-parser --items --npcs --quests
|
||||
|
||||
use clap::Parser;
|
||||
use cursebreaker_parser::{
|
||||
ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase,
|
||||
LootDatabase, MapDatabase, FastTravelDatabase, PlayerHouseDatabase,
|
||||
TraitDatabase, ShopDatabase,
|
||||
};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use log::{info, warn, LevelFilter};
|
||||
use std::env;
|
||||
use unity_parser::log::DedupLogger;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(name = "xml-parser")]
|
||||
#[command(author = "Cursebreaker Team")]
|
||||
#[command(version = "1.0")]
|
||||
#[command(about = "Parses game XML data and populates the SQLite database")]
|
||||
struct Args {
|
||||
/// Parse all data types
|
||||
#[arg(long, short = 'a')]
|
||||
all: bool,
|
||||
|
||||
/// Parse items
|
||||
#[arg(long, short = 'i')]
|
||||
items: bool,
|
||||
|
||||
/// Parse NPCs
|
||||
#[arg(long, short = 'n')]
|
||||
npcs: bool,
|
||||
|
||||
/// Parse quests
|
||||
#[arg(long, short = 'q')]
|
||||
quests: bool,
|
||||
|
||||
/// Parse harvestables
|
||||
#[arg(long, short = 'r')]
|
||||
harvestables: bool,
|
||||
|
||||
/// Parse loot tables
|
||||
#[arg(long, short = 'l')]
|
||||
loot: bool,
|
||||
|
||||
/// Parse maps
|
||||
#[arg(long, short = 'm')]
|
||||
maps: bool,
|
||||
|
||||
/// Parse fast travel locations
|
||||
#[arg(long, short = 'f')]
|
||||
fast_travel: bool,
|
||||
|
||||
/// Parse player houses
|
||||
#[arg(long, short = 'p')]
|
||||
houses: bool,
|
||||
|
||||
/// Parse traits
|
||||
#[arg(long, short = 't')]
|
||||
traits: bool,
|
||||
|
||||
/// Parse shops
|
||||
#[arg(long, short = 's')]
|
||||
shops: bool,
|
||||
}
|
||||
|
||||
impl Args {
|
||||
/// Returns true if no specific parsers were selected
|
||||
fn none_selected(&self) -> bool {
|
||||
!self.all
|
||||
&& !self.items
|
||||
&& !self.npcs
|
||||
&& !self.quests
|
||||
&& !self.harvestables
|
||||
&& !self.loot
|
||||
&& !self.maps
|
||||
&& !self.fast_travel
|
||||
&& !self.houses
|
||||
&& !self.traits
|
||||
&& !self.shops
|
||||
}
|
||||
|
||||
/// Returns true if items should be parsed
|
||||
fn should_parse_items(&self) -> bool {
|
||||
self.all || self.items
|
||||
}
|
||||
|
||||
/// Returns true if NPCs should be parsed
|
||||
fn should_parse_npcs(&self) -> bool {
|
||||
self.all || self.npcs
|
||||
}
|
||||
|
||||
/// Returns true if quests should be parsed
|
||||
fn should_parse_quests(&self) -> bool {
|
||||
self.all || self.quests
|
||||
}
|
||||
|
||||
/// Returns true if harvestables should be parsed
|
||||
fn should_parse_harvestables(&self) -> bool {
|
||||
self.all || self.harvestables
|
||||
}
|
||||
|
||||
/// Returns true if loot should be parsed
|
||||
fn should_parse_loot(&self) -> bool {
|
||||
self.all || self.loot
|
||||
}
|
||||
|
||||
/// Returns true if maps should be parsed
|
||||
fn should_parse_maps(&self) -> bool {
|
||||
self.all || self.maps
|
||||
}
|
||||
|
||||
/// Returns true if fast travel should be parsed
|
||||
fn should_parse_fast_travel(&self) -> bool {
|
||||
self.all || self.fast_travel
|
||||
}
|
||||
|
||||
/// Returns true if houses should be parsed
|
||||
fn should_parse_houses(&self) -> bool {
|
||||
self.all || self.houses
|
||||
}
|
||||
|
||||
/// Returns true if traits should be parsed
|
||||
fn should_parse_traits(&self) -> bool {
|
||||
self.all || self.traits
|
||||
}
|
||||
|
||||
/// Returns true if shops should be parsed
|
||||
fn should_parse_shops(&self) -> bool {
|
||||
self.all || self.shops
|
||||
}
|
||||
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let logger = DedupLogger::new();
|
||||
log::set_boxed_logger(Box::new(logger))
|
||||
.map(|()| log::set_max_level(LevelFilter::Trace))
|
||||
.unwrap();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
// If no parsers selected, show help
|
||||
if args.none_selected() {
|
||||
eprintln!("No parsers selected. Use --all to parse everything, or specify individual parsers.");
|
||||
eprintln!("Run with --help for usage information.");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
info!("Cursebreaker - XML Parser");
|
||||
info!("Loading game data from XML...");
|
||||
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH")
|
||||
.unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let database_url = env::var("DATABASE_URL")
|
||||
.unwrap_or_else(|_| "cursebreaker.db".to_string());
|
||||
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
// Parse Items
|
||||
if args.should_parse_items() {
|
||||
info!("Parsing items...");
|
||||
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
|
||||
match ItemDatabase::load_from_xml(&items_path) {
|
||||
Ok(item_db) => {
|
||||
info!("Loaded {} items", item_db.len());
|
||||
let icon_path = format!("{}/Data/Textures/ItemIcons", cb_assets_path);
|
||||
match item_db.save_to_db_with_images(&mut conn, &icon_path) {
|
||||
Ok((items_count, images_count)) => {
|
||||
info!("Saved {} items to database", items_count);
|
||||
info!("Processed {} item icons", images_count);
|
||||
}
|
||||
Err(e) => warn!("Failed to save items: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load items: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse NPCs
|
||||
if args.should_parse_npcs() {
|
||||
info!("Parsing NPCs...");
|
||||
let npcs_path = format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path);
|
||||
match NpcDatabase::load_from_xml(&npcs_path) {
|
||||
Ok(npc_db) => {
|
||||
info!("Loaded {} NPCs", npc_db.len());
|
||||
match npc_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} NPCs to database", count),
|
||||
Err(e) => warn!("Failed to save NPCs: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load NPCs: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Quests
|
||||
if args.should_parse_quests() {
|
||||
info!("Parsing quests...");
|
||||
let quests_path = format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path);
|
||||
match QuestDatabase::load_from_xml(&quests_path) {
|
||||
Ok(quest_db) => {
|
||||
info!("Loaded {} quests", quest_db.len());
|
||||
match quest_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} quests to database", count),
|
||||
Err(e) => warn!("Failed to save quests: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load quests: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Harvestables
|
||||
if args.should_parse_harvestables() {
|
||||
info!("Parsing harvestables...");
|
||||
let harvestables_path = format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path);
|
||||
match HarvestableDatabase::load_from_xml(&harvestables_path) {
|
||||
Ok(harvestable_db) => {
|
||||
info!("Loaded {} harvestables", harvestable_db.len());
|
||||
match harvestable_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} harvestables to database", count),
|
||||
Err(e) => warn!("Failed to save harvestables: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load harvestables: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Loot
|
||||
if args.should_parse_loot() {
|
||||
info!("Parsing loot tables...");
|
||||
let loot_path = format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path);
|
||||
match LootDatabase::load_from_xml(&loot_path) {
|
||||
Ok(loot_db) => {
|
||||
info!("Loaded {} loot tables", loot_db.len());
|
||||
match loot_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} loot tables to database", count),
|
||||
Err(e) => warn!("Failed to save loot tables: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load loot tables: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Maps
|
||||
if args.should_parse_maps() {
|
||||
info!("Parsing maps...");
|
||||
let maps_path = format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path);
|
||||
match MapDatabase::load_from_xml(&maps_path) {
|
||||
Ok(map_db) => {
|
||||
info!("Loaded {} maps", map_db.len());
|
||||
match map_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} maps to database", count),
|
||||
Err(e) => warn!("Failed to save maps: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load maps: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Fast Travel
|
||||
if args.should_parse_fast_travel() {
|
||||
info!("Parsing fast travel locations...");
|
||||
let fast_travel_dir = format!("{}/Data/XMLs", cb_assets_path);
|
||||
match FastTravelDatabase::load_from_directory(&fast_travel_dir) {
|
||||
Ok(fast_travel_db) => {
|
||||
info!("Loaded {} fast travel locations", fast_travel_db.len());
|
||||
match fast_travel_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} fast travel locations to database", count),
|
||||
Err(e) => warn!("Failed to save fast travel locations: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load fast travel locations: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Player Houses
|
||||
if args.should_parse_houses() {
|
||||
info!("Parsing player houses...");
|
||||
let player_houses_path = format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path);
|
||||
match PlayerHouseDatabase::load_from_xml(&player_houses_path) {
|
||||
Ok(player_house_db) => {
|
||||
info!("Loaded {} player houses", player_house_db.len());
|
||||
match player_house_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} player houses to database", count),
|
||||
Err(e) => warn!("Failed to save player houses: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load player houses: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Traits
|
||||
if args.should_parse_traits() {
|
||||
info!("Parsing traits...");
|
||||
let traits_path = format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path);
|
||||
match TraitDatabase::load_from_xml(&traits_path) {
|
||||
Ok(trait_db) => {
|
||||
info!("Loaded {} traits", trait_db.len());
|
||||
match trait_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} traits to database", count),
|
||||
Err(e) => warn!("Failed to save traits: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load traits: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Shops
|
||||
if args.should_parse_shops() {
|
||||
info!("Parsing shops...");
|
||||
let shops_path = format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path);
|
||||
match ShopDatabase::load_from_xml(&shops_path) {
|
||||
Ok(shop_db) => {
|
||||
info!("Loaded {} shops", shop_db.len());
|
||||
match shop_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("Saved {} shops to database", count),
|
||||
Err(e) => warn!("Failed to save shops: {}", e),
|
||||
}
|
||||
}
|
||||
Err(e) => warn!("Failed to load shops: {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
info!("XML parsing complete!");
|
||||
log::logger().flush();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
24
cursebreaker-parser/src/databases/db_helper.rs
Normal file
24
cursebreaker-parser/src/databases/db_helper.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
/// Helper module for database persistence operations
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
|
||||
/// Establish a database connection
|
||||
pub fn establish_connection(database_url: &str) -> Result<SqliteConnection, diesel::ConnectionError> {
|
||||
SqliteConnection::establish(database_url)
|
||||
}
|
||||
|
||||
/// Generic record for simple id/name/data pattern
|
||||
#[derive(Queryable)]
|
||||
pub struct SimpleRecord {
|
||||
pub id: Option<i32>,
|
||||
pub name: String,
|
||||
pub data: String,
|
||||
}
|
||||
|
||||
/// Generic record for text-based primary keys
|
||||
#[derive(Queryable)]
|
||||
pub struct TextKeyRecord {
|
||||
pub key: Option<String>,
|
||||
pub secondary: Option<String>,
|
||||
pub data: String,
|
||||
}
|
||||
330
cursebreaker-parser/src/databases/fast_travel_database.rs
Normal file
330
cursebreaker-parser/src/databases/fast_travel_database.rs
Normal file
@@ -0,0 +1,330 @@
|
||||
use crate::types::{FastTravelLocation, FastTravelType};
|
||||
use crate::xml_parsers::{
|
||||
parse_fast_travel_canoe_xml, parse_fast_travel_locations_xml, parse_fast_travel_portals_xml,
|
||||
XmlParseError,
|
||||
};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Fast Travel Locations loaded from XML files
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FastTravelDatabase {
|
||||
locations: Vec<FastTravelLocation>,
|
||||
// Map ID -> location index
|
||||
locations_by_id: HashMap<i32, usize>,
|
||||
// Map name -> list of location indices
|
||||
locations_by_name: HashMap<String, Vec<usize>>,
|
||||
// Map type -> list of location indices
|
||||
locations_by_type: HashMap<FastTravelType, Vec<usize>>,
|
||||
}
|
||||
|
||||
impl FastTravelDatabase {
|
||||
/// Create a new empty FastTravelDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
locations: Vec::new(),
|
||||
locations_by_id: HashMap::new(),
|
||||
locations_by_name: HashMap::new(),
|
||||
locations_by_type: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load all fast travel types from their respective XML files in a directory
|
||||
/// Expects the directory structure:
|
||||
/// - dir/FastTravelLocations/FastTravelLocations.xml
|
||||
/// - dir/FastTravelCanoe/FastTravelCanoe.xml
|
||||
/// - dir/FastTravelPortals/FastTravelPortals.xml
|
||||
pub fn load_from_directory<P: AsRef<Path>>(dir: P) -> Result<Self, XmlParseError> {
|
||||
let dir = dir.as_ref();
|
||||
let mut db = Self::new();
|
||||
|
||||
// Load regular locations
|
||||
let locations_path = dir.join("FastTravelLocations/FastTravelLocations.xml");
|
||||
if locations_path.exists() {
|
||||
let locations = parse_fast_travel_locations_xml(&locations_path)?;
|
||||
db.add_locations(locations);
|
||||
}
|
||||
|
||||
// Load canoe locations
|
||||
let canoe_path = dir.join("FastTravelCanoe/FastTravelCanoe.xml");
|
||||
if canoe_path.exists() {
|
||||
let canoe_locations = parse_fast_travel_canoe_xml(&canoe_path)?;
|
||||
db.add_locations(canoe_locations);
|
||||
}
|
||||
|
||||
// Load portal locations
|
||||
let portals_path = dir.join("FastTravelPortals/FastTravelPortals.xml");
|
||||
if portals_path.exists() {
|
||||
let portals = parse_fast_travel_portals_xml(&portals_path)?;
|
||||
db.add_locations(portals);
|
||||
}
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Load only regular fast travel locations from XML
|
||||
pub fn load_locations_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let locations = parse_fast_travel_locations_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_locations(locations);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Load only canoe fast travel locations from XML
|
||||
pub fn load_canoe_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let locations = parse_fast_travel_canoe_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_locations(locations);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Load only portal fast travel locations from XML
|
||||
pub fn load_portals_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let locations = parse_fast_travel_portals_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_locations(locations);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add fast travel locations to the database
|
||||
pub fn add_locations(&mut self, locations: Vec<FastTravelLocation>) {
|
||||
for location in locations {
|
||||
let index = self.locations.len();
|
||||
|
||||
// Index by ID
|
||||
self.locations_by_id.insert(location.id, index);
|
||||
|
||||
// Index by name
|
||||
self.locations_by_name
|
||||
.entry(location.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
// Index by type
|
||||
self.locations_by_type
|
||||
.entry(location.travel_type)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
self.locations.push(location);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a fast travel location by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&FastTravelLocation> {
|
||||
self.locations_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.locations.get(index))
|
||||
}
|
||||
|
||||
/// Get fast travel locations by name (returns all locations with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&FastTravelLocation> {
|
||||
self.locations_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.locations.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all locations
|
||||
pub fn all_locations(&self) -> &[FastTravelLocation] {
|
||||
&self.locations
|
||||
}
|
||||
|
||||
/// Get all locations of a specific type
|
||||
pub fn get_by_type(&self, travel_type: FastTravelType) -> Vec<&FastTravelLocation> {
|
||||
self.locations_by_type
|
||||
.get(&travel_type)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.locations.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all regular fast travel locations
|
||||
pub fn get_locations(&self) -> Vec<&FastTravelLocation> {
|
||||
self.get_by_type(FastTravelType::Location)
|
||||
}
|
||||
|
||||
/// Get all canoe fast travel locations
|
||||
pub fn get_canoe_locations(&self) -> Vec<&FastTravelLocation> {
|
||||
self.get_by_type(FastTravelType::Canoe)
|
||||
}
|
||||
|
||||
/// Get all portal fast travel locations
|
||||
pub fn get_portals(&self) -> Vec<&FastTravelLocation> {
|
||||
self.get_by_type(FastTravelType::Portal)
|
||||
}
|
||||
|
||||
/// Get all unlocked locations (regular locations only)
|
||||
pub fn get_unlocked_locations(&self) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.unlocked)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all locations with requirements
|
||||
pub fn get_locations_with_requirements(&self) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.has_requirements())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all locations that have connections to other locations
|
||||
pub fn get_connected_locations(&self) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.has_connections())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get locations that are connected to a specific location ID
|
||||
pub fn get_locations_connected_to(&self, id: i32) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.get_connections().contains(&id))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get locations that require a specific quest
|
||||
pub fn get_locations_requiring_quest(&self, quest_id: &str) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.requires_quest(quest_id))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get locations that require a specific trait
|
||||
pub fn get_locations_requiring_trait(&self, trait_id: i32) -> Vec<&FastTravelLocation> {
|
||||
self.locations
|
||||
.iter()
|
||||
.filter(|loc| loc.requires_trait(trait_id))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all unique location names
|
||||
pub fn get_all_names(&self) -> Vec<String> {
|
||||
self.locations_by_name.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get count by type
|
||||
pub fn count_by_type(&self, travel_type: FastTravelType) -> usize {
|
||||
self.locations_by_type
|
||||
.get(&travel_type)
|
||||
.map(|v| v.len())
|
||||
.unwrap_or(0)
|
||||
}
|
||||
|
||||
/// Get number of locations in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.locations.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.locations.is_empty()
|
||||
}
|
||||
|
||||
/// Save all fast travel locations to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::fast_travel_locations;
|
||||
|
||||
// Clear existing entries
|
||||
diesel::delete(fast_travel_locations::table).execute(conn)?;
|
||||
|
||||
let mut count = 0;
|
||||
for location in &self.locations {
|
||||
let record = (
|
||||
fast_travel_locations::name.eq(&location.name),
|
||||
fast_travel_locations::pos_x.eq(location.pos_x),
|
||||
fast_travel_locations::pos_z.eq(location.pos_z),
|
||||
fast_travel_locations::travel_type.eq(location.travel_type.to_string()),
|
||||
fast_travel_locations::unlocked.eq(if location.unlocked { 1 } else { 0 }),
|
||||
fast_travel_locations::connections.eq(&location.connections),
|
||||
fast_travel_locations::checks.eq(&location.checks),
|
||||
);
|
||||
|
||||
diesel::insert_into(fast_travel_locations::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all fast travel locations from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::fast_travel_locations::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
#[allow(dead_code)]
|
||||
struct FastTravelLocationRecord {
|
||||
name: Option<String>,
|
||||
pos_x: f32,
|
||||
pos_z: f32,
|
||||
travel_type: String,
|
||||
unlocked: i32,
|
||||
connections: Option<String>,
|
||||
checks: Option<String>,
|
||||
}
|
||||
|
||||
let records = fast_travel_locations.load::<FastTravelLocationRecord>(conn)?;
|
||||
|
||||
let mut loaded_locations = Vec::new();
|
||||
for record in records {
|
||||
let travel_type_enum = match record.travel_type.as_str() {
|
||||
"Location" => FastTravelType::Location,
|
||||
"Canoe" => FastTravelType::Canoe,
|
||||
"Portal" => FastTravelType::Portal,
|
||||
_ => FastTravelType::Location, // Default fallback
|
||||
};
|
||||
|
||||
let mut location = FastTravelLocation::new(
|
||||
0, // id not stored in DB
|
||||
record.name.unwrap_or_default(),
|
||||
record.pos_x,
|
||||
record.pos_z,
|
||||
travel_type_enum,
|
||||
);
|
||||
location.unlocked = record.unlocked != 0;
|
||||
location.connections = record.connections;
|
||||
location.checks = record.checks;
|
||||
|
||||
loaded_locations.push(location);
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_locations(loaded_locations);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FastTravelDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_fast_travel_database_basic() {
|
||||
let mut db = FastTravelDatabase::new();
|
||||
assert!(db.is_empty());
|
||||
assert_eq!(db.len(), 0);
|
||||
}
|
||||
}
|
||||
354
cursebreaker-parser/src/databases/harvestable_database.rs
Normal file
354
cursebreaker-parser/src/databases/harvestable_database.rs
Normal file
@@ -0,0 +1,354 @@
|
||||
use crate::types::Harvestable;
|
||||
use crate::xml_parsers::{parse_harvestables_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Harvestables loaded from XML files
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HarvestableDatabase {
|
||||
harvestables: Vec<Harvestable>,
|
||||
harvestables_by_typeid: HashMap<i32, usize>,
|
||||
harvestables_by_name: HashMap<String, usize>,
|
||||
}
|
||||
|
||||
impl HarvestableDatabase {
|
||||
/// Create a new empty HarvestableDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
harvestables: Vec::new(),
|
||||
harvestables_by_typeid: HashMap::new(),
|
||||
harvestables_by_name: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load harvestables from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let harvestables = parse_harvestables_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_harvestables(harvestables);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add harvestables to the database
|
||||
pub fn add_harvestables(&mut self, harvestables: Vec<Harvestable>) {
|
||||
for harvestable in harvestables {
|
||||
let index = self.harvestables.len();
|
||||
self.harvestables_by_typeid.insert(harvestable.typeid, index);
|
||||
self.harvestables_by_name.insert(harvestable.name.clone(), index);
|
||||
self.harvestables.push(harvestable);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a harvestable by type ID
|
||||
pub fn get_by_typeid(&self, typeid: i32) -> Option<&Harvestable> {
|
||||
self.harvestables_by_typeid
|
||||
.get(&typeid)
|
||||
.and_then(|&index| self.harvestables.get(index))
|
||||
}
|
||||
|
||||
/// Get a harvestable by name
|
||||
pub fn get_by_name(&self, name: &str) -> Option<&Harvestable> {
|
||||
self.harvestables_by_name
|
||||
.get(name)
|
||||
.and_then(|&index| self.harvestables.get(index))
|
||||
}
|
||||
|
||||
/// Get all harvestables
|
||||
pub fn all_harvestables(&self) -> &[Harvestable] {
|
||||
&self.harvestables
|
||||
}
|
||||
|
||||
/// Get harvestables by skill
|
||||
pub fn get_by_skill(&self, skill: &str) -> Vec<&Harvestable> {
|
||||
use crate::types::SkillType;
|
||||
let skill_type = skill.parse::<SkillType>().unwrap_or(SkillType::None);
|
||||
self.harvestables
|
||||
.iter()
|
||||
.filter(|h| h.skill == skill_type)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get harvestables that require a specific tool
|
||||
pub fn get_by_tool(&self, tool: &str) -> Vec<&Harvestable> {
|
||||
use crate::types::Tool;
|
||||
let tool_type = tool.parse::<Tool>().unwrap_or(Tool::None);
|
||||
self.harvestables
|
||||
.iter()
|
||||
.filter(|h| h.tool == tool_type)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all trees (harvestables with tree=1)
|
||||
pub fn get_trees(&self) -> Vec<&Harvestable> {
|
||||
self.harvestables
|
||||
.iter()
|
||||
.filter(|h| h.is_tree())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get harvestables that require tools
|
||||
pub fn get_requiring_tools(&self) -> Vec<&Harvestable> {
|
||||
self.harvestables
|
||||
.iter()
|
||||
.filter(|h| h.requires_tool())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get harvestables by level range
|
||||
pub fn get_by_level_range(&self, min_level: i32, max_level: i32) -> Vec<&Harvestable> {
|
||||
self.harvestables
|
||||
.iter()
|
||||
.filter(|h| h.level >= min_level && h.level <= max_level)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get number of harvestables in the database.
pub fn len(&self) -> usize {
    self.harvestables.len()
}
|
||||
|
||||
/// Check if the database contains no harvestables.
pub fn is_empty(&self) -> bool {
    self.harvestables.is_empty()
}
|
||||
|
||||
/// Prepare harvestables for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
#[allow(deprecated)]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String, String, i32, String, String, i32, i32, i32, i32, i32)> {
|
||||
use crate::types::{SkillType, Tool};
|
||||
|
||||
self.harvestables
|
||||
.iter()
|
||||
.map(|harvestable| {
|
||||
let skill_str = match harvestable.skill {
|
||||
SkillType::None => "none",
|
||||
SkillType::Swordsmanship => "swordsmanship",
|
||||
SkillType::Archery => "archery",
|
||||
SkillType::Magic => "magic",
|
||||
SkillType::Defence => "defence",
|
||||
SkillType::Mining => "mining",
|
||||
SkillType::Woodcutting => "woodcutting",
|
||||
SkillType::Fishing => "fishing",
|
||||
SkillType::Cooking => "cooking",
|
||||
SkillType::Carpentry => "carpentry",
|
||||
SkillType::Blacksmithy => "blacksmithy",
|
||||
SkillType::Tailoring => "tailoring",
|
||||
SkillType::Alchemy => "alchemy",
|
||||
}.to_string();
|
||||
|
||||
let tool_str = match harvestable.tool {
|
||||
Tool::None => "none",
|
||||
Tool::Pickaxe => "pickaxe",
|
||||
Tool::Hatchet => "hatchet",
|
||||
Tool::Scythe => "scythe",
|
||||
Tool::Hammer => "hammer",
|
||||
Tool::Shears => "shears",
|
||||
Tool::FishingRod => "fishingrod",
|
||||
}.to_string();
|
||||
|
||||
(
|
||||
harvestable.typeid,
|
||||
harvestable.name.clone(),
|
||||
harvestable.desc.clone(),
|
||||
harvestable.comment.clone(),
|
||||
harvestable.level,
|
||||
skill_str,
|
||||
tool_str,
|
||||
harvestable.min_health,
|
||||
harvestable.max_health,
|
||||
harvestable.harvesttime,
|
||||
harvestable.hittime,
|
||||
harvestable.respawntime,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all harvestables to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::{harvestables, harvestable_drops};
|
||||
use crate::types::{SkillType, Tool};
|
||||
|
||||
// Clear existing data
|
||||
diesel::delete(harvestable_drops::table).execute(conn)?;
|
||||
diesel::delete(harvestables::table).execute(conn)?;
|
||||
|
||||
let mut count = 0;
|
||||
for harvestable in &self.harvestables {
|
||||
// Convert enums to strings for database storage
|
||||
let skill_str = match harvestable.skill {
|
||||
SkillType::None => "none",
|
||||
SkillType::Swordsmanship => "swordsmanship",
|
||||
SkillType::Archery => "archery",
|
||||
SkillType::Magic => "magic",
|
||||
SkillType::Defence => "defence",
|
||||
SkillType::Mining => "mining",
|
||||
SkillType::Woodcutting => "woodcutting",
|
||||
SkillType::Fishing => "fishing",
|
||||
SkillType::Cooking => "cooking",
|
||||
SkillType::Carpentry => "carpentry",
|
||||
SkillType::Blacksmithy => "blacksmithy",
|
||||
SkillType::Tailoring => "tailoring",
|
||||
SkillType::Alchemy => "alchemy",
|
||||
};
|
||||
|
||||
let tool_str = match harvestable.tool {
|
||||
Tool::None => "none",
|
||||
Tool::Pickaxe => "pickaxe",
|
||||
Tool::Hatchet => "hatchet",
|
||||
Tool::Scythe => "scythe",
|
||||
Tool::Hammer => "hammer",
|
||||
Tool::Shears => "shears",
|
||||
Tool::FishingRod => "fishingrod",
|
||||
};
|
||||
|
||||
// Insert harvestable
|
||||
diesel::insert_into(harvestables::table)
|
||||
.values((
|
||||
harvestables::id.eq(harvestable.typeid),
|
||||
harvestables::name.eq(&harvestable.name),
|
||||
harvestables::description.eq(&harvestable.desc),
|
||||
harvestables::comment.eq(&harvestable.comment),
|
||||
harvestables::level.eq(harvestable.level),
|
||||
harvestables::skill.eq(skill_str),
|
||||
harvestables::tool.eq(tool_str),
|
||||
harvestables::min_health.eq(harvestable.min_health),
|
||||
harvestables::max_health.eq(harvestable.max_health),
|
||||
harvestables::harvesttime.eq(harvestable.harvesttime),
|
||||
harvestables::hittime.eq(harvestable.hittime),
|
||||
harvestables::respawntime.eq(harvestable.respawntime),
|
||||
))
|
||||
.execute(conn)?;
|
||||
|
||||
// Insert drops
|
||||
for drop in &harvestable.drops {
|
||||
// Try to insert, but skip if foreign key constraint fails (item doesn't exist)
|
||||
let insert_result = diesel::insert_into(harvestable_drops::table)
|
||||
.values((
|
||||
harvestable_drops::harvestable_id.eq(harvestable.typeid),
|
||||
harvestable_drops::item_id.eq(drop.id),
|
||||
harvestable_drops::minamount.eq(drop.minamount),
|
||||
harvestable_drops::maxamount.eq(drop.maxamount),
|
||||
harvestable_drops::droprate.eq(drop.droprate),
|
||||
harvestable_drops::droprateboost.eq(drop.droprateboost),
|
||||
harvestable_drops::amountboost.eq(drop.amountboost),
|
||||
harvestable_drops::comment.eq(&drop.comment),
|
||||
))
|
||||
.execute(conn);
|
||||
|
||||
// Log warning if insert failed but continue
|
||||
if let Err(e) = insert_result {
|
||||
eprintln!("Warning: Failed to insert drop for harvestable {} (item {}): {}",
|
||||
harvestable.typeid, drop.id, e);
|
||||
}
|
||||
}
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all harvestables from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::{harvestables, harvestable_drops};
|
||||
use crate::types::{Harvestable, HarvestableDrop, SkillType, Tool};
|
||||
use diesel::prelude::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct HarvestableRecord {
|
||||
id: i32,
|
||||
name: String,
|
||||
description: String,
|
||||
comment: String,
|
||||
level: i32,
|
||||
skill: String,
|
||||
tool: String,
|
||||
min_health: i32,
|
||||
max_health: i32,
|
||||
harvesttime: i32,
|
||||
hittime: i32,
|
||||
respawntime: i32,
|
||||
}
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct HarvestableDropRecord {
|
||||
id: Option<i32>,
|
||||
harvestable_id: i32,
|
||||
item_id: i32,
|
||||
minamount: i32,
|
||||
maxamount: i32,
|
||||
droprate: i32,
|
||||
droprateboost: i32,
|
||||
amountboost: i32,
|
||||
comment: String,
|
||||
}
|
||||
|
||||
let harv_records = harvestables::table.load::<HarvestableRecord>(conn)?;
|
||||
let drop_records = harvestable_drops::table.load::<HarvestableDropRecord>(conn)?;
|
||||
|
||||
let mut loaded_harvestables = Vec::new();
|
||||
for record in harv_records {
|
||||
let mut harvestable = Harvestable {
|
||||
typeid: record.id,
|
||||
name: record.name,
|
||||
actionname: String::new(),
|
||||
desc: record.description,
|
||||
comment: record.comment,
|
||||
level: record.level,
|
||||
skill: record.skill.parse().unwrap_or(SkillType::None),
|
||||
tool: record.tool.parse().unwrap_or(Tool::None),
|
||||
min_health: record.min_health,
|
||||
max_health: record.max_health,
|
||||
harvesttime: record.harvesttime,
|
||||
hittime: record.hittime,
|
||||
respawntime: record.respawntime,
|
||||
harvestsfx: String::new(),
|
||||
endsfx: String::new(),
|
||||
receiveitemsfx: String::new(),
|
||||
animation: String::new(),
|
||||
takehitanimation: String::new(),
|
||||
endgfx: String::new(),
|
||||
tree: false,
|
||||
hidemilestone: false,
|
||||
nohighlight: false,
|
||||
hideminimap: false,
|
||||
noleftclickinteract: false,
|
||||
interactdistance: String::new(),
|
||||
drops: Vec::new(),
|
||||
};
|
||||
|
||||
// Add drops for this harvestable
|
||||
for drop_rec in &drop_records {
|
||||
if drop_rec.harvestable_id == record.id {
|
||||
harvestable.drops.push(HarvestableDrop {
|
||||
id: drop_rec.item_id,
|
||||
minamount: drop_rec.minamount,
|
||||
maxamount: drop_rec.maxamount,
|
||||
droprate: drop_rec.droprate,
|
||||
droprateboost: drop_rec.droprateboost,
|
||||
amountboost: drop_rec.amountboost,
|
||||
checks: String::new(),
|
||||
comment: drop_rec.comment.clone(),
|
||||
dontconsumehealth: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
loaded_harvestables.push(harvestable);
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_harvestables(loaded_harvestables);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for HarvestableDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
655
cursebreaker-parser/src/databases/icon_database.rs
Normal file
655
cursebreaker-parser/src/databases/icon_database.rs
Normal file
@@ -0,0 +1,655 @@
|
||||
use crate::types::{
|
||||
NewAbilityIcon, NewBuffIcon, NewTraitIcon, NewPlayerHouseIcon, NewStatIcon,
|
||||
NewAchievementIcon, NewGeneralIcon
|
||||
};
|
||||
use crate::image_processor::ImageProcessor;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::fs;
|
||||
use thiserror::Error;
|
||||
use log::{info, warn};
|
||||
|
||||
/// Errors produced while loading icons into the database.
#[derive(Debug, Error)]
pub enum IconDatabaseError {
    /// A diesel query or insert failed.
    #[error("Database error: {0}")]
    DatabaseError(#[from] diesel::result::Error),

    /// An image file could not be decoded.
    #[error("Image load error: {0}")]
    ImageLoadError(#[from] image::ImageError),

    /// A filesystem operation (directory scan, file read) failed.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    /// Opening the SQLite connection failed.
    #[error("Connection pool error: {0}")]
    ConnectionError(String),
}
|
||||
|
||||
/// Statistics for icon loading: per-category counts plus the total
/// encoded size of all icons written to the database.
#[derive(Debug, Default)]
pub struct IconStats {
    // Number of icons stored per category.
    pub abilities: usize,
    pub buffs: usize,
    pub traits: usize,
    pub player_houses: usize,
    pub stat_icons: usize,
    pub achievement_icons: usize,
    pub general_icons: usize,
    // Total size of all encoded WebP data, in bytes.
    pub total_bytes: usize,
}
|
||||
|
||||
impl IconStats {
|
||||
pub fn total_icons(&self) -> usize {
|
||||
self.abilities + self.buffs + self.traits + self.player_houses
|
||||
+ self.stat_icons + self.achievement_icons + self.general_icons
|
||||
}
|
||||
}
|
||||
|
||||
/// Database for managing game icons.
///
/// Holds only the SQLite URL; a fresh connection is opened per load
/// operation rather than being pooled.
pub struct IconDatabase {
    // SQLite connection string (file path or URL).
    database_url: String,
}
|
||||
|
||||
impl IconDatabase {
|
||||
/// Create new database connection
|
||||
pub fn new(database_url: String) -> Self {
|
||||
Self { database_url }
|
||||
}
|
||||
|
||||
/// Establish database connection
|
||||
fn establish_connection(&self) -> Result<SqliteConnection, IconDatabaseError> {
|
||||
SqliteConnection::establish(&self.database_url)
|
||||
.map_err(|e| IconDatabaseError::ConnectionError(e.to_string()))
|
||||
}
|
||||
|
||||
/// Load all icons from the CBAssets directory
|
||||
pub fn load_all_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
cb_assets_path: P,
|
||||
) -> Result<IconStats, IconDatabaseError> {
|
||||
let base = cb_assets_path.as_ref();
|
||||
let textures = base.join("Data/Textures");
|
||||
let mut stats = IconStats::default();
|
||||
|
||||
info!("Loading ability icons...");
|
||||
stats.abilities = self.load_ability_icons(
|
||||
&textures.join("Abilities"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading buff icons...");
|
||||
stats.buffs = self.load_buff_icons(
|
||||
&textures.join("Buffs"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading trait icons...");
|
||||
stats.traits = self.load_trait_icons(
|
||||
&textures.join("Traits"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading player house icons...");
|
||||
stats.player_houses = self.load_player_house_icons(
|
||||
&textures.join("PlayerHouses/Houses"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading stat icons...");
|
||||
stats.stat_icons = self.load_stat_icons(
|
||||
&textures.join("StatIcons"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading achievement icons...");
|
||||
stats.achievement_icons = self.load_achievement_icons(
|
||||
&textures.join("Achievements/Icons"),
|
||||
&mut stats.total_bytes,
|
||||
)?;
|
||||
|
||||
info!("Loading general icons...");
|
||||
stats.general_icons = self.load_general_icons(&textures, &mut stats.total_bytes)?;
|
||||
|
||||
Ok(stats)
|
||||
}
|
||||
|
||||
/// Load ability icons from a directory
|
||||
fn load_ability_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::ability_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewAbilityIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(ability_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} ability icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load buff icons from a directory
|
||||
fn load_buff_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::buff_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewBuffIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(buff_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} buff icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load trait icons from a directory
|
||||
fn load_trait_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::trait_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewTraitIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(trait_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} trait icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load player house icons from a directory
|
||||
fn load_player_house_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::player_house_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewPlayerHouseIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(player_house_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} player house icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load stat icons from a directory
|
||||
fn load_stat_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::stat_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewStatIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(stat_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} stat icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load achievement icons, filtering out files ending with _0
|
||||
fn load_achievement_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::achievement_icons;
|
||||
|
||||
let dir_path = dir.as_ref();
|
||||
if !dir_path.exists() {
|
||||
warn!("Directory does not exist: {}", dir_path.display());
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let mut count = 0;
|
||||
|
||||
let image_files = self.find_image_files(dir_path)?;
|
||||
|
||||
for path in image_files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip files ending with _0
|
||||
if name.ends_with("_0") {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Load and encode as lossless WebP
|
||||
let img = image::open(&path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
|
||||
*total_bytes += webp_data.len();
|
||||
|
||||
let new_icon = NewAchievementIcon {
|
||||
name: &name,
|
||||
icon: &webp_data,
|
||||
};
|
||||
|
||||
diesel::replace_into(achievement_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
info!(" Loaded {} achievement icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load general icons with multiple sizes
|
||||
fn load_general_icons<P: AsRef<Path>>(
|
||||
&self,
|
||||
textures_dir: P,
|
||||
total_bytes: &mut usize,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
let textures = textures_dir.as_ref();
|
||||
let mut count = 0;
|
||||
|
||||
// Collect all general icon paths
|
||||
let mut icon_paths: Vec<(String, PathBuf)> = Vec::new();
|
||||
|
||||
// Directory-based icons
|
||||
let directories = [
|
||||
("Achievements/Trophies", true), // PNG only
|
||||
("BottomRightTabs", false),
|
||||
("MinimapIcons", false),
|
||||
("Notifications", false),
|
||||
("OverheadIcons", false),
|
||||
("Skills", false),
|
||||
];
|
||||
|
||||
for (subdir, png_only) in directories {
|
||||
let dir = textures.join(subdir);
|
||||
if dir.exists() {
|
||||
let files = if png_only {
|
||||
self.find_png_files(&dir)?
|
||||
} else {
|
||||
self.find_image_files(&dir)?
|
||||
};
|
||||
|
||||
for path in files {
|
||||
let name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.map(|s| format!("{}_{}", subdir.replace('/', "_"), s))
|
||||
.unwrap_or_default();
|
||||
|
||||
if !name.is_empty() {
|
||||
icon_paths.push((name, path));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Individual file icons
|
||||
let individual_files = [
|
||||
("Common/Book.png", "Common_Book"),
|
||||
("Common/Hourglass.png", "Common_Hourglass"),
|
||||
("Common/Mana.png", "Common_Mana"),
|
||||
("Common/QuestCompleteTrophy.png", "Common_QuestCompleteTrophy"),
|
||||
("Common/Tick.png", "Common_Tick"),
|
||||
("Common/TutorialTip.png", "Common_TutorialTip"),
|
||||
("Common/Zoom_Minus.png", "Common_Zoom_Minus"),
|
||||
("Common/Zoom_Plus.png", "Common_Zoom_Plus"),
|
||||
("Inventory/Banknote.png", "Inventory_Banknote"),
|
||||
("Minimap/ShowCoordinates.png", "Minimap_ShowCoordinates"),
|
||||
("SplashScreens/Olipa.png", "SplashScreens_Olipa"),
|
||||
("ItemIcons/131.png", "Coins"),
|
||||
("118.png", "Map"),
|
||||
("124.png", "Entrance"),
|
||||
("Bug.png", "Bug"),
|
||||
("Checkmark.png", "Checkmark"),
|
||||
];
|
||||
|
||||
for (file, name) in individual_files {
|
||||
let path = textures.join(file);
|
||||
if path.exists() {
|
||||
icon_paths.push((name.to_string(), path));
|
||||
} else {
|
||||
warn!("File not found: {}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
// Process all collected icons
|
||||
let mut conn = self.establish_connection()?;
|
||||
|
||||
for (name, path) in icon_paths {
|
||||
if let Ok(bytes) = self.process_general_icon(&path, &name, &mut conn) {
|
||||
*total_bytes += bytes;
|
||||
count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
info!(" Loaded {} general icons", count);
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Process a single general icon at multiple sizes
|
||||
fn process_general_icon(
|
||||
&self,
|
||||
path: &Path,
|
||||
name: &str,
|
||||
conn: &mut SqliteConnection,
|
||||
) -> Result<usize, IconDatabaseError> {
|
||||
use crate::schema::general_icons;
|
||||
|
||||
// Load image
|
||||
let img = image::open(path)?;
|
||||
let (width, height) = (img.width(), img.height());
|
||||
let rgba = img.to_rgba8();
|
||||
|
||||
let mut total_bytes = 0;
|
||||
|
||||
// Original size (lossless)
|
||||
let icon_original = ImageProcessor::encode_webp_lossless(&rgba)
|
||||
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
|
||||
total_bytes += icon_original.len();
|
||||
|
||||
// Generate smaller sizes only if image is large enough (no upscaling)
|
||||
let processor = ImageProcessor::new(90.0);
|
||||
|
||||
let icon_256 = if width >= 256 && height >= 256 {
|
||||
Some(self.resize_and_encode(&img, 256, &processor)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if let Some(ref data) = icon_256 {
|
||||
total_bytes += data.len();
|
||||
}
|
||||
|
||||
let icon_64 = if width >= 64 && height >= 64 {
|
||||
Some(self.resize_and_encode(&img, 64, &processor)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if let Some(ref data) = icon_64 {
|
||||
total_bytes += data.len();
|
||||
}
|
||||
|
||||
let icon_32 = if width >= 32 && height >= 32 {
|
||||
Some(self.resize_and_encode(&img, 32, &processor)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if let Some(ref data) = icon_32 {
|
||||
total_bytes += data.len();
|
||||
}
|
||||
|
||||
let new_icon = NewGeneralIcon {
|
||||
name,
|
||||
original_width: width as i32,
|
||||
original_height: height as i32,
|
||||
icon_original: Some(&icon_original),
|
||||
icon_256: icon_256.as_deref(),
|
||||
icon_64: icon_64.as_deref(),
|
||||
icon_32: icon_32.as_deref(),
|
||||
};
|
||||
|
||||
diesel::replace_into(general_icons::table)
|
||||
.values(&new_icon)
|
||||
.execute(conn)?;
|
||||
|
||||
Ok(total_bytes)
|
||||
}
|
||||
|
||||
/// Resize image and encode to WebP
|
||||
fn resize_and_encode(
|
||||
&self,
|
||||
img: &image::DynamicImage,
|
||||
size: u32,
|
||||
_processor: &ImageProcessor,
|
||||
) -> Result<Vec<u8>, IconDatabaseError> {
|
||||
let resized = img.resize_exact(size, size, image::imageops::FilterType::Lanczos3);
|
||||
let rgba = resized.to_rgba8();
|
||||
|
||||
// Use lossy encoding for smaller sizes
|
||||
let encoder = webp::Encoder::from_rgba(rgba.as_raw(), size, size);
|
||||
let webp_data = encoder.encode(90.0);
|
||||
|
||||
Ok(webp_data.to_vec())
|
||||
}
|
||||
|
||||
/// Find all image files (PNG, JPG, etc.) in a directory
|
||||
fn find_image_files<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
) -> Result<Vec<PathBuf>, IconDatabaseError> {
|
||||
let mut files = Vec::new();
|
||||
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_file() {
|
||||
if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
|
||||
let ext_lower = ext.to_lowercase();
|
||||
if ext_lower == "png" || ext_lower == "jpg" || ext_lower == "jpeg" {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
files.sort();
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
/// Find only PNG files in a directory
|
||||
fn find_png_files<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
) -> Result<Vec<PathBuf>, IconDatabaseError> {
|
||||
let mut files = Vec::new();
|
||||
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_file() {
|
||||
if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
|
||||
let ext_lower = ext.to_lowercase();
|
||||
if ext_lower == "png" {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
files.sort();
|
||||
Ok(files)
|
||||
}
|
||||
}
|
||||
613
cursebreaker-parser/src/databases/item_database.rs
Normal file
613
cursebreaker-parser/src/databases/item_database.rs
Normal file
@@ -0,0 +1,613 @@
|
||||
use crate::image_processor::ImageProcessor;
|
||||
use crate::item_loader::{
|
||||
calculate_prices, generate_banknotes, generate_exceptional_items, load_items_from_directory,
|
||||
};
|
||||
use crate::types::Item;
|
||||
use crate::xml_parsers::{parse_items_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// A database for managing game items loaded from XML files
#[derive(Debug, Clone)]
pub struct ItemDatabase {
    // All items in insertion order; the indexes below point into this Vec.
    items: Vec<Item>,
    // type_id -> index into `items`; a duplicate id overwrites the entry.
    items_by_id: HashMap<i32, usize>,
    // item_name -> indices into `items` (several items may share a name).
    items_by_name: HashMap<String, Vec<usize>>,
    // type_ids of items for which `Item::is_stackable()` is true.
    stackable_item_ids: HashSet<i32>,
    // type_ids of items for which `Item::is_storage_item()` is true.
    storage_item_ids: HashSet<i32>,
}
|
||||
|
||||
impl ItemDatabase {
|
||||
/// Create a new empty ItemDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
items: Vec::new(),
|
||||
items_by_id: HashMap::new(),
|
||||
items_by_name: HashMap::new(),
|
||||
stackable_item_ids: HashSet::new(),
|
||||
storage_item_ids: HashSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load items from an XML file (basic loading without advanced features)
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let items = parse_items_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_items(items);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Load items from a directory with full support for:
|
||||
/// - Multiple XML files
|
||||
/// - Stats generation
|
||||
/// - Crafting recipe generation
|
||||
/// - Exceptional items
|
||||
/// - Banknotes
|
||||
/// - Price calculation
|
||||
pub fn load_from_directory<P: AsRef<Path>>(dir: P) -> Result<Self, XmlParseError> {
|
||||
let mut items = load_items_from_directory(dir)?;
|
||||
|
||||
// Generate exceptional items
|
||||
let exceptional = generate_exceptional_items(&items);
|
||||
items.extend(exceptional);
|
||||
|
||||
// Generate banknotes
|
||||
let banknotes = generate_banknotes(&items);
|
||||
items.extend(banknotes);
|
||||
|
||||
// Calculate prices
|
||||
calculate_prices(&mut items);
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_items(items);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add items to the database
|
||||
pub fn add_items(&mut self, items: Vec<Item>) {
|
||||
for item in items {
|
||||
let index = self.items.len();
|
||||
self.items_by_id.insert(item.type_id, index);
|
||||
|
||||
// Add to name index (can have multiple items with same name)
|
||||
self.items_by_name
|
||||
.entry(item.item_name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
// Track stackable items
|
||||
if item.is_stackable() {
|
||||
self.stackable_item_ids.insert(item.type_id);
|
||||
}
|
||||
|
||||
// Track storage items
|
||||
if item.is_storage_item() {
|
||||
self.storage_item_ids.insert(item.type_id);
|
||||
}
|
||||
|
||||
self.items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if an item is stackable by ID (O(1) set lookup; false for
/// unknown ids).
pub fn is_stackable(&self, type_id: i32) -> bool {
    self.stackable_item_ids.contains(&type_id)
}
|
||||
|
||||
/// Check if an item is a storage item by ID (O(1) set lookup; false for
/// unknown ids).
pub fn is_storage_item(&self, type_id: i32) -> bool {
    self.storage_item_ids.contains(&type_id)
}
|
||||
|
||||
/// Get an item by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&Item> {
|
||||
self.items_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.items.get(index))
|
||||
}
|
||||
|
||||
/// Get items by name (returns all items with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&Item> {
|
||||
self.items_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.items.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all items as a shared slice, in insertion order.
pub fn all_items(&self) -> &[Item] {
    &self.items
}
|
||||
|
||||
/// Get items by category
|
||||
pub fn get_by_category(&self, category: &str) -> Vec<&Item> {
|
||||
use crate::types::ItemCategory;
|
||||
use std::str::FromStr;
|
||||
|
||||
if let Ok(cat) = ItemCategory::from_str(category) {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.has_category(cat))
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get items by slot/item type
|
||||
pub fn get_by_slot(&self, slot: &str) -> Vec<&Item> {
|
||||
use crate::types::ItemType;
|
||||
use std::str::FromStr;
|
||||
|
||||
if let Ok(item_type) = ItemType::from_str(slot) {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.item_type == item_type)
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get items by skill requirement
|
||||
pub fn get_by_skill(&self, skill: &str) -> Vec<&Item> {
|
||||
use crate::types::SkillType;
|
||||
use std::str::FromStr;
|
||||
|
||||
if let Ok(skill_type) = SkillType::from_str(skill) {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.skill == skill_type)
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get items by tool type
|
||||
pub fn get_by_tool(&self, tool: &str) -> Vec<&Item> {
|
||||
use crate::types::Tool;
|
||||
use std::str::FromStr;
|
||||
|
||||
if let Ok(tool_type) = Tool::from_str(tool) {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.tool == tool_type)
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get number of items in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.items.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.items.is_empty()
|
||||
}
|
||||
|
||||
/// Serialize items to JSON for SQL storage
|
||||
#[cfg(feature = "diesel")]
|
||||
pub fn to_json(&self) -> Result<String, serde_json::Error> {
|
||||
serde_json::to_string(&self.items)
|
||||
}
|
||||
|
||||
/// Prepare items for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
|
||||
self.items
|
||||
.iter()
|
||||
.map(|item| {
|
||||
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
|
||||
(item.type_id, item.item_name.clone(), json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all items to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::{items, crafting_recipes, crafting_recipe_items};
|
||||
use diesel::replace_into;
|
||||
|
||||
conn.transaction::<_, diesel::result::Error, _>(|conn| {
|
||||
let mut count = 0;
|
||||
|
||||
for item in &self.items {
|
||||
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
|
||||
|
||||
// Insert/replace item with all columns
|
||||
replace_into(items::table)
|
||||
.values((
|
||||
items::id.eq(item.type_id),
|
||||
items::name.eq(&item.item_name),
|
||||
items::data.eq(json),
|
||||
items::item_type.eq(item.item_type.to_string()),
|
||||
items::level.eq(item.level),
|
||||
items::price.eq(item.price),
|
||||
items::max_stack.eq(item.max_stack),
|
||||
items::storage_size.eq(item.storage_size),
|
||||
items::skill.eq(match item.skill {
|
||||
crate::types::SkillType::None => "none",
|
||||
crate::types::SkillType::Swordsmanship => "swordsmanship",
|
||||
crate::types::SkillType::Archery => "archery",
|
||||
crate::types::SkillType::Magic => "magic",
|
||||
crate::types::SkillType::Defence => "defence",
|
||||
crate::types::SkillType::Mining => "mining",
|
||||
crate::types::SkillType::Woodcutting => "woodcutting",
|
||||
crate::types::SkillType::Fishing => "fishing",
|
||||
crate::types::SkillType::Cooking => "cooking",
|
||||
crate::types::SkillType::Carpentry => "carpentry",
|
||||
crate::types::SkillType::Blacksmithy => "blacksmithy",
|
||||
crate::types::SkillType::Tailoring => "tailoring",
|
||||
crate::types::SkillType::Alchemy => "alchemy",
|
||||
}),
|
||||
items::tool.eq(match item.tool {
|
||||
crate::types::Tool::None => "none",
|
||||
crate::types::Tool::Pickaxe => "pickaxe",
|
||||
crate::types::Tool::Hatchet => "hatchet",
|
||||
crate::types::Tool::Scythe => "scythe",
|
||||
crate::types::Tool::Hammer => "hammer",
|
||||
crate::types::Tool::Shears => "shears",
|
||||
crate::types::Tool::FishingRod => "fishingrod",
|
||||
}),
|
||||
items::description.eq(&item.description),
|
||||
items::two_handed.eq(item.two_handed as i32),
|
||||
items::undroppable.eq(item.undroppable as i32),
|
||||
items::undroppable_on_death.eq(item.undroppable_on_death as i32),
|
||||
items::unequip_destroy.eq(item.unequip_destroy as i32),
|
||||
items::generate_icon.eq(item.generate_icon as i32),
|
||||
items::hide_milestone.eq(item.hide_milestone as i32),
|
||||
items::cannot_craft_exceptional.eq(item.cannot_craft_exceptional as i32),
|
||||
items::storage_all_items.eq(item.storage_all_items as i32),
|
||||
items::ability_id.eq(item.ability_id),
|
||||
items::special_ability.eq(item.special_ability),
|
||||
items::learn_ability_id.eq(item.learn_ability_id),
|
||||
items::book_id.eq(item.book_id),
|
||||
items::swap_item.eq(item.swap_item),
|
||||
))
|
||||
.execute(conn)?;
|
||||
|
||||
// Save crafting recipes for this item
|
||||
for recipe in &item.crafting_recipes {
|
||||
use diesel::prelude::*;
|
||||
|
||||
// Insert recipe
|
||||
diesel::insert_into(crafting_recipes::table)
|
||||
.values((
|
||||
crafting_recipes::product_item_id.eq(item.type_id),
|
||||
crafting_recipes::skill.eq(match recipe.skill {
|
||||
crate::types::SkillType::None => "none",
|
||||
crate::types::SkillType::Swordsmanship => "swordsmanship",
|
||||
crate::types::SkillType::Archery => "archery",
|
||||
crate::types::SkillType::Magic => "magic",
|
||||
crate::types::SkillType::Defence => "defence",
|
||||
crate::types::SkillType::Mining => "mining",
|
||||
crate::types::SkillType::Woodcutting => "woodcutting",
|
||||
crate::types::SkillType::Fishing => "fishing",
|
||||
crate::types::SkillType::Cooking => "cooking",
|
||||
crate::types::SkillType::Carpentry => "carpentry",
|
||||
crate::types::SkillType::Blacksmithy => "blacksmithy",
|
||||
crate::types::SkillType::Tailoring => "tailoring",
|
||||
crate::types::SkillType::Alchemy => "alchemy",
|
||||
}),
|
||||
crafting_recipes::level.eq(recipe.level),
|
||||
crafting_recipes::workbench_id.eq(recipe.workbench_id),
|
||||
crafting_recipes::xp.eq(recipe.xp),
|
||||
crafting_recipes::unlocked_by_default.eq(recipe.unlocked_by_default as i32),
|
||||
crafting_recipes::checks.eq(recipe.checks.as_ref()),
|
||||
))
|
||||
.execute(conn)?;
|
||||
|
||||
// Get the recipe_id we just inserted
|
||||
let recipe_id: i32 = diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
|
||||
"last_insert_rowid()"
|
||||
))
|
||||
.get_result(conn)?;
|
||||
|
||||
// Insert recipe items (ingredients)
|
||||
for ingredient in &recipe.items {
|
||||
diesel::insert_into(crafting_recipe_items::table)
|
||||
.values((
|
||||
crafting_recipe_items::recipe_id.eq(recipe_id),
|
||||
crafting_recipe_items::item_id.eq(ingredient.item_id),
|
||||
crafting_recipe_items::amount.eq(ingredient.amount),
|
||||
))
|
||||
.execute(conn)?;
|
||||
}
|
||||
}
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
})
|
||||
}
|
||||
|
||||
/// Save all items to SQLite database with icon processing
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `conn` - Database connection
|
||||
/// * `icon_path` - Path to the ItemIcons directory (e.g., "CBAssets/Data/Textures/ItemIcons")
|
||||
///
|
||||
/// # Returns
|
||||
/// Tuple of (items_saved, images_processed)
|
||||
pub fn save_to_db_with_images<P: AsRef<Path>>(
|
||||
&self,
|
||||
conn: &mut SqliteConnection,
|
||||
icon_path: P,
|
||||
) -> Result<(usize, usize), diesel::result::Error> {
|
||||
use crate::schema::items;
|
||||
use diesel::replace_into;
|
||||
|
||||
let icon_base_path = icon_path.as_ref();
|
||||
let processor = ImageProcessor::new(85.0); // 85% WebP quality
|
||||
let mut images_processed = 0;
|
||||
|
||||
conn.transaction::<_, diesel::result::Error, _>(|conn| {
|
||||
let mut count = 0;
|
||||
|
||||
for item in &self.items {
|
||||
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
|
||||
|
||||
// Process item icon if it exists
|
||||
let (icon_large, icon_medium, icon_small) =
|
||||
Self::process_item_icon(&processor, icon_base_path, item.type_id);
|
||||
|
||||
if icon_large.is_some() {
|
||||
images_processed += 1;
|
||||
}
|
||||
|
||||
// Insert/replace item with all columns including images
|
||||
replace_into(items::table)
|
||||
.values((
|
||||
items::id.eq(item.type_id),
|
||||
items::name.eq(&item.item_name),
|
||||
items::data.eq(json),
|
||||
items::item_type.eq(item.item_type.to_string()),
|
||||
items::level.eq(item.level),
|
||||
items::price.eq(item.price),
|
||||
items::max_stack.eq(item.max_stack),
|
||||
items::storage_size.eq(item.storage_size),
|
||||
items::skill.eq(match item.skill {
|
||||
crate::types::SkillType::None => "none",
|
||||
crate::types::SkillType::Swordsmanship => "swordsmanship",
|
||||
crate::types::SkillType::Archery => "archery",
|
||||
crate::types::SkillType::Magic => "magic",
|
||||
crate::types::SkillType::Defence => "defence",
|
||||
crate::types::SkillType::Mining => "mining",
|
||||
crate::types::SkillType::Woodcutting => "woodcutting",
|
||||
crate::types::SkillType::Fishing => "fishing",
|
||||
crate::types::SkillType::Cooking => "cooking",
|
||||
crate::types::SkillType::Carpentry => "carpentry",
|
||||
crate::types::SkillType::Blacksmithy => "blacksmithy",
|
||||
crate::types::SkillType::Tailoring => "tailoring",
|
||||
crate::types::SkillType::Alchemy => "alchemy",
|
||||
}),
|
||||
items::tool.eq(match item.tool {
|
||||
crate::types::Tool::None => "none",
|
||||
crate::types::Tool::Pickaxe => "pickaxe",
|
||||
crate::types::Tool::Hatchet => "hatchet",
|
||||
crate::types::Tool::Scythe => "scythe",
|
||||
crate::types::Tool::Hammer => "hammer",
|
||||
crate::types::Tool::Shears => "shears",
|
||||
crate::types::Tool::FishingRod => "fishingrod",
|
||||
}),
|
||||
items::description.eq(&item.description),
|
||||
items::two_handed.eq(item.two_handed as i32),
|
||||
items::undroppable.eq(item.undroppable as i32),
|
||||
items::undroppable_on_death.eq(item.undroppable_on_death as i32),
|
||||
items::unequip_destroy.eq(item.unequip_destroy as i32),
|
||||
items::generate_icon.eq(item.generate_icon as i32),
|
||||
items::hide_milestone.eq(item.hide_milestone as i32),
|
||||
items::cannot_craft_exceptional.eq(item.cannot_craft_exceptional as i32),
|
||||
items::storage_all_items.eq(item.storage_all_items as i32),
|
||||
items::ability_id.eq(item.ability_id),
|
||||
items::special_ability.eq(item.special_ability),
|
||||
items::learn_ability_id.eq(item.learn_ability_id),
|
||||
items::book_id.eq(item.book_id),
|
||||
items::swap_item.eq(item.swap_item),
|
||||
items::icon_large.eq(icon_large.as_ref()),
|
||||
items::icon_medium.eq(icon_medium.as_ref()),
|
||||
items::icon_small.eq(icon_small.as_ref()),
|
||||
))
|
||||
.execute(conn)?;
|
||||
|
||||
// Save crafting recipes for this item (same as before)
|
||||
for recipe in &item.crafting_recipes {
|
||||
use diesel::prelude::*;
|
||||
|
||||
diesel::insert_into(crate::schema::crafting_recipes::table)
|
||||
.values((
|
||||
crate::schema::crafting_recipes::product_item_id.eq(item.type_id),
|
||||
crate::schema::crafting_recipes::skill.eq(match recipe.skill {
|
||||
crate::types::SkillType::None => "none",
|
||||
crate::types::SkillType::Swordsmanship => "swordsmanship",
|
||||
crate::types::SkillType::Archery => "archery",
|
||||
crate::types::SkillType::Magic => "magic",
|
||||
crate::types::SkillType::Defence => "defence",
|
||||
crate::types::SkillType::Mining => "mining",
|
||||
crate::types::SkillType::Woodcutting => "woodcutting",
|
||||
crate::types::SkillType::Fishing => "fishing",
|
||||
crate::types::SkillType::Cooking => "cooking",
|
||||
crate::types::SkillType::Carpentry => "carpentry",
|
||||
crate::types::SkillType::Blacksmithy => "blacksmithy",
|
||||
crate::types::SkillType::Tailoring => "tailoring",
|
||||
crate::types::SkillType::Alchemy => "alchemy",
|
||||
}),
|
||||
crate::schema::crafting_recipes::level.eq(recipe.level),
|
||||
crate::schema::crafting_recipes::workbench_id.eq(recipe.workbench_id),
|
||||
crate::schema::crafting_recipes::xp.eq(recipe.xp),
|
||||
crate::schema::crafting_recipes::unlocked_by_default.eq(recipe.unlocked_by_default as i32),
|
||||
crate::schema::crafting_recipes::checks.eq(recipe.checks.as_ref()),
|
||||
))
|
||||
.execute(conn)?;
|
||||
|
||||
let recipe_id: i32 = diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
|
||||
"last_insert_rowid()"
|
||||
))
|
||||
.get_result(conn)?;
|
||||
|
||||
for ingredient in &recipe.items {
|
||||
diesel::insert_into(crate::schema::crafting_recipe_items::table)
|
||||
.values((
|
||||
crate::schema::crafting_recipe_items::recipe_id.eq(recipe_id),
|
||||
crate::schema::crafting_recipe_items::item_id.eq(ingredient.item_id),
|
||||
crate::schema::crafting_recipe_items::amount.eq(ingredient.amount),
|
||||
))
|
||||
.execute(conn)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Save item stats
|
||||
for stat in &item.stats {
|
||||
let stat_type_str = match stat.stat_type {
|
||||
crate::types::StatType::None => "none",
|
||||
crate::types::StatType::Health => "health",
|
||||
crate::types::StatType::Mana => "mana",
|
||||
crate::types::StatType::HealthRegen => "health_regen",
|
||||
crate::types::StatType::ManaRegen => "mana_regen",
|
||||
crate::types::StatType::DamagePhysical => "damage_physical",
|
||||
crate::types::StatType::DamageMagical => "damage_magical",
|
||||
crate::types::StatType::DamageRanged => "damage_ranged",
|
||||
crate::types::StatType::AccuracyPhysical => "accuracy_physical",
|
||||
crate::types::StatType::AccuracyMagical => "accuracy_magical",
|
||||
crate::types::StatType::AccuracyRanged => "accuracy_ranged",
|
||||
crate::types::StatType::ResistancePhysical => "resistance_physical",
|
||||
crate::types::StatType::ResistanceMagical => "resistance_magical",
|
||||
crate::types::StatType::ResistanceRanged => "resistance_ranged",
|
||||
crate::types::StatType::Critical => "critical",
|
||||
crate::types::StatType::Healing => "healing",
|
||||
crate::types::StatType::MovementSpeed => "movement_speed",
|
||||
crate::types::StatType::DamageVsBeasts => "damage_vs_beasts",
|
||||
crate::types::StatType::DamageVsUndead => "damage_vs_undead",
|
||||
crate::types::StatType::CritterSlaying => "critter_slaying",
|
||||
};
|
||||
|
||||
diesel::insert_into(crate::schema::item_stats::table)
|
||||
.values((
|
||||
crate::schema::item_stats::item_id.eq(item.type_id),
|
||||
crate::schema::item_stats::stat_type.eq(stat_type_str),
|
||||
crate::schema::item_stats::value.eq(stat.value),
|
||||
))
|
||||
.execute(conn)?;
|
||||
}
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok((count, images_processed))
|
||||
})
|
||||
}
|
||||
|
||||
/// Helper function to process a single item icon
|
||||
/// Returns (large, medium, small) WebP blobs
|
||||
fn process_item_icon(
|
||||
processor: &ImageProcessor,
|
||||
icon_base_path: &Path,
|
||||
item_id: i32,
|
||||
) -> (Option<Vec<u8>>, Option<Vec<u8>>, Option<Vec<u8>>) {
|
||||
// Try both lowercase and uppercase extensions (Linux is case-sensitive)
|
||||
let lowercase = icon_base_path.join(format!("{}.png", item_id));
|
||||
let uppercase = icon_base_path.join(format!("{}.PNG", item_id));
|
||||
|
||||
let icon_file = if lowercase.exists() {
|
||||
lowercase
|
||||
} else if uppercase.exists() {
|
||||
uppercase
|
||||
} else {
|
||||
return (None, None, None);
|
||||
};
|
||||
|
||||
// Process image at 3 sizes: 256, 64, 16
|
||||
match processor.process_image(&icon_file, &[256, 64, 16], None, None) {
|
||||
Ok(processed) => (
|
||||
processed.get(256).cloned(),
|
||||
processed.get(64).cloned(),
|
||||
processed.get(16).cloned(),
|
||||
),
|
||||
Err(e) => {
|
||||
log::warn!("Failed to process icon for item {}: {}", item_id, e);
|
||||
(None, None, None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Load all items from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::items::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
#[allow(dead_code)]
|
||||
struct ItemRecord {
|
||||
id: Option<i32>,
|
||||
name: String,
|
||||
data: String,
|
||||
item_type: String,
|
||||
level: i32,
|
||||
price: i32,
|
||||
max_stack: i32,
|
||||
storage_size: i32,
|
||||
skill: String,
|
||||
tool: String,
|
||||
description: String,
|
||||
two_handed: i32,
|
||||
undroppable: i32,
|
||||
undroppable_on_death: i32,
|
||||
unequip_destroy: i32,
|
||||
generate_icon: i32,
|
||||
hide_milestone: i32,
|
||||
cannot_craft_exceptional: i32,
|
||||
storage_all_items: i32,
|
||||
ability_id: i32,
|
||||
special_ability: i32,
|
||||
learn_ability_id: i32,
|
||||
book_id: i32,
|
||||
swap_item: i32,
|
||||
icon_large: Option<Vec<u8>>,
|
||||
icon_medium: Option<Vec<u8>>,
|
||||
icon_small: Option<Vec<u8>>,
|
||||
}
|
||||
|
||||
let records = items.load::<ItemRecord>(conn)?;
|
||||
|
||||
let mut loaded_items = Vec::new();
|
||||
for record in records {
|
||||
// Load from JSON data column (contains complete item info including crafting recipes)
|
||||
if let Ok(item) = serde_json::from_str::<Item>(&record.data) {
|
||||
loaded_items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_items(loaded_items);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ItemDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// A freshly constructed database reports itself empty.
    #[test]
    fn test_item_database_basic() {
        // `mut` removed: the database is never mutated here (unused_mut warning).
        let db = ItemDatabase::new();
        assert!(db.is_empty());
        assert_eq!(db.len(), 0);
    }
}
|
||||
218
cursebreaker-parser/src/databases/loot_database.rs
Normal file
218
cursebreaker-parser/src/databases/loot_database.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use crate::types::{LootTable, LootDrop};
|
||||
use crate::xml_parsers::{parse_loot_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Loot Tables loaded from XML files
///
/// Tables are stored in insertion order; the two maps are secondary indices
/// holding positions into `tables`.
#[derive(Debug, Clone)]
pub struct LootDatabase {
    // All loot tables, in the order they were added.
    tables: Vec<LootTable>,
    // Map NPC ID -> list of table indices that apply to this NPC
    tables_by_npc: HashMap<i32, Vec<usize>>,
    // Map item ID -> list of table indices that drop this item
    tables_by_item: HashMap<i32, Vec<usize>>,
}
|
||||
|
||||
impl LootDatabase {
|
||||
/// Create a new empty LootDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
tables: Vec::new(),
|
||||
tables_by_npc: HashMap::new(),
|
||||
tables_by_item: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load loot tables from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let tables = parse_loot_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_tables(tables);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add loot tables to the database
|
||||
pub fn add_tables(&mut self, tables: Vec<LootTable>) {
|
||||
for table in tables {
|
||||
let index = self.tables.len();
|
||||
|
||||
// Index by NPC IDs
|
||||
for &npc_id in &table.npc_ids {
|
||||
self.tables_by_npc
|
||||
.entry(npc_id)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
}
|
||||
|
||||
// Index by item IDs
|
||||
for drop in &table.drops {
|
||||
self.tables_by_item
|
||||
.entry(drop.item)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
}
|
||||
|
||||
self.tables.push(table);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get all loot tables that apply to a specific NPC ID
|
||||
pub fn get_tables_for_npc(&self, npc_id: i32) -> Vec<&LootTable> {
|
||||
self.tables_by_npc
|
||||
.get(&npc_id)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&idx| self.tables.get(idx))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all loot tables that drop a specific item
|
||||
pub fn get_tables_with_item(&self, item_id: i32) -> Vec<&LootTable> {
|
||||
self.tables_by_item
|
||||
.get(&item_id)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&idx| self.tables.get(idx))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all possible drops for a specific NPC
|
||||
pub fn get_drops_for_npc(&self, npc_id: i32) -> Vec<&LootDrop> {
|
||||
let mut drops = Vec::new();
|
||||
for table in self.get_tables_for_npc(npc_id) {
|
||||
drops.extend(&table.drops);
|
||||
}
|
||||
drops
|
||||
}
|
||||
|
||||
/// Get all NPCs that can drop a specific item
|
||||
pub fn get_npcs_dropping_item(&self, item_id: i32) -> Vec<i32> {
|
||||
let mut npcs = std::collections::HashSet::new();
|
||||
|
||||
if let Some(table_indices) = self.tables_by_item.get(&item_id) {
|
||||
for &idx in table_indices {
|
||||
if let Some(table) = self.tables.get(idx) {
|
||||
npcs.extend(&table.npc_ids);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
npcs.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Get all loot tables
|
||||
pub fn all_tables(&self) -> &[LootTable] {
|
||||
&self.tables
|
||||
}
|
||||
|
||||
/// Get tables with conditional drops (that have checks)
|
||||
pub fn get_conditional_tables(&self) -> Vec<&LootTable> {
|
||||
self.tables
|
||||
.iter()
|
||||
.filter(|t| !t.get_conditional_drops().is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get tables with guaranteed drops (rate = 1)
|
||||
pub fn get_tables_with_guaranteed_drops(&self) -> Vec<&LootTable> {
|
||||
self.tables
|
||||
.iter()
|
||||
.filter(|t| !t.get_guaranteed_drops().is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all unique item IDs that can drop
|
||||
pub fn get_all_droppable_items(&self) -> Vec<i32> {
|
||||
self.tables_by_item.keys().copied().collect()
|
||||
}
|
||||
|
||||
/// Get all unique NPC IDs that have loot tables
|
||||
pub fn get_all_npcs_with_loot(&self) -> Vec<i32> {
|
||||
self.tables_by_npc.keys().copied().collect()
|
||||
}
|
||||
|
||||
/// Get number of loot tables in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.tables.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.tables.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare loot tables for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(String, Option<String>, String)> {
|
||||
self.tables
|
||||
.iter()
|
||||
.map(|table| {
|
||||
let npc_ids_json = serde_json::to_string(&table.npc_ids).unwrap_or_else(|_| "[]".to_string());
|
||||
let json = serde_json::to_string(table).unwrap_or_else(|_| "{}".to_string());
|
||||
(npc_ids_json, table.name.clone(), json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all loot tables to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::loot_tables;
|
||||
|
||||
let mut count = 0;
|
||||
for table in &self.tables {
|
||||
let table_id = serde_json::to_string(&table.npc_ids).unwrap_or_else(|_| "[]".to_string());
|
||||
let json = serde_json::to_string(table).unwrap_or_else(|_| "{}".to_string());
|
||||
let record = (
|
||||
loot_tables::table_id.eq(table_id),
|
||||
loot_tables::npc_id.eq(None::<String>),
|
||||
loot_tables::data.eq(json),
|
||||
);
|
||||
|
||||
diesel::insert_into(loot_tables::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all loot tables from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::loot_tables::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct LootTableRecord {
|
||||
table_id: Option<String>,
|
||||
npc_id: Option<String>,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = loot_tables.load::<LootTableRecord>(conn)?;
|
||||
|
||||
let mut loaded_tables = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(table) = serde_json::from_str::<LootTable>(&record.data) {
|
||||
loaded_tables.push(table);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_tables(loaded_tables);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for LootDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
263
cursebreaker-parser/src/databases/map_database.rs
Normal file
263
cursebreaker-parser/src/databases/map_database.rs
Normal file
@@ -0,0 +1,263 @@
|
||||
use crate::types::Map;
|
||||
use crate::xml_parsers::{parse_maps_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Maps loaded from XML files
///
/// Maps are stored in insertion order; the three maps below are secondary
/// indices holding positions into `maps`.
#[derive(Debug, Clone)]
pub struct MapDatabase {
    // All maps, in the order they were added.
    maps: Vec<Map>,
    // Map scene_id -> map index
    maps_by_scene_id: HashMap<String, usize>,
    // Map name -> list of map indices (multiple maps can have same name)
    maps_by_name: HashMap<String, Vec<usize>>,
    // Map coordinates (x,y) -> map index
    maps_by_coords: HashMap<(i32, i32), usize>,
}
|
||||
|
||||
impl MapDatabase {
|
||||
/// Create a new empty MapDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
maps: Vec::new(),
|
||||
maps_by_scene_id: HashMap::new(),
|
||||
maps_by_name: HashMap::new(),
|
||||
maps_by_coords: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load maps from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let maps = parse_maps_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_maps(maps);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add maps to the database
|
||||
pub fn add_maps(&mut self, maps: Vec<Map>) {
|
||||
for map in maps {
|
||||
let index = self.maps.len();
|
||||
|
||||
// Index by scene ID
|
||||
self.maps_by_scene_id.insert(map.scene_id.clone(), index);
|
||||
|
||||
// Index by name (if it has a name)
|
||||
if !map.name.is_empty() {
|
||||
self.maps_by_name
|
||||
.entry(map.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
}
|
||||
|
||||
// Index by coordinates
|
||||
if let Some(coords) = map.get_coordinates() {
|
||||
self.maps_by_coords.insert(coords, index);
|
||||
}
|
||||
|
||||
self.maps.push(map);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a map by scene ID (e.g., "3,10")
|
||||
pub fn get_by_scene_id(&self, scene_id: &str) -> Option<&Map> {
|
||||
self.maps_by_scene_id
|
||||
.get(scene_id)
|
||||
.and_then(|&index| self.maps.get(index))
|
||||
}
|
||||
|
||||
/// Get a map by coordinates
|
||||
pub fn get_by_coords(&self, x: i32, y: i32) -> Option<&Map> {
|
||||
self.maps_by_coords
|
||||
.get(&(x, y))
|
||||
.and_then(|&index| self.maps.get(index))
|
||||
}
|
||||
|
||||
/// Get maps by name (returns all maps with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&Map> {
|
||||
self.maps_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.maps.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all maps
|
||||
pub fn all_maps(&self) -> &[Map] {
|
||||
&self.maps
|
||||
}
|
||||
|
||||
/// Get all named maps (maps with non-empty names)
|
||||
pub fn get_named_maps(&self) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.is_named()).collect()
|
||||
}
|
||||
|
||||
/// Get all indoor maps
|
||||
pub fn get_indoor_maps(&self) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.is_indoor()).collect()
|
||||
}
|
||||
|
||||
/// Get all maps that are isolated (don't load nearby scenes)
|
||||
pub fn get_isolated_maps(&self) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.is_isolated()).collect()
|
||||
}
|
||||
|
||||
/// Get all maps with border restrictions
|
||||
pub fn get_bordered_maps(&self) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.has_borders()).collect()
|
||||
}
|
||||
|
||||
/// Get maps by music track ID
|
||||
pub fn get_by_music(&self, music_id: i32) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.music == music_id).collect()
|
||||
}
|
||||
|
||||
/// Get maps by ambience ID
|
||||
pub fn get_by_ambience(&self, ambience_id: i32) -> Vec<&Map> {
|
||||
self.maps
|
||||
.iter()
|
||||
.filter(|m| m.ambience == ambience_id)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all maps that have a respawn location set
|
||||
pub fn get_maps_with_respawn(&self) -> Vec<&Map> {
|
||||
self.maps
|
||||
.iter()
|
||||
.filter(|m| m.respawn_map.is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all maps that are connected to other maps
|
||||
pub fn get_connected_maps(&self) -> Vec<&Map> {
|
||||
self.maps
|
||||
.iter()
|
||||
.filter(|m| m.connected_maps.is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all maps hidden from world map
|
||||
pub fn get_hidden_from_worldmap(&self) -> Vec<&Map> {
|
||||
self.maps.iter().filter(|m| m.no_world_map).collect()
|
||||
}
|
||||
|
||||
/// Get all unique map names
|
||||
pub fn get_all_map_names(&self) -> Vec<String> {
|
||||
self.maps_by_name.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get the bounds of the map grid (min/max x and y coordinates)
|
||||
pub fn get_map_bounds(&self) -> Option<((i32, i32), (i32, i32))> {
|
||||
let coords: Vec<(i32, i32)> = self.maps_by_coords.keys().copied().collect();
|
||||
|
||||
if coords.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let min_x = coords.iter().map(|(x, _)| *x).min()?;
|
||||
let max_x = coords.iter().map(|(x, _)| *x).max()?;
|
||||
let min_y = coords.iter().map(|(_, y)| *y).min()?;
|
||||
let max_y = coords.iter().map(|(_, y)| *y).max()?;
|
||||
|
||||
Some(((min_x, min_y), (max_x, max_y)))
|
||||
}
|
||||
|
||||
/// Get number of maps in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.maps.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.maps.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare maps for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(String, String, String)> {
|
||||
self.maps
|
||||
.iter()
|
||||
.map(|map| {
|
||||
let json = serde_json::to_string(map).unwrap_or_else(|_| "{}".to_string());
|
||||
(map.scene_id.clone(), map.name.clone(), json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all maps to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::maps;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.maps
|
||||
.iter()
|
||||
.map(|map| {
|
||||
let json = serde_json::to_string(map).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
maps::scene_id.eq(&map.scene_id),
|
||||
maps::name.eq(&map.name),
|
||||
maps::data.eq(json),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(maps::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all maps from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::maps::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct MapRecord {
|
||||
scene_id: Option<String>,
|
||||
name: String,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = maps.load::<MapRecord>(conn)?;
|
||||
|
||||
let mut loaded_maps = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(map) = serde_json::from_str::<Map>(&record.data) {
|
||||
loaded_maps.push(map);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_maps(loaded_maps);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for MapDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_map_database_basic() {
        // A freshly created database holds no maps.
        // (The binding does not need `mut`: nothing mutates it.)
        let db = MapDatabase::new();
        assert!(db.is_empty());
        assert_eq!(db.len(), 0);
    }
}
|
||||
388
cursebreaker-parser/src/databases/minimap_database.rs
Normal file
388
cursebreaker-parser/src/databases/minimap_database.rs
Normal file
@@ -0,0 +1,388 @@
|
||||
use crate::types::{MinimapTileRecord, NewMinimapTile};
|
||||
use crate::image_processor::{ImageProcessor, ImageProcessingError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::fs;
|
||||
use std::collections::HashMap;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Errors produced while building or querying the minimap tile database.
#[derive(Debug, Error)]
pub enum MinimapDatabaseError {
    /// Any SQL-level failure, wrapped from diesel.
    #[error("Database error: {0}")]
    DatabaseError(#[from] diesel::result::Error),

    /// Failure while merging or WebP-encoding tile images.
    #[error("Image processing error: {0}")]
    ImageError(#[from] ImageProcessingError),

    /// Failure decoding a source PNG via the `image` crate.
    #[error("Image load error: {0}")]
    ImageLoadError(#[from] image::ImageError),

    /// Filesystem error while scanning directories or reading metadata.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    /// A PNG filename did not match the expected `x_y.png` pattern.
    #[error("Invalid filename format: {0}")]
    InvalidFilename(String),

    /// Could not establish the SQLite connection.
    #[error("Connection pool error: {0}")]
    ConnectionError(String),
}
|
||||
|
||||
/// Database for managing minimap tiles with merged zoom levels
pub struct MinimapDatabase {
    // SQLite connection string; a fresh connection is opened per operation
    // via establish_connection() rather than holding one long-lived handle.
    database_url: String,
}
|
||||
|
||||
impl MinimapDatabase {
|
||||
/// Create new database connection
|
||||
pub fn new(database_url: String) -> Self {
|
||||
Self { database_url }
|
||||
}
|
||||
|
||||
/// Establish database connection
|
||||
fn establish_connection(&self) -> Result<SqliteConnection, MinimapDatabaseError> {
|
||||
SqliteConnection::establish(&self.database_url)
|
||||
.map_err(|e| MinimapDatabaseError::ConnectionError(e.to_string()))
|
||||
}
|
||||
|
||||
/// Load all PNG files from directory and process them into all zoom levels
|
||||
pub fn load_from_directory<P: AsRef<Path>, B: AsRef<Path>>(
|
||||
&self,
|
||||
minimap_dir: P,
|
||||
base_path: B,
|
||||
) -> Result<usize, MinimapDatabaseError> {
|
||||
use crate::schema::minimap_tiles;
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
|
||||
println!("Loading PNG files from directory...");
|
||||
let png_files = self.find_minimap_pngs(minimap_dir.as_ref())?;
|
||||
println!("Found {} PNG files", png_files.len());
|
||||
|
||||
// Step 1: Process all original tiles (zoom level 2) and store their WebP data
|
||||
println!("\nProcessing zoom level 2 (original tiles)...");
|
||||
let mut tile_data: HashMap<(i32, i32), Vec<u8>> = HashMap::new();
|
||||
let mut count = 0;
|
||||
|
||||
for png_path in &png_files {
|
||||
let (x, y) = self.parse_coordinates(png_path)?;
|
||||
|
||||
// Load and encode as lossless WebP
|
||||
let img = image::open(png_path)?;
|
||||
let rgba = img.to_rgba8();
|
||||
let webp_data = ImageProcessor::encode_webp_lossless(&rgba)?;
|
||||
|
||||
// Get original file size
|
||||
let original_size = fs::metadata(png_path)?.len() as i32;
|
||||
|
||||
// Store in database
|
||||
let relative_path = png_path.strip_prefix(base_path.as_ref()).unwrap_or(png_path);
|
||||
let new_tile = NewMinimapTile {
|
||||
x,
|
||||
y,
|
||||
zoom: 2,
|
||||
width: 512,
|
||||
height: 512,
|
||||
original_file_size: Some(original_size),
|
||||
image: &webp_data,
|
||||
image_size: webp_data.len() as i32,
|
||||
source_path: relative_path.to_str().unwrap_or(""),
|
||||
};
|
||||
|
||||
diesel::replace_into(minimap_tiles::table)
|
||||
.values(&new_tile)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
// Cache for later merging
|
||||
tile_data.insert((x, y), webp_data);
|
||||
count += 1;
|
||||
|
||||
if count % 50 == 0 {
|
||||
println!(" Processed {} tiles...", count);
|
||||
}
|
||||
}
|
||||
|
||||
println!("Processed {} zoom level 2 tiles", count);
|
||||
|
||||
// Get bounds for merging
|
||||
let ((min_x, min_y), (max_x, max_y)) = self.get_map_bounds()?;
|
||||
println!("\nMap bounds: X [{}, {}], Y [{}, {}]", min_x, max_x, min_y, max_y);
|
||||
|
||||
// Step 2: Generate zoom level 1 (2x2 merged)
|
||||
println!("\nGenerating zoom level 1 (2x2 merged)...");
|
||||
let zoom1_count = self.generate_merged_tiles(
|
||||
&mut conn,
|
||||
&tile_data,
|
||||
min_x,
|
||||
max_x,
|
||||
min_y,
|
||||
max_y,
|
||||
1, // zoom level
|
||||
2, // merge factor
|
||||
)?;
|
||||
println!("Generated {} zoom level 1 tiles", zoom1_count);
|
||||
|
||||
// Step 3: Generate zoom level 0 (4x4 merged)
|
||||
println!("\nGenerating zoom level 0 (4x4 merged)...");
|
||||
let zoom0_count = self.generate_merged_tiles(
|
||||
&mut conn,
|
||||
&tile_data,
|
||||
min_x,
|
||||
max_x,
|
||||
min_y,
|
||||
max_y,
|
||||
0, // zoom level
|
||||
4, // merge factor
|
||||
)?;
|
||||
println!("Generated {} zoom level 0 tiles", zoom0_count);
|
||||
|
||||
println!("\nTotal tiles generated:");
|
||||
println!(" Zoom 2: {}", count);
|
||||
println!(" Zoom 1: {}", zoom1_count);
|
||||
println!(" Zoom 0: {}", zoom0_count);
|
||||
println!(" Total: {}", count + zoom1_count + zoom0_count);
|
||||
|
||||
Ok(count + zoom1_count + zoom0_count)
|
||||
}
|
||||
|
||||
/// Generate merged tiles for a specific zoom level
|
||||
fn generate_merged_tiles(
|
||||
&self,
|
||||
conn: &mut SqliteConnection,
|
||||
tile_data: &HashMap<(i32, i32), Vec<u8>>,
|
||||
min_x: i32,
|
||||
max_x: i32,
|
||||
min_y: i32,
|
||||
max_y: i32,
|
||||
zoom_level: i32,
|
||||
merge_factor: i32,
|
||||
) -> Result<usize, MinimapDatabaseError> {
|
||||
use crate::schema::minimap_tiles;
|
||||
|
||||
let mut count = 0;
|
||||
|
||||
// Iterate through merged tile grid
|
||||
let mut merged_y = min_y;
|
||||
while merged_y <= max_y {
|
||||
let mut merged_x = min_x;
|
||||
while merged_x <= max_x {
|
||||
// Collect tiles for this merged tile
|
||||
let mut tiles_for_merge: HashMap<(i32, i32), Vec<u8>> = HashMap::new();
|
||||
let mut has_any_tile = false;
|
||||
|
||||
for dy in 0..merge_factor {
|
||||
for dx in 0..merge_factor {
|
||||
let tile_x = merged_x + dx;
|
||||
let tile_y = merged_y + dy;
|
||||
|
||||
if let Some(webp) = tile_data.get(&(tile_x, tile_y)) {
|
||||
tiles_for_merge.insert((dx, dy), webp.clone());
|
||||
has_any_tile = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only create merged tile if we have at least one source tile
|
||||
if has_any_tile {
|
||||
let merged_img = ImageProcessor::merge_tiles(
|
||||
&tiles_for_merge,
|
||||
merge_factor,
|
||||
merge_factor,
|
||||
512,
|
||||
512,
|
||||
)?;
|
||||
|
||||
let merged_webp = ImageProcessor::encode_webp_lossless(&merged_img)?;
|
||||
|
||||
// Calculate merged tile coordinates
|
||||
let merged_tile_x = merged_x / merge_factor;
|
||||
let merged_tile_y = merged_y / merge_factor;
|
||||
|
||||
// Build source_tiles string for debugging
|
||||
let mut source_coords = Vec::new();
|
||||
for dy in 0..merge_factor {
|
||||
for dx in 0..merge_factor {
|
||||
let tx = merged_x + dx;
|
||||
let ty = merged_y + dy;
|
||||
if tile_data.contains_key(&(tx, ty)) {
|
||||
source_coords.push(format!("{},{}", tx, ty));
|
||||
}
|
||||
}
|
||||
}
|
||||
let source_tiles = source_coords.join(";");
|
||||
|
||||
let new_tile = NewMinimapTile {
|
||||
x: merged_tile_x,
|
||||
y: merged_tile_y,
|
||||
zoom: zoom_level,
|
||||
width: 512,
|
||||
height: 512,
|
||||
original_file_size: None,
|
||||
image: &merged_webp,
|
||||
image_size: merged_webp.len() as i32,
|
||||
source_path: &source_tiles,
|
||||
};
|
||||
|
||||
diesel::replace_into(minimap_tiles::table)
|
||||
.values(&new_tile)
|
||||
.execute(conn)?;
|
||||
|
||||
count += 1;
|
||||
}
|
||||
|
||||
merged_x += merge_factor;
|
||||
}
|
||||
merged_y += merge_factor;
|
||||
|
||||
if count % 20 == 0 && count > 0 {
|
||||
println!(" Generated {} merged tiles...", count);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Find all minimap PNG files in directory
|
||||
fn find_minimap_pngs<P: AsRef<Path>>(
|
||||
&self,
|
||||
dir: P,
|
||||
) -> Result<Vec<PathBuf>, MinimapDatabaseError> {
|
||||
let mut png_files = Vec::new();
|
||||
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_file() && path.extension().and_then(|s| s.to_str()) == Some("png") {
|
||||
// Check if filename matches x_y.png pattern
|
||||
if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
|
||||
if stem.contains('_') && stem.chars().all(|c| c.is_numeric() || c == '_' || c == '-') {
|
||||
png_files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(png_files)
|
||||
}
|
||||
|
||||
/// Parse x,y coordinates from filename (e.g., "0_0.png" -> (0, 0))
|
||||
fn parse_coordinates<P: AsRef<Path>>(
|
||||
&self,
|
||||
path: P,
|
||||
) -> Result<(i32, i32), MinimapDatabaseError> {
|
||||
let filename = path
|
||||
.as_ref()
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.ok_or_else(|| {
|
||||
MinimapDatabaseError::InvalidFilename(path.as_ref().display().to_string())
|
||||
})?;
|
||||
|
||||
let parts: Vec<&str> = filename.split('_').collect();
|
||||
if parts.len() != 2 {
|
||||
return Err(MinimapDatabaseError::InvalidFilename(
|
||||
filename.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let x = parts[0].parse::<i32>().map_err(|_| {
|
||||
MinimapDatabaseError::InvalidFilename(filename.to_string())
|
||||
})?;
|
||||
let y = parts[1].parse::<i32>().map_err(|_| {
|
||||
MinimapDatabaseError::InvalidFilename(filename.to_string())
|
||||
})?;
|
||||
|
||||
Ok((x, y))
|
||||
}
|
||||
|
||||
/// Get map bounds (min/max x and y) from zoom level 2 tiles
|
||||
pub fn get_map_bounds(
|
||||
&self,
|
||||
) -> Result<((i32, i32), (i32, i32)), MinimapDatabaseError> {
|
||||
use crate::schema::minimap_tiles::dsl::*;
|
||||
use diesel::dsl::{max, min};
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
|
||||
let (min_x_val, max_x_val): (Option<i32>, Option<i32>) =
|
||||
minimap_tiles
|
||||
.filter(zoom.eq(2))
|
||||
.select((min(x), max(x)))
|
||||
.first(&mut conn)?;
|
||||
|
||||
let (min_y_val, max_y_val): (Option<i32>, Option<i32>) =
|
||||
minimap_tiles
|
||||
.filter(zoom.eq(2))
|
||||
.select((min(y), max(y)))
|
||||
.first(&mut conn)?;
|
||||
|
||||
Ok((
|
||||
(min_x_val.unwrap_or(0), min_y_val.unwrap_or(0)),
|
||||
(max_x_val.unwrap_or(0), max_y_val.unwrap_or(0)),
|
||||
))
|
||||
}
|
||||
|
||||
/// Get count of tiles at a specific zoom level
|
||||
pub fn count_at_zoom(&self, zoom_level: i32) -> Result<i64, MinimapDatabaseError> {
|
||||
use crate::schema::minimap_tiles::dsl::*;
|
||||
use diesel::dsl::count_star;
|
||||
|
||||
let mut conn = self.establish_connection()?;
|
||||
let total = minimap_tiles
|
||||
.filter(zoom.eq(zoom_level))
|
||||
.select(count_star())
|
||||
.first(&mut conn)?;
|
||||
|
||||
Ok(total)
|
||||
}
|
||||
|
||||
/// Get storage statistics
|
||||
pub fn get_storage_stats(&self) -> Result<StorageStats, MinimapDatabaseError> {
|
||||
let mut conn = self.establish_connection()?;
|
||||
|
||||
use crate::schema::minimap_tiles::dsl::*;
|
||||
let tiles = minimap_tiles.load::<MinimapTileRecord>(&mut conn)?;
|
||||
|
||||
let mut stats = StorageStats::default();
|
||||
for tile in tiles {
|
||||
if tile.zoom == 2 {
|
||||
stats.total_original_size += tile.original_file_size.unwrap_or(0) as i64;
|
||||
stats.zoom2_count += 1;
|
||||
stats.zoom2_size += tile.image_size as i64;
|
||||
} else if tile.zoom == 1 {
|
||||
stats.zoom1_count += 1;
|
||||
stats.zoom1_size += tile.image_size as i64;
|
||||
} else if tile.zoom == 0 {
|
||||
stats.zoom0_count += 1;
|
||||
stats.zoom0_size += tile.image_size as i64;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(stats)
|
||||
}
|
||||
}
|
||||
|
||||
/// Aggregate tile counts and byte sizes per zoom level.
#[derive(Debug, Default)]
pub struct StorageStats {
    pub zoom2_count: i64,
    pub zoom1_count: i64,
    pub zoom0_count: i64,
    pub total_original_size: i64,
    pub zoom2_size: i64,
    pub zoom1_size: i64,
    pub zoom0_size: i64,
}

impl StorageStats {
    /// Combined WebP payload size across all three zoom levels.
    pub fn total_webp_size(&self) -> i64 {
        [self.zoom2_size, self.zoom1_size, self.zoom0_size]
            .iter()
            .sum()
    }

    /// WebP size as a percentage of the recorded original PNG size.
    /// Returns 0.0 when no original size has been recorded (avoids a
    /// division by zero).
    pub fn compression_ratio(&self) -> f64 {
        match self.total_original_size {
            0 => 0.0,
            original => (self.total_webp_size() as f64 / original as f64) * 100.0,
        }
    }
}
|
||||
25
cursebreaker-parser/src/databases/mod.rs
Normal file
25
cursebreaker-parser/src/databases/mod.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
mod item_database;
|
||||
mod npc_database;
|
||||
mod quest_database;
|
||||
mod harvestable_database;
|
||||
mod loot_database;
|
||||
mod map_database;
|
||||
mod fast_travel_database;
|
||||
mod player_house_database;
|
||||
mod trait_database;
|
||||
mod shop_database;
|
||||
mod minimap_database;
|
||||
mod icon_database;
|
||||
|
||||
pub use item_database::ItemDatabase;
|
||||
pub use npc_database::NpcDatabase;
|
||||
pub use quest_database::QuestDatabase;
|
||||
pub use harvestable_database::HarvestableDatabase;
|
||||
pub use loot_database::LootDatabase;
|
||||
pub use map_database::MapDatabase;
|
||||
pub use fast_travel_database::FastTravelDatabase;
|
||||
pub use player_house_database::PlayerHouseDatabase;
|
||||
pub use trait_database::TraitDatabase;
|
||||
pub use shop_database::ShopDatabase;
|
||||
pub use minimap_database::{MinimapDatabase, MinimapDatabaseError, StorageStats};
|
||||
pub use icon_database::{IconDatabase, IconDatabaseError, IconStats};
|
||||
193
cursebreaker-parser/src/databases/npc_database.rs
Normal file
193
cursebreaker-parser/src/databases/npc_database.rs
Normal file
@@ -0,0 +1,193 @@
|
||||
use crate::types::Npc;
|
||||
use crate::xml_parsers::{parse_npcs_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing NPCs loaded from XML files
#[derive(Debug, Clone)]
pub struct NpcDatabase {
    // All NPCs in insertion order; the index maps below point into this Vec.
    npcs: Vec<Npc>,
    // NPC id -> index into `npcs`; a duplicate id overwrites the earlier entry.
    npcs_by_id: HashMap<i32, usize>,
    // NPC name -> indices of every NPC sharing that name.
    npcs_by_name: HashMap<String, Vec<usize>>,
}
|
||||
|
||||
impl NpcDatabase {
|
||||
/// Create a new empty NpcDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
npcs: Vec::new(),
|
||||
npcs_by_id: HashMap::new(),
|
||||
npcs_by_name: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load NPCs from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let npcs = parse_npcs_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_npcs(npcs);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add NPCs to the database
|
||||
pub fn add_npcs(&mut self, npcs: Vec<Npc>) {
|
||||
for npc in npcs {
|
||||
let index = self.npcs.len();
|
||||
self.npcs_by_id.insert(npc.id, index);
|
||||
|
||||
self.npcs_by_name
|
||||
.entry(npc.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
self.npcs.push(npc);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get an NPC by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&Npc> {
|
||||
self.npcs_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.npcs.get(index))
|
||||
}
|
||||
|
||||
/// Get NPCs by name
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&Npc> {
|
||||
self.npcs_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.npcs.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all NPCs
|
||||
pub fn all_npcs(&self) -> &[Npc] {
|
||||
&self.npcs
|
||||
}
|
||||
|
||||
/// Get all hostile NPCs (can fight and aggressive)
|
||||
pub fn get_hostile(&self) -> Vec<&Npc> {
|
||||
self.npcs
|
||||
.iter()
|
||||
.filter(|npc| {
|
||||
npc.canfight == Some(1) && npc.aggressive == Some(1)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all interactable NPCs
|
||||
pub fn get_interactable(&self) -> Vec<&Npc> {
|
||||
self.npcs
|
||||
.iter()
|
||||
.filter(|npc| npc.interactable == Some(1))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get NPCs by tag
|
||||
pub fn get_by_tag(&self, tag: &str) -> Vec<&Npc> {
|
||||
self.npcs
|
||||
.iter()
|
||||
.filter(|npc| {
|
||||
npc.tags
|
||||
.as_ref()
|
||||
.map(|tags| tags.split(',').any(|t| t.trim() == tag))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get NPCs that offer shops
|
||||
pub fn get_shopkeepers(&self) -> Vec<&Npc> {
|
||||
self.npcs
|
||||
.iter()
|
||||
.filter(|npc| npc.shop.is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get number of NPCs in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.npcs.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.npcs.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare NPCs for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
|
||||
self.npcs
|
||||
.iter()
|
||||
.map(|npc| {
|
||||
let json = serde_json::to_string(npc).unwrap_or_else(|_| "{}".to_string());
|
||||
(npc.id, npc.name.clone(), json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all NPCs to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::npcs;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.npcs
|
||||
.iter()
|
||||
.map(|npc| {
|
||||
let json = serde_json::to_string(npc).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
npcs::id.eq(npc.id),
|
||||
npcs::name.eq(&npc.name),
|
||||
npcs::data.eq(json),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(npcs::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all NPCs from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::npcs::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct NpcRecord {
|
||||
id: Option<i32>,
|
||||
name: String,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = npcs.load::<NpcRecord>(conn)?;
|
||||
|
||||
let mut loaded_npcs = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(npc) = serde_json::from_str::<Npc>(&record.data) {
|
||||
loaded_npcs.push(npc);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_npcs(loaded_npcs);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for NpcDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
232
cursebreaker-parser/src/databases/player_house_database.rs
Normal file
232
cursebreaker-parser/src/databases/player_house_database.rs
Normal file
@@ -0,0 +1,232 @@
|
||||
use crate::types::PlayerHouse;
|
||||
use crate::xml_parsers::{parse_player_houses_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Player Houses loaded from XML files
#[derive(Debug, Clone)]
pub struct PlayerHouseDatabase {
    // All houses in insertion order; the index maps below point into this Vec.
    houses: Vec<PlayerHouse>,
    // Map ID -> house index (a duplicate id overwrites the earlier entry)
    houses_by_id: HashMap<i32, usize>,
    // Map name -> list of house indices (multiple houses can have same name)
    houses_by_name: HashMap<String, Vec<usize>>,
}
|
||||
|
||||
impl PlayerHouseDatabase {
|
||||
/// Create a new empty PlayerHouseDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
houses: Vec::new(),
|
||||
houses_by_id: HashMap::new(),
|
||||
houses_by_name: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load player houses from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let houses = parse_player_houses_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_houses(houses);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add player houses to the database
|
||||
pub fn add_houses(&mut self, houses: Vec<PlayerHouse>) {
|
||||
for house in houses {
|
||||
let index = self.houses.len();
|
||||
|
||||
// Index by ID
|
||||
self.houses_by_id.insert(house.id, index);
|
||||
|
||||
// Index by name
|
||||
self.houses_by_name
|
||||
.entry(house.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
self.houses.push(house);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a player house by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&PlayerHouse> {
|
||||
self.houses_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.houses.get(index))
|
||||
}
|
||||
|
||||
/// Get player houses by name (returns all houses with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&PlayerHouse> {
|
||||
self.houses_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.houses.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all houses
|
||||
pub fn all_houses(&self) -> &[PlayerHouse] {
|
||||
&self.houses
|
||||
}
|
||||
|
||||
/// Get all free houses (price is 0)
|
||||
pub fn get_free_houses(&self) -> Vec<&PlayerHouse> {
|
||||
self.houses.iter().filter(|h| h.is_free()).collect()
|
||||
}
|
||||
|
||||
/// Get all affordable houses (price < 5000)
|
||||
pub fn get_affordable_houses(&self) -> Vec<&PlayerHouse> {
|
||||
self.houses.iter().filter(|h| h.is_affordable()).collect()
|
||||
}
|
||||
|
||||
/// Get all expensive houses (price >= 10000)
|
||||
pub fn get_expensive_houses(&self) -> Vec<&PlayerHouse> {
|
||||
self.houses.iter().filter(|h| h.is_expensive()).collect()
|
||||
}
|
||||
|
||||
/// Get houses by price tier (0: free, 1: cheap, 2: moderate, 3: expensive)
|
||||
pub fn get_by_price_tier(&self, tier: u8) -> Vec<&PlayerHouse> {
|
||||
self.houses
|
||||
.iter()
|
||||
.filter(|h| h.get_price_tier() == tier)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get houses within a price range (inclusive)
|
||||
pub fn get_by_price_range(&self, min_price: i32, max_price: i32) -> Vec<&PlayerHouse> {
|
||||
self.houses
|
||||
.iter()
|
||||
.filter(|h| h.price >= min_price && h.price <= max_price)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all houses sorted by price (ascending)
|
||||
pub fn get_sorted_by_price(&self) -> Vec<&PlayerHouse> {
|
||||
let mut houses: Vec<&PlayerHouse> = self.houses.iter().collect();
|
||||
houses.sort_by_key(|h| h.price);
|
||||
houses
|
||||
}
|
||||
|
||||
/// Get the cheapest house (excluding free houses)
|
||||
pub fn get_cheapest(&self) -> Option<&PlayerHouse> {
|
||||
self.houses
|
||||
.iter()
|
||||
.filter(|h| h.price > 0)
|
||||
.min_by_key(|h| h.price)
|
||||
}
|
||||
|
||||
/// Get the most expensive house
|
||||
pub fn get_most_expensive(&self) -> Option<&PlayerHouse> {
|
||||
self.houses.iter().max_by_key(|h| h.price)
|
||||
}
|
||||
|
||||
/// Get all unique house names
|
||||
pub fn get_all_names(&self) -> Vec<String> {
|
||||
self.houses_by_name.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get number of houses in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.houses.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.houses.is_empty()
|
||||
}
|
||||
|
||||
/// Save all player houses to SQLite database (clears existing entries first)
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::player_houses;
|
||||
|
||||
// Clear existing entries
|
||||
diesel::delete(player_houses::table).execute(conn)?;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.houses
|
||||
.iter()
|
||||
.map(|house| {
|
||||
(
|
||||
player_houses::id.eq(house.id),
|
||||
player_houses::name.eq(&house.name),
|
||||
player_houses::description.eq(&house.description),
|
||||
player_houses::pos_x.eq(house.pos_x),
|
||||
player_houses::pos_z.eq(house.pos_z),
|
||||
player_houses::price.eq(house.price),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(player_houses::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all player houses from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::player_houses::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct PlayerHouseRecord {
|
||||
record_id: Option<i32>,
|
||||
name: String,
|
||||
description: String,
|
||||
pos_x: f32,
|
||||
pos_z: f32,
|
||||
price: i32,
|
||||
}
|
||||
|
||||
let records = player_houses.load::<PlayerHouseRecord>(conn)?;
|
||||
|
||||
let loaded_houses: Vec<PlayerHouse> = records
|
||||
.into_iter()
|
||||
.filter_map(|record| {
|
||||
record.record_id.map(|house_id| {
|
||||
PlayerHouse::new(
|
||||
house_id,
|
||||
record.name,
|
||||
record.description,
|
||||
record.pos_x,
|
||||
record.pos_z,
|
||||
record.price,
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_houses(loaded_houses);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PlayerHouseDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_player_house_database_basic() {
        // A freshly created database holds no houses.
        // (The binding does not need `mut`: nothing mutates it.)
        let db = PlayerHouseDatabase::new();
        assert!(db.is_empty());
        assert_eq!(db.len(), 0);
    }
}
|
||||
167
cursebreaker-parser/src/databases/quest_database.rs
Normal file
167
cursebreaker-parser/src/databases/quest_database.rs
Normal file
@@ -0,0 +1,167 @@
|
||||
use crate::types::Quest;
|
||||
use crate::xml_parsers::{parse_quests_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Quests loaded from XML files
#[derive(Debug, Clone)]
pub struct QuestDatabase {
    // All quests in insertion order; the index maps below point into this Vec.
    quests: Vec<Quest>,
    // Quest id -> index into `quests`.
    quests_by_id: HashMap<i32, usize>,
    // Quest name -> single index; unlike NPCs/houses this assumes names are
    // unique — a duplicate name silently overwrites the earlier entry.
    // TODO(review): confirm quest names are actually unique in the data.
    quests_by_name: HashMap<String, usize>,
}
|
||||
|
||||
impl QuestDatabase {
|
||||
/// Create a new empty QuestDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
quests: Vec::new(),
|
||||
quests_by_id: HashMap::new(),
|
||||
quests_by_name: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load quests from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let quests = parse_quests_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_quests(quests);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add quests to the database
|
||||
pub fn add_quests(&mut self, quests: Vec<Quest>) {
|
||||
for quest in quests {
|
||||
let index = self.quests.len();
|
||||
self.quests_by_id.insert(quest.id, index);
|
||||
self.quests_by_name.insert(quest.name.clone(), index);
|
||||
self.quests.push(quest);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a quest by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&Quest> {
|
||||
self.quests_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.quests.get(index))
|
||||
}
|
||||
|
||||
/// Get a quest by name
|
||||
pub fn get_by_name(&self, name: &str) -> Option<&Quest> {
|
||||
self.quests_by_name
|
||||
.get(name)
|
||||
.and_then(|&index| self.quests.get(index))
|
||||
}
|
||||
|
||||
/// Get all quests
|
||||
pub fn all_quests(&self) -> &[Quest] {
|
||||
&self.quests
|
||||
}
|
||||
|
||||
/// Get all main quests
|
||||
pub fn get_main_quests(&self) -> Vec<&Quest> {
|
||||
self.quests
|
||||
.iter()
|
||||
.filter(|quest| quest.is_main_quest())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all side quests (non-main quests)
|
||||
pub fn get_side_quests(&self) -> Vec<&Quest> {
|
||||
self.quests
|
||||
.iter()
|
||||
.filter(|quest| !quest.is_main_quest())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all hidden quests
|
||||
pub fn get_hidden_quests(&self) -> Vec<&Quest> {
|
||||
self.quests
|
||||
.iter()
|
||||
.filter(|quest| quest.is_hidden())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get number of quests in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.quests.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.quests.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare quests for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
|
||||
self.quests
|
||||
.iter()
|
||||
.map(|quest| {
|
||||
let json = serde_json::to_string(quest).unwrap_or_else(|_| "{}".to_string());
|
||||
(quest.id, quest.name.clone(), json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all quests to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::quests;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.quests
|
||||
.iter()
|
||||
.map(|quest| {
|
||||
let json = serde_json::to_string(quest).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
quests::id.eq(quest.id),
|
||||
quests::name.eq(&quest.name),
|
||||
quests::data.eq(json),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(quests::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all quests from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::quests::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct QuestRecord {
|
||||
id: Option<i32>,
|
||||
name: String,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = quests.load::<QuestRecord>(conn)?;
|
||||
|
||||
let mut loaded_quests = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(quest) = serde_json::from_str::<Quest>(&record.data) {
|
||||
loaded_quests.push(quest);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_quests(loaded_quests);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for QuestDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
240
cursebreaker-parser/src/databases/shop_database.rs
Normal file
240
cursebreaker-parser/src/databases/shop_database.rs
Normal file
@@ -0,0 +1,240 @@
|
||||
use crate::types::Shop;
|
||||
use crate::xml_parsers::{parse_shops_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Shops loaded from XML files
#[derive(Debug, Clone)]
pub struct ShopDatabase {
    // All shops in insertion order; the index maps below point into this Vec.
    shops: Vec<Shop>,
    // Map shop_id -> shop index (a duplicate id overwrites the earlier entry)
    shops_by_id: HashMap<i32, usize>,
    // Map name -> list of shop indices
    shops_by_name: HashMap<String, Vec<usize>>,
}
|
||||
|
||||
impl ShopDatabase {
|
||||
/// Create a new empty ShopDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
shops: Vec::new(),
|
||||
shops_by_id: HashMap::new(),
|
||||
shops_by_name: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load shops from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let shops = parse_shops_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_shops(shops);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add shops to the database
|
||||
pub fn add_shops(&mut self, shops: Vec<Shop>) {
|
||||
for shop in shops {
|
||||
let index = self.shops.len();
|
||||
|
||||
// Index by ID
|
||||
self.shops_by_id.insert(shop.shop_id, index);
|
||||
|
||||
// Index by name
|
||||
self.shops_by_name
|
||||
.entry(shop.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
|
||||
self.shops.push(shop);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a shop by ID
|
||||
pub fn get_by_id(&self, shop_id: i32) -> Option<&Shop> {
|
||||
self.shops_by_id
|
||||
.get(&shop_id)
|
||||
.and_then(|&index| self.shops.get(index))
|
||||
}
|
||||
|
||||
/// Get shops by name (returns all shops with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&Shop> {
|
||||
self.shops_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.shops.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all shops
|
||||
pub fn all_shops(&self) -> &[Shop] {
|
||||
&self.shops
|
||||
}
|
||||
|
||||
/// Get all general stores
|
||||
pub fn get_general_stores(&self) -> Vec<&Shop> {
|
||||
self.shops
|
||||
.iter()
|
||||
.filter(|s| s.is_general_store)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all specialized shops (non-general stores)
|
||||
pub fn get_specialized_shops(&self) -> Vec<&Shop> {
|
||||
self.shops
|
||||
.iter()
|
||||
.filter(|s| !s.is_general_store)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all non-empty shops
|
||||
pub fn get_non_empty_shops(&self) -> Vec<&Shop> {
|
||||
self.shops.iter().filter(|s| !s.is_empty()).collect()
|
||||
}
|
||||
|
||||
/// Get all shops that sell a specific item ID
|
||||
pub fn get_shops_selling_item(&self, item_id: &str) -> Vec<&Shop> {
|
||||
self.shops
|
||||
.iter()
|
||||
.filter(|shop| shop.get_item_by_id(item_id).is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all shops with comments
|
||||
pub fn get_shops_with_comments(&self) -> Vec<&Shop> {
|
||||
self.shops
|
||||
.iter()
|
||||
.filter(|s| s.comment.is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all unique shop names
|
||||
pub fn get_all_names(&self) -> Vec<String> {
|
||||
self.shops_by_name.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get total number of items across all shops
|
||||
pub fn total_item_count(&self) -> usize {
|
||||
self.shops.iter().map(|s| s.item_count()).sum()
|
||||
}
|
||||
|
||||
/// Get all unique item IDs sold across all shops
|
||||
pub fn get_all_item_ids(&self) -> Vec<String> {
|
||||
let mut item_ids: Vec<String> = self
|
||||
.shops
|
||||
.iter()
|
||||
.flat_map(|shop| shop.get_all_item_ids())
|
||||
.collect();
|
||||
item_ids.sort();
|
||||
item_ids.dedup();
|
||||
item_ids
|
||||
}
|
||||
|
||||
/// Get number of shops in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.shops.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.shops.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare shops for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, bool, usize, String)> {
|
||||
self.shops
|
||||
.iter()
|
||||
.map(|shop| {
|
||||
let json = serde_json::to_string(shop).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
shop.shop_id,
|
||||
shop.name.clone(),
|
||||
shop.is_general_store,
|
||||
shop.item_count(),
|
||||
json,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all shops to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::shops;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.shops
|
||||
.iter()
|
||||
.map(|shop| {
|
||||
let json = serde_json::to_string(shop).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
shops::id.eq(shop.shop_id),
|
||||
shops::name.eq(&shop.name),
|
||||
shops::unique_items.eq(if shop.is_general_store { 0 } else { 1 }),
|
||||
shops::item_count.eq(shop.items.len() as i32),
|
||||
shops::data.eq(json),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(shops::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all shops from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::shops::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct ShopRecord {
|
||||
id: Option<i32>,
|
||||
name: String,
|
||||
unique_items: i32,
|
||||
item_count: i32,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = shops.load::<ShopRecord>(conn)?;
|
||||
|
||||
let mut loaded_shops = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(shop) = serde_json::from_str::<Shop>(&record.data) {
|
||||
loaded_shops.push(shop);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_shops(loaded_shops);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ShopDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// A freshly constructed database must report itself empty.
    /// (The binding was previously `mut`, causing an unused_mut warning.)
    #[test]
    fn test_shop_database_basic() {
        let db = ShopDatabase::new();
        assert!(db.is_empty());
        assert_eq!(db.len(), 0);
    }
}
|
||||
266
cursebreaker-parser/src/databases/trait_database.rs
Normal file
266
cursebreaker-parser/src/databases/trait_database.rs
Normal file
@@ -0,0 +1,266 @@
|
||||
use crate::types::Trait;
|
||||
use crate::xml_parsers::{parse_traits_xml, XmlParseError};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
/// A database for managing Traits loaded from XML files
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TraitDatabase {
|
||||
traits: Vec<Trait>,
|
||||
// Map ID -> trait index
|
||||
traits_by_id: HashMap<i32, usize>,
|
||||
// Map name -> list of trait indices
|
||||
traits_by_name: HashMap<String, Vec<usize>>,
|
||||
// Map skill -> list of trait indices
|
||||
traits_by_skill: HashMap<String, Vec<usize>>,
|
||||
}
|
||||
|
||||
impl TraitDatabase {
|
||||
/// Create a new empty TraitDatabase
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
traits: Vec::new(),
|
||||
traits_by_id: HashMap::new(),
|
||||
traits_by_name: HashMap::new(),
|
||||
traits_by_skill: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load traits from an XML file
|
||||
pub fn load_from_xml<P: AsRef<Path>>(path: P) -> Result<Self, XmlParseError> {
|
||||
let traits = parse_traits_xml(path)?;
|
||||
let mut db = Self::new();
|
||||
db.add_traits(traits);
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
/// Add traits to the database
|
||||
pub fn add_traits(&mut self, traits: Vec<Trait>) {
|
||||
for trait_obj in traits {
|
||||
let index = self.traits.len();
|
||||
|
||||
// Index by ID
|
||||
self.traits_by_id.insert(trait_obj.id, index);
|
||||
|
||||
// Index by name (if it has a name)
|
||||
if !trait_obj.name.is_empty() {
|
||||
self.traits_by_name
|
||||
.entry(trait_obj.name.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
}
|
||||
|
||||
// Index by skill (if it has a trainer requirement)
|
||||
if let Some(ref trainer) = trait_obj.trainer {
|
||||
self.traits_by_skill
|
||||
.entry(trainer.skill.clone().to_lowercase())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(index);
|
||||
}
|
||||
|
||||
self.traits.push(trait_obj);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a trait by ID
|
||||
pub fn get_by_id(&self, id: i32) -> Option<&Trait> {
|
||||
self.traits_by_id
|
||||
.get(&id)
|
||||
.and_then(|&index| self.traits.get(index))
|
||||
}
|
||||
|
||||
/// Get traits by name (returns all traits with matching name)
|
||||
pub fn get_by_name(&self, name: &str) -> Vec<&Trait> {
|
||||
self.traits_by_name
|
||||
.get(name)
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.traits.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all traits
|
||||
pub fn all_traits(&self) -> &[Trait] {
|
||||
&self.traits
|
||||
}
|
||||
|
||||
/// Get all traits for a specific skill
|
||||
pub fn get_by_skill(&self, skill: &str) -> Vec<&Trait> {
|
||||
self.traits_by_skill
|
||||
.get(&skill.to_lowercase())
|
||||
.map(|indices| {
|
||||
indices
|
||||
.iter()
|
||||
.filter_map(|&index| self.traits.get(index))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get all trainer traits (traits that require a trainer)
|
||||
pub fn get_trainer_traits(&self) -> Vec<&Trait> {
|
||||
self.traits.iter().filter(|t| t.is_trainer_trait()).collect()
|
||||
}
|
||||
|
||||
/// Get all traits that teach abilities
|
||||
pub fn get_ability_traits(&self) -> Vec<&Trait> {
|
||||
self.traits
|
||||
.iter()
|
||||
.filter(|t| t.teaches_ability())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all novice tier traits
|
||||
pub fn get_novice_traits(&self) -> Vec<&Trait> {
|
||||
self.traits.iter().filter(|t| t.is_novice()).collect()
|
||||
}
|
||||
|
||||
/// Get all experienced tier traits
|
||||
pub fn get_experienced_traits(&self) -> Vec<&Trait> {
|
||||
self.traits.iter().filter(|t| t.is_experienced()).collect()
|
||||
}
|
||||
|
||||
/// Get all master tier traits
|
||||
pub fn get_master_traits(&self) -> Vec<&Trait> {
|
||||
self.traits.iter().filter(|t| t.is_master()).collect()
|
||||
}
|
||||
|
||||
/// Get traits by level requirement for a specific skill
|
||||
pub fn get_by_skill_and_level(&self, skill: &str, min_level: i32, max_level: i32) -> Vec<&Trait> {
|
||||
self.get_by_skill(skill)
|
||||
.into_iter()
|
||||
.filter(|t| {
|
||||
if let Some(level) = t.get_required_level() {
|
||||
level >= min_level && level <= max_level
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all unique skill names
|
||||
pub fn get_all_skills(&self) -> Vec<String> {
|
||||
self.traits_by_skill.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get traits sorted by level for a specific skill
|
||||
pub fn get_sorted_by_level(&self, skill: &str) -> Vec<&Trait> {
|
||||
let mut traits = self.get_by_skill(skill);
|
||||
traits.sort_by_key(|t| t.get_required_level().unwrap_or(0));
|
||||
traits
|
||||
}
|
||||
|
||||
/// Get all traits with comments
|
||||
pub fn get_with_comments(&self) -> Vec<&Trait> {
|
||||
self.traits
|
||||
.iter()
|
||||
.filter(|t| t.comment.is_some())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get number of traits in database
|
||||
pub fn len(&self) -> usize {
|
||||
self.traits.len()
|
||||
}
|
||||
|
||||
/// Check if database is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.traits.is_empty()
|
||||
}
|
||||
|
||||
/// Prepare traits for SQL insertion (deprecated - use save_to_db instead)
|
||||
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
|
||||
pub fn prepare_for_sql(&self) -> Vec<(i32, String, Option<String>, Option<i32>, String)> {
|
||||
self.traits
|
||||
.iter()
|
||||
.map(|trait_obj| {
|
||||
let json =
|
||||
serde_json::to_string(trait_obj).unwrap_or_else(|_| "{}".to_string());
|
||||
let skill = trait_obj.get_required_skill().map(|s| s.to_string());
|
||||
let level = trait_obj.get_required_level();
|
||||
(trait_obj.id, trait_obj.name.clone(), skill, level, json)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Save all traits to SQLite database
|
||||
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
|
||||
use crate::schema::traits;
|
||||
|
||||
let records: Vec<_> = self
|
||||
.traits
|
||||
.iter()
|
||||
.map(|trait_obj| {
|
||||
let json = serde_json::to_string(trait_obj).unwrap_or_else(|_| "{}".to_string());
|
||||
(
|
||||
traits::id.eq(trait_obj.id),
|
||||
traits::name.eq(&trait_obj.name),
|
||||
traits::description.eq(Some(&trait_obj.description)),
|
||||
traits::trainer_id.eq(None::<i32>), // TODO: determine actual trainer ID
|
||||
traits::data.eq(json),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut count = 0;
|
||||
for record in records {
|
||||
diesel::insert_into(traits::table)
|
||||
.values(&record)
|
||||
.execute(conn)?;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Load all traits from SQLite database
|
||||
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
|
||||
use crate::schema::traits::dsl::*;
|
||||
|
||||
#[derive(Queryable)]
|
||||
struct TraitRecord {
|
||||
id: Option<i32>,
|
||||
name: String,
|
||||
description: Option<String>,
|
||||
trainer_id: Option<i32>,
|
||||
data: String,
|
||||
}
|
||||
|
||||
let records = traits.load::<TraitRecord>(conn)?;
|
||||
|
||||
let mut loaded_traits = Vec::new();
|
||||
for record in records {
|
||||
if let Ok(trait_obj) = serde_json::from_str::<Trait>(&record.data) {
|
||||
loaded_traits.push(trait_obj);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db = Self::new();
|
||||
db.add_traits(loaded_traits);
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TraitDatabase {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// A freshly constructed database must report itself empty.
    /// (The binding was previously `mut`, causing an unused_mut warning.)
    #[test]
    fn test_trait_database_basic() {
        let db = TraitDatabase::new();
        assert!(db.is_empty());
        assert_eq!(db.len(), 0);
    }
}
|
||||
467
cursebreaker-parser/src/image_processor.rs
Normal file
467
cursebreaker-parser/src/image_processor.rs
Normal file
@@ -0,0 +1,467 @@
|
||||
use image::{DynamicImage, ImageError, Rgba, RgbaImage};
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Configuration for outline drawing on images with alpha channels
///
/// Used by `ImageProcessor::apply_outline` to stamp a colored border
/// around opaque regions.
#[derive(Debug, Clone)]
pub struct OutlineConfig {
    /// Outline color (RGBA)
    pub color: Rgba<u8>,
    /// Outline thickness in pixels
    pub thickness: u32,
    /// Alpha threshold for edge detection (0-255)
    /// Pixels with alpha >= threshold are considered solid
    pub alpha_threshold: u8,
}
|
||||
|
||||
impl OutlineConfig {
|
||||
/// Create new outline config with custom color and thickness
|
||||
pub fn new(color: Rgba<u8>, thickness: u32) -> Self {
|
||||
Self {
|
||||
color,
|
||||
thickness,
|
||||
alpha_threshold: 128,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create outline config with white color
|
||||
pub fn white(thickness: u32) -> Self {
|
||||
Self::new(Rgba([255, 255, 255, 255]), thickness)
|
||||
}
|
||||
|
||||
/// Create outline config with black color
|
||||
pub fn black(thickness: u32) -> Self {
|
||||
Self::new(Rgba([0, 0, 0, 255]), thickness)
|
||||
}
|
||||
|
||||
/// Set alpha threshold for edge detection
|
||||
pub fn with_alpha_threshold(mut self, threshold: u8) -> Self {
|
||||
self.alpha_threshold = threshold;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for OutlineConfig {
|
||||
fn default() -> Self {
|
||||
Self::white(1)
|
||||
}
|
||||
}
|
||||
|
||||
/// Errors that can occur while loading, transforming, or encoding images.
#[derive(Debug, Error)]
pub enum ImageProcessingError {
    /// The `image` crate failed to open or decode the source file.
    #[error("Failed to load image: {0}")]
    ImageLoadError(#[from] ImageError),

    /// WebP encoding reported a failure (message passed through).
    #[error("WebP encoding failed: {0}")]
    WebPError(String),

    /// Source image dimensions did not match the requested validation size.
    #[error("Invalid image dimensions: expected {expected_width}x{expected_height}, got {actual_width}x{actual_height}")]
    InvalidDimensions {
        expected_width: u32,
        expected_height: u32,
        actual_width: u32,
        actual_height: u32,
    },

    /// Underlying filesystem/IO failure.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    /// `process_image` was called with an empty list of target sizes.
    #[error("No resolutions specified")]
    NoResolutions,
}
|
||||
|
||||
/// Converts source images to WebP at one or more target resolutions,
/// optionally drawing an outline around non-transparent content.
pub struct ImageProcessor {
    quality: f32, // WebP quality (0.0-100.0), used by lossy encode_webp
}
|
||||
|
||||
impl ImageProcessor {
    /// Create new processor with specified WebP quality
    ///
    /// `quality` is the lossy WebP quality in the range 0.0-100.0.
    pub fn new(quality: f32) -> Self {
        Self { quality }
    }

    /// Process image and generate WebP at multiple resolutions
    ///
    /// # Arguments
    /// * `image_path` - Path to the source image
    /// * `sizes` - Slice of desired output sizes (width/height in pixels)
    /// * `validate_dimensions` - Optional (width, height) to validate source image dimensions
    /// * `outline` - Optional outline configuration to add edges around transparent areas
    ///
    /// # Returns
    /// ProcessedImages containing WebP blobs for each requested size
    ///
    /// # Errors
    /// `NoResolutions` when `sizes` is empty, `InvalidDimensions` when
    /// validation fails, or a load/encode error from the image handling.
    pub fn process_image<P: AsRef<Path>>(
        &self,
        image_path: P,
        sizes: &[u32],
        validate_dimensions: Option<(u32, u32)>,
        outline: Option<&OutlineConfig>,
    ) -> Result<ProcessedImages, ImageProcessingError> {
        if sizes.is_empty() {
            return Err(ImageProcessingError::NoResolutions);
        }

        // Load image
        let mut img = image::open(image_path.as_ref())?;

        // Validate dimensions if requested
        if let Some((expected_width, expected_height)) = validate_dimensions {
            if img.width() != expected_width || img.height() != expected_height {
                return Err(ImageProcessingError::InvalidDimensions {
                    expected_width,
                    expected_height,
                    actual_width: img.width(),
                    actual_height: img.height(),
                });
            }
        }

        // Apply outline if requested (forces conversion to RGBA8 first)
        if let Some(outline_config) = outline {
            img = DynamicImage::ImageRgba8(self.apply_outline(img.to_rgba8(), outline_config));
        }

        // Generate WebP for each size; each entry is a square size x size output.
        let mut images = HashMap::new();
        for &size in sizes {
            let webp_data = self.encode_webp(&img, size, size)?;
            images.insert(size, webp_data);
        }

        Ok(ProcessedImages { images })
    }

    /// Apply outline effect to image based on alpha channel edges
    ///
    /// Two passes: (1) mark every transparent pixel that touches an opaque
    /// neighbor, (2) stamp a (2*thickness+1)-square of outline color around
    /// each marked pixel, painting only still-transparent pixels.
    fn apply_outline(&self, img: RgbaImage, config: &OutlineConfig) -> RgbaImage {
        let (width, height) = img.dimensions();

        // Create a mask of edge pixels that need outline.
        // Indexed as edge_mask[x][y].
        let mut edge_mask = vec![vec![false; height as usize]; width as usize];

        // Detect edges: pixels that are transparent but adjacent to opaque pixels
        for y in 0..height {
            for x in 0..width {
                let pixel = img.get_pixel(x, y);

                // Skip if pixel is already opaque
                if pixel[3] >= config.alpha_threshold {
                    continue;
                }

                // Check if any neighbor is opaque (this is an edge)
                let is_edge = self.has_opaque_neighbor(&img, x, y, config.alpha_threshold);

                if is_edge {
                    edge_mask[x as usize][y as usize] = true;
                }
            }
        }

        // Apply outline with thickness
        let thickness = config.thickness as i32;
        // Paint into a copy so newly drawn outline pixels in this pass do not
        // influence the alpha checks on the original image.
        let mut outlined = img.clone();

        for y in 0..height {
            for x in 0..width {
                if edge_mask[x as usize][y as usize] {
                    // Draw outline in a square pattern around this edge pixel
                    for dy in -thickness..=thickness {
                        for dx in -thickness..=thickness {
                            let nx = x as i32 + dx;
                            let ny = y as i32 + dy;

                            // Check bounds
                            if nx >= 0 && nx < width as i32 && ny >= 0 && ny < height as i32 {
                                let nx = nx as u32;
                                let ny = ny as u32;

                                let current_pixel = outlined.get_pixel(nx, ny);

                                // Only draw outline on transparent pixels
                                if current_pixel[3] < config.alpha_threshold {
                                    outlined.put_pixel(nx, ny, config.color);
                                }
                            }
                        }
                    }
                }
            }
        }

        outlined
    }

    /// Check if a pixel has any opaque neighbor
    ///
    /// Scans the 8 surrounding pixels (out-of-bounds neighbors are ignored)
    /// and returns true as soon as one has alpha >= `alpha_threshold`.
    fn has_opaque_neighbor(
        &self,
        img: &RgbaImage,
        x: u32,
        y: u32,
        alpha_threshold: u8,
    ) -> bool {
        let (width, height) = img.dimensions();

        // Check 8 surrounding pixels
        for dy in -1..=1 {
            for dx in -1..=1 {
                if dx == 0 && dy == 0 {
                    continue; // Skip center pixel
                }

                let nx = x as i32 + dx;
                let ny = y as i32 + dy;

                // Check bounds
                if nx >= 0 && nx < width as i32 && ny >= 0 && ny < height as i32 {
                    let neighbor = img.get_pixel(nx as u32, ny as u32);
                    if neighbor[3] >= alpha_threshold {
                        return true;
                    }
                }
            }
        }

        false
    }

    /// Encode image to WebP at specified dimensions
    ///
    /// Resizes with resize_exact (aspect ratio is NOT preserved) when the
    /// target differs from the source, then lossy-encodes at `self.quality`.
    fn encode_webp(
        &self,
        img: &DynamicImage,
        width: u32,
        height: u32,
    ) -> Result<Vec<u8>, ImageProcessingError> {
        // Resize if dimensions don't match original
        let resized = if img.width() != width || img.height() != height {
            img.resize_exact(width, height, image::imageops::FilterType::Lanczos3)
        } else {
            img.clone()
        };

        // Convert to RGBA8
        let rgba = resized.to_rgba8();
        let (w, h) = rgba.dimensions();

        // Encode to WebP
        let encoder = webp::Encoder::from_rgba(rgba.as_raw(), w, h);

        let webp_data = encoder.encode(self.quality);
        Ok(webp_data.to_vec())
    }

    /// Encode image to lossless WebP
    ///
    /// Associated function: does not use the processor's quality setting.
    pub fn encode_webp_lossless(
        img: &RgbaImage,
    ) -> Result<Vec<u8>, ImageProcessingError> {
        let (w, h) = img.dimensions();
        let encoder = webp::Encoder::from_rgba(img.as_raw(), w, h);
        let webp_data = encoder.encode_lossless();
        Ok(webp_data.to_vec())
    }

    /// Create a black tile of specified size
    ///
    /// Fully opaque black, `size` x `size` pixels.
    pub fn create_black_tile(size: u32) -> RgbaImage {
        image::ImageBuffer::from_pixel(size, size, Rgba([0, 0, 0, 255]))
    }

    /// Merge multiple tiles into a single image
    ///
    /// # Arguments
    /// * `tiles` - HashMap of (x, y) coordinates to tile image data (WebP format)
    /// * `grid_x` - Number of tiles in X direction
    /// * `grid_y` - Number of tiles in Y direction
    /// * `tile_size` - Size of each original tile (assumes square tiles)
    /// * `output_size` - Size of the output merged image
    ///
    /// # Returns
    /// A merged RgbaImage containing all tiles positioned correctly
    ///
    /// NOTE(review): `tile_size` is never read in this body; the per-tile
    /// output size is derived from `output_size / max(grid_x, grid_y)` —
    /// confirm that is intended. Also assumes grid_x/grid_y > 0 (the
    /// division below would panic on 0).
    pub fn merge_tiles(
        tiles: &HashMap<(i32, i32), Vec<u8>>,
        grid_x: i32,
        grid_y: i32,
        tile_size: u32,
        output_size: u32,
    ) -> Result<RgbaImage, ImageProcessingError> {
        // Create output image (starts fully black; missing tiles stay black)
        let mut merged = Self::create_black_tile(output_size);

        // Calculate size each tile should be in the output
        let scaled_tile_size = output_size / grid_x.max(grid_y) as u32;

        // Process each tile in the grid
        for dy in 0..grid_y {
            for dx in 0..grid_x {
                if let Some(webp_data) = tiles.get(&(dx, dy)) {
                    // Decode WebP tile; a tile that fails to decode is
                    // skipped (left black) rather than failing the merge.
                    if let Ok(tile_img) = image::load_from_memory_with_format(
                        webp_data,
                        image::ImageFormat::WebP,
                    ) {
                        // Resize tile to fit in output
                        let resized = tile_img.resize_exact(
                            scaled_tile_size,
                            scaled_tile_size,
                            image::imageops::FilterType::Lanczos3,
                        ).to_rgba8();

                        // Calculate position in output image
                        let offset_x = dx as u32 * scaled_tile_size;
                        // Invert Y-axis to match expected coordinate system
                        let offset_y = (grid_y - 1 - dy) as u32 * scaled_tile_size;

                        // Copy pixels into merged image
                        for y in 0..scaled_tile_size {
                            for x in 0..scaled_tile_size {
                                if let Some(pixel) = resized.get_pixel_checked(x, y) {
                                    merged.put_pixel(offset_x + x, offset_y + y, *pixel);
                                }
                            }
                        }
                    }
                }
                // If tile doesn't exist, it stays black (already initialized)
            }
        }

        Ok(merged)
    }
}
|
||||
|
||||
impl Default for ImageProcessor {
|
||||
fn default() -> Self {
|
||||
Self::new(85.0) // 85% quality default
|
||||
}
|
||||
}
|
||||
|
||||
/// Container for processed WebP images at multiple resolutions
///
/// Produced by `ImageProcessor::process_image`; one blob per requested size.
#[derive(Debug)]
pub struct ProcessedImages {
    /// Map of size (in pixels) to WebP blob data
    pub images: HashMap<u32, Vec<u8>>,
}
|
||||
|
||||
impl ProcessedImages {
|
||||
/// Get WebP blob for a specific size
|
||||
pub fn get(&self, size: u32) -> Option<&Vec<u8>> {
|
||||
self.images.get(&size)
|
||||
}
|
||||
|
||||
/// Get total size of all WebP blobs in bytes
|
||||
pub fn total_size(&self) -> usize {
|
||||
self.images.values().map(|v| v.len()).sum()
|
||||
}
|
||||
|
||||
/// Get all available sizes
|
||||
pub fn sizes(&self) -> Vec<u32> {
|
||||
let mut sizes: Vec<u32> = self.images.keys().copied().collect();
|
||||
sizes.sort_unstable();
|
||||
sizes
|
||||
}
|
||||
|
||||
/// Get number of resolutions stored
|
||||
pub fn len(&self) -> usize {
|
||||
self.images.len()
|
||||
}
|
||||
|
||||
/// Check if empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.images.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Constructor and Default must agree on the stored quality value.
    #[test]
    fn test_processor_creation() {
        let processor = ImageProcessor::default();
        assert_eq!(processor.quality, 85.0);

        let custom = ImageProcessor::new(90.0);
        assert_eq!(custom.quality, 90.0);
    }

    // Exercises every ProcessedImages accessor over a two-entry map.
    #[test]
    fn test_processed_images() {
        let mut images = HashMap::new();
        images.insert(512, vec![1, 2, 3]);
        images.insert(256, vec![4, 5]);

        let processed = ProcessedImages { images };

        assert_eq!(processed.len(), 2);
        assert_eq!(processed.total_size(), 5);
        assert_eq!(processed.get(512), Some(&vec![1, 2, 3]));
        assert_eq!(processed.get(128), None);

        // sizes() must come back sorted ascending.
        let sizes = processed.sizes();
        assert_eq!(sizes, vec![256, 512]);
    }

    // Default config: 1px white outline, threshold 128.
    #[test]
    fn test_outline_config_default() {
        let config = OutlineConfig::default();
        assert_eq!(config.thickness, 1);
        assert_eq!(config.color, Rgba([255, 255, 255, 255])); // White
        assert_eq!(config.alpha_threshold, 128);
    }

    // new() keeps the provided color/thickness, threshold defaults to 128.
    #[test]
    fn test_outline_config_custom() {
        let red = Rgba([255, 0, 0, 255]);
        let config = OutlineConfig::new(red, 2);
        assert_eq!(config.thickness, 2);
        assert_eq!(config.color, red);
        assert_eq!(config.alpha_threshold, 128);
    }

    // white()/black() convenience constructors plus threshold builder.
    #[test]
    fn test_outline_config_builders() {
        let white = OutlineConfig::white(3);
        assert_eq!(white.color, Rgba([255, 255, 255, 255]));
        assert_eq!(white.thickness, 3);

        let black = OutlineConfig::black(2).with_alpha_threshold(200);
        assert_eq!(black.color, Rgba([0, 0, 0, 255]));
        assert_eq!(black.thickness, 2);
        assert_eq!(black.alpha_threshold, 200);
    }

    // Edge detection: a transparent pixel surrounded by opaque ones is an
    // edge; a pixel in a fully transparent image is not.
    #[test]
    fn test_outline_edge_detection() {
        let processor = ImageProcessor::default();

        // Create a simple 3x3 image with a transparent pixel in the center
        let mut img = RgbaImage::new(3, 3);

        // Fill with opaque white
        for y in 0..3 {
            for x in 0..3 {
                img.put_pixel(x, y, Rgba([255, 255, 255, 255]));
            }
        }

        // Make center transparent
        img.put_pixel(1, 1, Rgba([0, 0, 0, 0]));

        // Test that center pixel has opaque neighbors
        assert!(processor.has_opaque_neighbor(&img, 1, 1, 128));

        // Test a fully opaque pixel - should not have any transparent neighbors
        // but the function checks if a pixel has opaque neighbors, not transparent ones
        assert!(processor.has_opaque_neighbor(&img, 0, 0, 128));

        // Create a new image that's fully transparent
        let mut transparent_img = RgbaImage::new(3, 3);
        for y in 0..3 {
            for x in 0..3 {
                transparent_img.put_pixel(x, y, Rgba([0, 0, 0, 0]));
            }
        }

        // A transparent pixel with all transparent neighbors should return false
        assert!(!processor.has_opaque_neighbor(&transparent_img, 1, 1, 128));
    }
}
|
||||
1217
cursebreaker-parser/src/item_loader.rs
Normal file
1217
cursebreaker-parser/src/item_loader.rs
Normal file
File diff suppressed because it is too large
Load Diff
147
cursebreaker-parser/src/lib.rs
Normal file
147
cursebreaker-parser/src/lib.rs
Normal file
@@ -0,0 +1,147 @@
|
||||
//! Cursebreaker Parser - A library for parsing Cursebreaker game data
|
||||
//!
|
||||
//! This library provides functionality to:
|
||||
//! - Parse Unity scenes and extract game objects
|
||||
//! - Load game data from XML files (Items, NPCs, Quests, etc.)
|
||||
//! - Store and query game data at runtime
|
||||
//! - Serialize data to SQL databases
|
||||
//!
|
||||
//! # Example - Loading Items from XML
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use cursebreaker_parser::ItemDatabase;
|
||||
//!
|
||||
//! // Load all items from XML
|
||||
//! let item_db = ItemDatabase::load_from_xml("Data/XMLs/Items/Items.xml")?;
|
||||
//! println!("Loaded {} items", item_db.len());
|
||||
//!
|
||||
//! // Get item by ID
|
||||
//! if let Some(item) = item_db.get_by_id(150) {
|
||||
//! println!("Found: {}", item.name);
|
||||
//! }
|
||||
//!
|
||||
//! // Query items by category
|
||||
//! let weapons = item_db.get_by_category("bow");
|
||||
//! println!("Found {} bows", weapons.len());
|
||||
//!
|
||||
//! // Query items by slot
|
||||
//! let consumables = item_db.get_by_slot("consumable");
|
||||
//! for item in consumables {
|
||||
//! println!("Consumable: {}", item.name);
|
||||
//! }
|
||||
//! # Ok::<(), Box<dyn std::error::Error>>(())
|
||||
//! ```
|
||||
//!
|
||||
//! # Example - Preparing Data for SQL
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use cursebreaker_parser::ItemDatabase;
|
||||
//!
|
||||
//! let item_db = ItemDatabase::load_from_xml("Data/XMLs/Items/Items.xml")?;
|
||||
//!
|
||||
//! // Prepare data for SQL insertion
|
||||
//! // Returns Vec<(id, name, json_data)>
|
||||
//! let sql_data = item_db.prepare_for_sql();
|
||||
//!
|
||||
//! for (id, name, json) in sql_data.iter().take(5) {
|
||||
//! println!("INSERT INTO items VALUES ({}, '{}', '{}')", id, name, json);
|
||||
//! }
|
||||
//! # Ok::<(), Box<dyn std::error::Error>>(())
|
||||
//! ```
|
||||
|
||||
// Public modules: data model types, typed database facades, and the Diesel schema.
pub mod types;
pub mod databases;
pub mod schema;
// Internal implementation details — not part of the crate's public API.
mod xml_parsers;
mod item_loader;
mod image_processor;

// Re-export the database facades at the crate root so callers can write
// `use cursebreaker_parser::ItemDatabase;` instead of reaching into `databases`.
pub use databases::{
    ItemDatabase,
    NpcDatabase,
    QuestDatabase,
    HarvestableDatabase,
    LootDatabase,
    MapDatabase,
    FastTravelDatabase,
    PlayerHouseDatabase,
    TraitDatabase,
    ShopDatabase,
    MinimapDatabase,
    MinimapDatabaseError,
    StorageStats,
    IconDatabase,
    IconDatabaseError,
    IconStats,
};
// Re-export the data model types, grouped by domain.
pub use types::{
    // Items
    Item,
    ItemStat,
    CraftingRecipe,
    CraftingRecipeItem,
    AnimationSet,
    GenerateRule,
    ItemType,
    ItemCategory,
    Tool,
    SkillType,
    StatType,
    Stat,
    ItemXpBoost,
    PermanentStatBoost,
    CustomItemName,
    MAX_STACK,
    // Other types
    InteractableResource,
    InteractableTeleporter,
    InteractableWorkbench,
    LootSpawner,
    MapIcon,
    MapIconType,
    MapNameChanger,
    Npc,
    NpcStat,
    NpcLevel,
    RightClick,
    BarkGroup,
    Bark,
    QuestMarker,
    NpcAnimationSet,
    Quest,
    QuestPhase,
    QuestReward,
    Harvestable,
    HarvestableDrop,
    LootTable,
    LootDrop,
    Map,
    FastTravelLocation,
    FastTravelType,
    PlayerHouse,
    Trait,
    TraitTrainer,
    Shop,
    ShopItem,
    // Minimap
    MinimapTile,
    MinimapTileRecord,
    NewMinimapTile,
    // Icons
    AbilityIconRecord,
    NewAbilityIcon,
    BuffIconRecord,
    NewBuffIcon,
    TraitIconRecord,
    NewTraitIcon,
    PlayerHouseIconRecord,
    NewPlayerHouseIcon,
    StatIconRecord,
    NewStatIcon,
    AchievementIconRecord,
    NewAchievementIcon,
    GeneralIconRecord,
    NewGeneralIcon,
};
// Error type surfaced by the XML parsing layer.
pub use xml_parsers::XmlParseError;
// Image processing utilities, public so binaries can pre-process icons/tiles.
pub use image_processor::{ImageProcessor, ImageProcessingError, ProcessedImages, OutlineConfig};
|
||||
227
cursebreaker-parser/src/main.rs
Normal file
227
cursebreaker-parser/src/main.rs
Normal file
@@ -0,0 +1,227 @@
|
||||
//! Parse Cursebreaker Resources from 10_3.unity Scene
|
||||
//!
|
||||
//! This example demonstrates:
|
||||
//! 1. Parsing the Cursebreaker Unity project
|
||||
//! 2. Finding Interactable_Resource components
|
||||
//! 3. Extracting typeId and transform positions
|
||||
//! 4. Writing resource data to an output file
|
||||
|
||||
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase, MapDatabase, FastTravelDatabase, PlayerHouseDatabase, TraitDatabase, ShopDatabase, InteractableResource, MinimapDatabase};
|
||||
use unity_parser::UnityProject;
|
||||
use std::path::Path;
|
||||
use unity_parser::log::DedupLogger;
|
||||
use log::{info, error, warn, LevelFilter};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use std::env;
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
|
||||
let logger = DedupLogger::new();
|
||||
log::set_boxed_logger(Box::new(logger))
|
||||
.map(|()| log::set_max_level(LevelFilter::Trace))
|
||||
.unwrap();
|
||||
// log::set_max_level(LevelFilter::Warn);
|
||||
|
||||
info!("🎮 Cursebreaker - Resource Parser");
|
||||
|
||||
// Load items from XML
|
||||
info!("📚 Loading game data from XML...");
|
||||
|
||||
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
|
||||
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
|
||||
let item_db = ItemDatabase::load_from_xml(items_path)?;
|
||||
info!("✅ Loaded {} items", item_db.len());
|
||||
|
||||
let npcs_path = format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path);
|
||||
let npc_db = NpcDatabase::load_from_xml(npcs_path)?;
|
||||
info!("✅ Loaded {} NPCs", npc_db.len());
|
||||
|
||||
let quests_path = format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path);
|
||||
let quest_db = QuestDatabase::load_from_xml(quests_path)?;
|
||||
info!("✅ Loaded {} quests", quest_db.len());
|
||||
|
||||
let harvestables_path = format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path);
|
||||
let harvestable_db = HarvestableDatabase::load_from_xml(harvestables_path)?;
|
||||
info!("✅ Loaded {} harvestables", harvestable_db.len());
|
||||
|
||||
let loot_path = format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path);
|
||||
let loot_db = LootDatabase::load_from_xml(loot_path)?;
|
||||
info!("✅ Loaded {} loot tables", loot_db.len());
|
||||
|
||||
let maps_path = format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path);
|
||||
let map_db = MapDatabase::load_from_xml(maps_path)?;
|
||||
info!("✅ Loaded {} maps", map_db.len());
|
||||
|
||||
let fast_travel_dir = format!("{}/Data/XMLs", cb_assets_path);
|
||||
let fast_travel_db = FastTravelDatabase::load_from_directory(fast_travel_dir)?;
|
||||
info!("✅ Loaded {} fast travel locations", fast_travel_db.len());
|
||||
|
||||
let player_houses_path = format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path);
|
||||
let player_house_db = PlayerHouseDatabase::load_from_xml(player_houses_path)?;
|
||||
info!("✅ Loaded {} player houses", player_house_db.len());
|
||||
|
||||
let traits_path = format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path);
|
||||
let trait_db = TraitDatabase::load_from_xml(traits_path)?;
|
||||
info!("✅ Loaded {} traits", trait_db.len());
|
||||
|
||||
let shops_path = format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path);
|
||||
let shop_db = ShopDatabase::load_from_xml(shops_path)?;
|
||||
info!("✅ Loaded {} shops", shop_db.len());
|
||||
|
||||
// Save to SQLite database
|
||||
info!("\n💾 Saving game data to SQLite database...");
|
||||
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
|
||||
let mut conn = SqliteConnection::establish(&database_url)?;
|
||||
|
||||
match item_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} items to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save items: {}", e),
|
||||
}
|
||||
|
||||
match npc_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} NPCs to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save NPCs: {}", e),
|
||||
}
|
||||
|
||||
match quest_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} quests to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save quests: {}", e),
|
||||
}
|
||||
|
||||
match harvestable_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} harvestables to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save harvestables: {}", e),
|
||||
}
|
||||
|
||||
match loot_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} loot tables to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save loot tables: {}", e),
|
||||
}
|
||||
|
||||
match map_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} maps to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save maps: {}", e),
|
||||
}
|
||||
|
||||
match fast_travel_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} fast travel locations to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save fast travel locations: {}", e),
|
||||
}
|
||||
|
||||
match player_house_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} player houses to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save player houses: {}", e),
|
||||
}
|
||||
|
||||
match trait_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} traits to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save traits: {}", e),
|
||||
}
|
||||
|
||||
match shop_db.save_to_db(&mut conn) {
|
||||
Ok(count) => info!("✅ Saved {} shops to database", count),
|
||||
Err(e) => warn!("⚠️ Failed to save shops: {}", e),
|
||||
}
|
||||
|
||||
// Print statistics
|
||||
info!("\n📊 Game Data Statistics:");
|
||||
info!(" Items:");
|
||||
info!(" • Weapons: {}", item_db.get_by_slot("weapon").len());
|
||||
info!(" • Consumables: {}", item_db.get_by_slot("consumable").len());
|
||||
info!(" NPCs:");
|
||||
info!(" • Hostile: {}", npc_db.get_hostile().len());
|
||||
info!(" • Interactable: {}", npc_db.get_interactable().len());
|
||||
info!(" Quests:");
|
||||
info!(" • Main quests: {}", quest_db.get_main_quests().len());
|
||||
info!(" • Side quests: {}", quest_db.get_side_quests().len());
|
||||
info!(" Harvestables:");
|
||||
info!(" • Trees: {}", harvestable_db.get_trees().len());
|
||||
info!(" • Woodcutting: {}", harvestable_db.get_by_skill("Woodcutting").len());
|
||||
info!(" • Mining: {}", harvestable_db.get_by_skill("mining").len());
|
||||
info!(" • Fishing: {}", harvestable_db.get_by_skill("Fishing").len());
|
||||
info!(" • Alchemy: {}", harvestable_db.get_by_skill("Alchemy").len());
|
||||
info!(" Loot:");
|
||||
info!(" • Total tables: {}", loot_db.len());
|
||||
info!(" • NPCs with loot: {}", loot_db.get_all_npcs_with_loot().len());
|
||||
info!(" • Droppable items: {}", loot_db.get_all_droppable_items().len());
|
||||
info!(" • Tables with conditional drops: {}", loot_db.get_conditional_tables().len());
|
||||
|
||||
// Initialize Unity project once - scans entire project for GUID mappings
|
||||
let project_root = Path::new(&cb_assets_path);
|
||||
info!("\n📦 Initializing Unity project from: {}", project_root.display());
|
||||
|
||||
let project = UnityProject::from_path(project_root)?;
|
||||
|
||||
// Now parse the scene using the pre-built GUID resolvers
|
||||
let scene_path = "_GameAssets/Scenes/Tiles/10_3.unity";
|
||||
info!("📁 Parsing scene: {}", scene_path);
|
||||
|
||||
log::logger().flush();
|
||||
|
||||
// Parse the scene using the project
|
||||
match project.parse_scene(scene_path) {
|
||||
Ok(mut scene) => {
|
||||
info!("✅ Scene parsed successfully!");
|
||||
info!(" Total entities: {}", scene.entity_map.len());
|
||||
|
||||
// Post-processing: Compute world transforms
|
||||
info!("🔄 Computing world transforms...");
|
||||
unity_parser::compute_world_transforms(&mut scene.world, &scene.entity_map);
|
||||
info!(" ✓ World transforms computed");
|
||||
|
||||
// Get views for component types we need
|
||||
// Find all entities that have Interactable_Resource
|
||||
log::logger().flush();
|
||||
|
||||
scene.world
|
||||
.query_all::<(&InteractableResource, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
|
||||
.for_each(|(resource, transform, object)| {
|
||||
info!(" 📦 Resource: \"{}\"", object.name);
|
||||
info!(" • typeId: {}", resource.type_id);
|
||||
|
||||
// Extract world position from WorldTransform
|
||||
let world_pos = transform.position();
|
||||
info!(" • Position: ({:.2}, {:.2}, {:.2})", world_pos.x, world_pos.y, world_pos.z);
|
||||
log::logger().flush();
|
||||
});
|
||||
|
||||
log::logger().flush();
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Parse error: {}", e);
|
||||
return Err(Box::new(e));
|
||||
}
|
||||
}
|
||||
|
||||
log::logger().flush();
|
||||
|
||||
// Process minimap tiles
|
||||
info!("\n🗺️ Processing minimap tiles...");
|
||||
let minimap_db = MinimapDatabase::new(database_url.clone());
|
||||
|
||||
let minimap_path = format!("{}/Data/Textures/MinimapSquares", cb_assets_path);
|
||||
match minimap_db.load_from_directory(&minimap_path, &cb_assets_path) {
|
||||
Ok(count) => {
|
||||
info!("✅ Processed {} minimap tiles", count);
|
||||
|
||||
if let Ok(stats) = minimap_db.get_storage_stats() {
|
||||
info!(" Storage Statistics:");
|
||||
info!(" • Original PNG total: {} MB", stats.total_original_size / 1_048_576);
|
||||
info!(" • WebP total: {} MB", stats.total_webp_size() / 1_048_576);
|
||||
info!(" • Compression ratio: {:.2}%", stats.compression_ratio());
|
||||
}
|
||||
|
||||
if let Ok(bounds) = minimap_db.get_map_bounds() {
|
||||
info!(" Map Bounds:");
|
||||
info!(" • Min (x,y): {:?}", bounds.0);
|
||||
info!(" • Max (x,y): {:?}", bounds.1);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to process minimap tiles: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
342
cursebreaker-parser/src/schema.rs
Normal file
342
cursebreaker-parser/src/schema.rs
Normal file
@@ -0,0 +1,342 @@
|
||||
// @generated automatically by Diesel CLI.
//
// NOTE(review): this file is produced by `diesel print-schema`; hand-written
// comments below will be lost on the next regeneration.

// ---- Icon tables: one row per named icon, image stored as an encoded blob ----

diesel::table! {
    ability_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

diesel::table! {
    achievement_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

diesel::table! {
    buff_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

// ---- Crafting: recipes and their ingredient rows (composite key) ----

diesel::table! {
    crafting_recipe_items (recipe_id, item_id) {
        recipe_id -> Integer,
        item_id -> Integer,
        amount -> Integer,
    }
}

diesel::table! {
    crafting_recipes (id) {
        id -> Nullable<Integer>,
        product_item_id -> Integer,
        skill -> Text,
        level -> Integer,
        workbench_id -> Integer,
        xp -> Integer,
        unlocked_by_default -> Integer,
        checks -> Nullable<Text>,
    }
}

diesel::table! {
    fast_travel_locations (name) {
        name -> Nullable<Text>,
        pos_x -> Float,
        pos_z -> Float,
        travel_type -> Text,
        unlocked -> Integer,
        connections -> Nullable<Text>,
        checks -> Nullable<Text>,
    }
}

// General icons keep multiple pre-sized variants per name.
diesel::table! {
    general_icons (name) {
        name -> Text,
        original_width -> Integer,
        original_height -> Integer,
        icon_original -> Nullable<Binary>,
        icon_256 -> Nullable<Binary>,
        icon_64 -> Nullable<Binary>,
        icon_32 -> Nullable<Binary>,
    }
}

diesel::table! {
    harvestable_drops (id) {
        id -> Nullable<Integer>,
        harvestable_id -> Integer,
        item_id -> Integer,
        minamount -> Integer,
        maxamount -> Integer,
        droprate -> Integer,
        droprateboost -> Integer,
        amountboost -> Integer,
        comment -> Text,
    }
}

diesel::table! {
    harvestables (id) {
        id -> Integer,
        name -> Text,
        description -> Text,
        comment -> Text,
        level -> Integer,
        skill -> Text,
        tool -> Text,
        min_health -> Integer,
        max_health -> Integer,
        harvesttime -> Integer,
        hittime -> Integer,
        respawntime -> Integer,
    }
}

diesel::table! {
    item_stats (item_id, stat_type) {
        item_id -> Integer,
        stat_type -> Text,
        value -> Float,
    }
}

// Main item table; `data` holds the full item serialized as JSON, the other
// columns are denormalized for querying. Icon blobs in three sizes.
diesel::table! {
    items (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
        item_type -> Text,
        level -> Integer,
        price -> Integer,
        max_stack -> Integer,
        storage_size -> Integer,
        skill -> Text,
        tool -> Text,
        description -> Text,
        two_handed -> Integer,
        undroppable -> Integer,
        undroppable_on_death -> Integer,
        unequip_destroy -> Integer,
        generate_icon -> Integer,
        hide_milestone -> Integer,
        cannot_craft_exceptional -> Integer,
        storage_all_items -> Integer,
        ability_id -> Integer,
        special_ability -> Integer,
        learn_ability_id -> Integer,
        book_id -> Integer,
        swap_item -> Integer,
        icon_large -> Nullable<Binary>,
        icon_medium -> Nullable<Binary>,
        icon_small -> Nullable<Binary>,
    }
}

diesel::table! {
    loot_tables (table_id) {
        table_id -> Nullable<Text>,
        npc_id -> Nullable<Text>,
        data -> Text,
    }
}

diesel::table! {
    maps (scene_id) {
        scene_id -> Nullable<Text>,
        name -> Text,
        data -> Text,
    }
}

// Minimap tiles: one compressed image per (x, y, zoom) grid cell.
diesel::table! {
    minimap_tiles (id) {
        id -> Nullable<Integer>,
        x -> Integer,
        y -> Integer,
        zoom -> Integer,
        width -> Integer,
        height -> Integer,
        original_file_size -> Nullable<Integer>,
        image -> Binary,
        image_size -> Integer,
        processed_at -> Timestamp,
        source_path -> Text,
    }
}

diesel::table! {
    npcs (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

diesel::table! {
    player_house_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

diesel::table! {
    player_houses (id) {
        id -> Nullable<Integer>,
        name -> Text,
        description -> Text,
        pos_x -> Float,
        pos_z -> Float,
        price -> Integer,
    }
}

diesel::table! {
    quests (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

diesel::table! {
    resource_icons (item_id) {
        item_id -> Integer,
        name -> Text,
        icon_64 -> Binary,
    }
}

diesel::table! {
    shops (id) {
        id -> Nullable<Integer>,
        name -> Text,
        unique_items -> Integer,
        item_count -> Integer,
        data -> Text,
    }
}

diesel::table! {
    stat_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

diesel::table! {
    trait_icons (name) {
        name -> Text,
        icon -> Binary,
    }
}

diesel::table! {
    traits (id) {
        id -> Nullable<Integer>,
        name -> Text,
        description -> Nullable<Text>,
        trainer_id -> Nullable<Integer>,
        data -> Text,
    }
}

// ---- World tables: scene-derived placements keyed by position ----

diesel::table! {
    world_loot (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        item_id -> Integer,
        amount -> Integer,
        respawn_time -> Integer,
        visibility_checks -> Text,
    }
}

diesel::table! {
    world_map_icons (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        icon_type -> Integer,
        icon_size -> Integer,
        icon -> Text,
        text -> Text,
        font_size -> Integer,
        hover_text -> Text,
    }
}

diesel::table! {
    world_map_name_changers (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        map_name -> Text,
    }
}

diesel::table! {
    world_resources (item_id, pos_x, pos_y) {
        item_id -> Integer,
        pos_x -> Float,
        pos_y -> Float,
    }
}

diesel::table! {
    world_teleporters (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        tp_x -> Nullable<Float>,
        tp_y -> Nullable<Float>,
    }
}

diesel::table! {
    world_workbenches (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        workbench_id -> Integer,
    }
}

// Foreign-key relationships used by Diesel's join support.
diesel::joinable!(crafting_recipe_items -> crafting_recipes (recipe_id));
diesel::joinable!(crafting_recipe_items -> items (item_id));
diesel::joinable!(crafting_recipes -> items (product_item_id));
diesel::joinable!(harvestable_drops -> harvestables (harvestable_id));
diesel::joinable!(harvestable_drops -> items (item_id));
diesel::joinable!(item_stats -> items (item_id));

diesel::allow_tables_to_appear_in_same_query!(
    ability_icons,
    achievement_icons,
    buff_icons,
    crafting_recipe_items,
    crafting_recipes,
    fast_travel_locations,
    general_icons,
    harvestable_drops,
    harvestables,
    item_stats,
    items,
    loot_tables,
    maps,
    minimap_tiles,
    npcs,
    player_house_icons,
    player_houses,
    quests,
    resource_icons,
    shops,
    stat_icons,
    trait_icons,
    traits,
    world_loot,
    world_map_icons,
    world_map_name_changers,
    world_resources,
    world_teleporters,
    world_workbenches,
);
|
||||
146
cursebreaker-parser/src/types/cursebreaker/fast_travel.rs
Normal file
146
cursebreaker-parser/src/types/cursebreaker/fast_travel.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Type of fast travel location
///
/// Derives `Copy`, `Eq`, and `Hash` so values can be compared and used as
/// map keys without borrowing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum FastTravelType {
    /// Regular fast travel location (horse/cart)
    Location,
    /// Canoe fast travel location (water travel)
    Canoe,
    /// Portal fast travel location (magical portal)
    Portal,
}
|
||||
|
||||
impl std::fmt::Display for FastTravelType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
FastTravelType::Location => write!(f, "Location"),
|
||||
FastTravelType::Canoe => write!(f, "Canoe"),
|
||||
FastTravelType::Portal => write!(f, "Portal"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a fast travel location (canoe, portal, or regular location)
///
/// Loaded from the fast-travel XML files — presumably via
/// `FastTravelDatabase::load_from_directory`; confirm against the loader.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FastTravelLocation {
    /// Unique ID
    pub id: i32,

    /// Display name
    pub name: String,

    /// X position in world space
    pub pos_x: f32,

    /// Z position in world space
    pub pos_z: f32,

    /// Type of fast travel
    pub travel_type: FastTravelType,

    // ========== Optional Fields ==========
    /// Whether this location is unlocked by default (FastTravelLocations only).
    /// Defaults to `false` in [`FastTravelLocation::new`].
    pub unlocked: bool,

    /// Connected location IDs (FastTravelLocations only, comma-separated)
    pub connections: Option<String>,

    /// Requirement checks (FastTravelCanoe only, e.g., "Quest=70-2-999,HasTrait=273")
    pub checks: Option<String>,
}
|
||||
|
||||
impl FastTravelLocation {
|
||||
/// Create a new FastTravelLocation with required fields
|
||||
pub fn new(id: i32, name: String, pos_x: f32, pos_z: f32, travel_type: FastTravelType) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name,
|
||||
pos_x,
|
||||
pos_z,
|
||||
travel_type,
|
||||
unlocked: false,
|
||||
connections: None,
|
||||
checks: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get position as (x, z) tuple
|
||||
pub fn get_position(&self) -> (f32, f32) {
|
||||
(self.pos_x, self.pos_z)
|
||||
}
|
||||
|
||||
/// Get list of connected location IDs
|
||||
pub fn get_connections(&self) -> Vec<i32> {
|
||||
if let Some(ref connections) = self.connections {
|
||||
connections
|
||||
.split(',')
|
||||
.filter_map(|s| s.trim().parse::<i32>().ok())
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this location has any requirements
|
||||
pub fn has_requirements(&self) -> bool {
|
||||
self.checks.is_some()
|
||||
}
|
||||
|
||||
/// Check if this location has connections to other locations
|
||||
pub fn has_connections(&self) -> bool {
|
||||
self.connections.is_some() && !self.get_connections().is_empty()
|
||||
}
|
||||
|
||||
/// Parse checks into a list of individual requirements
|
||||
/// Returns Vec of (check_type, value) tuples
|
||||
/// e.g., "Quest=70-2-999,HasTrait=273" -> [("Quest", "70-2-999"), ("HasTrait", "273")]
|
||||
pub fn parse_checks(&self) -> Vec<(String, String)> {
|
||||
if let Some(ref checks) = self.checks {
|
||||
checks
|
||||
.split(',')
|
||||
.filter_map(|check| {
|
||||
let parts: Vec<&str> = check.trim().split('=').collect();
|
||||
if parts.len() == 2 {
|
||||
Some((parts[0].to_string(), parts[1].to_string()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this location requires a specific quest
|
||||
pub fn requires_quest(&self, quest_id: &str) -> bool {
|
||||
self.parse_checks()
|
||||
.iter()
|
||||
.any(|(check_type, value)| check_type == "Quest" && value.starts_with(quest_id))
|
||||
}
|
||||
|
||||
/// Check if this location requires a specific trait
|
||||
pub fn requires_trait(&self, trait_id: i32) -> bool {
|
||||
self.parse_checks()
|
||||
.iter()
|
||||
.any(|(check_type, value)| {
|
||||
check_type == "HasTrait" && value.parse::<i32>().ok() == Some(trait_id)
|
||||
})
|
||||
}
|
||||
|
||||
/// Check if location is a canoe location
|
||||
pub fn is_canoe(&self) -> bool {
|
||||
self.travel_type == FastTravelType::Canoe
|
||||
}
|
||||
|
||||
/// Check if location is a portal
|
||||
pub fn is_portal(&self) -> bool {
|
||||
self.travel_type == FastTravelType::Portal
|
||||
}
|
||||
|
||||
/// Check if location is a regular location
|
||||
pub fn is_location(&self) -> bool {
|
||||
self.travel_type == FastTravelType::Location
|
||||
}
|
||||
}
|
||||
115
cursebreaker-parser/src/types/cursebreaker/harvestable.rs
Normal file
115
cursebreaker-parser/src/types/cursebreaker/harvestable.rs
Normal file
@@ -0,0 +1,115 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use super::item::{SkillType, Tool};
|
||||
|
||||
/// A harvestable world resource node (tree, ore vein, fishing spot, etc.),
/// as defined in the harvestables XML — presumably `HarvestableInfo.xml`;
/// confirm against the loader.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Harvestable {
    // Required fields
    /// Unique type ID for this harvestable.
    pub typeid: i32,
    /// Display name.
    pub name: String,

    // Basic attributes
    /// Interaction verb shown to the player — assumed from the name; TODO confirm.
    pub actionname: String,
    /// Description text.
    pub desc: String,
    /// Free-form authoring comment carried over from the XML.
    pub comment: String,
    /// Required skill level.
    pub level: i32,
    /// Skill used to harvest (see [`SkillType`]).
    pub skill: SkillType,
    /// Tool required to harvest; `Tool::None` means no tool needed.
    pub tool: Tool,

    // Health
    /// Minimum health the node spawns with.
    pub min_health: i32,
    /// Maximum health the node spawns with.
    pub max_health: i32,

    // Timing — units not visible here (ticks vs milliseconds); TODO confirm.
    pub harvesttime: i32,
    pub hittime: i32,
    pub respawntime: i32,

    // Audio — sound effect identifiers/paths.
    pub harvestsfx: String,
    pub endsfx: String,
    pub receiveitemsfx: String,

    // Visuals — animation and effect identifiers.
    pub animation: String,
    pub takehitanimation: String,
    pub endgfx: String,

    // Behavior flags
    /// Marks this node as a tree (queried by `is_tree`).
    pub tree: bool,
    pub hidemilestone: bool,
    pub nohighlight: bool,
    pub hideminimap: bool,
    pub noleftclickinteract: bool,

    // Interaction
    /// Interaction range, kept as the raw string from the XML.
    pub interactdistance: String,

    // Drops
    /// All possible item drops from this node.
    pub drops: Vec<HarvestableDrop>,
}
|
||||
|
||||
/// One possible item drop from a [`Harvestable`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HarvestableDrop {
    /// Item ID of the dropped item.
    pub id: i32,
    /// Minimum amount dropped.
    pub minamount: i32,
    /// Maximum amount dropped.
    pub maxamount: i32,
    /// Drop chance — scale/units not visible here (percent vs permille); TODO confirm.
    pub droprate: i32,
    /// Boost applied to the drop rate — exact semantics not visible here; TODO confirm.
    pub droprateboost: i32,
    /// Boost applied to the drop amount — exact semantics not visible here; TODO confirm.
    pub amountboost: i32,
    /// Requirement checks gating this drop, kept as the raw XML string.
    pub checks: String,
    /// Free-form authoring comment from the XML.
    pub comment: String,
    /// If true, this drop presumably does not consume node health — assumed
    /// from the field name; TODO confirm.
    pub dontconsumehealth: bool,
}
|
||||
|
||||
impl Harvestable {
|
||||
pub fn new(typeid: i32, name: String) -> Self {
|
||||
Self {
|
||||
typeid,
|
||||
name,
|
||||
actionname: String::new(),
|
||||
desc: String::new(),
|
||||
comment: String::new(),
|
||||
level: 0,
|
||||
skill: SkillType::None,
|
||||
tool: Tool::None,
|
||||
min_health: 0,
|
||||
max_health: 0,
|
||||
harvesttime: 0,
|
||||
hittime: 0,
|
||||
respawntime: 0,
|
||||
harvestsfx: String::new(),
|
||||
endsfx: String::new(),
|
||||
receiveitemsfx: String::new(),
|
||||
animation: String::new(),
|
||||
takehitanimation: String::new(),
|
||||
endgfx: String::new(),
|
||||
tree: false,
|
||||
hidemilestone: false,
|
||||
nohighlight: false,
|
||||
hideminimap: false,
|
||||
noleftclickinteract: false,
|
||||
interactdistance: String::new(),
|
||||
drops: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this is a tree
|
||||
pub fn is_tree(&self) -> bool {
|
||||
self.tree
|
||||
}
|
||||
|
||||
/// Check if this requires a tool
|
||||
pub fn requires_tool(&self) -> bool {
|
||||
!matches!(self.tool, Tool::None)
|
||||
}
|
||||
|
||||
/// Get the skill associated with this harvestable
|
||||
pub fn get_skill(&self) -> SkillType {
|
||||
self.skill
|
||||
}
|
||||
|
||||
/// Get all item IDs that can drop from this harvestable
|
||||
pub fn get_drop_item_ids(&self) -> Vec<i32> {
|
||||
self.drops.iter().map(|d| d.id).collect()
|
||||
}
|
||||
}
|
||||
134
cursebreaker-parser/src/types/cursebreaker/icon_models.rs
Normal file
134
cursebreaker-parser/src/types/cursebreaker/icon_models.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
use diesel::prelude::*;
|
||||
use crate::schema::{
|
||||
ability_icons, buff_icons, trait_icons, player_house_icons, stat_icons,
|
||||
achievement_icons, general_icons
|
||||
};
|
||||
|
||||
// Each icon table gets a matching pair of models: a `*Record` struct for reads
// (Queryable/Selectable) and a borrowing `New*` struct for inserts (Insertable).

/// Diesel queryable model for ability_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = ability_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct AbilityIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for ability_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = ability_icons)]
pub struct NewAbilityIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for buff_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = buff_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct BuffIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for buff_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = buff_icons)]
pub struct NewBuffIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for trait_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = trait_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct TraitIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for trait_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = trait_icons)]
pub struct NewTraitIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for player_house_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = player_house_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct PlayerHouseIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for player_house_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = player_house_icons)]
pub struct NewPlayerHouseIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for stat_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = stat_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct StatIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for stat_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = stat_icons)]
pub struct NewStatIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for achievement_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = achievement_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct AchievementIconRecord {
    pub name: String,
    pub icon: Vec<u8>,
}

/// Diesel insertable model for achievement_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = achievement_icons)]
pub struct NewAchievementIcon<'a> {
    pub name: &'a str,
    pub icon: &'a [u8],
}

/// Diesel queryable model for general_icons table
///
/// Unlike the other icon tables, general icons keep several image columns —
/// presumably pre-scaled 256/64/32 px variants plus the original; confirm
/// against the image processing code.
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = general_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct GeneralIconRecord {
    pub name: String,
    pub original_width: i32,
    pub original_height: i32,
    pub icon_original: Option<Vec<u8>>,
    pub icon_256: Option<Vec<u8>>,
    pub icon_64: Option<Vec<u8>>,
    pub icon_32: Option<Vec<u8>>,
}

/// Diesel insertable model for general_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = general_icons)]
pub struct NewGeneralIcon<'a> {
    pub name: &'a str,
    pub original_width: i32,
    pub original_height: i32,
    pub icon_original: Option<&'a [u8]>,
    pub icon_256: Option<&'a [u8]>,
    pub icon_64: Option<&'a [u8]>,
    pub icon_32: Option<&'a [u8]>,
}
|
||||
649
cursebreaker-parser/src/types/cursebreaker/item.rs
Normal file
649
cursebreaker-parser/src/types/cursebreaker/item.rs
Normal file
@@ -0,0 +1,649 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::str::FromStr;
|
||||
|
||||
// Constants

/// Effectively-unbounded stack cap (just under `i32::MAX`); presumably used
/// as the "stacks infinitely" sentinel for currencies/resources — confirm at
/// call sites, which are not visible here.
pub const MAX_STACK: i32 = 2_100_000_000; // 2.1 billion
|
||||
|
||||
// ============================================================================
|
||||
// Enums
|
||||
// ============================================================================
|
||||
|
||||
/// Broad classification of an item; drives equip-slot behaviour
/// (see `is_equippable`). Serialised in lowercase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ItemType {
    Weapon,
    Shield,
    Armor,
    Head,
    Resource,
    Consumable,
    Trinket,
    Bracelet,
}
|
||||
|
||||
impl ItemType {
|
||||
pub fn is_equippable(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
ItemType::Armor
|
||||
| ItemType::Shield
|
||||
| ItemType::Weapon
|
||||
| ItemType::Head
|
||||
| ItemType::Trinket
|
||||
| ItemType::Bracelet
|
||||
)
|
||||
}
|
||||
|
||||
pub fn to_string(&self) -> &'static str {
|
||||
match self {
|
||||
ItemType::Shield => "Offhand",
|
||||
ItemType::Weapon => "weapon",
|
||||
ItemType::Armor => "armor",
|
||||
ItemType::Head => "head",
|
||||
ItemType::Resource => "resource",
|
||||
ItemType::Consumable => "consumable",
|
||||
ItemType::Trinket => "trinket",
|
||||
ItemType::Bracelet => "bracelet",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ItemType {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"armor" => Ok(ItemType::Armor),
|
||||
"weapon" => Ok(ItemType::Weapon),
|
||||
"shield" => Ok(ItemType::Shield),
|
||||
"resource" => Ok(ItemType::Resource),
|
||||
"consumable" => Ok(ItemType::Consumable),
|
||||
"head" => Ok(ItemType::Head),
|
||||
"trinket" => Ok(ItemType::Trinket),
|
||||
"bracelet" => Ok(ItemType::Bracelet),
|
||||
_ => Ok(ItemType::Resource), // Default fallback
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fine-grained item category tags; an item may carry several
/// (see `Item::item_categories`). Serialised in lowercase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ItemCategory {
    None,
    Bone,
    Bow,
    Crossbow,
    Constructable,
    Torch,
    Blacksmithhammer,
    Questitem,
    HeavyArmor,
    Warhammer,
    Shield,
    Hatchet,
    Blade,
    Armor,
    Pickaxe,
    Fish,
    Fishingrod,
    Shears,
    Hammer,
    Battleaxe,
    Morningstar,
    Wand,
    Staff,
    Dagger,
}
|
||||
|
||||
impl ItemCategory {
|
||||
pub fn to_string(&self) -> &'static str {
|
||||
match self {
|
||||
ItemCategory::Fishingrod => "fishing rod",
|
||||
ItemCategory::None => "none",
|
||||
ItemCategory::Bone => "bone",
|
||||
ItemCategory::Bow => "bow",
|
||||
ItemCategory::Crossbow => "crossbow",
|
||||
ItemCategory::Constructable => "constructable",
|
||||
ItemCategory::Torch => "torch",
|
||||
ItemCategory::Blacksmithhammer => "blacksmithhammer",
|
||||
ItemCategory::Questitem => "questitem",
|
||||
ItemCategory::HeavyArmor => "heavyArmor",
|
||||
ItemCategory::Warhammer => "warhammer",
|
||||
ItemCategory::Shield => "shield",
|
||||
ItemCategory::Hatchet => "hatchet",
|
||||
ItemCategory::Blade => "blade",
|
||||
ItemCategory::Armor => "armor",
|
||||
ItemCategory::Pickaxe => "pickaxe",
|
||||
ItemCategory::Fish => "fish",
|
||||
ItemCategory::Shears => "shears",
|
||||
ItemCategory::Hammer => "hammer",
|
||||
ItemCategory::Battleaxe => "battleaxe",
|
||||
ItemCategory::Morningstar => "morningstar",
|
||||
ItemCategory::Wand => "wand",
|
||||
ItemCategory::Staff => "staff",
|
||||
ItemCategory::Dagger => "dagger",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ItemCategory {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"none" => Ok(ItemCategory::None),
|
||||
"bone" => Ok(ItemCategory::Bone),
|
||||
"bow" => Ok(ItemCategory::Bow),
|
||||
"crossbow" => Ok(ItemCategory::Crossbow),
|
||||
"constructable" => Ok(ItemCategory::Constructable),
|
||||
"torch" => Ok(ItemCategory::Torch),
|
||||
"blacksmithhammer" => Ok(ItemCategory::Blacksmithhammer),
|
||||
"questitem" => Ok(ItemCategory::Questitem),
|
||||
"heavyarmor" => Ok(ItemCategory::HeavyArmor),
|
||||
"warhammer" => Ok(ItemCategory::Warhammer),
|
||||
"shield" => Ok(ItemCategory::Shield),
|
||||
"hatchet" => Ok(ItemCategory::Hatchet),
|
||||
"blade" => Ok(ItemCategory::Blade),
|
||||
"armor" => Ok(ItemCategory::Armor),
|
||||
"pickaxe" => Ok(ItemCategory::Pickaxe),
|
||||
"fish" => Ok(ItemCategory::Fish),
|
||||
"fishingrod" => Ok(ItemCategory::Fishingrod),
|
||||
"shears" => Ok(ItemCategory::Shears),
|
||||
"hammer" => Ok(ItemCategory::Hammer),
|
||||
"battleaxe" => Ok(ItemCategory::Battleaxe),
|
||||
"morningstar" => Ok(ItemCategory::Morningstar),
|
||||
"wand" => Ok(ItemCategory::Wand),
|
||||
"staff" => Ok(ItemCategory::Staff),
|
||||
"dagger" => Ok(ItemCategory::Dagger),
|
||||
_ => Ok(ItemCategory::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Tool kind an item can act as (`None` for non-tools); referenced by
/// `Item::tool`. Serialised in lowercase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Tool {
    None,
    Pickaxe,
    Hatchet,
    Scythe,
    Hammer,
    Shears,
    FishingRod,
}
|
||||
|
||||
impl FromStr for Tool {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"none" | "" => Ok(Tool::None),
|
||||
"pickaxe" => Ok(Tool::Pickaxe),
|
||||
"hatchet" => Ok(Tool::Hatchet),
|
||||
"scythe" => Ok(Tool::Scythe),
|
||||
"hammer" => Ok(Tool::Hammer),
|
||||
"shears" => Ok(Tool::Shears),
|
||||
"fishingrod" => Ok(Tool::FishingRod),
|
||||
_ => Ok(Tool::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Skill identifiers, used for item skill association (`Item::skill`),
/// XP boosts (`ItemXpBoost`) and crafting recipes (`CraftingRecipe::skill`).
/// Serialised in lowercase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SkillType {
    None,
    Swordsmanship,
    Archery,
    Magic,
    Defence,
    Mining,
    Woodcutting,
    Fishing,
    Cooking,
    Carpentry,
    Blacksmithy,
    Tailoring,
    Alchemy,
}
|
||||
|
||||
impl FromStr for SkillType {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"none" | "" => Ok(SkillType::None),
|
||||
"swordsmanship" => Ok(SkillType::Swordsmanship),
|
||||
"archery" => Ok(SkillType::Archery),
|
||||
"magic" => Ok(SkillType::Magic),
|
||||
"defence" => Ok(SkillType::Defence),
|
||||
"mining" => Ok(SkillType::Mining),
|
||||
"woodcutting" => Ok(SkillType::Woodcutting),
|
||||
"fishing" => Ok(SkillType::Fishing),
|
||||
"cooking" => Ok(SkillType::Cooking),
|
||||
"carpentry" => Ok(SkillType::Carpentry),
|
||||
"blacksmithy" => Ok(SkillType::Blacksmithy),
|
||||
"tailoring" => Ok(SkillType::Tailoring),
|
||||
"alchemy" => Ok(SkillType::Alchemy),
|
||||
_ => Ok(SkillType::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Stat identifiers carried by item bonuses (`Stat`, `PermanentStatBoost`).
///
/// NOTE(review): unlike the other enums in this file there is no
/// `#[serde(rename_all = "lowercase")]` here, so variants serialise in
/// CamelCase — confirm this asymmetry is intended.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum StatType {
    None,
    Health,
    Mana,
    HealthRegen,
    ManaRegen,
    DamagePhysical,
    DamageMagical,
    DamageRanged,
    AccuracyPhysical,
    AccuracyMagical,
    AccuracyRanged,
    ResistancePhysical,
    ResistanceMagical,
    ResistanceRanged,
    Critical,
    Healing,
    MovementSpeed,
    DamageVsBeasts,
    DamageVsUndead,
    CritterSlaying,
}
|
||||
|
||||
impl FromStr for StatType {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"health" => Ok(StatType::Health),
|
||||
"mana" => Ok(StatType::Mana),
|
||||
"healthregen" => Ok(StatType::HealthRegen),
|
||||
"manaregen" => Ok(StatType::ManaRegen),
|
||||
"damagephysical" => Ok(StatType::DamagePhysical),
|
||||
"damagemagical" => Ok(StatType::DamageMagical),
|
||||
"damageranged" => Ok(StatType::DamageRanged),
|
||||
"accuracyphysical" => Ok(StatType::AccuracyPhysical),
|
||||
"accuracymagical" => Ok(StatType::AccuracyMagical),
|
||||
"accuracyranged" => Ok(StatType::AccuracyRanged),
|
||||
"resistancephysical" => Ok(StatType::ResistancePhysical),
|
||||
"resistancemagical" => Ok(StatType::ResistanceMagical),
|
||||
"resistanceranged" => Ok(StatType::ResistanceRanged),
|
||||
"critical" => Ok(StatType::Critical),
|
||||
"healing" => Ok(StatType::Healing),
|
||||
"movementspeed" => Ok(StatType::MovementSpeed),
|
||||
"damagevsbeasts" => Ok(StatType::DamageVsBeasts),
|
||||
"damagevsundead" => Ok(StatType::DamageVsUndead),
|
||||
"critterslaying" => Ok(StatType::CritterSlaying),
|
||||
_ => Ok(StatType::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Nested Structs
|
||||
// ============================================================================
|
||||
|
||||
/// A single stat bonus carried by an item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Stat {
    pub stat_type: StatType,
    pub value: f32,
}

/// XP multiplier an item grants for one skill
/// (exact trigger — equipped vs. held — not visible here; check parser usage).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ItemXpBoost {
    pub skill_type: SkillType,
    pub multiplier: f32,
}

/// A flat stat increase marked "permanent"
/// (presumably applied once on use — confirm against consumers).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PermanentStatBoost {
    pub stat: StatType,
    pub amount: i32,
}

/// Conditional display name: `item_name` is the text shown when `checks`
/// pass. Also reused for conditional descriptions (see
/// `Item::custom_item_descriptions`, where `item_name` holds description text).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomItemName {
    pub checks: String,
    pub item_name: String,
}

/// One ingredient (item id + quantity) of a crafting recipe.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CraftingRecipeItem {
    pub item_id: i32,
    pub amount: i32,
}

/// A recipe producing item `product` at workbench `workbench_id`,
/// gated by `skill` at `level`, optionally behind `checks`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CraftingRecipe {
    pub product: i32,
    pub level: i32,
    pub skill: SkillType,
    pub workbench_id: i32,
    pub items: Vec<CraftingRecipeItem>,
    pub unlocked_by_default: bool,
    pub xp: i32,
    pub checks: Option<String>,
}
|
||||
|
||||
/// Per-item character animation ids.
/// `use_anim` is named to avoid clashing with the `use` keyword; all ids
/// default to 0 (see the `Default` impl below).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnimationSet {
    pub idle: i32,
    pub walk: i32,
    pub run: i32,
    pub takehit: i32,
    pub use_anim: i32,
    pub weapon_attack: i32,
}
|
||||
|
||||
impl Default for AnimationSet {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
idle: 0,
|
||||
walk: 0,
|
||||
run: 0,
|
||||
takehit: 0,
|
||||
use_anim: 0,
|
||||
weapon_attack: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parser directive controlling auto-generation for an item: a stat template
/// string and whether a crafting recipe should be generated.
/// NOTE(review): semantics inferred from field names — confirm against the
/// parser code that consumes this.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GenerateRule {
    pub generate_stats: Option<String>,
    pub generate_crafting: bool,
}
|
||||
|
||||
// ============================================================================
|
||||
// Main Item Struct
|
||||
// ============================================================================
|
||||
|
||||
/// A fully-parsed game item definition (weapons, armour, resources, ...).
///
/// Defaults for every field are established in [`Item::new`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Item {
    // Core identification
    pub type_id: i32,
    pub item_name: String,

    // Item classification
    pub item_type: ItemType,
    pub item_categories: Vec<ItemCategory>,
    pub level: i32,

    // Flags
    pub undroppable: bool,
    pub undroppable_on_death: bool,
    pub two_handed: bool,
    pub unequip_destroy: bool,
    pub generate_icon: bool,
    pub hide_milestone: bool,
    pub cannot_craft_exceptional: bool,
    pub storage_all_items: bool,

    // Visual/UI
    pub comment: String,
    pub description: String,
    pub effect_string: String,
    pub use_text: String,

    // Models and resources
    pub using_item_model: String,
    pub handmodel: String,
    pub ground_model: i32,
    pub copy_model: i32,

    // Audio
    pub drop_sfx: i32,
    pub pickup_sfx: i32,

    // Stacking and storage
    pub max_stack: i32,
    pub storage_items: Vec<i32>,
    pub storage_size: i32,

    // Abilities and skills
    pub ability_id: i32,
    pub special_ability: i32,
    pub learn_ability_id: i32,
    pub skill: SkillType,
    pub tool: Tool,

    // Economy
    pub price: i32,
    // Parser-internal flag, excluded from (de)serialisation.
    // NOTE(review): presumably "price was explicitly set" — confirm.
    #[serde(skip)]
    pub setup_price: bool,

    // Food properties
    pub food_level: i32,
    pub food_time: i32,
    pub food_frequency: i32,
    pub food_amount: i32,

    // Crafting
    pub crafting_recipes: Vec<CraftingRecipe>,
    pub has_crafting: bool,

    // Stats and bonuses
    pub stats: Vec<Stat>,
    pub item_xp_boosts: Vec<ItemXpBoost>,
    pub permanent_stat_boosts: Vec<PermanentStatBoost>,

    // Other
    pub book_id: i32,
    pub swap_item: i32,
    pub visibility_xml_checks: Vec<String>,
    pub custom_item_names: Vec<CustomItemName>,
    // Reuses CustomItemName: `item_name` holds the description text here.
    pub custom_item_descriptions: Vec<CustomItemName>,

    // Animation IDs
    pub animations: Option<AnimationSet>,
}
|
||||
|
||||
impl Item {
    /// Construct an item with parser defaults: a level-1 `Resource` with
    /// category `None`, `max_stack` 1, and everything else empty/zero/false.
    pub fn new(type_id: i32, name: String) -> Self {
        Self {
            type_id,
            item_name: name,
            item_type: ItemType::Resource,
            item_categories: vec![ItemCategory::None],
            level: 1,
            undroppable: false,
            undroppable_on_death: false,
            two_handed: false,
            unequip_destroy: false,
            generate_icon: false,
            hide_milestone: false,
            cannot_craft_exceptional: false,
            storage_all_items: false,
            comment: String::new(),
            description: String::new(),
            effect_string: String::new(),
            use_text: String::new(),
            using_item_model: String::new(),
            handmodel: String::new(),
            ground_model: 0,
            copy_model: 0,
            drop_sfx: 0,
            pickup_sfx: 0,
            max_stack: 1,
            storage_items: Vec::new(),
            storage_size: 0,
            ability_id: 0,
            special_ability: 0,
            learn_ability_id: 0,
            skill: SkillType::None,
            tool: Tool::None,
            price: 0,
            setup_price: false,
            food_level: 0,
            food_time: 0,
            food_frequency: 0,
            food_amount: 0,
            crafting_recipes: Vec::new(),
            has_crafting: false,
            stats: Vec::new(),
            item_xp_boosts: Vec::new(),
            permanent_stat_boosts: Vec::new(),
            book_id: 0,
            swap_item: 0,
            visibility_xml_checks: Vec::new(),
            custom_item_names: Vec::new(),
            custom_item_descriptions: Vec::new(),
            animations: None,
        }
    }

    /// Whether this item can be equipped (delegates to `ItemType`).
    pub fn is_equippable(&self) -> bool {
        self.item_type.is_equippable()
    }

    /// True if the item carries the given category tag.
    pub fn has_category(&self, category: ItemCategory) -> bool {
        self.item_categories.contains(&category)
    }

    /// Borrow the item's display name.
    pub fn get_item_name(&self) -> &str {
        &self.item_name
    }

    /// True when more than one of this item fits in a single stack.
    pub fn is_stackable(&self) -> bool {
        self.max_stack > 1
    }

    /// True when the item acts as a container (fixed size or "holds all").
    pub fn is_storage_item(&self) -> bool {
        self.storage_size > 0 || self.storage_all_items
    }

    /// Get the equip sound ID based on item type and category
    ///
    /// Category checks take precedence over item type; 0 means "no sound".
    pub fn get_equip_sound(&self) -> i32 {
        if self.has_category(ItemCategory::Blade) {
            return 845;
        }
        if self.has_category(ItemCategory::HeavyArmor) || self.has_category(ItemCategory::Armor) {
            // Pick random from 846-851
            // NOTE(review): randomisation is not implemented — this always
            // returns 846. Confirm whether that is acceptable.
            return 846;
        }

        match self.item_type {
            ItemType::Weapon => 46,
            ItemType::Shield => 47,
            ItemType::Head => 46,
            ItemType::Armor => 46,
            ItemType::Trinket | ItemType::Bracelet => 479,
            _ => 0,
        }
    }
}
|
||||
|
||||
// ============================================================================
|
||||
// Legacy compatibility structs (for existing parser)
|
||||
// ============================================================================
|
||||
|
||||
/// Legacy ItemStat struct for backwards compatibility with existing XML parser
///
/// One optional field per XML stat attribute (names mirror the lowercase XML
/// keys). Convert to the new representation with `to_stats`.
/// NOTE(review): `harvestingspeedwoodcutting` has no `StatType` counterpart
/// and is not carried over by `to_stats` — confirm that is intended.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ItemStat {
    pub damagephysical: Option<i32>,
    pub damagemagical: Option<i32>,
    pub damageranged: Option<i32>,
    pub accuracyphysical: Option<i32>,
    pub accuracymagical: Option<i32>,
    pub accuracyranged: Option<i32>,
    pub resistancephysical: Option<i32>,
    pub resistancemagical: Option<i32>,
    pub resistanceranged: Option<i32>,
    pub health: Option<i32>,
    pub mana: Option<i32>,
    pub manaregen: Option<i32>,
    pub healing: Option<i32>,
    pub harvestingspeedwoodcutting: Option<i32>,
}
|
||||
|
||||
impl ItemStat {
|
||||
/// Convert legacy ItemStat to new Stat vec
|
||||
pub fn to_stats(&self) -> Vec<Stat> {
|
||||
let mut stats = Vec::new();
|
||||
|
||||
if let Some(v) = self.damagephysical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::DamagePhysical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.damagemagical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::DamageMagical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.damageranged {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::DamageRanged,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.accuracyphysical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::AccuracyPhysical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.accuracymagical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::AccuracyMagical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.accuracyranged {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::AccuracyRanged,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.resistancephysical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::ResistancePhysical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.resistancemagical {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::ResistanceMagical,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.resistanceranged {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::ResistanceRanged,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.health {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::Health,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.mana {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::Mana,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.manaregen {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::ManaRegen,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
if let Some(v) = self.healing {
|
||||
stats.push(Stat {
|
||||
stat_type: StatType::Healing,
|
||||
value: v as f32,
|
||||
});
|
||||
}
|
||||
|
||||
stats
|
||||
}
|
||||
}
|
||||
76
cursebreaker-parser/src/types/cursebreaker/loot.rs
Normal file
76
cursebreaker-parser/src/types/cursebreaker/loot.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A loot table shared by one or more NPCs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LootTable {
    // NPC IDs this table applies to (can be multiple, comma-separated in XML)
    pub npc_ids: Vec<i32>,

    // Optional name/description of the loot table
    pub name: Option<String>,

    // List of possible drops
    pub drops: Vec<LootDrop>,
}
|
||||
|
||||
/// One possible drop entry in a loot table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LootDrop {
    // Item ID that can drop
    pub item: i32,

    // Drop rate (higher = rarer, e.g., rate=100 means very rare).
    // rate=1 is treated as a guaranteed drop by `LootTable::get_guaranteed_drops`.
    pub rate: Option<i32>,

    // Amount range; both bounds default to 1 when absent (see `average_amount`)
    pub minamount: Option<i32>,
    pub maxamount: Option<i32>,

    // Optional requirements/checks
    pub checks: Option<String>,

    // Optional comment/description
    pub comment: Option<String>,
}
|
||||
|
||||
impl LootTable {
|
||||
pub fn new(npc_ids: Vec<i32>) -> Self {
|
||||
Self {
|
||||
npc_ids,
|
||||
name: None,
|
||||
drops: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this loot table applies to a given NPC ID
|
||||
pub fn applies_to_npc(&self, npc_id: i32) -> bool {
|
||||
self.npc_ids.contains(&npc_id)
|
||||
}
|
||||
|
||||
/// Get all item IDs that can drop from this table
|
||||
pub fn get_drop_item_ids(&self) -> Vec<i32> {
|
||||
self.drops.iter().map(|d| d.item).collect()
|
||||
}
|
||||
|
||||
/// Get drops that have conditional checks
|
||||
pub fn get_conditional_drops(&self) -> Vec<&LootDrop> {
|
||||
self.drops.iter().filter(|d| d.checks.is_some()).collect()
|
||||
}
|
||||
|
||||
/// Get guaranteed drops (rate = 1)
|
||||
pub fn get_guaranteed_drops(&self) -> Vec<&LootDrop> {
|
||||
self.drops.iter().filter(|d| d.rate == Some(1)).collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl LootDrop {
|
||||
/// Check if this drop has requirements
|
||||
pub fn has_requirements(&self) -> bool {
|
||||
self.checks.is_some()
|
||||
}
|
||||
|
||||
/// Get the average drop amount
|
||||
pub fn average_amount(&self) -> f32 {
|
||||
let min = self.minamount.unwrap_or(1) as f32;
|
||||
let max = self.maxamount.unwrap_or(1) as f32;
|
||||
(min + max) / 2.0
|
||||
}
|
||||
}
|
||||
215
cursebreaker-parser/src/types/cursebreaker/map.rs
Normal file
215
cursebreaker-parser/src/types/cursebreaker/map.rs
Normal file
@@ -0,0 +1,215 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents a single map/scene in the game world
///
/// Construct via [`Map::new`]; color strings are parsed on demand by the
/// `get_*_color_rgb` helpers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Map {
    /// Scene ID in "x,y" format (e.g., "0,0", "3,10"); parsed by `get_coordinates`
    pub scene_id: String,

    /// Display name of the map (can be empty)
    pub name: String,

    /// Music track ID
    pub music: i32,

    /// Ambient sound ID
    pub ambience: i32,

    // ========== Lighting & Atmosphere ==========
    /// Fog color in "r,g,b" format (default from comment: "63,98,106")
    pub fog_color: Option<String>,

    /// Fogginess/fog density (default: 0.6)
    pub fogginess: Option<f32>,

    /// View distance (default: 90)
    pub view_distance: Option<i32>,

    /// NPC view distance (default: 50)
    pub npc_view_distance: Option<i32>,

    /// Sunlight intensity (default: 1)
    pub sunlight: Option<f32>,

    /// Sun color in "r,g,b" format (default: "255,251,230")
    pub sun_color: Option<String>,

    /// Ambient color in "r,g,b" format (default: "128,128,128")
    pub ambient_color: Option<String>,

    /// Indoor sunlight level (default: 0.2)
    pub indoor_sunlight: Option<f32>,

    /// Fog start distance
    pub fog_start: Option<f32>,

    // ========== Map Properties ==========
    /// Whether this is an indoor map
    pub indoors: bool,

    /// Whether to hide this map from the world map
    pub no_world_map: bool,

    /// Whether to hide the minimap
    pub no_minimap: bool,

    /// Whether teleportation is disabled
    pub tp_disabled: bool,

    /// Whether to prevent loading nearby scenes
    pub dont_load_nearby_scenes: bool,

    /// Remove all borders (overrides the individual border_* flags below)
    pub no_border: bool,

    /// Block movement at left edge
    pub border_left: bool,

    /// Block movement at right edge
    pub border_right: bool,

    /// Block movement at top edge
    pub border_up: bool,

    /// Block movement at bottom edge
    pub border_down: bool,

    // ========== Connectivity ==========
    /// Scene ID to respawn at (e.g., "3,10")
    pub respawn_map: Option<String>,

    /// Connected maps in "x-y,x-y" format (e.g., "5-13,5-14");
    /// normalised by `get_connected_map_ids`
    pub connected_maps: Option<String>,

    // ========== Metadata ==========
    /// Developer comment/note
    pub comment: Option<String>,
}
|
||||
|
||||
impl Map {
|
||||
/// Create a new Map with required fields
|
||||
pub fn new(scene_id: String, music: i32, ambience: i32) -> Self {
|
||||
Self {
|
||||
scene_id,
|
||||
name: String::new(),
|
||||
music,
|
||||
ambience,
|
||||
fog_color: None,
|
||||
fogginess: None,
|
||||
view_distance: None,
|
||||
npc_view_distance: None,
|
||||
sunlight: None,
|
||||
sun_color: None,
|
||||
ambient_color: None,
|
||||
indoor_sunlight: None,
|
||||
fog_start: None,
|
||||
indoors: false,
|
||||
no_world_map: false,
|
||||
no_minimap: false,
|
||||
tp_disabled: false,
|
||||
dont_load_nearby_scenes: false,
|
||||
no_border: false,
|
||||
border_left: false,
|
||||
border_right: false,
|
||||
border_up: false,
|
||||
border_down: false,
|
||||
respawn_map: None,
|
||||
connected_maps: None,
|
||||
comment: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse scene ID into (x, y) coordinates
|
||||
pub fn get_coordinates(&self) -> Option<(i32, i32)> {
|
||||
let parts: Vec<&str> = self.scene_id.split(',').collect();
|
||||
if parts.len() == 2 {
|
||||
if let (Ok(x), Ok(y)) = (parts[0].parse::<i32>(), parts[1].parse::<i32>()) {
|
||||
return Some((x, y));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Check if the map is named (has a non-empty name)
|
||||
pub fn is_named(&self) -> bool {
|
||||
!self.name.is_empty()
|
||||
}
|
||||
|
||||
/// Check if the map is an indoor location
|
||||
pub fn is_indoor(&self) -> bool {
|
||||
self.indoors
|
||||
}
|
||||
|
||||
/// Check if the map has any border restrictions
|
||||
pub fn has_borders(&self) -> bool {
|
||||
!self.no_border && (self.border_left || self.border_right || self.border_up || self.border_down)
|
||||
}
|
||||
|
||||
/// Get list of connected map scene IDs
|
||||
pub fn get_connected_map_ids(&self) -> Vec<String> {
|
||||
if let Some(ref connected) = self.connected_maps {
|
||||
connected
|
||||
.split(',')
|
||||
.map(|s| s.trim().replace('-', ","))
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this map is isolated (doesn't load nearby scenes)
|
||||
pub fn is_isolated(&self) -> bool {
|
||||
self.dont_load_nearby_scenes
|
||||
}
|
||||
|
||||
/// Parse fog color into RGB values
|
||||
pub fn get_fog_color_rgb(&self) -> Option<(u8, u8, u8)> {
|
||||
self.fog_color.as_ref().and_then(|color| {
|
||||
let parts: Vec<&str> = color.split(',').collect();
|
||||
if parts.len() == 3 {
|
||||
if let (Ok(r), Ok(g), Ok(b)) = (
|
||||
parts[0].parse::<u8>(),
|
||||
parts[1].parse::<u8>(),
|
||||
parts[2].parse::<u8>(),
|
||||
) {
|
||||
return Some((r, g, b));
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse sun color into RGB values
|
||||
pub fn get_sun_color_rgb(&self) -> Option<(u8, u8, u8)> {
|
||||
self.sun_color.as_ref().and_then(|color| {
|
||||
let parts: Vec<&str> = color.split(',').collect();
|
||||
if parts.len() == 3 {
|
||||
if let (Ok(r), Ok(g), Ok(b)) = (
|
||||
parts[0].parse::<u8>(),
|
||||
parts[1].parse::<u8>(),
|
||||
parts[2].parse::<u8>(),
|
||||
) {
|
||||
return Some((r, g, b));
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse ambient color into RGB values
|
||||
pub fn get_ambient_color_rgb(&self) -> Option<(u8, u8, u8)> {
|
||||
self.ambient_color.as_ref().and_then(|color| {
|
||||
let parts: Vec<&str> = color.split(',').collect();
|
||||
if parts.len() == 3 {
|
||||
if let (Ok(r), Ok(g), Ok(b)) = (
|
||||
parts[0].parse::<u8>(),
|
||||
parts[1].parse::<u8>(),
|
||||
parts[2].parse::<u8>(),
|
||||
) {
|
||||
return Some((r, g, b));
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
35
cursebreaker-parser/src/types/cursebreaker/minimap_models.rs
Normal file
35
cursebreaker-parser/src/types/cursebreaker/minimap_models.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
use diesel::prelude::*;
|
||||
use crate::schema::minimap_tiles;
|
||||
|
||||
/// Diesel queryable model (for SELECT queries)
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = minimap_tiles)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct MinimapTileRecord {
    // Nullable primary key column (hence Option).
    pub id: Option<i32>,
    pub x: i32,
    pub y: i32,
    pub zoom: i32,
    pub width: i32,
    pub height: i32,
    pub original_file_size: Option<i32>,
    // Encoded tile image bytes (BLOB).
    pub image: Vec<u8>,
    pub image_size: i32,
    pub processed_at: String, // SQLite TIMESTAMP as String
    pub source_path: String,
}
|
||||
|
||||
/// Diesel insertable model (for INSERT queries)
///
/// Omits `id` and `processed_at`, which are not supplied on insert —
/// presumably filled by the database (autoincrement / default timestamp);
/// confirm against the migration schema.
#[derive(Insertable, Debug)]
#[diesel(table_name = minimap_tiles)]
pub struct NewMinimapTile<'a> {
    pub x: i32,
    pub y: i32,
    pub zoom: i32,
    pub width: i32,
    pub height: i32,
    pub original_file_size: Option<i32>,
    pub image: &'a [u8],
    pub image_size: i32,
    pub source_path: &'a str,
}
|
||||
61
cursebreaker-parser/src/types/cursebreaker/minimap_tile.rs
Normal file
61
cursebreaker-parser/src/types/cursebreaker/minimap_tile.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents a single minimap tile with multi-resolution WebP data
///
/// Blobs start empty in [`MinimapTile::new`]; `webp_512` doubles as the
/// "has been processed" marker (see `is_processed`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MinimapTile {
    /// X coordinate from filename
    pub x: i32,

    /// Y coordinate from filename
    pub y: i32,

    /// Original dimensions
    pub original_width: i32,
    pub original_height: i32,

    /// Source file path
    pub source_path: String,

    /// WebP blob at 512x512
    #[serde(skip)] // Skip serialization for binary data
    pub webp_512: Vec<u8>,

    /// WebP blob at 256x256
    #[serde(skip)]
    pub webp_256: Vec<u8>,

    /// WebP blob at 128x128
    #[serde(skip)]
    pub webp_128: Vec<u8>,

    /// WebP blob at 64x64
    #[serde(skip)]
    pub webp_64: Vec<u8>,
}
|
||||
|
||||
impl MinimapTile {
|
||||
/// Create new tile from coordinates and source path
|
||||
pub fn new(x: i32, y: i32, source_path: String) -> Self {
|
||||
Self {
|
||||
x,
|
||||
y,
|
||||
original_width: 512,
|
||||
original_height: 512,
|
||||
source_path,
|
||||
webp_512: Vec::new(),
|
||||
webp_256: Vec::new(),
|
||||
webp_128: Vec::new(),
|
||||
webp_64: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get total size of all WebP blobs
|
||||
pub fn total_webp_size(&self) -> usize {
|
||||
self.webp_512.len() + self.webp_256.len() + self.webp_128.len() + self.webp_64.len()
|
||||
}
|
||||
|
||||
/// Check if tile has been processed (has WebP data)
|
||||
pub fn is_processed(&self) -> bool {
|
||||
!self.webp_512.is_empty()
|
||||
}
|
||||
}
|
||||
63
cursebreaker-parser/src/types/cursebreaker/mod.rs
Normal file
63
cursebreaker-parser/src/types/cursebreaker/mod.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
// Sub-modules, one per parsed data domain.
mod item;
mod npc;
mod quest;
mod harvestable;
mod loot;
mod map;
mod fast_travel;
mod player_house;
mod r#trait; // `trait` is a keyword, hence the raw identifier
mod shop;
mod minimap_tile;
mod minimap_models;
mod icon_models;

// Flat re-exports so callers can use e.g. `cursebreaker::Item` directly.
pub use item::{
    // Main types
    Item,
    ItemStat,
    CraftingRecipe,
    CraftingRecipeItem,
    AnimationSet,
    GenerateRule,
    // Enums
    ItemType,
    ItemCategory,
    Tool,
    SkillType,
    StatType,
    // Nested structs
    Stat,
    ItemXpBoost,
    PermanentStatBoost,
    CustomItemName,
    // Constants
    MAX_STACK,
};
pub use npc::{Npc, NpcStat, NpcLevel, RightClick, BarkGroup, Bark, QuestMarker, NpcAnimationSet};
pub use quest::{Quest, QuestPhase, QuestReward};
pub use harvestable::{Harvestable, HarvestableDrop};
pub use loot::{LootTable, LootDrop};
pub use map::Map;
pub use fast_travel::{FastTravelLocation, FastTravelType};
pub use player_house::PlayerHouse;
pub use r#trait::{Trait, TraitTrainer};
pub use shop::{Shop, ShopItem};
pub use minimap_tile::MinimapTile;
pub use minimap_models::{MinimapTileRecord, NewMinimapTile};
pub use icon_models::{
    AbilityIconRecord,
    NewAbilityIcon,
    BuffIconRecord,
    NewBuffIcon,
    TraitIconRecord,
    NewTraitIcon,
    PlayerHouseIconRecord,
    NewPlayerHouseIcon,
    StatIconRecord,
    NewStatIcon,
    AchievementIconRecord,
    NewAchievementIcon,
    GeneralIconRecord,
    NewGeneralIcon,
};
|
||||
226
cursebreaker-parser/src/types/cursebreaker/npc.rs
Normal file
226
cursebreaker-parser/src/types/cursebreaker/npc.rs
Normal file
@@ -0,0 +1,226 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A non-player character definition.
///
/// Only `id` and `name` are required; every other attribute is optional.
/// NOTE(review): many of the `Option<i32>` fields (`canfight`, `aggressive`,
/// `wandering`, `interactable`, ...) look like 0/1 flags in the source data —
/// confirm against the parser before treating them as booleans.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Npc {
    // Required fields
    pub id: i32,
    pub name: String,

    // Basic attributes
    pub tags: Option<String>,
    pub level: Option<i32>,
    pub description: Option<String>,
    pub comment: Option<String>,
    pub model: Option<String>,

    // Combat attributes
    pub canfight: Option<i32>,
    pub aggressive: Option<i32>,
    pub team: Option<i32>,
    pub aggrodistance: Option<i32>,
    pub respawntime: Option<i32>,
    pub health: Option<i32>,
    pub mana: Option<i32>,
    pub accuracy: Option<i32>,
    pub damagetype: Option<i32>,
    pub damageblock: Option<i32>,
    pub ability: Option<i32>,

    // Attack attributes
    pub attackdistance: Option<i32>,
    pub attackspeed: Option<i32>,
    pub attackdelay: Option<i32>,
    pub gfxattack: Option<String>,

    // Projectile attributes
    pub projectile: Option<i32>,
    pub projectilerate: Option<i32>,
    pub projectileendgfx: Option<String>,
    pub projectileattackdistance: Option<i32>,

    // Movement
    pub movementspeed: Option<i32>,
    pub walkspeed: Option<i32>,
    pub wandering: Option<i32>,
    pub wanderingdistance: Option<i32>,

    // AI behavior
    pub aibehaviour: Option<i32>,
    pub nobestiary: Option<i32>,

    // Interaction
    pub interactable: Option<i32>,
    pub interactdistance: Option<i32>,
    pub dontrotateoninteract: Option<i32>,
    pub shop: Option<i32>,

    // Sound effects
    pub sfxattack: Option<String>,
    pub sfxdeath: Option<String>,
    pub sfxtakehit: Option<String>,
    pub sfxidle: Option<String>,
    pub idlesoundtext: Option<String>,

    // Animations (individual clip names)
    pub anim_attack: Option<String>,
    pub anim_death: Option<String>,
    pub anim_idle: Option<String>,
    pub anim_run: Option<String>,
    pub anim_walk: Option<String>,
    pub anim_takehit: Option<String>,
    pub startanim: Option<String>,

    // Nested elements
    pub stats: Vec<NpcStat>,
    pub levels: Vec<NpcLevel>,
    pub rightclick: Option<RightClick>,
    pub barks: Vec<BarkGroup>,
    pub exitdialoguebarks: Vec<BarkGroup>,
    pub questmarkers: Vec<QuestMarker>,
    pub animations: Option<NpcAnimationSet>,
}
|
||||
|
||||
/// Detailed combat stats for an NPC; every field is optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NpcStat {
    // Damage
    pub damagephysical: Option<i32>,
    pub damagemagical: Option<i32>,
    pub damageranged: Option<i32>,

    // Accuracy
    pub accuracyphysical: Option<i32>,
    pub accuracymagical: Option<i32>,
    pub accuracyranged: Option<i32>,

    // Resistance
    pub resistancephysical: Option<i32>,
    pub resistancemagical: Option<i32>,
    pub resistanceranged: Option<i32>,

    // Core stats
    pub health: Option<i32>,
    pub mana: Option<i32>,
    pub manaregen: Option<i32>,
    pub healing: Option<i32>,
}

/// Per-skill levels for an NPC; every field is optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NpcLevel {
    pub swordsmanship: Option<i32>,
    pub archery: Option<i32>,
    pub magic: Option<i32>,
    pub defence: Option<i32>,
    pub mining: Option<i32>,
    pub woodcutting: Option<i32>,
    pub fishing: Option<i32>,
    pub cooking: Option<i32>,
    pub carpentry: Option<i32>,
    pub blacksmithy: Option<i32>,
    pub tailoring: Option<i32>,
    pub alchemy: Option<i32>,
}

/// Right-click menu option label for an interactable NPC.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RightClick {
    pub option: String,
}

/// A group of bark lines with shared trigger settings
/// (cooldown/rate/range and optional checks/NPC filters).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BarkGroup {
    pub cooldown: Option<i32>,
    pub rate: Option<i32>,
    pub range: Option<i32>,
    pub checks: Option<String>,
    pub npcs: Option<String>,
    pub barks: Vec<Bark>,
}

/// A single bark line: the spoken text plus optional timing,
/// animation, and rotation-suppression settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Bark {
    pub text: String,
    pub pausetime: Option<i32>,
    pub rate: Option<i32>,
    pub anim: Option<String>,
    pub npc: Option<String>,
    pub dontrotate: Option<i32>,
    pub dontrotateothers: Option<i32>,
}

/// Ties an NPC to a quest phase (quest `id` + `phase`), optionally
/// gated by a raw checks expression.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuestMarker {
    pub id: i32,
    pub phase: i32,
    pub checks: Option<String>,
}

/// Named animation clips for the NPC model; each entry is optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NpcAnimationSet {
    pub idle: Option<String>,
    pub walk: Option<String>,
    pub run: Option<String>,
    pub attack: Option<String>,
    pub death: Option<String>,
    pub talk: Option<String>,
}
|
||||
|
||||
impl Npc {
    /// Create an `Npc` with the required `id` and `name`.
    ///
    /// Every optional attribute starts as `None` and every nested
    /// collection starts empty; delegating to `Default` keeps the long
    /// field list in exactly one place.
    pub fn new(id: i32, name: String) -> Self {
        Self {
            id,
            name,
            ..Self::default()
        }
    }
}

/// A fully-empty NPC (`id` 0, empty `name`, everything else unset).
///
/// Having `Default` is the idiomatic way to carry the ~55 field
/// initializers; `Npc::new` overrides just the required fields.
impl Default for Npc {
    fn default() -> Self {
        Self {
            id: 0,
            name: String::new(),
            tags: None,
            level: None,
            description: None,
            comment: None,
            model: None,
            canfight: None,
            aggressive: None,
            team: None,
            aggrodistance: None,
            respawntime: None,
            health: None,
            mana: None,
            accuracy: None,
            damagetype: None,
            damageblock: None,
            ability: None,
            attackdistance: None,
            attackspeed: None,
            attackdelay: None,
            gfxattack: None,
            projectile: None,
            projectilerate: None,
            projectileendgfx: None,
            projectileattackdistance: None,
            movementspeed: None,
            walkspeed: None,
            wandering: None,
            wanderingdistance: None,
            aibehaviour: None,
            nobestiary: None,
            interactable: None,
            interactdistance: None,
            dontrotateoninteract: None,
            shop: None,
            sfxattack: None,
            sfxdeath: None,
            sfxtakehit: None,
            sfxidle: None,
            idlesoundtext: None,
            anim_attack: None,
            anim_death: None,
            anim_idle: None,
            anim_run: None,
            anim_walk: None,
            anim_takehit: None,
            startanim: None,
            stats: Vec::new(),
            levels: Vec::new(),
            rightclick: None,
            barks: Vec::new(),
            exitdialoguebarks: Vec::new(),
            questmarkers: Vec::new(),
            animations: None,
        }
    }
}
|
||||
70
cursebreaker-parser/src/types/cursebreaker/player_house.rs
Normal file
70
cursebreaker-parser/src/types/cursebreaker/player_house.rs
Normal file
@@ -0,0 +1,70 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents a player house that can be purchased
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlayerHouse {
    /// Unique ID
    pub id: i32,

    /// Display name
    pub name: String,

    /// Description text
    pub description: String,

    /// X position in world space
    pub pos_x: f32,

    /// Z position in world space (houses are located on the X/Z plane;
    /// no Y coordinate is stored)
    pub pos_z: f32,

    /// Purchase price in gold; 0 means the house is free (see `is_free`)
    pub price: i32,
}
|
||||
|
||||
impl PlayerHouse {
|
||||
/// Create a new PlayerHouse with required fields
|
||||
pub fn new(id: i32, name: String, description: String, pos_x: f32, pos_z: f32, price: i32) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
pos_x,
|
||||
pos_z,
|
||||
price,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get position as (x, z) tuple
|
||||
pub fn get_position(&self) -> (f32, f32) {
|
||||
(self.pos_x, self.pos_z)
|
||||
}
|
||||
|
||||
/// Check if this house is free (price is 0)
|
||||
pub fn is_free(&self) -> bool {
|
||||
self.price == 0
|
||||
}
|
||||
|
||||
/// Check if this house is expensive (price >= 10000)
|
||||
pub fn is_expensive(&self) -> bool {
|
||||
self.price >= 10000
|
||||
}
|
||||
|
||||
/// Check if this house is affordable (price < 5000)
|
||||
pub fn is_affordable(&self) -> bool {
|
||||
self.price < 5000
|
||||
}
|
||||
|
||||
/// Get price tier (0: free, 1: cheap (<5k), 2: moderate (5k-10k), 3: expensive (10k+))
|
||||
pub fn get_price_tier(&self) -> u8 {
|
||||
if self.price == 0 {
|
||||
0
|
||||
} else if self.price < 5000 {
|
||||
1
|
||||
} else if self.price < 10000 {
|
||||
2
|
||||
} else {
|
||||
3
|
||||
}
|
||||
}
|
||||
}
|
||||
77
cursebreaker-parser/src/types/cursebreaker/quest.rs
Normal file
77
cursebreaker-parser/src/types/cursebreaker/quest.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A quest definition: required `id`/`name`, optional presentation
/// attributes, its phases, and completion rewards.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Quest {
    // Required fields
    pub id: i32,
    pub name: String,

    // Optional attributes
    /// Flag; `is_main_quest` treats `Some(1)` as true
    pub mainquest: Option<i32>,
    /// Flag; `is_hidden` treats `Some(1)` as true
    pub hidden: Option<i32>,
    pub questdescription: Option<String>,
    pub completiontext: Option<String>,
    pub dontshowcompletionscreen: Option<i32>,
    /// Developer comment
    pub comment: Option<String>,

    // Nested elements
    pub phases: Vec<QuestPhase>,
    pub rewards: Vec<QuestReward>,
}
|
||||
|
||||
/// A single phase (step) of a quest, looked up by `id` via `Quest::get_phase`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuestPhase {
    pub id: i32,
    pub trackerdescription: Option<String>,
    pub description: Option<String>,
    // Helper-arrow target: an NPC reference or an explicit position string.
    pub helperarrownpc: Option<String>,
    pub helperarrowpos: Option<String>,
    /// Raw checks expression; semantics live in the game scripts (not visible here)
    pub checks: Option<String>,
}

/// A quest completion reward.
/// NOTE(review): field names suggest either an item (`item` + `amount`) or
/// skill XP (`skill` + `xp`) per entry — confirm against the parser.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuestReward {
    pub item: Option<i32>,
    pub skill: Option<String>,
    pub amount: Option<i32>,
    pub xp: Option<i32>,
    pub checks: Option<String>,
    pub comment: Option<String>,
}
|
||||
|
||||
impl Quest {
|
||||
pub fn new(id: i32, name: String) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name,
|
||||
mainquest: None,
|
||||
hidden: None,
|
||||
questdescription: None,
|
||||
completiontext: None,
|
||||
dontshowcompletionscreen: None,
|
||||
comment: None,
|
||||
phases: Vec::new(),
|
||||
rewards: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this is a main quest
|
||||
pub fn is_main_quest(&self) -> bool {
|
||||
self.mainquest == Some(1)
|
||||
}
|
||||
|
||||
/// Check if this quest is hidden
|
||||
pub fn is_hidden(&self) -> bool {
|
||||
self.hidden == Some(1)
|
||||
}
|
||||
|
||||
/// Get the number of phases in this quest
|
||||
pub fn phase_count(&self) -> usize {
|
||||
self.phases.len()
|
||||
}
|
||||
|
||||
/// Get a specific phase by ID
|
||||
pub fn get_phase(&self, phase_id: i32) -> Option<&QuestPhase> {
|
||||
self.phases.iter().find(|p| p.id == phase_id)
|
||||
}
|
||||
}
|
||||
155
cursebreaker-parser/src/types/cursebreaker/shop.rs
Normal file
155
cursebreaker-parser/src/types/cursebreaker/shop.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents an item sold in a shop
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShopItem {
    /// Item ID (can be numeric or string reference; see `get_item_id_as_int`)
    pub item_id: String,

    /// Optional item name
    pub name: Option<String>,

    /// Custom price (if different from item's default price)
    pub price: Option<i32>,

    /// Maximum stock (items available before restocking);
    /// `None` or `Some(0)` means unlimited (see `has_unlimited_stock`)
    pub max_stock: Option<i32>,

    /// Restock time in seconds
    pub restock_time: Option<i32>,

    /// Buy price (price shop pays for the item)
    pub buy_price: Option<i32>,

    /// Developer comment
    pub comment: Option<String>,
}
|
||||
|
||||
impl ShopItem {
|
||||
/// Create a new ShopItem with required fields
|
||||
pub fn new(item_id: String) -> Self {
|
||||
Self {
|
||||
item_id,
|
||||
name: None,
|
||||
price: None,
|
||||
max_stock: None,
|
||||
restock_time: None,
|
||||
buy_price: None,
|
||||
comment: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to parse item_id as an integer
|
||||
pub fn get_item_id_as_int(&self) -> Option<i32> {
|
||||
self.item_id.parse().ok()
|
||||
}
|
||||
|
||||
/// Check if this item has unlimited stock
|
||||
pub fn has_unlimited_stock(&self) -> bool {
|
||||
self.max_stock.is_none() || self.max_stock == Some(0)
|
||||
}
|
||||
|
||||
/// Check if this item has custom pricing
|
||||
pub fn has_custom_price(&self) -> bool {
|
||||
self.price.is_some()
|
||||
}
|
||||
|
||||
/// Check if shop buys this item (has buy price)
|
||||
pub fn is_buyable_by_shop(&self) -> bool {
|
||||
self.buy_price.is_some()
|
||||
}
|
||||
|
||||
/// Get restock time in minutes
|
||||
pub fn get_restock_minutes(&self) -> Option<f32> {
|
||||
self.restock_time.map(|seconds| seconds as f32 / 60.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a shop
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Shop {
    /// Shop ID
    pub shop_id: i32,

    /// Shop name
    pub name: String,

    /// Whether this is a general store
    pub is_general_store: bool,

    /// Developer comment
    pub comment: Option<String>,

    /// Items sold in this shop
    pub items: Vec<ShopItem>,
}
|
||||
|
||||
impl Shop {
|
||||
/// Create a new Shop with required fields
|
||||
pub fn new(shop_id: i32, name: String) -> Self {
|
||||
Self {
|
||||
shop_id,
|
||||
name,
|
||||
is_general_store: false,
|
||||
comment: None,
|
||||
items: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add an item to the shop
|
||||
pub fn add_item(&mut self, item: ShopItem) {
|
||||
self.items.push(item);
|
||||
}
|
||||
|
||||
/// Get number of items in shop
|
||||
pub fn item_count(&self) -> usize {
|
||||
self.items.len()
|
||||
}
|
||||
|
||||
/// Check if shop is empty
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.items.is_empty()
|
||||
}
|
||||
|
||||
/// Get all items with unlimited stock
|
||||
pub fn get_unlimited_stock_items(&self) -> Vec<&ShopItem> {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.has_unlimited_stock())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all items with limited stock
|
||||
pub fn get_limited_stock_items(&self) -> Vec<&ShopItem> {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| !item.has_unlimited_stock())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all items with custom pricing
|
||||
pub fn get_custom_priced_items(&self) -> Vec<&ShopItem> {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.has_custom_price())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all items the shop buys
|
||||
pub fn get_buyable_items(&self) -> Vec<&ShopItem> {
|
||||
self.items
|
||||
.iter()
|
||||
.filter(|item| item.is_buyable_by_shop())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get item by ID
|
||||
pub fn get_item_by_id(&self, item_id: &str) -> Option<&ShopItem> {
|
||||
self.items.iter().find(|item| item.item_id == item_id)
|
||||
}
|
||||
|
||||
/// Get all item IDs
|
||||
pub fn get_all_item_ids(&self) -> Vec<String> {
|
||||
self.items.iter().map(|item| item.item_id.clone()).collect()
|
||||
}
|
||||
}
|
||||
155
cursebreaker-parser/src/types/cursebreaker/trait.rs
Normal file
155
cursebreaker-parser/src/types/cursebreaker/trait.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Trainer requirement for learning a trait
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TraitTrainer {
    /// Required skill
    pub skill: String,

    /// Required level in the skill
    pub level: i32,

    /// Tier icon indicator (1, 2, 3 for novice, experienced, master;
    /// see `is_novice` / `is_experienced` / `is_master`)
    pub tier_icon: Option<i32>,
}
|
||||
|
||||
impl TraitTrainer {
|
||||
pub fn new(skill: String, level: i32) -> Self {
|
||||
Self {
|
||||
skill,
|
||||
level,
|
||||
tier_icon: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this is a novice tier trait (tier 1)
|
||||
pub fn is_novice(&self) -> bool {
|
||||
self.tier_icon == Some(1)
|
||||
}
|
||||
|
||||
/// Check if this is an experienced tier trait (tier 2)
|
||||
pub fn is_experienced(&self) -> bool {
|
||||
self.tier_icon == Some(2)
|
||||
}
|
||||
|
||||
/// Check if this is a master tier trait (tier 3)
|
||||
pub fn is_master(&self) -> bool {
|
||||
self.tier_icon == Some(3)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a character trait/perk
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Trait {
    /// Unique ID
    pub id: i32,

    /// Display name
    pub name: String,

    /// Description text (may contain HTML color tags;
    /// see `get_plain_description` for a stripped form)
    pub description: String,

    /// Learnability ID (ability that can be learned)
    pub learnability: Option<i32>,

    /// Developer comment
    pub comment: Option<String>,

    /// Trainer requirement (if this trait is learned from a trainer)
    pub trainer: Option<TraitTrainer>,
}
|
||||
|
||||
impl Trait {
|
||||
/// Create a new Trait with required fields
|
||||
pub fn new(id: i32, name: String, description: String) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
learnability: None,
|
||||
comment: None,
|
||||
trainer: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if this trait teaches an ability (has learnability)
|
||||
pub fn teaches_ability(&self) -> bool {
|
||||
self.learnability.is_some()
|
||||
}
|
||||
|
||||
/// Check if this trait is learned from a trainer
|
||||
pub fn is_trainer_trait(&self) -> bool {
|
||||
self.trainer.is_some()
|
||||
}
|
||||
|
||||
/// Check if this trait requires a specific skill
|
||||
pub fn requires_skill(&self, skill: &str) -> bool {
|
||||
self.trainer
|
||||
.as_ref()
|
||||
.map(|t| t.skill.eq_ignore_ascii_case(skill))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Get the required skill level, if any
|
||||
pub fn get_required_level(&self) -> Option<i32> {
|
||||
self.trainer.as_ref().map(|t| t.level)
|
||||
}
|
||||
|
||||
/// Get the required skill name, if any
|
||||
pub fn get_required_skill(&self) -> Option<&str> {
|
||||
self.trainer.as_ref().map(|t| t.skill.as_str())
|
||||
}
|
||||
|
||||
/// Check if this is a novice tier trait
|
||||
pub fn is_novice(&self) -> bool {
|
||||
self.trainer
|
||||
.as_ref()
|
||||
.map(|t| t.is_novice())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Check if this is an experienced tier trait
|
||||
pub fn is_experienced(&self) -> bool {
|
||||
self.trainer
|
||||
.as_ref()
|
||||
.map(|t| t.is_experienced())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Check if this is a master tier trait
|
||||
pub fn is_master(&self) -> bool {
|
||||
self.trainer
|
||||
.as_ref()
|
||||
.map(|t| t.is_master())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Check if this trait's description contains HTML color tags
|
||||
pub fn has_colored_description(&self) -> bool {
|
||||
self.description.contains("<color=")
|
||||
}
|
||||
|
||||
/// Strip HTML color tags from description
|
||||
pub fn get_plain_description(&self) -> String {
|
||||
let mut result = self.description.clone();
|
||||
|
||||
// Remove color tags
|
||||
while let Some(start) = result.find("<color=") {
|
||||
if let Some(end) = result[start..].find('>') {
|
||||
result.replace_range(start..start + end + 1, "");
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Remove closing color tags
|
||||
result = result.replace("</color>", "");
|
||||
|
||||
// Unescape HTML entities
|
||||
result = result.replace("<", "<");
|
||||
result = result.replace(">", ">");
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user