Compare commits

...

15 Commits

Author SHA1 Message Date
ccc9a894b7 26-1-2026 2026-01-26 13:05:57 +00:00
cdfab8fd1e teleporter 2026-01-23 07:12:25 +00:00
99aecaefde images in database 2026-01-16 13:31:48 +00:00
642ba643ad DB addition 2026-01-16 09:33:30 +00:00
3720b6ad80 resource icons DB 2026-01-12 07:19:38 +00:00
1072186ff1 resource icons 2026-01-12 06:06:44 +00:00
d99c546499 world resources to DB 2026-01-12 04:32:38 +00:00
557ffa7e53 cursebreaker readme update 2026-01-12 04:32:20 +00:00
8438dabf0b item DB extension 2026-01-12 03:02:45 +00:00
ebee7fd19c item DB upgrade 2026-01-11 13:48:15 +00:00
c7a31ce30e selective parsing of scenes 2026-01-11 03:03:39 +00:00
44b9a67800 interactive map init 2026-01-11 02:46:49 +00:00
80ccd375de different commands 2026-01-10 10:43:41 +00:00
6d6e56042b database migration 2026-01-10 09:22:58 +00:00
30e66d4b04 sql database 2026-01-10 07:44:26 +00:00
114 changed files with 11592 additions and 1886 deletions

View File

@@ -18,7 +18,34 @@
"Bash(xargs dirname:*)",
"Bash(xargs -I {} find {} -name \"*.cs\")",
"Bash(RUST_LOG=debug cargo run:*)",
"WebSearch"
"WebSearch",
"Bash(cargo search:*)",
"Bash(cargo install:*)",
"Bash(diesel setup:*)",
"Bash(diesel migration generate:*)",
"Bash(diesel migration run:*)",
"Bash(sqlite3:*)",
"Bash(diesel migration redo:*)",
"Bash(tree:*)",
"Bash(timeout 180 cargo build:*)",
"Bash(timeout 5 cargo run:*)",
"Bash(DATABASE_URL=\"../cursebreaker.db\" timeout 10 cargo run:*)",
"Bash(DATABASE_URL=\"../cursebreaker.db\" timeout -s TERM 3 cargo run:*)",
"Bash(curl:*)",
"Bash(diesel print-schema:*)",
"Bash(time cargo run:*)",
"Bash(DATABASE_URL=../cursebreaker.db diesel migration:*)",
"Bash(DATABASE_URL=cursebreaker.db diesel migration:*)",
"Bash(DATABASE_URL=../cursebreaker-parser/cursebreaker.db cargo run:*)",
"Bash(identify:*)",
"Bash(diesel migration revert:*)",
"Bash(xargs:*)",
"Bash(ss:*)",
"Bash(timeout 10 cargo run:*)",
"Bash(timeout 60 cargo run:*)",
"Bash(DATABASE_URL=../cursebreaker.db diesel print-schema:*)",
"Bash(DATABASE_URL=../cursebreaker.db diesel database:*)",
"Bash(DATABASE_URL=cursebreaker.db CB_ASSETS_PATH=/home/connor/repos/CBAssets cargo run:*)"
],
"additionalDirectories": [
"/home/connor/repos/CBAssets/"

1
.env Normal file
View File

@@ -0,0 +1 @@
DATABASE_URL=/home/connor/repos/cursebreaker-parser-rust/cursebreaker.db

2
.gitignore vendored
View File

@@ -19,3 +19,5 @@ target/
# Test data (cloned Unity projects for integration tests)
test_data/
cursebreaker.db
**/cursebreaker.db

1857
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
[workspace]
members = ["unity-parser", "unity-parser-macros", "cursebreaker-parser"]
members = ["unity-parser", "unity-parser-macros", "cursebreaker-parser", "cursebreaker-map"]
resolver = "2"
[workspace.package]

12
cursebreaker-map/.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
# Rust
/target/
Cargo.lock
# IDE
.vscode/
.idea/
# Database
*.db
*.db-shm
*.db-wal

View File

@@ -0,0 +1,20 @@
[package]
name = "cursebreaker-map"
version = "0.1.0"
edition = "2021"
[dependencies]
axum = "0.7"
tokio = { version = "1", features = ["full"] }
tower = "0.4"
tower-http = { version = "0.5", features = ["fs", "cors"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
diesel = { version = "2.1", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
dotenvy = "0.15"
tracing = "0.1"
tracing-subscriber = "0.3"
base64 = "0.22"
[dependencies.cursebreaker-parser]
path = "../cursebreaker-parser"

126
cursebreaker-map/README.md Normal file
View File

@@ -0,0 +1,126 @@
# Cursebreaker Interactive Map
An interactive web-based map viewer for "The Black Grimoire: Cursebreaker" game, built with Rust (Axum) and Leaflet.js.
## Features
- **Optimized Tile Loading**: Uses merged tiles to reduce HTTP requests
- Zoom level 0: ~31 tiles (4×4 merged)
- Zoom level 1: ~105 tiles (2×2 merged)
- Zoom level 2: ~345 tiles (original tiles)
- **Lossless Compression**: All tiles use lossless WebP for optimal quality
- **High-Performance Rendering**: Serves tiles directly from SQLite database
- **Interactive Navigation**: Pan and zoom through the game world
- **Dark Theme UI**: Game-themed dark interface with collapsible sidebar
- **Real-time Coordinates**: Display tile and pixel coordinates while hovering
## Architecture
### Backend (Rust + Axum)
- **Tile Server**: Serves WebP-compressed map tiles from SQLite database
- **API Endpoints**:
- `GET /api/tiles/:z/:x/:y` - Retrieve tile at coordinates (x, y) and zoom level z
- `GET /api/bounds` - Get map bounds (min/max x/y coordinates)
- `GET /` - Serve static frontend files
### Frontend (Leaflet.js)
- **Image Overlay Layer**: Each merged tile is rendered as a positioned image overlay
- **Merged Tile System**: Reduces HTTP requests by merging tiles at lower zoom levels:
- Zoom 0: 4×4 original tiles merged into 512px images (~31 total requests)
- Zoom 1: 2×2 original tiles merged into 512px images (~105 total requests)
- Zoom 2: Original 512px tiles (1×1, ~345 total requests)
- **Fixed Coordinate System**: Uses Leaflet's CRS.Simple with tiles positioned at their exact pixel coordinates
## Prerequisites
- Rust (latest stable)
- SQLite database at `../cursebreaker.db` with `minimap_tiles` table populated
## Running the Map Viewer
### First Time Setup
1. **Generate all map tiles** (only needed once, or after updating minimap images):
```bash
cd cursebreaker-parser
cargo run --bin image-parser --release
```
This processes all PNG files and automatically generates all 3 zoom levels (takes ~1.5 minutes)
2. **Start the map server**:
```bash
cd ../cursebreaker-map
cargo run --release
```
3. **Open in browser**:
Navigate to `http://127.0.0.1:3000`
### Subsequent Runs
Just start the server (step 2 above). All tiles are stored in the database.
## Database Configuration
By default, the server looks for the database at `../cursebreaker.db`. You can override this with the `DATABASE_URL` environment variable:
```bash
DATABASE_URL=/path/to/cursebreaker.db cargo run --release
```
## Future Enhancements
The sidebar includes placeholders for upcoming features:
- **Icon Filtering**: Toggle visibility of shops, resources, fast travel points, workbenches, etc.
- **Map Markers**: Display game entities (shops, resources, NPCs) with clickable info popups
- **Search**: Find locations by name
- **Pathfinding**: Calculate routes between points
- **Layer Control**: Toggle different map overlays
## Project Structure
```
cursebreaker-map/
├── Cargo.toml # Rust dependencies
├── src/
│ └── main.rs # Axum web server
├── static/
│ ├── index.html # Main HTML page
│ ├── style.css # Styling (dark theme)
│ └── map.js # Leaflet map initialization
└── README.md
```
## Performance Notes
- **Merged Tiles**: Reduces HTTP requests by up to 91% at lowest zoom (31 vs 345 requests)
- **Lossless WebP**: High quality compression without artifacts
- **Database Storage**: All tiles served directly from SQLite BLOBs (no file I/O)
- **CRS.Simple**: Avoids expensive geographic coordinate projections
- **Total Storage**: ~111 MB for all zoom levels combined
### Load Performance Comparison
| Zoom Level | Merge Factor | Tiles Loaded | HTTP Requests Saved |
|------------|--------------|--------------|---------------------|
| 0 (zoomed out) | 4×4 | 31 | 91% fewer requests |
| 1 (medium) | 2×2 | 105 | 70% fewer requests |
| 2 (zoomed in) | 1×1 | 345 | baseline |
## Troubleshooting
**Tiles not loading**:
- Verify database path is correct
- Check that `minimap_tiles` table is populated
- Look for errors in server console output
**Map appears blank**:
- Check browser console for JavaScript errors
- Verify `/api/bounds` returns valid coordinates
- Ensure tiles exist for the displayed coordinate range
**Performance issues**:
- Try running in release mode: `cargo run --release`
- Check database is on fast storage (SSD)
- Reduce browser zoom level to load lower-resolution tiles

View File

@@ -0,0 +1,505 @@
use axum::{
extract::{Path, State},
http::{header, StatusCode},
response::{IntoResponse, Response},
routing::get,
Json, Router,
};
use base64::Engine;
use diesel::prelude::*;
use serde::Serialize;
use std::sync::Arc;
use tower_http::{cors::CorsLayer, services::ServeDir};
use tracing::info;
// Database connection
// Type alias so the handler code stays readable if the backend ever changes.
type DbConnection = diesel::SqliteConnection;

/// Shared application state: only the database path is kept.
/// Each request opens its own SQLite connection from this URL
/// (see `establish_connection`), so no pool is stored here.
#[derive(Clone)]
struct AppState {
    database_url: String,
}

/// Response for `GET /api/bounds`: inclusive tile-coordinate extremes
/// of the zoom-level-2 (original, unmerged) tile set.
#[derive(Serialize)]
struct MapBounds {
    min_x: i32,
    min_y: i32,
    max_x: i32,
    max_y: i32,
}

/// Response for `GET /api/resources`.
#[derive(Serialize)]
struct ResourceResponse {
    resources: Vec<ResourceGroup>,
}

/// One harvestable resource type with its icon and every world position.
#[derive(Serialize)]
struct ResourceGroup {
    item_id: i32,
    name: String,
    skill: String,
    level: i32,
    // WebP/PNG icon bytes, base64-encoded once per group for the frontend.
    icon_base64: String,
    positions: Vec<Position>,
}

/// A point in map-pixel coordinates (world coords already scaled by 5.12).
#[derive(Serialize)]
struct Position {
    x: f32,
    y: f32,
}

// Labels response (world_map_icons with icon_type == 16)
#[derive(Serialize)]
struct LabelsResponse {
    labels: Vec<Label>,
}

/// A text label placed on the map at pixel coordinates.
#[derive(Serialize)]
struct Label {
    x: f32,
    y: f32,
    text: String,
    font_size: i32,
}

// Entrances response (world_teleporters)
#[derive(Serialize)]
struct EntrancesResponse {
    // Shared "Entrance" icon for all entrance markers.
    icon_base64: String,
    entrances: Vec<Entrance>,
}

/// A teleporter: where it sits (`pos_*`) and, if known, where it leads (`tp_*`).
#[derive(Serialize)]
struct Entrance {
    pos_x: f32,
    pos_y: f32,
    // Destination may be absent (e.g. one-way or unresolved teleporters) —
    // presumably NULL in the DB; TODO confirm against the parser.
    tp_x: Option<f32>,
    tp_y: Option<f32>,
}

// Ground Items response (world_loot)
#[derive(Serialize)]
struct GroundItemsResponse {
    // Shared "Common_tick" icon for all ground-item markers.
    icon_base64: String,
    items: Vec<GroundItem>,
}

/// A lootable item lying in the world.
#[derive(Serialize)]
struct GroundItem {
    x: f32,
    y: f32,
    name: String,
    amount: i32,
    respawn_time: i32,
}

// Houses response (player_houses)
#[derive(Serialize)]
struct HousesResponse {
    // Shared "Notifications_House" icon for all house markers.
    icon_base64: String,
    houses: Vec<House>,
}

/// A purchasable player house.
#[derive(Serialize)]
struct House {
    x: f32,
    y: f32,
    name: String,
    description: String,
    price: i32,
}
// Establish database connection
/// Open a fresh SQLite connection to `database_url`.
///
/// Called once per request by every handler; SQLite connections are cheap
/// to open, but a pool (e.g. r2d2) would be the next step if this becomes
/// a bottleneck.
fn establish_connection(database_url: &str) -> Result<DbConnection, diesel::ConnectionError> {
    SqliteConnection::establish(database_url)
}
// Get map bounds from database (using zoom level 2 tiles)
async fn get_bounds(State(state): State<Arc<AppState>>) -> Result<Json<MapBounds>, StatusCode> {
use cursebreaker_parser::schema::minimap_tiles::dsl::*;
use diesel::dsl::{max, min};
let mut conn = establish_connection(&state.database_url).map_err(|e| {
tracing::error!("Database connection error: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
let (min_x_val, max_x_val): (Option<i32>, Option<i32>) = minimap_tiles
.filter(zoom.eq(2)) // Only count zoom level 2 (original) tiles
.select((min(x), max(x)))
.first(&mut conn)
.map_err(|e| {
tracing::error!("Error querying min/max x: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
let (min_y_val, max_y_val): (Option<i32>, Option<i32>) = minimap_tiles
.filter(zoom.eq(2)) // Only count zoom level 2 (original) tiles
.select((min(y), max(y)))
.first(&mut conn)
.map_err(|e| {
tracing::error!("Error querying min/max y: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
Ok(Json(MapBounds {
min_x: min_x_val.unwrap_or(0),
min_y: min_y_val.unwrap_or(0),
max_x: max_x_val.unwrap_or(0),
max_y: max_y_val.unwrap_or(0),
}))
}
// Get tile by coordinates and zoom level
/// `GET /api/tiles/:z/:x/:y` — serve one WebP tile from the database.
///
/// `z` is the *database* zoom level (0 = 4×4 merged, 1 = 2×2 merged,
/// 2 = original), `x`/`y` are merged-tile coordinates at that level.
/// Returns the raw image bytes with `Content-Type: image/webp`,
/// 404 when no such tile exists, 500 on DB errors.
async fn get_tile(
    State(state): State<Arc<AppState>>,
    Path((z, tile_x, tile_y)): Path<(i32, i32, i32)>,
) -> Result<Response, StatusCode> {
    use cursebreaker_parser::schema::minimap_tiles::dsl::*;
    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    // Query minimap_tiles table for the tile at the requested zoom level.
    // `.optional()` turns the not-found case into Ok(None) so it can be
    // reported as 404 instead of 500.
    let tile_data = minimap_tiles
        .filter(zoom.eq(z))
        .filter(x.eq(tile_x))
        .filter(y.eq(tile_y))
        .select(image)
        .first::<Vec<u8>>(&mut conn)
        .optional()
        .map_err(|e| {
            tracing::error!("Error querying tile: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    match tile_data {
        Some(data) => {
            info!(
                "Serving tile at ({}, {}) zoom {} - {} bytes",
                tile_x,
                tile_y,
                z,
                data.len()
            );
            // BLOB is stored as WebP by the tile generator; serve as-is.
            Ok(([(header::CONTENT_TYPE, "image/webp")], data).into_response())
        }
        None => {
            tracing::warn!("Tile not found: ({}, {}) at zoom {}", tile_x, tile_y, z);
            Err(StatusCode::NOT_FOUND)
        }
    }
}
// Get all resources with icons from database
/// `GET /api/resources` — harvestable resource nodes grouped by item type.
///
/// Joins `world_resources` → `resource_icons` → `harvestables`, collects
/// every world position under its `item_id`, attaches the item's icon
/// (base64-encoded once per group), and returns groups sorted by skill,
/// then required level.
async fn get_resources(
    State(state): State<Arc<AppState>>,
) -> Result<Json<ResourceResponse>, StatusCode> {
    use cursebreaker_parser::schema::{harvestables, resource_icons, world_resources};
    use std::collections::HashMap;

    // World-unit → map-pixel scale. Presumably 512px tiles over 100 world
    // units (512/100 = 5.12) — TODO confirm against the tile generator.
    const WORLD_TO_PIXEL: f32 = 5.12;

    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;

    // Three-way join. No SQL ORDER BY: the rows are grouped into a HashMap
    // (which discards order) and re-sorted in Rust below, so ordering in
    // the query was wasted work.
    let results = world_resources::table
        .inner_join(
            resource_icons::table.on(world_resources::item_id.eq(resource_icons::item_id)),
        )
        .inner_join(harvestables::table.on(resource_icons::item_id.eq(harvestables::id)))
        .select((
            resource_icons::item_id,
            resource_icons::name,
            harvestables::skill,
            harvestables::level,
            resource_icons::icon_64,
            world_resources::pos_x,
            world_resources::pos_y,
        ))
        .load::<(i32, String, String, i32, Vec<u8>, f32, f32)>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying resources: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

    // Group positions by item_id; the icon is base64-encoded only the first
    // time an item_id is seen (or_insert_with runs once per key).
    let mut grouped: HashMap<i32, ResourceGroup> = HashMap::new();
    for (item_id, name, skill, level, icon_bytes, pos_x, pos_y) in results {
        let entry = grouped.entry(item_id).or_insert_with(|| ResourceGroup {
            item_id,
            name,
            skill,
            level,
            icon_base64: base64::engine::general_purpose::STANDARD.encode(&icon_bytes),
            positions: Vec::new(),
        });
        entry.positions.push(Position {
            x: pos_x * WORLD_TO_PIXEL,
            y: pos_y * WORLD_TO_PIXEL,
        });
    }

    // Stable, client-friendly ordering: by skill, then required level.
    let mut resources: Vec<ResourceGroup> = grouped.into_values().collect();
    resources.sort_by(|a, b| a.skill.cmp(&b.skill).then(a.level.cmp(&b.level)));

    info!("Returning {} resource types", resources.len());
    Ok(Json(ResourceResponse { resources }))
}
// Get labels from world_map_icons where icon_type == 16
/// `GET /api/labels` — map text labels.
///
/// `icon_type == 16` marks the text-label rows in `world_map_icons`;
/// positions are converted from world units to map pixels (×5.12).
async fn get_labels(State(state): State<Arc<AppState>>) -> Result<Json<LabelsResponse>, StatusCode> {
    use cursebreaker_parser::schema::world_map_icons;

    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;

    let rows: Vec<(f32, f32, String, i32)> = world_map_icons::table
        .filter(world_map_icons::icon_type.eq(16))
        .select((
            world_map_icons::pos_x,
            world_map_icons::pos_y,
            world_map_icons::text,
            world_map_icons::font_size,
        ))
        .load(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying labels: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

    // Scale each world position to map pixels while building the response.
    let mut labels = Vec::with_capacity(rows.len());
    for (pos_x, pos_y, text, font_size) in rows {
        labels.push(Label {
            x: pos_x * 5.12,
            y: pos_y * 5.12,
            text,
            font_size,
        });
    }

    info!("Returning {} labels", labels.len());
    Ok(Json(LabelsResponse { labels }))
}
// Get entrances from world_teleporters
/// `GET /api/entrances` — teleporter/entrance markers.
///
/// Returns one shared "Entrance" icon (base64) plus every teleporter's
/// position and, when present, its destination. All coordinates are
/// scaled from world units to map pixels (×5.12).
async fn get_entrances(
    State(state): State<Arc<AppState>>,
) -> Result<Json<EntrancesResponse>, StatusCode> {
    use cursebreaker_parser::schema::{general_icons, world_teleporters};
    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    // Get the Entrance icon. A missing icon row degrades to an empty byte
    // string (and thus an empty base64 string) rather than an error.
    let icon_bytes: Vec<u8> = general_icons::table
        .filter(general_icons::name.eq("Entrance"))
        .select(general_icons::icon_32)
        .first::<Option<Vec<u8>>>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying entrance icon: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .unwrap_or_default();
    let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
    // Get teleporter positions
    let results = world_teleporters::table
        .select((
            world_teleporters::pos_x,
            world_teleporters::pos_y,
            world_teleporters::tp_x,
            world_teleporters::tp_y,
        ))
        .load::<(f32, f32, Option<f32>, Option<f32>)>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying teleporters: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    // Scale world units → map pixels; the optional destination is scaled
    // only when present.
    let entrances: Vec<Entrance> = results
        .into_iter()
        .map(|(pos_x, pos_y, tp_x, tp_y)| Entrance {
            pos_x: pos_x * 5.12,
            pos_y: pos_y * 5.12,
            tp_x: tp_x.map(|x| x * 5.12),
            tp_y: tp_y.map(|y| y * 5.12),
        })
        .collect();
    info!("Returning {} entrances", entrances.len());
    Ok(Json(EntrancesResponse {
        icon_base64,
        entrances,
    }))
}
// Get ground items from world_loot
/// `GET /api/ground-items` — lootable items lying in the world.
///
/// Returns one shared "Common_tick" icon (base64) plus each loot spot's
/// position (scaled ×5.12 to map pixels), item name, amount and respawn
/// time, by joining `world_loot` against `items`.
async fn get_ground_items(
    State(state): State<Arc<AppState>>,
) -> Result<Json<GroundItemsResponse>, StatusCode> {
    use cursebreaker_parser::schema::{general_icons, items, world_loot};
    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    // Get the Common_tick icon; a missing row degrades to an empty icon
    // rather than an error.
    let icon_bytes: Vec<u8> = general_icons::table
        .filter(general_icons::name.eq("Common_tick"))
        .select(general_icons::icon_32)
        .first::<Option<Vec<u8>>>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying common_tick icon: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .unwrap_or_default();
    let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);
    // Get world loot with item names.
    // NOTE(review): `items::id` appears nullable in the generated schema;
    // `assume_not_null` coerces it for the join — verify no items row can
    // actually have a NULL id.
    let results = world_loot::table
        .inner_join(items::table.on(world_loot::item_id.eq(items::id.assume_not_null())))
        .select((
            world_loot::pos_x,
            world_loot::pos_y,
            items::name,
            world_loot::amount,
            world_loot::respawn_time,
        ))
        .load::<(f32, f32, String, i32, i32)>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying ground items: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    // Scale world units → map pixels.
    let ground_items: Vec<GroundItem> = results
        .into_iter()
        .map(|(pos_x, pos_y, name, amount, respawn_time)| GroundItem {
            x: pos_x * 5.12,
            y: pos_y * 5.12,
            name,
            amount,
            respawn_time,
        })
        .collect();
    info!("Returning {} ground items", ground_items.len());
    Ok(Json(GroundItemsResponse {
        icon_base64,
        items: ground_items,
    }))
}
// Get player houses
/// `GET /api/houses` — purchasable player houses.
///
/// Returns one shared "Notifications_House" icon (64px, base64) plus each
/// house's position, name, description and price. The world's Z axis maps
/// to the map's Y axis; coordinates are scaled ×5.12 to map pixels.
async fn get_houses(State(state): State<Arc<AppState>>) -> Result<Json<HousesResponse>, StatusCode> {
    use cursebreaker_parser::schema::{general_icons, player_houses};

    let mut conn = establish_connection(&state.database_url).map_err(|e| {
        tracing::error!("Database connection error: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;

    // Shared icon for every house marker; a missing row degrades to an
    // empty icon instead of an error.
    let icon_bytes: Vec<u8> = general_icons::table
        .filter(general_icons::name.eq("Notifications_House"))
        .select(general_icons::icon_64)
        .first::<Option<Vec<u8>>>(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying house icon: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .unwrap_or_default();
    let icon_base64 = base64::engine::general_purpose::STANDARD.encode(&icon_bytes);

    let rows: Vec<(f32, f32, String, String, i32)> = player_houses::table
        .select((
            player_houses::pos_x,
            player_houses::pos_z,
            player_houses::name,
            player_houses::description,
            player_houses::price,
        ))
        .load(&mut conn)
        .map_err(|e| {
            tracing::error!("Error querying player houses: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

    let mut houses = Vec::with_capacity(rows.len());
    for (pos_x, pos_z, name, description, price) in rows {
        houses.push(House {
            x: pos_x * 5.12,
            y: pos_z * 5.12,
            name,
            description,
            price,
        });
    }

    info!("Returning {} houses", houses.len());
    Ok(Json(HousesResponse { icon_base64, houses }))
}
#[tokio::main]
async fn main() {
    // Initialize tracing (log level controlled by RUST_LOG).
    tracing_subscriber::fmt::init();

    // Database path: overridable via DATABASE_URL, defaults to the parser's
    // database one directory up.
    let database_url =
        std::env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
    info!("Using database: {}", database_url);

    let state = Arc::new(AppState { database_url });

    // API routes plus the static frontend. CORS is fully permissive since
    // this is a read-only, locally hosted viewer.
    let app = Router::new()
        .route("/api/bounds", get(get_bounds))
        .route("/api/tiles/:z/:x/:y", get(get_tile))
        .route("/api/resources", get(get_resources))
        .route("/api/labels", get(get_labels))
        .route("/api/entrances", get(get_entrances))
        .route("/api/ground-items", get(get_ground_items))
        .route("/api/houses", get(get_houses))
        .nest_service("/", ServeDir::new("static"))
        .layer(CorsLayer::permissive())
        .with_state(state);

    // Bind address: overridable via BIND_ADDR (e.g. "0.0.0.0:8080");
    // defaults to the previously hard-coded loopback address.
    let bind_addr = std::env::var("BIND_ADDR").unwrap_or_else(|_| "127.0.0.1:3000".to_string());
    let listener = tokio::net::TcpListener::bind(&bind_addr)
        .await
        .unwrap_or_else(|e| panic!("failed to bind {}: {}", bind_addr, e));
    info!("Server running on http://{}", bind_addr);
    axum::serve(listener, app)
        .await
        .expect("server terminated with an error");
}

View File

@@ -0,0 +1,44 @@
// Map Configuration
// You can adjust these values and reload the page to test different zoom behaviors
const MapConfig = {
    // Zoom level configuration
    // Maps Leaflet zoom levels to database zoom levels and merge factors.
    // Entries must stay sorted by ascending leafletZoom — getZoomConfig
    // relies on that ordering.
    zoomLevels: [
        // Leaflet zoom -2 and up → Database zoom 0 (4x4 merged)
        { leafletZoom: -2, dbZoom: 0, mergeFactor: 4, label: "4x4 merged" },
        // Leaflet zoom -0.5 and up → Database zoom 1 (2x2 merged)
        { leafletZoom: -0.5, dbZoom: 1, mergeFactor: 2, label: "2x2 merged" },
        // Leaflet zoom 1 and up → Database zoom 2 (original tiles)
        { leafletZoom: 1, dbZoom: 2, mergeFactor: 1, label: "original" },
    ],
    // Leaflet map settings
    minZoom: -2,
    maxZoom: 2,
    // Tile size (in pixels) - should match database tile size
    tileSize: 512,
    // Debug mode - shows tile boundaries and coordinates
    debug: true,
    // Resource icon configuration
    resourceIconSize: 48, // Icon size in pixels (configurable)

    // Get zoom configuration for a specific Leaflet zoom level:
    // the last entry whose leafletZoom is <= the requested zoom, falling
    // back to the first entry for zooms below the table's range.
    getZoomConfig(leafletZoom) {
        let config = this.zoomLevels[0];
        for (const zoomConfig of this.zoomLevels) {
            if (leafletZoom >= zoomConfig.leafletZoom) {
                config = zoomConfig;
            }
        }
        return config;
    }
};
// Attach to window only in browser contexts so this file can also be
// loaded in Node / test runners without a ReferenceError on `window`.
if (typeof window !== 'undefined') {
    window.MapConfig = MapConfig;
}

View File

@@ -0,0 +1,104 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Cursebreaker Interactive Map</title>
    <!-- Leaflet CSS -->
    <link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" />
    <!-- Custom CSS -->
    <link rel="stylesheet" href="style.css">
</head>
<body>
    <div id="app">
        <!-- Sidebar (starts collapsed; toggled by the map's hamburger control in map.js) -->
        <div id="sidebar" class="sidebar collapsed">
            <div class="sidebar-content">
                <h2>Cursebreaker Map</h2>
                <div class="info-section">
                    <p class="subtitle">The Black Grimoire: Cursebreaker</p>
                </div>
                <!-- Per-category visibility toggles; wired up in markers.js -->
                <div class="filters-section">
                    <h3>Labels</h3>
                    <div class="filter-controls">
                        <label class="filter-label master-toggle">
                            <input type="checkbox" id="labels-toggle" checked>
                            <span>Show Labels</span>
                        </label>
                    </div>
                </div>
                <div class="filters-section">
                    <h3>Entrances</h3>
                    <div class="filter-controls">
                        <label class="filter-label master-toggle">
                            <input type="checkbox" id="entrances-toggle" checked>
                            <span>Show Entrances</span>
                        </label>
                    </div>
                </div>
                <div class="filters-section">
                    <h3>Ground Items</h3>
                    <div class="filter-controls">
                        <label class="filter-label master-toggle">
                            <input type="checkbox" id="ground-items-toggle" checked>
                            <span>Show Ground Items</span>
                        </label>
                    </div>
                </div>
                <div class="filters-section">
                    <h3>Houses</h3>
                    <div class="filter-controls">
                        <label class="filter-label master-toggle">
                            <input type="checkbox" id="houses-toggle" checked>
                            <span>Show Houses</span>
                        </label>
                    </div>
                </div>
                <!-- Resource filter list; populated dynamically by resources.js -->
                <div class="filters-section">
                    <h3>Resources</h3>
                    <div class="filter-controls">
                        <button id="select-all-resources" class="filter-btn">Show All</button>
                        <button id="deselect-all-resources" class="filter-btn">Hide All</button>
                    </div>
                    <div id="resource-filters" class="filter-group">
                        <p class="loading-text">Loading resources...</p>
                    </div>
                </div>
                <!-- Bounds/size/debug stats; filled in by updateMapInfo() in map.js -->
                <div class="map-info">
                    <h3>Map Info</h3>
                    <div id="map-stats">
                        <p>Loading...</p>
                    </div>
                </div>
            </div>
        </div>
        <!-- Map Container -->
        <div id="map"></div>
        <!-- Coordinates Display (updated on mousemove by map.js) -->
        <div id="coordinates" class="coordinates-display">
            Coordinates: <span id="coord-text">--</span>
        </div>
    </div>
    <!-- Leaflet JS -->
    <script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
    <!-- Configuration (edit this to adjust zoom levels) -->
    <script src="config.js"></script>
    <!-- Custom JS -->
    <script src="map.js"></script>
    <script src="resources.js"></script>
    <script src="markers.js"></script>
</body>
</html>

View File

@@ -0,0 +1,273 @@
// Initialize the map when the page loads
let map;             // Leaflet map instance (global; also used by markers.js)
let bounds;          // /api/bounds result: { min_x, min_y, max_x, max_y } in tile coords
let tileLayerGroup;  // Layer group holding the tile image overlays for the current zoom
let debugLayerGroup; // Layer group for debug rectangles/labels (only when MapConfig.debug)
// Bootstrap the viewer: fetch bounds, build the Leaflet map (CRS.Simple,
// raw pixel coordinates), install zoom/mousemove handlers and UI controls,
// then kick off resource and marker loading.
async function initMap() {
    try {
        // Fetch map bounds from the API
        const response = await fetch('/api/bounds');
        bounds = await response.json();
        console.log('Map bounds:', bounds);
        // Update sidebar with map info
        updateMapInfo(bounds);
        // Calculate map dimensions in tiles (bounds are inclusive)
        const width = bounds.max_x - bounds.min_x + 1;
        const height = bounds.max_y - bounds.min_y + 1;
        // Get config
        const config = window.MapConfig;
        const tileSize = config.tileSize;
        // Create map with simple CRS (not geographic) — coordinates are
        // plain pixels, no projection math.
        map = L.map('map', {
            crs: L.CRS.Simple,
            minZoom: config.minZoom,
            maxZoom: config.maxZoom,
            attributionControl: false,
        });
        // Calculate bounds for Leaflet (in pixels)
        // Origin at top-left [0,0], y increases down, x increases right
        const pixelWidth = width * tileSize;
        const pixelHeight = height * tileSize;
        const mapBounds = [
            [0, 0],
            [pixelHeight, pixelWidth]
        ];
        // Set max bounds to prevent panning outside the map
        map.setMaxBounds(mapBounds);
        // Fit the map to bounds
        map.fitBounds(mapBounds);
        // Create layer groups
        tileLayerGroup = L.layerGroup().addTo(map);
        if (config.debug) {
            debugLayerGroup = L.layerGroup().addTo(map);
        }
        // Load tiles for current zoom
        loadTilesForCurrentZoom();
        // Reload tiles when zoom changes — each Leaflet zoom may map to a
        // different DB zoom level / merge factor (see config.js).
        map.on('zoomend', function() {
            loadTilesForCurrentZoom();
        });
        // Add coordinate display on mouse move
        map.on('mousemove', function(e) {
            const lat = e.latlng.lat;
            const lng = e.latlng.lng;
            // Convert pixel coordinates to tile coordinates
            const tileX = Math.floor(lng / tileSize);
            const tileY = Math.floor(lat / tileSize);
            const leafletZoom = map.getZoom();
            const zoomConfig = config.getZoomConfig(leafletZoom);
            // Calculate which merged tile this is in
            const mergedTileX = Math.floor(tileX / zoomConfig.mergeFactor);
            const mergedTileY = Math.floor(tileY / zoomConfig.mergeFactor);
            document.getElementById('coord-text').textContent =
                `Tile (${tileX}, ${tileY}) | Merged (${mergedTileX}, ${mergedTileY}) | Zoom ${leafletZoom} (DB ${zoomConfig.dbZoom})`;
        });
        // Add attribution
        L.control.attribution({
            position: 'bottomright',
            prefix: false
        }).addAttribution('The Black Grimoire: Cursebreaker').addTo(map);
        // Add sidebar toggle control (hamburger button, top-left corner)
        const SidebarControl = L.Control.extend({
            options: {
                position: 'topleft'
            },
            onAdd: function(map) {
                const container = L.DomUtil.create('div', 'leaflet-bar leaflet-control');
                const button = L.DomUtil.create('a', 'leaflet-control-sidebar', container);
                button.innerHTML = '☰';
                button.href = '#';
                button.title = 'Toggle Sidebar';
                L.DomEvent.on(button, 'click', function(e) {
                    L.DomEvent.preventDefault(e);
                    const sidebar = document.getElementById('sidebar');
                    sidebar.classList.toggle('collapsed');
                });
                return container;
            }
        });
        map.addControl(new SidebarControl());
        console.log('Map initialized successfully');
        // Load resources asynchronously (defined in resources.js)
        loadResources().catch(error => {
            console.error('Failed to load resources:', error);
        });
        // Load markers (labels, entrances, ground items, houses) — markers.js
        initMarkers();
    } catch (error) {
        console.error('Error initializing map:', error);
        document.getElementById('map-stats').innerHTML =
            '<p style="color: #ff6b6b;">Error loading map data</p>';
    }
}
// Rebuild the tile layer for the current Leaflet zoom: clears all existing
// overlays, picks the DB zoom level / merge factor from MapConfig, and adds
// one L.imageOverlay per merged tile positioned at its exact pixel bounds.
function loadTilesForCurrentZoom() {
    // Clear existing tiles
    tileLayerGroup.clearLayers();
    if (debugLayerGroup) {
        debugLayerGroup.clearLayers();
    }
    const currentZoom = map.getZoom();
    const config = window.MapConfig;
    const tileSize = config.tileSize;
    // Get zoom configuration
    const zoomConfig = config.getZoomConfig(currentZoom);
    const dbZoom = zoomConfig.dbZoom;
    const mergeFactor = zoomConfig.mergeFactor;
    console.log(`\n=== Loading tiles at Leaflet zoom ${currentZoom} ===`);
    console.log(`Database zoom: ${dbZoom}, Merge factor: ${mergeFactor} (${zoomConfig.label})`);
    console.log(`Bounds: X [${bounds.min_x}, ${bounds.max_x}], Y [${bounds.min_y}, ${bounds.max_y}]`);
    // Calculate which merged tiles we need to load
    // The database stores merged tile coordinates starting from 0
    // For a 2x2 merge of tiles (0,0), (0,1), (1,0), (1,1), the database stores it at (0,0)
    // For original tiles at min_x=0, with mergeFactor=2, we need tiles starting at x=0/2=0
    const minMergedX = Math.floor(bounds.min_x / mergeFactor);
    const maxMergedX = Math.floor(bounds.max_x / mergeFactor);
    const minMergedY = Math.floor(bounds.min_y / mergeFactor);
    const maxMergedY = Math.floor(bounds.max_y / mergeFactor);
    console.log(`Merged tile range: X [${minMergedX}, ${maxMergedX}], Y [${minMergedY}, ${maxMergedY}]`);
    let tileCount = 0;
    let loadedCount = 0;
    let errorCount = 0;
    // Load each merged tile
    for (let mergedY = minMergedY; mergedY <= maxMergedY; mergedY++) {
        for (let mergedX = minMergedX; mergedX <= maxMergedX; mergedX++) {
            // Calculate the pixel bounds for this merged tile
            // The merged tile at (mergedX, mergedY) covers original tiles starting at:
            // (mergedX * mergeFactor, mergedY * mergeFactor)
            const startTileX = mergedX * mergeFactor;
            const startTileY = mergedY * mergeFactor;
            const pixelMinX = startTileX * tileSize;
            const pixelMinY = startTileY * tileSize;
            const pixelMaxX = (startTileX + mergeFactor) * tileSize;
            const pixelMaxY = (startTileY + mergeFactor) * tileSize;
            // Leaflet bounds are [[y, x], [y, x]] (lat/lng order under CRS.Simple)
            const tileBounds = [
                [pixelMinY, pixelMinX],
                [pixelMaxY, pixelMaxX]
            ];
            // Request the merged tile from the API
            const imageUrl = `/api/tiles/${dbZoom}/${mergedX}/${mergedY}`;
            if (config.debug && tileCount < 5) {
                console.log(`  Tile ${tileCount}: DB(${mergedX},${mergedY}) → Pixels [${pixelMinX},${pixelMinY}] to [${pixelMaxX},${pixelMaxY}]`);
                console.log(`  URL: ${imageUrl}`);
            }
            const overlay = L.imageOverlay(imageUrl, tileBounds, {
                opacity: 1,
                errorOverlayUrl: '',
            });
            // load/error counters feed the summary logged below
            overlay.on('load', function() {
                loadedCount++;
                if (config.debug && loadedCount <= 3) {
                    console.log(`  ✓ Loaded tile (${mergedX}, ${mergedY})`);
                }
            });
            overlay.on('error', function() {
                errorCount++;
                console.warn(`  ✗ Failed to load tile (${mergedX}, ${mergedY}) from ${imageUrl}`);
            });
            overlay.addTo(tileLayerGroup);
            tileCount++;
            // Add debug overlay if enabled
            if (config.debug && debugLayerGroup) {
                // Draw rectangle showing tile boundaries
                // NOTE(review): `rect` and `label` are never read after
                // creation — the side effect of addTo() is what matters.
                const rect = L.rectangle(tileBounds, {
                    color: '#ff0000',
                    weight: 1,
                    fillOpacity: 0,
                    interactive: false
                }).addTo(debugLayerGroup);
                // Add label showing tile coordinates
                const center = [
                    (pixelMinY + pixelMaxY) / 2,
                    (pixelMinX + pixelMaxX) / 2
                ];
                const label = L.marker(center, {
                    icon: L.divIcon({
                        className: 'tile-label',
                        html: `<div style="background: rgba(0,0,0,0.7); color: #fff; padding: 2px 5px; border-radius: 3px; font-size: 11px; white-space: nowrap;">
                            DB: (${mergedX},${mergedY})<br/>
                            Z: ${dbZoom}
                        </div>`,
                        iconSize: [60, 30],
                        iconAnchor: [30, 15]
                    }),
                    interactive: false
                }).addTo(debugLayerGroup);
            }
        }
    }
    console.log(`Requested ${tileCount} tiles (merge factor ${mergeFactor}x${mergeFactor})`);
    // Wait a bit and report results — image loads are async, so this is a
    // best-effort snapshot after 2s, not a completion guarantee.
    setTimeout(() => {
        console.log(`Results: ${loadedCount} loaded, ${errorCount} errors, ${tileCount - loadedCount - errorCount} pending`);
    }, 2000);
}
// Render the "Map Info" sidebar panel from the fetched bounds and config.
function updateMapInfo(bounds) {
    const tilesWide = bounds.max_x - bounds.min_x + 1;
    const tilesHigh = bounds.max_y - bounds.min_y + 1;
    const config = window.MapConfig;
    const debugNote = config.debug
        ? '<p style="color: #8b5cf6; font-size: 12px;">Red boxes show tile boundaries</p>'
        : '';
    document.getElementById('map-stats').innerHTML = `
        <p><strong>Bounds:</strong></p>
        <p>X: ${bounds.min_x} to ${bounds.max_x}</p>
        <p>Y: ${bounds.min_y} to ${bounds.max_y}</p>
        <p><strong>Size:</strong> ${tilesWide} × ${tilesHigh} tiles</p>
        <p><strong>Zoom levels:</strong> ${config.minZoom}-${config.maxZoom}</p>
        <p><strong>Debug mode:</strong> ${config.debug ? 'ON' : 'OFF'}</p>
        ${debugNote}
    `;
}
// Initialize map when page loads (DOM must exist before Leaflet attaches
// to the #map container).
window.addEventListener('DOMContentLoaded', initMap);

View File

@@ -0,0 +1,377 @@
// Markers management for Cursebreaker map (Labels, Entrances, Ground Items, Houses)
// Layer groups for each marker type — presumably created/populated by the
// corresponding load* functions further down this file; TODO confirm.
let labelsLayerGroup = null;
let entrancesLayerGroup = null;
let groundItemsLayerGroup = null;
let housesLayerGroup = null;
// Store active teleport lines for entrances (removed when entrances are hidden)
let activeTeleportLine = null;
// Entry point for marker loading; called once the map object exists.
function initMarkers() {
    // Start every marker fetch concurrently; a failure is logged, not fatal.
    const loaders = [loadLabels(), loadEntrances(), loadGroundItems(), loadHouses()];
    Promise.all(loaders).catch((error) => {
        console.error('Error loading markers:', error);
    });
    // Wire up the sidebar checkboxes right away (does not wait for the fetches).
    setupMarkerToggles();
}
// Set up toggle event handlers
// Attach change handlers to the four marker-category checkboxes and then
// restore any previously saved visibility state.
function setupMarkerToggles() {
    // [checkbox element id, persisted state key, layer-group accessor]
    // A getter is used so the handler sees the layer group created later
    // by the async loaders, not the null value present at setup time.
    const toggles = [
        ['labels-toggle', 'labels', () => labelsLayerGroup],
        ['entrances-toggle', 'entrances', () => entrancesLayerGroup],
        ['ground-items-toggle', 'groundItems', () => groundItemsLayerGroup],
        ['houses-toggle', 'houses', () => housesLayerGroup],
    ];
    for (const [elementId, stateKey, getGroup] of toggles) {
        const toggle = document.getElementById(elementId);
        if (!toggle) continue;
        toggle.addEventListener('change', (e) => {
            toggleLayer(getGroup(), e.target.checked);
            saveMarkerState(stateKey, e.target.checked);
            // Hiding entrances also discards any teleport line currently drawn.
            if (stateKey === 'entrances' && !e.target.checked && activeTeleportLine) {
                map.removeLayer(activeTeleportLine);
                activeTeleportLine = null;
            }
        });
    }
    // Re-apply visibility choices saved in localStorage.
    restoreMarkerState();
}
// Toggle layer visibility
// Show or hide a layer group on the global map; no-op when the group
// has not been created yet.
function toggleLayer(layerGroup, visible) {
    if (!layerGroup) {
        return;
    }
    visible ? layerGroup.addTo(map) : map.removeLayer(layerGroup);
}
// Save marker visibility state
// Persist the visibility flag for one marker category. localStorage may
// throw (private browsing, quota), so failures are only warned about.
function saveMarkerState(type, visible) {
    try {
        const raw = localStorage.getItem('cursebreaker_marker_state') || '{}';
        const state = JSON.parse(raw);
        state[type] = visible;
        localStorage.setItem('cursebreaker_marker_state', JSON.stringify(state));
    } catch (error) {
        console.warn('Failed to save marker state:', error);
    }
}
// Restore marker visibility state
// Re-apply persisted marker visibility. Only an explicit saved `false`
// hides a category; anything else keeps the default (visible).
function restoreMarkerState() {
    try {
        const state = JSON.parse(localStorage.getItem('cursebreaker_marker_state') || '{}');
        // Deferred so the async loaders have a chance to create the layer groups.
        setTimeout(() => {
            const categories = [
                ['labels', 'labels-toggle', () => labelsLayerGroup],
                ['entrances', 'entrances-toggle', () => entrancesLayerGroup],
                ['groundItems', 'ground-items-toggle', () => groundItemsLayerGroup],
                ['houses', 'houses-toggle', () => housesLayerGroup],
            ];
            for (const [stateKey, elementId, getGroup] of categories) {
                if (state[stateKey] !== false) continue;
                const toggle = document.getElementById(elementId);
                if (toggle) {
                    toggle.checked = false;
                    toggleLayer(getGroup(), false);
                }
            }
        }, 200);
    } catch (error) {
        console.warn('Failed to restore marker state:', error);
    }
}
// Load labels (text markers on the map)
// Fetch map text labels from the backend and render each one as a
// non-interactive div-icon marker.
async function loadLabels() {
    try {
        console.log('Loading labels...');
        const response = await fetch('/api/labels');
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        console.log(`Received ${data.labels.length} labels`);
        labelsLayerGroup = L.layerGroup();
        data.labels.forEach((label) => {
            // Each label is plain HTML text; the .label-text class adds the halo.
            const marker = L.marker([label.y, label.x], {
                icon: L.divIcon({
                    className: 'map-label',
                    html: `<div class="label-text" style="font-size: ${label.font_size}px;">${label.text}</div>`,
                    iconSize: null, // sizing left to the stylesheet
                    iconAnchor: [0, 0],
                }),
                interactive: false, // labels are display-only
            });
            marker.addTo(labelsLayerGroup);
        });
        labelsLayerGroup.addTo(map);
        console.log('Labels loaded successfully');
    } catch (error) {
        console.error('Error loading labels:', error);
    }
}
// Load entrances (teleporters with lines)
// Fetch entrance/teleporter markers and render them. Clicking an entrance
// draws a dashed cyan line to its teleport destination (plus a destination
// circle) that auto-hides after 5 seconds.
//
// Fixes two cleanup bugs in the original implementation:
//  1. A stale 5-second timer from an earlier click could remove a *newer*
//     teleport line early (it only checked `if (activeTeleportLine)`).
//  2. The destination circle was never removed when a second entrance was
//     clicked (or when the entrances toggle cleared the line), so circles
//     lingered until their own timers fired.
async function loadEntrances() {
    try {
        console.log('Loading entrances...');
        const response = await fetch('/api/entrances');
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        console.log(`Received ${data.entrances.length} entrances`);
        entrancesLayerGroup = L.layerGroup();
        // Shared icon decoded from the base64 payload returned by the API.
        const entranceIcon = L.icon({
            iconUrl: `data:image/webp;base64,${data.icon_base64}`,
            iconSize: [32, 32],
            iconAnchor: [16, 16],
            popupAnchor: [0, -16],
        });
        // Destination circle paired with the currently active teleport line.
        // Tracked here (shared by all entrance click handlers via closure) so
        // a new click removes the previous circle immediately.
        let activeDestMarker = null;
        for (const entrance of data.entrances) {
            const marker = L.marker([entrance.pos_y, entrance.pos_x], {
                icon: entranceIcon,
                title: 'Entrance',
            });
            // Store teleport destination on the marker for the click handler.
            marker.teleportDest = {
                x: entrance.tp_x,
                y: entrance.tp_y,
            };
            marker.on('click', function () {
                // Remove any line/circle left over from a previous click.
                if (activeTeleportLine) {
                    map.removeLayer(activeTeleportLine);
                    activeTeleportLine = null;
                }
                if (activeDestMarker) {
                    map.removeLayer(activeDestMarker);
                    activeDestMarker = null;
                }
                const dest = this.teleportDest;
                if (dest.x === null || dest.y === null) {
                    return; // entrance without a teleport target
                }
                // Dashed line from entrance to destination.
                const line = L.polyline(
                    [
                        [entrance.pos_y, entrance.pos_x],
                        [dest.y, dest.x]
                    ],
                    {
                        color: '#00ffff',
                        weight: 3,
                        opacity: 0.8,
                        dashArray: '10, 10',
                    }
                ).addTo(map);
                activeTeleportLine = line;
                const destMarker = L.circleMarker([dest.y, dest.x], {
                    radius: 8,
                    color: '#00ffff',
                    fillColor: '#00ffff',
                    fillOpacity: 0.5,
                }).addTo(map);
                activeDestMarker = destMarker;
                // Auto-hide after 5 seconds — but only remove what this click
                // created, so an old timer cannot cut a newer click short.
                setTimeout(() => {
                    if (activeTeleportLine === line) {
                        map.removeLayer(line);
                        activeTeleportLine = null;
                    }
                    if (activeDestMarker === destMarker) {
                        map.removeLayer(destMarker);
                        activeDestMarker = null;
                    }
                }, 5000);
            });
            marker.addTo(entrancesLayerGroup);
        }
        entrancesLayerGroup.addTo(map);
        console.log('Entrances loaded successfully');
    } catch (error) {
        console.error('Error loading entrances:', error);
    }
}
// Format respawn time as "XXM XXS"
// Format a respawn duration given in whole seconds as "XM YS", omitting
// zero-valued components ("2M", "45S"); a duration of 0 renders as "0S".
function formatRespawnTime(seconds) {
    const minutes = Math.floor(seconds / 60);
    const secs = seconds % 60;
    const parts = [];
    if (minutes > 0) parts.push(`${minutes}M`);
    if (secs > 0) parts.push(`${secs}S`);
    // Empty parts means both components are zero — show "0S".
    return parts.length > 0 ? parts.join(' ') : `${secs}S`;
}
// Load ground items
// Fetch ground-item spawn points and render them with popups showing the
// item name, stack amount (when > 1) and respawn timer.
async function loadGroundItems() {
    try {
        console.log('Loading ground items...');
        const response = await fetch('/api/ground-items');
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        console.log(`Received ${data.items.length} ground items`);
        groundItemsLayerGroup = L.layerGroup();
        // One shared icon, decoded from the base64 payload.
        const itemIcon = L.icon({
            iconUrl: `data:image/webp;base64,${data.icon_base64}`,
            iconSize: [24, 24],
            iconAnchor: [12, 12],
            popupAnchor: [0, -12],
        });
        data.items.forEach((item) => {
            const marker = L.marker([item.y, item.x], {
                icon: itemIcon,
                title: item.name,
            });
            // Assemble popup lines, then join with <br/>.
            const lines = [`<strong>${item.name}</strong>`];
            if (item.amount > 1) {
                lines.push(`Amount: ${item.amount}`);
            }
            lines.push(`Respawn: ${formatRespawnTime(item.respawn_time)}`);
            marker.bindPopup(lines.join('<br/>'));
            marker.addTo(groundItemsLayerGroup);
        });
        groundItemsLayerGroup.addTo(map);
        console.log('Ground items loaded successfully');
    } catch (error) {
        console.error('Error loading ground items:', error);
    }
}
// Load player houses
// Fetch player houses and render them with name / description / price popups.
async function loadHouses() {
    try {
        console.log('Loading houses...');
        const response = await fetch('/api/houses');
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        console.log(`Received ${data.houses.length} houses`);
        housesLayerGroup = L.layerGroup();
        // Shared icon decoded from the base64 payload.
        const houseIcon = L.icon({
            iconUrl: `data:image/webp;base64,${data.icon_base64}`,
            iconSize: [64, 64],
            iconAnchor: [32, 32],
            popupAnchor: [0, -32],
        });
        for (const house of data.houses) {
            const marker = L.marker([house.y, house.x], {
                icon: houseIcon,
                title: house.name,
            });
            // toLocaleString inserts thousands separators into the price.
            const popupContent = `
                <strong>${house.name}</strong><br/>
                <em>${house.description}</em><br/>
                <span class="house-price">Price: ${house.price.toLocaleString()} gold</span>
            `;
            marker.bindPopup(popupContent);
            marker.addTo(housesLayerGroup);
        }
        housesLayerGroup.addTo(map);
        console.log('Houses loaded successfully');
    } catch (error) {
        console.error('Error loading houses:', error);
    }
}
// Call initMarkers after map is loaded
// This is called from map.js after resources are loaded

View File

@@ -0,0 +1,261 @@
// Resource management for Cursebreaker map
// All four maps are keyed by the resource's display name and populated by
// loadResources() / createResourceGroup() below.
let resourceLayerGroups = {}; // Map: resource name -> L.layerGroup
let resourceIcons = {}; // Map: resource name -> L.icon
let resourceData = {}; // Map: resource name -> resource metadata (skill, level, etc.)
let filterState = {}; // Map: resource name -> boolean (visible)
// Load resources from API
async function loadResources() {
try {
console.log('Loading resources from API...');
const response = await fetch('/api/resources');
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
console.log(`Received ${data.resources.length} resource types`);
// Create icons and layer groups for each resource
for (const group of data.resources) {
createResourceGroup(group);
}
// Initialize filter UI
initializeFilterUI();
// Restore filter state from localStorage
restoreFilterState();
console.log(`Loaded ${data.resources.length} resource types successfully`);
} catch (error) {
console.error('Error loading resources:', error);
const container = document.getElementById('resource-filters');
if (container) {
container.innerHTML = '<p style="color: #ff6b6b;">Failed to load resources. Check console for details.</p>';
}
}
}
// Create resource group with icon and markers
function createResourceGroup(group) {
const config = window.MapConfig;
// Create icon definition (cached per resource type)
const iconUrl = `data:image/webp;base64,${group.icon_base64}`;
const icon = L.icon({
iconUrl: iconUrl,
iconSize: [config.resourceIconSize, config.resourceIconSize],
iconAnchor: [config.resourceIconSize / 2, config.resourceIconSize / 2],
popupAnchor: [0, -(config.resourceIconSize / 2)],
});
resourceIcons[group.name] = icon;
// Store metadata
resourceData[group.name] = {
item_id: group.item_id,
skill: group.skill,
level: group.level,
};
// Create layer group for this resource type
const layerGroup = L.layerGroup();
// Add markers for all positions
for (const pos of group.positions) {
const marker = L.marker([pos.y, pos.x], {
icon: icon,
title: group.name,
});
// Add popup with resource details
marker.bindPopup(
`<strong>${group.name}</strong><br/>Position: (${pos.x.toFixed(1)}, ${pos.y.toFixed(1)})`
);
marker.addTo(layerGroup);
}
// Add to map (initially visible)
layerGroup.addTo(map);
resourceLayerGroups[group.name] = layerGroup;
filterState[group.name] = true; // Initially visible
}
// Initialize filter UI with skill grouping
function initializeFilterUI() {
const container = document.getElementById('resource-filters');
if (!container) {
console.error('resource-filters container not found');
return;
}
container.innerHTML = ''; // Clear loading text
// Group resources by skill
const skillGroups = {};
for (const name in resourceLayerGroups) {
const metadata = resourceData[name];
if (!skillGroups[metadata.skill]) {
skillGroups[metadata.skill] = [];
}
skillGroups[metadata.skill].push({
name: name,
level: metadata.level,
});
}
// Sort skills alphabetically
const sortedSkills = Object.keys(skillGroups).sort();
// Create UI for each skill group
for (const skill of sortedSkills) {
const skillDiv = document.createElement('div');
skillDiv.className = 'skill-group';
const header = document.createElement('div');
header.className = 'skill-header';
// Capitalize first letter of skill
header.textContent = skill.charAt(0).toUpperCase() + skill.slice(1);
skillDiv.appendChild(header);
// Resources are already sorted by level in backend, but sort again to be sure
skillGroups[skill].sort((a, b) => a.level - b.level);
// Create checkbox for each resource
for (const resource of skillGroups[skill]) {
const label = createFilterLabel(resource.name);
skillDiv.appendChild(label);
}
container.appendChild(skillDiv);
}
// Attach bulk filter handlers
const selectAllBtn = document.getElementById('select-all-resources');
const deselectAllBtn = document.getElementById('deselect-all-resources');
if (selectAllBtn) {
selectAllBtn.addEventListener('click', () => {
setAllFilters(true);
});
}
if (deselectAllBtn) {
deselectAllBtn.addEventListener('click', () => {
setAllFilters(false);
});
}
}
// Create filter label with checkbox and icon
function createFilterLabel(resourceName) {
const label = document.createElement('label');
label.className = 'filter-label';
const checkbox = document.createElement('input');
checkbox.type = 'checkbox';
checkbox.checked = filterState[resourceName];
checkbox.dataset.resource = resourceName;
checkbox.addEventListener('change', handleFilterChange);
const icon = document.createElement('img');
icon.src = resourceIcons[resourceName].options.iconUrl;
icon.className = 'filter-icon';
icon.alt = resourceName;
const text = document.createElement('span');
text.textContent = resourceName;
label.appendChild(checkbox);
label.appendChild(icon);
label.appendChild(text);
return label;
}
// Handle filter checkbox change
function handleFilterChange(event) {
const resourceName = event.target.dataset.resource;
const isVisible = event.target.checked;
filterState[resourceName] = isVisible;
// Show/hide layer group
const layerGroup = resourceLayerGroups[resourceName];
if (isVisible) {
layerGroup.addTo(map);
} else {
map.removeLayer(layerGroup);
}
// Persist state
saveFilterState();
}
// Set all filters to visible or hidden
function setAllFilters(visible) {
for (const name in filterState) {
filterState[name] = visible;
const layerGroup = resourceLayerGroups[name];
if (visible) {
layerGroup.addTo(map);
} else {
map.removeLayer(layerGroup);
}
}
// Update checkboxes
document.querySelectorAll('#resource-filters input[type="checkbox"]').forEach((cb) => {
cb.checked = visible;
});
saveFilterState();
}
// Save filter state to localStorage
function saveFilterState() {
try {
localStorage.setItem('cursebreaker_resource_filters', JSON.stringify(filterState));
} catch (error) {
console.warn('Failed to save filter state to localStorage:', error);
}
}
// Restore filter state from localStorage
function restoreFilterState() {
const saved = localStorage.getItem('cursebreaker_resource_filters');
if (!saved) return;
try {
const savedState = JSON.parse(saved);
for (const name in savedState) {
if (resourceLayerGroups[name]) {
filterState[name] = savedState[name];
const layerGroup = resourceLayerGroups[name];
if (!savedState[name]) {
map.removeLayer(layerGroup);
}
}
}
// Update checkboxes after UI is created
setTimeout(() => {
document.querySelectorAll('#resource-filters input[type="checkbox"]').forEach((cb) => {
const name = cb.dataset.resource;
if (filterState[name] !== undefined) {
cb.checked = filterState[name];
}
});
}, 100);
console.log('Restored filter state from localStorage');
} catch (error) {
console.warn('Failed to restore filter state:', error);
}
}

View File

@@ -0,0 +1,318 @@
/* Global stylesheet for the Cursebreaker interactive map (dark theme). */

/* Reset box model and default spacing for all elements. */
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}
body {
    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
    overflow: hidden; /* the map pane manages its own scrolling */
    background: #1a1a1a;
    color: #e0e0e0;
}
/* Full-viewport flex shell: sidebar on the left, map fills the rest. */
#app {
    display: flex;
    height: 100vh;
    width: 100vw;
}
/* Sidebar */
.sidebar {
    width: 320px;
    background: #2a2a2a;
    box-shadow: 2px 0 10px rgba(0, 0, 0, 0.5);
    z-index: 1000;
    transition: margin-left 0.3s ease; /* animates collapse/expand */
    position: relative;
    overflow-y: auto;
}
/* Collapsed state slides the sidebar fully off-screen to the left. */
.sidebar.collapsed {
    margin-left: -320px;
}
/* Sidebar toggle control */
.leaflet-control-sidebar {
    width: 30px;
    height: 30px;
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: 18px;
    line-height: 30px;
    text-align: center;
    text-decoration: none;
}
.sidebar-content {
    padding: 20px;
}
.sidebar h2 {
    color: #8b5cf6;
    margin-bottom: 5px;
    font-size: 24px;
}
.subtitle {
    color: #a0a0a0;
    font-size: 14px;
    margin-bottom: 20px;
}
.info-section {
    border-bottom: 1px solid #3a3a3a;
    padding-bottom: 20px;
    margin-bottom: 20px;
}
.filters-section,
.map-info {
    margin-bottom: 25px;
}
.filters-section h3,
.map-info h3 {
    color: #8b5cf6;
    margin-bottom: 10px;
    font-size: 16px;
    font-weight: 600;
}
.filter-group {
    display: flex;
    flex-direction: column;
    gap: 8px;
}
.filter-group label {
    display: flex;
    align-items: center;
    cursor: pointer;
    padding: 5px;
    border-radius: 4px;
    transition: background 0.2s;
}
.filter-group label:hover {
    background: #3a3a3a;
}
.filter-group input[type="checkbox"] {
    margin-right: 8px;
    cursor: pointer;
}
.coming-soon {
    color: #a0a0a0;
    font-style: italic;
    font-size: 13px;
    margin-bottom: 15px;
}
#map-stats {
    font-size: 14px;
    color: #c0c0c0;
}
#map-stats p {
    margin: 5px 0;
}
/* Map */
#map {
    flex: 1;
    height: 100vh;
    background: #1a1a1a;
}
/* Leaflet overrides for dark theme */
.leaflet-container {
    background: #1a1a1a;
}
.leaflet-control-zoom a {
    background: #2a2a2a;
    color: #e0e0e0;
    border-color: #3a3a3a;
}
.leaflet-control-zoom a:hover {
    background: #3a3a3a;
    color: #fff;
}
.leaflet-bar {
    border: 1px solid #3a3a3a;
}
/* Coordinates display */
.coordinates-display {
    position: absolute;
    bottom: 10px;
    left: 50%;
    transform: translateX(-50%); /* center horizontally over the map */
    background: rgba(42, 42, 42, 0.95);
    color: #e0e0e0;
    padding: 8px 16px;
    border-radius: 6px;
    font-size: 14px;
    font-family: 'Courier New', monospace;
    z-index: 1000;
    box-shadow: 0 2px 10px rgba(0, 0, 0, 0.5);
    pointer-events: none; /* never intercept map clicks */
}
#coord-text {
    color: #8b5cf6;
    font-weight: bold;
}
/* Filter controls */
.filter-controls {
    display: flex;
    gap: 8px;
    margin-bottom: 12px;
}
.filter-btn {
    flex: 1;
    padding: 6px 12px;
    background: #3a3a3a;
    color: #e0e0e0;
    border: 1px solid #4a4a4a;
    border-radius: 4px;
    cursor: pointer;
    font-size: 12px;
    transition: background 0.2s;
}
.filter-btn:hover {
    background: #4a4a4a;
}
.filter-btn:active {
    background: #2a2a2a;
}
/* Filter groups by skill */
.skill-group {
    margin-bottom: 12px;
}
.skill-header {
    color: #8b5cf6;
    font-size: 13px;
    font-weight: bold;
    margin-bottom: 4px;
    padding: 4px 8px;
    background: rgba(139, 92, 246, 0.1);
    border-radius: 3px;
}
/* Filter items */
.filter-label {
    display: flex;
    align-items: center;
    gap: 8px;
    cursor: pointer;
    padding: 4px 8px;
    margin-left: 8px; /* indent under the skill header */
    border-radius: 4px;
    transition: background 0.2s;
    font-size: 13px;
}
.filter-label:hover {
    background: #3a3a3a;
}
/* Keep pixel-art icons crisp when scaled (vendor fallbacks included). */
.filter-icon {
    width: 20px;
    height: 20px;
    image-rendering: pixelated;
    image-rendering: -moz-crisp-edges;
    image-rendering: crisp-edges;
}
.loading-text {
    color: #a0a0a0;
    font-style: italic;
    font-size: 13px;
    padding: 8px;
}
/* Dark theme popups */
.leaflet-popup-content-wrapper {
    background: #2a2a2a;
    color: #e0e0e0;
    border: 1px solid #3a3a3a;
}
.leaflet-popup-content {
    margin: 8px 12px;
    font-size: 13px;
}
.leaflet-popup-tip {
    background: #2a2a2a;
}
/* Master toggle for marker categories */
.master-toggle {
    margin-left: 0 !important; /* override .filter-label indent */
}
.master-toggle input[type="checkbox"] {
    width: 18px;
    height: 18px;
    accent-color: #8b5cf6;
}
/* Map labels (text overlays) */
.map-label {
    background: transparent;
    border: none;
}
/* Black halo keeps label text legible over any tile color. */
.label-text {
    color: #e0e0e0;
    text-shadow:
        -1px -1px 2px #000,
        1px -1px 2px #000,
        -1px 1px 2px #000,
        1px 1px 2px #000,
        0 0 4px #000;
    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
    font-weight: bold;
    white-space: nowrap;
    pointer-events: none;
}
/* House price styling in popup */
.house-price {
    color: #ffd700;
    font-weight: bold;
}
/* Popup styling for various marker types */
.leaflet-popup-content strong {
    color: #8b5cf6;
}
.leaflet-popup-content em {
    color: #a0a0a0;
    font-size: 12px;
}
/* Responsive */
@media (max-width: 768px) {
    .sidebar {
        width: 280px;
    }
    .sidebar.collapsed {
        margin-left: -280px;
    }
}

View File

@@ -7,10 +7,26 @@ edition = "2021"
name = "cursebreaker_parser"
path = "src/lib.rs"
# Main binary - runs all parsers
[[bin]]
name = "cursebreaker-parser"
path = "src/main.rs"
# XML Parser - loads game data from XML files and populates database
[[bin]]
name = "xml-parser"
path = "src/bin/xml-parser.rs"
# Scene Parser - parses Unity scenes and extracts game objects
[[bin]]
name = "scene-parser"
path = "src/bin/scene-parser.rs"
# Image Parser - processes minimap tiles and generates all zoom levels
[[bin]]
name = "image-parser"
path = "src/bin/image-parser.rs"
[dependencies]
unity-parser = { path = "../unity-parser" }
serde_yaml = "0.9"
@@ -20,5 +36,13 @@ log = { version = "0.4", features = ["std"] }
quick-xml = "0.37"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
diesel = { version = "2.2", features = ["sqlite"], optional = true }
diesel = { version = "2.2", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
libsqlite3-sys = { version = ">=0.17.2", features = ["bundled"] }
image = "0.25"
webp = "0.3"
thiserror = "1.0"
chrono = "0.4"
clap = { version = "4.5", features = ["derive"] }
[dev-dependencies]
diesel_migrations = "2.2"

View File

@@ -6,7 +6,8 @@ A Rust library for parsing and managing game data from the Cursebreaker game. Th
Cursebreaker Parser is designed to:
- Parse Unity scenes and extract game objects using the unity-parser library
- Load game data from XML files (Items, NPCs, Quests, Harvestables, Loot tables)
- Load game data from XML files (Items, NPCs, Quests, Harvestables, Loot tables, Maps, Fast Travel, Player Houses, Traits, Shops)
- Process and compress minimap tiles and item images
- Provide in-memory databases for efficient querying of game data
- Serialize game data to SQL format for database storage
@@ -17,8 +18,112 @@ Cursebreaker Parser is designed to:
- **Quest Database**: Handle quest definitions, phases, and rewards
- **Harvestable Database**: Track harvestable resources and their drop tables
- **Loot Database**: Manage loot tables and drop configurations
- **Map Database**: Handle map data and navigation
- **Fast Travel Database**: Manage fast travel locations and connections
- **Player House Database**: Track player houses and their locations
- **Trait Database**: Handle character traits and their effects
- **Shop Database**: Manage shop inventories and pricing
- **Minimap Database**: Process and manage minimap tiles with multiple zoom levels
- **XML Parsing**: Robust XML parsing with error handling
- **SQL Export**: Prepare data for SQL database insertion
- **Image Processing**: Process and compress minimap tiles and item icons
- **Unity Scene Parsing**: Extract game objects and world resources from Unity scenes
## Binaries
The project provides multiple binaries to handle different parsing tasks. This allows you to run only the parts you need, avoiding long load times for unnecessary operations.
### Available Binaries
1. **xml-parser** - Loads game data from XML files and populates the SQLite database
- Fast execution
- Run this when XML files change
```bash
cargo run --bin xml-parser
```
2. **scene-parser** - Parses Unity scenes and extracts world objects
- Slow execution (Unity project initialization)
- Extracts multiple types of interactable components and their positions:
- **InteractableResource**: Harvestable resources → `world_resources` table
- **InteractableTeleporter**: Teleporters with source/destination positions → `world_teleporters` table
- **InteractableWorkbench**: Workbenches with workbench ID → `world_workbenches` table
- **LootSpawner**: Loot spawners with item, amount, respawn time → `world_loot` table
- **MapIcon**: Map icons with type, size, text, etc. → `world_map_icons` table
- **MapNameChanger**: Map name changers → `world_map_name_changers` table
- Processes item icons for harvestables:
- Looks up the first item drop for each harvestable from `harvestable_drops` table
- Loads the icon from `Data/Textures/ItemIcons/{item_id}.png`
- Applies white outline (4px) and resizes to 64x64
- Converts to WebP and stores in `resource_icons` table
- Run this when scene files change
```bash
cargo run --bin scene-parser
```
3. **image-parser** - Processes minimap tiles
- Slow execution (image processing and compression)
- Run this when minimap images change
```bash
cargo run --bin image-parser
```
4. **cursebreaker-parser** - All-in-one binary (runs all parsers)
- Slowest execution (runs everything)
- Use when you need to regenerate the entire database
```bash
cargo run --bin cursebreaker-parser
# or simply
cargo run
```
5. **verify-db** - Verifies database contents and shows basic statistics
```bash
cargo run --bin verify-db
```
6. **verify-expanded-db** - Verifies expanded database schema with items, recipes, and stats
```bash
cargo run --bin verify-expanded-db
```
7. **verify-images** - Verifies item images and shows storage statistics
```bash
cargo run --bin verify-images
```
8. **verify-stats** - Verifies item stats and shows breakdown by type
```bash
cargo run --bin verify-stats
```
9. **verify-resource-icons** - Verifies resource icons for harvestables
```bash
cargo run --bin verify-resource-icons
```
### Building for Production
Build specific binaries for release:
```bash
cargo build --release --bin xml-parser
cargo build --release --bin scene-parser
cargo build --release --bin image-parser
```
The compiled binaries will be in `target/release/`.
## Configuration
### Environment Variables
Set the `CB_ASSETS_PATH` environment variable to the path of your CurseBreaker assets directory:
```bash
export CB_ASSETS_PATH="/path/to/CBAssets"
```
If not set, the default fallback is `/home/connor/repos/CBAssets`.
## Usage
@@ -63,30 +168,197 @@ for (id, name, json) in sql_data.iter().take(5) {
}
```
### Querying World Resources
```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
// Connect to database
let mut conn = SqliteConnection::establish("../cursebreaker.db")?;
// Define the structure
#[derive(Queryable, Debug)]
struct WorldResource {
item_id: i32,
pos_x: f32,
pos_y: f32,
}
// Query resources by item ID
use cursebreaker_parser::schema::world_resources::dsl::*;
let copper_ore = world_resources
.filter(item_id.eq(2))
.load::<WorldResource>(&mut conn)?;
println!("Found {} copper ore nodes", copper_ore.len());
for resource in copper_ore {
println!(" Position: ({:.2}, {:.2})", resource.pos_x, resource.pos_y);
}
```
See `examples/query_world_resources.rs` for a complete example.
### Querying Resource Icons
```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
// Connect to database
let mut conn = SqliteConnection::establish("../cursebreaker.db")?;
// Define the structure
#[derive(Queryable, Debug)]
struct ResourceIcon {
item_id: i32, // Harvestable ID
name: String, // Harvestable name
icon_64: Vec<u8>, // WebP image data (64x64 with white border)
}
// Query icon for a specific harvestable
use cursebreaker_parser::schema::resource_icons::dsl::*;
let copper_icon = resource_icons
.filter(item_id.eq(2)) // Harvestable ID for Copper Ore
.first::<ResourceIcon>(&mut conn)?;
println!("Found icon for: {}", copper_icon.name);
println!("Icon size: {} bytes (WebP format)", copper_icon.icon_64.len());
// Save to file if needed
std::fs::write("copper_ore.webp", &copper_icon.icon_64)?;
```
See `examples/resource_icons_example.rs` for a complete example.
### Additional Databases
Similar APIs are available for other game data types:
```rust
use cursebreaker_parser::{
MapDatabase, FastTravelDatabase, PlayerHouseDatabase,
TraitDatabase, ShopDatabase, MinimapDatabase
};
// Load maps, fast travel points, player houses, etc.
let map_db = MapDatabase::load_from_xml("Data/XMLs/Maps/Map.xml")?;
// ... similar usage patterns
```
See the examples directory for usage of each database type.
### Database Verification
After parsing data, you can verify the database contents using the verification binaries:
```bash
# Basic database verification
cargo run --bin verify-db
# Verify expanded schema with recipes and stats
cargo run --bin verify-expanded-db
# Check item images and storage usage
cargo run --bin verify-images
# Analyze item stats breakdown
cargo run --bin verify-stats
```
### Examples
The project includes several example programs demonstrating different aspects of the parser:
- **game_data_demo.rs** - Comprehensive demo loading and querying all game data types (Items, NPCs, Quests, Harvestables, Loot)
- **item_database_demo.rs** - Focused on item database operations
- **query_world_resources.rs** - Querying world resource locations from the database
- **resource_icons_example.rs** - Querying processed harvestable icons with white borders
- **fast_travel_example.rs** - Working with fast travel locations
- **maps_example.rs** - Map data handling
- **player_houses_example.rs** - Player house management
- **shops_example.rs** - Shop inventory and pricing
- **traits_example.rs** - Character traits and effects
Run any example with:
```bash
cargo run --example <example_name>
```
## Project Structure
```
cursebreaker-parser/
├── src/
│ ├── lib.rs # Library entry point and public API
│ ├── main.rs # Binary entry point
│ ├── xml_parser.rs # XML parsing utilities
│ ├── item_loader.rs # Item loading logic
│ ├── databases/ # Database implementations
│ ├── lib.rs # Library entry point and public API
│ ├── main.rs # Main binary (all-in-one parser)
│ ├── bin/ # Separate parser binaries
│   │   ├── xml-parser.rs         # XML parsing only
│   │   ├── scene-parser.rs       # Unity scene parsing only
│ │ ├── image-parser.rs # Image processing only
│ │ ├── verify-db.rs # Database verification
│ │ ├── verify-expanded-db.rs # Expanded database verification
│ │ ├── verify-images.rs # Image verification
│ │ ├── verify-stats.rs # Stats verification
│ │ └── verify-resource-icons.rs # Resource icons verification
│ ├── xml_parser.rs # XML parsing utilities
│ ├── image_processor.rs # Image processing utilities
│ ├── item_loader.rs # Item loading logic
│ ├── schema.rs # Database schema definitions
│ ├── databases/ # Database implementations
│ │ ├── item_database.rs
│ │ ├── npc_database.rs
│ │ ├── quest_database.rs
│ │ ├── harvestable_database.rs
│ │ ── loot_database.rs
└── types/ # Type definitions
├── cursebreaker/ # Game-specific types (Items, NPCs, Quests, etc.)
── monobehaviours/ # Unity MonoBehaviour types
├── examples/ # Example usage
├── Cargo.toml # Package configuration
└── XML_PARSING.md # XML parsing documentation
│   │   ├── loot_database.rs
│   │   ├── map_database.rs
│   │   ├── fast_travel_database.rs
│   │   ├── player_house_database.rs
│   │   ├── trait_database.rs
│   │   ├── shop_database.rs
│   │   └── minimap_database.rs
│ └── types/ # Type definitions
│ ├── cursebreaker/ # Game-specific types (Items, NPCs, Quests, etc.)
│ └── monobehaviours/ # Unity MonoBehaviour types
├── examples/ # Example usage
│ ├── fast_travel_example.rs
│ ├── game_data_demo.rs
│ ├── item_database_demo.rs
│ ├── maps_example.rs
│ ├── player_houses_example.rs
│ ├── query_world_resources.rs
│ ├── shops_example.rs
│ └── traits_example.rs
├── migrations/ # Database migrations
├── Cargo.toml # Package configuration
├── XML_PARSING.md # XML parsing documentation
└── README.md # This file
```
## Database Schema
The parser uses Diesel for database operations with SQLite. Database migrations are located in the `migrations/` directory and handle:
- Item data with stats, images, and crafting recipes
- NPC information and loot tables
- Quest definitions and phases
- Harvestable resources and drop tables
- World resource locations from Unity scenes
- Resource icons for harvestables (64x64 WebP with white borders)
- World teleporters with source/destination coordinates
- World workbenches with workbench IDs
- World loot spawners with item, amount, and respawn time
- Map icons with type, size, text, and hover text
- Map name changers with location and map name
- Minimap tiles and metadata
- Shop inventories and pricing
- Player houses and locations
- Fast travel points
- Character traits
## Dependencies
- **unity-parser**: For parsing Unity scene files
@@ -95,7 +367,8 @@ cursebreaker-parser/
- **serde_json**: JSON support
- **serde_yaml**: YAML support
- **sparsey**: ECS (Entity Component System) support
- **diesel**: Optional SQL database support
- **diesel**: SQL database support with SQLite
- **image**: Image processing and WebP compression
- **thiserror**: Error handling
## Building
@@ -107,8 +380,18 @@ cargo build
# Run tests
cargo test
# Build with SQL support
cargo build --features diesel
# Build specific binaries
cargo build --bin xml-parser
cargo build --bin scene-parser
cargo build --bin image-parser
cargo build --bin verify-db
# Run examples
cargo run --example game_data_demo
cargo run --example item_database_demo
# Build for release
cargo build --release
```
## Documentation

View File

@@ -0,0 +1,278 @@
# XML Parser Documentation
This document explains the XML parsing system used to load game data from Cursebreaker's XML files and populate the SQLite database.
## Overview
The XML parser system is responsible for:
1. Reading game data from XML files (items, NPCs, quests, etc.)
2. Parsing the XML into Rust structs
3. Storing the parsed data in a SQLite database
## Architecture
### File Structure
```
cursebreaker-parser/src/
├── xml_parsers/ # XML parsing module
│ ├── mod.rs # Shared utilities and re-exports
│ ├── items.rs # Item parser
│ ├── npcs.rs # NPC parser
│ ├── quests.rs # Quest parser
│ ├── harvestables.rs # Harvestable resource parser
│ ├── loot.rs # Loot table parser
│ ├── maps.rs # Map/scene parser
│ ├── fast_travel.rs # Fast travel location parser
│ ├── player_houses.rs # Player house parser
│ ├── traits.rs # Character trait parser
│ └── shops.rs # Shop/vendor parser
├── databases/ # Database abstraction layer
│ ├── item_database.rs
│ ├── npc_database.rs
│ └── ...
├── types/ # Data structures
│ └── cursebreaker/
│ ├── item.rs
│ ├── npc.rs
│ └── ...
└── bin/
└── xml-parser.rs # CLI binary
```
### Data Flow
```
XML Files (CBAssets/Data/XMLs/)
        ↓
XML Parsers (xml_parsers/*.rs)
        ↓
Rust Structs (types/cursebreaker/*.rs)
        ↓
Database Layer (databases/*.rs)
        ↓
SQLite Database (cursebreaker.db)
```
## Parser Components
### Shared Utilities (`xml_parsers/mod.rs`)
The module provides common functionality used by all parsers:
```rust
/// Error types for XML parsing
pub enum XmlParseError {
XmlError(quick_xml::Error), // XML syntax errors
IoError(std::io::Error), // File read errors
AttrError(AttrError), // Attribute parsing errors
MissingAttribute(String), // Required attribute not found
InvalidAttribute(String), // Attribute value invalid
}
/// Parse XML element attributes into a HashMap
fn parse_attributes(element: &BytesStart) -> Result<HashMap<String, String>, XmlParseError>
/// Parse health range strings like "3-5" or "3" into (min, max)
fn parse_health_range(health_str: &str) -> (i32, i32)
```
### Individual Parsers
Each parser follows a similar pattern:
1. **Open and read the XML file** using `quick_xml::Reader`
2. **Iterate through XML events** (Start, Empty, End, Text, Eof)
3. **Match element names** and extract attributes
4. **Build Rust structs** from the parsed data
5. **Return a Vec** of parsed objects
#### Example: Item Parser Flow
```rust
pub fn parse_items_xml<P: AsRef<Path>>(path: P) -> Result<Vec<Item>, XmlParseError> {
// 1. Open file and create reader
let file = File::open(path)?;
let mut reader = Reader::from_reader(BufReader::new(file));
// 2. Process XML events
loop {
match reader.read_event_into(&mut buf) {
Ok(Event::Start(e)) | Ok(Event::Empty(e)) => {
match e.name().as_ref() {
b"item" => {
// 3. Parse attributes
let attrs = parse_attributes(&e)?;
let id = attrs.get("id")...;
let name = attrs.get("name")...;
// 4. Create struct
let item = Item::new(id, name);
current_item = Some(item);
}
b"stat" => { /* Parse nested stat element */ }
_ => {}
}
}
Ok(Event::End(e)) => {
if e.name().as_ref() == b"item" {
// 5. Add completed item to results
items.push(current_item.take().unwrap());
}
}
Ok(Event::Eof) => break,
Err(e) => return Err(XmlParseError::XmlError(e)),
_ => {}
}
}
Ok(items)
}
```
## Supported Data Types
| Parser | XML Source | Description |
|--------|-----------|-------------|
| `items` | `Items/Items.xml` | Game items (weapons, armor, consumables, etc.) |
| `npcs` | `Npcs/NPCInfo.xml` | Non-player characters (enemies, vendors, quest givers) |
| `quests` | `Quests/Quests.xml` | Quest definitions with phases and rewards |
| `harvestables` | `Harvestables/HarvestableInfo.xml` | Gatherable resources (trees, rocks, fishing spots) |
| `loot` | `Loot/Loot.xml` | NPC drop tables |
| `maps` | `Maps/Maps.xml` | Game scenes/areas with lighting and fog settings |
| `fast_travel` | `FastTravel*.xml` | Teleport locations, canoe routes, portals |
| `player_houses` | `PlayerHouses/PlayerHouses.xml` | Purchasable player housing |
| `traits` | `Traits/Traits.xml` | Character traits/perks |
| `shops` | `Shops/Shops.xml` | Vendor inventories and pricing |
## CLI Usage
The `xml-parser` binary provides command-line control over which parsers to run:
```bash
# Parse all data types
xml-parser --all
xml-parser -a
# Parse specific data types
xml-parser --items # or -i
xml-parser --npcs # or -n
xml-parser --quests # or -q
xml-parser --harvestables # or -r
xml-parser --loot # or -l
xml-parser --maps # or -m
xml-parser --fast-travel # or -f
xml-parser --houses # or -p
xml-parser --traits # or -t
xml-parser --shops # or -s
# Combine multiple parsers
xml-parser --items --npcs --quests
xml-parser -i -n -q
# View help
xml-parser --help
```
### Environment Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `CB_ASSETS_PATH` | `/home/connor/repos/CBAssets` | Path to game assets directory |
| `DATABASE_URL` | `cursebreaker.db` | SQLite database file path |
## Database Integration
Each parser has a corresponding database module that handles:
1. **Loading from XML** - Wraps the parser and creates a queryable database
2. **Querying** - Methods like `get_by_id()`, `get_by_name()`, `get_all()`
3. **Saving to SQLite** - Serializes data and inserts into database tables
### Example: ItemDatabase
```rust
// Load items from XML
let item_db = ItemDatabase::load_from_xml("path/to/Items.xml")?;
// Query items
let sword = item_db.get_by_id(150);
let bows = item_db.get_by_category("bow");
// Save to database (includes icon processing)
item_db.save_to_db_with_images(&mut conn, "path/to/icons")?;
```
## XML Format Examples
### Item XML
```xml
<item id="150" name="Iron Sword" level="10" price="500" maxstack="1">
<stat damagephysical="25" accuracyphysical="5"/>
<anim idle="1" walk="2" run="3" weaponattack="4"/>
</item>
```
### NPC XML
```xml
<npc id="45" name="Goblin" level="5" health="100" aggressive="1">
<stat damagephysical="10" resistancephysical="5"/>
<level swordsmanship="3" defence="2"/>
</npc>
```
### Quest XML
```xml
<quest id="1" name="First Steps" mainquest="1">
<phase id="1" trackerdescription="Talk to the Elder"/>
<phase id="2" trackerdescription="Collect 5 herbs"/>
<rewards>
<reward item="100" amount="1"/>
<reward skill="swordsmanship" xp="50"/>
</rewards>
</quest>
```
## Error Handling
The parser uses a custom `XmlParseError` enum to handle various failure modes:
- **MissingAttribute**: Required XML attribute not found (e.g., missing `id`)
- **InvalidAttribute**: Attribute value cannot be parsed (e.g., non-numeric ID)
- **XmlError**: Malformed XML syntax
- **IoError**: File not found or permission denied
Parsers fail fast on required attributes but use defaults for optional ones:
```rust
// Required - returns error if missing
let id = attrs.get("id")
.ok_or_else(|| XmlParseError::MissingAttribute("id".to_string()))?;
// Optional - uses default if missing
let level = attrs.get("level")
.and_then(|v| v.parse().ok())
.unwrap_or(1);
```
## Performance Considerations
- **Streaming parser**: Uses `quick_xml` which processes XML as a stream, keeping memory usage low
- **Single-pass parsing**: Each file is read once and parsed in a single pass
- **Batch database inserts**: Data is collected into vectors before database insertion
- **Selective parsing**: CLI allows parsing only needed data types, reducing processing time
## Adding a New Parser
To add support for a new XML data type:
1. **Create the type** in `types/cursebreaker/new_type.rs`
2. **Create the parser** in `xml_parsers/new_type.rs`
3. **Export from mod.rs**: Add `mod new_type;` and `pub use new_type::parse_new_type_xml;`
4. **Create database module** in `databases/new_type_database.rs`
5. **Add CLI flag** in `bin/xml-parser.rs`
6. **Update this documentation**

View File

@@ -1,442 +0,0 @@
# XML Parsing in Cursebreaker Parser
This document describes the XML parsing functionality added to the cursebreaker-parser project.
## Overview
The parser now supports loading game data from Cursebreaker's XML files and storing them in efficient data structures for runtime access and SQL database serialization.
## Features
- ✅ Parse Items, NPCs, Quests, and Harvestables XML files with full attribute and nested element support
- ✅ In-memory databases with fast lookups by ID, name, and various filters
- ✅ JSON serialization for SQL database storage
- ✅ Type-safe data structures with serde support
- ✅ Easy-to-use API with query methods
- ✅ Cross-referencing support between different data types
## Quick Start
### Loading Items
```rust
use cursebreaker_parser::ItemDatabase;
let item_db = ItemDatabase::load_from_xml("Data/XMLs/Items/Items.xml")?;
println!("Loaded {} items", item_db.len());
```
### Querying Items
```rust
// Get by ID
if let Some(item) = item_db.get_by_id(150) {
println!("Found: {}", item.name);
}
// Get by category
let bows = item_db.get_by_category("bow");
// Get by slot
let weapons = item_db.get_by_slot("weapon");
// Get by skill requirement
let magic_items = item_db.get_by_skill("magic");
// Get all items
for item in item_db.all_items() {
println!("{}: {}", item.id, item.name);
}
```
### SQL Serialization
```rust
// Prepare items for SQL insertion
let sql_data = item_db.prepare_for_sql();
for (id, name, json_data) in sql_data {
// INSERT INTO items (id, name, data) VALUES (?, ?, ?)
// Use your preferred SQL library to insert
}
```
## Data Structures
### Item
The main `Item` struct contains all item attributes from the XML:
```rust
pub struct Item {
// Required
pub id: i32,
pub name: String,
// Optional attributes
pub level: Option<i32>,
pub description: Option<String>,
pub price: Option<i32>,
pub slot: Option<String>,
pub category: Option<String>,
pub skill: Option<String>,
// ... many more fields
// Nested elements
pub stats: Vec<ItemStat>,
pub crafting_recipes: Vec<CraftingRecipe>,
pub animations: Option<AnimationSet>,
pub generate_rules: Vec<GenerateRule>,
}
```
### ItemStat
Represents item statistics:
```rust
pub struct ItemStat {
// Damage
pub damagephysical: Option<i32>,
pub damagemagical: Option<i32>,
pub damageranged: Option<i32>,
// Accuracy
pub accuracyphysical: Option<i32>,
pub accuracymagical: Option<i32>,
pub accuracyranged: Option<i32>,
// Resistance
pub resistancephysical: Option<i32>,
pub resistancemagical: Option<i32>,
pub resistanceranged: Option<i32>,
// Core stats
pub health: Option<i32>,
pub mana: Option<i32>,
pub manaregen: Option<i32>,
pub healing: Option<i32>,
// Harvesting
pub harvestingspeedwoodcutting: Option<i32>,
}
```
## Example Programs
Run the demos to see all features in action:
```bash
# Items only
cargo run --example item_database_demo
# All game data (Items, NPCs, Quests, Harvestables)
cargo run --example game_data_demo
```
## Loading NPCs
```rust
use cursebreaker_parser::NpcDatabase;
let npc_db = NpcDatabase::load_from_xml("Data/XMLs/Npcs/NPCInfo.xml")?;
println!("Loaded {} NPCs", npc_db.len());
```
### Querying NPCs
```rust
// Get by ID
if let Some(npc) = npc_db.get_by_id(1) {
println!("Found: {}", npc.name);
}
// Get hostile NPCs
let hostile = npc_db.get_hostile();
// Get interactable NPCs
let interactable = npc_db.get_interactable();
// Get NPCs by tag
let undead = npc_db.get_by_tag("Undead");
// Get shopkeepers
let shops = npc_db.get_shopkeepers();
```
## Loading Quests
```rust
use cursebreaker_parser::QuestDatabase;
let quest_db = QuestDatabase::load_from_xml("Data/XMLs/Quests/Quests.xml")?;
println!("Loaded {} quests", quest_db.len());
```
### Querying Quests
```rust
// Get by ID
if let Some(quest) = quest_db.get_by_id(1) {
println!("Quest: {}", quest.name);
println!("Phases: {}", quest.phase_count());
}
// Get main quests
let main_quests = quest_db.get_main_quests();
// Get side quests
let side_quests = quest_db.get_side_quests();
// Get hidden quests
let hidden = quest_db.get_hidden_quests();
```
## Loading Harvestables
```rust
use cursebreaker_parser::HarvestableDatabase;
let harvestable_db = HarvestableDatabase::load_from_xml("Data/XMLs/Harvestables/HarvestableInfo.xml")?;
println!("Loaded {} harvestables", harvestable_db.len());
```
### Querying Harvestables
```rust
// Get by type ID
if let Some(harvestable) = harvestable_db.get_by_typeid(1) {
println!("Found: {}", harvestable.name);
}
// Get by skill
let woodcutting = harvestable_db.get_by_skill("Woodcutting");
let mining = harvestable_db.get_by_skill("mining");
let fishing = harvestable_db.get_by_skill("Fishing");
// Get trees (harvestables with tree=1)
let trees = harvestable_db.get_trees();
// Get by tool requirement
let hatchet_nodes = harvestable_db.get_by_tool("hatchet");
let pickaxe_nodes = harvestable_db.get_by_tool("pickaxe");
// Get by level range
let beginner = harvestable_db.get_by_level_range(1, 10);
let advanced = harvestable_db.get_by_level_range(50, 100);
```
## Loading Loot Tables
```rust
use cursebreaker_parser::LootDatabase;
let loot_db = LootDatabase::load_from_xml("Data/XMLs/Loot/Loot.xml")?;
println!("Loaded {} loot tables", loot_db.len());
```
### Querying Loot Tables
```rust
// Get all loot tables for a specific NPC
let npc_id = 45;
let tables = loot_db.get_tables_for_npc(npc_id);
// Get all drops for a specific NPC
let drops = loot_db.get_drops_for_npc(npc_id);
for drop in drops {
println!("Item ID: {}, Rate: {:?}", drop.item, drop.rate);
}
// Find which NPCs drop a specific item
let item_id = 180;
let npcs = loot_db.get_npcs_dropping_item(item_id);
println!("Item {} drops from {} NPCs", item_id, npcs.len());
// Get all tables with conditional drops (checks field)
let conditional = loot_db.get_conditional_tables();
// Get all tables with guaranteed drops (rate = 1)
let guaranteed = loot_db.get_tables_with_guaranteed_drops();
// Get all unique item IDs that can drop
let droppable_items = loot_db.get_all_droppable_items();
// Get all NPCs that have loot tables
let npcs_with_loot = loot_db.get_all_npcs_with_loot();
```
## Cross-referencing Data
```rust
// Find items rewarded by quests
for quest in quest_db.all_quests() {
for reward in &quest.rewards {
if let Some(item_id) = reward.item {
if let Some(item) = item_db.get_by_id(item_id) {
println!("Quest '{}' rewards: {}", quest.name, item.name);
}
}
}
}
// Find NPCs that give quests
for npc in npc_db.all_npcs() {
if !npc.questmarkers.is_empty() {
println!("NPC '{}' has {} quest markers", npc.name, npc.questmarkers.len());
}
}
// Find items that drop from harvestables
for harvestable in harvestable_db.all_harvestables() {
for drop in &harvestable.drops {
if let Some(item) = item_db.get_by_id(drop.id) {
println!("'{}' drops: {} (rate: {})",
harvestable.name, item.name, drop.droprate.unwrap_or(0));
}
}
}
// Find what items an NPC drops
let npc_id = 45;
if let Some(npc) = npc_db.get_by_id(npc_id) {
let drops = loot_db.get_drops_for_npc(npc_id);
println!("NPC '{}' drops {} items:", npc.name, drops.len());
for drop in drops {
if let Some(item) = item_db.get_by_id(drop.item) {
println!(" - {}", item.name);
}
}
}
// Find which NPCs drop a specific item
let item_id = 180;
if let Some(item) = item_db.get_by_id(item_id) {
let npcs = loot_db.get_npcs_dropping_item(item_id);
println!("Item '{}' drops from:", item.name);
for npc_id in npcs {
if let Some(npc) = npc_db.get_by_id(npc_id) {
println!(" - {}", npc.name);
}
}
}
```
## Statistics from XML Files
When loaded from `/home/connor/repos/CBAssets/Data/XMLs/`:
### Items.xml
- **Total Items**: 1,360
- **Weapons**: 166
- **Armor**: 148
- **Consumables**: 294
- **Trinkets**: 59
- **Bows**: 18
- **Magic Items**: 76
### NPCs/NPCInfo.xml
- **Total NPCs**: 1,242
- **Hostile NPCs**: 328
- **Interactable NPCs**: 512
- **Undead**: 71
- **Predators**: 13
- **Quest Givers**: 108
### Quests/Quests.xml
- **Total Quests**: 108
- **Main Quests**: 19
- **Side Quests**: 89
- **Hidden Quests**: 2
- **Unique Quest Reward Items**: 70
### Harvestables/HarvestableInfo.xml
- **Total Harvestables**: 96
- **Trees**: 9
- **Woodcutting**: 10
- **Mining**: 11
- **Fishing**: 11
- **Alchemy**: 50
- **Level 1-10**: 31
- **Level 11-50**: 37
- **Level 51-100**: 28
- **Unique Items from Harvestables**: 98
### Loot/Loot.xml
- **Total Loot Tables**: 175
- **NPCs with Loot**: 267
- **Droppable Items**: 405
- **Tables with Conditional Drops**: 33
- **Tables with Guaranteed Drops**: Multiple tables include guaranteed (rate=1) drops
## File Structure
```
cursebreaker-parser/
├── src/
│ ├── lib.rs # Library exports
│ ├── main.rs # Main binary (Unity + XML parsing)
│ ├── types/
│ │ ├── mod.rs
│ │ ├── item.rs # Item data structures
│ │ ├── npc.rs # NPC data structures
│ │ ├── quest.rs # Quest data structures
│ │ ├── harvestable.rs # Harvestable data structures
│ │ ├── loot.rs # Loot table data structures
│ │ └── interactable_resource.rs
│ ├── xml_parser.rs # XML parsing logic (all types)
│ ├── item_database.rs # ItemDatabase for runtime access
│ ├── npc_database.rs # NpcDatabase for runtime access
│ ├── quest_database.rs # QuestDatabase for runtime access
│ ├── harvestable_database.rs # HarvestableDatabase for runtime access
│ └── loot_database.rs # LootDatabase for runtime access
└── examples/
├── item_database_demo.rs # Items usage example
└── game_data_demo.rs # Full game data example
```
## Dependencies Added
```toml
quick-xml = "0.37" # XML parsing
serde = { version = "1.0", features = ["derive"] } # Serialization
serde_json = "1.0" # JSON serialization
diesel = { version = "2.2", features = ["sqlite"], optional = true } # SQL (optional)
thiserror = "1.0" # Error handling
```
## Completed Features
- ✅ Items (`/XMLs/Items/Items.xml`)
- ✅ NPCs (`/XMLs/Npcs/NPCInfo.xml`)
- ✅ Quests (`/XMLs/Quests/Quests.xml`)
- ✅ Harvestables (`/XMLs/Harvestables/HarvestableInfo.xml`)
- ✅ Loot tables (`/XMLs/Loot/Loot.xml`)
## Future Enhancements
The same pattern can be extended to parse other XML files:
- [ ] Maps (`/XMLs/Maps/*.xml`)
- [ ] Dialogue (`/XMLs/Dialogue/*.xml`)
- [ ] Events (`/XMLs/Events/*.xml`)
- [ ] Achievements (`/XMLs/Achievements/*.xml`)
- [ ] Traits (`/XMLs/Traits/*.xml`)
- [ ] Shops (`/XMLs/Shops/*.xml`)
Each follows the same pattern:
1. Define data structures in `src/types/`
2. Create parser in `src/xml_parser.rs`
3. Create database wrapper for runtime access
4. Add to `lib.rs` exports
## Integration with Unity Parser
The main binary (`src/main.rs`) demonstrates integration of both systems:
1. Load game data from XML files (Items, etc.)
2. Parse Unity scenes for game objects
3. Cross-reference data (e.g., item IDs in loot spawners)
This creates a complete game data pipeline from source files to runtime.

View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "migrations"

View File

@@ -1,8 +1,10 @@
use cursebreaker_parser::{FastTravelDatabase, FastTravelType};
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load all fast travel types from the directory
let ft_db = FastTravelDatabase::load_from_directory("/home/connor/repos/CBAssets/Data/XMLs")?;
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let ft_db = FastTravelDatabase::load_from_directory(&format!("{}/Data/XMLs", cb_assets_path))?;
println!("=== Fast Travel Database Statistics ===");
println!("Total locations: {}", ft_db.len());

View File

@@ -3,17 +3,19 @@
//! Run with: cargo run --example game_data_demo
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase};
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
println!("🎮 Cursebreaker Game Data Demo\n");
// Load all game data
println!("📚 Loading game data...");
let item_db = ItemDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Items/Items.xml")?;
let npc_db = NpcDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Npcs/NPCInfo.xml")?;
let quest_db = QuestDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Quests/Quests.xml")?;
let harvestable_db = HarvestableDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Harvestables/HarvestableInfo.xml")?;
let loot_db = LootDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Loot/Loot.xml")?;
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let item_db = ItemDatabase::load_from_xml(&format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path))?;
let npc_db = NpcDatabase::load_from_xml(&format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path))?;
let quest_db = QuestDatabase::load_from_xml(&format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path))?;
let harvestable_db = HarvestableDatabase::load_from_xml(&format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path))?;
let loot_db = LootDatabase::load_from_xml(&format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path))?;
println!("✅ Loaded {} items", item_db.len());
println!("✅ Loaded {} NPCs", npc_db.len());

View File

@@ -3,12 +3,14 @@
//! Run with: cargo run --example item_database_demo
use cursebreaker_parser::ItemDatabase;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
println!("🎮 Cursebreaker Item Database Demo\n");
// Load items from XML
let items_path = "/home/connor/repos/CBAssets/Data/XMLs/Items/Items.xml";
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
println!("📚 Loading items from: {}", items_path);
let item_db = ItemDatabase::load_from_xml(items_path)?;

View File

@@ -1,8 +1,10 @@
use cursebreaker_parser::MapDatabase;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load the Maps.xml file
let map_db = MapDatabase::load_from_xml("/home/connor/repos/CBAssets/Data/XMLs/Maps/Maps.xml")?;
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let map_db = MapDatabase::load_from_xml(&format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path))?;
println!("=== Map Database Statistics ===");
println!("Total maps loaded: {}", map_db.len());

View File

@@ -1,9 +1,11 @@
use cursebreaker_parser::PlayerHouseDatabase;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load all player houses from XML
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let ph_db = PlayerHouseDatabase::load_from_xml(
"/home/connor/repos/CBAssets/Data/XMLs/PlayerHouses/PlayerHouses.xml",
&format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path),
)?;
println!("=== Player House Database Statistics ===");

View File

@@ -0,0 +1,59 @@
//! Example: Query world resources from the database
//!
//! Run with: cargo run --example query_world_resources
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Open the SQLite database; DATABASE_URL overrides the default relative path.
    let database_url =
        env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
    let mut conn = SqliteConnection::establish(&database_url)?;

    // Bring the generated table/column DSL for `world_resources` into scope.
    use cursebreaker_parser::schema::world_resources::dsl::*;
    use std::collections::HashMap;

    // Row shape for this query.
    // NOTE(review): Queryable maps columns by position — field order must
    // match the selected columns of `world_resources`; confirm against schema.rs.
    #[derive(Queryable, Debug)]
    struct WorldResource {
        item_id: i32,
        pos_x: f32,
        pos_y: f32,
    }

    // First page: cap the result set at ten rows for a quick preview.
    let first_page = world_resources
        .limit(10)
        .load::<WorldResource>(&mut conn)?;

    println!("Found {} resources (showing first 10):", first_page.len());
    println!();
    for res in first_page {
        println!("Resource:");
        println!(" Item ID: {}", res.item_id);
        println!(" Position: ({:.2}, {:.2})", res.pos_x, res.pos_y);
        println!();
    }

    // Second pass: load every row to build summary statistics.
    println!("\n--- All world resources ---");
    let all_rows = world_resources.load::<WorldResource>(&mut conn)?;
    println!("Found {} total resources", all_rows.len());

    // Tally how many placements exist for each item id.
    let mut counts: HashMap<i32, usize> = HashMap::new();
    for row in &all_rows {
        *counts.entry(row.item_id).or_default() += 1;
    }

    println!("\nResource counts by item ID:");
    for (id_value, n) in counts {
        println!(" Item {}: {} instances", id_value, n);
    }

    Ok(())
}

View File

@@ -0,0 +1,53 @@
//! Example: Query resource icons from the database
//!
//! This example shows how to retrieve processed resource icons for harvestables.
//! Icons are 64x64 WebP images with white borders.
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to database; DATABASE_URL overrides the default relative path.
    let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
    let mut conn = SqliteConnection::establish(&database_url)?;

    // Row shape for the query.
    // NOTE(review): Queryable maps columns by position — field order must
    // match the selected columns of `resource_icons`; confirm against schema.rs.
    #[derive(Queryable, Debug)]
    struct ResourceIcon {
        item_id: i32,
        name: String,
        icon_64: Vec<u8>,
    }

    // Import schema DSL for the resource_icons table.
    use cursebreaker_parser::schema::resource_icons::dsl::*;

    // Query all resource icons.
    let icons = resource_icons.load::<ResourceIcon>(&mut conn)?;

    println!("📦 Resource Icons Database");
    println!("========================\n");
    println!("Total icons: {}\n", icons.len());

    for icon in icons {
        println!("Harvestable ID: {}", icon.item_id);
        println!(" Name: {}", icon.name);
        println!(" Icon size: {} bytes (WebP format, 64x64 with white border)", icon.icon_64.len());
        println!();
    }

    // Example: Get icon for a specific harvestable.
    // Fix: `.first()` returns Err(NotFound) when the row is missing, which
    // aborted the whole example. `.optional()` converts that case to None so
    // a missing icon is reported instead of crashing.
    println!("\n🔍 Looking up Copper Ore (harvestable_id = 2):");
    match resource_icons
        .filter(item_id.eq(2))
        .first::<ResourceIcon>(&mut conn)
        .optional()?
    {
        Some(copper_icon) => {
            println!(" Name: {}", copper_icon.name);
            println!(" Icon size: {} bytes", copper_icon.icon_64.len());
            // You can save the icon to a file for testing:
            // std::fs::write("copper_ore.webp", &copper_icon.icon_64)?;
        }
        None => println!(" No icon stored for harvestable_id = 2"),
    }

    Ok(())
}

View File

@@ -1,9 +1,11 @@
use cursebreaker_parser::ShopDatabase;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load all shops from XML
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let shop_db = ShopDatabase::load_from_xml(
"/home/connor/repos/CBAssets/Data/XMLs/Shops/Shops.xml",
&format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path),
)?;
println!("=== Shop Database Statistics ===");

View File

@@ -1,9 +1,11 @@
use cursebreaker_parser::TraitDatabase;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load all traits from XML
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let trait_db = TraitDatabase::load_from_xml(
"/home/connor/repos/CBAssets/Data/XMLs/Traits/Traits.xml",
&format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path),
)?;
println!("=== Trait Database Statistics ===");

View File

@@ -0,0 +1,140 @@
//! Example: Query world objects from the database
//!
//! Run with: cargo run --example verify_world_objects
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to database; DATABASE_URL overrides the default relative path.
    let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
    let mut conn = SqliteConnection::establish(&database_url)?;
    // Each query below lives in its own block so the wildcard `dsl::*`
    // imports (table + column names) from different tables cannot collide.
    //
    // NOTE(review): every #[derive(Queryable)] struct maps columns by
    // position, so its field order must match the table's column order in
    // schema.rs — re-confirm whenever the schema changes.
    // Query teleporters
    {
        use cursebreaker_parser::schema::world_teleporters::dsl::*;
        // Row shape: source position plus optional destination coordinates.
        #[derive(Queryable, Debug)]
        struct Teleporter {
            pos_x: f32,
            pos_y: f32,
            tp_x: Option<f32>,
            tp_y: Option<f32>,
        }
        let results = world_teleporters.load::<Teleporter>(&mut conn)?;
        println!("=== World Teleporters ===");
        println!("Found {} teleporters\n", results.len());
        for tp in results {
            print!(" At ({:.2}, {:.2})", tp.pos_x, tp.pos_y);
            // A destination is reported only when both coordinates are set.
            if let (Some(tx), Some(ty)) = (tp.tp_x, tp.tp_y) {
                println!(" -> teleports to ({:.2}, {:.2})", tx, ty);
            } else {
                println!(" -> no destination");
            }
        }
        println!();
    }
    // Query workbenches
    {
        use cursebreaker_parser::schema::world_workbenches::dsl::*;
        // Row shape: position plus the crafting-station id.
        #[derive(Queryable, Debug)]
        struct Workbench {
            pos_x: f32,
            pos_y: f32,
            workbench_id: i32,
        }
        let results = world_workbenches.load::<Workbench>(&mut conn)?;
        println!("=== World Workbenches ===");
        println!("Found {} workbenches\n", results.len());
        for wb in results {
            println!(" Workbench ID {} at ({:.2}, {:.2})", wb.workbench_id, wb.pos_x, wb.pos_y);
        }
        println!();
    }
    // Query loot spawners
    {
        use cursebreaker_parser::schema::world_loot::dsl::*;
        // Row shape: position, spawned item/amount, respawn timer, and an
        // optional (possibly empty) visibility-check string.
        #[derive(Queryable, Debug)]
        struct Loot {
            pos_x: f32,
            pos_y: f32,
            item_id: i32,
            amount: i32,
            respawn_time: i32,
            visibility_checks: String,
        }
        let results = world_loot.load::<Loot>(&mut conn)?;
        println!("=== World Loot ===");
        println!("Found {} loot spawners\n", results.len());
        for loot in results {
            println!(" Item {} x{} (respawn: {}s) at ({:.2}, {:.2})",
                loot.item_id, loot.amount, loot.respawn_time, loot.pos_x, loot.pos_y);
            // Only show the visibility-check string when one is present.
            if !loot.visibility_checks.is_empty() {
                println!(" Visibility checks: {}", loot.visibility_checks);
            }
        }
        println!();
    }
    // Query map icons
    {
        use cursebreaker_parser::schema::world_map_icons::dsl::*;
        // Row shape: position plus icon presentation fields (type, size,
        // icon name, label text, font size, hover text).
        #[derive(Queryable, Debug)]
        struct MapIcon {
            pos_x: f32,
            pos_y: f32,
            icon_type: i32,
            icon_size: i32,
            icon: String,
            text: String,
            font_size: i32,
            hover_text: String,
        }
        let results = world_map_icons.load::<MapIcon>(&mut conn)?;
        println!("=== World Map Icons ===");
        println!("Found {} map icons\n", results.len());
        for map_icon in results {
            print!(" Type {} at ({:.2}, {:.2})", map_icon.icon_type, map_icon.pos_x, map_icon.pos_y);
            // Label and hover text are optional; print only non-empty values.
            if !map_icon.text.is_empty() {
                print!(" - Text: \"{}\"", map_icon.text);
            }
            if !map_icon.hover_text.is_empty() {
                print!(" - Hover: \"{}\"", map_icon.hover_text);
            }
            println!();
        }
        println!();
    }
    // Query map name changers
    {
        use cursebreaker_parser::schema::world_map_name_changers::dsl::*;
        // Row shape: trigger position plus the map name it switches to.
        #[derive(Queryable, Debug)]
        struct MapNameChanger {
            pos_x: f32,
            pos_y: f32,
            map_name: String,
        }
        let results = world_map_name_changers.load::<MapNameChanger>(&mut conn)?;
        println!("=== World Map Name Changers ===");
        println!("Found {} map name changers\n", results.len());
        for changer in results {
            println!(" \"{}\" at ({:.2}, {:.2})", changer.map_name, changer.pos_x, changer.pos_y);
        }
        println!();
    }
    Ok(())
}

View File

View File

@@ -0,0 +1,5 @@
-- Rollback migration for minimap_tiles table
-- Drop the indexes first, then the table itself. IF EXISTS keeps the
-- rollback safe to re-run even if the up migration only partially applied.
DROP INDEX IF EXISTS idx_minimap_y;
DROP INDEX IF EXISTS idx_minimap_x;
DROP INDEX IF EXISTS idx_minimap_coords;
DROP TABLE IF EXISTS minimap_tiles;

View File

@@ -0,0 +1,39 @@
-- Minimap tiles table storing processed WebP images
-- One row per map tile; the same tile is stored at four resolutions so a
-- consumer can pick a size without rescaling at read time.
CREATE TABLE minimap_tiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- Tile coordinates (matching file naming: x_y.png)
    x INTEGER NOT NULL,
    y INTEGER NOT NULL,
    -- Original PNG metadata
    original_width INTEGER NOT NULL DEFAULT 512,
    original_height INTEGER NOT NULL DEFAULT 512,
    original_file_size INTEGER,
    -- WebP blobs at different resolutions
    webp_512 BLOB NOT NULL, -- 512x512 WebP
    webp_256 BLOB NOT NULL, -- 256x256 WebP
    webp_128 BLOB NOT NULL, -- 128x128 WebP
    webp_64 BLOB NOT NULL, -- 64x64 WebP
    -- Blob sizes for quick reference
    webp_512_size INTEGER NOT NULL,
    webp_256_size INTEGER NOT NULL,
    webp_128_size INTEGER NOT NULL,
    webp_64_size INTEGER NOT NULL,
    -- Processing metadata
    processed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    source_path TEXT NOT NULL,
    -- Ensure unique coordinate pairs (re-processing a tile must replace,
    -- not duplicate, the row)
    UNIQUE(x, y)
);
-- Index for fast coordinate lookups
CREATE INDEX idx_minimap_coords ON minimap_tiles(x, y);
-- Index for boundary queries
CREATE INDEX idx_minimap_x ON minimap_tiles(x);
CREATE INDEX idx_minimap_y ON minimap_tiles(y);

View File

@@ -0,0 +1,33 @@
-- Drop all game data tables
-- Tables are removed in reverse creation order; IF EXISTS keeps the
-- rollback idempotent. (Dropping a table also removes its indexes in
-- SQLite; the explicit DROP INDEX statements make the intent visible.)
DROP INDEX IF EXISTS idx_shops_name;
DROP TABLE IF EXISTS shops;
DROP INDEX IF EXISTS idx_traits_trainer;
DROP INDEX IF EXISTS idx_traits_name;
DROP TABLE IF EXISTS traits;
DROP INDEX IF EXISTS idx_player_houses_map;
DROP INDEX IF EXISTS idx_player_houses_name;
DROP TABLE IF EXISTS player_houses;
DROP INDEX IF EXISTS idx_fast_travel_map;
DROP INDEX IF EXISTS idx_fast_travel_name;
DROP TABLE IF EXISTS fast_travel_locations;
DROP INDEX IF EXISTS idx_maps_name;
DROP TABLE IF EXISTS maps;
DROP INDEX IF EXISTS idx_loot_npc;
DROP TABLE IF EXISTS loot_tables;
DROP INDEX IF EXISTS idx_harvestables_name;
DROP TABLE IF EXISTS harvestables;
DROP INDEX IF EXISTS idx_quests_name;
DROP TABLE IF EXISTS quests;
DROP INDEX IF EXISTS idx_npcs_name;
DROP TABLE IF EXISTS npcs;
DROP INDEX IF EXISTS idx_items_name;
DROP TABLE IF EXISTS items;

View File

@@ -0,0 +1,98 @@
-- Initial game-data schema.
-- Common pattern: each entity keeps its full parsed payload as text in
-- `data` (presumably JSON — confirm against the parser) with a few
-- promoted columns (name, ids) indexed for lookup.
-- Items table
CREATE TABLE items (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_items_name ON items(name);
-- NPCs table
CREATE TABLE npcs (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_npcs_name ON npcs(name);
-- Quests table
CREATE TABLE quests (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_quests_name ON quests(name);
-- Harvestables table
CREATE TABLE harvestables (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_harvestables_name ON harvestables(name);
-- Loot tables
CREATE TABLE loot_tables (
    table_id TEXT PRIMARY KEY,
    npc_id TEXT,
    data TEXT NOT NULL
);
CREATE INDEX idx_loot_npc ON loot_tables(npc_id);
-- Maps table
CREATE TABLE maps (
    scene_id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_maps_name ON maps(name);
-- Fast travel locations table
CREATE TABLE fast_travel_locations (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    map_name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_fast_travel_name ON fast_travel_locations(name);
CREATE INDEX idx_fast_travel_map ON fast_travel_locations(map_name);
-- Player houses table
CREATE TABLE player_houses (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    map_id INTEGER NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_player_houses_name ON player_houses(name);
CREATE INDEX idx_player_houses_map ON player_houses(map_id);
-- Traits table
CREATE TABLE traits (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT,
    trainer_id INTEGER,
    data TEXT NOT NULL
);
CREATE INDEX idx_traits_name ON traits(name);
CREATE INDEX idx_traits_trainer ON traits(trainer_id);
-- Shops table
CREATE TABLE shops (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    unique_items INTEGER NOT NULL, -- boolean as 0/1
    item_count INTEGER NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_shops_name ON shops(name);

View File

@@ -0,0 +1,2 @@
-- Rollback for merged_tiles: drop the index first, then the table
-- (both guarded with IF EXISTS so the rollback can be re-run safely)
DROP INDEX IF EXISTS idx_merged_tiles_zoom_coords;
DROP TABLE IF EXISTS merged_tiles;

View File

@@ -0,0 +1,31 @@
-- Create merged_tiles table for storing merged map tiles at different zoom levels
-- Zoom level 2: 1x1 tiles (512px original tiles)
-- Zoom level 1: 2x2 tiles merged into 512px
-- Zoom level 0: 4x4 tiles merged into 512px
CREATE TABLE merged_tiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- Tile coordinates at this zoom level
    x INTEGER NOT NULL,
    y INTEGER NOT NULL,
    -- Zoom level (0 = most zoomed out, 2 = most zoomed in)
    zoom_level INTEGER NOT NULL,
    -- Number of original tiles merged (1, 4, or 16)
    merge_factor INTEGER NOT NULL,
    -- Dimensions of the merged image
    width INTEGER NOT NULL,
    height INTEGER NOT NULL,
    -- WebP image data (lossless compression)
    webp_data BLOB NOT NULL,
    webp_size INTEGER NOT NULL,
    -- Metadata
    processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
    -- Track which original tiles were merged (for debugging)
    source_tiles TEXT NOT NULL,
    -- Unique constraint on zoom level + coordinates
    UNIQUE(zoom_level, x, y)
);
-- Index for fast lookups
-- NOTE(review): UNIQUE(zoom_level, x, y) already provides an implicit
-- index with the same leading columns — this one is likely redundant.
CREATE INDEX idx_merged_tiles_zoom_coords ON merged_tiles(zoom_level, x, y);

View File

@@ -0,0 +1,26 @@
-- This migration cannot be rolled back automatically
-- You would need to re-run the image-parser to restore data
-- (the WebP blobs are destroyed by the DROP; only the shape is restored)
DROP INDEX IF EXISTS idx_minimap_tiles_coords;
DROP INDEX IF EXISTS idx_minimap_tiles_zoom_coords;
DROP TABLE IF EXISTS minimap_tiles;
-- Restore old structure (data will be lost)
CREATE TABLE minimap_tiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    x INTEGER NOT NULL,
    y INTEGER NOT NULL,
    original_width INTEGER NOT NULL,
    original_height INTEGER NOT NULL,
    original_file_size INTEGER,
    webp_512 BLOB NOT NULL,
    webp_256 BLOB NOT NULL,
    webp_128 BLOB NOT NULL,
    webp_64 BLOB NOT NULL,
    webp_512_size INTEGER NOT NULL,
    webp_256_size INTEGER NOT NULL,
    webp_128_size INTEGER NOT NULL,
    webp_64_size INTEGER NOT NULL,
    processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
    source_path TEXT NOT NULL,
    UNIQUE(x, y)
);

View File

@@ -0,0 +1,34 @@
-- Drop merged_tiles table (no longer needed)
DROP TABLE IF EXISTS merged_tiles;
-- (the table drop already removed its indexes; IF EXISTS makes this a no-op)
DROP INDEX IF EXISTS idx_merged_tiles_zoom_coords;
-- Drop old minimap_tiles table
DROP TABLE IF EXISTS minimap_tiles;
-- Create new minimap_tiles table with simplified structure:
-- a single blob per row, with the zoom level folded into the key.
CREATE TABLE minimap_tiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- Tile coordinates (at zoom level 2, original tile coords)
    x INTEGER NOT NULL,
    y INTEGER NOT NULL,
    -- Zoom level (0 = 4x4 merged, 1 = 2x2 merged, 2 = original)
    zoom INTEGER NOT NULL,
    -- Image dimensions (always 512x512 for merged tiles)
    width INTEGER NOT NULL,
    height INTEGER NOT NULL,
    -- Original file size (only for zoom=2)
    original_file_size INTEGER,
    -- WebP image data (lossless)
    image BLOB NOT NULL,
    image_size INTEGER NOT NULL,
    -- Metadata
    processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
    source_path TEXT NOT NULL,
    -- Unique constraint on coordinates + zoom
    UNIQUE(x, y, zoom)
);
-- Index for fast lookups
CREATE INDEX idx_minimap_tiles_zoom_coords ON minimap_tiles(zoom, x, y);
CREATE INDEX idx_minimap_tiles_coords ON minimap_tiles(x, y);

View File

@@ -0,0 +1,26 @@
-- Undo the expand_items migration
-- Only the tables and indexes added by the up migration are removed; the
-- columns added to `items` are intentionally left in place (see below).
-- Drop crafting tables
DROP INDEX IF EXISTS idx_crafting_recipe_items_item;
DROP TABLE IF EXISTS crafting_recipe_items;
DROP INDEX IF EXISTS idx_crafting_recipes_workbench;
DROP INDEX IF EXISTS idx_crafting_recipes_level;
DROP INDEX IF EXISTS idx_crafting_recipes_skill;
DROP INDEX IF EXISTS idx_crafting_recipes_product;
DROP TABLE IF EXISTS crafting_recipes;
-- Drop item indexes
DROP INDEX IF EXISTS idx_items_skill;
DROP INDEX IF EXISTS idx_items_price;
DROP INDEX IF EXISTS idx_items_level;
DROP INDEX IF EXISTS idx_items_type;
-- Note: SQLite doesn't support DROP COLUMN in ALTER TABLE
-- To truly revert, we'd need to recreate the table without the columns
-- For now, we'll leave the columns in place (they won't hurt with defaults)
-- If you need a full revert, you'd need to:
-- 1. CREATE TABLE items_backup (id, name, data)
-- 2. INSERT INTO items_backup SELECT id, name, data FROM items
-- 3. DROP TABLE items
-- 4. ALTER TABLE items_backup RENAME TO items

View File

@@ -0,0 +1,72 @@
-- Add core columns to items table for efficient querying
-- Each ALTER adds a NOT NULL column with a default so existing rows stay
-- valid without a rewrite; boolean flags use the SQLite 0/1 convention.
-- Item classification
ALTER TABLE items ADD COLUMN item_type TEXT NOT NULL DEFAULT 'resource';
ALTER TABLE items ADD COLUMN level INTEGER NOT NULL DEFAULT 1;
-- Economy
ALTER TABLE items ADD COLUMN price INTEGER NOT NULL DEFAULT 0;
-- Stacking and storage
ALTER TABLE items ADD COLUMN max_stack INTEGER NOT NULL DEFAULT 1;
ALTER TABLE items ADD COLUMN storage_size INTEGER NOT NULL DEFAULT 0;
-- Skills
ALTER TABLE items ADD COLUMN skill TEXT NOT NULL DEFAULT 'none';
ALTER TABLE items ADD COLUMN tool TEXT NOT NULL DEFAULT 'none';
-- Visual/UI
ALTER TABLE items ADD COLUMN description TEXT NOT NULL DEFAULT '';
-- Boolean flags (stored as INTEGER: 0=false, 1=true)
ALTER TABLE items ADD COLUMN two_handed INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN undroppable INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN undroppable_on_death INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN unequip_destroy INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN generate_icon INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN hide_milestone INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN cannot_craft_exceptional INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN storage_all_items INTEGER NOT NULL DEFAULT 0;
-- Ability and item IDs
-- (0 is used as the "none" sentinel for all of these id columns)
ALTER TABLE items ADD COLUMN ability_id INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN special_ability INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN learn_ability_id INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN book_id INTEGER NOT NULL DEFAULT 0;
ALTER TABLE items ADD COLUMN swap_item INTEGER NOT NULL DEFAULT 0;
-- Create indexes for commonly queried columns
CREATE INDEX idx_items_type ON items(item_type);
CREATE INDEX idx_items_level ON items(level);
CREATE INDEX idx_items_price ON items(price);
CREATE INDEX idx_items_skill ON items(skill);
-- Crafting recipes table
CREATE TABLE crafting_recipes (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    product_item_id INTEGER NOT NULL,
    skill TEXT NOT NULL,
    level INTEGER NOT NULL,
    workbench_id INTEGER NOT NULL,
    xp INTEGER NOT NULL DEFAULT 0,
    unlocked_by_default INTEGER NOT NULL DEFAULT 1,
    checks TEXT, -- nullable, for conditional recipes
    FOREIGN KEY (product_item_id) REFERENCES items(id) ON DELETE CASCADE
);
CREATE INDEX idx_crafting_recipes_product ON crafting_recipes(product_item_id);
CREATE INDEX idx_crafting_recipes_skill ON crafting_recipes(skill);
CREATE INDEX idx_crafting_recipes_level ON crafting_recipes(level);
CREATE INDEX idx_crafting_recipes_workbench ON crafting_recipes(workbench_id);
-- Crafting recipe ingredients (many-to-many)
CREATE TABLE crafting_recipe_items (
    recipe_id INTEGER NOT NULL,
    item_id INTEGER NOT NULL,
    amount INTEGER NOT NULL,
    PRIMARY KEY (recipe_id, item_id),
    FOREIGN KEY (recipe_id) REFERENCES crafting_recipes(id) ON DELETE CASCADE,
    FOREIGN KEY (item_id) REFERENCES items(id)
);
CREATE INDEX idx_crafting_recipe_items_item ON crafting_recipe_items(item_id);

View File

@@ -0,0 +1,5 @@
-- Undo the add_item_images migration
-- Note: SQLite doesn't support DROP COLUMN in ALTER TABLE
-- The icon columns will remain but can be set to NULL
-- To truly revert, you would need to recreate the table without the image columns
-- (this down migration is therefore an intentional no-op)

View File

@@ -0,0 +1,6 @@
-- Add item icon columns (WebP format)
-- These store the processed WebP images at different resolutions
-- Columns are nullable on purpose: existing rows stay valid and icons can
-- be back-filled incrementally by the image pipeline.
ALTER TABLE items ADD COLUMN icon_large BLOB; -- 256x256 WebP
ALTER TABLE items ADD COLUMN icon_medium BLOB; -- 64x64 WebP
ALTER TABLE items ADD COLUMN icon_small BLOB; -- 16x16 WebP

View File

@@ -0,0 +1,6 @@
-- Undo the add_item_stats migration
-- (indexes dropped explicitly for clarity; IF EXISTS keeps this idempotent)
DROP INDEX IF EXISTS idx_item_stats_type_value;
DROP INDEX IF EXISTS idx_item_stats_value;
DROP INDEX IF EXISTS idx_item_stats_stat_type;
DROP TABLE IF EXISTS item_stats;

View File

@@ -0,0 +1,15 @@
-- Create item_stats table for normalized stat storage
-- One row per (item, stat) pair instead of a JSON blob, so stats can be
-- filtered and range-queried directly in SQL.
CREATE TABLE item_stats (
    item_id INTEGER NOT NULL,
    stat_type TEXT NOT NULL,
    value REAL NOT NULL,
    PRIMARY KEY (item_id, stat_type),
    FOREIGN KEY (item_id) REFERENCES items(id) ON DELETE CASCADE
);
-- Create indexes for querying
CREATE INDEX idx_item_stats_stat_type ON item_stats(stat_type);
CREATE INDEX idx_item_stats_value ON item_stats(value);
-- Index for finding items by stat value ranges
CREATE INDEX idx_item_stats_type_value ON item_stats(stat_type, value);

View File

@@ -0,0 +1 @@
-- Rollback: remove world_resources (its indexes are dropped automatically).
-- IF EXISTS makes the rollback idempotent; the bare DROP TABLE failed when
-- the table was already gone (e.g. after a partially-applied redo).
DROP TABLE IF EXISTS world_resources;

View File

@@ -0,0 +1,14 @@
-- World resources table - stores harvestable resources from Unity scenes
-- pos_x/pos_z form the horizontal map plane (see idx_world_resources_position);
-- pos_y is the vertical (height) axis in Unity's convention.
CREATE TABLE world_resources (
    id INTEGER PRIMARY KEY,
    item_id INTEGER NOT NULL,
    scene_name TEXT NOT NULL,
    pos_x REAL NOT NULL,
    pos_y REAL NOT NULL,
    pos_z REAL NOT NULL,
    object_name TEXT NOT NULL
);
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
CREATE INDEX idx_world_resources_scene ON world_resources(scene_name);
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_z);

View File

@@ -0,0 +1,16 @@
-- Revert to original structure
-- IF EXISTS makes the rollback idempotent (was a bare DROP TABLE, which
-- errors if the table is already gone). Row data is not recoverable; the
-- scene-parser must be re-run to repopulate.
DROP TABLE IF EXISTS world_resources;
CREATE TABLE world_resources (
    id INTEGER PRIMARY KEY,
    item_id INTEGER NOT NULL,
    scene_name TEXT NOT NULL,
    pos_x REAL NOT NULL,
    pos_y REAL NOT NULL,
    pos_z REAL NOT NULL,
    object_name TEXT NOT NULL
);
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
CREATE INDEX idx_world_resources_scene ON world_resources(scene_name);
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_z);

View File

@@ -0,0 +1,13 @@
-- Drop the old table (IF EXISTS so the migration is safe to re-run)
DROP TABLE IF EXISTS world_resources;
-- Recreate with simplified structure - no id, no scene_name, no object_name, only 2D coordinates
-- WITHOUT ROWID: the composite key is the clustered storage, saving the
-- per-row rowid for this write-once lookup table.
CREATE TABLE world_resources (
    item_id INTEGER NOT NULL,
    pos_x REAL NOT NULL,
    pos_y REAL NOT NULL,
    PRIMARY KEY (item_id, pos_x, pos_y)
) WITHOUT ROWID;
CREATE INDEX idx_world_resources_item_id ON world_resources(item_id);
CREATE INDEX idx_world_resources_position ON world_resources(pos_x, pos_y);

View File

@@ -0,0 +1,2 @@
-- Drop resource_icons table
-- IF EXISTS makes the rollback idempotent; idx_resource_icons_name is
-- removed automatically together with the table.
DROP TABLE IF EXISTS resource_icons;

View File

@@ -0,0 +1,8 @@
-- Create resource_icons table to store processed item icons for world resources
-- icon_64 holds the processed blob written by the scene-parser
-- (presumably a 64x64 WebP with outline, per the image pipeline — confirm).
CREATE TABLE resource_icons (
    item_id INTEGER PRIMARY KEY NOT NULL,
    name TEXT NOT NULL,
    icon_64 BLOB NOT NULL
);
CREATE INDEX idx_resource_icons_name ON resource_icons(name);

View File

@@ -0,0 +1,10 @@
-- Revert to the simple harvestables table
-- Drop the child table first so no dangling harvestable_id references
-- survive; expanded data is lost and must be re-parsed.
DROP TABLE IF EXISTS harvestable_drops;
DROP TABLE IF EXISTS harvestables;
CREATE TABLE harvestables (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);

View File

@@ -0,0 +1,39 @@
-- Restructure harvestables table to store expanded data
-- Replaces the JSON `data` column with typed columns plus a normalized
-- drops child table.
DROP TABLE IF EXISTS harvestables;
CREATE TABLE harvestables (
    id INTEGER PRIMARY KEY NOT NULL,
    name TEXT NOT NULL,
    description TEXT NOT NULL,
    comment TEXT NOT NULL,
    level INTEGER NOT NULL,
    skill TEXT NOT NULL,
    tool TEXT NOT NULL,
    min_health INTEGER NOT NULL,
    max_health INTEGER NOT NULL,
    -- NOTE(review): time units (ticks? seconds? ms?) are not established
    -- here — confirm against the game data parser.
    harvesttime INTEGER NOT NULL,
    hittime INTEGER NOT NULL,
    respawntime INTEGER NOT NULL
);
-- Create harvestable_drops table
CREATE TABLE harvestable_drops (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    harvestable_id INTEGER NOT NULL,
    item_id INTEGER NOT NULL,
    minamount INTEGER NOT NULL,
    maxamount INTEGER NOT NULL,
    droprate INTEGER NOT NULL,
    droprateboost INTEGER NOT NULL,
    amountboost INTEGER NOT NULL,
    comment TEXT NOT NULL,
    FOREIGN KEY (harvestable_id) REFERENCES harvestables(id),
    FOREIGN KEY (item_id) REFERENCES items(id)
);
CREATE INDEX idx_harvestable_drops_harvestable_id ON harvestable_drops(harvestable_id);
CREATE INDEX idx_harvestable_drops_item_id ON harvestable_drops(item_id);
CREATE INDEX idx_harvestables_skill ON harvestables(skill);
CREATE INDEX idx_harvestables_tool ON harvestables(tool);
CREATE INDEX idx_harvestables_level ON harvestables(level);

View File

@@ -0,0 +1,6 @@
-- Drop world scene object tables
-- IF EXISTS makes the rollback idempotent; the previous bare DROP TABLE
-- statements failed if any table was already missing, aborting the
-- rollback halfway through.
DROP TABLE IF EXISTS world_teleporters;
DROP TABLE IF EXISTS world_workbenches;
DROP TABLE IF EXISTS world_loot;
DROP TABLE IF EXISTS world_map_icons;
DROP TABLE IF EXISTS world_map_name_changers;

View File

@@ -0,0 +1,48 @@
-- Create world_teleporters table
CREATE TABLE world_teleporters (
pos_x REAL NOT NULL,
pos_y REAL NOT NULL,
tp_x REAL,
tp_y REAL,
PRIMARY KEY (pos_x, pos_y)
);
-- Create world_workbenches table
CREATE TABLE world_workbenches (
pos_x REAL NOT NULL,
pos_y REAL NOT NULL,
workbench_id INTEGER NOT NULL,
PRIMARY KEY (pos_x, pos_y)
);
-- Create world_loot table
CREATE TABLE world_loot (
pos_x REAL NOT NULL,
pos_y REAL NOT NULL,
item_id INTEGER NOT NULL,
amount INTEGER NOT NULL,
respawn_time INTEGER NOT NULL,
visibility_checks TEXT NOT NULL DEFAULT '',
PRIMARY KEY (pos_x, pos_y)
);
-- Create world_map_icons table
CREATE TABLE world_map_icons (
pos_x REAL NOT NULL,
pos_y REAL NOT NULL,
icon_type INTEGER NOT NULL,
icon_size INTEGER NOT NULL,
icon TEXT NOT NULL,
text TEXT NOT NULL DEFAULT '',
font_size INTEGER NOT NULL,
hover_text TEXT NOT NULL DEFAULT '',
PRIMARY KEY (pos_x, pos_y)
);
-- Create world_map_name_changers table
CREATE TABLE world_map_name_changers (
pos_x REAL NOT NULL,
pos_y REAL NOT NULL,
map_name TEXT NOT NULL,
PRIMARY KEY (pos_x, pos_y)
);

View File

@@ -0,0 +1,3 @@
-- Rollback for the icon tables; all drops are idempotent via IF EXISTS.
DROP TABLE IF EXISTS icons;
DROP TABLE IF EXISTS achievement_icons;
DROP TABLE IF EXISTS general_icons;

View File

@@ -0,0 +1,24 @@
-- Simple icons table (abilities, buffs, traits, player houses, stat icons)
-- A single table keyed by (category, name) instead of one table per kind.
CREATE TABLE IF NOT EXISTS icons (
    category TEXT NOT NULL,
    name TEXT NOT NULL,
    icon BLOB NOT NULL,
    PRIMARY KEY (category, name)
);
-- Achievement icons table (filtered, no _0 suffix)
CREATE TABLE IF NOT EXISTS achievement_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);
-- General icons table (multiple sizes)
-- Size variants are nullable: only the resolutions the pipeline generated
-- for a given icon are populated.
CREATE TABLE IF NOT EXISTS general_icons (
    name TEXT PRIMARY KEY NOT NULL,
    original_width INTEGER NOT NULL,
    original_height INTEGER NOT NULL,
    icon_original BLOB,
    icon_256 BLOB,
    icon_64 BLOB,
    icon_32 BLOB
);

View File

@@ -0,0 +1,14 @@
-- Drop the separate icon tables
-- (icon blobs are lost; re-run the image-parser to repopulate)
DROP TABLE IF EXISTS ability_icons;
DROP TABLE IF EXISTS buff_icons;
DROP TABLE IF EXISTS trait_icons;
DROP TABLE IF EXISTS player_house_icons;
DROP TABLE IF EXISTS stat_icons;
-- Recreate the combined icons table
CREATE TABLE IF NOT EXISTS icons (
    category TEXT NOT NULL,
    name TEXT NOT NULL,
    icon BLOB NOT NULL,
    PRIMARY KEY (category, name)
);

View File

@@ -0,0 +1,32 @@
-- Drop the combined icons table
-- Replaced by one table per icon category, all sharing the same
-- (name PRIMARY KEY, icon BLOB) shape.
DROP TABLE IF EXISTS icons;
-- Ability icons table
CREATE TABLE IF NOT EXISTS ability_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);
-- Buff icons table
CREATE TABLE IF NOT EXISTS buff_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);
-- Trait icons table
CREATE TABLE IF NOT EXISTS trait_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);
-- Player house icons table
CREATE TABLE IF NOT EXISTS player_house_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);
-- Stat icons table
CREATE TABLE IF NOT EXISTS stat_icons (
    name TEXT PRIMARY KEY NOT NULL,
    icon BLOB NOT NULL
);

View File

@@ -0,0 +1,9 @@
-- Revert to original schema with JSON data field
-- (row data is lost; the game-data parser must be re-run to repopulate)
DROP TABLE IF EXISTS player_houses;
CREATE TABLE player_houses (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    map_id INTEGER NOT NULL,
    data TEXT NOT NULL
);

View File

@@ -0,0 +1,11 @@
-- Drop the old table and recreate with direct fields instead of JSON data
-- pos_x/pos_z are the horizontal map-plane coordinates (Unity convention).
DROP TABLE IF EXISTS player_houses;
CREATE TABLE player_houses (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT NOT NULL,
    pos_x REAL NOT NULL,
    pos_z REAL NOT NULL,
    price INTEGER NOT NULL
);

View File

@@ -0,0 +1,9 @@
-- Restore old table schema
DROP TABLE IF EXISTS fast_travel_locations;
CREATE TABLE fast_travel_locations (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
map_name TEXT NOT NULL,
data TEXT NOT NULL
);

View File

@@ -0,0 +1,12 @@
-- Drop the old table and create with new schema
DROP TABLE IF EXISTS fast_travel_locations;
CREATE TABLE fast_travel_locations (
name TEXT PRIMARY KEY,
pos_x REAL NOT NULL,
pos_z REAL NOT NULL,
travel_type TEXT NOT NULL,
unlocked INTEGER NOT NULL DEFAULT 0,
connections TEXT,
checks TEXT
);

View File

@@ -0,0 +1,143 @@
//! Image Parser - Processes minimap tiles and generates all zoom levels
//!
//! This binary handles:
//! - Loading minimap tile images from PNG files
//! - Converting to lossless WebP format (zoom level 2)
//! - Generating merged tiles for zoom level 1 (2x2)
//! - Generating merged tiles for zoom level 0 (4x4)
//! - Storing all tiles in the SQLite database
//! - Generating statistics about storage and compression
use clap::Parser;
use cursebreaker_parser::{IconDatabase, MinimapDatabase};
use log::{error, info, LevelFilter};
use std::env;
use unity_parser::log::DedupLogger;
/// Command-line options for the image parser.
///
/// At least one of `--minimap`, `--icons`, or `--all` must be supplied;
/// `main` validates this explicitly and exits with usage text otherwise
/// (clap itself treats all three flags as optional).
#[derive(Parser, Debug)]
#[command(name = "image-parser")]
#[command(about = "Processes minimap tiles and game icons")]
struct Args {
    /// Process minimap tiles
    #[arg(long)]
    minimap: bool,
    /// Process game icons
    #[arg(long)]
    icons: bool,
    /// Process everything (minimap and icons)
    #[arg(long)]
    all: bool,
}
/// Entry point: processes minimap tiles and/or game icons into the SQLite
/// database, then prints storage statistics.
///
/// Configuration comes from the environment:
/// - `DATABASE_URL`   (default "cursebreaker.db")
/// - `CB_ASSETS_PATH` (default "/home/connor/repos/CBAssets")
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();
    // Validate that at least one option is specified
    if !args.minimap && !args.icons && !args.all {
        eprintln!("Error: At least one option must be specified.\n");
        eprintln!("Usage: image-parser [OPTIONS]\n");
        eprintln!("Options:");
        eprintln!(" --minimap Process minimap tiles");
        eprintln!(" --icons Process game icons");
        eprintln!(" --all Process everything");
        std::process::exit(1);
    }
    // --all implies both individual stages
    let process_minimap = args.minimap || args.all;
    let process_icons = args.icons || args.all;
    // Install the dedup logger; set_boxed_logger can only fail if a logger
    // was already installed, which would be a bug here — hence unwrap.
    let logger = DedupLogger::new();
    log::set_boxed_logger(Box::new(logger))
        .map(|()| log::set_max_level(LevelFilter::Trace))
        .unwrap();
    info!("Image Parser");
    info!("Generates all zoom levels (0, 1, 2) with merged tiles");
    info!("Will override existing database entries\n");
    let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "cursebreaker.db".to_string());
    let cb_assets_path =
        env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
    // Process minimap tiles
    if process_minimap {
        info!("Processing minimap tiles...");
        let minimap_db = MinimapDatabase::new(database_url.clone());
        let minimap_path = format!("{}/Data/Textures/MinimapSquares", cb_assets_path);
        match minimap_db.load_from_directory(&minimap_path, &cb_assets_path) {
            Ok(total_count) => {
                info!("\nProcessed {} total tiles (all zoom levels)", total_count);
                // Get statistics (best-effort: stats/bounds failures are not fatal)
                if let Ok(stats) = minimap_db.get_storage_stats() {
                    info!("\n=== Storage Statistics ===");
                    info!(
                        "Original PNG total: {} MB",
                        stats.total_original_size / 1_048_576
                    );
                    info!("WebP total: {} MB", stats.total_webp_size() / 1_048_576);
                    info!("Compression ratio: {:.2}%\n", stats.compression_ratio());
                    info!("=== Tiles Per Zoom Level ===");
                    info!(
                        "Zoom 2 (original): {} tiles ({} MB)",
                        stats.zoom2_count,
                        stats.zoom2_size / 1_048_576
                    );
                    info!(
                        "Zoom 1 (2x2 merged): {} tiles ({} MB)",
                        stats.zoom1_count,
                        stats.zoom1_size / 1_048_576
                    );
                    info!(
                        "Zoom 0 (4x4 merged): {} tiles ({} MB)",
                        stats.zoom0_count,
                        stats.zoom0_size / 1_048_576
                    );
                }
                if let Ok(bounds) = minimap_db.get_map_bounds() {
                    info!("\n=== Map Bounds ===");
                    info!("Min (x,y): {:?}", bounds.0);
                    info!("Max (x,y): {:?}", bounds.1);
                }
            }
            Err(e) => {
                error!("Failed to process minimap tiles: {}", e);
                return Err(Box::new(e));
            }
        }
    }
    // Process game icons
    if process_icons {
        info!("\n=== Processing Game Icons ===");
        let icon_db = IconDatabase::new(database_url);
        match icon_db.load_all_icons(&cb_assets_path) {
            Ok(stats) => {
                info!("\n=== Icon Statistics ===");
                info!("Ability icons: {}", stats.abilities);
                info!("Buff icons: {}", stats.buffs);
                info!("Trait icons: {}", stats.traits);
                info!("Player house icons: {}", stats.player_houses);
                info!("Stat icons: {}", stats.stat_icons);
                info!("Achievement icons: {}", stats.achievement_icons);
                info!("General icons: {}", stats.general_icons);
                info!("Total icons: {}", stats.total_icons());
                info!("Total size: {} KB", stats.total_bytes / 1024);
            }
            Err(e) => {
                error!("Failed to process icons: {}", e);
                return Err(Box::new(e));
            }
        }
    }
    // Flush the dedup logger so suppressed-message summaries are emitted.
    log::logger().flush();
    Ok(())
}

View File

@@ -0,0 +1,557 @@
//! Scene Parser - Parses Unity scenes and extracts game objects
//!
//! This binary handles:
//! - Initializing the Unity project
//! - Parsing Unity scenes with type filtering
//! - Extracting Interactable_Resource components only
//! - Computing world transforms
//! - Saving resource locations to the database
//! - Processing and saving item icons for resources
//!
//! Usage:
//! scene-parser [min_x max_x min_y max_y]
//!
//! Examples:
//! scene-parser # Parse all scenes
//! scene-parser 0 10 0 10 # Parse scenes from (0,0) to (10,10)
use cursebreaker_parser::{
InteractableResource, InteractableTeleporter, InteractableWorkbench,
LootSpawner, MapIcon, MapNameChanger, ImageProcessor, OutlineConfig
};
use unity_parser::{UnityProject, TypeFilter};
use std::path::{Path, PathBuf};
use unity_parser::log::DedupLogger;
use log::{info, error, warn, LevelFilter};
use std::env;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::fs;
/// Inclusive rectangular region of scene tile coordinates to parse.
#[derive(Debug, Clone)]
struct Bounds {
    min_x: i32,
    max_x: i32,
    min_y: i32,
    max_y: i32,
}

impl Bounds {
    /// Returns true when tile (x, y) lies inside the region; all four
    /// edges are inclusive.
    fn contains(&self, x: i32, y: i32) -> bool {
        (self.min_x..=self.max_x).contains(&x) && (self.min_y..=self.max_y).contains(&y)
    }
}
/// Extract tile coordinates from a scene filename, e.g. "10_3.unity" -> (10, 3).
///
/// Returns `None` when the name lacks a ".unity" suffix or the stem is not
/// exactly two '_'-separated integers.
fn parse_scene_coords(filename: &str) -> Option<(i32, i32)> {
    let stem = filename.strip_suffix(".unity")?;
    // split_once rejects zero or multiple underscores the same way the
    // original length-2 check did: the leftover text fails to parse.
    let (x_str, y_str) = stem.split_once('_')?;
    Some((x_str.parse().ok()?, y_str.parse().ok()?))
}
/// Locate every scene file named like "x_y.unity" directly under
/// `scenes_dir`, optionally restricted to tiles inside `bounds`, and
/// return them sorted by (x, y) for a deterministic processing order.
///
/// An unreadable directory yields an empty list (same as the original
/// silent `if let Ok` behavior).
fn find_scene_files(scenes_dir: &Path, bounds: Option<&Bounds>) -> Vec<PathBuf> {
    let entries = match fs::read_dir(scenes_dir) {
        Ok(entries) => entries,
        Err(_) => return Vec::new(),
    };
    // Helper: tile coordinates for a path, None when the name doesn't match.
    let coords_of = |p: &PathBuf| {
        p.file_name()
            .and_then(|n| n.to_str())
            .and_then(parse_scene_coords)
    };
    let mut scenes: Vec<PathBuf> = entries
        .flatten()
        .map(|entry| entry.path())
        .filter(|path| match coords_of(path) {
            // Keep only parseable "x_y.unity" names, inside bounds if given.
            Some((x, y)) => bounds.map_or(true, |b| b.contains(x, y)),
            None => false,
        })
        .collect();
    // Sort by coordinates for consistent ordering; every retained path has
    // coordinates, so the fallback (0, 0) is never actually used.
    scenes.sort_by_key(|p| coords_of(p).unwrap_or((0, 0)));
    scenes
}
/// Parse optional tile bounds from the command line.
///
/// Accepted forms:
/// - no arguments:                  `None` (parse every scene)
/// - `min_x max_x min_y max_y`:     `Some(Bounds)` (inclusive)
///
/// Any other argument count, or a non-integer value, prints usage and
/// exits with status 1. (Previously a non-integer bound made
/// `.parse().ok()?` return `None`, which the caller silently interpreted
/// as "no bounds — parse ALL scenes".)
fn parse_bounds_args() -> Option<Bounds> {
    let args: Vec<String> = env::args().collect();
    if args.len() == 1 {
        return None; // No bounds specified, parse all
    }
    if args.len() != 5 {
        print_usage_and_exit(&args[0]);
    }
    // Parse the four bound values, failing loudly on bad input.
    let mut values = [0i32; 4];
    for (slot, raw) in values.iter_mut().zip(&args[1..5]) {
        match raw.parse() {
            Ok(v) => *slot = v,
            Err(_) => {
                eprintln!("Error: '{}' is not a valid integer", raw);
                print_usage_and_exit(&args[0]);
            }
        }
    }
    let [min_x, max_x, min_y, max_y] = values;
    Some(Bounds { min_x, max_x, min_y, max_y })
}

/// Print CLI usage to stderr and terminate the process with status 1.
fn print_usage_and_exit(program: &str) -> ! {
    eprintln!("Usage: {} [min_x max_x min_y max_y]", program);
    eprintln!("  No arguments: parse all scenes");
    eprintln!("  4 arguments: parse scenes within bounds (inclusive)");
    std::process::exit(1);
}
/// Entry point: parses all (or bounds-filtered) Unity tile scenes, extracts
/// interactable/world-object components, rewrites the world_* tables from
/// scratch, and regenerates resource icons for every harvestable type seen.
///
/// Configuration comes from the environment:
/// - `CB_ASSETS_PATH` (default "/home/connor/repos/CBAssets")
/// - `DATABASE_URL`   (default "cursebreaker.db")
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Install the dedup logger at Warn so per-scene info!() stays quiet;
    // set_boxed_logger only fails if a logger is already installed (a bug).
    let logger = DedupLogger::new();
    log::set_boxed_logger(Box::new(logger))
        .map(|()| log::set_max_level(LevelFilter::Warn))
        .unwrap();
    info!("🎮 Cursebreaker - Scene Parser");
    // Parse bounds from command line
    let bounds = parse_bounds_args();
    if let Some(ref b) = bounds {
        info!("📐 Bounds: x=[{}, {}], y=[{}, {}]", b.min_x, b.max_x, b.min_y, b.max_y);
    } else {
        info!("📐 Bounds: none (parsing all scenes)");
    }
    let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
    // Initialize Unity project once - scans entire project for GUID mappings
    let project_root = Path::new(&cb_assets_path);
    info!("\n📦 Initializing Unity project from: {}", project_root.display());
    let project = UnityProject::from_path(project_root)?;
    // Find all scene files
    let scenes_dir = project_root.join("_GameAssets/Scenes/Tiles");
    let scene_files = find_scene_files(&scenes_dir, bounds.as_ref());
    info!("🔍 Found {} scene files to parse", scene_files.len());
    if scene_files.is_empty() {
        warn!("No scene files found matching criteria");
        return Ok(());
    }
    // Create type filter to only parse GameObject, Transform, and InteractableResource MonoBehaviour
    info!("🔍 Setting up type filter:");
    info!("  • Unity types: GameObject, Transform");
    info!("  • Custom MonoBehaviours: InteractableResource");
    let type_filter = TypeFilter::new(
        vec!["GameObject", "Transform", "PrefabInstance"],
        vec!["InteractableResource", "InteractableTeleporter", "InteractableWorkbench", "LootSpawner", "MapIcon", "MapNameChanger"]
    );
    // Setup database connection
    let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "cursebreaker.db".to_string());
    let mut conn = SqliteConnection::establish(&database_url)?;
    // Clear all tables before processing (they're regenerated each run)
    // NOTE(review): the deletes are not in one transaction, so a crash here
    // leaves partially-cleared tables until the next successful run.
    {
        use cursebreaker_parser::schema::{
            world_resources, world_teleporters, world_workbenches,
            world_loot, world_map_icons, world_map_name_changers, resource_icons
        };
        diesel::delete(world_resources::table).execute(&mut conn)?;
        diesel::delete(world_teleporters::table).execute(&mut conn)?;
        diesel::delete(world_workbenches::table).execute(&mut conn)?;
        diesel::delete(world_loot::table).execute(&mut conn)?;
        diesel::delete(world_map_icons::table).execute(&mut conn)?;
        diesel::delete(world_map_name_changers::table).execute(&mut conn)?;
        diesel::delete(resource_icons::table).execute(&mut conn)?;
    }
    // Collect unique harvestables across all scenes for icon processing
    // (keyed by type_id; value is the first GameObject name seen, used as a
    // fallback display name when the DB has no harvestable row)
    let mut all_unique_harvestables: HashMap<i32, String> = HashMap::new();
    // Track totals
    let mut total_resources = 0;
    let mut total_teleporters = 0;
    let mut total_workbenches = 0;
    let mut total_loot = 0;
    let mut total_map_icons = 0;
    let mut total_map_name_changers = 0;
    let mut scenes_processed = 0;
    let mut scenes_failed = 0;
    // Process each scene
    for (idx, scene_path) in scene_files.iter().enumerate() {
        // Scene paths are passed to the project relative to its root.
        let relative_path = scene_path.strip_prefix(project_root)
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_else(|_| scene_path.to_string_lossy().to_string());
        // NOTE(review): progress goes to stdout via print! while everything
        // else uses the logger — possibly intentional, confirm.
        print!("\n📁 [{}/{}] Parsing scene: {}", idx + 1, scene_files.len(), relative_path);
        match project.parse_scene_filtered(&relative_path, Some(&type_filter)) {
            Ok(mut scene) => {
                info!(" ✓ Parsed ({} entities)", scene.entity_map.len());
                // Post-processing: Compute world transforms
                unity_parser::compute_world_transforms(&mut scene.world, &scene.entity_map);
                // Save resources
                let resource_count = save_resources(&mut conn, &scene)?;
                total_resources += resource_count;
                // Collect unique harvestables for icon processing later
                scene.world
                    .query_all::<(&InteractableResource, &unity_parser::GameObject)>()
                    .for_each(|(resource, object)| {
                        all_unique_harvestables.entry(resource.type_id as i32)
                            .or_insert_with(|| object.name.to_string());
                    });
                // Save other world objects (append mode - tables already cleared)
                total_teleporters += save_teleporters_append(&mut conn, &scene)?;
                total_workbenches += save_workbenches_append(&mut conn, &scene)?;
                total_loot += save_loot_spawners_append(&mut conn, &scene)?;
                total_map_icons += save_map_icons_append(&mut conn, &scene)?;
                total_map_name_changers += save_map_name_changers_append(&mut conn, &scene)?;
                scenes_processed += 1;
            }
            Err(e) => {
                // A single bad scene doesn't abort the run; it's counted below.
                error!(" ✗ Parse error: {}", e);
                scenes_failed += 1;
            }
        }
    }
    // Flush before icon processing so suppressed warnings appear in order.
    log::logger().flush();
    // Process icons for all unique harvestables
    info!("\n🎨 Processing item icons for {} unique harvestable types...", all_unique_harvestables.len());
    process_item_icons_from_map(&cb_assets_path, &mut conn, &all_unique_harvestables)?;
    // Print summary
    println!("\n==================================================");
    println!("📊 SUMMARY");
    println!("==================================================");
    println!("  Scenes processed: {} ({} failed)", scenes_processed, scenes_failed);
    println!("  Resources:        {}", total_resources);
    println!("  Teleporters:      {}", total_teleporters);
    println!("  Workbenches:      {}", total_workbenches);
    println!("  Loot spawners:    {}", total_loot);
    println!("  Map icons:        {}", total_map_icons);
    println!("  Map name changers:{}", total_map_name_changers);
    println!("==================================================");
    log::logger().flush();
    Ok(())
}
/// Save resources from a scene (append mode)
fn save_resources(
conn: &mut SqliteConnection,
scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
use cursebreaker_parser::schema::world_resources;
let mut count = 0;
conn.transaction::<_, diesel::result::Error, _>(|conn| {
scene.world
.query_all::<(&InteractableResource, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
.for_each(|(resource, transform, _object)| {
let world_pos = transform.position();
let _ = diesel::insert_into(world_resources::table)
.values((
world_resources::item_id.eq(resource.type_id as i32),
world_resources::pos_x.eq(world_pos.x as f32),
world_resources::pos_y.eq(world_pos.z as f32),
))
.execute(conn);
count += 1;
});
Ok(())
})?;
Ok(count)
}
/// Process item icons from a pre-collected map of harvestables
///
/// For each unique harvestable gathered during scene parsing, resolves its
/// display name, finds its first item drop, loads that item's PNG icon from
/// the asset tree, renders it at 64px with a white outline and stores the
/// result in `resource_icons` keyed by the *harvestable* id (not the item id).
/// Missing drops, missing icon files, processing failures and failed inserts
/// are logged and counted; processing always continues to the next entry.
fn process_item_icons_from_map(
    cb_assets_path: &str,
    conn: &mut SqliteConnection,
    unique_harvestables: &HashMap<i32, String>,
) -> Result<(), Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::{resource_icons, items, harvestables, harvestable_drops};
    info!(" Processing {} unique harvestable types", unique_harvestables.len());
    // Create image processor with white outline (4px)
    let processor = ImageProcessor::default();
    let outline_config = OutlineConfig::white(4);
    let mut processed_count = 0;
    let mut failed_count = 0;
    // Process each unique harvestable
    for (harvestable_id, default_name) in unique_harvestables.iter() {
        // Get the harvestable name; fall back to the GameObject name collected
        // during scene parsing when the DB row is missing.
        let harvestable_name: String = harvestables::table
            .filter(harvestables::id.eq(harvestable_id))
            .select(harvestables::name)
            .first(conn)
            .unwrap_or_else(|_| default_name.clone());
        // Get the first item drop for this harvestable (lowest drop-row id)
        let item_id_result: Result<i32, _> = harvestable_drops::table
            .filter(harvestable_drops::harvestable_id.eq(harvestable_id))
            .select(harvestable_drops::item_id)
            .order(harvestable_drops::id.asc())
            .first(conn);
        let item_id = match item_id_result {
            Ok(id) => id,
            Err(_) => {
                warn!(" ⚠️ No drops found for harvestable {} ({})", harvestable_id, harvestable_name);
                failed_count += 1;
                continue;
            }
        };
        // Get the item name (used only for log messages below)
        let item_name: String = items::table
            .filter(items::id.eq(&item_id))
            .select(items::name)
            .first(conn)
            .unwrap_or_else(|_| format!("Item {}", item_id));
        // Construct icon path using the item_id from the drop
        let icon_path = PathBuf::from(cb_assets_path)
            .join("Data/Textures/ItemIcons")
            .join(format!("{}.png", item_id));
        if !icon_path.exists() {
            warn!(" ⚠️ Icon not found for harvestable {} ({}) -> item {} ({}): {}",
                harvestable_id, harvestable_name, item_id, item_name, icon_path.display());
            failed_count += 1;
            continue;
        }
        // Process the icon: resize to 64px with white outline
        match processor.process_image(&icon_path, &[64], None, Some(&outline_config)) {
            Ok(processed) => {
                if let Some(icon_data) = processed.get(64) {
                    // Insert into database using harvestable_id as the key
                    match diesel::insert_into(resource_icons::table)
                        .values((
                            resource_icons::item_id.eq(harvestable_id),
                            resource_icons::name.eq(&harvestable_name),
                            resource_icons::icon_64.eq(icon_data.as_slice()),
                        ))
                        .execute(conn)
                    {
                        Ok(_) => {
                            info!(" ✓ Harvestable {} ({}) -> Item {} ({}): {} bytes",
                                harvestable_id, harvestable_name, item_id, item_name, icon_data.len());
                            processed_count += 1;
                        }
                        Err(e) => {
                            warn!(" ⚠️ Failed to insert icon for harvestable {} ({}): {}",
                                harvestable_id, harvestable_name, e);
                            failed_count += 1;
                        }
                    }
                }
                // NOTE(review): if `processed` has no 64px entry the icon is
                // silently skipped and counted as neither success nor failure —
                // confirm that is intended.
            }
            Err(e) => {
                warn!(" ⚠️ Failed to process icon for harvestable {} ({}) -> item {} ({}): {}",
                    harvestable_id, harvestable_name, item_id, item_name, e);
                failed_count += 1;
            }
        }
    }
    info!("✅ Processed {} harvestable icons ({} succeeded, {} failed)",
        unique_harvestables.len(), processed_count, failed_count);
    Ok(())
}
/// Save teleporter data to database (append mode - doesn't clear table)
///
/// Inserts one `world_teleporters` row per teleporter in the scene: the
/// teleporter's own map position (world x/z) plus, when resolvable, the
/// destination position taken from its `tp_transform` entity. Returns the
/// number of teleporters visited; insert errors are ignored, not subtracted.
fn save_teleporters_append(
    conn: &mut SqliteConnection,
    scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::world_teleporters;
    let mut count = 0;
    // Query all teleporters
    scene.world
        .query_all::<(&InteractableTeleporter, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
        .for_each(|(teleporter, transform, _object)| {
            let world_pos = transform.position();
            // Get the tp_transform position if it exists: follow the optional
            // destination entity and read its WorldTransform, falling back to
            // NULL columns when the entity or its transform is missing.
            let (tp_x, tp_y) = if let Some(tp_entity) = teleporter.tp_transform {
                if let Some(tp_transform) = scene.world.borrow::<unity_parser::WorldTransform>().get(tp_entity) {
                    let tp_pos = tp_transform.position();
                    (Some(tp_pos.x as f32), Some(tp_pos.z as f32))
                } else {
                    (None, None)
                }
            } else {
                (None, None)
            };
            // World x/z map onto the 2D map's x/y; errors are deliberately
            // ignored so one bad row does not abort the scene import.
            let _ = diesel::insert_into(world_teleporters::table)
                .values((
                    world_teleporters::pos_x.eq(world_pos.x as f32),
                    world_teleporters::pos_y.eq(world_pos.z as f32),
                    world_teleporters::tp_x.eq(tp_x),
                    world_teleporters::tp_y.eq(tp_y),
                ))
                .execute(conn);
            count += 1;
        });
    Ok(count)
}
/// Save workbench data to database (append mode - doesn't clear table)
///
/// Appends one `world_workbenches` row per workbench entity in the scene and
/// returns the number of workbenches visited.
fn save_workbenches_append(
    conn: &mut SqliteConnection,
    scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::world_workbenches;

    let mut inserted = 0;
    for (workbench, transform, _object) in scene
        .world
        .query_all::<(&InteractableWorkbench, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
    {
        let pos = transform.position();
        // World x/z become map x/y; insert failures are intentionally ignored.
        let _ = diesel::insert_into(world_workbenches::table)
            .values((
                world_workbenches::pos_x.eq(pos.x as f32),
                world_workbenches::pos_y.eq(pos.z as f32),
                world_workbenches::workbench_id.eq(workbench.workbench_id as i32),
            ))
            .execute(conn);
        inserted += 1;
    }
    Ok(inserted)
}
/// Save loot spawner data to database (append mode - doesn't clear table)
///
/// Appends one `world_loot` row per loot-spawner entity in the scene and
/// returns the number of spawners visited.
fn save_loot_spawners_append(
    conn: &mut SqliteConnection,
    scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::world_loot;

    let mut inserted = 0;
    for (loot, transform, _object) in scene
        .world
        .query_all::<(&LootSpawner, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
    {
        let pos = transform.position();
        // World x/z become map x/y; insert failures are intentionally ignored.
        let _ = diesel::insert_into(world_loot::table)
            .values((
                world_loot::pos_x.eq(pos.x as f32),
                world_loot::pos_y.eq(pos.z as f32),
                world_loot::item_id.eq(loot.item_id as i32),
                world_loot::amount.eq(loot.amount as i32),
                world_loot::respawn_time.eq(loot.respawn_time as i32),
                world_loot::visibility_checks.eq(&loot.visibility_checks),
            ))
            .execute(conn);
        inserted += 1;
    }
    Ok(inserted)
}
/// Save map icon data to database (append mode - doesn't clear table)
///
/// Inserts one `world_map_icons` row per MapIcon entity in the scene and
/// returns the number of icons visited. Insert errors are ignored so one bad
/// row cannot abort the scene import.
fn save_map_icons_append(
    conn: &mut SqliteConnection,
    scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::world_map_icons;
    let mut count = 0;
    // Query all map icons
    scene.world
        .query_all::<(&MapIcon, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
        .for_each(|(icon, transform, _object)| {
            let world_pos = transform.position();
            // World x/z map onto the 2D map's x/y; the remaining columns copy
            // the MapIcon component's display fields verbatim.
            let _ = diesel::insert_into(world_map_icons::table)
                .values((
                    world_map_icons::pos_x.eq(world_pos.x as f32),
                    world_map_icons::pos_y.eq(world_pos.z as f32),
                    world_map_icons::icon_type.eq(icon.icon_type as i32),
                    world_map_icons::icon_size.eq(icon.icon_size as i32),
                    world_map_icons::icon.eq(&icon.icon),
                    world_map_icons::text.eq(&icon.text),
                    world_map_icons::font_size.eq(icon.font_size as i32),
                    world_map_icons::hover_text.eq(&icon.hover_text),
                ))
                .execute(conn);
            count += 1;
        });
    Ok(count)
}
/// Save map name changer data to database (append mode - doesn't clear table)
///
/// Appends one `world_map_name_changers` row per MapNameChanger entity in the
/// scene and returns the number of entities visited.
fn save_map_name_changers_append(
    conn: &mut SqliteConnection,
    scene: &unity_parser::UnityScene,
) -> Result<usize, Box<dyn std::error::Error>> {
    use cursebreaker_parser::schema::world_map_name_changers;

    let mut inserted = 0;
    for (changer, transform, _object) in scene
        .world
        .query_all::<(&MapNameChanger, &unity_parser::WorldTransform, &unity_parser::GameObject)>()
    {
        let pos = transform.position();
        // World x/z become map x/y; insert failures are intentionally ignored.
        let _ = diesel::insert_into(world_map_name_changers::table)
            .values((
                world_map_name_changers::pos_x.eq(pos.x as f32),
                world_map_name_changers::pos_y.eq(pos.z as f32),
                world_map_name_changers::map_name.eq(&changer.map_name),
            ))
            .execute(conn);
        inserted += 1;
    }
    Ok(inserted)
}

View File

@@ -0,0 +1,342 @@
//! XML Parser - Loads game data from XML files and populates the SQLite database
//!
//! Usage:
//! xml-parser --all Parse all data types
//! xml-parser --items Parse items only
//! xml-parser --npcs Parse NPCs only
//! xml-parser --quests Parse quests only
//! xml-parser --harvestables Parse harvestables only
//! xml-parser --loot Parse loot tables only
//! xml-parser --maps Parse maps only
//! xml-parser --fast-travel Parse fast travel locations only
//! xml-parser --houses Parse player houses only
//! xml-parser --traits Parse traits only
//! xml-parser --shops Parse shops only
//!
//! Multiple flags can be combined:
//! xml-parser --items --npcs --quests
use clap::Parser;
use cursebreaker_parser::{
ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase,
LootDatabase, MapDatabase, FastTravelDatabase, PlayerHouseDatabase,
TraitDatabase, ShopDatabase,
};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use log::{info, warn, LevelFilter};
use std::env;
use unity_parser::log::DedupLogger;
#[derive(Parser, Debug)]
#[command(name = "xml-parser")]
#[command(author = "Cursebreaker Team")]
#[command(version = "1.0")]
#[command(about = "Parses game XML data and populates the SQLite database")]
// Command-line flags selecting which XML data sets get parsed into the DB.
// NOTE: with clap's derive API the `///` doc comments below become the
// generated --help text, so they are user-facing strings — edit with care.
struct Args {
    /// Parse all data types
    #[arg(long, short = 'a')]
    all: bool,
    /// Parse items
    #[arg(long, short = 'i')]
    items: bool,
    /// Parse NPCs
    #[arg(long, short = 'n')]
    npcs: bool,
    /// Parse quests
    #[arg(long, short = 'q')]
    quests: bool,
    /// Parse harvestables
    #[arg(long, short = 'r')]
    harvestables: bool,
    /// Parse loot tables
    #[arg(long, short = 'l')]
    loot: bool,
    /// Parse maps
    #[arg(long, short = 'm')]
    maps: bool,
    /// Parse fast travel locations
    #[arg(long, short = 'f')]
    fast_travel: bool,
    /// Parse player houses
    #[arg(long, short = 'p')]
    houses: bool,
    /// Parse traits
    #[arg(long, short = 't')]
    traits: bool,
    /// Parse shops
    #[arg(long, short = 's')]
    shops: bool,
}
impl Args {
    /// Returns true if no specific parsers were selected
    fn none_selected(&self) -> bool {
        // Equivalent (by De Morgan) to AND-ing the negation of every flag.
        !(self.all
            || self.items
            || self.npcs
            || self.quests
            || self.harvestables
            || self.loot
            || self.maps
            || self.fast_travel
            || self.houses
            || self.traits
            || self.shops)
    }
    /// Returns true if items should be parsed
    fn should_parse_items(&self) -> bool {
        self.items || self.all
    }
    /// Returns true if NPCs should be parsed
    fn should_parse_npcs(&self) -> bool {
        self.npcs || self.all
    }
    /// Returns true if quests should be parsed
    fn should_parse_quests(&self) -> bool {
        self.quests || self.all
    }
    /// Returns true if harvestables should be parsed
    fn should_parse_harvestables(&self) -> bool {
        self.harvestables || self.all
    }
    /// Returns true if loot should be parsed
    fn should_parse_loot(&self) -> bool {
        self.loot || self.all
    }
    /// Returns true if maps should be parsed
    fn should_parse_maps(&self) -> bool {
        self.maps || self.all
    }
    /// Returns true if fast travel should be parsed
    fn should_parse_fast_travel(&self) -> bool {
        self.fast_travel || self.all
    }
    /// Returns true if houses should be parsed
    fn should_parse_houses(&self) -> bool {
        self.houses || self.all
    }
    /// Returns true if traits should be parsed
    fn should_parse_traits(&self) -> bool {
        self.traits || self.all
    }
    /// Returns true if shops should be parsed
    fn should_parse_shops(&self) -> bool {
        self.shops || self.all
    }
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
let logger = DedupLogger::new();
log::set_boxed_logger(Box::new(logger))
.map(|()| log::set_max_level(LevelFilter::Trace))
.unwrap();
let args = Args::parse();
// If no parsers selected, show help
if args.none_selected() {
eprintln!("No parsers selected. Use --all to parse everything, or specify individual parsers.");
eprintln!("Run with --help for usage information.");
std::process::exit(1);
}
info!("Cursebreaker - XML Parser");
info!("Loading game data from XML...");
let cb_assets_path = env::var("CB_ASSETS_PATH")
.unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let database_url = env::var("DATABASE_URL")
.unwrap_or_else(|_| "cursebreaker.db".to_string());
let mut conn = SqliteConnection::establish(&database_url)?;
// Parse Items
if args.should_parse_items() {
info!("Parsing items...");
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
match ItemDatabase::load_from_xml(&items_path) {
Ok(item_db) => {
info!("Loaded {} items", item_db.len());
let icon_path = format!("{}/Data/Textures/ItemIcons", cb_assets_path);
match item_db.save_to_db_with_images(&mut conn, &icon_path) {
Ok((items_count, images_count)) => {
info!("Saved {} items to database", items_count);
info!("Processed {} item icons", images_count);
}
Err(e) => warn!("Failed to save items: {}", e),
}
}
Err(e) => warn!("Failed to load items: {}", e),
}
}
// Parse NPCs
if args.should_parse_npcs() {
info!("Parsing NPCs...");
let npcs_path = format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path);
match NpcDatabase::load_from_xml(&npcs_path) {
Ok(npc_db) => {
info!("Loaded {} NPCs", npc_db.len());
match npc_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} NPCs to database", count),
Err(e) => warn!("Failed to save NPCs: {}", e),
}
}
Err(e) => warn!("Failed to load NPCs: {}", e),
}
}
// Parse Quests
if args.should_parse_quests() {
info!("Parsing quests...");
let quests_path = format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path);
match QuestDatabase::load_from_xml(&quests_path) {
Ok(quest_db) => {
info!("Loaded {} quests", quest_db.len());
match quest_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} quests to database", count),
Err(e) => warn!("Failed to save quests: {}", e),
}
}
Err(e) => warn!("Failed to load quests: {}", e),
}
}
// Parse Harvestables
if args.should_parse_harvestables() {
info!("Parsing harvestables...");
let harvestables_path = format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path);
match HarvestableDatabase::load_from_xml(&harvestables_path) {
Ok(harvestable_db) => {
info!("Loaded {} harvestables", harvestable_db.len());
match harvestable_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} harvestables to database", count),
Err(e) => warn!("Failed to save harvestables: {}", e),
}
}
Err(e) => warn!("Failed to load harvestables: {}", e),
}
}
// Parse Loot
if args.should_parse_loot() {
info!("Parsing loot tables...");
let loot_path = format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path);
match LootDatabase::load_from_xml(&loot_path) {
Ok(loot_db) => {
info!("Loaded {} loot tables", loot_db.len());
match loot_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} loot tables to database", count),
Err(e) => warn!("Failed to save loot tables: {}", e),
}
}
Err(e) => warn!("Failed to load loot tables: {}", e),
}
}
// Parse Maps
if args.should_parse_maps() {
info!("Parsing maps...");
let maps_path = format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path);
match MapDatabase::load_from_xml(&maps_path) {
Ok(map_db) => {
info!("Loaded {} maps", map_db.len());
match map_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} maps to database", count),
Err(e) => warn!("Failed to save maps: {}", e),
}
}
Err(e) => warn!("Failed to load maps: {}", e),
}
}
// Parse Fast Travel
if args.should_parse_fast_travel() {
info!("Parsing fast travel locations...");
let fast_travel_dir = format!("{}/Data/XMLs", cb_assets_path);
match FastTravelDatabase::load_from_directory(&fast_travel_dir) {
Ok(fast_travel_db) => {
info!("Loaded {} fast travel locations", fast_travel_db.len());
match fast_travel_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} fast travel locations to database", count),
Err(e) => warn!("Failed to save fast travel locations: {}", e),
}
}
Err(e) => warn!("Failed to load fast travel locations: {}", e),
}
}
// Parse Player Houses
if args.should_parse_houses() {
info!("Parsing player houses...");
let player_houses_path = format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path);
match PlayerHouseDatabase::load_from_xml(&player_houses_path) {
Ok(player_house_db) => {
info!("Loaded {} player houses", player_house_db.len());
match player_house_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} player houses to database", count),
Err(e) => warn!("Failed to save player houses: {}", e),
}
}
Err(e) => warn!("Failed to load player houses: {}", e),
}
}
// Parse Traits
if args.should_parse_traits() {
info!("Parsing traits...");
let traits_path = format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path);
match TraitDatabase::load_from_xml(&traits_path) {
Ok(trait_db) => {
info!("Loaded {} traits", trait_db.len());
match trait_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} traits to database", count),
Err(e) => warn!("Failed to save traits: {}", e),
}
}
Err(e) => warn!("Failed to load traits: {}", e),
}
}
// Parse Shops
if args.should_parse_shops() {
info!("Parsing shops...");
let shops_path = format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path);
match ShopDatabase::load_from_xml(&shops_path) {
Ok(shop_db) => {
info!("Loaded {} shops", shop_db.len());
match shop_db.save_to_db(&mut conn) {
Ok(count) => info!("Saved {} shops to database", count),
Err(e) => warn!("Failed to save shops: {}", e),
}
}
Err(e) => warn!("Failed to load shops: {}", e),
}
}
info!("XML parsing complete!");
log::logger().flush();
Ok(())
}

View File

@@ -0,0 +1,24 @@
/// Helper module for database persistence operations
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
/// Establish a database connection
///
/// Thin wrapper over `SqliteConnection::establish`. `database_url` is a SQLite
/// file path or URL; every call opens a brand-new connection.
pub fn establish_connection(database_url: &str) -> Result<SqliteConnection, diesel::ConnectionError> {
    SqliteConnection::establish(database_url)
}
/// Generic record for simple id/name/data pattern
///
/// Matches tables shaped (id INTEGER, name TEXT, data TEXT).
/// NOTE(review): `data` looks like a serialized blob (JSON elsewhere in this
/// crate) — confirm against the tables this is loaded from.
#[derive(Queryable)]
pub struct SimpleRecord {
    // Nullable integer key column.
    pub id: Option<i32>,
    pub name: String,
    pub data: String,
}
/// Generic record for text-based primary keys
///
/// Matches tables keyed by a text column with an optional secondary text
/// column plus a payload column.
#[derive(Queryable)]
pub struct TextKeyRecord {
    // Nullable text key column.
    pub key: Option<String>,
    // Optional secondary text column (meaning depends on the table).
    pub secondary: Option<String>,
    pub data: String,
}

View File

@@ -1,8 +1,10 @@
use crate::types::{FastTravelLocation, FastTravelType};
use crate::xml_parser::{
use crate::xml_parsers::{
parse_fast_travel_canoe_xml, parse_fast_travel_locations_xml, parse_fast_travel_portals_xml,
XmlParseError,
};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -234,22 +236,78 @@ impl FastTravelDatabase {
self.locations.is_empty()
}
/// Prepare fast travel locations for SQL insertion
/// Returns a vector of tuples (id, name, type, json_data)
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String, String)> {
self.locations
.iter()
.map(|location| {
let json =
serde_json::to_string(location).unwrap_or_else(|_| "{}".to_string());
(
location.id,
location.name.clone(),
location.travel_type.to_string(),
json,
)
})
.collect()
/// Save all fast travel locations to SQLite database
///
/// Clears `fast_travel_locations` first, then inserts one row per location so
/// the table mirrors this in-memory database exactly. Returns the number of
/// rows inserted; any delete/insert error aborts and is returned.
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
    use crate::schema::fast_travel_locations;
    // Clear existing entries
    diesel::delete(fast_travel_locations::table).execute(conn)?;
    let mut count = 0;
    for location in &self.locations {
        let record = (
            fast_travel_locations::name.eq(&location.name),
            fast_travel_locations::pos_x.eq(location.pos_x),
            fast_travel_locations::pos_z.eq(location.pos_z),
            // Stored as a string; load_from_db matches it back to the enum.
            fast_travel_locations::travel_type.eq(location.travel_type.to_string()),
            // Integer column: i32::from(bool) is the idiomatic 0/1 mapping
            // (replaces `if location.unlocked { 1 } else { 0 }`).
            fast_travel_locations::unlocked.eq(i32::from(location.unlocked)),
            fast_travel_locations::connections.eq(&location.connections),
            fast_travel_locations::checks.eq(&location.checks),
        );
        diesel::insert_into(fast_travel_locations::table)
            .values(&record)
            .execute(conn)?;
        count += 1;
    }
    Ok(count)
}
/// Load all fast travel locations from SQLite database
///
/// NOTE(review): location ids are not persisted, so every loaded location
/// gets id 0, and unrecognized `travel_type` strings silently fall back to
/// `Location` — a DB round trip is lossy for those fields.
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
    use crate::schema::fast_travel_locations::dsl::*;
    // Row shape mirroring the table columns.
    #[derive(Queryable)]
    #[allow(dead_code)]
    struct FastTravelLocationRecord {
        name: Option<String>,
        pos_x: f32,
        pos_z: f32,
        travel_type: String,
        unlocked: i32,
        connections: Option<String>,
        checks: Option<String>,
    }
    let records = fast_travel_locations.load::<FastTravelLocationRecord>(conn)?;
    let mut loaded_locations = Vec::new();
    for record in records {
        // Map the stored string back to the enum (inverse of the
        // `travel_type.to_string()` used by save_to_db).
        let travel_type_enum = match record.travel_type.as_str() {
            "Location" => FastTravelType::Location,
            "Canoe" => FastTravelType::Canoe,
            "Portal" => FastTravelType::Portal,
            _ => FastTravelType::Location, // Default fallback
        };
        let mut location = FastTravelLocation::new(
            0, // id not stored in DB
            record.name.unwrap_or_default(),
            record.pos_x,
            record.pos_z,
            travel_type_enum,
        );
        // The unlocked column is a 0/1 integer round-tripped to bool.
        location.unlocked = record.unlocked != 0;
        location.connections = record.connections;
        location.checks = record.checks;
        loaded_locations.push(location);
    }
    let mut db = Self::new();
    db.add_locations(loaded_locations);
    Ok(db)
}
}

View File

@@ -1,5 +1,7 @@
use crate::types::Harvestable;
use crate::xml_parser::{parse_harvestables_xml, XmlParseError};
use crate::xml_parsers::{parse_harvestables_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -60,27 +62,21 @@ impl HarvestableDatabase {
/// Get harvestables by skill
pub fn get_by_skill(&self, skill: &str) -> Vec<&Harvestable> {
use crate::types::SkillType;
let skill_type = skill.parse::<SkillType>().unwrap_or(SkillType::None);
self.harvestables
.iter()
.filter(|h| {
h.skill
.as_ref()
.map(|s| s.eq_ignore_ascii_case(skill))
.unwrap_or(false)
})
.filter(|h| h.skill == skill_type)
.collect()
}
/// Get harvestables that require a specific tool
pub fn get_by_tool(&self, tool: &str) -> Vec<&Harvestable> {
use crate::types::Tool;
let tool_type = tool.parse::<Tool>().unwrap_or(Tool::None);
self.harvestables
.iter()
.filter(|h| {
h.tool
.as_ref()
.map(|t| t.eq_ignore_ascii_case(tool))
.unwrap_or(false)
})
.filter(|h| h.tool == tool_type)
.collect()
}
@@ -104,11 +100,7 @@ impl HarvestableDatabase {
pub fn get_by_level_range(&self, min_level: i32, max_level: i32) -> Vec<&Harvestable> {
self.harvestables
.iter()
.filter(|h| {
h.level
.map(|l| l >= min_level && l <= max_level)
.unwrap_or(false)
})
.filter(|h| h.level >= min_level && h.level <= max_level)
.collect()
}
@@ -122,17 +114,237 @@ impl HarvestableDatabase {
self.harvestables.is_empty()
}
/// Prepare harvestables for SQL insertion
/// Returns a vector of tuples (typeid, name, json_data)
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
/// Prepare harvestables for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
#[allow(deprecated)]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String, String, i32, String, String, i32, i32, i32, i32, i32)> {
use crate::types::{SkillType, Tool};
self.harvestables
.iter()
.map(|harvestable| {
let json = serde_json::to_string(harvestable).unwrap_or_else(|_| "{}".to_string());
(harvestable.typeid, harvestable.name.clone(), json)
let skill_str = match harvestable.skill {
SkillType::None => "none",
SkillType::Swordsmanship => "swordsmanship",
SkillType::Archery => "archery",
SkillType::Magic => "magic",
SkillType::Defence => "defence",
SkillType::Mining => "mining",
SkillType::Woodcutting => "woodcutting",
SkillType::Fishing => "fishing",
SkillType::Cooking => "cooking",
SkillType::Carpentry => "carpentry",
SkillType::Blacksmithy => "blacksmithy",
SkillType::Tailoring => "tailoring",
SkillType::Alchemy => "alchemy",
}.to_string();
let tool_str = match harvestable.tool {
Tool::None => "none",
Tool::Pickaxe => "pickaxe",
Tool::Hatchet => "hatchet",
Tool::Scythe => "scythe",
Tool::Hammer => "hammer",
Tool::Shears => "shears",
Tool::FishingRod => "fishingrod",
}.to_string();
(
harvestable.typeid,
harvestable.name.clone(),
harvestable.desc.clone(),
harvestable.comment.clone(),
harvestable.level,
skill_str,
tool_str,
harvestable.min_health,
harvestable.max_health,
harvestable.harvesttime,
harvestable.hittime,
harvestable.respawntime,
)
})
.collect()
}
/// Save all harvestables to SQLite database
///
/// Clears `harvestable_drops` before `harvestables` (child table first so the
/// FK relationship holds), then inserts every harvestable followed by its
/// drops. Returns the number of harvestables written. Drop rows that fail to
/// insert (e.g. FK violation for a missing item) are logged and skipped
/// rather than aborting the whole save.
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
    use crate::schema::{harvestables, harvestable_drops};
    use crate::types::{SkillType, Tool};
    // Clear existing data
    diesel::delete(harvestable_drops::table).execute(conn)?;
    diesel::delete(harvestables::table).execute(conn)?;
    let mut count = 0;
    for harvestable in &self.harvestables {
        // Convert enums to strings for database storage.
        // NOTE(review): these strings must stay parseable by the FromStr impls
        // used in load_from_db (`record.skill.parse()` / `record.tool.parse()`),
        // or a DB round trip silently falls back to the None variants.
        let skill_str = match harvestable.skill {
            SkillType::None => "none",
            SkillType::Swordsmanship => "swordsmanship",
            SkillType::Archery => "archery",
            SkillType::Magic => "magic",
            SkillType::Defence => "defence",
            SkillType::Mining => "mining",
            SkillType::Woodcutting => "woodcutting",
            SkillType::Fishing => "fishing",
            SkillType::Cooking => "cooking",
            SkillType::Carpentry => "carpentry",
            SkillType::Blacksmithy => "blacksmithy",
            SkillType::Tailoring => "tailoring",
            SkillType::Alchemy => "alchemy",
        };
        let tool_str = match harvestable.tool {
            Tool::None => "none",
            Tool::Pickaxe => "pickaxe",
            Tool::Hatchet => "hatchet",
            Tool::Scythe => "scythe",
            Tool::Hammer => "hammer",
            Tool::Shears => "shears",
            Tool::FishingRod => "fishingrod",
        };
        // Insert harvestable
        diesel::insert_into(harvestables::table)
            .values((
                harvestables::id.eq(harvestable.typeid),
                harvestables::name.eq(&harvestable.name),
                harvestables::description.eq(&harvestable.desc),
                harvestables::comment.eq(&harvestable.comment),
                harvestables::level.eq(harvestable.level),
                harvestables::skill.eq(skill_str),
                harvestables::tool.eq(tool_str),
                harvestables::min_health.eq(harvestable.min_health),
                harvestables::max_health.eq(harvestable.max_health),
                harvestables::harvesttime.eq(harvestable.harvesttime),
                harvestables::hittime.eq(harvestable.hittime),
                harvestables::respawntime.eq(harvestable.respawntime),
            ))
            .execute(conn)?;
        // Insert drops
        for drop in &harvestable.drops {
            // Try to insert, but skip if foreign key constraint fails (item doesn't exist)
            let insert_result = diesel::insert_into(harvestable_drops::table)
                .values((
                    harvestable_drops::harvestable_id.eq(harvestable.typeid),
                    harvestable_drops::item_id.eq(drop.id),
                    harvestable_drops::minamount.eq(drop.minamount),
                    harvestable_drops::maxamount.eq(drop.maxamount),
                    harvestable_drops::droprate.eq(drop.droprate),
                    harvestable_drops::droprateboost.eq(drop.droprateboost),
                    harvestable_drops::amountboost.eq(drop.amountboost),
                    harvestable_drops::comment.eq(&drop.comment),
                ))
                .execute(conn);
            // Log warning if insert failed but continue
            if let Err(e) = insert_result {
                eprintln!("Warning: Failed to insert drop for harvestable {} (item {}): {}",
                    harvestable.typeid, drop.id, e);
            }
        }
        count += 1;
    }
    Ok(count)
}
/// Load all harvestables from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::{harvestables, harvestable_drops};
use crate::types::{Harvestable, HarvestableDrop, SkillType, Tool};
use diesel::prelude::*;
#[derive(Queryable)]
struct HarvestableRecord {
id: i32,
name: String,
description: String,
comment: String,
level: i32,
skill: String,
tool: String,
min_health: i32,
max_health: i32,
harvesttime: i32,
hittime: i32,
respawntime: i32,
}
#[derive(Queryable)]
struct HarvestableDropRecord {
id: Option<i32>,
harvestable_id: i32,
item_id: i32,
minamount: i32,
maxamount: i32,
droprate: i32,
droprateboost: i32,
amountboost: i32,
comment: String,
}
let harv_records = harvestables::table.load::<HarvestableRecord>(conn)?;
let drop_records = harvestable_drops::table.load::<HarvestableDropRecord>(conn)?;
let mut loaded_harvestables = Vec::new();
for record in harv_records {
let mut harvestable = Harvestable {
typeid: record.id,
name: record.name,
actionname: String::new(),
desc: record.description,
comment: record.comment,
level: record.level,
skill: record.skill.parse().unwrap_or(SkillType::None),
tool: record.tool.parse().unwrap_or(Tool::None),
min_health: record.min_health,
max_health: record.max_health,
harvesttime: record.harvesttime,
hittime: record.hittime,
respawntime: record.respawntime,
harvestsfx: String::new(),
endsfx: String::new(),
receiveitemsfx: String::new(),
animation: String::new(),
takehitanimation: String::new(),
endgfx: String::new(),
tree: false,
hidemilestone: false,
nohighlight: false,
hideminimap: false,
noleftclickinteract: false,
interactdistance: String::new(),
drops: Vec::new(),
};
// Add drops for this harvestable
for drop_rec in &drop_records {
if drop_rec.harvestable_id == record.id {
harvestable.drops.push(HarvestableDrop {
id: drop_rec.item_id,
minamount: drop_rec.minamount,
maxamount: drop_rec.maxamount,
droprate: drop_rec.droprate,
droprateboost: drop_rec.droprateboost,
amountboost: drop_rec.amountboost,
checks: String::new(),
comment: drop_rec.comment.clone(),
dontconsumehealth: false,
});
}
}
loaded_harvestables.push(harvestable);
}
let mut db = Self::new();
db.add_harvestables(loaded_harvestables);
Ok(db)
}
}
impl Default for HarvestableDatabase {

View File

@@ -0,0 +1,655 @@
use crate::types::{
NewAbilityIcon, NewBuffIcon, NewTraitIcon, NewPlayerHouseIcon, NewStatIcon,
NewAchievementIcon, NewGeneralIcon
};
use crate::image_processor::ImageProcessor;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::path::{Path, PathBuf};
use std::fs;
use thiserror::Error;
use log::{info, warn};
// Errors produced while loading icons into the database.
#[derive(Debug, Error)]
pub enum IconDatabaseError {
    /// Diesel query/insert failure.
    #[error("Database error: {0}")]
    DatabaseError(#[from] diesel::result::Error),
    /// Decoding a source image file failed.
    #[error("Image load error: {0}")]
    ImageLoadError(#[from] image::ImageError),
    /// Filesystem failure; also used to wrap WebP-encoding errors
    /// (see the `std::io::Error::other` mapping in the loader methods).
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// Failed to open a SQLite connection (diesel error stringified).
    #[error("Connection pool error: {0}")]
    ConnectionError(String),
}
/// Statistics for icon loading
#[derive(Debug, Default)]
pub struct IconStats {
    pub abilities: usize,
    pub buffs: usize,
    pub traits: usize,
    pub player_houses: usize,
    pub stat_icons: usize,
    pub achievement_icons: usize,
    pub general_icons: usize,
    pub total_bytes: usize,
}
impl IconStats {
    /// Total number of icons loaded across every category.
    /// (`total_bytes` is a byte size, not a count, and is excluded.)
    pub fn total_icons(&self) -> usize {
        [
            self.abilities,
            self.buffs,
            self.traits,
            self.player_houses,
            self.stat_icons,
            self.achievement_icons,
            self.general_icons,
        ]
        .iter()
        .sum()
    }
}
/// Database for managing game icons
///
/// Holds only the connection string; each loader method opens its own
/// `SqliteConnection` via `establish_connection`.
pub struct IconDatabase {
    // SQLite connection string (file path or URL) used for every connection.
    database_url: String,
}
impl IconDatabase {
/// Create new database connection
///
/// Only stores the connection string; no connection is opened until one of
/// the loader methods runs.
pub fn new(database_url: String) -> Self {
    Self { database_url }
}
/// Establish database connection
///
/// Opens a fresh `SqliteConnection` from the stored URL, mapping the diesel
/// connection error into `IconDatabaseError::ConnectionError`.
fn establish_connection(&self) -> Result<SqliteConnection, IconDatabaseError> {
    SqliteConnection::establish(&self.database_url)
        .map_err(|e| IconDatabaseError::ConnectionError(e.to_string()))
}
/// Load all icons from the CBAssets directory
///
/// Walks the well-known subdirectories of `<cb_assets_path>/Data/Textures`
/// and loads each icon category into its own table, accumulating per-category
/// counts and the total encoded byte size in the returned `IconStats`.
/// The first category loader that errors aborts the whole run.
pub fn load_all_icons<P: AsRef<Path>>(
    &self,
    cb_assets_path: P,
) -> Result<IconStats, IconDatabaseError> {
    let base = cb_assets_path.as_ref();
    let textures = base.join("Data/Textures");
    let mut stats = IconStats::default();
    info!("Loading ability icons...");
    stats.abilities = self.load_ability_icons(
        &textures.join("Abilities"),
        &mut stats.total_bytes,
    )?;
    info!("Loading buff icons...");
    stats.buffs = self.load_buff_icons(
        &textures.join("Buffs"),
        &mut stats.total_bytes,
    )?;
    info!("Loading trait icons...");
    stats.traits = self.load_trait_icons(
        &textures.join("Traits"),
        &mut stats.total_bytes,
    )?;
    info!("Loading player house icons...");
    stats.player_houses = self.load_player_house_icons(
        &textures.join("PlayerHouses/Houses"),
        &mut stats.total_bytes,
    )?;
    info!("Loading stat icons...");
    stats.stat_icons = self.load_stat_icons(
        &textures.join("StatIcons"),
        &mut stats.total_bytes,
    )?;
    info!("Loading achievement icons...");
    stats.achievement_icons = self.load_achievement_icons(
        &textures.join("Achievements/Icons"),
        &mut stats.total_bytes,
    )?;
    info!("Loading general icons...");
    // General icons scan the Textures root itself rather than a subfolder.
    stats.general_icons = self.load_general_icons(&textures, &mut stats.total_bytes)?;
    Ok(stats)
}
/// Load ability icons from a directory
///
/// Scans `dir` for image files, re-encodes each as lossless WebP and upserts
/// it into `ability_icons` keyed by the file stem. Returns the number of
/// icons stored; `total_bytes` grows by each icon's encoded size. A missing
/// directory is not an error (returns 0).
fn load_ability_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::ability_icons;

    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }

    let mut conn = self.establish_connection()?;
    let mut stored = 0;

    for path in self.find_image_files(dir_path)? {
        // The file stem (name without extension) is the icon's database key;
        // files with an unusable stem are skipped.
        let name = match path.file_stem().and_then(|s| s.to_str()) {
            Some(stem) if !stem.is_empty() => stem.to_string(),
            _ => continue,
        };

        let rgba = image::open(&path)?.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();

        // replace_into keeps re-runs idempotent: an existing row with the
        // same name is overwritten.
        diesel::replace_into(ability_icons::table)
            .values(&NewAbilityIcon {
                name: &name,
                icon: &webp_data,
            })
            .execute(&mut conn)?;
        stored += 1;
    }

    info!(" Loaded {} ability icons", stored);
    Ok(stored)
}
/// Load buff icons from a directory
///
/// Scans `dir` (non-recursively) for image files, re-encodes each as
/// lossless WebP, and upserts it into `buff_icons` keyed by file stem.
///
/// # Arguments
/// * `dir` - Directory containing the icon images
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored; `Ok(0)` if the directory is missing.
fn load_buff_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::buff_icons;
    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }
    let mut conn = self.establish_connection()?;
    let mut count = 0;
    for path in self.find_image_files(dir_path)? {
        // Borrow the stem directly (no String allocation); skip files whose
        // stem is missing, non-UTF-8, or empty.
        let Some(name) = path
            .file_stem()
            .and_then(|s| s.to_str())
            .filter(|s| !s.is_empty())
        else {
            continue;
        };
        let img = image::open(&path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();
        let new_icon = NewBuffIcon {
            name,
            icon: &webp_data,
        };
        // replace_into keeps the loader idempotent across re-runs.
        diesel::replace_into(buff_icons::table)
            .values(&new_icon)
            .execute(&mut conn)?;
        count += 1;
    }
    info!(" Loaded {} buff icons", count);
    Ok(count)
}
/// Load trait icons from a directory
///
/// Scans `dir` (non-recursively) for image files, re-encodes each as
/// lossless WebP, and upserts it into `trait_icons` keyed by file stem.
///
/// # Arguments
/// * `dir` - Directory containing the icon images
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored; `Ok(0)` if the directory is missing.
fn load_trait_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::trait_icons;
    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }
    let mut conn = self.establish_connection()?;
    let mut count = 0;
    for path in self.find_image_files(dir_path)? {
        // Borrow the stem directly (no String allocation); skip files whose
        // stem is missing, non-UTF-8, or empty.
        let Some(name) = path
            .file_stem()
            .and_then(|s| s.to_str())
            .filter(|s| !s.is_empty())
        else {
            continue;
        };
        let img = image::open(&path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();
        let new_icon = NewTraitIcon {
            name,
            icon: &webp_data,
        };
        // replace_into keeps the loader idempotent across re-runs.
        diesel::replace_into(trait_icons::table)
            .values(&new_icon)
            .execute(&mut conn)?;
        count += 1;
    }
    info!(" Loaded {} trait icons", count);
    Ok(count)
}
/// Load player house icons from a directory
///
/// Scans `dir` (non-recursively) for image files, re-encodes each as
/// lossless WebP, and upserts it into `player_house_icons` keyed by file stem.
///
/// # Arguments
/// * `dir` - Directory containing the icon images
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored; `Ok(0)` if the directory is missing.
fn load_player_house_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::player_house_icons;
    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }
    let mut conn = self.establish_connection()?;
    let mut count = 0;
    for path in self.find_image_files(dir_path)? {
        // Borrow the stem directly (no String allocation); skip files whose
        // stem is missing, non-UTF-8, or empty.
        let Some(name) = path
            .file_stem()
            .and_then(|s| s.to_str())
            .filter(|s| !s.is_empty())
        else {
            continue;
        };
        let img = image::open(&path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();
        let new_icon = NewPlayerHouseIcon {
            name,
            icon: &webp_data,
        };
        // replace_into keeps the loader idempotent across re-runs.
        diesel::replace_into(player_house_icons::table)
            .values(&new_icon)
            .execute(&mut conn)?;
        count += 1;
    }
    info!(" Loaded {} player house icons", count);
    Ok(count)
}
/// Load stat icons from a directory
///
/// Scans `dir` (non-recursively) for image files, re-encodes each as
/// lossless WebP, and upserts it into `stat_icons` keyed by file stem.
///
/// # Arguments
/// * `dir` - Directory containing the icon images
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored; `Ok(0)` if the directory is missing.
fn load_stat_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::stat_icons;
    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }
    let mut conn = self.establish_connection()?;
    let mut count = 0;
    for path in self.find_image_files(dir_path)? {
        // Borrow the stem directly (no String allocation); skip files whose
        // stem is missing, non-UTF-8, or empty.
        let Some(name) = path
            .file_stem()
            .and_then(|s| s.to_str())
            .filter(|s| !s.is_empty())
        else {
            continue;
        };
        let img = image::open(&path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();
        let new_icon = NewStatIcon {
            name,
            icon: &webp_data,
        };
        // replace_into keeps the loader idempotent across re-runs.
        diesel::replace_into(stat_icons::table)
            .values(&new_icon)
            .execute(&mut conn)?;
        count += 1;
    }
    info!(" Loaded {} stat icons", count);
    Ok(count)
}
/// Load achievement icons, filtering out files ending with _0
///
/// Scans `dir` (non-recursively) for image files, skips stems ending in
/// `_0`, re-encodes the rest as lossless WebP, and upserts them into
/// `achievement_icons` keyed by file stem.
///
/// # Arguments
/// * `dir` - Directory containing the icon images
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored; `Ok(0)` if the directory is missing.
fn load_achievement_icons<P: AsRef<Path>>(
    &self,
    dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    use crate::schema::achievement_icons;
    let dir_path = dir.as_ref();
    if !dir_path.exists() {
        warn!("Directory does not exist: {}", dir_path.display());
        return Ok(0);
    }
    let mut conn = self.establish_connection()?;
    let mut count = 0;
    for path in self.find_image_files(dir_path)? {
        // Borrow the stem directly (no String allocation); skip files whose
        // stem is missing, non-UTF-8, or empty.
        let Some(name) = path
            .file_stem()
            .and_then(|s| s.to_str())
            .filter(|s| !s.is_empty())
        else {
            continue;
        };
        // Skip files ending with _0
        if name.ends_with("_0") {
            continue;
        }
        // Load and encode as lossless WebP
        let img = image::open(&path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)
            .map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
        *total_bytes += webp_data.len();
        let new_icon = NewAchievementIcon {
            name,
            icon: &webp_data,
        };
        // replace_into keeps the loader idempotent across re-runs.
        diesel::replace_into(achievement_icons::table)
            .values(&new_icon)
            .execute(&mut conn)?;
        count += 1;
    }
    info!(" Loaded {} achievement icons", count);
    Ok(count)
}
/// Load general icons with multiple sizes
///
/// Collects icons from a fixed set of texture subdirectories plus a list of
/// individual files, then stores each (original + downscaled variants) via
/// `process_general_icon`. Missing individual files and per-icon processing
/// failures are logged and skipped rather than aborting the whole load.
///
/// # Arguments
/// * `textures_dir` - The `Data/Textures` root
/// * `total_bytes` - Accumulator for encoded WebP payload sizes
///
/// # Returns
/// Number of icons stored successfully.
fn load_general_icons<P: AsRef<Path>>(
    &self,
    textures_dir: P,
    total_bytes: &mut usize,
) -> Result<usize, IconDatabaseError> {
    let textures = textures_dir.as_ref();
    let mut count = 0;
    // Collect all general icon paths
    let mut icon_paths: Vec<(String, PathBuf)> = Vec::new();
    // Directory-based icons; the bool flags PNG-only directories.
    let directories = [
        ("Achievements/Trophies", true), // PNG only
        ("BottomRightTabs", false),
        ("MinimapIcons", false),
        ("Notifications", false),
        ("OverheadIcons", false),
        ("Skills", false),
    ];
    for (subdir, png_only) in directories {
        let dir = textures.join(subdir);
        if dir.exists() {
            let files = if png_only {
                self.find_png_files(&dir)?
            } else {
                self.find_image_files(&dir)?
            };
            for path in files {
                // Prefix the stem with the (underscored) subdirectory so
                // names from different directories cannot collide.
                let name = path.file_stem()
                    .and_then(|s| s.to_str())
                    .map(|s| format!("{}_{}", subdir.replace('/', "_"), s))
                    .unwrap_or_default();
                if !name.is_empty() {
                    icon_paths.push((name, path));
                }
            }
        }
    }
    // Individual file icons
    let individual_files = [
        ("Common/Book.png", "Common_Book"),
        ("Common/Hourglass.png", "Common_Hourglass"),
        ("Common/Mana.png", "Common_Mana"),
        ("Common/QuestCompleteTrophy.png", "Common_QuestCompleteTrophy"),
        ("Common/Tick.png", "Common_Tick"),
        ("Common/TutorialTip.png", "Common_TutorialTip"),
        ("Common/Zoom_Minus.png", "Common_Zoom_Minus"),
        ("Common/Zoom_Plus.png", "Common_Zoom_Plus"),
        ("Inventory/Banknote.png", "Inventory_Banknote"),
        ("Minimap/ShowCoordinates.png", "Minimap_ShowCoordinates"),
        ("SplashScreens/Olipa.png", "SplashScreens_Olipa"),
        ("ItemIcons/131.png", "Coins"),
        ("118.png", "Map"),
        ("124.png", "Entrance"),
        ("Bug.png", "Bug"),
        ("Checkmark.png", "Checkmark"),
    ];
    for (file, name) in individual_files {
        let path = textures.join(file);
        if path.exists() {
            icon_paths.push((name.to_string(), path));
        } else {
            warn!("File not found: {}", path.display());
        }
    }
    // Process all collected icons. Failures are logged (previously they were
    // silently discarded) but do not abort the remaining icons.
    let mut conn = self.establish_connection()?;
    for (name, path) in icon_paths {
        match self.process_general_icon(&path, &name, &mut conn) {
            Ok(bytes) => {
                *total_bytes += bytes;
                count += 1;
            }
            Err(e) => {
                warn!("Failed to process general icon {}: {}", name, e);
            }
        }
    }
    info!(" Loaded {} general icons", count);
    Ok(count)
}
/// Process a single general icon at multiple sizes
fn process_general_icon(
&self,
path: &Path,
name: &str,
conn: &mut SqliteConnection,
) -> Result<usize, IconDatabaseError> {
use crate::schema::general_icons;
// Load image
let img = image::open(path)?;
let (width, height) = (img.width(), img.height());
let rgba = img.to_rgba8();
let mut total_bytes = 0;
// Original size (lossless)
let icon_original = ImageProcessor::encode_webp_lossless(&rgba)
.map_err(|e| IconDatabaseError::IoError(std::io::Error::other(e.to_string())))?;
total_bytes += icon_original.len();
// Generate smaller sizes only if image is large enough (no upscaling)
let processor = ImageProcessor::new(90.0);
let icon_256 = if width >= 256 && height >= 256 {
Some(self.resize_and_encode(&img, 256, &processor)?)
} else {
None
};
if let Some(ref data) = icon_256 {
total_bytes += data.len();
}
let icon_64 = if width >= 64 && height >= 64 {
Some(self.resize_and_encode(&img, 64, &processor)?)
} else {
None
};
if let Some(ref data) = icon_64 {
total_bytes += data.len();
}
let icon_32 = if width >= 32 && height >= 32 {
Some(self.resize_and_encode(&img, 32, &processor)?)
} else {
None
};
if let Some(ref data) = icon_32 {
total_bytes += data.len();
}
let new_icon = NewGeneralIcon {
name,
original_width: width as i32,
original_height: height as i32,
icon_original: Some(&icon_original),
icon_256: icon_256.as_deref(),
icon_64: icon_64.as_deref(),
icon_32: icon_32.as_deref(),
};
diesel::replace_into(general_icons::table)
.values(&new_icon)
.execute(conn)?;
Ok(total_bytes)
}
/// Resize image and encode to WebP
///
/// Scales the image to an exact `size` x `size` square (Lanczos3) and
/// encodes it lossily at quality 90. `_processor` is accepted for call-site
/// symmetry but not used here.
fn resize_and_encode(
    &self,
    img: &image::DynamicImage,
    size: u32,
    _processor: &ImageProcessor,
) -> Result<Vec<u8>, IconDatabaseError> {
    // NOTE(review): resize_exact ignores aspect ratio — fine for square
    // icons; confirm all inputs are square.
    let pixels = img
        .resize_exact(size, size, image::imageops::FilterType::Lanczos3)
        .to_rgba8();
    // Use lossy encoding for smaller sizes
    let encoded = webp::Encoder::from_rgba(pixels.as_raw(), size, size).encode(90.0);
    Ok(encoded.to_vec())
}
/// Find all image files (PNG, JPG, etc.) in a directory
///
/// Non-recursive; matches extensions case-insensitively without allocating
/// a lowercased copy per file. Results are sorted for deterministic order.
fn find_image_files<P: AsRef<Path>>(
    &self,
    dir: P,
) -> Result<Vec<PathBuf>, IconDatabaseError> {
    let mut files = Vec::new();
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        let is_image = path.is_file()
            && path
                .extension()
                .and_then(|s| s.to_str())
                .is_some_and(|ext| {
                    ext.eq_ignore_ascii_case("png")
                        || ext.eq_ignore_ascii_case("jpg")
                        || ext.eq_ignore_ascii_case("jpeg")
                });
        if is_image {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}
/// Find only PNG files in a directory
///
/// Non-recursive; matches the `png` extension case-insensitively without
/// allocating a lowercased copy per file. Results are sorted for
/// deterministic order.
fn find_png_files<P: AsRef<Path>>(
    &self,
    dir: P,
) -> Result<Vec<PathBuf>, IconDatabaseError> {
    let mut files = Vec::new();
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        let is_png = path.is_file()
            && path
                .extension()
                .and_then(|s| s.to_str())
                .is_some_and(|ext| ext.eq_ignore_ascii_case("png"));
        if is_png {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}
}

View File

@@ -1,10 +1,13 @@
use crate::image_processor::ImageProcessor;
use crate::item_loader::{
calculate_prices, generate_banknotes, generate_exceptional_items, load_items_from_directory,
};
use crate::types::Item;
use crate::xml_parser::{parse_items_xml, XmlParseError};
use crate::xml_parsers::{parse_items_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::path::{Path, PathBuf};
/// A database for managing game items loaded from XML files
#[derive(Debug, Clone)]
@@ -199,8 +202,8 @@ impl ItemDatabase {
serde_json::to_string(&self.items)
}
/// Prepare items for SQL insertion
/// Returns a vector of tuples (id, name, json_data)
/// Prepare items for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
self.items
.iter()
@@ -210,6 +213,385 @@ impl ItemDatabase {
})
.collect()
}
/// Save all items to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::{items, crafting_recipes, crafting_recipe_items};
use diesel::replace_into;
conn.transaction::<_, diesel::result::Error, _>(|conn| {
let mut count = 0;
for item in &self.items {
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
// Insert/replace item with all columns
replace_into(items::table)
.values((
items::id.eq(item.type_id),
items::name.eq(&item.item_name),
items::data.eq(json),
items::item_type.eq(item.item_type.to_string()),
items::level.eq(item.level),
items::price.eq(item.price),
items::max_stack.eq(item.max_stack),
items::storage_size.eq(item.storage_size),
items::skill.eq(match item.skill {
crate::types::SkillType::None => "none",
crate::types::SkillType::Swordsmanship => "swordsmanship",
crate::types::SkillType::Archery => "archery",
crate::types::SkillType::Magic => "magic",
crate::types::SkillType::Defence => "defence",
crate::types::SkillType::Mining => "mining",
crate::types::SkillType::Woodcutting => "woodcutting",
crate::types::SkillType::Fishing => "fishing",
crate::types::SkillType::Cooking => "cooking",
crate::types::SkillType::Carpentry => "carpentry",
crate::types::SkillType::Blacksmithy => "blacksmithy",
crate::types::SkillType::Tailoring => "tailoring",
crate::types::SkillType::Alchemy => "alchemy",
}),
items::tool.eq(match item.tool {
crate::types::Tool::None => "none",
crate::types::Tool::Pickaxe => "pickaxe",
crate::types::Tool::Hatchet => "hatchet",
crate::types::Tool::Scythe => "scythe",
crate::types::Tool::Hammer => "hammer",
crate::types::Tool::Shears => "shears",
crate::types::Tool::FishingRod => "fishingrod",
}),
items::description.eq(&item.description),
items::two_handed.eq(item.two_handed as i32),
items::undroppable.eq(item.undroppable as i32),
items::undroppable_on_death.eq(item.undroppable_on_death as i32),
items::unequip_destroy.eq(item.unequip_destroy as i32),
items::generate_icon.eq(item.generate_icon as i32),
items::hide_milestone.eq(item.hide_milestone as i32),
items::cannot_craft_exceptional.eq(item.cannot_craft_exceptional as i32),
items::storage_all_items.eq(item.storage_all_items as i32),
items::ability_id.eq(item.ability_id),
items::special_ability.eq(item.special_ability),
items::learn_ability_id.eq(item.learn_ability_id),
items::book_id.eq(item.book_id),
items::swap_item.eq(item.swap_item),
))
.execute(conn)?;
// Save crafting recipes for this item
for recipe in &item.crafting_recipes {
use diesel::prelude::*;
// Insert recipe
diesel::insert_into(crafting_recipes::table)
.values((
crafting_recipes::product_item_id.eq(item.type_id),
crafting_recipes::skill.eq(match recipe.skill {
crate::types::SkillType::None => "none",
crate::types::SkillType::Swordsmanship => "swordsmanship",
crate::types::SkillType::Archery => "archery",
crate::types::SkillType::Magic => "magic",
crate::types::SkillType::Defence => "defence",
crate::types::SkillType::Mining => "mining",
crate::types::SkillType::Woodcutting => "woodcutting",
crate::types::SkillType::Fishing => "fishing",
crate::types::SkillType::Cooking => "cooking",
crate::types::SkillType::Carpentry => "carpentry",
crate::types::SkillType::Blacksmithy => "blacksmithy",
crate::types::SkillType::Tailoring => "tailoring",
crate::types::SkillType::Alchemy => "alchemy",
}),
crafting_recipes::level.eq(recipe.level),
crafting_recipes::workbench_id.eq(recipe.workbench_id),
crafting_recipes::xp.eq(recipe.xp),
crafting_recipes::unlocked_by_default.eq(recipe.unlocked_by_default as i32),
crafting_recipes::checks.eq(recipe.checks.as_ref()),
))
.execute(conn)?;
// Get the recipe_id we just inserted
let recipe_id: i32 = diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
"last_insert_rowid()"
))
.get_result(conn)?;
// Insert recipe items (ingredients)
for ingredient in &recipe.items {
diesel::insert_into(crafting_recipe_items::table)
.values((
crafting_recipe_items::recipe_id.eq(recipe_id),
crafting_recipe_items::item_id.eq(ingredient.item_id),
crafting_recipe_items::amount.eq(ingredient.amount),
))
.execute(conn)?;
}
}
count += 1;
}
Ok(count)
})
}
/// Save all items to SQLite database with icon processing
///
/// # Arguments
/// * `conn` - Database connection
/// * `icon_path` - Path to the ItemIcons directory (e.g., "CBAssets/Data/Textures/ItemIcons")
///
/// # Returns
/// Tuple of (items_saved, images_processed)
pub fn save_to_db_with_images<P: AsRef<Path>>(
&self,
conn: &mut SqliteConnection,
icon_path: P,
) -> Result<(usize, usize), diesel::result::Error> {
use crate::schema::items;
use diesel::replace_into;
let icon_base_path = icon_path.as_ref();
let processor = ImageProcessor::new(85.0); // 85% WebP quality
let mut images_processed = 0;
conn.transaction::<_, diesel::result::Error, _>(|conn| {
let mut count = 0;
for item in &self.items {
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
// Process item icon if it exists
let (icon_large, icon_medium, icon_small) =
Self::process_item_icon(&processor, icon_base_path, item.type_id);
if icon_large.is_some() {
images_processed += 1;
}
// Insert/replace item with all columns including images
replace_into(items::table)
.values((
items::id.eq(item.type_id),
items::name.eq(&item.item_name),
items::data.eq(json),
items::item_type.eq(item.item_type.to_string()),
items::level.eq(item.level),
items::price.eq(item.price),
items::max_stack.eq(item.max_stack),
items::storage_size.eq(item.storage_size),
items::skill.eq(match item.skill {
crate::types::SkillType::None => "none",
crate::types::SkillType::Swordsmanship => "swordsmanship",
crate::types::SkillType::Archery => "archery",
crate::types::SkillType::Magic => "magic",
crate::types::SkillType::Defence => "defence",
crate::types::SkillType::Mining => "mining",
crate::types::SkillType::Woodcutting => "woodcutting",
crate::types::SkillType::Fishing => "fishing",
crate::types::SkillType::Cooking => "cooking",
crate::types::SkillType::Carpentry => "carpentry",
crate::types::SkillType::Blacksmithy => "blacksmithy",
crate::types::SkillType::Tailoring => "tailoring",
crate::types::SkillType::Alchemy => "alchemy",
}),
items::tool.eq(match item.tool {
crate::types::Tool::None => "none",
crate::types::Tool::Pickaxe => "pickaxe",
crate::types::Tool::Hatchet => "hatchet",
crate::types::Tool::Scythe => "scythe",
crate::types::Tool::Hammer => "hammer",
crate::types::Tool::Shears => "shears",
crate::types::Tool::FishingRod => "fishingrod",
}),
items::description.eq(&item.description),
items::two_handed.eq(item.two_handed as i32),
items::undroppable.eq(item.undroppable as i32),
items::undroppable_on_death.eq(item.undroppable_on_death as i32),
items::unequip_destroy.eq(item.unequip_destroy as i32),
items::generate_icon.eq(item.generate_icon as i32),
items::hide_milestone.eq(item.hide_milestone as i32),
items::cannot_craft_exceptional.eq(item.cannot_craft_exceptional as i32),
items::storage_all_items.eq(item.storage_all_items as i32),
items::ability_id.eq(item.ability_id),
items::special_ability.eq(item.special_ability),
items::learn_ability_id.eq(item.learn_ability_id),
items::book_id.eq(item.book_id),
items::swap_item.eq(item.swap_item),
items::icon_large.eq(icon_large.as_ref()),
items::icon_medium.eq(icon_medium.as_ref()),
items::icon_small.eq(icon_small.as_ref()),
))
.execute(conn)?;
// Save crafting recipes for this item (same as before)
for recipe in &item.crafting_recipes {
use diesel::prelude::*;
diesel::insert_into(crate::schema::crafting_recipes::table)
.values((
crate::schema::crafting_recipes::product_item_id.eq(item.type_id),
crate::schema::crafting_recipes::skill.eq(match recipe.skill {
crate::types::SkillType::None => "none",
crate::types::SkillType::Swordsmanship => "swordsmanship",
crate::types::SkillType::Archery => "archery",
crate::types::SkillType::Magic => "magic",
crate::types::SkillType::Defence => "defence",
crate::types::SkillType::Mining => "mining",
crate::types::SkillType::Woodcutting => "woodcutting",
crate::types::SkillType::Fishing => "fishing",
crate::types::SkillType::Cooking => "cooking",
crate::types::SkillType::Carpentry => "carpentry",
crate::types::SkillType::Blacksmithy => "blacksmithy",
crate::types::SkillType::Tailoring => "tailoring",
crate::types::SkillType::Alchemy => "alchemy",
}),
crate::schema::crafting_recipes::level.eq(recipe.level),
crate::schema::crafting_recipes::workbench_id.eq(recipe.workbench_id),
crate::schema::crafting_recipes::xp.eq(recipe.xp),
crate::schema::crafting_recipes::unlocked_by_default.eq(recipe.unlocked_by_default as i32),
crate::schema::crafting_recipes::checks.eq(recipe.checks.as_ref()),
))
.execute(conn)?;
let recipe_id: i32 = diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
"last_insert_rowid()"
))
.get_result(conn)?;
for ingredient in &recipe.items {
diesel::insert_into(crate::schema::crafting_recipe_items::table)
.values((
crate::schema::crafting_recipe_items::recipe_id.eq(recipe_id),
crate::schema::crafting_recipe_items::item_id.eq(ingredient.item_id),
crate::schema::crafting_recipe_items::amount.eq(ingredient.amount),
))
.execute(conn)?;
}
}
// Save item stats
for stat in &item.stats {
let stat_type_str = match stat.stat_type {
crate::types::StatType::None => "none",
crate::types::StatType::Health => "health",
crate::types::StatType::Mana => "mana",
crate::types::StatType::HealthRegen => "health_regen",
crate::types::StatType::ManaRegen => "mana_regen",
crate::types::StatType::DamagePhysical => "damage_physical",
crate::types::StatType::DamageMagical => "damage_magical",
crate::types::StatType::DamageRanged => "damage_ranged",
crate::types::StatType::AccuracyPhysical => "accuracy_physical",
crate::types::StatType::AccuracyMagical => "accuracy_magical",
crate::types::StatType::AccuracyRanged => "accuracy_ranged",
crate::types::StatType::ResistancePhysical => "resistance_physical",
crate::types::StatType::ResistanceMagical => "resistance_magical",
crate::types::StatType::ResistanceRanged => "resistance_ranged",
crate::types::StatType::Critical => "critical",
crate::types::StatType::Healing => "healing",
crate::types::StatType::MovementSpeed => "movement_speed",
crate::types::StatType::DamageVsBeasts => "damage_vs_beasts",
crate::types::StatType::DamageVsUndead => "damage_vs_undead",
crate::types::StatType::CritterSlaying => "critter_slaying",
};
diesel::insert_into(crate::schema::item_stats::table)
.values((
crate::schema::item_stats::item_id.eq(item.type_id),
crate::schema::item_stats::stat_type.eq(stat_type_str),
crate::schema::item_stats::value.eq(stat.value),
))
.execute(conn)?;
}
count += 1;
}
Ok((count, images_processed))
})
}
/// Helper function to process a single item icon
/// Returns (large, medium, small) WebP blobs
///
/// Looks up `<item_id>.png` (or `.PNG`) under `icon_base_path` and encodes
/// it at 256/64/16 px; returns all-`None` when the file is missing or
/// processing fails.
fn process_item_icon(
    processor: &ImageProcessor,
    icon_base_path: &Path,
    item_id: i32,
) -> (Option<Vec<u8>>, Option<Vec<u8>>, Option<Vec<u8>>) {
    // Assets ship with mixed-case extensions and Linux paths are
    // case-sensitive, so probe both spellings.
    let candidates = [
        icon_base_path.join(format!("{}.png", item_id)),
        icon_base_path.join(format!("{}.PNG", item_id)),
    ];
    let Some(icon_file) = candidates.into_iter().find(|p| p.exists()) else {
        return (None, None, None);
    };
    // Process image at 3 sizes: 256, 64, 16
    match processor.process_image(&icon_file, &[256, 64, 16], None, None) {
        Ok(processed) => (
            processed.get(256).cloned(),
            processed.get(64).cloned(),
            processed.get(16).cloned(),
        ),
        Err(e) => {
            log::warn!("Failed to process icon for item {}: {}", item_id, e);
            (None, None, None)
        }
    }
}
/// Load all items from SQLite database
///
/// Rebuilds the in-memory database from the `data` JSON column, which holds
/// the complete serialized item (including crafting recipes); rows whose
/// JSON fails to parse are skipped.
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
    use crate::schema::items::dsl::*;
    // Field order must mirror the items table column order for Queryable.
    #[derive(Queryable)]
    #[allow(dead_code)]
    struct ItemRecord {
        id: Option<i32>,
        name: String,
        data: String,
        item_type: String,
        level: i32,
        price: i32,
        max_stack: i32,
        storage_size: i32,
        skill: String,
        tool: String,
        description: String,
        two_handed: i32,
        undroppable: i32,
        undroppable_on_death: i32,
        unequip_destroy: i32,
        generate_icon: i32,
        hide_milestone: i32,
        cannot_craft_exceptional: i32,
        storage_all_items: i32,
        ability_id: i32,
        special_ability: i32,
        learn_ability_id: i32,
        book_id: i32,
        swap_item: i32,
        icon_large: Option<Vec<u8>>,
        icon_medium: Option<Vec<u8>>,
        icon_small: Option<Vec<u8>>,
    }
    let loaded_items: Vec<Item> = items
        .load::<ItemRecord>(conn)?
        .into_iter()
        .filter_map(|record| serde_json::from_str::<Item>(&record.data).ok())
        .collect();
    let mut db = Self::new();
    db.add_items(loaded_items);
    Ok(db)
}
}
impl Default for ItemDatabase {

View File

@@ -1,5 +1,7 @@
use crate::types::{LootTable, LootDrop};
use crate::xml_parser::{parse_loot_xml, XmlParseError};
use crate::xml_parsers::{parse_loot_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -147,8 +149,8 @@ impl LootDatabase {
self.tables.is_empty()
}
/// Prepare loot tables for SQL insertion
/// Returns a vector of tuples (npc_ids_json, name, json_data)
/// Prepare loot tables for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(String, Option<String>, String)> {
self.tables
.iter()
@@ -159,6 +161,54 @@ impl LootDatabase {
})
.collect()
}
/// Save all loot tables to SQLite database
///
/// Each row stores the JSON-encoded NPC id list in `table_id`, leaves
/// `npc_id` NULL, and serializes the whole table into `data`.
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
    use crate::schema::loot_tables;
    // NOTE(review): plain insert (not replace) — re-running may duplicate
    // rows; confirm against the table's constraints.
    let mut count = 0;
    for table in &self.tables {
        let npc_ids_json =
            serde_json::to_string(&table.npc_ids).unwrap_or_else(|_| "[]".to_string());
        let payload = serde_json::to_string(table).unwrap_or_else(|_| "{}".to_string());
        diesel::insert_into(loot_tables::table)
            .values((
                loot_tables::table_id.eq(npc_ids_json),
                loot_tables::npc_id.eq(None::<String>),
                loot_tables::data.eq(payload),
            ))
            .execute(conn)?;
        count += 1;
    }
    Ok(count)
}
/// Load all loot tables from SQLite database
///
/// Only the JSON `data` column is needed to rebuild each table; rows with
/// unparseable JSON are skipped.
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
    use crate::schema::loot_tables::dsl::*;
    // Field order mirrors the loot_tables column order for Queryable.
    #[derive(Queryable)]
    struct LootTableRecord {
        table_id: Option<String>,
        npc_id: Option<String>,
        data: String,
    }
    let loaded_tables: Vec<LootTable> = loot_tables
        .load::<LootTableRecord>(conn)?
        .into_iter()
        .filter_map(|record| serde_json::from_str::<LootTable>(&record.data).ok())
        .collect();
    let mut db = Self::new();
    db.add_tables(loaded_tables);
    Ok(db)
}
}
impl Default for LootDatabase {

View File

@@ -1,5 +1,7 @@
use crate::types::Map;
use crate::xml_parser::{parse_maps_xml, XmlParseError};
use crate::xml_parsers::{parse_maps_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -176,8 +178,8 @@ impl MapDatabase {
self.maps.is_empty()
}
/// Prepare maps for SQL insertion
/// Returns a vector of tuples (scene_id, name, json_data)
/// Prepare maps for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(String, String, String)> {
self.maps
.iter()
@@ -187,6 +189,59 @@ impl MapDatabase {
})
.collect()
}
/// Save all maps to SQLite database
///
/// Inserts one row per map with its scene id, display name, and the full
/// serialized JSON payload. The intermediate record Vec from the original
/// implementation was a needless collect; rows are now inserted directly.
///
/// # Returns
/// Number of maps written.
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
    use crate::schema::maps;
    let mut count = 0;
    for map in &self.maps {
        let json = serde_json::to_string(map).unwrap_or_else(|_| "{}".to_string());
        diesel::insert_into(maps::table)
            .values((
                maps::scene_id.eq(&map.scene_id),
                maps::name.eq(&map.name),
                maps::data.eq(json),
            ))
            .execute(conn)?;
        count += 1;
    }
    Ok(count)
}
/// Load all maps from SQLite database
///
/// Rebuilds the in-memory map set from the JSON `data` column; rows with
/// unparseable JSON are skipped.
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
    use crate::schema::maps::dsl::*;
    // Field order mirrors the maps column order for Queryable.
    #[derive(Queryable)]
    struct MapRecord {
        scene_id: Option<String>,
        name: String,
        data: String,
    }
    let loaded_maps: Vec<Map> = maps
        .load::<MapRecord>(conn)?
        .into_iter()
        .filter_map(|record| serde_json::from_str::<Map>(&record.data).ok())
        .collect();
    let mut db = Self::new();
    db.add_maps(loaded_maps);
    Ok(db)
}
}
impl Default for MapDatabase {

View File

@@ -0,0 +1,388 @@
use crate::types::{MinimapTileRecord, NewMinimapTile};
use crate::image_processor::{ImageProcessor, ImageProcessingError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::path::{Path, PathBuf};
use std::fs;
use std::collections::HashMap;
use thiserror::Error;
/// Errors that can occur while building or querying the minimap tile database.
#[derive(Debug, Error)]
pub enum MinimapDatabaseError {
    /// Wrapped diesel query/insert failure.
    #[error("Database error: {0}")]
    DatabaseError(#[from] diesel::result::Error),
    /// Failure while encoding or merging tiles in the image pipeline.
    #[error("Image processing error: {0}")]
    ImageError(#[from] ImageProcessingError),
    /// Failure decoding a source image via the `image` crate.
    #[error("Image load error: {0}")]
    ImageLoadError(#[from] image::ImageError),
    /// Underlying filesystem error (e.g. read_dir, metadata).
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// A tile filename did not match the expected coordinate format.
    #[error("Invalid filename format: {0}")]
    InvalidFilename(String),
    /// The SQLite connection could not be established.
    #[error("Connection pool error: {0}")]
    ConnectionError(String),
}
/// Database for managing minimap tiles with merged zoom levels
pub struct MinimapDatabase {
    // SQLite database URL; connections are opened on demand via
    // `establish_connection`.
    database_url: String,
}
impl MinimapDatabase {
/// Create new database connection
///
/// Accepts anything convertible into a `String` (e.g. `&str` or `String`)
/// so callers need not allocate up front. No connection is opened here;
/// `establish_connection` opens one on demand.
pub fn new(database_url: impl Into<String>) -> Self {
    Self {
        database_url: database_url.into(),
    }
}
/// Establish database connection
///
/// Opens a fresh SQLite connection to `self.database_url`, converting any
/// diesel connection failure into `MinimapDatabaseError::ConnectionError`.
fn establish_connection(&self) -> Result<SqliteConnection, MinimapDatabaseError> {
    match SqliteConnection::establish(&self.database_url) {
        Ok(conn) => Ok(conn),
        Err(err) => Err(MinimapDatabaseError::ConnectionError(err.to_string())),
    }
}
/// Load all PNG files from directory and process them into all zoom levels
///
/// Pipeline: (1) encode every source PNG as a lossless-WebP zoom-2 tile and
/// upsert it, caching the bytes in memory; (2) merge cached tiles 2x2 into
/// zoom-1 tiles; (3) merge 4x4 into zoom-0 tiles. Progress is reported on
/// stdout.
///
/// # Arguments
/// * `minimap_dir` - Directory containing the per-tile PNG files
/// * `base_path` - Prefix stripped from each PNG path to form the stored
///   relative `source_path`
///
/// # Returns
/// Total number of tiles written across all three zoom levels.
pub fn load_from_directory<P: AsRef<Path>, B: AsRef<Path>>(
    &self,
    minimap_dir: P,
    base_path: B,
) -> Result<usize, MinimapDatabaseError> {
    use crate::schema::minimap_tiles;
    let mut conn = self.establish_connection()?;
    println!("Loading PNG files from directory...");
    let png_files = self.find_minimap_pngs(minimap_dir.as_ref())?;
    println!("Found {} PNG files", png_files.len());
    // Step 1: Process all original tiles (zoom level 2) and store their WebP data
    println!("\nProcessing zoom level 2 (original tiles)...");
    let mut tile_data: HashMap<(i32, i32), Vec<u8>> = HashMap::new();
    let mut count = 0;
    for png_path in &png_files {
        // Tile grid position is encoded in the filename.
        let (x, y) = self.parse_coordinates(png_path)?;
        // Load and encode as lossless WebP
        let img = image::open(png_path)?;
        let rgba = img.to_rgba8();
        let webp_data = ImageProcessor::encode_webp_lossless(&rgba)?;
        // Get original file size
        let original_size = fs::metadata(png_path)?.len() as i32;
        // Store in database
        let relative_path = png_path.strip_prefix(base_path.as_ref()).unwrap_or(png_path);
        // NOTE(review): width/height are hard-coded to 512 — assumes all
        // source tiles are 512x512; confirm against the asset export.
        let new_tile = NewMinimapTile {
            x,
            y,
            zoom: 2,
            width: 512,
            height: 512,
            original_file_size: Some(original_size),
            image: &webp_data,
            image_size: webp_data.len() as i32,
            source_path: relative_path.to_str().unwrap_or(""),
        };
        diesel::replace_into(minimap_tiles::table)
            .values(&new_tile)
            .execute(&mut conn)?;
        // Cache for later merging
        tile_data.insert((x, y), webp_data);
        count += 1;
        if count % 50 == 0 {
            println!("  Processed {} tiles...", count);
        }
    }
    println!("Processed {} zoom level 2 tiles", count);
    // Get bounds for merging
    let ((min_x, min_y), (max_x, max_y)) = self.get_map_bounds()?;
    println!("\nMap bounds: X [{}, {}], Y [{}, {}]", min_x, max_x, min_y, max_y);
    // Step 2: Generate zoom level 1 (2x2 merged)
    println!("\nGenerating zoom level 1 (2x2 merged)...");
    let zoom1_count = self.generate_merged_tiles(
        &mut conn,
        &tile_data,
        min_x,
        max_x,
        min_y,
        max_y,
        1, // zoom level
        2, // merge factor
    )?;
    println!("Generated {} zoom level 1 tiles", zoom1_count);
    // Step 3: Generate zoom level 0 (4x4 merged)
    println!("\nGenerating zoom level 0 (4x4 merged)...");
    let zoom0_count = self.generate_merged_tiles(
        &mut conn,
        &tile_data,
        min_x,
        max_x,
        min_y,
        max_y,
        0, // zoom level
        4, // merge factor
    )?;
    println!("Generated {} zoom level 0 tiles", zoom0_count);
    println!("\nTotal tiles generated:");
    println!("  Zoom 2: {}", count);
    println!("  Zoom 1: {}", zoom1_count);
    println!("  Zoom 0: {}", zoom0_count);
    println!("  Total: {}", count + zoom1_count + zoom0_count);
    Ok(count + zoom1_count + zoom0_count)
}
/// Generate merged tiles for a specific zoom level.
///
/// Walks the zoom-2 tile grid in `merge_factor`-sized steps, combines each
/// group of up to `merge_factor` x `merge_factor` source tiles into one
/// 512x512 tile, and stores it at `zoom_level` via REPLACE INTO.
///
/// Returns the number of merged tiles written.
fn generate_merged_tiles(
    &self,
    conn: &mut SqliteConnection,
    tile_data: &HashMap<(i32, i32), Vec<u8>>,
    min_x: i32,
    max_x: i32,
    min_y: i32,
    max_y: i32,
    zoom_level: i32,
    merge_factor: i32,
) -> Result<usize, MinimapDatabaseError> {
    use crate::schema::minimap_tiles;
    let mut count = 0;
    // Anchor the merge grid on multiples of `merge_factor` (floor/Euclidean
    // division) instead of on `min_x`/`min_y`. With the previous anchoring,
    // Rust's truncating division (`-1 / 2 == 0 == 1 / 2`) could map two
    // different merge groups to the same (x, y) when the bounds were negative
    // and unaligned, so one group silently overwrote the other via
    // REPLACE INTO; flooring keeps every group on a distinct coordinate.
    let start_x = min_x.div_euclid(merge_factor) * merge_factor;
    let start_y = min_y.div_euclid(merge_factor) * merge_factor;
    // Iterate through merged tile grid
    let mut merged_y = start_y;
    while merged_y <= max_y {
        let mut merged_x = start_x;
        while merged_x <= max_x {
            // Collect the source tiles for this merge group in a single scan,
            // recording both the tile bytes (keyed by local (dx, dy) offset,
            // as merge_tiles expects) and the absolute source coordinates
            // kept in `source_path` for debugging.
            let mut tiles_for_merge: HashMap<(i32, i32), Vec<u8>> = HashMap::new();
            let mut source_coords = Vec::new();
            for dy in 0..merge_factor {
                for dx in 0..merge_factor {
                    let tile_x = merged_x + dx;
                    let tile_y = merged_y + dy;
                    if let Some(webp) = tile_data.get(&(tile_x, tile_y)) {
                        tiles_for_merge.insert((dx, dy), webp.clone());
                        source_coords.push(format!("{},{}", tile_x, tile_y));
                    }
                }
            }
            // Only create a merged tile if we have at least one source tile;
            // completely empty groups are not stored at all.
            if !tiles_for_merge.is_empty() {
                let merged_img = ImageProcessor::merge_tiles(
                    &tiles_for_merge,
                    merge_factor,
                    merge_factor,
                    512,
                    512,
                )?;
                let merged_webp = ImageProcessor::encode_webp_lossless(&merged_img)?;
                // Euclidean division keeps negative coordinates on the same
                // grid as positive ones (see anchoring note above).
                let merged_tile_x = merged_x.div_euclid(merge_factor);
                let merged_tile_y = merged_y.div_euclid(merge_factor);
                let source_tiles = source_coords.join(";");
                let new_tile = NewMinimapTile {
                    x: merged_tile_x,
                    y: merged_tile_y,
                    zoom: zoom_level,
                    width: 512,
                    height: 512,
                    // Merged tiles have no single original PNG on disk.
                    original_file_size: None,
                    image: &merged_webp,
                    image_size: merged_webp.len() as i32,
                    source_path: &source_tiles,
                };
                diesel::replace_into(minimap_tiles::table)
                    .values(&new_tile)
                    .execute(conn)?;
                count += 1;
            }
            merged_x += merge_factor;
        }
        merged_y += merge_factor;
        // Coarse progress report once per completed row.
        if count % 20 == 0 && count > 0 {
            println!("  Generated {} merged tiles...", count);
        }
    }
    Ok(count)
}
/// Find all minimap PNG files in directory.
///
/// Only regular files with a `png` extension whose stem looks like an
/// `x_y` coordinate pair (digits, underscores and minus signs, with at
/// least one underscore) are returned; everything else is ignored.
fn find_minimap_pngs<P: AsRef<Path>>(
    &self,
    dir: P,
) -> Result<Vec<PathBuf>, MinimapDatabaseError> {
    let mut matches = Vec::new();
    for entry in fs::read_dir(dir)? {
        let candidate = entry?.path();
        if !candidate.is_file() {
            continue;
        }
        if candidate.extension().and_then(|e| e.to_str()) != Some("png") {
            continue;
        }
        // Keep only stems shaped like "x_y" (e.g. "-3_12").
        if let Some(stem) = candidate.file_stem().and_then(|s| s.to_str()) {
            let coordinate_like = stem.contains('_')
                && stem.chars().all(|c| c.is_numeric() || c == '_' || c == '-');
            if coordinate_like {
                matches.push(candidate);
            }
        }
    }
    Ok(matches)
}
/// Parse x,y coordinates from filename (e.g., "0_0.png" -> (0, 0)).
///
/// Returns `InvalidFilename` when the stem cannot be read as UTF-8, is not
/// exactly two '_'-separated fields, or either field is not a valid i32.
fn parse_coordinates<P: AsRef<Path>>(
    &self,
    path: P,
) -> Result<(i32, i32), MinimapDatabaseError> {
    let path = path.as_ref();
    let stem = path
        .file_stem()
        .and_then(|s| s.to_str())
        .ok_or_else(|| {
            MinimapDatabaseError::InvalidFilename(path.display().to_string())
        })?;
    // Every failure mode below maps to the same InvalidFilename(stem) error.
    let invalid = || MinimapDatabaseError::InvalidFilename(stem.to_string());
    // A stem with extra '_' separators (e.g. "1_2_3") leaves a non-numeric
    // remainder in the second field and is rejected by the parse below.
    let (x_part, y_part) = stem.split_once('_').ok_or_else(invalid)?;
    let x = x_part.parse::<i32>().map_err(|_| invalid())?;
    let y = y_part.parse::<i32>().map_err(|_| invalid())?;
    Ok((x, y))
}
/// Get map bounds (min/max x and y) from zoom level 2 tiles.
///
/// Returns `((min_x, min_y), (max_x, max_y))`. All four values default to 0
/// when no zoom-2 tiles exist (SQL MIN/MAX return NULL over an empty set).
pub fn get_map_bounds(
    &self,
) -> Result<((i32, i32), (i32, i32)), MinimapDatabaseError> {
    use crate::schema::minimap_tiles::dsl::*;
    use diesel::dsl::{max, min};
    let mut conn = self.establish_connection()?;
    // Fetch all four aggregates in a single query instead of two separate
    // round trips over the same filtered set.
    let (min_x_val, max_x_val, min_y_val, max_y_val): (
        Option<i32>,
        Option<i32>,
        Option<i32>,
        Option<i32>,
    ) = minimap_tiles
        .filter(zoom.eq(2))
        .select((min(x), max(x), min(y), max(y)))
        .first(&mut conn)?;
    Ok((
        (min_x_val.unwrap_or(0), min_y_val.unwrap_or(0)),
        (max_x_val.unwrap_or(0), max_y_val.unwrap_or(0)),
    ))
}
/// Get count of tiles at a specific zoom level.
pub fn count_at_zoom(&self, zoom_level: i32) -> Result<i64, MinimapDatabaseError> {
    use crate::schema::minimap_tiles::dsl::*;
    let mut conn = self.establish_connection()?;
    // `.count()` generates the same COUNT(*) aggregate as an explicit
    // select(count_star()).
    let tile_count = minimap_tiles
        .filter(zoom.eq(zoom_level))
        .count()
        .get_result::<i64>(&mut conn)?;
    Ok(tile_count)
}
/// Get storage statistics.
///
/// Loads every stored tile and accumulates per-zoom counts and WebP sizes;
/// the original (pre-conversion) file size is only tracked for zoom 2,
/// since merged tiles have no source file.
pub fn get_storage_stats(&self) -> Result<StorageStats, MinimapDatabaseError> {
    let mut conn = self.establish_connection()?;
    use crate::schema::minimap_tiles::dsl::*;
    let all_tiles = minimap_tiles.load::<MinimapTileRecord>(&mut conn)?;
    let mut stats = StorageStats::default();
    for tile in all_tiles {
        let webp_size = tile.image_size as i64;
        match tile.zoom {
            2 => {
                stats.total_original_size += tile.original_file_size.unwrap_or(0) as i64;
                stats.zoom2_count += 1;
                stats.zoom2_size += webp_size;
            }
            1 => {
                stats.zoom1_count += 1;
                stats.zoom1_size += webp_size;
            }
            0 => {
                stats.zoom0_count += 1;
                stats.zoom0_size += webp_size;
            }
            // Any other zoom level is ignored, matching the original
            // if/else-if chain which had no final else.
            _ => {}
        }
    }
    Ok(stats)
}
}
/// Aggregate storage statistics for minimap tiles, grouped by zoom level.
#[derive(Debug, Default)]
pub struct StorageStats {
    pub zoom2_count: i64,
    pub zoom1_count: i64,
    pub zoom0_count: i64,
    /// Combined size of the original source PNGs (recorded for zoom 2 only).
    pub total_original_size: i64,
    pub zoom2_size: i64,
    pub zoom1_size: i64,
    pub zoom0_size: i64,
}

impl StorageStats {
    /// Total size of all stored WebP blobs across every zoom level, in bytes.
    pub fn total_webp_size(&self) -> i64 {
        [self.zoom2_size, self.zoom1_size, self.zoom0_size]
            .iter()
            .sum()
    }

    /// WebP size as a percentage of the original PNG size.
    ///
    /// Returns 0.0 when no original size has been recorded, to avoid
    /// dividing by zero.
    pub fn compression_ratio(&self) -> f64 {
        match self.total_original_size {
            0 => 0.0,
            original => self.total_webp_size() as f64 / original as f64 * 100.0,
        }
    }
}

View File

@@ -8,6 +8,8 @@ mod fast_travel_database;
mod player_house_database;
mod trait_database;
mod shop_database;
mod minimap_database;
mod icon_database;
pub use item_database::ItemDatabase;
pub use npc_database::NpcDatabase;
@@ -19,3 +21,5 @@ pub use fast_travel_database::FastTravelDatabase;
pub use player_house_database::PlayerHouseDatabase;
pub use trait_database::TraitDatabase;
pub use shop_database::ShopDatabase;
pub use minimap_database::{MinimapDatabase, MinimapDatabaseError, StorageStats};
pub use icon_database::{IconDatabase, IconDatabaseError, IconStats};

View File

@@ -1,5 +1,7 @@
use crate::types::Npc;
use crate::xml_parser::{parse_npcs_xml, XmlParseError};
use crate::xml_parsers::{parse_npcs_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -118,8 +120,8 @@ impl NpcDatabase {
self.npcs.is_empty()
}
/// Prepare NPCs for SQL insertion
/// Returns a vector of tuples (id, name, json_data)
/// Prepare NPCs for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
self.npcs
.iter()
@@ -129,6 +131,59 @@ impl NpcDatabase {
})
.collect()
}
/// Save all NPCs to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::npcs;
let records: Vec<_> = self
.npcs
.iter()
.map(|npc| {
let json = serde_json::to_string(npc).unwrap_or_else(|_| "{}".to_string());
(
npcs::id.eq(npc.id),
npcs::name.eq(&npc.name),
npcs::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(npcs::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
/// Load all NPCs from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::npcs::dsl::*;
#[derive(Queryable)]
struct NpcRecord {
id: Option<i32>,
name: String,
data: String,
}
let records = npcs.load::<NpcRecord>(conn)?;
let mut loaded_npcs = Vec::new();
for record in records {
if let Ok(npc) = serde_json::from_str::<Npc>(&record.data) {
loaded_npcs.push(npc);
}
}
let mut db = Self::new();
db.add_npcs(loaded_npcs);
Ok(db)
}
}
impl Default for NpcDatabase {

View File

@@ -1,5 +1,7 @@
use crate::types::PlayerHouse;
use crate::xml_parser::{parse_player_houses_xml, XmlParseError};
use crate::xml_parsers::{parse_player_houses_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -74,16 +76,6 @@ impl PlayerHouseDatabase {
&self.houses
}
/// Get all visible houses (not hidden)
pub fn get_visible_houses(&self) -> Vec<&PlayerHouse> {
self.houses.iter().filter(|h| h.is_visible()).collect()
}
/// Get all hidden houses
pub fn get_hidden_houses(&self) -> Vec<&PlayerHouse> {
self.houses.iter().filter(|h| h.hidden).collect()
}
/// Get all free houses (price is 0)
pub fn get_free_houses(&self) -> Vec<&PlayerHouse> {
self.houses.iter().filter(|h| h.is_free()).collect()
@@ -150,16 +142,74 @@ impl PlayerHouseDatabase {
self.houses.is_empty()
}
/// Prepare player houses for SQL insertion
/// Returns a vector of tuples (id, name, price, json_data)
pub fn prepare_for_sql(&self) -> Vec<(i32, String, i32, String)> {
self.houses
/// Save all player houses to SQLite database (clears existing entries first)
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::player_houses;
// Clear existing entries
diesel::delete(player_houses::table).execute(conn)?;
let records: Vec<_> = self
.houses
.iter()
.map(|house| {
let json = serde_json::to_string(house).unwrap_or_else(|_| "{}".to_string());
(house.id, house.name.clone(), house.price, json)
(
player_houses::id.eq(house.id),
player_houses::name.eq(&house.name),
player_houses::description.eq(&house.description),
player_houses::pos_x.eq(house.pos_x),
player_houses::pos_z.eq(house.pos_z),
player_houses::price.eq(house.price),
)
})
.collect()
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(player_houses::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
/// Load all player houses from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::player_houses::dsl::*;
#[derive(Queryable)]
struct PlayerHouseRecord {
record_id: Option<i32>,
name: String,
description: String,
pos_x: f32,
pos_z: f32,
price: i32,
}
let records = player_houses.load::<PlayerHouseRecord>(conn)?;
let loaded_houses: Vec<PlayerHouse> = records
.into_iter()
.filter_map(|record| {
record.record_id.map(|house_id| {
PlayerHouse::new(
house_id,
record.name,
record.description,
record.pos_x,
record.pos_z,
record.price,
)
})
})
.collect();
let mut db = Self::new();
db.add_houses(loaded_houses);
Ok(db)
}
}

View File

@@ -1,5 +1,7 @@
use crate::types::Quest;
use crate::xml_parser::{parse_quests_xml, XmlParseError};
use crate::xml_parsers::{parse_quests_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -92,8 +94,8 @@ impl QuestDatabase {
self.quests.is_empty()
}
/// Prepare quests for SQL insertion
/// Returns a vector of tuples (id, name, json_data)
/// Prepare quests for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
self.quests
.iter()
@@ -103,6 +105,59 @@ impl QuestDatabase {
})
.collect()
}
/// Save all quests to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::quests;
let records: Vec<_> = self
.quests
.iter()
.map(|quest| {
let json = serde_json::to_string(quest).unwrap_or_else(|_| "{}".to_string());
(
quests::id.eq(quest.id),
quests::name.eq(&quest.name),
quests::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(quests::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
/// Load all quests from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::quests::dsl::*;
#[derive(Queryable)]
struct QuestRecord {
id: Option<i32>,
name: String,
data: String,
}
let records = quests.load::<QuestRecord>(conn)?;
let mut loaded_quests = Vec::new();
for record in records {
if let Ok(quest) = serde_json::from_str::<Quest>(&record.data) {
loaded_quests.push(quest);
}
}
let mut db = Self::new();
db.add_quests(loaded_quests);
Ok(db)
}
}
impl Default for QuestDatabase {

View File

@@ -1,5 +1,7 @@
use crate::types::Shop;
use crate::xml_parser::{parse_shops_xml, XmlParseError};
use crate::xml_parsers::{parse_shops_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -143,8 +145,8 @@ impl ShopDatabase {
self.shops.is_empty()
}
/// Prepare shops for SQL insertion
/// Returns a vector of tuples (shop_id, name, is_general_store, item_count, json_data)
/// Prepare shops for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, bool, usize, String)> {
self.shops
.iter()
@@ -160,6 +162,63 @@ impl ShopDatabase {
})
.collect()
}
/// Save all shops to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::shops;
let records: Vec<_> = self
.shops
.iter()
.map(|shop| {
let json = serde_json::to_string(shop).unwrap_or_else(|_| "{}".to_string());
(
shops::id.eq(shop.shop_id),
shops::name.eq(&shop.name),
shops::unique_items.eq(if shop.is_general_store { 0 } else { 1 }),
shops::item_count.eq(shop.items.len() as i32),
shops::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(shops::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
/// Load all shops from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::shops::dsl::*;
#[derive(Queryable)]
struct ShopRecord {
id: Option<i32>,
name: String,
unique_items: i32,
item_count: i32,
data: String,
}
let records = shops.load::<ShopRecord>(conn)?;
let mut loaded_shops = Vec::new();
for record in records {
if let Ok(shop) = serde_json::from_str::<Shop>(&record.data) {
loaded_shops.push(shop);
}
}
let mut db = Self::new();
db.add_shops(loaded_shops);
Ok(db)
}
}
impl Default for ShopDatabase {

View File

@@ -1,5 +1,7 @@
use crate::types::Trait;
use crate::xml_parser::{parse_traits_xml, XmlParseError};
use crate::xml_parsers::{parse_traits_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::HashMap;
use std::path::Path;
@@ -172,8 +174,8 @@ impl TraitDatabase {
self.traits.is_empty()
}
/// Prepare traits for SQL insertion
/// Returns a vector of tuples (id, name, skill, level, json_data)
/// Prepare traits for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, Option<String>, Option<i32>, String)> {
self.traits
.iter()
@@ -186,6 +188,63 @@ impl TraitDatabase {
})
.collect()
}
/// Save all traits to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::traits;
let records: Vec<_> = self
.traits
.iter()
.map(|trait_obj| {
let json = serde_json::to_string(trait_obj).unwrap_or_else(|_| "{}".to_string());
(
traits::id.eq(trait_obj.id),
traits::name.eq(&trait_obj.name),
traits::description.eq(Some(&trait_obj.description)),
traits::trainer_id.eq(None::<i32>), // TODO: determine actual trainer ID
traits::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(traits::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
/// Load all traits from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::traits::dsl::*;
#[derive(Queryable)]
struct TraitRecord {
id: Option<i32>,
name: String,
description: Option<String>,
trainer_id: Option<i32>,
data: String,
}
let records = traits.load::<TraitRecord>(conn)?;
let mut loaded_traits = Vec::new();
for record in records {
if let Ok(trait_obj) = serde_json::from_str::<Trait>(&record.data) {
loaded_traits.push(trait_obj);
}
}
let mut db = Self::new();
db.add_traits(loaded_traits);
Ok(db)
}
}
impl Default for TraitDatabase {

View File

@@ -0,0 +1,467 @@
use image::{DynamicImage, ImageError, Rgba, RgbaImage};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error;
/// Configuration for outline drawing on images with alpha channels.
///
/// Consumed by `ImageProcessor::process_image` to paint a colored border
/// around the opaque silhouette of an image before encoding.
#[derive(Debug, Clone)]
pub struct OutlineConfig {
    /// Outline color (RGBA)
    pub color: Rgba<u8>,
    /// Outline thickness in pixels
    pub thickness: u32,
    /// Alpha threshold for edge detection (0-255)
    /// Pixels with alpha >= threshold are considered solid
    pub alpha_threshold: u8,
}

impl OutlineConfig {
    /// Create a config with an explicit color and thickness; the alpha
    /// threshold starts at the default of 128.
    pub fn new(color: Rgba<u8>, thickness: u32) -> Self {
        OutlineConfig {
            color,
            thickness,
            alpha_threshold: 128,
        }
    }

    /// Convenience constructor: opaque white outline.
    pub fn white(thickness: u32) -> Self {
        OutlineConfig::new(Rgba([255, 255, 255, 255]), thickness)
    }

    /// Convenience constructor: opaque black outline.
    pub fn black(thickness: u32) -> Self {
        OutlineConfig::new(Rgba([0, 0, 0, 255]), thickness)
    }

    /// Builder-style setter for the alpha threshold used in edge detection.
    pub fn with_alpha_threshold(mut self, threshold: u8) -> Self {
        self.alpha_threshold = threshold;
        self
    }
}

impl Default for OutlineConfig {
    /// Default outline: 1px opaque white, threshold 128.
    fn default() -> Self {
        OutlineConfig::white(1)
    }
}
/// Errors produced by image loading, validation and WebP encoding.
#[derive(Debug, Error)]
pub enum ImageProcessingError {
    /// The `image` crate failed to open or decode the source file.
    #[error("Failed to load image: {0}")]
    ImageLoadError(#[from] ImageError),
    /// WebP encoder failure; the encoder's message is carried through.
    #[error("WebP encoding failed: {0}")]
    WebPError(String),
    /// Source image did not match the dimensions requested via
    /// `validate_dimensions` in `process_image`.
    #[error("Invalid image dimensions: expected {expected_width}x{expected_height}, got {actual_width}x{actual_height}")]
    InvalidDimensions {
        expected_width: u32,
        expected_height: u32,
        actual_width: u32,
        actual_height: u32,
    },
    /// Underlying filesystem error while reading image data.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// `process_image` was called with an empty `sizes` slice.
    #[error("No resolutions specified")]
    NoResolutions,
}
/// Converts source images into WebP blobs: lossy multi-resolution encoding
/// (per-instance quality), lossless encoding, outline drawing, and merging
/// of tile grids into single images.
pub struct ImageProcessor {
    quality: f32, // WebP quality (0.0-100.0)
}
impl ImageProcessor {
/// Create new processor with specified WebP quality
pub fn new(quality: f32) -> Self {
Self { quality }
}
/// Process image and generate WebP at multiple resolutions
///
/// # Arguments
/// * `image_path` - Path to the source image
/// * `sizes` - Slice of desired output sizes (width/height in pixels)
/// * `validate_dimensions` - Optional (width, height) to validate source image dimensions
/// * `outline` - Optional outline configuration to add edges around transparent areas
///
/// # Errors
/// `NoResolutions` when `sizes` is empty, `InvalidDimensions` when the
/// validation check fails; image-load and encoding errors are propagated.
///
/// # Returns
/// ProcessedImages containing WebP blobs for each requested size
pub fn process_image<P: AsRef<Path>>(
    &self,
    image_path: P,
    sizes: &[u32],
    validate_dimensions: Option<(u32, u32)>,
    outline: Option<&OutlineConfig>,
) -> Result<ProcessedImages, ImageProcessingError> {
    if sizes.is_empty() {
        return Err(ImageProcessingError::NoResolutions);
    }
    // Load the source once; all output sizes are derived from it.
    let mut source = image::open(image_path.as_ref())?;
    // Reject images whose dimensions don't match the expected size, if asked.
    if let Some((want_w, want_h)) = validate_dimensions {
        let (got_w, got_h) = (source.width(), source.height());
        if (got_w, got_h) != (want_w, want_h) {
            return Err(ImageProcessingError::InvalidDimensions {
                expected_width: want_w,
                expected_height: want_h,
                actual_width: got_w,
                actual_height: got_h,
            });
        }
    }
    // Draw the outline before resizing so it scales with every output size.
    if let Some(cfg) = outline {
        source = DynamicImage::ImageRgba8(self.apply_outline(source.to_rgba8(), cfg));
    }
    // Encode one square WebP per requested size, stopping at the first
    // encoding failure.
    let images = sizes
        .iter()
        .map(|&size| Ok((size, self.encode_webp(&source, size, size)?)))
        .collect::<Result<HashMap<_, _>, ImageProcessingError>>()?;
    Ok(ProcessedImages { images })
}
/// Apply outline effect to image based on alpha channel edges
///
/// Two passes over the image:
/// 1. Mark every transparent pixel (alpha < threshold) that has at least one
///    opaque 8-neighbor as an "edge" pixel.
/// 2. For each edge pixel, paint a (2*thickness+1)^2 square of
///    `config.color` onto the surrounding *transparent* pixels only —
///    opaque pixels are never overdrawn.
fn apply_outline(&self, img: RgbaImage, config: &OutlineConfig) -> RgbaImage {
    let (width, height) = img.dimensions();
    // Create a mask of edge pixels that need outline, indexed as
    // edge_mask[x][y] (outer vec is width, inner is height).
    let mut edge_mask = vec![vec![false; height as usize]; width as usize];
    // Detect edges: pixels that are transparent but adjacent to opaque pixels
    for y in 0..height {
        for x in 0..width {
            let pixel = img.get_pixel(x, y);
            // Skip if pixel is already opaque
            if pixel[3] >= config.alpha_threshold {
                continue;
            }
            // Check if any neighbor is opaque (this is an edge)
            let is_edge = self.has_opaque_neighbor(&img, x, y, config.alpha_threshold);
            if is_edge {
                edge_mask[x as usize][y as usize] = true;
            }
        }
    }
    // Apply outline with thickness; drawing happens on a copy so the edge
    // detection above is not affected by pixels painted during this pass.
    let thickness = config.thickness as i32;
    let mut outlined = img.clone();
    for y in 0..height {
        for x in 0..width {
            if edge_mask[x as usize][y as usize] {
                // Draw outline in a square pattern around this edge pixel
                for dy in -thickness..=thickness {
                    for dx in -thickness..=thickness {
                        let nx = x as i32 + dx;
                        let ny = y as i32 + dy;
                        // Check bounds
                        if nx >= 0 && nx < width as i32 && ny >= 0 && ny < height as i32 {
                            let nx = nx as u32;
                            let ny = ny as u32;
                            let current_pixel = outlined.get_pixel(nx, ny);
                            // Only draw outline on transparent pixels
                            if current_pixel[3] < config.alpha_threshold {
                                outlined.put_pixel(nx, ny, config.color);
                            }
                        }
                    }
                }
            }
        }
    }
    outlined
}
/// Check if a pixel has any opaque neighbor
///
/// Scans the 8 surrounding pixels (clipped at the image border) and returns
/// true as soon as one has alpha >= `alpha_threshold`. Neighbors that fall
/// outside the image are treated as transparent.
fn has_opaque_neighbor(
    &self,
    img: &RgbaImage,
    x: u32,
    y: u32,
    alpha_threshold: u8,
) -> bool {
    let (width, height) = img.dimensions();
    // All (dx, dy) offsets in the 3x3 neighborhood except the center.
    (-1i32..=1)
        .flat_map(|dy| (-1i32..=1).map(move |dx| (dx, dy)))
        .filter(|&(dx, dy)| !(dx == 0 && dy == 0))
        .any(|(dx, dy)| {
            let nx = x as i32 + dx;
            let ny = y as i32 + dy;
            nx >= 0
                && ny >= 0
                && nx < width as i32
                && ny < height as i32
                && img.get_pixel(nx as u32, ny as u32)[3] >= alpha_threshold
        })
}
/// Encode image to WebP at specified dimensions
///
/// Resizes with Lanczos3 only when the source dimensions differ from the
/// requested ones, then lossy-encodes RGBA data at `self.quality`.
fn encode_webp(
    &self,
    img: &DynamicImage,
    width: u32,
    height: u32,
) -> Result<Vec<u8>, ImageProcessingError> {
    let needs_resize = img.width() != width || img.height() != height;
    let resized = if needs_resize {
        img.resize_exact(width, height, image::imageops::FilterType::Lanczos3)
    } else {
        img.clone()
    };
    // The WebP encoder consumes raw RGBA bytes.
    let rgba = resized.to_rgba8();
    let (w, h) = rgba.dimensions();
    let encoded = webp::Encoder::from_rgba(rgba.as_raw(), w, h).encode(self.quality);
    Ok(encoded.to_vec())
}
/// Encode image to lossless WebP (no quality parameter involved).
pub fn encode_webp_lossless(
    img: &RgbaImage,
) -> Result<Vec<u8>, ImageProcessingError> {
    let (w, h) = img.dimensions();
    let bytes = webp::Encoder::from_rgba(img.as_raw(), w, h)
        .encode_lossless()
        .to_vec();
    Ok(bytes)
}
/// Create a black tile of specified size
pub fn create_black_tile(size: u32) -> RgbaImage {
image::ImageBuffer::from_pixel(size, size, Rgba([0, 0, 0, 255]))
}
/// Merge multiple tiles into a single image
///
/// # Arguments
/// * `tiles` - HashMap of (dx, dy) grid offsets to tile image data (WebP format)
/// * `grid_x` - Number of tiles in X direction (must be > 0)
/// * `grid_y` - Number of tiles in Y direction (must be > 0)
/// * `_tile_size` - Size of each original tile; currently unused because every
///   tile is rescaled from its decoded dimensions (kept for call compatibility)
/// * `output_size` - Size of the output merged image
///
/// # Returns
/// A merged RgbaImage containing all tiles positioned correctly. Grid cells
/// with no (or undecodable) tile data stay black, and the Y axis is flipped:
/// dy = 0 is placed on the bottom row of the output.
pub fn merge_tiles(
    tiles: &HashMap<(i32, i32), Vec<u8>>,
    grid_x: i32,
    grid_y: i32,
    _tile_size: u32,
    output_size: u32,
) -> Result<RgbaImage, ImageProcessingError> {
    // Create output image (opaque black background)
    let mut merged = Self::create_black_tile(output_size);
    // Calculate size each tile should be in the output
    let scaled_tile_size = output_size / grid_x.max(grid_y) as u32;
    // Process each tile in the grid
    for dy in 0..grid_y {
        for dx in 0..grid_x {
            if let Some(webp_data) = tiles.get(&(dx, dy)) {
                // Decode WebP tile; a tile that fails to decode is skipped
                // (its cell remains black) rather than aborting the merge.
                if let Ok(tile_img) = image::load_from_memory_with_format(
                    webp_data,
                    image::ImageFormat::WebP,
                ) {
                    // Resize tile to fit in output
                    let resized = tile_img.resize_exact(
                        scaled_tile_size,
                        scaled_tile_size,
                        image::imageops::FilterType::Lanczos3,
                    ).to_rgba8();
                    // Calculate position in output image
                    let offset_x = dx as u32 * scaled_tile_size;
                    // Invert Y-axis to match expected coordinate system
                    let offset_y = (grid_y - 1 - dy) as u32 * scaled_tile_size;
                    // Copy pixels into merged image
                    for y in 0..scaled_tile_size {
                        for x in 0..scaled_tile_size {
                            if let Some(pixel) = resized.get_pixel_checked(x, y) {
                                merged.put_pixel(offset_x + x, offset_y + y, *pixel);
                            }
                        }
                    }
                }
            }
            // If tile doesn't exist, it stays black (already initialized)
        }
    }
    Ok(merged)
}
}
impl Default for ImageProcessor {
fn default() -> Self {
Self::new(85.0) // 85% quality default
}
}
/// Container for processed WebP images at multiple resolutions
#[derive(Debug)]
pub struct ProcessedImages {
    /// Map of size (in pixels) to WebP blob data
    pub images: HashMap<u32, Vec<u8>>,
}

impl ProcessedImages {
    /// Get WebP blob for a specific size
    pub fn get(&self, size: u32) -> Option<&Vec<u8>> {
        self.images.get(&size)
    }

    /// Get total size of all WebP blobs in bytes
    pub fn total_size(&self) -> usize {
        self.images.values().fold(0, |total, blob| total + blob.len())
    }

    /// Get all available sizes, sorted ascending
    pub fn sizes(&self) -> Vec<u32> {
        let mut available: Vec<u32> = self.images.keys().copied().collect();
        available.sort_unstable();
        available
    }

    /// Get number of resolutions stored
    pub fn len(&self) -> usize {
        self.images.len()
    }

    /// Check if no resolutions are stored
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Constructor and Default must store the requested lossy quality verbatim.
    #[test]
    fn test_processor_creation() {
        let processor = ImageProcessor::default();
        assert_eq!(processor.quality, 85.0);
        let custom = ImageProcessor::new(90.0);
        assert_eq!(custom.quality, 90.0);
    }

    // ProcessedImages accessors: len / total_size / get / sizes (sorted).
    #[test]
    fn test_processed_images() {
        let mut images = HashMap::new();
        images.insert(512, vec![1, 2, 3]);
        images.insert(256, vec![4, 5]);
        let processed = ProcessedImages { images };
        assert_eq!(processed.len(), 2);
        assert_eq!(processed.total_size(), 5);
        assert_eq!(processed.get(512), Some(&vec![1, 2, 3]));
        assert_eq!(processed.get(128), None);
        let sizes = processed.sizes();
        assert_eq!(sizes, vec![256, 512]);
    }

    // Default config: 1px white outline with the 128 alpha threshold.
    #[test]
    fn test_outline_config_default() {
        let config = OutlineConfig::default();
        assert_eq!(config.thickness, 1);
        assert_eq!(config.color, Rgba([255, 255, 255, 255])); // White
        assert_eq!(config.alpha_threshold, 128);
    }

    // Custom color/thickness still gets the default alpha threshold.
    #[test]
    fn test_outline_config_custom() {
        let red = Rgba([255, 0, 0, 255]);
        let config = OutlineConfig::new(red, 2);
        assert_eq!(config.thickness, 2);
        assert_eq!(config.color, red);
        assert_eq!(config.alpha_threshold, 128);
    }

    // white()/black() presets and the with_alpha_threshold builder.
    #[test]
    fn test_outline_config_builders() {
        let white = OutlineConfig::white(3);
        assert_eq!(white.color, Rgba([255, 255, 255, 255]));
        assert_eq!(white.thickness, 3);
        let black = OutlineConfig::black(2).with_alpha_threshold(200);
        assert_eq!(black.color, Rgba([0, 0, 0, 255]));
        assert_eq!(black.thickness, 2);
        assert_eq!(black.alpha_threshold, 200);
    }

    // has_opaque_neighbor: true iff at least one of the 8 neighbors is opaque.
    #[test]
    fn test_outline_edge_detection() {
        let processor = ImageProcessor::default();
        // Create a simple 3x3 image with a transparent pixel in the center
        let mut img = RgbaImage::new(3, 3);
        // Fill with opaque white
        for y in 0..3 {
            for x in 0..3 {
                img.put_pixel(x, y, Rgba([255, 255, 255, 255]));
            }
        }
        // Make center transparent
        img.put_pixel(1, 1, Rgba([0, 0, 0, 0]));
        // Test that center pixel has opaque neighbors
        assert!(processor.has_opaque_neighbor(&img, 1, 1, 128));
        // Test a fully opaque pixel - should not have any transparent neighbors
        // but the function checks if a pixel has opaque neighbors, not transparent ones
        assert!(processor.has_opaque_neighbor(&img, 0, 0, 128));
        // Create a new image that's fully transparent
        let mut transparent_img = RgbaImage::new(3, 3);
        for y in 0..3 {
            for x in 0..3 {
                transparent_img.put_pixel(x, y, Rgba([0, 0, 0, 0]));
            }
        }
        // A transparent pixel with all transparent neighbors should return false
        assert!(!processor.has_opaque_neighbor(&transparent_img, 1, 1, 128));
    }
}

View File

@@ -3,7 +3,7 @@ use crate::types::{
ItemCategory, ItemType, ItemXpBoost, PermanentStatBoost, SkillType, Stat, StatType,
Tool, MAX_STACK,
};
use crate::xml_parser::XmlParseError;
use crate::xml_parsers::XmlParseError;
use quick_xml::events::Event;
use quick_xml::reader::Reader;
use std::collections::{HashMap, HashSet};

View File

@@ -51,8 +51,10 @@
pub mod types;
pub mod databases;
mod xml_parser;
pub mod schema;
mod xml_parsers;
mod item_loader;
mod image_processor;
pub use databases::{
ItemDatabase,
@@ -65,6 +67,12 @@ pub use databases::{
PlayerHouseDatabase,
TraitDatabase,
ShopDatabase,
MinimapDatabase,
MinimapDatabaseError,
StorageStats,
IconDatabase,
IconDatabaseError,
IconStats,
};
pub use types::{
// Items
@@ -86,6 +94,12 @@ pub use types::{
MAX_STACK,
// Other types
InteractableResource,
InteractableTeleporter,
InteractableWorkbench,
LootSpawner,
MapIcon,
MapIconType,
MapNameChanger,
Npc,
NpcStat,
NpcLevel,
@@ -109,5 +123,25 @@ pub use types::{
TraitTrainer,
Shop,
ShopItem,
// Minimap
MinimapTile,
MinimapTileRecord,
NewMinimapTile,
// Icons
AbilityIconRecord,
NewAbilityIcon,
BuffIconRecord,
NewBuffIcon,
TraitIconRecord,
NewTraitIcon,
PlayerHouseIconRecord,
NewPlayerHouseIcon,
StatIconRecord,
NewStatIcon,
AchievementIconRecord,
NewAchievementIcon,
GeneralIconRecord,
NewGeneralIcon,
};
pub use xml_parser::XmlParseError;
pub use xml_parsers::XmlParseError;
pub use image_processor::{ImageProcessor, ImageProcessingError, ProcessedImages, OutlineConfig};

View File

@@ -6,11 +6,14 @@
//! 3. Extracting typeId and transform positions
//! 4. Writing resource data to an output file
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase, InteractableResource};
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase, MapDatabase, FastTravelDatabase, PlayerHouseDatabase, TraitDatabase, ShopDatabase, InteractableResource, MinimapDatabase};
use unity_parser::UnityProject;
use std::path::Path;
use unity_parser::log::DedupLogger;
use log::{info, error, LevelFilter};
use log::{info, error, warn, LevelFilter};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -25,26 +28,102 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
// Load items from XML
info!("📚 Loading game data from XML...");
let items_path = "/home/connor/repos/CBAssets/Data/XMLs/Items/Items.xml";
let cb_assets_path = env::var("CB_ASSETS_PATH").unwrap_or_else(|_| "/home/connor/repos/CBAssets".to_string());
let items_path = format!("{}/Data/XMLs/Items/Items.xml", cb_assets_path);
let item_db = ItemDatabase::load_from_xml(items_path)?;
info!("✅ Loaded {} items", item_db.len());
let npcs_path = "/home/connor/repos/CBAssets/Data/XMLs/Npcs/NPCInfo.xml";
let npcs_path = format!("{}/Data/XMLs/Npcs/NPCInfo.xml", cb_assets_path);
let npc_db = NpcDatabase::load_from_xml(npcs_path)?;
info!("✅ Loaded {} NPCs", npc_db.len());
let quests_path = "/home/connor/repos/CBAssets/Data/XMLs/Quests/Quests.xml";
let quests_path = format!("{}/Data/XMLs/Quests/Quests.xml", cb_assets_path);
let quest_db = QuestDatabase::load_from_xml(quests_path)?;
info!("✅ Loaded {} quests", quest_db.len());
let harvestables_path = "/home/connor/repos/CBAssets/Data/XMLs/Harvestables/HarvestableInfo.xml";
let harvestables_path = format!("{}/Data/XMLs/Harvestables/HarvestableInfo.xml", cb_assets_path);
let harvestable_db = HarvestableDatabase::load_from_xml(harvestables_path)?;
info!("✅ Loaded {} harvestables", harvestable_db.len());
let loot_path = "/home/connor/repos/CBAssets/Data/XMLs/Loot/Loot.xml";
let loot_path = format!("{}/Data/XMLs/Loot/Loot.xml", cb_assets_path);
let loot_db = LootDatabase::load_from_xml(loot_path)?;
info!("✅ Loaded {} loot tables", loot_db.len());
let maps_path = format!("{}/Data/XMLs/Maps/Maps.xml", cb_assets_path);
let map_db = MapDatabase::load_from_xml(maps_path)?;
info!("✅ Loaded {} maps", map_db.len());
let fast_travel_dir = format!("{}/Data/XMLs", cb_assets_path);
let fast_travel_db = FastTravelDatabase::load_from_directory(fast_travel_dir)?;
info!("✅ Loaded {} fast travel locations", fast_travel_db.len());
let player_houses_path = format!("{}/Data/XMLs/PlayerHouses/PlayerHouses.xml", cb_assets_path);
let player_house_db = PlayerHouseDatabase::load_from_xml(player_houses_path)?;
info!("✅ Loaded {} player houses", player_house_db.len());
let traits_path = format!("{}/Data/XMLs/Traits/Traits.xml", cb_assets_path);
let trait_db = TraitDatabase::load_from_xml(traits_path)?;
info!("✅ Loaded {} traits", trait_db.len());
let shops_path = format!("{}/Data/XMLs/Shops/Shops.xml", cb_assets_path);
let shop_db = ShopDatabase::load_from_xml(shops_path)?;
info!("✅ Loaded {} shops", shop_db.len());
// Save to SQLite database
info!("\n💾 Saving game data to SQLite database...");
let database_url = env::var("DATABASE_URL").unwrap_or_else(|_| "../cursebreaker.db".to_string());
let mut conn = SqliteConnection::establish(&database_url)?;
match item_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} items to database", count),
Err(e) => warn!("⚠️ Failed to save items: {}", e),
}
match npc_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} NPCs to database", count),
Err(e) => warn!("⚠️ Failed to save NPCs: {}", e),
}
match quest_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} quests to database", count),
Err(e) => warn!("⚠️ Failed to save quests: {}", e),
}
match harvestable_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} harvestables to database", count),
Err(e) => warn!("⚠️ Failed to save harvestables: {}", e),
}
match loot_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} loot tables to database", count),
Err(e) => warn!("⚠️ Failed to save loot tables: {}", e),
}
match map_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} maps to database", count),
Err(e) => warn!("⚠️ Failed to save maps: {}", e),
}
match fast_travel_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} fast travel locations to database", count),
Err(e) => warn!("⚠️ Failed to save fast travel locations: {}", e),
}
match player_house_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} player houses to database", count),
Err(e) => warn!("⚠️ Failed to save player houses: {}", e),
}
match trait_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} traits to database", count),
Err(e) => warn!("⚠️ Failed to save traits: {}", e),
}
match shop_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} shops to database", count),
Err(e) => warn!("⚠️ Failed to save shops: {}", e),
}
// Print statistics
info!("\n📊 Game Data Statistics:");
info!(" Items:");
@@ -69,7 +148,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
info!(" • Tables with conditional drops: {}", loot_db.get_conditional_tables().len());
// Initialize Unity project once - scans entire project for GUID mappings
let project_root = Path::new("/home/connor/repos/CBAssets");
let project_root = Path::new(&cb_assets_path);
info!("\n📦 Initializing Unity project from: {}", project_root.display());
let project = UnityProject::from_path(project_root)?;
@@ -117,5 +196,32 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
log::logger().flush();
// Process minimap tiles
info!("\n🗺️ Processing minimap tiles...");
let minimap_db = MinimapDatabase::new(database_url.clone());
let minimap_path = format!("{}/Data/Textures/MinimapSquares", cb_assets_path);
match minimap_db.load_from_directory(&minimap_path, &cb_assets_path) {
Ok(count) => {
info!("✅ Processed {} minimap tiles", count);
if let Ok(stats) = minimap_db.get_storage_stats() {
info!(" Storage Statistics:");
info!(" • Original PNG total: {} MB", stats.total_original_size / 1_048_576);
info!(" • WebP total: {} MB", stats.total_webp_size() / 1_048_576);
info!(" • Compression ratio: {:.2}%", stats.compression_ratio());
}
if let Ok(bounds) = minimap_db.get_map_bounds() {
info!(" Map Bounds:");
info!(" • Min (x,y): {:?}", bounds.0);
info!(" • Max (x,y): {:?}", bounds.1);
}
}
Err(e) => {
error!("Failed to process minimap tiles: {}", e);
}
}
Ok(())
}

View File

@@ -0,0 +1,342 @@
// @generated automatically by Diesel CLI.
diesel::table! {
ability_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
achievement_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
buff_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
crafting_recipe_items (recipe_id, item_id) {
recipe_id -> Integer,
item_id -> Integer,
amount -> Integer,
}
}
diesel::table! {
crafting_recipes (id) {
id -> Nullable<Integer>,
product_item_id -> Integer,
skill -> Text,
level -> Integer,
workbench_id -> Integer,
xp -> Integer,
unlocked_by_default -> Integer,
checks -> Nullable<Text>,
}
}
diesel::table! {
fast_travel_locations (name) {
name -> Nullable<Text>,
pos_x -> Float,
pos_z -> Float,
travel_type -> Text,
unlocked -> Integer,
connections -> Nullable<Text>,
checks -> Nullable<Text>,
}
}
diesel::table! {
general_icons (name) {
name -> Text,
original_width -> Integer,
original_height -> Integer,
icon_original -> Nullable<Binary>,
icon_256 -> Nullable<Binary>,
icon_64 -> Nullable<Binary>,
icon_32 -> Nullable<Binary>,
}
}
diesel::table! {
harvestable_drops (id) {
id -> Nullable<Integer>,
harvestable_id -> Integer,
item_id -> Integer,
minamount -> Integer,
maxamount -> Integer,
droprate -> Integer,
droprateboost -> Integer,
amountboost -> Integer,
comment -> Text,
}
}
diesel::table! {
harvestables (id) {
id -> Integer,
name -> Text,
description -> Text,
comment -> Text,
level -> Integer,
skill -> Text,
tool -> Text,
min_health -> Integer,
max_health -> Integer,
harvesttime -> Integer,
hittime -> Integer,
respawntime -> Integer,
}
}
diesel::table! {
item_stats (item_id, stat_type) {
item_id -> Integer,
stat_type -> Text,
value -> Float,
}
}
diesel::table! {
    // Item definitions imported from Items.xml (see ItemDatabase::save_to_db in main).
    // NOTE(review): this schema file is marked `@generated` by Diesel CLI, so
    // hand-written comments here are lost on the next `diesel print-schema` run.
    items (id) {
        // Rowid primary key; diesel maps SQLite's implicit rowid alias as Nullable.
        id -> Nullable<Integer>,
        name -> Text,
        // Serialized item payload — presumably the remaining XML fields; TODO confirm
        // against ItemDatabase::save_to_db.
        data -> Text,
        item_type -> Text,
        level -> Integer,
        price -> Integer,
        max_stack -> Integer,
        storage_size -> Integer,
        skill -> Text,
        tool -> Text,
        description -> Text,
        // Boolean flags stored as Integer (SQLite has no native BOOLEAN type).
        two_handed -> Integer,
        undroppable -> Integer,
        undroppable_on_death -> Integer,
        unequip_destroy -> Integer,
        generate_icon -> Integer,
        hide_milestone -> Integer,
        cannot_craft_exceptional -> Integer,
        storage_all_items -> Integer,
        // Cross-reference ids into ability/book data; 0 presumably means "none" — TODO confirm.
        ability_id -> Integer,
        special_ability -> Integer,
        learn_ability_id -> Integer,
        book_id -> Integer,
        swap_item -> Integer,
        // Pre-rendered icon blobs at three sizes; absent until icon generation runs.
        icon_large -> Nullable<Binary>,
        icon_medium -> Nullable<Binary>,
        icon_small -> Nullable<Binary>,
    }
}
diesel::table! {
loot_tables (table_id) {
table_id -> Nullable<Text>,
npc_id -> Nullable<Text>,
data -> Text,
}
}
diesel::table! {
maps (scene_id) {
scene_id -> Nullable<Text>,
name -> Text,
data -> Text,
}
}
diesel::table! {
    // One processed minimap image per tile, stored as an encoded blob
    // (presumably WebP — see MinimapDatabase processing in main; TODO confirm).
    // NOTE(review): `@generated` by Diesel CLI — comments are lost on regeneration.
    minimap_tiles (id) {
        id -> Nullable<Integer>,
        // Tile grid coordinates and zoom level (coordinates come from the filename).
        x -> Integer,
        y -> Integer,
        zoom -> Integer,
        // Pixel dimensions of the stored image.
        width -> Integer,
        height -> Integer,
        // Byte size of the original source PNG, when recorded.
        original_file_size -> Nullable<Integer>,
        // Encoded image bytes and their length in bytes.
        image -> Binary,
        image_size -> Integer,
        processed_at -> Timestamp,
        // Filesystem path the tile was loaded from.
        source_path -> Text,
    }
}
diesel::table! {
npcs (id) {
id -> Nullable<Integer>,
name -> Text,
data -> Text,
}
}
diesel::table! {
player_house_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
player_houses (id) {
id -> Nullable<Integer>,
name -> Text,
description -> Text,
pos_x -> Float,
pos_z -> Float,
price -> Integer,
}
}
diesel::table! {
quests (id) {
id -> Nullable<Integer>,
name -> Text,
data -> Text,
}
}
diesel::table! {
resource_icons (item_id) {
item_id -> Integer,
name -> Text,
icon_64 -> Binary,
}
}
diesel::table! {
shops (id) {
id -> Nullable<Integer>,
name -> Text,
unique_items -> Integer,
item_count -> Integer,
data -> Text,
}
}
diesel::table! {
stat_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
trait_icons (name) {
name -> Text,
icon -> Binary,
}
}
diesel::table! {
traits (id) {
id -> Nullable<Integer>,
name -> Text,
description -> Nullable<Text>,
trainer_id -> Nullable<Integer>,
data -> Text,
}
}
diesel::table! {
world_loot (pos_x, pos_y) {
pos_x -> Float,
pos_y -> Float,
item_id -> Integer,
amount -> Integer,
respawn_time -> Integer,
visibility_checks -> Text,
}
}
diesel::table! {
world_map_icons (pos_x, pos_y) {
pos_x -> Float,
pos_y -> Float,
icon_type -> Integer,
icon_size -> Integer,
icon -> Text,
text -> Text,
font_size -> Integer,
hover_text -> Text,
}
}
diesel::table! {
world_map_name_changers (pos_x, pos_y) {
pos_x -> Float,
pos_y -> Float,
map_name -> Text,
}
}
diesel::table! {
world_resources (item_id, pos_x, pos_y) {
item_id -> Integer,
pos_x -> Float,
pos_y -> Float,
}
}
diesel::table! {
    // Teleporter placements keyed by world position. tp_x/tp_y look like the
    // destination coordinates, Nullable when no target transform was resolved
    // — TODO confirm against the teleporter scene exporter.
    // NOTE(review): `@generated` by Diesel CLI — comments are lost on regeneration.
    world_teleporters (pos_x, pos_y) {
        pos_x -> Float,
        pos_y -> Float,
        tp_x -> Nullable<Float>,
        tp_y -> Nullable<Float>,
    }
}
diesel::table! {
world_workbenches (pos_x, pos_y) {
pos_x -> Float,
pos_y -> Float,
workbench_id -> Integer,
}
}
diesel::joinable!(crafting_recipe_items -> crafting_recipes (recipe_id));
diesel::joinable!(crafting_recipe_items -> items (item_id));
diesel::joinable!(crafting_recipes -> items (product_item_id));
diesel::joinable!(harvestable_drops -> harvestables (harvestable_id));
diesel::joinable!(harvestable_drops -> items (item_id));
diesel::joinable!(item_stats -> items (item_id));
diesel::allow_tables_to_appear_in_same_query!(
ability_icons,
achievement_icons,
buff_icons,
crafting_recipe_items,
crafting_recipes,
fast_travel_locations,
general_icons,
harvestable_drops,
harvestables,
item_stats,
items,
loot_tables,
maps,
minimap_tiles,
npcs,
player_house_icons,
player_houses,
quests,
resource_icons,
shops,
stat_icons,
trait_icons,
traits,
world_loot,
world_map_icons,
world_map_name_changers,
world_resources,
world_teleporters,
world_workbenches,
);

View File

@@ -30,8 +30,11 @@ pub struct FastTravelLocation {
/// Display name
pub name: String,
/// 3D position in world space (x,y,z)
pub position: String,
/// X position in world space
pub pos_x: f32,
/// Z position in world space
pub pos_z: f32,
/// Type of fast travel
pub travel_type: FastTravelType,
@@ -49,11 +52,12 @@ pub struct FastTravelLocation {
impl FastTravelLocation {
/// Create a new FastTravelLocation with required fields
pub fn new(id: i32, name: String, position: String, travel_type: FastTravelType) -> Self {
pub fn new(id: i32, name: String, pos_x: f32, pos_z: f32, travel_type: FastTravelType) -> Self {
Self {
id,
name,
position,
pos_x,
pos_z,
travel_type,
unlocked: false,
connections: None,
@@ -61,19 +65,9 @@ impl FastTravelLocation {
}
}
/// Parse position into (x, y, z) coordinates
pub fn get_position(&self) -> Option<(f32, f32, f32)> {
let parts: Vec<&str> = self.position.split(',').collect();
if parts.len() == 3 {
if let (Ok(x), Ok(y), Ok(z)) = (
parts[0].parse::<f32>(),
parts[1].parse::<f32>(),
parts[2].parse::<f32>(),
) {
return Some((x, y, z));
}
}
None
/// Get position as (x, z) tuple
pub fn get_position(&self) -> (f32, f32) {
(self.pos_x, self.pos_z)
}
/// Get list of connected location IDs

View File

@@ -1,4 +1,5 @@
use serde::{Deserialize, Serialize};
use super::item::{SkillType, Tool};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Harvestable {
@@ -7,40 +8,41 @@ pub struct Harvestable {
pub name: String,
// Basic attributes
pub actionname: Option<String>,
pub desc: Option<String>,
pub comment: Option<String>,
pub level: Option<i32>,
pub skill: Option<String>,
pub tool: Option<String>,
pub actionname: String,
pub desc: String,
pub comment: String,
pub level: i32,
pub skill: SkillType,
pub tool: Tool,
// Health (can be range like "3-5" or single value)
pub health: Option<String>,
// Health
pub min_health: i32,
pub max_health: i32,
// Timing
pub harvesttime: Option<i32>,
pub hittime: Option<i32>,
pub respawntime: Option<i32>,
pub harvesttime: i32,
pub hittime: i32,
pub respawntime: i32,
// Audio
pub harvestsfx: Option<String>,
pub endsfx: Option<String>,
pub receiveitemsfx: Option<String>,
pub harvestsfx: String,
pub endsfx: String,
pub receiveitemsfx: String,
// Visuals
pub animation: Option<String>,
pub takehitanimation: Option<String>,
pub endgfx: Option<String>,
pub animation: String,
pub takehitanimation: String,
pub endgfx: String,
// Behavior flags
pub tree: Option<i32>,
pub hidemilestone: Option<i32>,
pub nohighlight: Option<i32>,
pub hideminimap: Option<i32>,
pub noleftclickinteract: Option<i32>,
pub tree: bool,
pub hidemilestone: bool,
pub nohighlight: bool,
pub hideminimap: bool,
pub noleftclickinteract: bool,
// Interaction
pub interactdistance: Option<String>,
pub interactdistance: String,
// Drops
pub drops: Vec<HarvestableDrop>,
@@ -49,14 +51,14 @@ pub struct Harvestable {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HarvestableDrop {
pub id: i32,
pub minamount: Option<i32>,
pub maxamount: Option<i32>,
pub droprate: Option<i32>,
pub droprateboost: Option<i32>,
pub amountboost: Option<i32>,
pub checks: Option<String>,
pub comment: Option<String>,
pub dontconsumehealth: Option<i32>,
pub minamount: i32,
pub maxamount: i32,
pub droprate: i32,
pub droprateboost: i32,
pub amountboost: i32,
pub checks: String,
pub comment: String,
pub dontconsumehealth: bool,
}
impl Harvestable {
@@ -64,45 +66,46 @@ impl Harvestable {
Self {
typeid,
name,
actionname: None,
desc: None,
comment: None,
level: None,
skill: None,
tool: None,
health: None,
harvesttime: None,
hittime: None,
respawntime: None,
harvestsfx: None,
endsfx: None,
receiveitemsfx: None,
animation: None,
takehitanimation: None,
endgfx: None,
tree: None,
hidemilestone: None,
nohighlight: None,
hideminimap: None,
noleftclickinteract: None,
interactdistance: None,
actionname: String::new(),
desc: String::new(),
comment: String::new(),
level: 0,
skill: SkillType::None,
tool: Tool::None,
min_health: 0,
max_health: 0,
harvesttime: 0,
hittime: 0,
respawntime: 0,
harvestsfx: String::new(),
endsfx: String::new(),
receiveitemsfx: String::new(),
animation: String::new(),
takehitanimation: String::new(),
endgfx: String::new(),
tree: false,
hidemilestone: false,
nohighlight: false,
hideminimap: false,
noleftclickinteract: false,
interactdistance: String::new(),
drops: Vec::new(),
}
}
/// Check if this is a tree
pub fn is_tree(&self) -> bool {
self.tree == Some(1)
self.tree
}
/// Check if this requires a tool
pub fn requires_tool(&self) -> bool {
self.tool.is_some()
!matches!(self.tool, Tool::None)
}
/// Get the skill associated with this harvestable
pub fn get_skill(&self) -> Option<&str> {
self.skill.as_deref()
pub fn get_skill(&self) -> SkillType {
self.skill
}
/// Get all item IDs that can drop from this harvestable

View File

@@ -0,0 +1,134 @@
use diesel::prelude::*;
use crate::schema::{
ability_icons, buff_icons, trait_icons, player_house_icons, stat_icons,
achievement_icons, general_icons
};
/// Diesel queryable model for ability_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = ability_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct AbilityIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for ability_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = ability_icons)]
pub struct NewAbilityIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for buff_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = buff_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct BuffIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for buff_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = buff_icons)]
pub struct NewBuffIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for trait_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = trait_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct TraitIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for trait_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = trait_icons)]
pub struct NewTraitIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for player_house_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = player_house_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct PlayerHouseIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for player_house_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = player_house_icons)]
pub struct NewPlayerHouseIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for stat_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = stat_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct StatIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for stat_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = stat_icons)]
pub struct NewStatIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for achievement_icons table
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = achievement_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct AchievementIconRecord {
pub name: String,
pub icon: Vec<u8>,
}
/// Diesel insertable model for achievement_icons table
#[derive(Insertable, Debug)]
#[diesel(table_name = achievement_icons)]
pub struct NewAchievementIcon<'a> {
pub name: &'a str,
pub icon: &'a [u8],
}
/// Diesel queryable model for general_icons table: a named icon stored at
/// several resolutions, each rendition optional.
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = general_icons)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct GeneralIconRecord {
    pub name: String,
    // Dimensions of the source image before any downscaling.
    pub original_width: i32,
    pub original_height: i32,
    // Encoded icon bytes at original / 256 / 64 / 32 px; any of them may be absent.
    pub icon_original: Option<Vec<u8>>,
    pub icon_256: Option<Vec<u8>>,
    pub icon_64: Option<Vec<u8>>,
    pub icon_32: Option<Vec<u8>>,
}
/// Diesel insertable model for general_icons table; borrows the icon blobs
/// so inserting does not copy the image data.
#[derive(Insertable, Debug)]
#[diesel(table_name = general_icons)]
pub struct NewGeneralIcon<'a> {
    pub name: &'a str,
    pub original_width: i32,
    pub original_height: i32,
    pub icon_original: Option<&'a [u8]>,
    pub icon_256: Option<&'a [u8]>,
    pub icon_64: Option<&'a [u8]>,
    pub icon_32: Option<&'a [u8]>,
}

View File

@@ -0,0 +1,35 @@
use diesel::prelude::*;
use crate::schema::minimap_tiles;
/// Diesel queryable model (for SELECT queries) against `minimap_tiles`.
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = minimap_tiles)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct MinimapTileRecord {
    // Rowid primary key; Option because the column is Nullable in the schema.
    pub id: Option<i32>,
    // Tile grid coordinates and zoom level.
    pub x: i32,
    pub y: i32,
    pub zoom: i32,
    // Stored image dimensions in pixels.
    pub width: i32,
    pub height: i32,
    // Byte size of the original source file, when recorded.
    pub original_file_size: Option<i32>,
    // Encoded image bytes and their length in bytes.
    pub image: Vec<u8>,
    pub image_size: i32,
    pub processed_at: String, // SQLite TIMESTAMP as String
    pub source_path: String,
}
/// Diesel insertable model (for INSERT queries). `id` and `processed_at` are
/// omitted — presumably supplied by the database (rowid / column default);
/// TODO confirm against the migration.
#[derive(Insertable, Debug)]
#[diesel(table_name = minimap_tiles)]
pub struct NewMinimapTile<'a> {
    pub x: i32,
    pub y: i32,
    pub zoom: i32,
    pub width: i32,
    pub height: i32,
    pub original_file_size: Option<i32>,
    // Borrowed image bytes so inserting does not copy the blob.
    pub image: &'a [u8],
    pub image_size: i32,
    pub source_path: &'a str,
}

View File

@@ -0,0 +1,61 @@
use serde::{Deserialize, Serialize};
/// A single minimap tile: grid coordinates, the source file path, and the
/// same image re-encoded as WebP at four resolutions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MinimapTile {
    /// X coordinate from filename
    pub x: i32,
    /// Y coordinate from filename
    pub y: i32,
    /// Original dimensions
    pub original_width: i32,
    pub original_height: i32,
    /// Source file path
    pub source_path: String,
    /// WebP blob at 512x512
    #[serde(skip)] // binary payloads are excluded from (de)serialization
    pub webp_512: Vec<u8>,
    /// WebP blob at 256x256
    #[serde(skip)]
    pub webp_256: Vec<u8>,
    /// WebP blob at 128x128
    #[serde(skip)]
    pub webp_128: Vec<u8>,
    /// WebP blob at 64x64
    #[serde(skip)]
    pub webp_64: Vec<u8>,
}
impl MinimapTile {
    /// Build an unprocessed tile. Dimensions default to 512x512 and every
    /// WebP buffer starts empty until the encoding pipeline fills it.
    pub fn new(x: i32, y: i32, source_path: String) -> Self {
        Self {
            source_path,
            x,
            y,
            original_width: 512,
            original_height: 512,
            webp_512: Vec::new(),
            webp_256: Vec::new(),
            webp_128: Vec::new(),
            webp_64: Vec::new(),
        }
    }
    /// Combined byte length of every stored WebP rendition.
    pub fn total_webp_size(&self) -> usize {
        [&self.webp_512, &self.webp_256, &self.webp_128, &self.webp_64]
            .iter()
            .map(|blob| blob.len())
            .sum()
    }
    /// A tile counts as processed once its full-size (512px) WebP exists.
    pub fn is_processed(&self) -> bool {
        !self.webp_512.is_empty()
    }
}

View File

@@ -8,6 +8,9 @@ mod fast_travel;
mod player_house;
mod r#trait;
mod shop;
mod minimap_tile;
mod minimap_models;
mod icon_models;
pub use item::{
// Main types
@@ -40,3 +43,21 @@ pub use fast_travel::{FastTravelLocation, FastTravelType};
pub use player_house::PlayerHouse;
pub use r#trait::{Trait, TraitTrainer};
pub use shop::{Shop, ShopItem};
pub use minimap_tile::MinimapTile;
pub use minimap_models::{MinimapTileRecord, NewMinimapTile};
pub use icon_models::{
AbilityIconRecord,
NewAbilityIcon,
BuffIconRecord,
NewBuffIcon,
TraitIconRecord,
NewTraitIcon,
PlayerHouseIconRecord,
NewPlayerHouseIcon,
StatIconRecord,
NewStatIcon,
AchievementIconRecord,
NewAchievementIcon,
GeneralIconRecord,
NewGeneralIcon,
};

View File

@@ -12,42 +12,32 @@ pub struct PlayerHouse {
/// Description text
pub description: String,
/// 3D position in world space (x,y,z)
pub position: String,
/// X position in world space
pub pos_x: f32,
/// Z position in world space
pub pos_z: f32,
/// Purchase price in gold
pub price: i32,
/// Whether this house is hidden (not shown in normal lists)
pub hidden: bool,
}
impl PlayerHouse {
/// Create a new PlayerHouse with required fields
pub fn new(id: i32, name: String, description: String, position: String, price: i32) -> Self {
pub fn new(id: i32, name: String, description: String, pos_x: f32, pos_z: f32, price: i32) -> Self {
Self {
id,
name,
description,
position,
pos_x,
pos_z,
price,
hidden: false,
}
}
/// Parse position into (x, y, z) coordinates
pub fn get_position(&self) -> Option<(f32, f32, f32)> {
let parts: Vec<&str> = self.position.split(',').collect();
if parts.len() == 3 {
if let (Ok(x), Ok(y), Ok(z)) = (
parts[0].parse::<f32>(),
parts[1].parse::<f32>(),
parts[2].parse::<f32>(),
) {
return Some((x, y, z));
}
}
None
/// Get position as (x, z) tuple
pub fn get_position(&self) -> (f32, f32) {
(self.pos_x, self.pos_z)
}
/// Check if this house is free (price is 0)
@@ -55,11 +45,6 @@ impl PlayerHouse {
self.price == 0
}
/// Check if this house is visible (not hidden)
pub fn is_visible(&self) -> bool {
!self.hidden
}
/// Check if this house is expensive (price >= 10000)
pub fn is_expensive(&self) -> bool {
self.price >= 10000

View File

@@ -15,14 +15,12 @@ use serde_yaml::Mapping;
#[derive(Debug, Clone)]
pub struct InteractableResource {
pub max_health: i64,
pub type_id: i64,
}
impl UnityComponent for InteractableResource {
fn parse(yaml: &Mapping, _ctx: &ComponentContext) -> Option<Self> {
Some(Self {
max_health: unity_parser::yaml_helpers::get_i64(yaml, "maxHealth").unwrap_or(0),
type_id: unity_parser::yaml_helpers::get_i64(yaml, "typeId").unwrap_or(0),
})
}

View File

@@ -1,8 +1,8 @@
/// Interactable_TeleporterTeleporter component from Cursebreaker
/// Interactable_TeleporterDoor component from Cursebreaker
///
/// C# definition from Interactable_TeleporterTeleporter.cs:
/// C# definition from Interactable_TeleporterDoor.cs:
/// ```csharp
/// public class Interactable_TeleporterTeleporter : MonoBehaviour
/// public class Interactable_TeleporterDoor : MonoBehaviour
/// {
/// public Transform tpTransform;
/// }
@@ -53,7 +53,7 @@ impl EcsInsertable for InteractableTeleporter {
inventory::submit! {
unity_parser::ComponentRegistration {
type_id: 114,
class_name: "Interactable_TeleporterTeleporter",
class_name: "Interactable_TeleporterDoorEditor",
parse_and_insert: |yaml, ctx, world, entity| {
<InteractableTeleporter as EcsInsertable>::parse_and_insert(yaml, ctx, world, entity)
},

View File

@@ -0,0 +1,150 @@
/// MapIcon component from Cursebreaker
///
/// C# definition from MapIcon.cs:
/// ```csharp
/// public enum MapIconType
/// {
/// npc,
/// aggressiveNpc,
/// ally,
/// loot,
/// self,
/// player,
/// buildingAlly,
/// buildingEnemy,
/// path,
/// resource,
/// questmarker,
/// workbench,
/// door,
/// tree,
/// fish,
/// custom,
/// mapText,
/// worldMapText,
/// fastTravel,
/// fightingNpc,
/// worldMapIcon,
/// playerHouse,
/// task
/// }
///
/// public class MapIcon : MonoBehaviour
/// {
/// public UI_Minimap.MapIconType iconType = UI_Minimap.MapIconType.custom;
/// public int iconSize = 24;
/// public string icon = "MinimapIcons/";
/// public string text;
/// public int fontSize = 24;
/// public string hoverText;
/// }
/// ```
use unity_parser::{UnityComponent, ComponentContext, EcsInsertable};
use serde_yaml::Mapping;
/// Minimap icon categories, mirroring the C# `UI_Minimap.MapIconType` enum.
/// Discriminants are fixed to the C# declaration order so raw serialized
/// values map directly onto variants.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MapIconType {
    Npc = 0,
    AggressiveNpc = 1,
    Ally = 2,
    Loot = 3,
    Self_ = 4,
    Player = 5,
    BuildingAlly = 6,
    BuildingEnemy = 7,
    Path = 8,
    Resource = 9,
    Questmarker = 10,
    Workbench = 11,
    Door = 12,
    Tree = 13,
    Fish = 14,
    Custom = 15,
    MapText = 16,
    WorldMapText = 17,
    FastTravel = 18,
    FightingNpc = 19,
    WorldMapIcon = 20,
    PlayerHouse = 21,
    Task = 22,
}
impl Default for MapIconType {
    /// Mirrors the C# field initializer `iconType = MapIconType.custom`.
    fn default() -> Self {
        Self::Custom
    }
}
impl MapIconType {
    /// Map a raw serialized value onto the enum. Anything outside `0..=22`
    /// (including negatives) falls back to `Custom`.
    pub fn from_i64(value: i64) -> Self {
        // Variants listed in discriminant order; index i holds the variant
        // whose discriminant is i.
        const ORDERED: [MapIconType; 23] = [
            MapIconType::Npc,
            MapIconType::AggressiveNpc,
            MapIconType::Ally,
            MapIconType::Loot,
            MapIconType::Self_,
            MapIconType::Player,
            MapIconType::BuildingAlly,
            MapIconType::BuildingEnemy,
            MapIconType::Path,
            MapIconType::Resource,
            MapIconType::Questmarker,
            MapIconType::Workbench,
            MapIconType::Door,
            MapIconType::Tree,
            MapIconType::Fish,
            MapIconType::Custom,
            MapIconType::MapText,
            MapIconType::WorldMapText,
            MapIconType::FastTravel,
            MapIconType::FightingNpc,
            MapIconType::WorldMapIcon,
            MapIconType::PlayerHouse,
            MapIconType::Task,
        ];
        usize::try_from(value)
            .ok()
            .and_then(|i| ORDERED.get(i))
            .copied()
            .unwrap_or(MapIconType::Custom)
    }
}
/// Parsed fields of the Cursebreaker `MapIcon` MonoBehaviour
/// (C# reference reproduced in the module header comment).
#[derive(Debug, Clone)]
pub struct MapIcon {
    // Icon category; defaults to Custom when the YAML key is absent.
    pub icon_type: MapIconType,
    // Rendered icon size (C# default: 24).
    pub icon_size: i64,
    // Sprite path, defaulting to the "MinimapIcons/" prefix from the C# source.
    pub icon: String,
    // Label text and its font size (C# default: 24) — presumably used for
    // text-style map icons; confirm against UI_Minimap.
    pub text: String,
    pub font_size: i64,
    // Presumably tooltip text shown on hover — name-based inference, TODO confirm.
    pub hover_text: String,
}
impl UnityComponent for MapIcon {
fn parse(yaml: &Mapping, _ctx: &ComponentContext) -> Option<Self> {
let icon_type_value = unity_parser::yaml_helpers::get_i64(yaml, "iconType").unwrap_or(15);
Some(Self {
icon_type: MapIconType::from_i64(icon_type_value),
icon_size: unity_parser::yaml_helpers::get_i64(yaml, "iconSize").unwrap_or(24),
icon: unity_parser::yaml_helpers::get_string(yaml, "icon").unwrap_or_else(|| "MinimapIcons/".to_string()),
text: unity_parser::yaml_helpers::get_string(yaml, "text").unwrap_or_default(),
font_size: unity_parser::yaml_helpers::get_i64(yaml, "fontSize").unwrap_or(24),
hover_text: unity_parser::yaml_helpers::get_string(yaml, "hoverText").unwrap_or_default(),
})
}
}
impl EcsInsertable for MapIcon {
    /// Attach this parsed component to `entity` as a one-element component tuple.
    fn insert_into_world(self, world: &mut sparsey::World, entity: sparsey::Entity) {
        world.insert(entity, (self,));
    }
}
// Register component with inventory so the Unity scene walker can discover it.
inventory::submit! {
    unity_parser::ComponentRegistration {
        // 114 is Unity's MonoBehaviour class id; the concrete script is matched
        // via class_name — TODO confirm dispatch logic in unity_parser.
        type_id: 114,
        class_name: "MapIcon",
        parse_and_insert: |yaml, ctx, world, entity| {
            <MapIcon as EcsInsertable>::parse_and_insert(yaml, ctx, world, entity)
        },
        register: |builder| builder.register::<MapIcon>(),
    }
}

View File

@@ -2,10 +2,12 @@ mod interactable_resource;
mod interactable_teleporter;
mod interactable_workbench;
mod loot_spawner;
mod map_icon;
mod map_name_changer;
pub use interactable_resource::InteractableResource;
pub use interactable_teleporter::InteractableTeleporter;
pub use interactable_workbench::InteractableWorkbench;
pub use loot_spawner::LootSpawner;
pub use map_icon::{MapIcon, MapIconType};
pub use map_name_changer::MapNameChanger;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,127 @@
//! Fast Travel XML Parser
use crate::types::{FastTravelLocation, FastTravelType};
use super::{parse_attributes, XmlParseError};
use quick_xml::events::Event;
use quick_xml::reader::Reader;
use std::fs::File;
use std::io::BufReader;
use std::path::Path;
/// Parse FastTravelLocations.xml (regular fast travel locations).
///
/// # Errors
/// Returns `XmlParseError` if the file cannot be opened or an entry is malformed.
pub fn parse_fast_travel_locations_xml<P: AsRef<Path>>(
    path: P,
) -> Result<Vec<FastTravelLocation>, XmlParseError> {
    parse_fast_travel_xml_internal(path, FastTravelType::Location)
}
/// Parse FastTravelCanoe.xml (canoe fast travel locations).
///
/// # Errors
/// Returns `XmlParseError` if the file cannot be opened or an entry is malformed.
pub fn parse_fast_travel_canoe_xml<P: AsRef<Path>>(
    path: P,
) -> Result<Vec<FastTravelLocation>, XmlParseError> {
    parse_fast_travel_xml_internal(path, FastTravelType::Canoe)
}
/// Parse FastTravelPortals.xml (portal fast travel locations).
///
/// # Errors
/// Returns `XmlParseError` if the file cannot be opened or an entry is malformed.
pub fn parse_fast_travel_portals_xml<P: AsRef<Path>>(
    path: P,
) -> Result<Vec<FastTravelLocation>, XmlParseError> {
    parse_fast_travel_xml_internal(path, FastTravelType::Portal)
}
/// Internal function to parse any fast travel XML file.
///
/// Scans for `<location>` elements, extracts the mandatory `id`, `name` and
/// `pos` attributes, then applies the optional attributes specific to
/// `travel_type`. The y component of `pos` is discarded.
fn parse_fast_travel_xml_internal<P: AsRef<Path>>(
    path: P,
    travel_type: FastTravelType,
) -> Result<Vec<FastTravelLocation>, XmlParseError> {
    let file = File::open(path)?;
    let buf_reader = BufReader::new(file);
    let mut reader = Reader::from_reader(buf_reader);
    // Trim whitespace around text nodes; only attributes are read below anyway.
    reader.config_mut().trim_text(true);
    let mut locations = Vec::new();
    let mut buf = Vec::new();
    loop {
        match reader.read_event_into(&mut buf) {
            // Accept both <location ...>...</location> and self-closing <location .../>.
            Ok(Event::Start(ref e)) | Ok(Event::Empty(ref e)) => {
                match e.name().as_ref() {
                    b"location" => {
                        let attrs = parse_attributes(e)?;
                        // Get required attributes
                        let id = attrs
                            .get("id")
                            .ok_or_else(|| XmlParseError::MissingAttribute("id".to_string()))?
                            .parse::<i32>()
                            .map_err(|_| XmlParseError::InvalidAttribute("id".to_string()))?;
                        let name = attrs
                            .get("name")
                            .ok_or_else(|| XmlParseError::MissingAttribute("name".to_string()))?
                            .clone();
                        let position_str = attrs
                            .get("pos")
                            .ok_or_else(|| XmlParseError::MissingAttribute("pos".to_string()))?;
                        // Parse position "x,y,z" and extract x,z (discard y)
                        let (pos_x, pos_z) = parse_position(position_str)?;
                        let mut location = FastTravelLocation::new(id, name, pos_x, pos_z, travel_type);
                        // Parse optional attributes based on type
                        match travel_type {
                            FastTravelType::Location => {
                                // Regular locations have unlocked and connections.
                                // NOTE(review): mere presence of `unlocked` sets the flag,
                                // regardless of value (unlocked="0" would still unlock) —
                                // confirm this matches the XML convention.
                                if attrs.get("unlocked").is_some() {
                                    location.unlocked = true;
                                }
                                if let Some(v) = attrs.get("connections") {
                                    location.connections = Some(v.clone());
                                }
                            }
                            FastTravelType::Canoe => {
                                // Canoe locations have checks
                                if let Some(v) = attrs.get("checks") {
                                    location.checks = Some(v.clone());
                                }
                            }
                            FastTravelType::Portal => {
                                // Portals have no additional fields
                            }
                        }
                        locations.push(location);
                    }
                    _ => {}
                }
            }
            Ok(Event::Eof) => break,
            Err(e) => return Err(XmlParseError::XmlError(e)),
            _ => {}
        }
        // Reuse the event buffer across iterations to avoid per-event allocation.
        buf.clear();
    }
    Ok(locations)
}
/// Parse a `"x,y,z"` position attribute into `(x, z)`, dropping the vertical
/// y component. Exactly three comma-separated parts are required; each used
/// part is trimmed before numeric parsing.
fn parse_position(pos: &str) -> Result<(f32, f32), XmlParseError> {
    let parts: Vec<&str> = pos.split(',').collect();
    // Require exactly three components; pull out x and z, ignoring y.
    let (x_raw, z_raw) = match parts.as_slice() {
        [x, _, z] => (*x, *z),
        _ => return Err(XmlParseError::InvalidAttribute("pos".to_string())),
    };
    let x = x_raw
        .trim()
        .parse::<f32>()
        .map_err(|_| XmlParseError::InvalidAttribute("pos.x".to_string()))?;
    let z = z_raw
        .trim()
        .parse::<f32>()
        .map_err(|_| XmlParseError::InvalidAttribute("pos.z".to_string()))?;
    Ok((x, z))
}

View File

@@ -0,0 +1,127 @@
//! Harvestable XML Parser
use crate::types::{Harvestable, HarvestableDrop, SkillType, Tool};
use super::{parse_attributes, parse_health_range, XmlParseError};
use quick_xml::events::Event;
use quick_xml::reader::Reader;
use std::fs::File;
use std::io::BufReader;
use std::path::Path;
pub fn parse_harvestables_xml<P: AsRef<Path>>(path: P) -> Result<Vec<Harvestable>, XmlParseError> {
let file = File::open(path)?;
let buf_reader = BufReader::new(file);
let mut reader = Reader::from_reader(buf_reader);
reader.config_mut().trim_text(true);
let mut harvestables = Vec::new();
let mut buf = Vec::new();
let mut current_harvestable: Option<Harvestable> = None;
loop {
match reader.read_event_into(&mut buf) {
Ok(Event::Start(ref e)) | Ok(Event::Empty(ref e)) => {
match e.name().as_ref() {
b"harvestable" => {
let attrs = parse_attributes(e)?;
let typeid = attrs.get("typeid")
.ok_or_else(|| XmlParseError::MissingAttribute("typeid".to_string()))?
.parse::<i32>()
.map_err(|_| XmlParseError::InvalidAttribute("typeid".to_string()))?;
let name = attrs.get("name")
.ok_or_else(|| XmlParseError::MissingAttribute("name".to_string()))?
.clone();
let mut harvestable = Harvestable::new(typeid, name);
// Parse optional attributes with defaults
if let Some(v) = attrs.get("actionname") { harvestable.actionname = v.clone(); }
if let Some(v) = attrs.get("desc") { harvestable.desc = v.clone(); }
if let Some(v) = attrs.get("comment") { harvestable.comment = v.clone(); }
if let Some(v) = attrs.get("level") { harvestable.level = v.parse().unwrap_or(0); }
if let Some(v) = attrs.get("skill") { harvestable.skill = v.parse().unwrap_or(SkillType::None); }
if let Some(v) = attrs.get("tool") { harvestable.tool = v.parse().unwrap_or(Tool::None); }
if let Some(v) = attrs.get("health") {
let (min, max) = parse_health_range(v);
harvestable.min_health = min;
harvestable.max_health = max;
}
if let Some(v) = attrs.get("harvesttime") { harvestable.harvesttime = v.parse().unwrap_or(0); }
if let Some(v) = attrs.get("hittime") { harvestable.hittime = v.parse().unwrap_or(0); }
if let Some(v) = attrs.get("respawntime") { harvestable.respawntime = v.parse().unwrap_or(0); }
// Audio (handle both cases: harvestSfx and harvestsfx)
if let Some(v) = attrs.get("harvestSfx").or_else(|| attrs.get("harvestsfx")) {
harvestable.harvestsfx = v.clone();
}
if let Some(v) = attrs.get("endSfx").or_else(|| attrs.get("endsfx")) {
harvestable.endsfx = v.clone();
}
if let Some(v) = attrs.get("receiveItemSfx").or_else(|| attrs.get("receiveitemsfx")) {
harvestable.receiveitemsfx = v.clone();
}
if let Some(v) = attrs.get("animation") { harvestable.animation = v.clone(); }
if let Some(v) = attrs.get("takehitanimation") { harvestable.takehitanimation = v.clone(); }
if let Some(v) = attrs.get("endgfx") { harvestable.endgfx = v.clone(); }
if let Some(v) = attrs.get("tree") { harvestable.tree = v.parse().unwrap_or(0) == 1; }
if let Some(v) = attrs.get("hidemilestone") { harvestable.hidemilestone = v.parse().unwrap_or(0) == 1; }
if let Some(v) = attrs.get("nohighlight") { harvestable.nohighlight = v.parse().unwrap_or(0) == 1; }
// Handle both cases: hideMinimap and hideminimap
if let Some(v) = attrs.get("hideMinimap").or_else(|| attrs.get("hideminimap")) {
harvestable.hideminimap = v.parse().unwrap_or(0) == 1;
}
if let Some(v) = attrs.get("noLeftClickInteract").or_else(|| attrs.get("noleftclickinteract")) {
harvestable.noleftclickinteract = v.parse().unwrap_or(0) == 1;
}
if let Some(v) = attrs.get("interactDistance").or_else(|| attrs.get("interactdistance")) {
harvestable.interactdistance = v.clone();
}
current_harvestable = Some(harvestable);
}
b"item" if current_harvestable.is_some() => {
if let Some(ref mut harvestable) = current_harvestable {
let attrs = parse_attributes(e)?;
if let Some(id_str) = attrs.get("id") {
if let Ok(id) = id_str.parse::<i32>() {
let drop = HarvestableDrop {
id,
minamount: attrs.get("minamount").and_then(|v| v.parse().ok()).unwrap_or(0),
maxamount: attrs.get("maxamount").and_then(|v| v.parse().ok()).unwrap_or(0),
droprate: attrs.get("droprate").and_then(|v| v.parse().ok()).unwrap_or(0),
droprateboost: attrs.get("droprateboost").and_then(|v| v.parse().ok()).unwrap_or(0),
amountboost: attrs.get("amountboost").and_then(|v| v.parse().ok()).unwrap_or(0),
checks: attrs.get("checks").cloned().unwrap_or_default(),
comment: attrs.get("comment").cloned().unwrap_or_default(),
dontconsumehealth: attrs.get("dontconsumehealth").and_then(|v| v.parse().ok()).unwrap_or(0) == 1,
};
harvestable.drops.push(drop);
}
}
}
}
_ => {}
}
}
Ok(Event::End(ref e)) => {
match e.name().as_ref() {
b"harvestable" => {
if let Some(harvestable) = current_harvestable.take() {
harvestables.push(harvestable);
}
}
_ => {}
}
}
Ok(Event::Eof) => break,
Err(e) => return Err(XmlParseError::XmlError(e)),
_ => {}
}
buf.clear();
}
Ok(harvestables)
}

Some files were not shown because too many files have changed in this diff. Show More