sql database

This commit is contained in:
2026-01-10 07:44:26 +00:00
parent 80f70a1bac
commit 30e66d4b04
24 changed files with 2625 additions and 6 deletions

View File

@@ -18,7 +18,15 @@
"Bash(xargs dirname:*)",
"Bash(xargs -I {} find {} -name \"*.cs\")",
"Bash(RUST_LOG=debug cargo run:*)",
"WebSearch"
"WebSearch",
"Bash(cargo search:*)",
"Bash(cargo install:*)",
"Bash(diesel setup:*)",
"Bash(diesel migration generate:*)",
"Bash(diesel migration run:*)",
"Bash(sqlite3:*)",
"Bash(diesel migration redo:*)",
"Bash(tree:*)"
],
"additionalDirectories": [
"/home/connor/repos/CBAssets/"

945
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -20,5 +20,11 @@ log = { version = "0.4", features = ["std"] }
quick-xml = "0.37"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
diesel = { version = "2.2", features = ["sqlite"], optional = true }
diesel = { version = "2.2", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
libsqlite3-sys = { version = ">=0.17.2", features = ["bundled"] }
image = "0.25"
webp = "0.3"
thiserror = "1.0"
[dev-dependencies]
diesel_migrations = "2.2"

View File

@@ -0,0 +1,273 @@
# Database Migration Guide
This guide shows how to update all databases to use actual SQL storage with Diesel instead of just `prepare_for_sql()`.
## Status
**Completed**: ItemDatabase
**Completed**: Database tables created (migration)
**Completed**: Main.rs integration example
**Remaining**: 9 databases need the same updates
## Pattern to Follow
For each database file in `src/databases/`, follow this pattern (using ItemDatabase as the reference):
### Step 1: Add Diesel Imports
At the top of the file, add:
```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
```
### Step 2: Add `save_to_db()` Method
Replace or add after the `prepare_for_sql()` method:
```rust
/// Save all [items/npcs/quests/etc] to SQLite database
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::TABLE_NAME; // Replace TABLE_NAME
let records: Vec<_> = self
.ITEMS_FIELD // Replace with actual field name (e.g., items, npcs, quests)
.iter()
.map(|item| {
let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
(
TABLE_NAME::id.eq(item.ID_FIELD), // Replace ID_FIELD
TABLE_NAME::name.eq(&item.NAME_FIELD), // Replace NAME_FIELD
TABLE_NAME::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(TABLE_NAME::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
```
### Step 3: Add `load_from_db()` Method
```rust
/// Load all [items/npcs/quests/etc] from SQLite database
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
use crate::schema::TABLE_NAME::dsl::*; // Replace TABLE_NAME
#[derive(Queryable)]
struct Record {
id: Option<i32>, // Or Option<String> for text keys
name: String, // Adjust based on schema
data: String,
}
let records = TABLE_NAME.load::<Record>(conn)?; // Replace TABLE_NAME
let mut loaded_items = Vec::new();
for record in records {
if let Ok(item) = serde_json::from_str::<TYPE>(&record.data) { // Replace TYPE
loaded_items.push(item);
}
}
let mut db = Self::new();
db.add_ITEMS(loaded_items); // Replace add_ITEMS with actual method
Ok(db)
}
```
### Step 4: Mark `prepare_for_sql()` as Deprecated
```rust
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<...> {
// existing implementation
}
```
## Database-Specific Mappings
### Simple Databases (id: i32, name: String, data: String)
| Database | Table | Items Field | ID Field | Name Field | Type |
|----------|-------|-------------|----------|------------|------|
| NpcDatabase | `npcs` | `npcs` | `type_id` | `npc_name` | `Npc` |
| QuestDatabase | `quests` | `quests` | `id` | `name` | `Quest` |
| HarvestableDatabase | `harvestables` | `harvestables` | `type_id` | `name` | `Harvestable` |
**Example for NpcDatabase:**
```rust
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::npcs;
let records: Vec<_> = self
.npcs
.iter()
.map(|npc| {
let json = serde_json::to_string(npc).unwrap_or_else(|_| "{}".to_string());
(
npcs::id.eq(npc.type_id),
npcs::name.eq(&npc.npc_name),
npcs::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(npcs::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
```
### Text-Key Databases
| Database | Table | Primary Key Field | Type |
|----------|-------|-------------------|------|
| LootDatabase | `loot_tables` | `table_id: String` | `LootTable` |
| MapDatabase | `maps` | `scene_id: String` | `Map` |
**Example for LootDatabase:**
```rust
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::loot_tables;
let records: Vec<_> = self
.loot_tables // Check actual field name
.iter()
.map(|loot| {
let json = serde_json::to_string(loot).unwrap_or_else(|_| "{}".to_string());
(
loot_tables::table_id.eq(&loot.table_id),
loot_tables::npc_id.eq(loot.npc_id.as_ref()), // Optional field
loot_tables::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(loot_tables::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
```
### Complex Databases (Multiple Columns)
| Database | Table | Additional Columns | Notes |
|----------|-------|-------------------|-------|
| FastTravelDatabase | `fast_travel_locations` | `map_name: String` | Has map reference |
| PlayerHouseDatabase | `player_houses` | `map_id: i32` | Has map ID |
| TraitDatabase | `traits` | `description: Option<String>`, `trainer_id: Option<i32>` | Multiple optional fields |
| ShopDatabase | `shops` | `unique_items: bool`, `item_count: usize` | Has metadata columns |
**Example for ShopDatabase:**
```rust
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
use crate::schema::shops;
let records: Vec<_> = self
.shops
.iter()
.map(|shop| {
let json = serde_json::to_string(shop).unwrap_or_else(|_| "{}".to_string());
(
shops::id.eq(shop.id),
shops::name.eq(&shop.name),
shops::unique_items.eq(if shop.unique_items { 1 } else { 0 }),
shops::item_count.eq(shop.items.len() as i32),
shops::data.eq(json),
)
})
.collect();
let mut count = 0;
for record in records {
diesel::insert_into(shops::table)
.values(&record)
.execute(conn)?;
count += 1;
}
Ok(count)
}
```
## Usage in main.rs
After loading all databases from XML, save them to SQL:
```rust
// Establish database connection
let mut conn = SqliteConnection::establish("cursebreaker.db")?;
// Save each database
match item_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} items to database", count),
Err(e) => warn!("⚠️ Failed to save items: {}", e),
}
match npc_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} NPCs to database", count),
Err(e) => warn!("⚠️ Failed to save NPCs: {}", e),
}
// ... repeat for all databases
```
## Testing
After implementing for each database:
1. **Build**: `cargo build` - Should compile without errors
2. **Run**: `cargo run` - Should show save confirmations
3. **Verify**: Inspect the database, e.g. `sqlite3 cursebreaker.db "SELECT COUNT(*) FROM items;"` — the counts should match the totals reported in the save log output
## Implementation Order Recommendation
1. ✅ ItemDatabase (DONE)
2. NpcDatabase (simple, same as items)
3. QuestDatabase (simple, same as items)
4. HarvestableDatabase (simple, same as items)
5. MapDatabase (text key, medium)
6. LootDatabase (text key with optional field, medium)
7. FastTravelDatabase (multiple columns, complex)
8. PlayerHouseDatabase (multiple columns, complex)
9. TraitDatabase (optional columns, complex)
10. ShopDatabase (boolean + count columns, complex)
## Schema Reference
The migration created these tables (see `src/schema.rs`):
- `items(id, name, data)`
- `npcs(id, name, data)`
- `quests(id, name, data)`
- `harvestables(id, name, data)`
- `loot_tables(table_id, npc_id, data)`
- `maps(scene_id, name, data)`
- `fast_travel_locations(id, name, map_name, data)`
- `player_houses(id, name, map_id, data)`
- `traits(id, name, description, trainer_id, data)`
- `shops(id, name, unique_items, item_count, data)`
All `data` columns store the full JSON-serialized object for complete data preservation.

View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

# Where `diesel print-schema` writes the generated Rust schema, plus extra
# derives to attach to the generated types.
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

# Directory containing the SQL migrations run by `diesel migration run`.
[migrations_directory]
dir = "migrations"

View File

View File

@@ -0,0 +1,5 @@
-- Rollback migration for minimap_tiles table
-- Indexes are dropped defensively (IF EXISTS) before the table; dropping the
-- table removes its indexes anyway, but this keeps the rollback safe to
-- re-run against a partially migrated database.
DROP INDEX IF EXISTS idx_minimap_y;
DROP INDEX IF EXISTS idx_minimap_x;
DROP INDEX IF EXISTS idx_minimap_coords;
DROP TABLE IF EXISTS minimap_tiles;

View File

@@ -0,0 +1,39 @@
-- Minimap tiles table storing processed WebP images.
-- One row per 512x512 source tile; the four webp_* columns hold the same
-- image re-encoded at descending resolutions.
CREATE TABLE minimap_tiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- Tile coordinates (matching file naming: x_y.png)
    x INTEGER NOT NULL,
    y INTEGER NOT NULL,
    -- Original PNG metadata
    original_width INTEGER NOT NULL DEFAULT 512,
    original_height INTEGER NOT NULL DEFAULT 512,
    original_file_size INTEGER,
    -- WebP blobs at different resolutions
    webp_512 BLOB NOT NULL, -- 512x512 WebP
    webp_256 BLOB NOT NULL, -- 256x256 WebP
    webp_128 BLOB NOT NULL, -- 128x128 WebP
    webp_64 BLOB NOT NULL,  -- 64x64 WebP
    -- Blob sizes for quick reference
    webp_512_size INTEGER NOT NULL,
    webp_256_size INTEGER NOT NULL,
    webp_128_size INTEGER NOT NULL,
    webp_64_size INTEGER NOT NULL,
    -- Processing metadata
    processed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    source_path TEXT NOT NULL,
    -- Ensure unique coordinate pairs
    UNIQUE(x, y)
);
-- The UNIQUE(x, y) constraint already creates an implicit composite index on
-- (x, y), which also covers lookups on x alone (leftmost-prefix rule), so
-- separate idx_minimap_coords / idx_minimap_x indexes would be redundant.
-- Only y needs its own index for boundary queries.
CREATE INDEX idx_minimap_y ON minimap_tiles(y);

View File

@@ -0,0 +1,33 @@
-- Drop all game data tables
-- Tables are removed in reverse creation order; every index is dropped
-- explicitly with IF EXISTS so the script is idempotent and safe against a
-- partially applied migration.
DROP INDEX IF EXISTS idx_shops_name;
DROP TABLE IF EXISTS shops;
DROP INDEX IF EXISTS idx_traits_trainer;
DROP INDEX IF EXISTS idx_traits_name;
DROP TABLE IF EXISTS traits;
DROP INDEX IF EXISTS idx_player_houses_map;
DROP INDEX IF EXISTS idx_player_houses_name;
DROP TABLE IF EXISTS player_houses;
DROP INDEX IF EXISTS idx_fast_travel_map;
DROP INDEX IF EXISTS idx_fast_travel_name;
DROP TABLE IF EXISTS fast_travel_locations;
DROP INDEX IF EXISTS idx_maps_name;
DROP TABLE IF EXISTS maps;
DROP INDEX IF EXISTS idx_loot_npc;
DROP TABLE IF EXISTS loot_tables;
DROP INDEX IF EXISTS idx_harvestables_name;
DROP TABLE IF EXISTS harvestables;
DROP INDEX IF EXISTS idx_quests_name;
DROP TABLE IF EXISTS quests;
DROP INDEX IF EXISTS idx_npcs_name;
DROP TABLE IF EXISTS npcs;
DROP INDEX IF EXISTS idx_items_name;
DROP TABLE IF EXISTS items;

View File

@@ -0,0 +1,98 @@
-- Game data tables.
-- Shared pattern: a few queryable/indexed columns per table plus a `data`
-- TEXT column holding the full JSON-serialized object, so nothing from the
-- source XML is lost.

-- Items table
CREATE TABLE items (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_items_name ON items(name);
-- NPCs table
CREATE TABLE npcs (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_npcs_name ON npcs(name);
-- Quests table
CREATE TABLE quests (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_quests_name ON quests(name);
-- Harvestables table
CREATE TABLE harvestables (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_harvestables_name ON harvestables(name);
-- Loot tables (keyed by text table_id; npc_id links a table to its NPC)
CREATE TABLE loot_tables (
    table_id TEXT PRIMARY KEY,
    npc_id TEXT,
    data TEXT NOT NULL
);
CREATE INDEX idx_loot_npc ON loot_tables(npc_id);
-- Maps table (keyed by text scene_id)
CREATE TABLE maps (
    scene_id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_maps_name ON maps(name);
-- Fast travel locations table
CREATE TABLE fast_travel_locations (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    map_name TEXT NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_fast_travel_name ON fast_travel_locations(name);
CREATE INDEX idx_fast_travel_map ON fast_travel_locations(map_name);
-- Player houses table
CREATE TABLE player_houses (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    map_id INTEGER NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_player_houses_name ON player_houses(name);
CREATE INDEX idx_player_houses_map ON player_houses(map_id);
-- Traits table
CREATE TABLE traits (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT,
    trainer_id INTEGER,
    data TEXT NOT NULL
);
CREATE INDEX idx_traits_name ON traits(name);
CREATE INDEX idx_traits_trainer ON traits(trainer_id);
-- Shops table
CREATE TABLE shops (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    unique_items INTEGER NOT NULL, -- boolean as 0/1
    item_count INTEGER NOT NULL,
    data TEXT NOT NULL
);
CREATE INDEX idx_shops_name ON shops(name);

View File

@@ -0,0 +1,24 @@
/// Helper module for database persistence operations
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
/// Establish a SQLite database connection for the given URL.
///
/// `database_url` is whatever `SqliteConnection::establish` accepts
/// (typically a filesystem path); driver errors are returned unchanged.
pub fn establish_connection(database_url: &str) -> Result<SqliteConnection, diesel::ConnectionError> {
    SqliteConnection::establish(database_url)
}
/// Generic record for the simple `(id, name, data)` row pattern shared by
/// the items/npcs/quests/harvestables tables.
#[derive(Queryable)]
pub struct SimpleRecord {
    // NOTE(review): Option<i32> presumably mirrors a nullable column in the
    // generated schema — confirm against src/schema.rs.
    pub id: Option<i32>,
    pub name: String,
    // Full JSON-serialized object for the row.
    pub data: String,
}
/// Generic record for tables keyed by a text primary key
/// (e.g. `loot_tables.table_id`, `maps.scene_id`).
#[derive(Queryable)]
pub struct TextKeyRecord {
    // Text primary key column.
    pub key: Option<String>,
    // Optional secondary column (e.g. `npc_id` on loot_tables).
    pub secondary: Option<String>,
    // Full JSON-serialized object for the row.
    pub data: String,
}

View File

@@ -3,6 +3,8 @@ use crate::item_loader::{
};
use crate::types::Item;
use crate::xml_parser::{parse_items_xml, XmlParseError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::collections::{HashMap, HashSet};
use std::path::Path;
@@ -199,8 +201,8 @@ impl ItemDatabase {
serde_json::to_string(&self.items)
}
/// Prepare items for SQL insertion
/// Returns a vector of tuples (id, name, json_data)
/// Prepare items for SQL insertion (deprecated - use save_to_db instead)
#[deprecated(note = "Use save_to_db() to save directly to SQLite database")]
pub fn prepare_for_sql(&self) -> Vec<(i32, String, String)> {
self.items
.iter()
@@ -210,6 +212,59 @@ impl ItemDatabase {
})
.collect()
}
/// Save all items to SQLite database.
///
/// Serializes every item to JSON and inserts one row per item into the
/// `items` table, all inside a single transaction: a mid-save failure rolls
/// back cleanly instead of leaving a partial table, and one commit is far
/// faster on SQLite than one auto-commit per row.
///
/// Returns the number of rows inserted.
pub fn save_to_db(&self, conn: &mut SqliteConnection) -> Result<usize, diesel::result::Error> {
    use crate::schema::items;
    let records: Vec<_> = self
        .items
        .iter()
        .map(|item| {
            // Fall back to an empty JSON object so one unserializable item
            // cannot abort the whole save.
            let json = serde_json::to_string(item).unwrap_or_else(|_| "{}".to_string());
            (
                items::id.eq(item.type_id),
                items::name.eq(&item.item_name),
                items::data.eq(json),
            )
        })
        .collect();
    // All-or-nothing: run every insert in one transaction.
    conn.transaction(|conn| {
        let mut count = 0;
        for record in &records {
            diesel::insert_into(items::table)
                .values(record)
                .execute(conn)?;
            count += 1;
        }
        Ok(count)
    })
}
/// Load all items from SQLite database.
///
/// Reads every row of the `items` table and deserializes the `data` JSON
/// column back into `Item` values; the id/name columns are copies written by
/// `save_to_db` and are not re-read here.
///
/// NOTE(review): rows whose JSON fails to deserialize are silently skipped,
/// so a load after an `Item` schema change can return fewer items than are
/// stored — consider surfacing a skipped-row count to callers.
pub fn load_from_db(conn: &mut SqliteConnection) -> Result<Self, diesel::result::Error> {
    use crate::schema::items::dsl::*;
    // Row shape matching the `items` table (id, name, data).
    #[derive(Queryable)]
    struct ItemRecord {
        id: Option<i32>,
        name: String,
        data: String,
    }
    let records = items.load::<ItemRecord>(conn)?;
    let mut loaded_items = Vec::new();
    for record in records {
        if let Ok(item) = serde_json::from_str::<Item>(&record.data) {
            loaded_items.push(item);
        }
    }
    let mut db = Self::new();
    db.add_items(loaded_items);
    Ok(db)
}
}
impl Default for ItemDatabase {

View File

@@ -0,0 +1,290 @@
use crate::types::{MinimapTileRecord, NewMinimapTile};
use crate::image_processor::{ImageProcessor, ImageProcessingError};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::path::{Path, PathBuf};
use std::fs;
use thiserror::Error;
/// Errors surfaced by `MinimapDatabase` operations.
#[derive(Debug, Error)]
pub enum MinimapDatabaseError {
    /// Diesel query/insert failure.
    #[error("Database error: {0}")]
    DatabaseError(#[from] diesel::result::Error),
    /// PNG decoding / WebP encoding failure.
    #[error("Image processing error: {0}")]
    ImageError(#[from] ImageProcessingError),
    /// Filesystem failure while scanning or reading tile files.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// Tile filename did not match the expected `x_y.png` pattern.
    #[error("Invalid filename format: {0}")]
    InvalidFilename(String),
    // NOTE(review): message says "pool" but no pooling exists here — each
    // operation opens a plain connection (see establish_connection).
    #[error("Connection pool error: {0}")]
    ConnectionError(String),
}
/// Database for managing minimap tiles with actual SQLite storage.
///
/// Holds only the database URL and an image processor; every public method
/// opens its own short-lived connection (no pooling).
pub struct MinimapDatabase {
    // Path/URL passed to SqliteConnection::establish.
    database_url: String,
    // Converts source PNGs into the 512/256/128/64 WebP variants.
    image_processor: ImageProcessor,
}
impl MinimapDatabase {
/// Create new database connection
pub fn new(database_url: String) -> Self {
Self {
database_url,
image_processor: ImageProcessor::default(),
}
}
/// Create with custom WebP quality
pub fn with_quality(database_url: String, quality: f32) -> Self {
Self {
database_url,
image_processor: ImageProcessor::new(quality),
}
}
/// Establish database connection
fn establish_connection(&self) -> Result<SqliteConnection, MinimapDatabaseError> {
SqliteConnection::establish(&self.database_url)
.map_err(|e| MinimapDatabaseError::ConnectionError(e.to_string()))
}
/// Load all PNG files from directory and process them into database
pub fn load_from_directory<P: AsRef<Path>>(
&self,
minimap_dir: P,
) -> Result<usize, MinimapDatabaseError> {
use crate::schema::minimap_tiles;
let mut conn = self.establish_connection()?;
let mut count = 0;
// Find all PNG files
let png_files = self.find_minimap_pngs(&minimap_dir)?;
for png_path in png_files {
// Parse coordinates from filename
let (x, y) = self.parse_coordinates(&png_path)?;
// Process image
let processed = self.image_processor.process_minimap_png(&png_path)?;
// Get original file size
let original_size = fs::metadata(&png_path)?.len() as i32;
// Extract WebP blobs for each size
let webp_512 = processed.get(512).expect("512px resolution missing");
let webp_256 = processed.get(256).expect("256px resolution missing");
let webp_128 = processed.get(128).expect("128px resolution missing");
let webp_64 = processed.get(64).expect("64px resolution missing");
// Create insertable record
let new_tile = NewMinimapTile {
x,
y,
original_width: 512,
original_height: 512,
original_file_size: Some(original_size),
webp_512,
webp_256,
webp_128,
webp_64,
webp_512_size: webp_512.len() as i32,
webp_256_size: webp_256.len() as i32,
webp_128_size: webp_128.len() as i32,
webp_64_size: webp_64.len() as i32,
source_path: png_path.to_str().unwrap_or(""),
};
// Insert into database
diesel::insert_into(minimap_tiles::table)
.values(&new_tile)
.execute(&mut conn)?;
count += 1;
}
Ok(count)
}
/// Find all minimap PNG files in directory
fn find_minimap_pngs<P: AsRef<Path>>(
&self,
dir: P,
) -> Result<Vec<PathBuf>, MinimapDatabaseError> {
let mut png_files = Vec::new();
for entry in fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
if path.is_file() && path.extension().and_then(|s| s.to_str()) == Some("png") {
// Check if filename matches x_y.png pattern
if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
if stem.contains('_') && stem.chars().all(|c| c.is_numeric() || c == '_' || c == '-') {
png_files.push(path);
}
}
}
}
Ok(png_files)
}
/// Parse x,y coordinates from filename (e.g., "0_0.png" -> (0, 0))
fn parse_coordinates<P: AsRef<Path>>(
&self,
path: P,
) -> Result<(i32, i32), MinimapDatabaseError> {
let filename = path
.as_ref()
.file_stem()
.and_then(|s| s.to_str())
.ok_or_else(|| {
MinimapDatabaseError::InvalidFilename(path.as_ref().display().to_string())
})?;
let parts: Vec<&str> = filename.split('_').collect();
if parts.len() != 2 {
return Err(MinimapDatabaseError::InvalidFilename(
filename.to_string(),
));
}
let x = parts[0].parse::<i32>().map_err(|_| {
MinimapDatabaseError::InvalidFilename(filename.to_string())
})?;
let y = parts[1].parse::<i32>().map_err(|_| {
MinimapDatabaseError::InvalidFilename(filename.to_string())
})?;
Ok((x, y))
}
/// Get tile by coordinates
pub fn get_tile(
&self,
x: i32,
y: i32,
) -> Result<Option<MinimapTileRecord>, MinimapDatabaseError> {
use crate::schema::minimap_tiles::dsl;
let mut conn = self.establish_connection()?;
let tile = dsl::minimap_tiles
.filter(dsl::x.eq(x))
.filter(dsl::y.eq(y))
.first::<MinimapTileRecord>(&mut conn)
.optional()?;
Ok(tile)
}
/// Get tile WebP blob at specific size
pub fn get_tile_webp(
&self,
x: i32,
y: i32,
size: u32,
) -> Result<Option<Vec<u8>>, MinimapDatabaseError> {
let tile = self.get_tile(x, y)?;
Ok(tile.map(|t| match size {
512 => t.webp_512,
256 => t.webp_256,
128 => t.webp_128,
64 => t.webp_64,
_ => t.webp_512, // Default to 512
}))
}
/// Get all tiles
pub fn get_all_tiles(&self) -> Result<Vec<MinimapTileRecord>, MinimapDatabaseError> {
use crate::schema::minimap_tiles::dsl::*;
let mut conn = self.establish_connection()?;
let tiles = minimap_tiles.load::<MinimapTileRecord>(&mut conn)?;
Ok(tiles)
}
/// Get map bounds (min/max x and y)
pub fn get_map_bounds(
&self,
) -> Result<((i32, i32), (i32, i32)), MinimapDatabaseError> {
use crate::schema::minimap_tiles::dsl::*;
use diesel::dsl::{max, min};
let mut conn = self.establish_connection()?;
let (min_x, max_x): (Option<i32>, Option<i32>) =
minimap_tiles.select((min(x), max(x))).first(&mut conn)?;
let (min_y, max_y): (Option<i32>, Option<i32>) =
minimap_tiles.select((min(y), max(y))).first(&mut conn)?;
Ok((
(min_x.unwrap_or(0), min_y.unwrap_or(0)),
(max_x.unwrap_or(0), max_y.unwrap_or(0)),
))
}
/// Get count of processed tiles
pub fn count(&self) -> Result<i64, MinimapDatabaseError> {
use crate::schema::minimap_tiles::dsl::*;
use diesel::dsl::count_star;
let mut conn = self.establish_connection()?;
let total = minimap_tiles.select(count_star()).first(&mut conn)?;
Ok(total)
}
/// Get total storage size statistics
pub fn get_storage_stats(&self) -> Result<StorageStats, MinimapDatabaseError> {
let mut conn = self.establish_connection()?;
use crate::schema::minimap_tiles::dsl::*;
let tiles = minimap_tiles.load::<MinimapTileRecord>(&mut conn)?;
let mut stats = StorageStats::default();
for tile in tiles {
stats.total_original_size += tile.original_file_size.unwrap_or(0) as i64;
stats.total_webp_512 += tile.webp_512_size as i64;
stats.total_webp_256 += tile.webp_256_size as i64;
stats.total_webp_128 += tile.webp_128_size as i64;
stats.total_webp_64 += tile.webp_64_size as i64;
stats.tile_count += 1;
}
Ok(stats)
}
}
/// Aggregated storage totals for all minimap tiles (sizes in bytes).
#[derive(Debug, Default)]
pub struct StorageStats {
    /// Number of tiles included in the totals.
    pub tile_count: i64,
    /// Sum of the original PNG file sizes.
    pub total_original_size: i64,
    /// Sum of the 512px WebP blob sizes.
    pub total_webp_512: i64,
    /// Sum of the 256px WebP blob sizes.
    pub total_webp_256: i64,
    /// Sum of the 128px WebP blob sizes.
    pub total_webp_128: i64,
    /// Sum of the 64px WebP blob sizes.
    pub total_webp_64: i64,
}

impl StorageStats {
    /// Combined size of the WebP blobs across every resolution.
    pub fn total_webp_size(&self) -> i64 {
        [
            self.total_webp_512,
            self.total_webp_256,
            self.total_webp_128,
            self.total_webp_64,
        ]
        .iter()
        .sum()
    }

    /// WebP storage as a percentage of original PNG storage.
    ///
    /// Returns 0.0 when no original sizes were recorded, avoiding a division
    /// by zero.
    pub fn compression_ratio(&self) -> f64 {
        match self.total_original_size {
            0 => 0.0,
            original => self.total_webp_size() as f64 / original as f64 * 100.0,
        }
    }
}

View File

@@ -8,6 +8,7 @@ mod fast_travel_database;
mod player_house_database;
mod trait_database;
mod shop_database;
mod minimap_database;
pub use item_database::ItemDatabase;
pub use npc_database::NpcDatabase;
@@ -19,3 +20,4 @@ pub use fast_travel_database::FastTravelDatabase;
pub use player_house_database::PlayerHouseDatabase;
pub use trait_database::TraitDatabase;
pub use shop_database::ShopDatabase;
pub use minimap_database::{MinimapDatabase, MinimapDatabaseError, StorageStats};

View File

@@ -0,0 +1,400 @@
use image::{DynamicImage, ImageError, Rgba, RgbaImage};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error;
/// Configuration for outline drawing on images with alpha channels.
#[derive(Debug, Clone)]
pub struct OutlineConfig {
    /// Outline color (RGBA).
    pub color: Rgba<u8>,
    /// Outline thickness in pixels.
    pub thickness: u32,
    /// Alpha threshold for edge detection (0-255); pixels with
    /// alpha >= threshold are considered solid.
    pub alpha_threshold: u8,
}

impl OutlineConfig {
    /// Config with the given color and thickness; the alpha threshold starts
    /// at 128 (half-opaque).
    pub fn new(color: Rgba<u8>, thickness: u32) -> Self {
        Self { color, thickness, alpha_threshold: 128 }
    }

    /// Solid white outline of the given thickness.
    pub fn white(thickness: u32) -> Self {
        Self::new(Rgba([255, 255, 255, 255]), thickness)
    }

    /// Solid black outline of the given thickness.
    pub fn black(thickness: u32) -> Self {
        Self::new(Rgba([0, 0, 0, 255]), thickness)
    }

    /// Builder-style override of the alpha threshold used for edge detection.
    pub fn with_alpha_threshold(mut self, threshold: u8) -> Self {
        self.alpha_threshold = threshold;
        self
    }
}

impl Default for OutlineConfig {
    /// Default outline: white, 1px thick, threshold 128.
    fn default() -> Self {
        Self::white(1)
    }
}
/// Errors produced while loading, validating, outlining, or encoding images.
#[derive(Debug, Error)]
pub enum ImageProcessingError {
    /// Underlying `image` crate failure while opening/decoding the source.
    #[error("Failed to load image: {0}")]
    ImageLoadError(#[from] ImageError),
    /// WebP encoding failure, carrying a descriptive message.
    #[error("WebP encoding failed: {0}")]
    WebPError(String),
    /// Source image did not match the dimensions the caller required.
    #[error("Invalid image dimensions: expected {expected_width}x{expected_height}, got {actual_width}x{actual_height}")]
    InvalidDimensions {
        expected_width: u32,
        expected_height: u32,
        actual_width: u32,
        actual_height: u32,
    },
    /// Filesystem failure while reading the source image.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    /// Caller asked for zero output resolutions.
    #[error("No resolutions specified")]
    NoResolutions,
}
/// Converts source images into WebP blobs at one or more resolutions,
/// optionally drawing an outline around transparent regions first.
pub struct ImageProcessor {
    quality: f32, // WebP quality (0.0-100.0)
}

impl ImageProcessor {
    /// Create new processor with specified WebP quality (0.0-100.0).
    pub fn new(quality: f32) -> Self {
        Self { quality }
    }

    /// Process image and generate WebP at multiple resolutions
    ///
    /// # Arguments
    /// * `image_path` - Path to the source image
    /// * `sizes` - Slice of desired output sizes (width/height in pixels)
    /// * `validate_dimensions` - Optional (width, height) to validate source image dimensions
    /// * `outline` - Optional outline configuration to add edges around transparent areas
    ///
    /// # Returns
    /// ProcessedImages containing WebP blobs for each requested size
    ///
    /// # Errors
    /// `NoResolutions` for an empty `sizes` slice, `InvalidDimensions` when
    /// validation is requested and fails, or a load/encode error.
    pub fn process_image<P: AsRef<Path>>(
        &self,
        image_path: P,
        sizes: &[u32],
        validate_dimensions: Option<(u32, u32)>,
        outline: Option<&OutlineConfig>,
    ) -> Result<ProcessedImages, ImageProcessingError> {
        if sizes.is_empty() {
            return Err(ImageProcessingError::NoResolutions);
        }
        let mut img = image::open(image_path.as_ref())?;
        // Reject images that do not match the caller's expected dimensions.
        if let Some((expected_width, expected_height)) = validate_dimensions {
            if img.width() != expected_width || img.height() != expected_height {
                return Err(ImageProcessingError::InvalidDimensions {
                    expected_width,
                    expected_height,
                    actual_width: img.width(),
                    actual_height: img.height(),
                });
            }
        }
        // Outline is applied once, at full resolution, before any downscaling.
        if let Some(outline_config) = outline {
            img = DynamicImage::ImageRgba8(self.apply_outline(img.to_rgba8(), outline_config));
        }
        // Encode a square WebP for each requested size.
        let mut images = HashMap::new();
        for &size in sizes {
            let webp_data = self.encode_webp(&img, size, size)?;
            images.insert(size, webp_data);
        }
        Ok(ProcessedImages { images })
    }

    /// Load PNG and generate 4 WebP sizes specifically for minimap tiles (512x512 source)
    ///
    /// Convenience method that generates 512, 256, 128, and 64 pixel versions
    pub fn process_minimap_png<P: AsRef<Path>>(
        &self,
        png_path: P,
    ) -> Result<ProcessedImages, ImageProcessingError> {
        self.process_image(png_path, &[512, 256, 128, 64], Some((512, 512)), None)
    }

    /// Apply outline effect to image based on alpha channel edges.
    ///
    /// An "edge" is a transparent pixel (alpha below the threshold) with at
    /// least one opaque neighbor; a square of `config.color` of side
    /// `2*thickness+1` is stamped around each edge pixel, writing only onto
    /// transparent pixels so the sprite itself is untouched.
    fn apply_outline(&self, img: RgbaImage, config: &OutlineConfig) -> RgbaImage {
        let (width, height) = img.dimensions();
        // Flat row-major mask (index = y * width + x); one allocation instead
        // of the Vec-of-Vecs per-column layout.
        let mut edge_mask = vec![false; (width as usize) * (height as usize)];
        for y in 0..height {
            for x in 0..width {
                let pixel = img.get_pixel(x, y);
                if pixel[3] >= config.alpha_threshold {
                    continue; // already solid; never outlined
                }
                if self.has_opaque_neighbor(&img, x, y, config.alpha_threshold) {
                    edge_mask[(y as usize) * (width as usize) + x as usize] = true;
                }
            }
        }
        let thickness = config.thickness as i32;
        let mut outlined = img.clone();
        for y in 0..height {
            for x in 0..width {
                if !edge_mask[(y as usize) * (width as usize) + x as usize] {
                    continue;
                }
                // Stamp the outline square around this edge pixel.
                for dy in -thickness..=thickness {
                    for dx in -thickness..=thickness {
                        let nx = x as i32 + dx;
                        let ny = y as i32 + dy;
                        if nx >= 0 && nx < width as i32 && ny >= 0 && ny < height as i32 {
                            let (nx, ny) = (nx as u32, ny as u32);
                            // Only draw outline on transparent pixels.
                            if outlined.get_pixel(nx, ny)[3] < config.alpha_threshold {
                                outlined.put_pixel(nx, ny, config.color);
                            }
                        }
                    }
                }
            }
        }
        outlined
    }

    /// Check if a pixel has any opaque neighbor (8-connectivity).
    fn has_opaque_neighbor(
        &self,
        img: &RgbaImage,
        x: u32,
        y: u32,
        alpha_threshold: u8,
    ) -> bool {
        let (width, height) = img.dimensions();
        for dy in -1..=1 {
            for dx in -1..=1 {
                if dx == 0 && dy == 0 {
                    continue; // Skip center pixel
                }
                let nx = x as i32 + dx;
                let ny = y as i32 + dy;
                // Neighbors outside the image count as transparent.
                if nx >= 0 && nx < width as i32 && ny >= 0 && ny < height as i32 {
                    let neighbor = img.get_pixel(nx as u32, ny as u32);
                    if neighbor[3] >= alpha_threshold {
                        return true;
                    }
                }
            }
        }
        false
    }

    /// Encode image to WebP at specified dimensions.
    fn encode_webp(
        &self,
        img: &DynamicImage,
        width: u32,
        height: u32,
    ) -> Result<Vec<u8>, ImageProcessingError> {
        // to_rgba8() already copies the pixel data, so avoid the extra
        // full-image DynamicImage clone the no-resize path used to pay.
        let rgba = if img.width() != width || img.height() != height {
            img.resize_exact(width, height, image::imageops::FilterType::Lanczos3)
                .to_rgba8()
        } else {
            img.to_rgba8()
        };
        let (w, h) = rgba.dimensions();
        let encoder = webp::Encoder::from_rgba(rgba.as_raw(), w, h);
        let webp_data = encoder.encode(self.quality);
        Ok(webp_data.to_vec())
    }
}

impl Default for ImageProcessor {
    /// Default processor encodes WebP at 85% quality.
    fn default() -> Self {
        Self::new(85.0)
    }
}
/// Container for processed WebP images at multiple resolutions.
#[derive(Debug)]
pub struct ProcessedImages {
    /// Map of size (in pixels) to WebP blob data.
    pub images: HashMap<u32, Vec<u8>>,
}

impl ProcessedImages {
    /// Return the WebP blob for `size`, if that resolution was generated.
    pub fn get(&self, size: u32) -> Option<&Vec<u8>> {
        self.images.get(&size)
    }

    /// Combined byte count of every stored WebP blob.
    pub fn total_size(&self) -> usize {
        let mut total = 0usize;
        for blob in self.images.values() {
            total += blob.len();
        }
        total
    }

    /// All generated sizes, in ascending order.
    pub fn sizes(&self) -> Vec<u32> {
        let mut out = Vec::with_capacity(self.images.len());
        out.extend(self.images.keys().copied());
        out.sort_unstable();
        out
    }

    /// Number of resolutions stored.
    pub fn len(&self) -> usize {
        self.images.len()
    }

    /// True when no resolutions are stored.
    pub fn is_empty(&self) -> bool {
        self.images.len() == 0
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Constructors must store the exact quality value passed in.
    #[test]
    fn test_processor_creation() {
        let processor = ImageProcessor::default();
        assert_eq!(processor.quality, 85.0);
        let custom = ImageProcessor::new(90.0);
        assert_eq!(custom.quality, 90.0);
    }

    // Accessor behavior on a hand-built ProcessedImages map.
    #[test]
    fn test_processed_images() {
        let mut images = HashMap::new();
        images.insert(512, vec![1, 2, 3]);
        images.insert(256, vec![4, 5]);
        let processed = ProcessedImages { images };
        assert_eq!(processed.len(), 2);
        assert_eq!(processed.total_size(), 5);
        assert_eq!(processed.get(512), Some(&vec![1, 2, 3]));
        assert_eq!(processed.get(128), None);
        let sizes = processed.sizes();
        assert_eq!(sizes, vec![256, 512]); // sizes() returns ascending order
    }

    // Default config: white, 1px, threshold 128.
    #[test]
    fn test_outline_config_default() {
        let config = OutlineConfig::default();
        assert_eq!(config.thickness, 1);
        assert_eq!(config.color, Rgba([255, 255, 255, 255])); // White
        assert_eq!(config.alpha_threshold, 128);
    }

    // new() keeps the default threshold while taking color/thickness.
    #[test]
    fn test_outline_config_custom() {
        let red = Rgba([255, 0, 0, 255]);
        let config = OutlineConfig::new(red, 2);
        assert_eq!(config.thickness, 2);
        assert_eq!(config.color, red);
        assert_eq!(config.alpha_threshold, 128);
    }

    // white()/black() shortcuts and the threshold builder.
    #[test]
    fn test_outline_config_builders() {
        let white = OutlineConfig::white(3);
        assert_eq!(white.color, Rgba([255, 255, 255, 255]));
        assert_eq!(white.thickness, 3);
        let black = OutlineConfig::black(2).with_alpha_threshold(200);
        assert_eq!(black.color, Rgba([0, 0, 0, 255]));
        assert_eq!(black.thickness, 2);
        assert_eq!(black.alpha_threshold, 200);
    }

    // has_opaque_neighbor: transparent-next-to-opaque, opaque pixel, and
    // fully transparent image cases.
    #[test]
    fn test_outline_edge_detection() {
        let processor = ImageProcessor::default();
        // Create a simple 3x3 image with a transparent pixel in the center
        let mut img = RgbaImage::new(3, 3);
        // Fill with opaque white
        for y in 0..3 {
            for x in 0..3 {
                img.put_pixel(x, y, Rgba([255, 255, 255, 255]));
            }
        }
        // Make center transparent
        img.put_pixel(1, 1, Rgba([0, 0, 0, 0]));
        // Test that center pixel has opaque neighbors
        assert!(processor.has_opaque_neighbor(&img, 1, 1, 128));
        // Test a fully opaque pixel - should not have any transparent neighbors
        // but the function checks if a pixel has opaque neighbors, not transparent ones
        assert!(processor.has_opaque_neighbor(&img, 0, 0, 128));
        // Create a new image that's fully transparent
        let mut transparent_img = RgbaImage::new(3, 3);
        for y in 0..3 {
            for x in 0..3 {
                transparent_img.put_pixel(x, y, Rgba([0, 0, 0, 0]));
            }
        }
        // A transparent pixel with all transparent neighbors should return false
        assert!(!processor.has_opaque_neighbor(&transparent_img, 1, 1, 128));
    }
}

View File

@@ -51,8 +51,10 @@
pub mod types;
pub mod databases;
pub mod schema;
mod xml_parser;
mod item_loader;
mod image_processor;
pub use databases::{
ItemDatabase,
@@ -65,6 +67,9 @@ pub use databases::{
PlayerHouseDatabase,
TraitDatabase,
ShopDatabase,
MinimapDatabase,
MinimapDatabaseError,
StorageStats,
};
pub use types::{
// Items
@@ -109,5 +114,10 @@ pub use types::{
TraitTrainer,
Shop,
ShopItem,
// Minimap
MinimapTile,
MinimapTileRecord,
NewMinimapTile,
};
pub use xml_parser::XmlParseError;
pub use image_processor::{ImageProcessor, ImageProcessingError, ProcessedImages, OutlineConfig};

View File

@@ -6,11 +6,13 @@
//! 3. Extracting typeId and transform positions
//! 4. Writing resource data to an output file
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase, InteractableResource};
use cursebreaker_parser::{ItemDatabase, NpcDatabase, QuestDatabase, HarvestableDatabase, LootDatabase, InteractableResource, MinimapDatabase};
use unity_parser::UnityProject;
use std::path::Path;
use unity_parser::log::DedupLogger;
use log::{info, error, LevelFilter};
use log::{info, error, warn, LevelFilter};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -45,6 +47,15 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let loot_db = LootDatabase::load_from_xml(loot_path)?;
info!("✅ Loaded {} loot tables", loot_db.len());
// Save to SQLite database
info!("\n💾 Saving game data to SQLite database...");
let mut conn = SqliteConnection::establish("cursebreaker.db")?;
match item_db.save_to_db(&mut conn) {
Ok(count) => info!("✅ Saved {} items to database", count),
Err(e) => warn!("⚠️ Failed to save items: {}", e),
}
// Print statistics
info!("\n📊 Game Data Statistics:");
info!(" Items:");
@@ -117,5 +128,32 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
log::logger().flush();
// Process minimap tiles
info!("\n🗺️ Processing minimap tiles...");
let minimap_db = MinimapDatabase::new("cursebreaker.db".to_string());
let minimap_path = "/home/connor/repos/CBAssets/Data/Textures/MinimapSquares";
match minimap_db.load_from_directory(minimap_path) {
Ok(count) => {
info!("✅ Processed {} minimap tiles", count);
if let Ok(stats) = minimap_db.get_storage_stats() {
info!(" Storage Statistics:");
info!(" • Original PNG total: {} MB", stats.total_original_size / 1_048_576);
info!(" • WebP total: {} MB", stats.total_webp_size() / 1_048_576);
info!(" • Compression ratio: {:.2}%", stats.compression_ratio());
}
if let Ok(bounds) = minimap_db.get_map_bounds() {
info!(" Map Bounds:");
info!(" • Min (x,y): {:?}", bounds.0);
info!(" • Max (x,y): {:?}", bounds.1);
}
}
Err(e) => {
error!("Failed to process minimap tiles: {}", e);
}
}
Ok(())
}

View File

@@ -0,0 +1,122 @@
// @generated automatically by Diesel CLI.
//
// NOTE(review): this file is regenerated by `diesel migration run` /
// `diesel print-schema`; hand edits (including these comments) will be
// clobbered on the next regeneration.
//
// Primary-key columns appear as `Nullable<…>` — presumably because the
// migrations declare them without NOT NULL, which Diesel reflects verbatim
// (TODO confirm against the migration SQL). Most tables follow the pattern
// (id, name, data) where `data` is a TEXT column holding the record's
// serialized form — presumably JSON, given serde_json in the dependencies.

// Fast-travel points, with the map each one belongs to.
diesel::table! {
    fast_travel_locations (id) {
        id -> Nullable<Integer>,
        name -> Text,
        map_name -> Text,
        data -> Text,
    }
}

// Harvestable resource definitions.
diesel::table! {
    harvestables (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

// Item definitions.
diesel::table! {
    items (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

// Loot tables, keyed by a textual table id and optionally tied to an NPC.
diesel::table! {
    loot_tables (table_id) {
        table_id -> Nullable<Text>,
        npc_id -> Nullable<Text>,
        data -> Text,
    }
}

// Maps, keyed by their Unity scene id.
diesel::table! {
    maps (scene_id) {
        scene_id -> Nullable<Text>,
        name -> Text,
        data -> Text,
    }
}

// One row per minimap tile at grid position (x, y): original-PNG metadata,
// WebP re-encodings at four resolutions, and the byte length of each blob.
diesel::table! {
    minimap_tiles (id) {
        id -> Nullable<Integer>,
        x -> Integer,
        y -> Integer,
        original_width -> Integer,
        original_height -> Integer,
        original_file_size -> Nullable<Integer>,
        webp_512 -> Binary,
        webp_256 -> Binary,
        webp_128 -> Binary,
        webp_64 -> Binary,
        webp_512_size -> Integer,
        webp_256_size -> Integer,
        webp_128_size -> Integer,
        webp_64_size -> Integer,
        processed_at -> Timestamp,
        source_path -> Text,
    }
}

// NPC definitions.
diesel::table! {
    npcs (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

// Player houses; `map_id` presumably references a map record — confirm.
diesel::table! {
    player_houses (id) {
        id -> Nullable<Integer>,
        name -> Text,
        map_id -> Integer,
        data -> Text,
    }
}

// Quest definitions.
diesel::table! {
    quests (id) {
        id -> Nullable<Integer>,
        name -> Text,
        data -> Text,
    }
}

// Shops; `unique_items` / `item_count` look like denormalized counters —
// confirm they are derived from the inventory in `data`.
diesel::table! {
    shops (id) {
        id -> Nullable<Integer>,
        name -> Text,
        unique_items -> Integer,
        item_count -> Integer,
        data -> Text,
    }
}

// Traits, optionally linked to the trainer NPC that teaches them.
diesel::table! {
    traits (id) {
        id -> Nullable<Integer>,
        name -> Text,
        description -> Nullable<Text>,
        trainer_id -> Nullable<Integer>,
        data -> Text,
    }
}

// Allow these tables to be joined together in a single Diesel query.
diesel::allow_tables_to_appear_in_same_query!(
    fast_travel_locations,
    harvestables,
    items,
    loot_tables,
    maps,
    minimap_tiles,
    npcs,
    player_houses,
    quests,
    shops,
    traits,
);

View File

@@ -0,0 +1,45 @@
use diesel::prelude::*;
use crate::schema::minimap_tiles;
/// Diesel queryable model for `minimap_tiles` (used by SELECT queries).
///
/// Field order must match the column order declared in `schema.rs`, because
/// `Queryable` maps columns to fields positionally.
#[derive(Queryable, Selectable, Debug, Clone)]
#[diesel(table_name = minimap_tiles)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct MinimapTileRecord {
    // Rowid primary key (nullable in the generated schema).
    pub id: Option<i32>,
    // Tile grid coordinates (from the source filename).
    pub x: i32,
    pub y: i32,
    // Dimensions and byte size of the original source PNG.
    pub original_width: i32,
    pub original_height: i32,
    pub original_file_size: Option<i32>,
    // Encoded WebP blobs at each output resolution.
    pub webp_512: Vec<u8>,
    pub webp_256: Vec<u8>,
    pub webp_128: Vec<u8>,
    pub webp_64: Vec<u8>,
    // Byte lengths of the blobs above, stored as their own columns.
    pub webp_512_size: i32,
    pub webp_256_size: i32,
    pub webp_128_size: i32,
    pub webp_64_size: i32,
    // SQLite TIMESTAMP surfaces as a String through Diesel's sqlite backend.
    pub processed_at: String,
    // Path of the source PNG on disk.
    pub source_path: String,
}
/// Diesel insertable model for `minimap_tiles` (used by INSERT queries).
///
/// Borrows the WebP blobs (`&'a [u8]`) so building an insert never copies
/// the image data. `id` is omitted (assigned by SQLite) and `processed_at`
/// is omitted so the column's database-side value applies — TODO confirm the
/// migration declares a DEFAULT for it.
#[derive(Insertable, Debug)]
#[diesel(table_name = minimap_tiles)]
pub struct NewMinimapTile<'a> {
    // Tile grid coordinates (from the source filename).
    pub x: i32,
    pub y: i32,
    // Dimensions and byte size of the original source PNG.
    pub original_width: i32,
    pub original_height: i32,
    pub original_file_size: Option<i32>,
    // Encoded WebP blobs at each output resolution (borrowed, not copied).
    pub webp_512: &'a [u8],
    pub webp_256: &'a [u8],
    pub webp_128: &'a [u8],
    pub webp_64: &'a [u8],
    // Byte lengths of the blobs above, stored for storage-stats queries.
    pub webp_512_size: i32,
    pub webp_256_size: i32,
    pub webp_128_size: i32,
    pub webp_64_size: i32,
    // Path of the source PNG on disk.
    pub source_path: &'a str,
}

View File

@@ -0,0 +1,61 @@
use serde::{Deserialize, Serialize};
/// Represents a single minimap tile with multi-resolution WebP data.
///
/// The WebP blobs carry `#[serde(skip)]` — presumably to keep serde output
/// small since the binary data is persisted in SQLite — so a deserialized
/// tile comes back with empty buffers and `is_processed() == false`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MinimapTile {
    /// X coordinate from filename
    pub x: i32,
    /// Y coordinate from filename
    pub y: i32,
    /// Original source-image dimensions in pixels.
    pub original_width: i32,
    pub original_height: i32,
    /// Source file path
    pub source_path: String,
    /// WebP blob at 512x512 (empty until processed).
    #[serde(skip)] // Skip serialization for binary data
    pub webp_512: Vec<u8>,
    /// WebP blob at 256x256 (empty until processed).
    #[serde(skip)]
    pub webp_256: Vec<u8>,
    /// WebP blob at 128x128 (empty until processed).
    #[serde(skip)]
    pub webp_128: Vec<u8>,
    /// WebP blob at 64x64 (empty until processed).
    #[serde(skip)]
    pub webp_64: Vec<u8>,
}
impl MinimapTile {
    /// Build an unprocessed tile for grid position (`x`, `y`).
    ///
    /// Dimensions default to 512x512 and every WebP buffer starts empty;
    /// the encoding pipeline fills them in later.
    pub fn new(x: i32, y: i32, source_path: String) -> Self {
        Self {
            source_path,
            x,
            y,
            original_width: 512,
            original_height: 512,
            webp_512: Vec::new(),
            webp_256: Vec::new(),
            webp_128: Vec::new(),
            webp_64: Vec::new(),
        }
    }

    /// Combined byte length of all four stored WebP blobs.
    pub fn total_webp_size(&self) -> usize {
        [&self.webp_512, &self.webp_256, &self.webp_128, &self.webp_64]
            .iter()
            .map(|blob| blob.len())
            .sum()
    }

    /// Whether the tile has been through the encoder (512px blob present).
    pub fn is_processed(&self) -> bool {
        !self.webp_512.is_empty()
    }
}

View File

@@ -8,6 +8,8 @@ mod fast_travel;
mod player_house;
mod r#trait;
mod shop;
mod minimap_tile;
mod minimap_models;
pub use item::{
// Main types
@@ -40,3 +42,5 @@ pub use fast_travel::{FastTravelLocation, FastTravelType};
pub use player_house::PlayerHouse;
pub use r#trait::{Trait, TraitTrainer};
pub use shop::{Shop, ShopItem};
pub use minimap_tile::MinimapTile;
pub use minimap_models::{MinimapTileRecord, NewMinimapTile};

View File

@@ -0,0 +1,150 @@
/// MapIcon component from Cursebreaker
///
/// C# definition from MapIcon.cs:
/// ```csharp
/// public enum MapIconType
/// {
/// npc,
/// aggressiveNpc,
/// ally,
/// loot,
/// self,
/// player,
/// buildingAlly,
/// buildingEnemy,
/// path,
/// resource,
/// questmarker,
/// workbench,
/// door,
/// tree,
/// fish,
/// custom,
/// mapText,
/// worldMapText,
/// fastTravel,
/// fightingNpc,
/// worldMapIcon,
/// playerHouse,
/// task
/// }
///
/// public class MapIcon : MonoBehaviour
/// {
/// public UI_Minimap.MapIconType iconType = UI_Minimap.MapIconType.custom;
/// public int iconSize = 24;
/// public string icon = "MinimapIcons/";
/// public string text;
/// public int fontSize = 24;
/// public string hoverText;
/// }
/// ```
use unity_parser::{UnityComponent, ComponentContext, EcsInsertable};
use serde_yaml::Mapping;
/// Map icon categories mirroring Cursebreaker's `UI_Minimap.MapIconType`
/// C# enum (see the module header). Discriminants match the C# ordering so
/// values round-trip through serialized Unity YAML unchanged.
///
/// `Custom` is the default, matching the C# field initializer
/// `iconType = UI_Minimap.MapIconType.custom`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum MapIconType {
    Npc = 0,
    AggressiveNpc = 1,
    Ally = 2,
    Loot = 3,
    Self_ = 4,
    Player = 5,
    BuildingAlly = 6,
    BuildingEnemy = 7,
    Path = 8,
    Resource = 9,
    Questmarker = 10,
    Workbench = 11,
    Door = 12,
    Tree = 13,
    Fish = 14,
    #[default]
    Custom = 15,
    MapText = 16,
    WorldMapText = 17,
    FastTravel = 18,
    FightingNpc = 19,
    WorldMapIcon = 20,
    PlayerHouse = 21,
    Task = 22,
}

impl MapIconType {
    /// Convert a raw YAML integer into a `MapIconType`.
    ///
    /// Unknown or out-of-range values fall back to `Custom`, the same
    /// default the C# component uses.
    pub fn from_i64(value: i64) -> Self {
        match value {
            0 => MapIconType::Npc,
            1 => MapIconType::AggressiveNpc,
            2 => MapIconType::Ally,
            3 => MapIconType::Loot,
            4 => MapIconType::Self_,
            5 => MapIconType::Player,
            6 => MapIconType::BuildingAlly,
            7 => MapIconType::BuildingEnemy,
            8 => MapIconType::Path,
            9 => MapIconType::Resource,
            10 => MapIconType::Questmarker,
            11 => MapIconType::Workbench,
            12 => MapIconType::Door,
            13 => MapIconType::Tree,
            14 => MapIconType::Fish,
            15 => MapIconType::Custom,
            16 => MapIconType::MapText,
            17 => MapIconType::WorldMapText,
            18 => MapIconType::FastTravel,
            19 => MapIconType::FightingNpc,
            20 => MapIconType::WorldMapIcon,
            21 => MapIconType::PlayerHouse,
            22 => MapIconType::Task,
            _ => MapIconType::Custom,
        }
    }
}

// Standard-trait conversion alongside the inherent constructor, so callers
// can write `value.into()` / generic `T: From<i64>` code.
impl From<i64> for MapIconType {
    fn from(value: i64) -> Self {
        MapIconType::from_i64(value)
    }
}
/// Parsed `MapIcon` MonoBehaviour (see the C# definition in the file header).
#[derive(Debug, Clone)]
pub struct MapIcon {
    /// Icon category; C# default is `custom`.
    pub icon_type: MapIconType,
    /// Icon size in pixels (C# default: 24).
    pub icon_size: i64,
    /// Icon resource path (C# default: "MinimapIcons/").
    pub icon: String,
    /// Label text shown on the map.
    pub text: String,
    /// Font size for `text` (C# default: 24).
    pub font_size: i64,
    /// Text shown when hovering the icon.
    pub hover_text: String,
}
impl UnityComponent for MapIcon {
    /// Build a `MapIcon` from its serialized Unity YAML mapping.
    ///
    /// Every field is optional in the YAML; missing ones fall back to the
    /// C# defaults declared in MapIcon.cs (custom icon type, 24px icon and
    /// font sizes, "MinimapIcons/" path prefix, empty strings).
    fn parse(yaml: &Mapping, _ctx: &ComponentContext) -> Option<Self> {
        use unity_parser::yaml_helpers::{get_i64, get_string};

        // 15 == MapIconType::Custom, the C# field initializer.
        let raw_icon_type = get_i64(yaml, "iconType").unwrap_or(15);
        let icon_path = get_string(yaml, "icon")
            .unwrap_or_else(|| "MinimapIcons/".to_string());

        Some(Self {
            icon_type: MapIconType::from_i64(raw_icon_type),
            icon_size: get_i64(yaml, "iconSize").unwrap_or(24),
            icon: icon_path,
            text: get_string(yaml, "text").unwrap_or_default(),
            font_size: get_i64(yaml, "fontSize").unwrap_or(24),
            hover_text: get_string(yaml, "hoverText").unwrap_or_default(),
        })
    }
}
impl EcsInsertable for MapIcon {
    /// Attach this parsed component to `entity` in the ECS world.
    fn insert_into_world(self, world: &mut sparsey::World, entity: sparsey::Entity) {
        // One-element tuple: sparsey's `insert` takes a tuple of components.
        world.insert(entity, (self,));
    }
}
// Register MapIcon with unity_parser's component registry via `inventory`,
// so YAML documents carrying type_id 114 and class name "MapIcon" are
// parsed and inserted into the ECS world automatically.
inventory::submit! {
    unity_parser::ComponentRegistration {
        type_id: 114,
        class_name: "MapIcon",
        // Parse the YAML mapping and insert the component in one step.
        parse_and_insert: |yaml, ctx, world, entity| {
            <MapIcon as EcsInsertable>::parse_and_insert(yaml, ctx, world, entity)
        },
        // Register the component type with the ECS world builder up front.
        register: |builder| builder.register::<MapIcon>(),
    }
}

View File

@@ -2,10 +2,12 @@ mod interactable_resource;
mod interactable_teleporter;
mod interactable_workbench;
mod loot_spawner;
mod map_icon;
mod map_name_changer;
pub use interactable_resource::InteractableResource;
pub use interactable_teleporter::InteractableTeleporter;
pub use interactable_workbench::InteractableWorkbench;
pub use loot_spawner::LootSpawner;
pub use map_icon::{MapIcon, MapIconType};
pub use map_name_changer::MapNameChanger;