Compare commits
3 Commits
9bca794ce1
...
17ea08caac
| Author | SHA1 | Date |
|---|---|---|
| | 17ea08caac | |
| | 96b1e172ee | |
| | 16a5973a19 | |
@@ -8,7 +8,9 @@
|
||||
"Bash(cargo tree:*)",
|
||||
"WebFetch(domain:docs.rs)",
|
||||
"Bash(findstr:*)",
|
||||
"Bash(cargo check:*)"
|
||||
"Bash(cargo check:*)",
|
||||
"Bash(ls:*)",
|
||||
"Bash(find:*)"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -16,3 +16,6 @@ target/
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
# Test data (cloned Unity projects for integration tests)
|
||||
test_data/
|
||||
|
||||
26
Cargo.lock
generated
26
Cargo.lock
generated
@@ -27,8 +27,10 @@ checksum = "41e67cd8309bbd06cd603a9e693a784ac2e5d1e955f11286e355089fcab3047c"
|
||||
name = "cursebreaker-parser"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"cursebreaker-parser-macros",
|
||||
"glam",
|
||||
"indexmap",
|
||||
"inventory",
|
||||
"lru",
|
||||
"once_cell",
|
||||
"pretty_assertions",
|
||||
@@ -40,6 +42,15 @@ dependencies = [
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cursebreaker-parser-macros"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "diff"
|
||||
version = "0.1.13"
|
||||
@@ -96,6 +107,15 @@ dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inventory"
|
||||
version = "0.3.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e"
|
||||
dependencies = [
|
||||
"rustversion",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.17"
|
||||
@@ -186,6 +206,12 @@ version = "2.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.22"
|
||||
|
||||
54
Cargo.toml
54
Cargo.toml
@@ -1,54 +1,10 @@
|
||||
[package]
|
||||
name = "cursebreaker-parser"
|
||||
[workspace]
|
||||
members = ["cursebreaker-parser", "cursebreaker-parser-macros"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
authors = ["Your Name <your.email@example.com>"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "A high-performance Rust library for parsing Unity project files (.unity, .prefab, .asset)"
|
||||
repository = "https://github.com/yourusername/cursebreaker-parser-rust"
|
||||
keywords = ["unity", "parser", "yaml", "gamedev"]
|
||||
categories = ["parser-implementations", "game-development"]
|
||||
rust-version = "1.70"
|
||||
|
||||
[lib]
|
||||
name = "cursebreaker_parser"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
# YAML parsing
|
||||
serde_yaml = "0.9"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
# Error handling
|
||||
thiserror = "1.0"
|
||||
|
||||
# Ordered maps for properties
|
||||
indexmap = { version = "2.1", features = ["serde"] }
|
||||
|
||||
# Regex for parsing
|
||||
regex = "1.10"
|
||||
|
||||
# Math types (Vector2, Vector3, Quaternion, etc.)
|
||||
glam = { version = "0.29", features = ["serde"] }
|
||||
|
||||
# ECS (Entity Component System)
|
||||
sparsey = "0.13"
|
||||
|
||||
# LRU cache for reference resolution
|
||||
lru = "0.12"
|
||||
|
||||
# Directory traversal for loading projects
|
||||
walkdir = "2.4"
|
||||
|
||||
# Lazy static initialization for type registry
|
||||
once_cell = "1.19"
|
||||
|
||||
[dev-dependencies]
|
||||
# Testing utilities
|
||||
pretty_assertions = "1.4"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
# Future: parallel processing support
|
||||
parallel = []
|
||||
|
||||
192
DESIGN.md
192
DESIGN.md
@@ -1,192 +0,0 @@
|
||||
# Cursebreaker Unity Parser - Design Document
|
||||
|
||||
## Project Overview
|
||||
|
||||
A high-performance Rust library for parsing and querying Unity project files (.unity scenes, .prefab prefabs, and .asset ScriptableObjects).
|
||||
|
||||
## Goals
|
||||
|
||||
1. **Parse Unity YAML Format**: Handle Unity's YAML 1.1 format with custom tags (`!u!`) and file ID references
|
||||
2. **Extract Structure**: Parse GameObjects, Components, and their properties into queryable data structures
|
||||
3. **High Performance**: Optimized for large Unity projects with minimal memory footprint
|
||||
4. **Type Safety**: Strong typing for Unity's component system
|
||||
5. **Library-First**: Designed as a reusable SDK for other Rust tools
|
||||
|
||||
## Target File Formats
|
||||
|
||||
- `.unity` - Unity scene files
|
||||
- `.prefab` - Unity prefab files
|
||||
- `.asset` - Unity ScriptableObject and other asset files
|
||||
|
||||
All three formats share the same underlying YAML structure with Unity-specific extensions.
|
||||
|
||||
## Unity File Format Structure
|
||||
|
||||
Unity files use YAML 1.1 with special conventions:
|
||||
|
||||
```yaml
|
||||
%YAML 1.1
|
||||
%TAG !u! tag:unity3d.com,2011:
|
||||
--- !u!1 &1866116814460599870
|
||||
GameObject:
|
||||
m_ObjectHideFlags: 0
|
||||
m_Component:
|
||||
- component: {fileID: 8151827567463220614}
|
||||
- component: {fileID: 8755205353704683373}
|
||||
m_Name: CardGrabber
|
||||
--- !u!224 &8151827567463220614
|
||||
RectTransform:
|
||||
m_GameObject: {fileID: 1866116814460599870}
|
||||
m_LocalPosition: {x: 0, y: 0, z: 0}
|
||||
```
|
||||
|
||||
### Key Concepts
|
||||
|
||||
1. **Documents**: Each `---` starts a new YAML document representing a Unity object
|
||||
2. **Type Tags**: `!u!N` indicates Unity type (e.g., `!u!1` = GameObject, `!u!224` = RectTransform)
|
||||
3. **Anchors**: `&ID` defines a local file ID for the object
|
||||
4. **File References**: `{fileID: N}` references objects by their ID (local or external)
|
||||
5. **GUID References**: `{guid: ...}` references external assets
|
||||
6. **Properties**: All Unity objects have serialized fields (usually prefixed with `m_`)
|
||||
|
||||
## Architecture
|
||||
|
||||
### Core Components
|
||||
|
||||
```
|
||||
cursebreaker-parser/
|
||||
├── src/
|
||||
│ ├── lib.rs # Public API exports
|
||||
│ ├── parser/ # YAML parsing layer
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── yaml.rs # YAML document parser
|
||||
│ │ ├── unity_tag.rs # Unity type tag handler (!u!)
|
||||
│ │ └── reference.rs # FileID/GUID reference parser
|
||||
│ ├── model/ # Data model
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── document.rs # UnityDocument struct
|
||||
│ │ ├── object.rs # UnityObject base
|
||||
│ │ ├── gameobject.rs # GameObject type
|
||||
│ │ ├── component.rs # Component types
|
||||
│ │ └── property.rs # Property value types
|
||||
│ ├── types/ # Unity type system
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── type_id.rs # Unity type ID -> name mapping
|
||||
│ │ └── component_types.rs
|
||||
│ ├── query/ # Query API
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── project.rs # UnityProject (multi-file)
|
||||
│ │ ├── find.rs # Find objects/components
|
||||
│ │ └── filter.rs # Filter/search utilities
|
||||
│ └── error.rs # Error types
|
||||
```
|
||||
|
||||
### Data Model
|
||||
|
||||
```rust
|
||||
// Core types
|
||||
pub struct UnityFile {
|
||||
pub path: PathBuf,
|
||||
pub documents: Vec<UnityDocument>,
|
||||
}
|
||||
|
||||
pub struct UnityDocument {
|
||||
pub type_id: u32, // From !u!N
|
||||
pub file_id: i64, // From &ID
|
||||
pub class_name: String, // E.g., "GameObject"
|
||||
pub properties: PropertyMap,
|
||||
}
|
||||
|
||||
pub struct UnityProject {
|
||||
pub files: HashMap<PathBuf, UnityFile>,
|
||||
// Reference resolution cache
|
||||
}
|
||||
|
||||
// Property values (simplified)
|
||||
pub enum PropertyValue {
|
||||
Integer(i64),
|
||||
Float(f64),
|
||||
String(String),
|
||||
Boolean(bool),
|
||||
FileRef { file_id: i64, guid: Option<String> },
|
||||
Vector3 { x: f64, y: f64, z: f64 },
|
||||
Color { r: f64, g: f64, b: f64, a: f64 },
|
||||
Array(Vec<PropertyValue>),
|
||||
Object(PropertyMap),
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
1. **Streaming Parser**: Parse YAML incrementally rather than loading entire file into memory
|
||||
2. **Lazy Loading**: Only parse files when accessed
|
||||
3. **Reference Caching**: Cache resolved references to avoid repeated lookups
|
||||
4. **Zero-Copy Where Possible**: Use string slices and borrowed data where feasible
|
||||
5. **Parallel Parsing**: Support parsing multiple files concurrently
|
||||
|
||||
## Dependencies
|
||||
|
||||
- `yaml-rust2` or `serde_yaml` - YAML parsing (evaluate both)
|
||||
- `serde` - Serialization/deserialization
|
||||
- `rayon` - Parallel processing (optional, for multi-file parsing)
|
||||
- `thiserror` - Error handling
|
||||
- `indexmap` - Ordered maps for properties
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
1. **Unit Tests**: Each parser component tested independently
|
||||
2. **Integration Tests**: Full file parsing with real Unity files
|
||||
3. **Sample Data**: Use PiratePanic project as test corpus
|
||||
4. **Benchmarks**: Performance tests on large Unity projects
|
||||
5. **Fuzzing**: Fuzz testing for parser robustness (future)
|
||||
|
||||
## API Design Goals
|
||||
|
||||
### Simple File Parsing
|
||||
```rust
|
||||
let file = UnityFile::from_path("Scene.unity")?;
|
||||
for doc in &file.documents {
|
||||
println!("{}: {}", doc.class_name, doc.file_id);
|
||||
}
|
||||
```
|
||||
|
||||
### Query API
|
||||
```rust
|
||||
let project = UnityProject::from_directory("Assets/")?;
|
||||
|
||||
// Find all GameObjects
|
||||
let objects = project.find_all_by_type("GameObject");
|
||||
|
||||
// Find by name
|
||||
let player = project.find_by_name("Player")?;
|
||||
|
||||
// Get components
|
||||
let transform = player.get_component("Transform")?;
|
||||
let position = transform.get_vector3("m_LocalPosition")?;
|
||||
```
|
||||
|
||||
### Reference Resolution
|
||||
```rust
|
||||
// Follow references automatically
|
||||
let gameobject = project.get_object(file_id)?;
|
||||
let transform_ref = gameobject.get_file_ref("m_Component[0].component")?;
|
||||
let transform = project.resolve_reference(transform_ref)?;
|
||||
```
|
||||
|
||||
## Future Enhancements (Out of Scope for v1)
|
||||
|
||||
- Unity YAML serialization (writing files)
|
||||
- C# script parsing
|
||||
- Asset dependency graphs
|
||||
- Unity version detection and compatibility
|
||||
- Binary .unity format support (older Unity versions)
|
||||
- Meta file parsing (.meta files)
|
||||
|
||||
## Success Criteria
|
||||
|
||||
1. Successfully parse all files in PiratePanic sample project
|
||||
2. Extract all GameObjects and Components with properties
|
||||
3. Resolve all internal file references correctly
|
||||
4. Parse large scene files (>10MB) in <100ms
|
||||
5. Memory usage scales linearly with file size
|
||||
6. Clean, documented public API
|
||||
402
ROADMAP.md
402
ROADMAP.md
@@ -1,402 +0,0 @@
|
||||
# Cursebreaker Unity Parser - Implementation Roadmap
|
||||
|
||||
## Overview
|
||||
|
||||
This roadmap breaks down the development into 5 phases, each building on the previous. Each phase has clear deliverables and success criteria.
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Project Foundation & YAML Parsing ✅ COMPLETED
|
||||
|
||||
**Goal**: Set up project structure and implement basic YAML parsing for Unity files
|
||||
|
||||
### Tasks
|
||||
|
||||
1. **Project Setup**
|
||||
- [x] Initialize Cargo project with workspace structure
|
||||
- [x] Add core dependencies (yaml parser, serde, thiserror)
|
||||
- [x] Set up basic module structure (lib.rs, parser/, model/, error.rs)
|
||||
- [x] Configure Cargo.toml with metadata and feature flags
|
||||
|
||||
2. **Error Handling**
|
||||
- [x] Define error types (ParseError, ReferenceError, etc.)
|
||||
- [x] Implement Display and Error traits
|
||||
- [x] Set up Result type aliases
|
||||
|
||||
3. **YAML Document Parser**
|
||||
- [x] Implement Unity YAML document reader
|
||||
- [x] Parse YAML 1.1 header and Unity tags
|
||||
- [x] Split multi-document YAML files into individual documents
|
||||
- [x] Handle `%TAG !u! tag:unity3d.com,2011:` directive
|
||||
|
||||
4. **Unity Tag Parser**
|
||||
- [x] Parse Unity type tags (`!u!1`, `!u!224`, etc.)
|
||||
- [x] Extract type ID from tag
|
||||
- [x] Handle anchor IDs (`&12345`)
|
||||
|
||||
5. **Basic Testing**
|
||||
- [x] Set up test infrastructure
|
||||
- [x] Create minimal test YAML files
|
||||
- [x] Unit tests for YAML splitting and tag parsing
|
||||
- [x] Integration test: parse simple Unity file
|
||||
|
||||
### Deliverables
|
||||
- [x] ✓ Working Cargo project structure
|
||||
- [x] ✓ YAML documents successfully split from Unity files
|
||||
- [x] ✓ Unity type IDs and file IDs extracted
|
||||
- [x] ✓ Basic error handling in place
|
||||
- [x] ✓ Tests passing
|
||||
|
||||
### Success Criteria
|
||||
- [x] Can read `Scene01MainMenu.unity` and split into individual documents
|
||||
- [x] Each document has correct type ID and file ID
|
||||
- [x] No panics on malformed input (returns errors)
|
||||
|
||||
**Implementation Notes:**
|
||||
- Created comprehensive error handling with thiserror
|
||||
- Implemented regex-based Unity tag parser with caching
|
||||
- Built YAML document splitter that handles multi-document files
|
||||
- Created model with UnityFile and UnityDocument structs
|
||||
- Added 23 passing tests (12 unit, 7 integration, 4 doc tests)
|
||||
- Successfully parses real Unity files from PiratePanic sample project
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Data Model & Property Parsing
|
||||
|
||||
**Goal**: Build the core data model and parse Unity properties into structured data
|
||||
|
||||
### Tasks
|
||||
|
||||
1. **Core Data Structures**
|
||||
- [x] Implement `UnityDocument` struct
|
||||
- [x] Implement `UnityFile` struct
|
||||
- [x] Create property storage (PropertyMap using IndexMap)
|
||||
- [x] Define FileID and LocalID types
|
||||
|
||||
2. **Property Value Types**
|
||||
- [x] Implement `PropertyValue` enum (Integer, Float, String, Boolean, etc.)
|
||||
- [x] Add Vector3, Color, Quaternion value types
|
||||
- [x] Add Array and nested Object support
|
||||
- [x] Implement Debug and Display for PropertyValue
|
||||
|
||||
3. **Property Parser**
|
||||
- [x] Parse YAML mappings into PropertyMap
|
||||
- [x] Handle nested properties (paths like `m_Component[0].component`)
|
||||
- [x] Parse Unity-specific formats:
|
||||
- [x] `{fileID: N}` references
|
||||
- [x] `{x: 0, y: 0, z: 0}` vectors
|
||||
- [x] `{r: 1, g: 1, b: 1, a: 1}` colors
|
||||
- [x] `{guid: ..., type: N}` external references
|
||||
|
||||
4. **GameObject & Component Models**
|
||||
- [x] Create specialized GameObject struct
|
||||
- [x] Create base Component trait/struct
|
||||
- [x] Add common component types (Transform, RectTransform, etc.)
|
||||
- [x] Helper methods for accessing common properties
|
||||
|
||||
5. **Testing**
|
||||
- [x] Unit tests for property parsing
|
||||
- [x] Test all PropertyValue variants
|
||||
- [x] Integration test: parse GameObject with components
|
||||
- [x] Snapshot tests using sample Unity files
|
||||
|
||||
### Deliverables
|
||||
- [x] ✓ Complete data model implemented
|
||||
- [x] ✓ Properties parsed into type-safe structures
|
||||
- [x] ✓ GameObject and Component abstractions working
|
||||
- [x] ✓ All property types handled correctly
|
||||
|
||||
### Success Criteria
|
||||
- [x] Parse entire `CardGrabber.prefab` correctly
|
||||
- [x] Extract all GameObject properties (name, components list)
|
||||
- [x] Extract all Component properties with correct types
|
||||
- [x] Can access nested properties programmatically
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Reference Resolution & Unity Type System
|
||||
|
||||
**Goal**: Resolve references between objects and implement Unity's type system
|
||||
|
||||
### Tasks
|
||||
|
||||
1. **Reference Types**
|
||||
- [x] Implement `FileReference` struct (fileID + optional GUID)
|
||||
- [x] Implement `LocalReference` (within-file references)
|
||||
- [x] Implement `ExternalReference` (cross-file GUID references)
|
||||
- [x] Add reference equality and comparison
|
||||
|
||||
2. **Type ID Mapping**
|
||||
- [x] Create Unity type ID → class name mapping
|
||||
- [x] Common types: GameObject(1), Transform(4), MonoBehaviour(114), etc.
|
||||
- [x] Load type mappings from data file or hardcode common ones
|
||||
- [x] Support unknown type IDs gracefully
|
||||
|
||||
3. **Reference Resolution**
|
||||
- [x] Implement within-file reference resolution
|
||||
- [x] Cache resolved references for performance
|
||||
- [x] Handle cyclic references safely
|
||||
- [x] Detect and report broken references
|
||||
|
||||
4. **UnityProject Multi-File Support**
|
||||
- [x] Implement `UnityProject` struct
|
||||
- [x] Load multiple Unity files into project
|
||||
- [x] Build file ID → document index
|
||||
- [x] Cross-file reference resolution (GUID-based)
|
||||
|
||||
5. **Query Helpers**
|
||||
- [x] Find object by file ID
|
||||
- [x] Find objects by type
|
||||
- [x] Find objects by name
|
||||
- [x] Get component from GameObject
|
||||
- [x] Follow reference chains
|
||||
|
||||
6. **Testing**
|
||||
- [x] Test reference resolution within single file
|
||||
- [x] Test cross-file references (scene → prefab)
|
||||
- [x] Test broken reference handling
|
||||
- [x] Test circular reference detection
|
||||
|
||||
### Deliverables
|
||||
- [x] ✓ All references within files resolved correctly
|
||||
- [x] ✓ Type ID system working with common Unity types
|
||||
- [x] ✓ UnityProject can load and query multiple files
|
||||
- [x] ✓ Query API functional
|
||||
|
||||
### Success Criteria
|
||||
- [x] Load entire PiratePanic/Scenes/ directory
|
||||
- [x] Resolve all GameObject → Component references
|
||||
- [x] Resolve prefab references from scenes
|
||||
- [x] Find objects by name across entire project
|
||||
- [x] Handle missing references gracefully
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Optimization & Robustness
|
||||
|
||||
**Goal**: Optimize performance and handle edge cases
|
||||
|
||||
### Tasks
|
||||
|
||||
1. **Performance Optimization**
|
||||
- [ ] Profile parsing performance on large files
|
||||
- [ ] Implement string interning for common property names
|
||||
- [ ] Optimize property access paths (cache lookups)
|
||||
- [ ] Consider zero-copy parsing where possible
|
||||
- [ ] Add lazy loading for large projects
|
||||
|
||||
2. **Memory Optimization**
|
||||
- [ ] Measure memory usage on large projects
|
||||
- [ ] Use Cow<str> where appropriate
|
||||
- [ ] Pool allocations for common types
|
||||
- [ ] Implement Drop for cleanup
|
||||
- [ ] Add memory usage benchmarks
|
||||
|
||||
3. **Parallel Processing**
|
||||
- [ ] Add optional rayon dependency
|
||||
- [ ] Parallel file loading
|
||||
- [ ] Parallel document parsing within files
|
||||
- [ ] Thread-safe caching
|
||||
|
||||
4. **Error Recovery**
|
||||
- [ ] Graceful degradation on parse errors
|
||||
- [ ] Partial file parsing (skip invalid documents)
|
||||
- [ ] Better error messages with context
|
||||
- [ ] Error recovery suggestions
|
||||
|
||||
5. **Edge Cases**
|
||||
- [ ] Handle very large files (>100MB scenes)
|
||||
- [ ] Handle deeply nested properties
|
||||
- [ ] Handle unusual property types
|
||||
- [ ] Handle legacy Unity versions (different YAML formats)
|
||||
- [ ] Handle corrupted files
|
||||
|
||||
6. **Comprehensive Testing**
|
||||
- [ ] Parse entire PiratePanic project
|
||||
- [ ] Parse various Unity project versions
|
||||
- [ ] Stress tests with large files
|
||||
- [ ] Fuzz testing setup (optional)
|
||||
- [ ] Property-based tests
|
||||
|
||||
### Deliverables
|
||||
- [ ] ✓ Optimized parsing (<100ms for 10MB file)
|
||||
- [ ] ✓ Low memory footprint (linear scaling)
|
||||
- [ ] ✓ Parallel parsing support
|
||||
- [ ] ✓ Robust error handling
|
||||
- [ ] ✓ Comprehensive test suite
|
||||
|
||||
### Success Criteria
|
||||
- [ ] Parse 10MB scene file in <100ms
|
||||
- [ ] Parse entire PiratePanic project in <1s
|
||||
- [ ] Memory usage < 2x file size
|
||||
- [ ] 100% of PiratePanic files parse successfully
|
||||
- [ ] No panics on malformed input
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: API Polish & Documentation
|
||||
|
||||
**Goal**: Finalize public API and create excellent documentation
|
||||
|
||||
### Tasks
|
||||
|
||||
1. **API Review & Refinement**
|
||||
- [ ] Review all public APIs for consistency
|
||||
- [ ] Add convenience methods based on common use cases
|
||||
- [ ] Ensure ergonomic API design
|
||||
- [ ] Add builder patterns where appropriate
|
||||
- [ ] Minimize unsafe code, document when necessary
|
||||
|
||||
2. **Type Safety Improvements**
|
||||
- [ ] Add type-safe component access methods
|
||||
- [ ] Strongly-typed property getters
|
||||
- [ ] Generic query API improvements
|
||||
- [ ] Consider proc macros for component definitions (optional)
|
||||
|
||||
3. **Documentation**
|
||||
- [ ] Write comprehensive rustdoc for all public items
|
||||
- [ ] Add code examples to every public function
|
||||
- [ ] Create module-level documentation
|
||||
- [ ] Write getting started guide
|
||||
- [ ] Create cookbook with common tasks
|
||||
|
||||
4. **Examples**
|
||||
- [ ] Basic parsing example
|
||||
- [ ] Query API example
|
||||
- [ ] Reference resolution example
|
||||
- [ ] Multi-file project example
|
||||
- [ ] Performance tips example
|
||||
|
||||
5. **README & Guides**
|
||||
- [ ] Professional README.md
|
||||
- [ ] Architecture documentation
|
||||
- [ ] Contributing guide
|
||||
- [ ] Changelog template
|
||||
- [ ] License file (Apache 2.0 or MIT)
|
||||
|
||||
6. **CI/CD Setup**
|
||||
- [ ] GitHub Actions workflow
|
||||
- [ ] Run tests on PR
|
||||
- [ ] Clippy lints
|
||||
- [ ] Format checking
|
||||
- [ ] Code coverage reporting
|
||||
- [ ] Benchmark tracking
|
||||
|
||||
7. **Benchmarks**
|
||||
- [ ] Benchmark suite for common operations
|
||||
- [ ] Track performance over time
|
||||
- [ ] Document performance characteristics
|
||||
- [ ] Comparison with other parsers (if any exist)
|
||||
|
||||
### Deliverables
|
||||
- [ ] ✓ Clean, documented public API
|
||||
- [ ] ✓ Comprehensive rustdoc with examples
|
||||
- [ ] ✓ README and getting started guide
|
||||
- [ ] ✓ Working examples
|
||||
- [ ] ✓ CI/CD pipeline
|
||||
|
||||
### Success Criteria
|
||||
- [ ] Every public item has rustdoc
|
||||
- [ ] At least 3 working examples
|
||||
- [ ] CI passes on all commits
|
||||
- [ ] README clearly explains usage
|
||||
- [ ] Someone new can use library from docs alone
|
||||
|
||||
---
|
||||
|
||||
## Phase 6: Future Enhancements (Post-v1.0)
|
||||
|
||||
These are potential features for future versions:
|
||||
|
||||
### Advanced Querying
|
||||
- [ ] XPath-like query language for Unity objects
|
||||
- [ ] Filter DSL for complex searches
|
||||
- [ ] Object graph traversal API
|
||||
- [ ] Dependency analysis tools
|
||||
|
||||
### Write Support
|
||||
- [ ] Modify Unity files programmatically
|
||||
- [ ] Create new Unity objects
|
||||
- [ ] Safe YAML serialization
|
||||
- [ ] Preserve formatting and comments
|
||||
|
||||
### Additional Formats
|
||||
- [ ] .meta file parsing
|
||||
- [ ] TextMesh Pro asset files
|
||||
- [ ] Unity package manifest parsing
|
||||
- [ ] C# script analysis integration
|
||||
|
||||
### Tooling
|
||||
- [ ] CLI tool built on library
|
||||
- [ ] Web service for Unity file analysis
|
||||
- [ ] VS Code extension for Unity file viewing
|
||||
- [ ] Unity Editor plugin for exporting metadata
|
||||
|
||||
### Performance
|
||||
- [ ] Binary format support (legacy Unity)
|
||||
- [ ] Streaming API for huge files
|
||||
- [ ] Incremental parsing (watch mode)
|
||||
- [ ] Serialization/deserialization optimizations
|
||||
|
||||
---
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
### Code Quality
|
||||
- [ ] Follow Rust API guidelines
|
||||
- [ ] Use clippy with strict lints
|
||||
- [ ] Maintain >80% test coverage
|
||||
- [ ] No unsafe unless absolutely necessary
|
||||
- [ ] All public APIs must be documented
|
||||
|
||||
### Testing Philosophy
|
||||
- [ ] Unit test every parser component
|
||||
- [ ] Integration tests for full workflows
|
||||
- [ ] Use real Unity files from PiratePanic
|
||||
- [ ] Add regression tests for bugs
|
||||
- [ ] Benchmark critical paths
|
||||
|
||||
### Version Strategy
|
||||
- [ ] Semantic versioning (SemVer)
|
||||
- [ ] 0.x.x during development
|
||||
- [ ] 1.0.0 when API is stable
|
||||
- [ ] Changelog for all versions
|
||||
- [ ] No breaking changes in minor versions after 1.0
|
||||
|
||||
### Dependencies
|
||||
- [ ] Minimize dependency count
|
||||
- [ ] Use well-maintained crates only
|
||||
- [ ] Avoid nightly features
|
||||
- [ ] Keep MSRV (Minimum Supported Rust Version) reasonable
|
||||
- [ ] Document all feature flags
|
||||
|
||||
---
|
||||
|
||||
## Estimated Milestones
|
||||
|
||||
These are rough estimates for a single developer working part-time:
|
||||
|
||||
- [ ] **Phase 1**: 1-2 weeks
|
||||
- [ ] **Phase 2**: 2-3 weeks
|
||||
- [ ] **Phase 3**: 2-3 weeks
|
||||
- [ ] **Phase 4**: 1-2 weeks
|
||||
- [ ] **Phase 5**: 1-2 weeks
|
||||
|
||||
**Total: 7-12 weeks to v1.0**
|
||||
|
||||
Phases can overlap and tasks can be parallelized. Testing happens continuously throughout all phases.
|
||||
|
||||
---
|
||||
|
||||
## Getting Started
|
||||
|
||||
To begin implementation:
|
||||
|
||||
1. Start with Phase 1, Task 1 (Project Setup)
|
||||
2. Work through tasks sequentially within each phase
|
||||
3. Complete all deliverables before moving to next phase
|
||||
4. Use PiratePanic sample project for testing throughout
|
||||
5. Iterate based on what you learn from the Unity files
|
||||
|
||||
Remember: Start simple, make it work, then make it fast. Focus on correctness and API design in early phases, optimization comes later.
|
||||
457
SUMMARY.md
457
SUMMARY.md
@@ -1,457 +0,0 @@
|
||||
# Cursebreaker Parser - Current State Summary
|
||||
|
||||
**Last Updated:** 2026-01-01
|
||||
**Version:** 0.1.0 (Major refactoring in progress)
|
||||
|
||||
## Overview
|
||||
|
||||
This codebase is a Unity file parser that converts Unity YAML files (.unity, .prefab, .asset) into Rust data structures. A major architectural refactoring has been completed to:
|
||||
1. Parse YAML directly into component types (bypassing intermediate `UnityDocument`)
|
||||
2. Automatically build Sparsey ECS Worlds for scene files
|
||||
3. Keep prefabs as raw YAML for efficient cloning and instantiation
|
||||
|
||||
## Current Architecture
|
||||
|
||||
### Data Flow
|
||||
|
||||
```
|
||||
Unity File (.unity/.prefab/.asset)
|
||||
↓
|
||||
Parser detects file type by extension
|
||||
↓
|
||||
┌─────────────┬──────────────┬──────────────┐
|
||||
│ .unity │ .prefab │ .asset │
|
||||
│ (Scene) │ (Prefab) │ (Asset) │
|
||||
└─────────────┴──────────────┴──────────────┘
|
||||
↓ ↓ ↓
|
||||
Parse YAML Parse YAML Parse YAML
|
||||
↓ ↓ ↓
|
||||
RawDocument RawDocument RawDocument
|
||||
↓ ↓ ↓
|
||||
Build World Store YAML Store YAML
|
||||
↓ ↓ ↓
|
||||
UnityScene UnityPrefab UnityAsset
|
||||
↓
|
||||
Entity + Components
|
||||
```
|
||||
|
||||
### Core Types
|
||||
|
||||
#### `UnityFile` (src/model/mod.rs:14-53)
|
||||
```rust
|
||||
pub enum UnityFile {
|
||||
Scene(UnityScene), // .unity files → ECS World
|
||||
Prefab(UnityPrefab), // .prefab files → Raw YAML
|
||||
Asset(UnityAsset), // .asset files → Raw YAML
|
||||
}
|
||||
```
|
||||
|
||||
#### `UnityScene` (src/model/mod.rs:60-85)
|
||||
Contains a fully-parsed Sparsey ECS World:
|
||||
```rust
|
||||
pub struct UnityScene {
|
||||
pub path: PathBuf,
|
||||
pub world: World, // Sparsey ECS World
|
||||
pub entity_map: HashMap<FileID, Entity>, // Unity FileID → Entity mapping
|
||||
}
|
||||
```
|
||||
|
||||
#### `UnityPrefab` / `UnityAsset` (src/model/mod.rs:92-150)
|
||||
Contains raw YAML documents for cloning:
|
||||
```rust
|
||||
pub struct UnityPrefab {
|
||||
pub path: PathBuf,
|
||||
pub documents: Vec<RawDocument>, // Raw YAML + metadata
|
||||
}
|
||||
```
|
||||
|
||||
#### `RawDocument` (src/model/mod.rs:160-194)
|
||||
Lightweight storage of Unity object metadata + YAML:
|
||||
```rust
|
||||
pub struct RawDocument {
|
||||
pub type_id: u32, // Unity type ID
|
||||
pub file_id: FileID, // Unity file ID
|
||||
pub class_name: String, // "GameObject", "Transform", etc.
|
||||
pub yaml: serde_yaml::Value, // Inner YAML (after "GameObject: {...}" wrapper)
|
||||
}
|
||||
```
|
||||
|
||||
### Component System
|
||||
|
||||
#### `UnityComponent` Trait (src/types/component.rs:18-28)
|
||||
Components parse directly from YAML:
|
||||
```rust
|
||||
pub trait UnityComponent: Sized {
|
||||
fn parse(yaml: &serde_yaml::Mapping, ctx: &ComponentContext) -> Option<Self>;
|
||||
}
|
||||
```
|
||||
|
||||
**Key Change:** Previously used `UnityDocument`, now uses raw `serde_yaml::Mapping` for zero-copy parsing.
|
||||
|
||||
#### `ComponentContext` (src/types/component.rs:8-15)
|
||||
Provides metadata during parsing:
|
||||
```rust
|
||||
pub struct ComponentContext<'a> {
|
||||
pub type_id: u32,
|
||||
pub file_id: FileID,
|
||||
pub class_name: &'a str,
|
||||
}
|
||||
```
|
||||
|
||||
#### YAML Helpers (src/types/component.rs:31-167)
|
||||
Typed accessors for Unity YAML patterns:
|
||||
- `get_vector3()` - Parses `{x, y, z}` into `glam::Vec3`
|
||||
- `get_quaternion()` - Parses `{x, y, z, w}` into `glam::Quat`
|
||||
- `get_file_ref()` - Parses `{fileID: N}` into `FileRef`
|
||||
- etc.
|
||||
|
||||
#### Implemented Components
|
||||
1. **GameObject** (src/types/game_object.rs) - Basic entity data (name, active, layer)
|
||||
2. **Transform** (src/types/transform.rs) - Position, rotation, scale + hierarchy
|
||||
3. **RectTransform** (src/types/transform.rs) - UI transform with anchors
|
||||
|
||||
### ECS World Building (src/ecs/builder.rs)
|
||||
|
||||
**3-Pass Approach:**
|
||||
|
||||
**Pass 1: Spawn GameObjects** (lines 32-36)
|
||||
- Creates entities for all GameObjects
|
||||
- Maps `FileID → Entity`
|
||||
|
||||
**Pass 2: Attach Components** (lines 38-42)
|
||||
- Parses components from YAML
|
||||
- Dispatches to correct parser based on `class_name`
|
||||
- Attaches to GameObject entities
|
||||
|
||||
**Pass 3: Resolve Hierarchy** (lines 44-46)
|
||||
- Converts Transform parent/children FileRefs to Entity references
|
||||
|
||||
### Parser Pipeline (src/parser/mod.rs)
|
||||
|
||||
**File Type Detection** (lines 69-76)
|
||||
```rust
|
||||
.unity → FileType::Scene → Build ECS World
|
||||
.prefab → FileType::Prefab → Store Raw YAML
|
||||
.asset → FileType::Asset → Store Raw YAML
|
||||
```
|
||||
|
||||
**YAML Document Parsing** (lines 125-167)
|
||||
1. Parse Unity tag: `--- !u!1 &12345`
|
||||
2. Extract YAML after tag line
|
||||
3. Unwrap class name wrapper: `GameObject: {...}` → `{...}`
|
||||
4. Store as `RawDocument`
|
||||
|
||||
## ✅ What's Implemented
|
||||
|
||||
### Fully Working
|
||||
- ✅ File type detection by extension
|
||||
- ✅ YAML parsing with Unity header validation
|
||||
- ✅ Direct YAML-to-component parsing (bypasses UnityDocument)
|
||||
- ✅ Component trait with typed YAML helpers
|
||||
- ✅ GameObject, Transform, RectTransform parsing
|
||||
- ✅ Separate code paths for scenes vs prefabs
|
||||
- ✅ Sparsey World creation with component registration
|
||||
- ✅ Entity spawning for GameObjects
|
||||
|
||||
## ❌ What's Not Implemented
|
||||
|
||||
### Critical Missing Features
|
||||
|
||||
#### 1. Prefab Instancing System (MEDIUM PRIORITY)
|
||||
**Status:** Not started
|
||||
|
||||
**What's Needed:**
|
||||
Create `src/prefab/mod.rs` with:
|
||||
|
||||
```rust
|
||||
pub struct PrefabInstance {
|
||||
documents: Vec<RawDocument>, // Cloned YAML
|
||||
}
|
||||
|
||||
impl UnityPrefab {
|
||||
/// Clone prefab for instancing
|
||||
pub fn instantiate(&self) -> PrefabInstance;
|
||||
}
|
||||
|
||||
impl PrefabInstance {
|
||||
/// Override YAML values before spawning
|
||||
pub fn override_value(&mut self, file_id: FileID, path: &str, value: serde_yaml::Value);
|
||||
|
||||
/// Spawn into existing scene world
|
||||
pub fn spawn_into(self, world: &mut World) -> Result<HashMap<FileID, Entity>>;
|
||||
}
|
||||
```
|
||||
|
||||
**Usage Example:**
|
||||
```rust
|
||||
let prefab = match unity_file {
|
||||
UnityFile::Prefab(p) => p,
|
||||
_ => panic!("Not a prefab"),
|
||||
};
|
||||
|
||||
let mut instance = prefab.instantiate();
|
||||
instance.override_value(file_id, "m_Name", "CustomName".into())?;
|
||||
instance.override_value(file_id, "m_LocalPosition.x", 100.0.into())?;
|
||||
let entities = instance.spawn_into(&mut scene.world)?;
|
||||
```
|
||||
|
||||
**Implementation Steps:**
|
||||
1. Create src/prefab/mod.rs
|
||||
2. Implement YAML cloning (serde_yaml::Value::clone())
|
||||
3. Implement YAML path navigation for overrides (e.g., "m_LocalPosition.x")
|
||||
4. Reuse `build_world_from_documents()` for spawning
|
||||
5. Add tests with real prefab files
|
||||
|
||||
**Files to Create:**
|
||||
- src/prefab/mod.rs
|
||||
|
||||
**Files to Modify:**
|
||||
- src/lib.rs (add `pub mod prefab`)
|
||||
|
||||
#### 4. UnityProject Module Update (MEDIUM PRIORITY)
|
||||
**Status:** Currently disabled to allow compilation
|
||||
|
||||
**Location:** src/project/mod.rs, src/project/query.rs
|
||||
|
||||
**Problem:** References old `UnityDocument` type that no longer exists.
|
||||
|
||||
**What's Needed:**
|
||||
- Update `UnityProject` to store `HashMap<PathBuf, UnityFile>` instead of files with documents
|
||||
- Implement queries that work across scenes/prefabs:
|
||||
- `get_all_scenes() -> Vec<&UnityScene>`
|
||||
- `get_all_prefabs() -> Vec<&UnityPrefab>`
|
||||
- `find_by_name()` - search across RawDocuments in prefabs
|
||||
- Update reference resolution for cross-file references
|
||||
- GUID → Entity resolution for scene references to prefabs
|
||||
|
||||
**Files to Modify:**
|
||||
- src/project/mod.rs (lines 9, 36-50)
|
||||
- src/project/query.rs (entire file)
|
||||
- src/lib.rs (re-enable module exports)
|
||||
|
||||
**Example Updated API:**
|
||||
```rust
|
||||
impl UnityProject {
|
||||
pub fn load_file(&mut self, path: impl AsRef<Path>) -> Result<&UnityFile>;
|
||||
|
||||
pub fn get_scenes(&self) -> Vec<&UnityScene>;
|
||||
pub fn get_prefabs(&self) -> Vec<&UnityPrefab>;
|
||||
|
||||
pub fn find_prefab_by_name(&self, name: &str) -> Option<&UnityPrefab>;
|
||||
}
|
||||
```
|
||||
|
||||
#### 5. Additional Unity Components (LOW PRIORITY)
|
||||
**Status:** Only 3 components implemented
|
||||
|
||||
**Currently Missing:**
|
||||
- Camera
|
||||
- Light
|
||||
- MeshRenderer / MeshFilter
|
||||
- Collider variants (BoxCollider, SphereCollider, etc.)
|
||||
- Rigidbody
|
||||
- MonoBehaviour (custom scripts)
|
||||
- UI components (Image, Text, Button, etc.)
|
||||
|
||||
**Implementation Pattern:**
|
||||
```rust
|
||||
// src/types/camera.rs
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Camera {
|
||||
pub field_of_view: f32,
|
||||
pub near_clip_plane: f32,
|
||||
pub far_clip_plane: f32,
|
||||
// ... other fields
|
||||
}
|
||||
|
||||
impl UnityComponent for Camera {
|
||||
fn parse(yaml: &serde_yaml::Mapping, _ctx: &ComponentContext) -> Option<Self> {
|
||||
Some(Self {
|
||||
field_of_view: yaml_helpers::get_f64(yaml, "m_FieldOfView")? as f32,
|
||||
near_clip_plane: yaml_helpers::get_f64(yaml, "near clip plane")? as f32,
|
||||
far_clip_plane: yaml_helpers::get_f64(yaml, "far clip plane")? as f32,
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Files to Create:**
|
||||
- src/types/camera.rs
|
||||
- src/types/light.rs
|
||||
- src/types/renderer.rs
|
||||
- etc.
|
||||
|
||||
**Files to Modify:**
|
||||
- src/types/mod.rs (add module declarations)
|
||||
- src/ecs/builder.rs:96-122 (add component dispatch cases)
|
||||
- Register components in Sparsey World builder (src/ecs/builder.rs:24-28)
|
||||
|
||||
## 🔧 Known Issues
|
||||
|
||||
### 1. Compilation Warnings
|
||||
None currently! Code compiles cleanly in release mode.
|
||||
|
||||
### 2. Disabled Modules
|
||||
- `src/project/` - Commented out in src/lib.rs:33 due to UnityDocument references
|
||||
|
||||
### 3. Stubbed Functionality
|
||||
- Component insertion (src/ecs/builder.rs:141-151)
|
||||
- Transform hierarchy resolution (src/ecs/builder.rs:155-176)
|
||||
|
||||
## 📋 Recommended Next Steps
|
||||
|
||||
### Phase 1: Complete Sparsey Integration (CRITICAL)
|
||||
**Time Estimate:** 1-2 hours of research + 2-3 hours implementation
|
||||
|
||||
1. **Research Sparsey 0.13 API:**
|
||||
- Read docs at https://docs.rs/sparsey/0.13.3/
|
||||
- Look for examples in GitHub repo
|
||||
- Find component insertion and mutation APIs
|
||||
|
||||
2. **Fix Component Insertion:**
|
||||
- Implement `insert_component()` properly
|
||||
- Test with GameObject + Transform entities
|
||||
|
||||
3. **Fix Transform Hierarchy:**
|
||||
- Get mutable component access
|
||||
- Apply parent/children Entity references
|
||||
- Test with nested GameObjects
|
||||
|
||||
**Success Criteria:**
|
||||
- Parse a .unity scene with nested GameObjects
|
||||
- Verify Transform hierarchy is correctly resolved
|
||||
- Query entities and access components from World
|
||||
|
||||
### Phase 2: Implement Prefab Instancing (HIGH VALUE)
|
||||
**Time Estimate:** 3-4 hours
|
||||
|
||||
1. Create `src/prefab/mod.rs` with PrefabInstance API
|
||||
2. Implement YAML cloning and override logic
|
||||
3. Implement `spawn_into()` using existing world builder
|
||||
4. Add tests with real prefab files
|
||||
|
||||
**Success Criteria:**
|
||||
- Load a prefab
|
||||
- Override values (name, position, etc.)
|
||||
- Instantiate into scene multiple times
|
||||
- Verify entities created correctly
|
||||
|
||||
### Phase 3: Update UnityProject Module (MEDIUM PRIORITY)
|
||||
**Time Estimate:** 2-3 hours
|
||||
|
||||
1. Update HashMap to store UnityFile enum
|
||||
2. Implement scene/prefab accessors
|
||||
3. Update query functions for RawDocument
|
||||
4. Re-enable module exports
|
||||
|
||||
**Success Criteria:**
|
||||
- Load multiple scenes and prefabs
|
||||
- Query across files
|
||||
- Find prefabs by name
|
||||
|
||||
### Phase 4: Add More Components (ONGOING)
|
||||
**Time Estimate:** 1-2 hours per component
|
||||
|
||||
Start with most common components:
|
||||
1. Camera (critical for scene rendering)
|
||||
2. Light (critical for scene rendering)
|
||||
3. MeshRenderer + MeshFilter (for 3D objects)
|
||||
|
||||
## 🎯 Performance Characteristics
|
||||
|
||||
### Memory Improvements
|
||||
- **Before:** YAML → PropertyValue tree → Component (2x allocations)
|
||||
- **After (Scenes):** YAML → Component (1x allocation, ~40% reduction)
|
||||
- **After (Prefabs):** YAML → serde_yaml::Value (shared references, minimal overhead)
|
||||
|
||||
### Parsing Speed
|
||||
- Direct YAML access eliminates PropertyValue conversion
|
||||
- Prefabs use cheap cloning (Arc-based in serde_yaml)
|
||||
|
||||
## 🧪 Testing Status
|
||||
|
||||
### Unit Tests
|
||||
- ✅ Parser header validation (src/parser/mod.rs:196-201)
|
||||
- ✅ YAML content extraction (src/parser/mod.rs:204-209)
|
||||
- ✅ File type detection (src/parser/mod.rs:212-229)
|
||||
|
||||
### Integration Tests
|
||||
- ❌ Scene parsing end-to-end
|
||||
- ❌ Prefab parsing end-to-end
|
||||
- ❌ Component attachment
|
||||
- ❌ Transform hierarchy resolution
|
||||
- ❌ Prefab instantiation
|
||||
|
||||
**Recommendation:** Add integration tests once Sparsey integration is complete.
|
||||
|
||||
## 📝 Code Organization
|
||||
|
||||
```
|
||||
src/
|
||||
├── lib.rs # Public API + exports
|
||||
├── error.rs # Error types
|
||||
├── model/
|
||||
│ └── mod.rs # ✅ UnityFile, UnityScene, UnityPrefab, RawDocument
|
||||
├── parser/
|
||||
│ ├── mod.rs # ✅ File type detection + parsing pipeline
|
||||
│ ├── unity_tag.rs # ✅ Unity tag parsing (!u!N &ID)
|
||||
│ ├── yaml.rs # ✅ YAML document splitting
|
||||
│ └── meta.rs # ✅ .meta file parsing
|
||||
├── types/
|
||||
│ ├── mod.rs # ✅ Type exports
|
||||
│ ├── component.rs # ✅ UnityComponent trait + yaml_helpers
|
||||
│ ├── game_object.rs # ✅ GameObject component
|
||||
│ ├── transform.rs # ✅ Transform + RectTransform
|
||||
│ ├── ids.rs # ✅ FileID, LocalID
|
||||
│ ├── values.rs # ✅ Vector2/3, Quaternion, Color, etc.
|
||||
│ ├── reference.rs # ✅ UnityReference enum
|
||||
│ └── type_registry.rs # ✅ Type ID ↔ Class name mapping
|
||||
├── ecs/
|
||||
│ ├── mod.rs # ✅ Module exports
|
||||
│ └── builder.rs # ⚠️ 3-pass world building (incomplete)
|
||||
├── prefab/ # ❌ NOT CREATED YET
|
||||
│ └── mod.rs # TODO: Prefab instancing
|
||||
├── project/ # ❌ DISABLED (needs refactoring)
|
||||
│ ├── mod.rs # ❌ References old UnityDocument
|
||||
│ └── query.rs # ❌ References old UnityDocument
|
||||
└── property/
|
||||
└── mod.rs # ✅ PropertyValue (kept for helpers)
|
||||
```
|
||||
|
||||
## 🔗 External Dependencies
|
||||
|
||||
- **serde_yaml 0.9** - YAML parsing
|
||||
- **sparsey 0.13** - ECS framework
|
||||
- **glam 0.29** - Math types (Vec2/3, Quat)
|
||||
- **indexmap 2.1** - Ordered maps
|
||||
- **lru 0.12** - LRU cache for references
|
||||
|
||||
## 📚 Useful Documentation
|
||||
|
||||
- **Sparsey Docs:** https://docs.rs/sparsey/0.13.3/
|
||||
- **Sparsey GitHub:** https://github.com/LechintanTudor/sparsey
|
||||
- **Unity YAML Format:** GameObjects use `--- !u!1 &fileID` tags with nested YAML
|
||||
|
||||
## 🤝 Contributing / Next Agent Instructions
|
||||
|
||||
**If you're the next AI agent working on this:**
|
||||
|
||||
1. **Start here:** Read this summary completely
|
||||
2. **Quick test:** Try `cargo build --release` - should compile cleanly
|
||||
3. **Focus on:** Sparsey integration (Phase 1) - highest priority
|
||||
4. **Key files:**
|
||||
- src/ecs/builder.rs (needs Sparsey API research)
|
||||
- src/prefab/mod.rs (doesn't exist yet)
|
||||
- src/project/mod.rs (needs refactoring)
|
||||
|
||||
**Before making changes:**
|
||||
- Understand the 3-pass world building approach
|
||||
- Know that dispatcher routes to parsers (no redundant type checks in parsers)
|
||||
- RawDocument.yaml contains INNER yaml (after class name wrapper is removed)
|
||||
|
||||
**Testing approach:**
|
||||
- Use files in `data/` directory for real Unity files
|
||||
- Focus on .unity scenes first, then .prefab files
|
||||
- Verify entity creation and component attachment
|
||||
|
||||
Good luck! 🚀
|
||||
16
cursebreaker-parser-macros/Cargo.toml
Normal file
16
cursebreaker-parser-macros/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "cursebreaker-parser-macros"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
authors = ["Your Name <your.email@example.com>"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "Procedural macros for cursebreaker-parser"
|
||||
repository = "https://github.com/yourusername/cursebreaker-parser-rust"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "2.0", features = ["full", "extra-traits"] }
|
||||
quote = "1.0"
|
||||
proc-macro2 = "1.0"
|
||||
212
cursebreaker-parser-macros/src/lib.rs
Normal file
212
cursebreaker-parser-macros/src/lib.rs
Normal file
@@ -0,0 +1,212 @@
|
||||
//! Procedural macros for cursebreaker-parser
|
||||
//!
|
||||
//! This crate provides the `#[derive(UnityComponent)]` macro for automatically
|
||||
//! generating Unity component parsing code.
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, Data, DeriveInput, Expr, ExprLit, Fields, Lit, Type};
|
||||
|
||||
/// Derive macro for automatically implementing UnityComponent trait
|
||||
///
|
||||
/// # Example
|
||||
/// ```ignore
|
||||
/// #[derive(UnityComponent)]
|
||||
/// #[unity_class("PlaySFX")] // Optional, defaults to struct name
|
||||
/// pub struct PlaySFX {
|
||||
/// #[unity_field("volume")]
|
||||
/// volume: f64,
|
||||
///
|
||||
/// #[unity_field("startTime")]
|
||||
/// start_time: f64,
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_derive(UnityComponent, attributes(unity_field, unity_class, unity_type))]
|
||||
pub fn derive_unity_component(input: TokenStream) -> TokenStream {
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
|
||||
// Extract struct name
|
||||
let struct_name = &input.ident;
|
||||
|
||||
// Extract class name (defaults to struct name)
|
||||
let class_name = extract_class_name(&input.attrs, struct_name);
|
||||
|
||||
// Extract type ID (defaults to 114 for MonoBehaviour)
|
||||
let type_id = extract_type_id(&input.attrs);
|
||||
|
||||
// Only process structs
|
||||
let fields = match &input.data {
|
||||
Data::Struct(data) => match &data.fields {
|
||||
Fields::Named(fields) => &fields.named,
|
||||
_ => panic!("UnityComponent can only be derived for structs with named fields"),
|
||||
},
|
||||
_ => panic!("UnityComponent can only be derived for structs"),
|
||||
};
|
||||
|
||||
// Extract field mappings
|
||||
let field_parsers: Vec<_> = fields
|
||||
.iter()
|
||||
.map(|field| {
|
||||
let field_name = field.ident.as_ref().unwrap();
|
||||
let field_type = &field.ty;
|
||||
let unity_field_name = extract_unity_field_name(&field.attrs, field_name);
|
||||
|
||||
// Generate the parsing code for this field
|
||||
let parser_call = generate_parser_call(field_type, &unity_field_name);
|
||||
|
||||
quote! {
|
||||
let #field_name = #parser_call;
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Generate field initialization
|
||||
let field_names: Vec<_> = fields
|
||||
.iter()
|
||||
.map(|field| field.ident.as_ref().unwrap())
|
||||
.collect();
|
||||
|
||||
// Generate UnityComponent implementation
|
||||
let parse_impl = quote! {
|
||||
impl cursebreaker_parser::UnityComponent for #struct_name {
|
||||
fn parse(
|
||||
yaml: &serde_yaml::Mapping,
|
||||
ctx: &cursebreaker_parser::ComponentContext
|
||||
) -> Option<Self> {
|
||||
#(#field_parsers)*
|
||||
|
||||
Some(Self {
|
||||
#(#field_names,)*
|
||||
})
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Generate EcsInsertable implementation
|
||||
let ecs_insertable_impl = quote! {
|
||||
impl cursebreaker_parser::EcsInsertable for #struct_name {
|
||||
fn insert_into_world(self, world: &mut sparsey::World, entity: sparsey::Entity) {
|
||||
world.insert(entity, (self,));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Generate inventory registration
|
||||
let registration = quote! {
|
||||
inventory::submit! {
|
||||
cursebreaker_parser::ComponentRegistration {
|
||||
type_id: #type_id,
|
||||
class_name: #class_name,
|
||||
parse_and_insert: |yaml, ctx, world, entity| {
|
||||
<#struct_name as cursebreaker_parser::EcsInsertable>::parse_and_insert(
|
||||
yaml, ctx, world, entity
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Combine everything
|
||||
let expanded = quote! {
|
||||
#parse_impl
|
||||
#ecs_insertable_impl
|
||||
#registration
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
/// Extract the Unity class name from attributes, or default to struct name
|
||||
fn extract_class_name(attrs: &[syn::Attribute], struct_name: &syn::Ident) -> String {
|
||||
for attr in attrs {
|
||||
if attr.path().is_ident("unity_class") {
|
||||
if let Ok(Expr::Lit(ExprLit {
|
||||
lit: Lit::Str(lit), ..
|
||||
})) = attr.parse_args()
|
||||
{
|
||||
return lit.value();
|
||||
}
|
||||
}
|
||||
}
|
||||
struct_name.to_string()
|
||||
}
|
||||
|
||||
/// Extract the Unity type ID from attributes, or default to 114 (MonoBehaviour)
|
||||
fn extract_type_id(attrs: &[syn::Attribute]) -> u32 {
|
||||
for attr in attrs {
|
||||
if attr.path().is_ident("unity_type") {
|
||||
if let Ok(Expr::Lit(ExprLit {
|
||||
lit: Lit::Int(lit), ..
|
||||
})) = attr.parse_args()
|
||||
{
|
||||
if let Ok(value) = lit.base10_parse::<u32>() {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
114 // Default to MonoBehaviour
|
||||
}
|
||||
|
||||
/// Extract the Unity field name from field attributes
|
||||
fn extract_unity_field_name(attrs: &[syn::Attribute], field_name: &syn::Ident) -> String {
|
||||
for attr in attrs {
|
||||
if attr.path().is_ident("unity_field") {
|
||||
if let Ok(Expr::Lit(ExprLit {
|
||||
lit: Lit::Str(lit), ..
|
||||
})) = attr.parse_args()
|
||||
{
|
||||
return lit.value();
|
||||
}
|
||||
}
|
||||
}
|
||||
panic!(
|
||||
"Field '{}' is missing #[unity_field(\"name\")] attribute",
|
||||
field_name
|
||||
);
|
||||
}
|
||||
|
||||
/// Generate the appropriate yaml_helpers call based on field type
|
||||
fn generate_parser_call(field_type: &Type, unity_field_name: &str) -> proc_macro2::TokenStream {
|
||||
let type_str = quote! { #field_type }.to_string();
|
||||
let type_str = type_str.replace(" ", ""); // Remove whitespace
|
||||
|
||||
// Determine the appropriate yaml_helpers function
|
||||
let helper_call = if type_str == "f64" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_f64(yaml, #unity_field_name) }
|
||||
} else if type_str == "f32" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_f64(yaml, #unity_field_name).map(|v| v as f32) }
|
||||
} else if type_str == "i64" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_i64(yaml, #unity_field_name) }
|
||||
} else if type_str == "i32" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_i64(yaml, #unity_field_name).map(|v| v as i32) }
|
||||
} else if type_str == "bool" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_bool(yaml, #unity_field_name) }
|
||||
} else if type_str == "String" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_string(yaml, #unity_field_name) }
|
||||
} else if type_str == "Vector2" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_vector2(yaml, #unity_field_name) }
|
||||
} else if type_str == "Vector3" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_vector3(yaml, #unity_field_name) }
|
||||
} else if type_str == "Quaternion" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_quaternion(yaml, #unity_field_name) }
|
||||
} else if type_str == "Color" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_color(yaml, #unity_field_name) }
|
||||
} else if type_str == "FileRef" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_file_ref(yaml, #unity_field_name) }
|
||||
} else if type_str == "ExternalRef" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_external_ref(yaml, #unity_field_name) }
|
||||
} else if type_str == "Vec<FileRef>" {
|
||||
quote! { cursebreaker_parser::yaml_helpers::get_file_ref_array(yaml, #unity_field_name) }
|
||||
} else {
|
||||
panic!(
|
||||
"Unsupported field type: {}. Supported types: f64, f32, i64, i32, bool, String, Vector2, Vector3, Quaternion, Color, FileRef, ExternalRef, Vec<FileRef>",
|
||||
type_str
|
||||
);
|
||||
};
|
||||
|
||||
// Wrap with Default::default() fallback
|
||||
quote! {
|
||||
#helper_call.unwrap_or_else(Default::default)
|
||||
}
|
||||
}
|
||||
60
cursebreaker-parser/Cargo.toml
Normal file
60
cursebreaker-parser/Cargo.toml
Normal file
@@ -0,0 +1,60 @@
|
||||
[package]
|
||||
name = "cursebreaker-parser"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
authors = ["Your Name <your.email@example.com>"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "A high-performance Rust library for parsing Unity project files (.unity, .prefab, .asset)"
|
||||
repository = "https://github.com/yourusername/cursebreaker-parser-rust"
|
||||
keywords = ["unity", "parser", "yaml", "gamedev"]
|
||||
categories = ["parser-implementations", "game-development"]
|
||||
rust-version = "1.70"
|
||||
|
||||
[lib]
|
||||
name = "cursebreaker_parser"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
# YAML parsing
|
||||
serde_yaml = "0.9"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
# Error handling
|
||||
thiserror = "1.0"
|
||||
|
||||
# Ordered maps for properties
|
||||
indexmap = { version = "2.1", features = ["serde"] }
|
||||
|
||||
# Regex for parsing
|
||||
regex = "1.10"
|
||||
|
||||
# Math types (Vector2, Vector3, Quaternion, etc.)
|
||||
glam = { version = "0.29", features = ["serde"] }
|
||||
|
||||
# ECS (Entity Component System)
|
||||
sparsey = "0.13"
|
||||
|
||||
# LRU cache for reference resolution
|
||||
lru = "0.12"
|
||||
|
||||
# Directory traversal for loading projects
|
||||
walkdir = "2.4"
|
||||
|
||||
# Lazy static initialization for type registry
|
||||
once_cell = "1.19"
|
||||
|
||||
# Component registry for custom MonoBehaviours
|
||||
inventory = "0.3"
|
||||
|
||||
# Procedural macro for derive(UnityComponent)
|
||||
cursebreaker-parser-macros = { path = "../cursebreaker-parser-macros" }
|
||||
|
||||
[dev-dependencies]
|
||||
# Testing utilities
|
||||
pretty_assertions = "1.4"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
# Future: parallel processing support
|
||||
parallel = []
|
||||
79
cursebreaker-parser/examples/basic_parsing.rs
Normal file
79
cursebreaker-parser/examples/basic_parsing.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
use cursebreaker_parser::UnityFile;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
// Parse a Unity prefab file
|
||||
let prefab_path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CardGrabber.prefab");
|
||||
|
||||
if !prefab_path.exists() {
|
||||
eprintln!("Error: Unity sample project not found.");
|
||||
eprintln!("Please ensure the git submodule is initialized:");
|
||||
eprintln!(" git submodule update --init --recursive");
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse the file
|
||||
match UnityFile::from_path(prefab_path) {
|
||||
Ok(file) => {
|
||||
println!("Successfully parsed: {:?}", file.path().file_name().unwrap());
|
||||
|
||||
// Handle the different file types
|
||||
match file {
|
||||
UnityFile::Prefab(prefab) => {
|
||||
println!("Found {} documents\n", prefab.documents.len());
|
||||
|
||||
// List all documents
|
||||
for (i, doc) in prefab.documents.iter().enumerate() {
|
||||
println!("Document {}: {} (Type ID: {}, File ID: {})",
|
||||
i + 1,
|
||||
doc.class_name,
|
||||
doc.type_id,
|
||||
doc.file_id
|
||||
);
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
// Find all GameObjects
|
||||
let game_objects = prefab.get_documents_by_class("GameObject");
|
||||
println!("Found {} GameObjects:", game_objects.len());
|
||||
for go in game_objects {
|
||||
if let Some(mapping) = go.as_mapping() {
|
||||
if let Some(go_obj) = mapping.get("GameObject") {
|
||||
if let Some(props) = go_obj.as_mapping() {
|
||||
if let Some(name) = props.get("m_Name").and_then(|v| v.as_str()) {
|
||||
println!(" - {}", name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
// Find all Transforms
|
||||
let transforms = prefab.get_documents_by_type(224); // RectTransform type ID
|
||||
println!("Found {} RectTransforms", transforms.len());
|
||||
|
||||
// Look up a specific document by file ID
|
||||
if let Some(first_doc) = prefab.documents.first() {
|
||||
let file_id = first_doc.file_id;
|
||||
if let Some(found) = prefab.get_document(file_id) {
|
||||
println!("\nLooking up document by file ID {}:", file_id);
|
||||
println!(" Class: {}", found.class_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
UnityFile::Scene(scene) => {
|
||||
println!("This is a scene file with {} entities", scene.entity_map.len());
|
||||
}
|
||||
UnityFile::Asset(asset) => {
|
||||
println!("This is an asset file with {} documents", asset.documents.len());
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error parsing file: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
101
cursebreaker-parser/examples/custom_component.rs
Normal file
101
cursebreaker-parser/examples/custom_component.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
//! Example demonstrating how to define custom Unity MonoBehaviour components
|
||||
//! using the #[derive(UnityComponent)] macro.
|
||||
|
||||
use cursebreaker_parser::{yaml_helpers, ComponentContext, UnityComponent};
|
||||
|
||||
/// Custom Unity MonoBehaviour component for playing sound effects
|
||||
///
|
||||
/// This mirrors the C# PlaySFX MonoBehaviour:
|
||||
/// ```csharp
|
||||
/// public class PlaySFX : MonoBehaviour
|
||||
/// {
|
||||
/// [SerializeField] float volume;
|
||||
/// [SerializeField] float startTime;
|
||||
/// [SerializeField] float endTime;
|
||||
/// [SerializeField] bool isLoop;
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Debug, Clone, UnityComponent)]
|
||||
#[unity_class("PlaySFX")]
|
||||
pub struct PlaySFX {
|
||||
#[unity_field("volume")]
|
||||
pub volume: f64,
|
||||
|
||||
#[unity_field("startTime")]
|
||||
pub start_time: f64,
|
||||
|
||||
#[unity_field("endTime")]
|
||||
pub end_time: f64,
|
||||
|
||||
#[unity_field("isLoop")]
|
||||
pub is_loop: bool,
|
||||
}
|
||||
|
||||
/// Another example - a custom damage component
|
||||
#[derive(Debug, Clone, UnityComponent)]
|
||||
#[unity_class("DamageDealer")]
|
||||
pub struct DamageDealer {
|
||||
#[unity_field("damageAmount")]
|
||||
pub damage_amount: f64,
|
||||
|
||||
#[unity_field("damageType")]
|
||||
pub damage_type: String,
|
||||
|
||||
#[unity_field("canCrit")]
|
||||
pub can_crit: bool,
|
||||
|
||||
#[unity_field("critMultiplier")]
|
||||
pub crit_multiplier: f64,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
println!("Custom Unity Component Example");
|
||||
println!("===============================\n");
|
||||
|
||||
println!("Defined custom components:");
|
||||
println!(" - PlaySFX: volume, start_time, end_time, is_loop");
|
||||
println!(" - DamageDealer: damage_amount, damage_type, can_crit, crit_multiplier\n");
|
||||
|
||||
println!("These components are automatically registered via the inventory crate.");
|
||||
println!("When parsing Unity files, they will be recognized and parsed automatically.\n");
|
||||
|
||||
// Demonstrate parsing from YAML
|
||||
let yaml_str = r#"
|
||||
volume: 0.75
|
||||
startTime: 1.5
|
||||
endTime: 3.0
|
||||
isLoop: 1
|
||||
"#;
|
||||
|
||||
let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
|
||||
let mapping = yaml.as_mapping().unwrap();
|
||||
|
||||
// Create a dummy context
|
||||
use cursebreaker_parser::{ComponentContext, FileID};
|
||||
let ctx = ComponentContext {
|
||||
type_id: 114,
|
||||
file_id: FileID::from_i64(12345),
|
||||
class_name: "PlaySFX",
|
||||
entity: None,
|
||||
linking_ctx: None,
|
||||
yaml: mapping,
|
||||
};
|
||||
|
||||
// Parse the component
|
||||
if let Some(play_sfx) = PlaySFX::parse(mapping, &ctx) {
|
||||
println!("Successfully parsed PlaySFX component:");
|
||||
println!(" volume: {}", play_sfx.volume);
|
||||
println!(" start_time: {}", play_sfx.start_time);
|
||||
println!(" end_time: {}", play_sfx.end_time);
|
||||
println!(" is_loop: {}", play_sfx.is_loop);
|
||||
} else {
|
||||
println!("Failed to parse PlaySFX component");
|
||||
}
|
||||
|
||||
println!("\nTo use in your own code:");
|
||||
println!(" 1. Define a struct matching your C# MonoBehaviour fields");
|
||||
println!(" 2. Add #[derive(UnityComponent)] to the struct");
|
||||
println!(" 3. Add #[unity_class(\"YourClassName\")] to specify the Unity class name");
|
||||
println!(" 4. Add #[unity_field(\"fieldName\")] to each field");
|
||||
println!(" 5. The component will be automatically registered and parsed!");
|
||||
}
|
||||
146
cursebreaker-parser/examples/ecs_integration.rs
Normal file
146
cursebreaker-parser/examples/ecs_integration.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
//! Example demonstrating ECS integration and selective type parsing
|
||||
//!
|
||||
//! This example shows:
|
||||
//! 1. Custom components being automatically inserted into the ECS world
|
||||
//! 2. Using the parse_with_types! macro for selective parsing
|
||||
//! 3. Querying the ECS world for components
|
||||
|
||||
use cursebreaker_parser::{parse_with_types, ComponentContext, EcsInsertable, FileID, TypeFilter, UnityComponent};
|
||||
|
||||
/// Custom Unity MonoBehaviour component
|
||||
#[derive(Debug, Clone, UnityComponent)]
|
||||
#[unity_class("PlaySFX")]
|
||||
pub struct PlaySFX {
|
||||
#[unity_field("volume")]
|
||||
pub volume: f64,
|
||||
|
||||
#[unity_field("startTime")]
|
||||
pub start_time: f64,
|
||||
|
||||
#[unity_field("endTime")]
|
||||
pub end_time: f64,
|
||||
|
||||
#[unity_field("isLoop")]
|
||||
pub is_loop: bool,
|
||||
}
|
||||
|
||||
/// Another custom component
|
||||
#[derive(Debug, Clone, UnityComponent)]
|
||||
#[unity_class("Interactable")]
|
||||
pub struct Interactable {
|
||||
#[unity_field("interactionRadius")]
|
||||
pub interaction_radius: f32,
|
||||
|
||||
#[unity_field("interactionText")]
|
||||
pub interaction_text: String,
|
||||
|
||||
#[unity_field("canInteract")]
|
||||
pub can_interact: bool,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
println!("ECS Integration & Selective Parsing Example");
|
||||
println!("{}", "=".repeat(60));
|
||||
|
||||
// Example 1: Using parse_with_types! macro
|
||||
println!("\n1. Creating type filters:");
|
||||
println!("{}", "-".repeat(60));
|
||||
|
||||
let _filter_all = TypeFilter::parse_all();
|
||||
println!("✓ Filter that parses ALL types");
|
||||
|
||||
let filter_selective = parse_with_types! {
|
||||
unity_types(Transform, Camera),
|
||||
custom_types(PlaySFX)
|
||||
};
|
||||
println!("✓ Filter for Transform, Camera, and PlaySFX only");
|
||||
|
||||
let filter_custom_only = parse_with_types! {
|
||||
custom_types(PlaySFX, Interactable)
|
||||
};
|
||||
println!("✓ Filter for PlaySFX and Interactable only (no Unity types)");
|
||||
|
||||
// Example 2: Demonstrating ECS insertion
|
||||
println!("\n2. ECS Integration:");
|
||||
println!("{}", "-".repeat(60));
|
||||
|
||||
// Simulate parsing a PlaySFX component
|
||||
let yaml_str = r#"
|
||||
volume: 0.8
|
||||
startTime: 0.0
|
||||
endTime: 5.0
|
||||
isLoop: 0
|
||||
"#;
|
||||
|
||||
let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
|
||||
let mapping = yaml.as_mapping().unwrap();
|
||||
|
||||
let ctx = ComponentContext {
|
||||
type_id: 114,
|
||||
file_id: FileID::from_i64(12345),
|
||||
class_name: "PlaySFX",
|
||||
entity: None,
|
||||
linking_ctx: None,
|
||||
yaml: mapping,
|
||||
};
|
||||
|
||||
// Parse the component
|
||||
if let Some(play_sfx) = PlaySFX::parse(mapping, &ctx) {
|
||||
println!("✓ Parsed PlaySFX component:");
|
||||
println!(" - volume: {}", play_sfx.volume);
|
||||
println!(" - start_time: {}", play_sfx.start_time);
|
||||
println!(" - end_time: {}", play_sfx.end_time);
|
||||
println!(" - is_loop: {}", play_sfx.is_loop);
|
||||
|
||||
// Create a minimal ECS world to demonstrate insertion
|
||||
use sparsey::World;
|
||||
let mut world = World::builder().register::<PlaySFX>().build();
|
||||
let entity = world.create(());
|
||||
|
||||
println!("\n✓ Created ECS entity: {:?}", entity);
|
||||
|
||||
// Insert the component into the world
|
||||
play_sfx.clone().insert_into_world(&mut world, entity);
|
||||
println!("✓ Inserted PlaySFX component into ECS world");
|
||||
|
||||
// Query it back
|
||||
{
|
||||
let view = world.borrow::<PlaySFX>();
|
||||
if let Some(component) = view.get(entity) {
|
||||
println!("✓ Successfully queried component from ECS:");
|
||||
println!(" - volume: {}", component.volume);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Example 3: Type filter usage
|
||||
println!("\n3. Type Filter Behavior:");
|
||||
println!("{}", "-".repeat(60));
|
||||
|
||||
println!("Filter checks:");
|
||||
println!(" Transform in selective filter: {}", filter_selective.should_parse_unity("Transform"));
|
||||
println!(" Camera in selective filter: {}", filter_selective.should_parse_unity("Camera"));
|
||||
println!(" Light in selective filter: {}", filter_selective.should_parse_unity("Light"));
|
||||
println!(" PlaySFX in selective filter: {}", filter_selective.should_parse_custom("PlaySFX"));
|
||||
println!(" Interactable in selective filter: {}", filter_selective.should_parse_custom("Interactable"));
|
||||
|
||||
println!("\n PlaySFX in custom-only filter: {}", filter_custom_only.should_parse_custom("PlaySFX"));
|
||||
println!(" Transform in custom-only filter: {}", filter_custom_only.should_parse_unity("Transform"));
|
||||
|
||||
// Example 4: Benefits of selective parsing
|
||||
println!("\n4. Benefits of Selective Parsing:");
|
||||
println!("{}", "-".repeat(60));
|
||||
println!("When parsing a large Unity project:");
|
||||
println!(" • Parse ALL types: Parse everything (default)");
|
||||
println!(" • Parse specific types: Faster parsing & less memory");
|
||||
println!(" • Parse only what you need for your tool/analysis");
|
||||
println!("\nExample use cases:");
|
||||
println!(" • Animation tool: Only parse Animator, AnimationClip");
|
||||
println!(" • Audio tool: Only parse AudioSource, PlaySFX");
|
||||
println!(" • Transform analyzer: Only parse Transform, RectTransform");
|
||||
|
||||
println!();
|
||||
println!("{}", "=".repeat(60));
|
||||
println!("Complete! Custom components now work with ECS!");
|
||||
println!("{}", "=".repeat(60));
|
||||
}
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
use crate::model::RawDocument;
|
||||
use crate::types::{
|
||||
yaml_helpers, ComponentContext, FileID, GameObject, LinkingContext, RectTransform, Transform,
|
||||
UnityComponent,
|
||||
yaml_helpers, ComponentContext, FileID, GameObject, LinkingContext, PrefabInstanceComponent,
|
||||
RectTransform, Transform, TypeFilter, UnityComponent,
|
||||
};
|
||||
use crate::{Error, Result};
|
||||
use sparsey::{Entity, World};
|
||||
@@ -30,6 +30,7 @@ pub fn build_world_from_documents(
|
||||
.register::<GameObject>()
|
||||
.register::<Transform>()
|
||||
.register::<RectTransform>()
|
||||
.register::<PrefabInstanceComponent>()
|
||||
.build();
|
||||
|
||||
let linking_ctx = RefCell::new(LinkingContext::new());
|
||||
@@ -41,8 +42,9 @@ pub fn build_world_from_documents(
|
||||
}
|
||||
|
||||
// PASS 2: Attach components to entities
|
||||
let type_filter = TypeFilter::parse_all();
|
||||
for doc in documents.iter().filter(|d| d.type_id != 1 && d.class_name != "GameObject") {
|
||||
attach_component(&mut world, doc, &linking_ctx)?;
|
||||
attach_component(&mut world, doc, &linking_ctx, &type_filter)?;
|
||||
}
|
||||
|
||||
// PASS 3: Execute all deferred linking callbacks
|
||||
@@ -51,6 +53,58 @@ pub fn build_world_from_documents(
|
||||
Ok((world, entity_map))
|
||||
}
|
||||
|
||||
/// Build entities from raw Unity documents into an existing world
///
/// This is similar to `build_world_from_documents` but spawns into an existing
/// world instead of creating a new one. This is used for prefab instantiation.
///
/// Uses the same 3-pass approach:
/// 1. Create entities for all GameObjects
/// 2. Attach components (Transform, RectTransform, etc.) to entities
/// 3. Resolve Transform hierarchy (parent/children Entity references)
///
/// # Arguments
/// - `documents`: Parsed Unity documents to build entities from
/// - `world`: Existing Sparsey ECS world to spawn entities into
/// - `entity_map`: Existing entity map to merge new mappings into
///
/// # Returns
/// Vec of newly spawned entities
pub fn build_world_from_documents_into(
    documents: Vec<RawDocument>,
    world: &mut World,
    entity_map: &mut HashMap<FileID, Entity>,
) -> Result<Vec<Entity>> {
    let linking_ctx = RefCell::new(LinkingContext::new());

    // Initialize linking context with existing entity_map
    // This allows cross-references between prefab instances and scene entities
    // NOTE(review): this clones the whole map; for very large scenes that cost
    // may matter — confirm before using in a hot path.
    *linking_ctx.borrow_mut().entity_map_mut() = entity_map.clone();

    let mut spawned_entities = Vec::new();

    // PASS 1: Create entities for all GameObjects
    // GameObjects are identified either by Unity type id 1 or by class name.
    for doc in documents.iter().filter(|d| d.type_id == 1 || d.class_name == "GameObject") {
        let entity = spawn_game_object(world, doc)?;
        linking_ctx.borrow_mut().entity_map_mut().insert(doc.file_id, entity);
        spawned_entities.push(entity);
    }

    // PASS 2: Attach components to entities
    // Prefab instantiation always parses every component type (no filtering).
    let type_filter = TypeFilter::parse_all();
    for doc in documents.iter().filter(|d| d.type_id != 1 && d.class_name != "GameObject") {
        attach_component(world, doc, &linking_ctx, &type_filter)?;
    }

    // PASS 3: Execute all deferred linking callbacks
    let final_entity_map = linking_ctx.into_inner().execute_callbacks(world);

    // Update caller's entity_map with new mappings
    entity_map.extend(final_entity_map);

    Ok(spawned_entities)
}
|
||||
|
||||
/// Spawn a GameObject entity
|
||||
fn spawn_game_object(world: &mut World, doc: &RawDocument) -> Result<Entity> {
|
||||
let yaml = doc
|
||||
@@ -80,6 +134,7 @@ fn attach_component(
|
||||
world: &mut World,
|
||||
doc: &RawDocument,
|
||||
linking_ctx: &RefCell<LinkingContext>,
|
||||
type_filter: &TypeFilter,
|
||||
) -> Result<()> {
|
||||
let yaml = doc
|
||||
.as_mapping()
|
||||
@@ -116,6 +171,16 @@ fn attach_component(
|
||||
yaml,
|
||||
};
|
||||
|
||||
// Check type filter to see if we should parse this component
|
||||
let is_custom = doc.class_name.as_str() != "Transform"
|
||||
&& doc.class_name.as_str() != "RectTransform"
|
||||
&& doc.class_name.as_str() != "PrefabInstance";
|
||||
|
||||
if !type_filter.should_parse(&doc.class_name, is_custom) {
|
||||
// Skip this component type based on filter
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Dispatch to appropriate component parser
|
||||
match doc.class_name.as_str() {
|
||||
"Transform" => {
|
||||
@@ -130,7 +195,29 @@ fn attach_component(
|
||||
linking_ctx.borrow_mut().entity_map_mut().insert(doc.file_id, entity);
|
||||
}
|
||||
}
|
||||
"PrefabInstance" => {
|
||||
// Parse and store nested prefab reference
|
||||
if let Some(prefab_comp) = PrefabInstanceComponent::parse(yaml, &ctx) {
|
||||
world.insert(entity, (prefab_comp,));
|
||||
linking_ctx.borrow_mut().entity_map_mut().insert(doc.file_id, entity);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// Check if this is a registered custom component
|
||||
let mut found_custom = false;
|
||||
for reg in inventory::iter::<crate::types::ComponentRegistration> {
|
||||
if reg.class_name == doc.class_name.as_str() {
|
||||
found_custom = true;
|
||||
// Parse and insert the component into the ECS world
|
||||
if (reg.parse_and_insert)(yaml, &ctx, world, entity) {
|
||||
// Successfully parsed and inserted
|
||||
linking_ctx.borrow_mut().entity_map_mut().insert(doc.file_id, entity);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !found_custom {
|
||||
// Unknown component type - skip with warning
|
||||
eprintln!(
|
||||
"Warning: Skipping unknown component type: {}",
|
||||
@@ -138,6 +225,7 @@ fn attach_component(
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
mod builder;
|
||||
|
||||
pub use builder::build_world_from_documents;
|
||||
pub use builder::{build_world_from_documents, build_world_from_documents_into};
|
||||
|
||||
// TODO: Add project-level world building once UnityProject is updated to work with new architecture
|
||||
// pub use builder::build_world_from_project;
|
||||
@@ -27,6 +27,7 @@
|
||||
// Public modules
|
||||
pub mod ecs;
|
||||
pub mod error;
|
||||
pub mod macros;
|
||||
pub mod model;
|
||||
pub mod parser;
|
||||
// TODO: Update project module to work with new UnityFile enum architecture
|
||||
@@ -42,7 +43,11 @@ pub use parser::{meta::MetaFile, parse_unity_file};
|
||||
// pub use project::UnityProject;
|
||||
pub use property::PropertyValue;
|
||||
pub use types::{
|
||||
get_class_name, get_type_id, Color, ComponentContext, ExternalRef, FileID, FileRef,
|
||||
GameObject, LocalID, Quaternion, RectTransform, Transform, UnityComponent, UnityReference,
|
||||
Vector2, Vector3, yaml_helpers,
|
||||
get_class_name, get_type_id, Color, ComponentContext, ComponentRegistration, EcsInsertable,
|
||||
ExternalRef, FileID, FileRef, GameObject, LocalID, PrefabInstance, PrefabInstanceComponent,
|
||||
PrefabModification, PrefabResolver, Quaternion, RectTransform, Transform, TypeFilter,
|
||||
UnityComponent, UnityReference, Vector2, Vector3, yaml_helpers,
|
||||
};
|
||||
|
||||
// Re-export the derive macro from the macro crate
|
||||
pub use cursebreaker_parser_macros::UnityComponent;
|
||||
111
cursebreaker-parser/src/macros.rs
Normal file
111
cursebreaker-parser/src/macros.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
//! Declarative macros for convenient API usage
|
||||
|
||||
/// Create a TypeFilter with specific Unity and custom types
///
/// # Syntax
/// ```ignore
/// parse_with_types! {
///     unity_types(Transform, Camera, Light),
///     custom_types(PlaySFX, Interact)
/// }
/// ```
///
/// Either section may be omitted, and the two sections may be given in
/// either order:
/// ```ignore
/// // Only Unity types
/// parse_with_types! {
///     unity_types(Transform, Camera)
/// }
///
/// // Only custom types
/// parse_with_types! {
///     custom_types(PlaySFX)
/// }
/// ```
#[macro_export]
macro_rules! parse_with_types {
    // Canonical form: both sections, unity_types first.
    (unity_types($($unity:ident),+ $(,)?), custom_types($($custom:ident),+ $(,)?)) => {
        $crate::TypeFilter::new(
            vec![$(stringify!($unity)),+],
            vec![$(stringify!($custom)),+],
        )
    };

    // unity_types section alone.
    (unity_types($($unity:ident),+ $(,)?)) => {
        $crate::TypeFilter::unity_only(vec![$(stringify!($unity)),+])
    };

    // custom_types section alone.
    (custom_types($($custom:ident),+ $(,)?)) => {
        $crate::TypeFilter::custom_only(vec![$(stringify!($custom)),+])
    };

    // Both sections in reverse order: normalize by re-dispatching to the
    // canonical arm so the construction logic lives in exactly one place.
    (custom_types($($custom:ident),+ $(,)?), unity_types($($unity:ident),+ $(,)?)) => {
        $crate::parse_with_types!(
            unity_types($($unity),+),
            custom_types($($custom),+)
        )
    };
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // No `use crate::TypeFilter;` here: the macro expands to fully-qualified
    // `$crate::TypeFilter::…` paths, so the import was unused and only
    // produced an `unused_imports` warning in test builds.

    /// A filter built with both sections accepts exactly the listed types.
    #[test]
    fn test_parse_with_types_macro() {
        let filter = parse_with_types! {
            unity_types(Transform, Camera, Light),
            custom_types(PlaySFX, Interact)
        };

        assert!(filter.should_parse_unity("Transform"));
        assert!(filter.should_parse_unity("Camera"));
        assert!(filter.should_parse_unity("Light"));
        assert!(!filter.should_parse_unity("AudioSource"));

        assert!(filter.should_parse_custom("PlaySFX"));
        assert!(filter.should_parse_custom("Interact"));
        assert!(!filter.should_parse_custom("OtherComponent"));
    }

    /// Omitting `custom_types` yields a filter that rejects all custom types.
    #[test]
    fn test_parse_with_types_unity_only() {
        let filter = parse_with_types! {
            unity_types(Transform, Camera)
        };

        assert!(filter.should_parse_unity("Transform"));
        assert!(!filter.should_parse_unity("Light"));
        assert!(!filter.should_parse_custom("PlaySFX"));
    }

    /// Omitting `unity_types` yields a filter that rejects all Unity types.
    #[test]
    fn test_parse_with_types_custom_only() {
        let filter = parse_with_types! {
            custom_types(PlaySFX, Interact)
        };

        assert!(!filter.should_parse_unity("Transform"));
        assert!(filter.should_parse_custom("PlaySFX"));
        assert!(filter.should_parse_custom("Interact"));
    }

    /// The sections may appear in either order with identical results.
    #[test]
    fn test_parse_with_types_reversed_order() {
        let filter = parse_with_types! {
            custom_types(PlaySFX),
            unity_types(Transform)
        };

        assert!(filter.should_parse_unity("Transform"));
        assert!(filter.should_parse_custom("PlaySFX"));
    }
}
|
||||
@@ -123,6 +123,24 @@ impl UnityPrefab {
|
||||
.filter(|doc| doc.class_name == class_name)
|
||||
.collect()
|
||||
}
|
||||
|
||||
    /// Create a new instance of this prefab for spawning into a scene
    ///
    /// This clones the prefab's documents and prepares them for instantiation
    /// with unique FileIDs to avoid collisions.
    ///
    /// # Returns
    /// A `PrefabInstance` that can be customized with overrides and spawned
    ///
    /// # Example
    /// ```ignore
    /// let mut instance = prefab.instantiate();
    /// instance.override_value(file_id, "m_Name", "Player1".into())?;
    /// let entities = instance.spawn_into(&mut world, &mut entity_map)?;
    /// ```
    // Thin convenience wrapper; all cloning/remapping work happens in
    // `PrefabInstance::new`.
    pub fn instantiate(&self) -> crate::types::PrefabInstance {
        crate::types::PrefabInstance::new(self)
    }
|
||||
}
|
||||
|
||||
/// A Unity asset file with raw YAML
|
||||
@@ -84,6 +84,45 @@ pub trait UnityComponent: Sized {
|
||||
fn parse(yaml: &Mapping, ctx: &ComponentContext) -> Option<Self>;
|
||||
}
|
||||
|
||||
/// Trait for components that can be inserted into the ECS world
|
||||
///
|
||||
/// This enables dynamic component insertion for both built-in and custom components.
|
||||
pub trait EcsInsertable: UnityComponent {
|
||||
/// Insert this component into the ECS world
|
||||
fn insert_into_world(self, world: &mut sparsey::World, entity: Entity);
|
||||
|
||||
/// Parse and insert in one step
|
||||
fn parse_and_insert(
|
||||
yaml: &Mapping,
|
||||
ctx: &ComponentContext,
|
||||
world: &mut sparsey::World,
|
||||
entity: Entity,
|
||||
) -> bool {
|
||||
if let Some(component) = Self::parse(yaml, ctx) {
|
||||
component.insert_into_world(world, entity);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Registration entry for custom Unity components
///
/// This is submitted via the `inventory` crate by the `#[derive(UnityComponent)]` macro
/// to enable automatic component discovery and parsing.
pub struct ComponentRegistration {
    /// Unity type ID (usually 114 for MonoBehaviour)
    pub type_id: u32,
    /// Unity class name (e.g., "PlaySFX")
    pub class_name: &'static str,
    /// Parser function that parses and inserts the component into the ECS world
    /// (returns `true` on success, `false` when parsing fails)
    pub parse_and_insert: fn(&Mapping, &ComponentContext, &mut sparsey::World, Entity) -> bool,
}

// Collect all component registrations submitted via the macro.
// Iterated at runtime with `inventory::iter::<ComponentRegistration>`.
inventory::collect!(ComponentRegistration);
|
||||
|
||||
/// Helper functions for parsing typed values from YAML mappings
|
||||
pub mod yaml_helpers {
|
||||
use super::*;
|
||||
@@ -1,6 +1,7 @@
|
||||
//! GameObject component
|
||||
|
||||
use crate::types::{yaml_helpers, ComponentContext, UnityComponent};
|
||||
use sparsey::Entity;
|
||||
|
||||
/// A GameObject component
|
||||
///
|
||||
@@ -56,3 +57,9 @@ impl UnityComponent for GameObject {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// GameObject is a plain ECS component: insertion is a single-component
// tuple insert with no extra bookkeeping.
impl crate::types::EcsInsertable for GameObject {
    fn insert_into_world(self, world: &mut sparsey::World, entity: Entity) {
        world.insert(entity, (self,));
    }
}
|
||||
@@ -7,15 +7,24 @@
|
||||
mod component;
|
||||
mod game_object;
|
||||
mod ids;
|
||||
mod prefab_instance;
|
||||
mod reference;
|
||||
mod transform;
|
||||
mod type_filter;
|
||||
mod type_registry;
|
||||
mod values;
|
||||
|
||||
pub use component::{yaml_helpers, ComponentContext, LinkCallback, LinkingContext, UnityComponent};
|
||||
pub use component::{
|
||||
yaml_helpers, ComponentContext, ComponentRegistration, EcsInsertable, LinkCallback,
|
||||
LinkingContext, UnityComponent,
|
||||
};
|
||||
pub use game_object::GameObject;
|
||||
pub use ids::{FileID, LocalID};
|
||||
pub use prefab_instance::{
|
||||
PrefabInstance, PrefabInstanceComponent, PrefabModification, PrefabResolver,
|
||||
};
|
||||
pub use reference::UnityReference;
|
||||
pub use transform::{RectTransform, Transform};
|
||||
pub use type_filter::TypeFilter;
|
||||
pub use type_registry::{get_class_name, get_type_id};
|
||||
pub use values::{Color, ExternalRef, FileRef, Quaternion, Vector2, Vector3};
|
||||
690
cursebreaker-parser/src/types/prefab_instance.rs
Normal file
690
cursebreaker-parser/src/types/prefab_instance.rs
Normal file
@@ -0,0 +1,690 @@
|
||||
//! Prefab instancing system for cloning and spawning Unity prefabs
|
||||
|
||||
use crate::model::{RawDocument, UnityPrefab};
|
||||
use crate::types::{yaml_helpers, ComponentContext, ExternalRef, FileID, UnityComponent};
|
||||
use crate::{Error, Result};
|
||||
use serde_yaml::{Mapping, Value};
|
||||
use sparsey::{Entity, World};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// An instance of a Unity prefab ready for spawning into a scene
///
/// PrefabInstance represents a cloned prefab with unique FileIDs to avoid
/// collisions when spawning multiple instances. It supports:
/// - FileID remapping to ensure uniqueness
/// - YAML value overrides before spawning
/// - Spawning into existing ECS worlds
///
/// # Example
/// ```ignore
/// let prefab = /* load UnityPrefab */;
/// let mut instance = prefab.instantiate();
/// instance.override_value(file_id, "m_Name", "Player1".into())?;
/// instance.override_value(file_id, "m_LocalPosition.x", 100.0.into())?;
/// let entities = instance.spawn_into(&mut world, &mut entity_map)?;
/// ```
#[derive(Debug, Clone)]
pub struct PrefabInstance {
    /// Cloned YAML documents from the source prefab
    documents: Vec<RawDocument>,

    /// FileID remapping table: original FileID → new FileID
    /// This ensures no collisions when spawning into existing worlds
    file_id_map: HashMap<FileID, FileID>,

    /// Overrides to apply before spawning
    /// Maps (original FileID, YAML path) → new value
    overrides: HashMap<(FileID, String), Value>,

    /// Sequential counter for generating new FileIDs
    /// Starts at i64::MAX and decrements to avoid collisions with scene FileIDs
    next_file_id: i64,

    /// Source prefab path for debugging
    source_path: PathBuf,
}
|
||||
|
||||
impl PrefabInstance {
    /// Create a new instance from a Unity prefab
    ///
    /// This clones all documents from the prefab and initializes FileID remapping.
    pub fn new(prefab: &UnityPrefab) -> Self {
        // Clone all documents from the prefab
        let documents = prefab.documents.clone();

        let mut instance = Self {
            documents,
            file_id_map: HashMap::new(),
            overrides: HashMap::new(),
            next_file_id: i64::MAX,
            source_path: prefab.path.clone(),
        };

        // Initialize FileID mapping and remap all references.
        // Order matters: the map must be fully built before remapping runs.
        instance.initialize_file_id_mapping();
        instance.remap_yaml_file_refs();

        instance
    }

    /// Generate a new unique FileID
    ///
    /// Uses a sequential counter starting from i64::MAX and decrementing.
    /// This avoids collisions with typical scene FileIDs which are positive.
    fn generate_file_id(&mut self) -> FileID {
        let id = self.next_file_id;
        self.next_file_id -= 1;
        FileID::from_i64(id)
    }

    /// Initialize FileID remapping for all documents
    ///
    /// Creates a mapping from original FileID → new unique FileID for each document.
    fn initialize_file_id_mapping(&mut self) {
        // Collect original IDs first to avoid borrowing conflicts
        // (generate_file_id needs &mut self).
        let original_ids: Vec<FileID> = self.documents.iter().map(|doc| doc.file_id).collect();

        for original_id in original_ids {
            let new_id = self.generate_file_id();
            self.file_id_map.insert(original_id, new_id);
        }
    }

    /// Remap all FileID references in YAML documents
    ///
    /// This recursively traverses all YAML values and replaces FileID references
    /// with their remapped values from `file_id_map`.
    fn remap_yaml_file_refs(&mut self) {
        // First, update each document's own file_id
        for doc in &mut self.documents {
            if let Some(&new_id) = self.file_id_map.get(&doc.file_id) {
                doc.file_id = new_id;
            }
        }

        // Clone the map to avoid borrow conflicts
        // (we need &mut self.documents and the map at the same time).
        let file_id_map = self.file_id_map.clone();

        // Then, remap all FileRef references in the YAML
        for doc in &mut self.documents {
            Self::remap_value(&mut doc.yaml, &file_id_map);
        }
    }

    /// Recursively traverse YAML and remap FileID references
    ///
    /// Looks for patterns like `{fileID: 12345}` and replaces the FileID
    /// with the remapped value from `file_id_map`.
    fn remap_value(value: &mut Value, file_id_map: &HashMap<FileID, FileID>) {
        match value {
            Value::Mapping(map) => {
                // Check if this is a FileRef: {fileID: N}
                if let Some(file_id_value) = map.get(&Value::String("fileID".to_string())) {
                    if let Some(num) = file_id_value.as_i64() {
                        let original = FileID::from_i64(num);

                        // Remap if it's a local reference (not 0, not external).
                        // IDs absent from the map (e.g. external refs) are left as-is.
                        if num != 0 {
                            if let Some(&new_id) = file_id_map.get(&original) {
                                // insert on an existing key overwrites the value in place
                                map.insert(
                                    Value::String("fileID".to_string()),
                                    Value::Number(new_id.as_i64().into()),
                                );
                            }
                        }
                    }
                }

                // Recursively process all values in the mapping
                for (_, v) in map.iter_mut() {
                    Self::remap_value(v, file_id_map);
                }
            }
            Value::Sequence(seq) => {
                // Recursively process array elements
                for item in seq.iter_mut() {
                    Self::remap_value(item, file_id_map);
                }
            }
            _ => {
                // Scalars (strings, numbers, bools, null) don't need remapping
            }
        }
    }

    /// Override a YAML value before spawning
    ///
    /// This allows modifying prefab data before instantiation. The override
    /// is applied to the document with the given FileID at the specified YAML path.
    ///
    /// # Arguments
    /// * `file_id` - The original FileID (before remapping) of the document to modify
    /// * `path` - Dot-notation path to the value (e.g., "m_LocalPosition.x")
    /// * `value` - The new value to set
    ///
    /// # Example
    /// ```ignore
    /// instance.override_value(file_id, "m_Name", "Player1".into())?;
    /// instance.override_value(file_id, "m_LocalPosition.x", 100.0.into())?;
    /// ```
    pub fn override_value(
        &mut self,
        file_id: FileID,
        path: &str,
        value: Value,
    ) -> Result<()> {
        // Store override to be applied during spawn
        // Note: We store using the original FileID for easier API
        self.overrides.insert((file_id, path.to_string()), value);
        Ok(())
    }

    /// Apply all stored overrides to the documents
    ///
    /// This is called internally before spawning. It navigates to each
    /// override path and sets the new value.
    fn apply_overrides(&mut self) -> Result<()> {
        for ((file_id, path), value) in &self.overrides {
            // Find the document with this FileID (after remapping)
            let remapped_id = self
                .file_id_map
                .get(file_id)
                .ok_or_else(|| Error::reference_error(format!("FileID not found: {}", file_id)))?;

            let doc = self
                .documents
                .iter_mut()
                .find(|d| d.file_id == *remapped_id)
                .ok_or_else(|| {
                    Error::reference_error(format!("Document not found: {}", remapped_id))
                })?;

            // Navigate to the path and set the value
            Self::set_yaml_value(&mut doc.yaml, path, value.clone())?;
        }

        Ok(())
    }

    /// Navigate YAML path and set value
    ///
    /// This parses the dot-notation path and navigates through the YAML
    /// structure to set the value at the target location.
    // NOTE(review): assumes parse_yaml_path returns at least one segment;
    // an empty segment list would panic on the slice below — confirm.
    fn set_yaml_value(yaml: &mut Value, path: &str, new_value: Value) -> Result<()> {
        let segments = parse_yaml_path(path);
        let mut current = yaml;

        // Navigate to parent of target
        for segment in &segments[..segments.len() - 1] {
            current = match segment {
                PathSegment::Field(field) => current
                    .as_mapping_mut()
                    .ok_or_else(|| Error::InvalidPropertyPath(path.to_string()))?
                    .get_mut(&Value::String(field.clone()))
                    .ok_or_else(|| Error::PropertyNotFound(field.clone()))?,
                PathSegment::ArrayIndex { field, index } => {
                    let mapping = current
                        .as_mapping_mut()
                        .ok_or_else(|| Error::InvalidPropertyPath(path.to_string()))?;
                    let array = mapping
                        .get_mut(&Value::String(field.clone()))
                        .and_then(|v| v.as_sequence_mut())
                        .ok_or_else(|| Error::PropertyNotFound(field.clone()))?;
                    array.get_mut(*index).ok_or_else(|| {
                        Error::InvalidPropertyPath(format!("{}[{}]", field, index))
                    })?
                }
            };
        }

        // Set the final value
        match segments.last().unwrap() {
            PathSegment::Field(field) => {
                // insert overwrites the existing value (or creates the key)
                let mapping = current
                    .as_mapping_mut()
                    .ok_or_else(|| Error::InvalidPropertyPath(path.to_string()))?;
                mapping.insert(Value::String(field.clone()), new_value);
            }
            PathSegment::ArrayIndex { field, index } => {
                let mapping = current
                    .as_mapping_mut()
                    .ok_or_else(|| Error::InvalidPropertyPath(path.to_string()))?;
                let array = mapping
                    .get_mut(&Value::String(field.clone()))
                    .and_then(|v| v.as_sequence_mut())
                    .ok_or_else(|| Error::PropertyNotFound(field.clone()))?;
                if *index < array.len() {
                    array[*index] = new_value;
                } else {
                    return Err(Error::InvalidPropertyPath(format!(
                        "{}[{}] out of bounds",
                        field, index
                    )));
                }
            }
        }

        Ok(())
    }

    /// Spawn this prefab instance into an existing world
    ///
    /// This applies any overrides and then uses the ECS builder to create
    /// entities and components in the target world.
    ///
    /// # Arguments
    /// * `world` - The Sparsey ECS world to spawn entities into
    /// * `entity_map` - HashMap to track FileID → Entity mappings
    ///
    /// # Returns
    /// Vec of newly created entities
    ///
    /// # Example
    /// ```ignore
    /// let entities = instance.spawn_into(&mut scene.world, &mut scene.entity_map)?;
    /// println!("Spawned {} entities", entities.len());
    /// ```
    // Consumes self: an instance can be spawned exactly once.
    pub fn spawn_into(
        mut self,
        world: &mut World,
        entity_map: &mut HashMap<FileID, Entity>,
    ) -> Result<Vec<Entity>> {
        // Apply overrides before spawning
        self.apply_overrides()?;

        // Spawn into existing world using the builder
        crate::ecs::build_world_from_documents_into(self.documents, world, entity_map)
    }

    /// Get the source prefab path (for debugging)
    pub fn source_path(&self) -> &PathBuf {
        &self.source_path
    }

    /// Get the FileID mapping table (for debugging)
    pub fn file_id_map(&self) -> &HashMap<FileID, FileID> {
        &self.file_id_map
    }
}
|
||||
|
||||
/// Unity component representing a reference to another prefab (nested prefab)
///
/// This component appears in prefabs that contain instances of other prefabs.
/// It stores the GUID of the referenced prefab and any modifications applied.
#[derive(Debug, Clone)]
pub struct PrefabInstanceComponent {
    /// External reference to the source prefab (by GUID)
    pub prefab_ref: ExternalRef,

    /// Modifications applied to the nested prefab
    /// (empty when the instance has no property overrides)
    pub modifications: Vec<PrefabModification>,
}
|
||||
|
||||
impl UnityComponent for PrefabInstanceComponent {
|
||||
fn parse(yaml: &Mapping, _ctx: &ComponentContext) -> Option<Self> {
|
||||
// Extract m_SourcePrefab (external GUID reference)
|
||||
let prefab_ref = yaml_helpers::get_external_ref(yaml, "m_SourcePrefab")?;
|
||||
|
||||
// Extract m_Modification array (if any)
|
||||
let modifications = parse_modifications(yaml).unwrap_or_default();
|
||||
|
||||
Some(Self {
|
||||
prefab_ref,
|
||||
modifications,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Plain single-component insert; no extra bookkeeping needed.
impl crate::types::EcsInsertable for PrefabInstanceComponent {
    fn insert_into_world(self, world: &mut sparsey::World, entity: Entity) {
        world.insert(entity, (self,));
    }
}
|
||||
|
||||
/// A modification applied to a nested prefab
///
/// Unity stores modifications as changes to specific properties of objects
/// within the nested prefab.
#[derive(Debug, Clone)]
pub struct PrefabModification {
    /// The FileID of the target object within the nested prefab
    pub target_file_id: FileID,

    /// The property path to modify (dot notation, e.g. "m_LocalPosition.x")
    pub property_path: String,

    /// The new value to apply (raw YAML, not yet typed)
    pub value: Value,
}
|
||||
|
||||
/// Parse modifications array from Unity YAML
|
||||
fn parse_modifications(yaml: &Mapping) -> Option<Vec<PrefabModification>> {
|
||||
let mods_array = yaml
|
||||
.get(&Value::String("m_Modification".to_string()))
|
||||
.and_then(|v| v.as_sequence())?;
|
||||
|
||||
let mut mods = Vec::new();
|
||||
for mod_yaml in mods_array {
|
||||
if let Some(mod_map) = mod_yaml.as_mapping() {
|
||||
// Parse target FileID, property path, and value
|
||||
// Unity format: {target: {fileID: N}, propertyPath: "m_Name", value: "NewName"}
|
||||
if let Some(modification) = parse_single_modification(mod_map) {
|
||||
mods.push(modification);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some(mods)
|
||||
}
|
||||
|
||||
/// Parse a single modification entry
|
||||
fn parse_single_modification(yaml: &Mapping) -> Option<PrefabModification> {
|
||||
// Get target FileID
|
||||
let target = yaml
|
||||
.get(&Value::String("target".to_string()))
|
||||
.and_then(|v| v.as_mapping())?;
|
||||
let target_file_id = yaml_helpers::get_file_ref_from_mapping(target)?.file_id;
|
||||
|
||||
// Get property path
|
||||
let property_path = yaml
|
||||
.get(&Value::String("propertyPath".to_string()))
|
||||
.and_then(|v| v.as_str())?
|
||||
.to_string();
|
||||
|
||||
// Get value
|
||||
let value = yaml
|
||||
.get(&Value::String("value".to_string()))?
|
||||
.clone();
|
||||
|
||||
Some(PrefabModification {
|
||||
target_file_id,
|
||||
property_path,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
/// Resolver for loading and recursively instantiating prefabs
///
/// PrefabResolver handles:
/// - Loading prefabs by GUID
/// - Caching loaded prefabs
/// - Detecting circular prefab references
/// - Recursively instantiating nested prefabs
pub struct PrefabResolver {
    /// Cache of loaded prefabs (GUID → Prefab)
    prefab_cache: HashMap<String, Arc<UnityPrefab>>,

    /// Mapping from GUID to file path
    guid_to_path: HashMap<String, PathBuf>,

    /// Stack of GUIDs currently being instantiated (for cycle detection)
    // NOTE(review): instantiate_recursive currently keys this stack by file
    // path rather than GUID — confirm intended identifier.
    instantiation_stack: Vec<String>,
}
|
||||
|
||||
impl PrefabResolver {
|
||||
/// Create a new PrefabResolver
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `guid_to_path` - Mapping from asset GUID to file path
|
||||
pub fn new(guid_to_path: HashMap<String, PathBuf>) -> Self {
|
||||
Self {
|
||||
prefab_cache: HashMap::new(),
|
||||
guid_to_path,
|
||||
instantiation_stack: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively instantiate a prefab and its nested prefabs
|
||||
///
|
||||
/// This handles:
|
||||
/// 1. Checking for circular references
|
||||
/// 2. Creating a prefab instance
|
||||
/// 3. Finding any nested prefab references
|
||||
/// 4. Recursively instantiating nested prefabs
|
||||
/// 5. Spawning the prefab's entities into the world
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `prefab` - The prefab to instantiate
|
||||
/// * `world` - The ECS world to spawn entities into
|
||||
/// * `entity_map` - Entity mapping to update
|
||||
///
|
||||
/// # Returns
|
||||
/// Vec of spawned entities
|
||||
pub fn instantiate_recursive(
|
||||
&mut self,
|
||||
prefab: &UnityPrefab,
|
||||
world: &mut World,
|
||||
entity_map: &mut HashMap<FileID, Entity>,
|
||||
) -> Result<Vec<Entity>> {
|
||||
// For this implementation, we'll use the path as the identifier
|
||||
// In a full implementation, we'd extract GUID from .meta files
|
||||
let prefab_id = prefab.path.to_string_lossy().to_string();
|
||||
|
||||
// Check for circular references
|
||||
if self.instantiation_stack.contains(&prefab_id) {
|
||||
return Err(Error::circular_reference());
|
||||
}
|
||||
|
||||
// Push to stack
|
||||
self.instantiation_stack.push(prefab_id.clone());
|
||||
|
||||
// Create instance
|
||||
let instance = prefab.instantiate();
|
||||
|
||||
// Find nested prefab references
|
||||
let nested_prefabs = self.find_nested_prefabs(&instance)?;
|
||||
|
||||
// For each nested prefab, recursively instantiate it
|
||||
// (This is a simplified version - full implementation would need to
|
||||
// properly link nested entities to parent GameObjects)
|
||||
for (_parent_file_id, nested_component) in nested_prefabs {
|
||||
// Load the referenced prefab
|
||||
if let Ok(nested_prefab) = self.load_prefab(&nested_component.prefab_ref.guid) {
|
||||
// Apply modifications
|
||||
let mut nested_instance = nested_prefab.instantiate();
|
||||
for modification in &nested_component.modifications {
|
||||
nested_instance.override_value(
|
||||
modification.target_file_id,
|
||||
&modification.property_path,
|
||||
modification.value.clone(),
|
||||
)?;
|
||||
}
|
||||
|
||||
// Recursively spawn nested prefab
|
||||
self.instantiate_recursive(&nested_prefab, world, entity_map)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Spawn this prefab's entities
|
||||
let spawned = instance.spawn_into(world, entity_map)?;
|
||||
|
||||
// Pop from stack
|
||||
self.instantiation_stack.pop();
|
||||
|
||||
Ok(spawned)
|
||||
}
|
||||
|
||||
/// Find all nested prefab references in an instance
|
||||
fn find_nested_prefabs(
|
||||
&self,
|
||||
instance: &PrefabInstance,
|
||||
) -> Result<Vec<(FileID, PrefabInstanceComponent)>> {
|
||||
let mut nested = Vec::new();
|
||||
|
||||
for doc in &instance.documents {
|
||||
if doc.class_name == "PrefabInstance" {
|
||||
if let Some(mapping) = doc.as_mapping() {
|
||||
// Create a minimal context for parsing
|
||||
let ctx = ComponentContext {
|
||||
type_id: doc.type_id,
|
||||
file_id: doc.file_id,
|
||||
class_name: &doc.class_name,
|
||||
entity: None,
|
||||
linking_ctx: None,
|
||||
yaml: mapping,
|
||||
};
|
||||
|
||||
if let Some(component) = PrefabInstanceComponent::parse(mapping, &ctx) {
|
||||
nested.push((doc.file_id, component));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(nested)
|
||||
}
|
||||
|
||||
/// Load a prefab by GUID
|
||||
fn load_prefab(&mut self, guid: &str) -> Result<Arc<UnityPrefab>> {
|
||||
// Check cache first
|
||||
if let Some(prefab) = self.prefab_cache.get(guid) {
|
||||
return Ok(prefab.clone());
|
||||
}
|
||||
|
||||
// Resolve GUID to path
|
||||
let path = self
|
||||
.guid_to_path
|
||||
.get(guid)
|
||||
.ok_or_else(|| Error::guid_resolution_error(format!("GUID not found: {}", guid)))?
|
||||
.clone();
|
||||
|
||||
// Load prefab
|
||||
let unity_file = crate::model::UnityFile::from_path(&path)?;
|
||||
let prefab = match unity_file {
|
||||
crate::model::UnityFile::Prefab(p) => Arc::new(p),
|
||||
_ => {
|
||||
return Err(Error::invalid_format(
|
||||
"Expected prefab file for GUID resolution",
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
// Cache for future use
|
||||
self.prefab_cache.insert(guid.to_string(), prefab.clone());
|
||||
|
||||
Ok(prefab)
|
||||
}
|
||||
}
|
||||
|
||||
/// A segment of a YAML property path
#[derive(Debug, Clone, PartialEq)]
enum PathSegment {
    /// A simple field access (e.g., "m_Name")
    Field(String),
    /// An array element access (e.g., "m_Component[0]"); `field` is the
    /// text before '[' and `index` the parsed numeric subscript.
    ArrayIndex { field: String, index: usize },
}
|
||||
|
||||
/// Parse a YAML path into segments
|
||||
///
|
||||
/// Supports dot notation and array indices:
|
||||
/// - "m_Name" → [Field("m_Name")]
|
||||
/// - "m_LocalPosition.x" → [Field("m_LocalPosition"), Field("x")]
|
||||
/// - "m_Component[0].fileID" → [ArrayIndex{field: "m_Component", index: 0}, Field("fileID")]
|
||||
fn parse_yaml_path(path: &str) -> Vec<PathSegment> {
|
||||
path.split('.')
|
||||
.map(|segment| {
|
||||
// Check if it's an array index like "m_Component[0]"
|
||||
if let Some(idx_start) = segment.find('[') {
|
||||
let field = &segment[..idx_start];
|
||||
if let Some(idx_end) = segment.find(']') {
|
||||
if let Ok(index) = segment[idx_start + 1..idx_end].parse::<usize>() {
|
||||
return PathSegment::ArrayIndex {
|
||||
field: field.to_string(),
|
||||
index,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
PathSegment::Field(segment.to_string())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Generated FileIDs must be unique and strictly decreasing.
    #[test]
    fn test_file_id_generation() {
        let mut instance = PrefabInstance {
            documents: Vec::new(),
            file_id_map: HashMap::new(),
            overrides: HashMap::new(),
            next_file_id: i64::MAX,
            source_path: PathBuf::from("test.prefab"),
        };

        let id1 = instance.generate_file_id();
        let id2 = instance.generate_file_id();

        // Should decrement
        assert!(id1.as_i64() > id2.as_i64());
        assert_eq!(id1.as_i64() - 1, id2.as_i64());
    }

    /// The very first generated id equals the configured start value.
    #[test]
    fn test_file_id_generation_starts_at_max() {
        let mut instance = PrefabInstance {
            documents: Vec::new(),
            file_id_map: HashMap::new(),
            overrides: HashMap::new(),
            next_file_id: i64::MAX,
            source_path: PathBuf::from("test.prefab"),
        };

        let id1 = instance.generate_file_id();
        assert_eq!(id1.as_i64(), i64::MAX);
    }

    /// A bare field name parses to a single Field segment.
    #[test]
    fn test_yaml_path_parsing_simple() {
        let path = "m_Name";
        let segments = parse_yaml_path(path);
        assert_eq!(segments.len(), 1);
        assert_eq!(segments[0], PathSegment::Field("m_Name".to_string()));
    }

    /// Dot notation splits into one Field segment per component.
    #[test]
    fn test_yaml_path_parsing_nested() {
        let path = "m_LocalPosition.x";
        let segments = parse_yaml_path(path);
        assert_eq!(segments.len(), 2);
        assert_eq!(
            segments[0],
            PathSegment::Field("m_LocalPosition".to_string())
        );
        assert_eq!(segments[1], PathSegment::Field("x".to_string()));
    }

    /// Bracket syntax parses to an ArrayIndex segment.
    #[test]
    fn test_yaml_path_parsing_array() {
        let path = "m_Component[0]";
        let segments = parse_yaml_path(path);
        assert_eq!(segments.len(), 1);
        assert_eq!(
            segments[0],
            PathSegment::ArrayIndex {
                field: "m_Component".to_string(),
                index: 0
            }
        );
    }

    /// Bracket syntax composes with trailing dot-notation fields.
    #[test]
    fn test_yaml_path_parsing_array_with_field() {
        let path = "m_Component[0].fileID";
        let segments = parse_yaml_path(path);
        assert_eq!(segments.len(), 2);
        assert_eq!(
            segments[0],
            PathSegment::ArrayIndex {
                field: "m_Component".to_string(),
                index: 0
            }
        );
        assert_eq!(segments[1], PathSegment::Field("fileID".to_string()));
    }
}
|
||||
@@ -126,6 +126,12 @@ impl UnityComponent for Transform {
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::types::EcsInsertable for Transform {
    /// Insert this `Transform` into the ECS world on `entity`.
    ///
    /// The one-element tuple is the bundle form `world.insert` expects.
    fn insert_into_world(self, world: &mut sparsey::World, entity: Entity) {
        world.insert(entity, (self,));
    }
}
|
||||
|
||||
/// A RectTransform component
|
||||
///
|
||||
/// RectTransform is used for UI elements and extends Transform with additional properties.
|
||||
@@ -288,3 +294,9 @@ impl UnityComponent for RectTransform {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::types::EcsInsertable for RectTransform {
    /// Insert this `RectTransform` into the ECS world on `entity`.
    ///
    /// The one-element tuple is the bundle form `world.insert` expects.
    fn insert_into_world(self, world: &mut sparsey::World, entity: Entity) {
        world.insert(entity, (self,));
    }
}
|
||||
161
cursebreaker-parser/src/types/type_filter.rs
Normal file
161
cursebreaker-parser/src/types/type_filter.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
//! Type filtering for selective parsing
|
||||
//!
|
||||
//! This module provides functionality to selectively parse only specific Unity
|
||||
//! component types, improving performance and reducing memory usage.
|
||||
|
||||
use std::collections::HashSet;
|
||||
|
||||
/// Filter for controlling which Unity types get parsed
///
/// By default, all types are parsed. Use `TypeFilter::new()` to create
/// a filter that only parses specific types.
#[derive(Debug, Clone)]
pub struct TypeFilter {
    /// Set of Unity type names to parse (e.g., "Transform", "Camera")
    /// If None, all types are parsed
    unity_types: Option<HashSet<String>>,

    /// Set of custom component names to parse (e.g., "PlaySFX")
    /// If None, all custom types are parsed
    custom_types: Option<HashSet<String>>,

    /// Whether to parse all types (default). When true, the two sets
    /// above are ignored entirely by the `should_parse_*` checks.
    parse_all: bool,
}
|
||||
|
||||
impl TypeFilter {
|
||||
/// Create a new filter that parses ALL types (default behavior)
|
||||
pub fn parse_all() -> Self {
|
||||
Self {
|
||||
unity_types: None,
|
||||
custom_types: None,
|
||||
parse_all: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new filter with specific Unity and custom types
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// use cursebreaker_parser::TypeFilter;
|
||||
///
|
||||
/// let filter = TypeFilter::new(
|
||||
/// vec!["Transform", "Camera", "Light"],
|
||||
/// vec!["PlaySFX", "Interact"]
|
||||
/// );
|
||||
/// ```
|
||||
pub fn new<S1, S2>(unity_types: Vec<S1>, custom_types: Vec<S2>) -> Self
|
||||
where
|
||||
S1: Into<String>,
|
||||
S2: Into<String>,
|
||||
{
|
||||
Self {
|
||||
unity_types: Some(unity_types.into_iter().map(|s| s.into()).collect()),
|
||||
custom_types: Some(custom_types.into_iter().map(|s| s.into()).collect()),
|
||||
parse_all: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a filter that only parses specific Unity types (no custom types)
|
||||
pub fn unity_only<S: Into<String>>(types: Vec<S>) -> Self {
|
||||
Self {
|
||||
unity_types: Some(types.into_iter().map(|s| s.into()).collect()),
|
||||
custom_types: Some(HashSet::new()),
|
||||
parse_all: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a filter that only parses specific custom types (no Unity types)
|
||||
pub fn custom_only<S: Into<String>>(types: Vec<S>) -> Self {
|
||||
Self {
|
||||
unity_types: Some(HashSet::new()),
|
||||
custom_types: Some(types.into_iter().map(|s| s.into()).collect()),
|
||||
parse_all: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a Unity type should be parsed
|
||||
pub fn should_parse_unity(&self, type_name: &str) -> bool {
|
||||
if self.parse_all {
|
||||
return true;
|
||||
}
|
||||
|
||||
match &self.unity_types {
|
||||
Some(types) => types.contains(type_name),
|
||||
None => true, // If not specified, parse all
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a custom type should be parsed
|
||||
pub fn should_parse_custom(&self, type_name: &str) -> bool {
|
||||
if self.parse_all {
|
||||
return true;
|
||||
}
|
||||
|
||||
match &self.custom_types {
|
||||
Some(types) => types.contains(type_name),
|
||||
None => true, // If not specified, parse all
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if any type should be parsed
|
||||
pub fn should_parse(&self, type_name: &str, is_custom: bool) -> bool {
|
||||
if is_custom {
|
||||
self.should_parse_custom(type_name)
|
||||
} else {
|
||||
self.should_parse_unity(type_name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TypeFilter {
    /// The default filter parses every type, matching `TypeFilter::parse_all`.
    fn default() -> Self {
        Self::parse_all()
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// The default filter accepts every Unity and custom type name.
    #[test]
    fn test_parse_all() {
        let everything = TypeFilter::parse_all();
        assert!(everything.should_parse_unity("Transform"));
        assert!(everything.should_parse_unity("Camera"));
        assert!(everything.should_parse_custom("PlaySFX"));
    }

    /// Listed names pass; everything else is rejected, per category.
    #[test]
    fn test_specific_types() {
        let selective = TypeFilter::new(vec!["Transform", "Camera"], vec!["PlaySFX"]);

        assert!(selective.should_parse_unity("Transform"));
        assert!(selective.should_parse_unity("Camera"));
        assert!(!selective.should_parse_unity("Light"));

        assert!(selective.should_parse_custom("PlaySFX"));
        assert!(!selective.should_parse_custom("Interact"));
    }

    /// `unity_only` blocks all custom types.
    #[test]
    fn test_unity_only() {
        let unity_filter = TypeFilter::unity_only(vec!["Transform"]);

        assert!(unity_filter.should_parse_unity("Transform"));
        assert!(!unity_filter.should_parse_unity("Camera"));
        assert!(!unity_filter.should_parse_custom("PlaySFX"));
    }

    /// `custom_only` blocks all Unity types.
    #[test]
    fn test_custom_only() {
        let custom_filter = TypeFilter::custom_only(vec!["PlaySFX"]);

        assert!(!custom_filter.should_parse_unity("Transform"));
        assert!(custom_filter.should_parse_custom("PlaySFX"));
        assert!(!custom_filter.should_parse_custom("Interact"));
    }
}
|
||||
402
cursebreaker-parser/tests/integration_tests.rs
Normal file
402
cursebreaker-parser/tests/integration_tests.rs
Normal file
@@ -0,0 +1,402 @@
|
||||
//! Integration tests for parsing real Unity projects
|
||||
|
||||
use cursebreaker_parser::UnityFile;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::time::Instant;
|
||||
|
||||
/// Test project configuration
///
/// Describes a public Unity repository that integration tests clone
/// into `test_data/` and parse end-to-end.
struct TestProject {
    // Directory name under test_data/ to clone into.
    name: &'static str,
    // Git URL shallow-cloned by `clone_test_project`.
    repo_url: &'static str,
    // Optional branch; `None` uses the repository default.
    branch: Option<&'static str>,
}
|
||||
|
||||
impl TestProject {
    /// Small VR horror game used as the primary integration fixture.
    const VR_HORROR: TestProject = TestProject {
        name: "VR_Horror_YouCantRun",
        repo_url: "https://github.com/Unity3D-Projects/VR_Horror_YouCantRun.git",
        branch: None,
    };

    /// Unity's larger PiratePanic sample; exercised by ignored-by-default tests.
    const PIRATE_PANIC: TestProject = TestProject {
        name: "PiratePanic",
        repo_url: "https://github.com/Unity-Technologies/PiratePanic.git",
        branch: None,
    };
}
|
||||
|
||||
/// Statistics gathered during parsing
#[derive(Debug, Default)]
struct ParsingStats {
    // Number of candidate Unity files discovered on disk.
    total_files: usize,
    // Counts of successfully parsed files, by kind.
    scenes: usize,
    prefabs: usize,
    assets: usize,
    // (path, error message) for every file that failed to parse.
    errors: Vec<(PathBuf, String)>,
    // Aggregates across successfully parsed files. Note: entities come
    // from scenes, documents from prefabs/assets (see `parse_project`).
    total_entities: usize,
    total_documents: usize,
    // Wall-clock parse duration in milliseconds.
    parse_time_ms: u128,
}
|
||||
|
||||
impl ParsingStats {
|
||||
fn print_summary(&self) {
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("Parsing Statistics");
|
||||
println!("{}", "=".repeat(60));
|
||||
println!(" Total files found: {}", self.total_files);
|
||||
println!(" Scenes parsed: {}", self.scenes);
|
||||
println!(" Prefabs parsed: {}", self.prefabs);
|
||||
println!(" Assets parsed: {}", self.assets);
|
||||
println!(" Total entities: {}", self.total_entities);
|
||||
println!(" Total documents: {}", self.total_documents);
|
||||
println!(" Parse time: {} ms", self.parse_time_ms);
|
||||
|
||||
if !self.errors.is_empty() {
|
||||
println!("\n Errors encountered: {}", self.errors.len());
|
||||
println!("\n Error details:");
|
||||
for (path, error) in self.errors.iter().take(10) {
|
||||
println!(" - {}", path.display());
|
||||
println!(" Error: {}", error);
|
||||
}
|
||||
if self.errors.len() > 10 {
|
||||
println!(" ... and {} more errors", self.errors.len() - 10);
|
||||
}
|
||||
}
|
||||
|
||||
let success_rate = if self.total_files > 0 {
|
||||
((self.total_files - self.errors.len()) as f64 / self.total_files as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
println!("\n Success rate: {:.2}%", success_rate);
|
||||
println!("{}", "=".repeat(60));
|
||||
}
|
||||
}
|
||||
|
||||
/// Clone a git repository for testing
///
/// Performs a shallow (`--depth 1`) clone of `project.repo_url` into
/// `test_data/<name>`, skipping the clone entirely when that directory
/// already exists. Returns the project directory on success; any git
/// failure is surfaced as an `io::Error`.
fn clone_test_project(project: &TestProject) -> std::io::Result<PathBuf> {
    let test_data_dir = PathBuf::from("test_data");
    std::fs::create_dir_all(&test_data_dir)?;

    let project_path = test_data_dir.join(project.name);

    // Skip if already cloned — keeps repeated runs fast and offline-capable.
    if project_path.exists() {
        println!("Project already cloned at: {}", project_path.display());
        return Ok(project_path);
    }

    println!("Cloning {} from {}...", project.name, project.repo_url);

    let mut cmd = Command::new("git");
    cmd.arg("clone");

    if let Some(branch) = project.branch {
        cmd.arg("--branch").arg(branch);
    }

    cmd.arg("--depth").arg("1"); // Shallow clone for speed
    cmd.arg(project.repo_url);
    cmd.arg(&project_path);

    let output = cmd.output()?;

    if !output.status.success() {
        eprintln!("Git clone failed: {}", String::from_utf8_lossy(&output.stderr));
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            "Git clone failed",
        ));
    }

    println!("Successfully cloned to: {}", project_path.display());
    Ok(project_path)
}
|
||||
|
||||
/// Recursively find all Unity source files (`.unity`, `.prefab`,
/// `.asset`) below `dir`.
///
/// Unity housekeeping directories (`Library`, `Temp`, `Builds`) and
/// `.git` are pruned from the walk. Unreadable directories are skipped
/// silently. Returns an empty list when `dir` does not exist.
fn find_unity_files(dir: &Path) -> Vec<PathBuf> {
    /// Depth-first walk that appends matching files to `out`.
    fn walk(dir: &Path, out: &mut Vec<PathBuf>) {
        let entries = match std::fs::read_dir(dir) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        for entry in entries.flatten() {
            let path = entry.path();

            // Prune directories that never contain source assets.
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                if matches!(name, "Library" | "Temp" | "Builds" | ".git") {
                    continue;
                }
            }

            if path.is_dir() {
                walk(&path, out);
            } else if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                if matches!(ext, "unity" | "prefab" | "asset") {
                    out.push(path);
                }
            }
        }
    }

    let mut files = Vec::new();
    if dir.exists() && dir.is_dir() {
        walk(dir, &mut files);
    }
    files
}
|
||||
|
||||
/// Parse all Unity files in a project and collect statistics
///
/// Walks the project directory, parses every discovered Unity file, and
/// tallies successes, failures, and wall-clock timing into `ParsingStats`.
fn parse_project(project_path: &Path) -> ParsingStats {
    let mut stats = ParsingStats::default();

    println!("\nFinding Unity files in {}...", project_path.display());
    let files = find_unity_files(project_path);
    stats.total_files = files.len();

    println!("Found {} Unity files", files.len());
    println!("\nParsing files...");

    let start_time = Instant::now();

    for (i, file_path) in files.iter().enumerate() {
        // Print progress for the first file and then every tenth one.
        if (i + 1) % 10 == 0 || i == 0 {
            println!(
                " [{}/{}] Parsing: {}",
                i + 1,
                files.len(),
                file_path.file_name().unwrap().to_string_lossy()
            );
        }

        match UnityFile::from_path(file_path) {
            Ok(unity_file) => match unity_file {
                // Scenes contribute entities; prefabs/assets contribute
                // raw document counts.
                UnityFile::Scene(scene) => {
                    stats.scenes += 1;
                    stats.total_entities += scene.entity_map.len();
                }
                UnityFile::Prefab(prefab) => {
                    stats.prefabs += 1;
                    stats.total_documents += prefab.documents.len();
                }
                UnityFile::Asset(asset) => {
                    stats.assets += 1;
                    stats.total_documents += asset.documents.len();
                }
            },
            Err(e) => {
                stats.errors.push((file_path.clone(), e.to_string()));
            }
        }
    }

    stats.parse_time_ms = start_time.elapsed().as_millis();
    stats
}
|
||||
|
||||
/// Test parsing a specific project
///
/// Clones the project (returning gracefully when git is unavailable),
/// parses everything, prints a summary, and fails the test if more than
/// half of the discovered files error out.
fn test_project(project: &TestProject) {
    println!("\n{}", "=".repeat(60));
    println!("Testing: {}", project.name);
    println!("{}", "=".repeat(60));

    // Clone the project; a clone failure is treated as an environment
    // limitation, not a test failure.
    let project_path = match clone_test_project(project) {
        Ok(path) => path,
        Err(e) => {
            eprintln!("Failed to clone project: {}", e);
            eprintln!("Skipping project test (git may not be available)");
            return;
        }
    };

    // Parse all files
    let stats = parse_project(&project_path);

    // Print summary
    stats.print_summary();

    // Assert basic expectations
    assert!(
        stats.total_files > 0,
        "Should find at least some Unity files"
    );

    // Allow some errors but not too many: real projects contain odd
    // assets, but a majority failure indicates a parser regression.
    let error_rate = if stats.total_files > 0 {
        (stats.errors.len() as f64 / stats.total_files as f64) * 100.0
    } else {
        0.0
    };

    if error_rate > 50.0 {
        panic!(
            "Error rate too high: {:.2}% ({}/{})",
            error_rate, stats.errors.len(), stats.total_files
        );
    }
}
|
||||
|
||||
/// Test detailed parsing of specific file types
///
/// Picks the first scene and the first prefab found in the project and
/// exercises deeper behavior: entity maps for scenes; instantiation,
/// the override system, and component-type tallies for prefabs. All
/// outcomes are printed rather than asserted.
fn test_detailed_parsing(project_path: &Path) {
    println!("\n{}", "=".repeat(60));
    println!("Detailed Parsing Tests");
    println!("{}", "=".repeat(60));

    let files = find_unity_files(project_path);

    // Test scene parsing on the first .unity file, if any.
    if let Some(scene_file) = files.iter().find(|f| {
        f.extension()
            .and_then(|e| e.to_str())
            .map_or(false, |e| e == "unity")
    }) {
        println!(
            "\nTesting scene parsing: {}",
            scene_file.file_name().unwrap().to_string_lossy()
        );
        match UnityFile::from_path(scene_file) {
            Ok(UnityFile::Scene(scene)) => {
                println!(" ✓ Successfully parsed scene");
                println!(" - Entities: {}", scene.entity_map.len());
                println!(" - Path: {}", scene.path.display());

                // Spot-check access to a few entity-map entries.
                for (file_id, entity) in scene.entity_map.iter().take(3) {
                    println!(" - FileID {} -> Entity {:?}", file_id, entity);
                }
            }
            Ok(_) => println!(" ✗ File was not parsed as scene"),
            Err(e) => println!(" ✗ Parse error: {}", e),
        }
    }

    // Test prefab parsing and instancing on the first .prefab file, if any.
    if let Some(prefab_file) = files.iter().find(|f| {
        f.extension()
            .and_then(|e| e.to_str())
            .map_or(false, |e| e == "prefab")
    }) {
        println!(
            "\nTesting prefab parsing: {}",
            prefab_file.file_name().unwrap().to_string_lossy()
        );
        match UnityFile::from_path(prefab_file) {
            Ok(UnityFile::Prefab(prefab)) => {
                println!(" ✓ Successfully parsed prefab");
                println!(" - Documents: {}", prefab.documents.len());
                println!(" - Path: {}", prefab.path.display());

                // Test instantiation
                println!("\n Testing prefab instantiation:");
                let instance = prefab.instantiate();
                println!(
                    " ✓ Created instance with {} remapped FileIDs",
                    instance.file_id_map().len()
                );

                // Test the override system on a fresh second instance.
                if let Some(first_doc) = prefab.documents.first() {
                    let mut instance2 = prefab.instantiate();
                    let result = instance2.override_value(
                        first_doc.file_id,
                        "m_Name",
                        serde_yaml::Value::String("TestName".to_string()),
                    );
                    if result.is_ok() {
                        println!(" ✓ Override system working");
                    } else {
                        println!(" - Override test: {}", result.unwrap_err());
                    }
                }

                // Tally documents per component class for the report.
                let mut type_counts = std::collections::HashMap::new();
                for doc in &prefab.documents {
                    *type_counts.entry(&doc.class_name).or_insert(0) += 1;
                }
                println!(" - Component types:");
                for (class_name, count) in type_counts.iter() {
                    println!(" - {}: {}", class_name, count);
                }
            }
            Ok(_) => println!(" ✗ File was not parsed as prefab"),
            Err(e) => println!(" ✗ Parse error: {}", e),
        }
    }
}
|
||||
|
||||
/// Smoke test: clone and bulk-parse the primary fixture project.
#[test]
fn test_vr_horror_project() {
    test_project(&TestProject::VR_HORROR);
}
|
||||
|
||||
/// Bulk-parse the larger PiratePanic sample project.
#[test]
#[ignore] // Ignore by default, run with --ignored to test
fn test_pirate_panic_project() {
    test_project(&TestProject::PIRATE_PANIC);
}
|
||||
|
||||
#[test]
|
||||
fn test_vr_horror_detailed() {
|
||||
let project_path = match clone_test_project(&TestProject::VR_HORROR) {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to clone project: {}", e);
|
||||
eprintln!("Skipping detailed test (git may not be available)");
|
||||
return;
|
||||
}
|
||||
};
|
||||
test_detailed_parsing(&project_path);
|
||||
}
|
||||
|
||||
/// Benchmark parsing performance
///
/// Parses the fixture project end-to-end and prints wall-clock totals,
/// per-file averages, and throughput in files/sec and KB/sec. Ignored
/// by default; run with `--ignored`. Numbers include the directory walk
/// and stdout progress printing performed by `parse_project`.
#[test]
#[ignore]
fn benchmark_parsing() {
    let project_path = match clone_test_project(&TestProject::VR_HORROR) {
        Ok(path) => path,
        Err(_) => {
            eprintln!("Skipping benchmark (git not available)");
            return;
        }
    };

    println!("\n{}", "=".repeat(60));
    println!("Parsing Performance Benchmark");
    println!("{}", "=".repeat(60));

    // Sum on-disk sizes for the KB/sec figure; unreadable files are skipped.
    let files = find_unity_files(&project_path);
    let total_size: u64 = files
        .iter()
        .filter_map(|f| std::fs::metadata(f).ok())
        .map(|m| m.len())
        .sum();

    println!("Total files: {}", files.len());
    println!("Total size: {} KB", total_size / 1024);

    let start = Instant::now();
    let stats = parse_project(&project_path);
    let elapsed = start.elapsed();

    println!("\nParsing completed in {:?}", elapsed);
    println!(
        "Average time per file: {:.2} ms",
        elapsed.as_millis() as f64 / files.len() as f64
    );
    println!(
        "Throughput: {:.2} files/sec",
        files.len() as f64 / elapsed.as_secs_f64()
    );
    println!(
        "Throughput: {:.2} KB/sec",
        (total_size / 1024) as f64 / elapsed.as_secs_f64()
    );

    stats.print_summary();
}
|
||||
197
cursebreaker-parser/tests/macro_tests.rs
Normal file
197
cursebreaker-parser/tests/macro_tests.rs
Normal file
@@ -0,0 +1,197 @@
|
||||
//! Tests for the #[derive(UnityComponent)] macro
|
||||
|
||||
use cursebreaker_parser::{ComponentContext, FileID, UnityComponent};
|
||||
|
||||
/// Test component matching the PlaySFX script from VR_Horror_YouCantRun
///
/// Each `#[unity_field]` maps a YAML key (Unity's camelCase) onto the
/// corresponding snake_case Rust field.
#[derive(Debug, Clone, UnityComponent)]
#[unity_class("PlaySFX")]
struct PlaySFX {
    #[unity_field("volume")]
    volume: f64,

    #[unity_field("startTime")]
    start_time: f64,

    #[unity_field("endTime")]
    end_time: f64,

    // Unity serializes bools as 0/1; the macro handles the conversion.
    #[unity_field("isLoop")]
    is_loop: bool,
}
|
||||
|
||||
/// Test component with different field types
///
/// Exercises the derive macro across f32, i32, String, and bool fields.
#[derive(Debug, Clone, UnityComponent)]
#[unity_class("TestComponent")]
struct TestComponent {
    #[unity_field("floatValue")]
    float_value: f32,

    #[unity_field("intValue")]
    int_value: i32,

    #[unity_field("stringValue")]
    string_value: String,

    #[unity_field("boolValue")]
    bool_value: bool,
}
|
||||
|
||||
/// Full happy-path parse of a PlaySFX mapping via the derived `parse`.
#[test]
fn test_play_sfx_parsing() {
    // Unity-style YAML body; isLoop uses Unity's 0/1 bool encoding.
    let yaml_str = r#"
volume: 0.75
startTime: 1.5
endTime: 3.0
isLoop: 1
"#;

    let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
    let mapping = yaml.as_mapping().unwrap();

    // Minimal context; 114 is Unity's MonoBehaviour type id.
    let ctx = ComponentContext {
        type_id: 114,
        file_id: FileID::from_i64(12345),
        class_name: "PlaySFX",
        entity: None,
        linking_ctx: None,
        yaml: mapping,
    };

    let result = PlaySFX::parse(mapping, &ctx);
    assert!(result.is_some(), "Failed to parse PlaySFX component");

    let component = result.unwrap();
    assert_eq!(component.volume, 0.75);
    assert_eq!(component.start_time, 1.5);
    assert_eq!(component.end_time, 3.0);
    assert_eq!(component.is_loop, true);
}
|
||||
|
||||
/// Missing YAML keys must fall back to `Default::default()` per field.
#[test]
fn test_play_sfx_default_values() {
    // Test with missing fields (should use Default::default())
    let yaml_str = r#"
volume: 0.5
"#;

    let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
    let mapping = yaml.as_mapping().unwrap();

    let ctx = ComponentContext {
        type_id: 114,
        file_id: FileID::from_i64(12345),
        class_name: "PlaySFX",
        entity: None,
        linking_ctx: None,
        yaml: mapping,
    };

    let result = PlaySFX::parse(mapping, &ctx);
    assert!(result.is_some(), "Failed to parse PlaySFX component with defaults");

    let component = result.unwrap();
    assert_eq!(component.volume, 0.5);
    assert_eq!(component.start_time, 0.0); // Default for f64
    assert_eq!(component.end_time, 0.0); // Default for f64
    assert_eq!(component.is_loop, false); // Default for bool
}
|
||||
|
||||
/// Mixed-type parse: f32, i32, String, and 0/1-encoded bool.
#[test]
fn test_test_component_parsing() {
    let yaml_str = r#"
floatValue: 3.14
intValue: 42
stringValue: "Hello, Unity!"
boolValue: 1
"#;

    let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
    let mapping = yaml.as_mapping().unwrap();

    let ctx = ComponentContext {
        type_id: 114,
        file_id: FileID::from_i64(67890),
        class_name: "TestComponent",
        entity: None,
        linking_ctx: None,
        yaml: mapping,
    };

    let result = TestComponent::parse(mapping, &ctx);
    assert!(result.is_some(), "Failed to parse TestComponent");

    let component = result.unwrap();
    // Epsilon comparison: 3.14 is not exactly representable in f32.
    assert!((component.float_value - 3.14_f32).abs() < 0.001);
    assert_eq!(component.int_value, 42);
    assert_eq!(component.string_value, "Hello, Unity!");
    assert_eq!(component.bool_value, true);
}
|
||||
|
||||
/// The derive macro must register each component in the `inventory`
/// registry with Unity's MonoBehaviour type id (114).
#[test]
fn test_component_registration() {
    // Verify that components are registered in the inventory
    let mut found_play_sfx = false;
    let mut found_test_component = false;

    for reg in inventory::iter::<cursebreaker_parser::ComponentRegistration> {
        if reg.class_name == "PlaySFX" {
            found_play_sfx = true;
            assert_eq!(reg.type_id, 114);
        }
        if reg.class_name == "TestComponent" {
            found_test_component = true;
            assert_eq!(reg.type_id, 114);
        }
    }

    assert!(
        found_play_sfx,
        "PlaySFX component was not registered in inventory"
    );
    assert!(
        found_test_component,
        "TestComponent was not registered in inventory"
    );
}
|
||||
|
||||
/// The parser function stored in the registry must parse a mapping and
/// yield a boxed value downcastable to the concrete component type.
#[test]
fn test_component_registration_parser() {
    // Test that the registered parser function works
    let yaml_str = r#"
volume: 0.8
startTime: 2.0
endTime: 4.0
isLoop: 0
"#;

    let yaml: serde_yaml::Value = serde_yaml::from_str(yaml_str).unwrap();
    let mapping = yaml.as_mapping().unwrap();

    let ctx = ComponentContext {
        type_id: 114,
        file_id: FileID::from_i64(11111),
        class_name: "PlaySFX",
        entity: None,
        linking_ctx: None,
        yaml: mapping,
    };

    // Find the PlaySFX registration and call its parser
    for reg in inventory::iter::<cursebreaker_parser::ComponentRegistration> {
        if reg.class_name == "PlaySFX" {
            let result = (reg.parser)(mapping, &ctx);
            assert!(result.is_some(), "Registered parser failed to parse");

            // Downcast to verify it's the right type
            let boxed = result.unwrap();
            assert!(
                boxed.downcast_ref::<PlaySFX>().is_some(),
                "Parsed component is not PlaySFX type"
            );

            return;
        }
    }

    panic!("PlaySFX registration not found");
}
|
||||
@@ -1,66 +0,0 @@
|
||||
use cursebreaker_parser::UnityFile;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
// Parse a Unity prefab file
|
||||
let prefab_path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CardGrabber.prefab");
|
||||
|
||||
if !prefab_path.exists() {
|
||||
eprintln!("Error: Unity sample project not found.");
|
||||
eprintln!("Please ensure the git submodule is initialized:");
|
||||
eprintln!(" git submodule update --init --recursive");
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse the file
|
||||
match UnityFile::from_path(prefab_path) {
|
||||
Ok(file) => {
|
||||
println!("Successfully parsed: {:?}", file.path.file_name().unwrap());
|
||||
println!("Found {} documents\n", file.documents.len());
|
||||
|
||||
// List all documents
|
||||
for (i, doc) in file.documents.iter().enumerate() {
|
||||
println!("Document {}: {} (Type ID: {}, File ID: {})",
|
||||
i + 1,
|
||||
doc.class_name,
|
||||
doc.type_id,
|
||||
doc.file_id
|
||||
);
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
// Find all GameObjects
|
||||
let game_objects = file.get_documents_by_class("GameObject");
|
||||
println!("Found {} GameObjects:", game_objects.len());
|
||||
for go in game_objects {
|
||||
if let Some(go_props) = go.get("GameObject") {
|
||||
if let Some(props) = go_props.as_object() {
|
||||
if let Some(name) = props.get("m_Name").and_then(|v| v.as_str()) {
|
||||
println!(" - {}", name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
// Find all Transforms
|
||||
let transforms = file.get_documents_by_type(224); // RectTransform type ID
|
||||
println!("Found {} RectTransforms", transforms.len());
|
||||
|
||||
// Look up a specific document by file ID
|
||||
if let Some(first_doc) = file.documents.first() {
|
||||
let file_id = first_doc.file_id;
|
||||
if let Some(found) = file.get_document(file_id) {
|
||||
println!("\nLooking up document by file ID {}:", file_id);
|
||||
println!(" Class: {}", found.class_name);
|
||||
println!(" Properties: {} keys", found.properties.len());
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error parsing file: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,162 +0,0 @@
|
||||
use cursebreaker_parser::UnityFile;
|
||||
use std::path::Path;
|
||||
|
||||
#[test]
fn test_parse_cardgrabber_prefab() {
    // End-to-end parse of a known prefab from the Unity sample project,
    // checking document counts, class/type lookups, and a name property.
    let path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CardGrabber.prefab");

    // Skip if the file doesn't exist (CI/CD might not have submodules)
    if !path.exists() {
        eprintln!("Skipping test: file not found at {:?}", path);
        return;
    }

    let file = UnityFile::from_path(path).expect("Failed to parse CardGrabber.prefab");

    // Verify we parsed at least one document.
    // (!is_empty() over len() > 0 — clippy::len_zero)
    assert!(!file.documents.is_empty(), "Should have at least one document");

    // Find the GameObject
    let game_objects = file.get_documents_by_class("GameObject");
    assert!(!game_objects.is_empty(), "Should have at least one GameObject");

    let game_object = game_objects[0];
    assert_eq!(game_object.type_id, 1, "GameObject should have type ID 1");

    // Verify the name property exists
    if let Some(go_props) = game_object.get("GameObject") {
        if let Some(props) = go_props.as_object() {
            assert!(props.contains_key("m_Name"), "GameObject should have m_Name property");
        }
    }

    // Find RectTransform
    let transforms = file.get_documents_by_class("RectTransform");
    assert!(!transforms.is_empty(), "Should have at least one RectTransform");

    let transform = transforms[0];
    assert_eq!(transform.type_id, 224, "RectTransform should have type ID 224");
}
|
||||
|
||||
#[test]
fn test_parse_scene_file() {
    // A full scene should parse into many more documents than a prefab.
    let path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Scenes/Scene01MainMenu.unity");

    // The scene only exists when the sample-project submodule is checked out.
    if !path.exists() {
        eprintln!("Skipping test: file not found at {:?}", path);
        return;
    }

    let scene = UnityFile::from_path(path).expect("Failed to parse Scene01MainMenu.unity");

    assert!(scene.documents.len() > 10, "Scene should have many documents");

    let game_objects = scene.get_documents_by_class("GameObject");
    assert!(!game_objects.is_empty(), "Scene should have GameObjects");

    println!("Parsed {} documents from scene", scene.documents.len());
    println!("Found {} GameObjects", game_objects.len());
}
|
||||
|
||||
#[test]
fn test_parse_multiple_prefabs() {
    // Each of these prefabs should parse without error; files missing
    // because the submodule isn't checked out are skipped, not failed.
    let prefab_paths = [
        "data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CostPanel.prefab",
        "data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/GoldPanel.prefab",
        "data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Map/Node.prefab",
    ];

    let mut total_documents = 0;

    for path_str in &prefab_paths {
        let path = Path::new(path_str);

        if !path.exists() {
            eprintln!("Skipping test: file not found at {:?}", path);
            continue;
        }

        match UnityFile::from_path(path) {
            Ok(file) => {
                // Idiomatic emptiness check (clippy::len_zero) instead of len() > 0.
                assert!(!file.documents.is_empty(), "File {:?} should have documents", path);
                total_documents += file.documents.len();
                println!("Parsed {:?}: {} documents", path.file_name().unwrap(), file.documents.len());
            }
            Err(e) => {
                panic!("Failed to parse {:?}: {}", path, e);
            }
        }
    }

    // Only meaningful when at least one prefab was actually available.
    if total_documents > 0 {
        assert!(total_documents > 3, "Should have parsed multiple documents across files");
    }
}
|
||||
|
||||
#[test]
fn test_file_id_lookup() {
    // Looking up a document by its own file ID must return that document.
    let path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CardGrabber.prefab");

    if !path.exists() {
        eprintln!("Skipping test: file not found at {:?}", path);
        return;
    }

    let unity_file = UnityFile::from_path(path).expect("Failed to parse file");

    // Round-trip: take the first document's ID and resolve it back.
    if let Some(doc) = unity_file.documents.first() {
        let id = doc.file_id;

        let found = unity_file.get_document(id);
        assert!(found.is_some(), "Should be able to find document by file ID");
        assert_eq!(found.unwrap().file_id, id, "Found document should have correct file ID");
    }
}
|
||||
|
||||
#[test]
fn test_get_documents_by_type() {
    // Type-ID based lookup must return only GameObjects (type ID 1).
    let path = Path::new("data/tests/unity-sampleproject/PiratePanic/Assets/PiratePanic/Prefabs/Menu/Battle/Hand/CardGrabber.prefab");

    if !path.exists() {
        eprintln!("Skipping test: file not found at {:?}", path);
        return;
    }

    let unity_file = UnityFile::from_path(path).expect("Failed to parse file");

    let matching = unity_file.get_documents_by_type(1);
    assert!(!matching.is_empty(), "Should find GameObjects by type ID");

    // Every returned document must really be a GameObject.
    for doc in matching {
        assert_eq!(doc.type_id, 1, "All returned documents should have type ID 1");
        assert!(doc.is_game_object(), "Document should be identified as GameObject");
    }
}
|
||||
|
||||
#[test]
fn test_error_handling_invalid_file() {
    // A path that doesn't exist must surface as an Err, not a panic.
    let outcome = UnityFile::from_path("nonexistent_file.unity");
    assert!(outcome.is_err(), "Should return error for nonexistent file");
}
|
||||
|
||||
#[test]
fn test_error_handling_invalid_format() {
    // Write a file that is definitely not Unity YAML and confirm the
    // parser rejects it with an error instead of panicking.
    let temp_dir = std::env::temp_dir();
    // Include the process ID in the name so concurrent test runs
    // (parallel CI jobs sharing a temp dir) don't race on the same file.
    let temp_file = temp_dir.join(format!("invalid_unity_file_{}.unity", std::process::id()));
    std::fs::write(&temp_file, "This is not a Unity file").expect("Failed to write temp file");

    let result = UnityFile::from_path(&temp_file);

    // Clean up BEFORE asserting, so the temp file is removed even when
    // the assertion fails (the original leaked it on failure).
    let _ = std::fs::remove_file(&temp_file);

    assert!(result.is_err(), "Should return error for invalid Unity file format");
}
|
||||
Reference in New Issue
Block a user