Merge pull request #322 from AnthonyTornetta/304-serializedeserialize-block-data

Fixed planet chunks not saving + block data now saves too
AnthonyTornetta authored Aug 26, 2024
2 parents 0ab13e5 + 5e34928 commit eb5f6e7
Showing 4 changed files with 110 additions and 103 deletions.
37 changes: 20 additions & 17 deletions cosmos_core/src/structure/loading/mod.rs
@@ -56,13 +56,33 @@ fn set_structure_done_loading(mut structure_query: Query<&mut Structure>, mut ev
}
}

#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
/// Systems responsible for the creation & population of a structure
pub enum StructureLoadingSet {
/// Initially sets up the structure being loaded, such as creating the `Structure` component
LoadStructure,
/// Adds structure components that need to be present
AddStructureComponents,
/// Creates all entities the chunks would have
CreateChunkEntities,
/// Loads the chunks from disk and creates their serialized data.
LoadChunkBlocks,
/// Sets up the `BlockData` components used by block data
InitializeChunkBlockData,
/// Loads any chunk's block data
LoadChunkData,
/// Run once the structure is finished loading. Used to notify other systems a chunk is ready to be processed
StructureLoaded,
}

pub(super) fn register(app: &mut App) {
app.configure_sets(
Update,
(
StructureLoadingSet::LoadStructure,
StructureLoadingSet::AddStructureComponents,
StructureLoadingSet::CreateChunkEntities,
StructureLoadingSet::LoadChunkBlocks,
StructureLoadingSet::InitializeChunkBlockData,
StructureLoadingSet::LoadChunkData,
StructureLoadingSet::StructureLoaded,
@@ -82,20 +102,3 @@ pub(super) fn register(app: &mut App) {
)
.register_type::<ChunksNeedLoaded>();
}

#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
/// Systems responsible for the creation & population of a structure
pub enum StructureLoadingSet {
/// Initially sets up the structure being loaded, such as creating the `Structure` component
LoadStructure,
/// Adds structure components that need to be present
AddStructureComponents,
/// Creates all entnties the chunks would have
CreateChunkEntities,
/// Sets up the `BlockData` components used by block data
InitializeChunkBlockData,
/// Loads any chunk's block data
LoadChunkData,
/// Run once the structure is finished loaded. Used to notify other systems a chunk is ready to be processed
StructureLoaded,
}
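Downstream code opts into this ordering by scheduling its systems in one of these sets. A minimal sketch of that usage, with a hypothetical system name and assumed import paths (neither is part of this PR):

    use bevy::prelude::*;
    use cosmos_core::structure::{loading::StructureLoadingSet, Structure};

    /// Hypothetical system, purely for illustration.
    fn react_to_structures(q_structures: Query<Entity, With<Structure>>) {
        for ent in q_structures.iter() {
            debug!("saw structure {ent:?}");
        }
    }

    fn register(app: &mut App) {
        // Because of the configure_sets chain above, anything placed in
        // StructureLoaded runs after the LoadChunkData systems in the same
        // Update frame.
        app.add_systems(Update, react_to_structures.in_set(StructureLoadingSet::StructureLoaded));
    }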
52 changes: 30 additions & 22 deletions cosmos_server/src/persistence/saving.rs
@@ -137,7 +137,7 @@ fn create_entity_ids(mut commands: Commands, q_without_id: Query<(Entity, &Seria

/// Make sure any systems that serialize data for saving are run before this
fn done_saving(
query: Query<
q_needs_saved: Query<
(
Entity,
Option<&Name>,
@@ -166,7 +166,7 @@ fn done_saving(
}
}

for (entity, name, sd, entity_id, loading_distance, save_file_identifier) in query.iter() {
for (entity, name, sd, entity_id, loading_distance, mut save_file_identifier) in q_needs_saved.iter() {
commands.entity(entity).remove::<NeedsSaved>().remove::<SerializedData>();

if !sd.should_save() {
@@ -179,40 +179,48 @@ } else {
} else {
warn!("Missing load distance for {entity:?}");
}
}

if let Some(save_file_identifier) = save_file_identifier {
let path = save_file_identifier.get_save_file_path();
if fs::exists(&path).unwrap_or(false) {
if fs::remove_file(&path).is_err() {
warn!("Error deleting old save file at {path}!");
}
commands.entity(entity).log_components();
}

if let SaveFileIdentifierType::Base(entity_id, Some(sector), load_distance) = &save_file_identifier.identifier_type {
sectors_cache.remove(entity_id, *sector, *load_distance);
}
// Required to be in the outer scope so the reference is still valid
let sfi: Option<SaveFileIdentifier>;
if save_file_identifier.is_none() {
sfi = calculate_sfi(entity, &q_parent, &q_entity_id, &q_serialized_data);
if let Some(sfi) = sfi.clone() {
commands.entity(entity).insert(sfi);
}
save_file_identifier = sfi.as_ref();
}

let serialized: Vec<u8> = cosmos_encoder::serialize(&sd);

let Some(save_identifier) = calculate_sfi(entity, &q_parent, &q_entity_id, &q_serialized_data) else {
error!("Could not calculate save file identifier for {entity:?}");
let Some(save_file_identifier) = save_file_identifier else {
error!("Could not calculate save file identifier for {entity:?} - loggin components");
commands.entity(entity).log_components();
continue;
};

if let Err(e) = write_file(&save_identifier, &serialized) {
warn!("{e}");
continue;
let path = save_file_identifier.get_save_file_path();
if fs::exists(&path).unwrap_or(false) {
if fs::remove_file(&path).is_err() {
warn!("Error deleting old save file at {path}!");
}

if let SaveFileIdentifierType::Base(entity_id, Some(sector), load_distance) = &save_file_identifier.identifier_type {
sectors_cache.remove(entity_id, *sector, *load_distance);
}
}

if matches!(&save_identifier.identifier_type, SaveFileIdentifierType::Base(_, _, _)) {
let serialized: Vec<u8> = cosmos_encoder::serialize(&sd);

if let Err(e) = write_file(save_file_identifier, &serialized) {
error!("Unable to save {entity:?}\n{e}");
}

if matches!(&save_file_identifier.identifier_type, SaveFileIdentifierType::Base(_, _, _)) {
if let Some(loc) = sd.location {
sectors_cache.insert(loc.sector(), entity_id.clone(), loading_distance.map(|ld| ld.load_distance()));
}
}

commands.entity(entity).insert(save_identifier);
}
}
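The `let sfi: Option<SaveFileIdentifier>;` declared before the `if` is what lets the reborrowed `save_file_identifier` stay valid after the block ends: the owned value has to outlive the reference taken from it. A standalone sketch of that scoping pattern, using generic types rather than the project's:

    fn choose_labels(precomputed: &[Option<String>]) {
        for existing in precomputed {
            let mut chosen: Option<&String> = existing.as_ref();

            // Declared in the loop body (the outer scope) so the reference taken
            // from it below stays valid for the rest of the iteration.
            let computed: Option<String>;
            if chosen.is_none() {
                computed = Some("fallback".to_string());
                chosen = computed.as_ref();
            }

            if let Some(label) = chosen {
                println!("using {label}");
            }
        }
    }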

@@ -327,7 +327,7 @@ fn unload_chunks_far_from_players(
mut event_writer: EventWriter<ChunkUnloadEvent>,
mut commands: Commands,
) {
let mut potential_chunks = HashMap::<Entity, HashSet<ChunkCoordinate>>::new();
let mut chunks_to_unload = HashMap::<Entity, HashSet<ChunkCoordinate>>::new();
for (_, planet, entity, _) in planets.iter() {
let mut set = HashSet::new();

@@ -344,7 +344,7 @@ }
}
}

potential_chunks.insert(entity, set);
chunks_to_unload.insert(entity, set);
}

for player in players.iter() {
@@ -376,7 +376,7 @@
true,
);

let set = potential_chunks.get_mut(&entity).expect("This was just added");
let set = chunks_to_unload.get_mut(&entity).expect("This was just added");

for res in iterator {
let chunk_position = match res {
@@ -389,7 +389,7 @@ }
}
}

for (planet, chunk_coords) in potential_chunks {
for (planet, chunk_coords) in chunks_to_unload {
if let Ok((location, mut structure, _, entity_id)) = planets.get_mut(planet) {
let mut needs_id = false;

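The rename from potential_chunks to chunks_to_unload reflects how the collection is actually used: every candidate chunk is recorded up front, chunks still within range of a player are removed again (in the part of the function not shown in this diff), and whatever remains gets unloaded. A generic sketch of that mark-then-prune idea, with made-up types standing in for planets and chunk coordinates:

    use std::collections::{HashMap, HashSet};

    fn chunks_to_unload(
        loaded: &HashMap<u32, HashSet<(i32, i32, i32)>>, // planet id -> loaded chunk coords
        still_needed: &[(u32, (i32, i32, i32))],         // (planet id, coord) kept near a player
    ) -> HashMap<u32, HashSet<(i32, i32, i32)>> {
        // Start by treating every loaded chunk as a candidate for unloading...
        let mut to_unload = loaded.clone();
        // ...then prune the ones a player still needs.
        for (planet, coord) in still_needed {
            if let Some(set) = to_unload.get_mut(planet) {
                set.remove(coord);
            }
        }
        to_unload
    }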
116 changes: 56 additions & 60 deletions cosmos_server/src/structure/planet/persistence.rs
@@ -6,7 +6,7 @@ use bevy::prelude::*;
use bevy_rapier3d::plugin::RapierContextEntityLink;
use cosmos_core::{
block::data::persistence::ChunkLoadBlockDataEvent,
netty::{cosmos_encoder, system_sets::NetworkingSystemsSet, NoSendEntity},
netty::{cosmos_encoder, NoSendEntity},
physics::location::Location,
structure::{
chunk::{netty::SerializedChunkBlockData, Chunk, ChunkEntity},
@@ -20,7 +20,7 @@ use cosmos_core::{
use serde::{Deserialize, Serialize};

use crate::persistence::{
loading::{LoadingSystemSet, NeedsLoaded, LOADING_SCHEDULE},
loading::{LoadingSystemSet, NeedsLoaded},
saving::{NeedsSaved, SavingSystemSet, SAVING_SCHEDULE},
EntityId, SaveFileIdentifier, SerializedData,
};
@@ -86,7 +86,7 @@ pub(super) struct ChunkNeedsPopulated {
pub structure_entity: Entity,
}

fn structure_created(created: Query<Entity, (Added<Structure>, Without<EntityId>)>, mut commands: Commands) {
fn add_entity_id_to_new_structures(created: Query<Entity, (Added<Structure>, Without<EntityId>)>, mut commands: Commands) {
for ent in created.iter() {
commands.entity(ent).insert(EntityId::generate());
}
Expand Down Expand Up @@ -123,7 +123,7 @@ fn populate_chunks(

let serialized_data = cosmos_encoder::deserialize::<SerializedData>(&chunk).unwrap_or_else(|_| {
panic!(
"Error parsing chunk @ {cx} {cy} {cz} - is the file corrupted? File len: {}",
"Error parsing chunk @ {cx} {cy} {cz} ({svi:?}) - is the file corrupted? File len: {}",
chunk.len()
)
});
@@ -158,67 +158,63 @@ }
}

fn load_chunk(
query: Query<(Entity, &SerializedData, &ChunkEntity), With<NeedsLoaded>>,
mut structure_query: Query<&mut Structure>,
mut chunk_init_event: EventWriter<ChunkInitEvent>,
q_chunk_needs_loaded: Query<(Entity, &SerializedData, &ChunkEntity), With<NeedsLoaded>>,
mut q_structure: Query<&mut Structure>,
mut evw_chunk_init: EventWriter<ChunkInitEvent>,
mut evw_chunk_load_block_data: EventWriter<ChunkLoadBlockDataEvent>,
mut commands: Commands,
mut chunk_load_block_data_event_writer: EventWriter<ChunkLoadBlockDataEvent>,
) {
for (entity, sd, ce) in query.iter() {
if let Some(chunk) = sd.deserialize_data::<Chunk>("cosmos:chunk") {
if let Ok(mut structure) = structure_query.get_mut(ce.structure_entity) {
let coords = chunk.chunk_coordinates();

commands
.entity(entity)
.insert(TransformBundle::from_transform(Transform::from_translation(
structure.chunk_relative_position(coords),
)));

structure.set_chunk_entity(coords, entity);

structure.set_chunk(chunk);

chunk_init_event.send(ChunkInitEvent {
structure_entity: ce.structure_entity,
coords,
serialized_block_data: None,
});

// Block data is stored per-chunk as `SerializedChunkBlockData` on dynamic structures,
// instead of fixed structures storing it as `AllBlockData` on the structure itself.
if let Some(data) = sd.deserialize_data::<SerializedChunkBlockData>("cosmos:block_data") {
chunk_load_block_data_event_writer.send(ChunkLoadBlockDataEvent {
data,
chunk: coords,
structure_entity: ce.structure_entity,
});
}
}
for (entity, sd, ce) in q_chunk_needs_loaded.iter() {
let Some(chunk) = sd.deserialize_data::<Chunk>("cosmos:chunk") else {
continue;
};

let Ok(mut structure) = q_structure.get_mut(ce.structure_entity) else {
continue;
};

let coords = chunk.chunk_coordinates();

commands
.entity(entity)
.insert(TransformBundle::from_transform(Transform::from_translation(
structure.chunk_relative_position(coords),
)));

structure.set_chunk_entity(coords, entity);

structure.set_chunk(chunk);

evw_chunk_init.send(ChunkInitEvent {
structure_entity: ce.structure_entity,
coords,
serialized_block_data: None,
});

// Block data is stored per-chunk as `SerializedChunkBlockData` on dynamic structures,
// instead of fixed structures storing it as `AllBlockData` on the structure itself.
if let Some(data) = sd.deserialize_data::<SerializedChunkBlockData>("cosmos:block_data") {
evw_chunk_load_block_data.send(ChunkLoadBlockDataEvent {
data,
chunk: coords,
structure_entity: ce.structure_entity,
});
}
}
}
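The rewritten load_chunk flattens the previous nested `if let` blocks into `let ... else` guards that `continue` when a piece of data is missing, keeping the happy path at one indentation level. A minimal, generic sketch of that control-flow pattern (not project code):

    fn process_all(records: &[Option<u32>]) {
        for record in records {
            // Guard clause: skip this element instead of nesting the rest of the body.
            let Some(value) = record else {
                continue;
            };
            println!("processing {value}");
        }
    }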

pub(super) fn register(app: &mut App) {
app.add_systems(
Update,
(
structure_created.in_set(StructureLoadingSet::CreateChunkEntities),
populate_chunks.in_set(StructureLoadingSet::LoadChunkData),
)
.in_set(NetworkingSystemsSet::Between)
.chain(),
)
.add_systems(SAVING_SCHEDULE, on_save_structure.in_set(SavingSystemSet::DoSaving))
.add_systems(
LOADING_SCHEDULE,
(
on_load_structure,
// This will not interfere with the generation of chunks, so their relative ordering does not matter.
load_chunk.ambiguous_with(BiosphereGenerationSet::GenerateChunkFeatures),
)
.chain()
.in_set(LoadingSystemSet::DoLoading)
.in_set(StructureTypeSet::Planet),
);
app.add_systems(SAVING_SCHEDULE, on_save_structure.in_set(SavingSystemSet::DoSaving))
.add_systems(
Update,
(
add_entity_id_to_new_structures.in_set(StructureLoadingSet::CreateChunkEntities),
populate_chunks.in_set(StructureLoadingSet::CreateChunkEntities).before(load_chunk),
load_chunk
.in_set(StructureLoadingSet::LoadChunkBlocks)
.ambiguous_with(BiosphereGenerationSet::GenerateChunkFeatures),
on_load_structure.in_set(LoadingSystemSet::DoLoading),
)
.in_set(StructureTypeSet::Planet),
);
}
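This register call combines two Bevy ordering mechanisms: set membership via in_set, plus an explicit system-to-system constraint via .before(load_chunk). A small self-contained sketch of that combination, using placeholder set and system names that are not from the codebase:

    use bevy::prelude::*;

    #[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
    enum DemoSet {
        Populate,
        Load,
    }

    // Placeholder systems purely for illustration.
    fn populate() {}
    fn load() {}

    fn register(app: &mut App) {
        // The sets run in the declared order...
        app.configure_sets(Update, (DemoSet::Populate, DemoSet::Load).chain());
        app.add_systems(
            Update,
            (
                // ...and `.before(load)` adds an explicit system-level constraint on
                // top of the set ordering, mirroring populate_chunks above.
                populate.in_set(DemoSet::Populate).before(load),
                load.in_set(DemoSet::Load),
            ),
        );
    }

The explicit .before() keeps the relative ordering of the two systems intact even if the set configuration changes later.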
