use std::{
    cell::{RefCell, UnsafeCell},
    sync::Arc,
};

use azalea_block::BlockState;
use azalea_core::{
    bitset::FixedBitSet,
    position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
};
use azalea_physics::collision::BlockWithShape;
use azalea_world::Instance;
use parking_lot::RwLock;

use super::{mining::MiningCache, rel_block_pos::RelBlockPos};

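/// A wrapper around the world that caches the block data the pathfinder needs,
/// so repeated queries don't have to keep taking the world's read lock.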
pub struct CachedWorld {
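    /// The position that the `RelBlockPos` arguments passed to this struct's
    /// methods are relative to.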
    origin: BlockPos,

    min_y: i32,
    world_lock: Arc<RwLock<Instance>>,

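    // Storing the `PalettedContainer`s directly instead of the chunks avoids
    // carrying unrelated data like heightmaps, and lets us skip the per-chunk
    // lock on later reads.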
    cached_chunks: RefCell<Vec<(ChunkPos, Vec<azalea_world::palette::PalettedContainer>)>>,
    last_chunk_cache_index: RefCell<Option<usize>>,

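    // `UnsafeCell` is used instead of `RefCell` on the hot paths. The cells
    // make `CachedWorld` !Sync, so access stays single-threaded, and no two
    // live references to the same cell ever overlap.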
    cached_blocks: UnsafeCell<CachedSections>,

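    // A fixed-size, direct-mapped cache of mining costs, indexed by a 20-bit
    // hash of the position (see `cost_for_breaking_block`).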
    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
}

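/// A list of sections sorted by position, with the two most recently used
/// indices cached so repeated lookups can skip the binary search.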
#[derive(Default)]
pub struct CachedSections {
    pub last_index: usize,
    pub second_last_index: usize,
    pub sections: Vec<CachedSection>,
}

impl CachedSections {
    #[inline]
    pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
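        // fast path: check the two most recently used sections first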
        if let Some(last_item) = self.sections.get(self.last_index) {
            if last_item.pos == pos {
                return Some(&mut self.sections[self.last_index]);
            } else if let Some(second_last_item) = self.sections.get(self.second_last_index) {
                if second_last_item.pos == pos {
                    return Some(&mut self.sections[self.second_last_index]);
                }
            }
        }

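        // slow path: `sections` is sorted by position, so we can binary search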
        let index = self
            .sections
            .binary_search_by(|section| section.pos.cmp(&pos))
            .ok();

        if let Some(index) = index {
            self.second_last_index = self.last_index;
            self.last_index = index;
            return Some(&mut self.sections[index]);
        }
        None
    }

    #[inline]
    pub fn insert(&mut self, section: CachedSection) {
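        // insert at the position that keeps `sections` sorted, so `get_mut`'s
        // binary search stays valid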
        let index = self
            .sections
            .binary_search_by(|s| s.pos.cmp(&section.pos))
            .unwrap_or_else(|e| e);
        self.sections.insert(index, section);
    }
}

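/// Precomputed passability/solidity bits for every block in a 16x16x16 chunk
/// section.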
pub struct CachedSection {
    pub pos: ChunkSectionPos,
    pub passable_bitset: FixedBitSet<{ 4096_usize.div_ceil(8) }>,
    pub solid_bitset: FixedBitSet<{ 4096_usize.div_ceil(8) }>,
}

impl CachedWorld {
    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
        let min_y = world_lock.read().chunks.min_y;
        Self {
            origin,
            min_y,
            world_lock,
            cached_chunks: Default::default(),
            last_chunk_cache_index: Default::default(),
            cached_blocks: Default::default(),
            cached_mining_costs: UnsafeCell::new(
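                // the i16::MAX/i32::MAX sentinel marks entries that haven't
                // been computed yet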
                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
                    .into_boxed_slice(),
            ),
        }
    }

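    /// Runs `f` with the palette data for the section at `section_pos`,
    /// copying the chunk's sections into `cached_chunks` the first time a
    /// chunk is touched so later lookups can skip the world lock.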
    fn with_section<T>(
        &self,
        section_pos: ChunkSectionPos,
        f: impl FnOnce(&azalea_world::palette::PalettedContainer) -> T,
    ) -> Option<T> {
        if section_pos.y * 16 < self.min_y {
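            // y position is out of bounds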
            return None;
        }

        let chunk_pos = ChunkPos::from(section_pos);
        let section_index =
            azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;

        let mut cached_chunks = self.cached_chunks.borrow_mut();

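        // the chunk we used last is the most likely to be needed again, so
        // check it first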
        if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow() {
            if cached_chunks[last_chunk_cache_index].0 == chunk_pos {
                let sections = &cached_chunks[last_chunk_cache_index].1;
                if section_index >= sections.len() {
                    return None;
                };
                let section: &azalea_world::palette::PalettedContainer = &sections[section_index];
                return Some(f(section));
            }
        }

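        // otherwise, scan the rest of the cached chunks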
        if let Some((chunk_index, sections)) =
            cached_chunks
                .iter()
                .enumerate()
                .find_map(|(i, (pos, sections))| {
                    if *pos == chunk_pos {
                        Some((i, sections))
                    } else {
                        None
                    }
                })
        {
            if section_index >= sections.len() {
                return None;
            };
            *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
            let section: &azalea_world::palette::PalettedContainer = &sections[section_index];
            return Some(f(section));
        }

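        // the chunk isn't cached yet, so clone its sections out of the world
        // and cache them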
        let world = self.world_lock.read();
        let chunk = world.chunks.get(&chunk_pos)?;
        let chunk = chunk.read();

        let sections: Vec<azalea_world::palette::PalettedContainer> = chunk
            .sections
            .iter()
            .map(|section| section.states.clone())
            .collect();

        if section_index >= sections.len() {
            return None;
        };

        let section = &sections[section_index];
        let r = f(section);

        cached_chunks.push((chunk_pos, sections));

        Some(r)
    }

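    /// Computes the passable/solid bitsets for every block in the given
    /// section, or returns `None` if the section isn't available.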
    fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
        self.with_section(section_pos, |section| {
            let mut passable_bitset = FixedBitSet::<{ 4096_usize.div_ceil(8) }>::new();
            let mut solid_bitset = FixedBitSet::<{ 4096_usize.div_ceil(8) }>::new();
            for i in 0..4096 {
                let block_state_id = section.get_at_index(i);
                let block_state = BlockState::try_from(block_state_id).unwrap_or(BlockState::AIR);
                if is_block_state_passable(block_state) {
                    passable_bitset.set(i);
                }
                if is_block_state_solid(block_state) {
                    solid_bitset.set(i);
                }
            }
            CachedSection {
                pos: section_pos,
                passable_bitset,
                solid_bitset,
            }
        })
    }

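    /// Whether the block at the given position (relative to the origin) can be
    /// moved through.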
    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }

    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
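        // SAFETY: `CachedWorld` is !Sync, so only one thread can be in here,
        // and nothing else takes a reference to `cached_blocks` while this one
        // is live.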
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.passable_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let passable = cached.passable_bitset.index(index);
        cached_blocks.insert(cached);
        passable
    }

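    /// Whether the block at the given position (relative to the origin) has a
    /// full collision box that can be stood on.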
    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }

    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
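        // SAFETY: same reasoning as in `is_block_pos_passable`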
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.solid_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.solid_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }

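    /// The cost of breaking the block at the given position, memoized in
    /// `cached_mining_costs`.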
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
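        // SAFETY: same reasoning as in `is_block_pos_passable`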
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
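        // take the bottom 8 bits of x and z and the bottom 4 bits of y to make
        // a 20-bit index into the table; colliding positions just overwrite
        // each other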
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }

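    /// Computes the mining cost without consulting the memo table. Breaking is
    /// treated as impossible (infinite cost) if it would release a liquid or
    /// drop a falling block on us.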
    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
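            // the block is already passable, so there's nothing to break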
            return 0.;
        }

        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

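        // neighbors that are in the same section can be checked cheaply inside
        // the single `with_section` call below; the ones that cross a section
        // boundary are handled afterwards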
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state =
                BlockState::try_from(section.get_at_index(u16::from(section_block_pos) as usize))
                    .unwrap_or_default();
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                return f32::INFINITY;
            }

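            // don't mine the block if it's directly under a liquid or a
            // falling block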
            if up_is_in_same_section {
                let up_block = BlockState::try_from(
                    section.get_at_index(u16::from(section_block_pos.up(1)) as usize),
                )
                .unwrap_or_default();
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            if north_is_in_same_section {
                let north_block = BlockState::try_from(
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize),
                )
                .unwrap_or_default();
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            if east_is_in_same_section {
                let east_block = BlockState::try_from(
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize),
                )
                .unwrap_or_default();
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            if south_is_in_same_section {
                let south_block = BlockState::try_from(
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize),
                )
                .unwrap_or_default();
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            if west_is_in_same_section {
                let west_block = BlockState::try_from(
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize),
                )
                .unwrap_or_default();
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            mining_cost
        }) else {
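            // the chunk isn't loaded; assume solid blocks are unbreakable and
            // everything else is free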
            let cost = if self.is_block_pos_solid(pos) {
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            return f32::INFINITY;
        }

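        // now check the neighbors that are in other sections, each of which
        // needs its own section lookup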
        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    BlockState::try_from(
                        section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize),
                    )
                    .unwrap_or_default()
                })
                .unwrap_or_default();
            check(block_state)
        };

        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }

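    /// Whether a player could fit at the given position, i.e. both the block
    /// there and the one above it are passable.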
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

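    /// The cost of breaking both blocks a player would occupy at this
    /// position.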
    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }

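    /// Whether a player can stand at the given position: the block below is
    /// solid, and there's room for the player at the position itself.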
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_solid(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

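    /// Like `cost_for_passing`, but infinite if there's no solid block to
    /// stand on.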
    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_solid(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }

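    /// How many blocks a player would fall from the given position before
    /// landing, or `u32::MAX` if they'd fall out of the world.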
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }
}

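/// Whether a block can be moved through: it has no collision shape and isn't a
/// liquid, waterlogged, seagrass, or fire.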
pub fn is_block_state_passable(block: BlockState) -> bool {
    if block.is_air() {
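        // fast path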
        return true;
    }
    if !block.is_collision_shape_empty() {
        return false;
    }
    let registry_block = azalea_registry::Block::from(block);
    if registry_block == azalea_registry::Block::Water {
        return false;
    }
    if block
        .property::<azalea_block::properties::Waterlogged>()
        .unwrap_or_default()
    {
        return false;
    }
    if registry_block == azalea_registry::Block::Lava {
        return false;
    }
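    // the `Waterlogged` property doesn't cover seagrass and some other water
    // blocks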
    if block == azalea_registry::Block::Seagrass.into() {
        return false;
    }

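    // don't walk through fire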
    if registry_block == azalea_registry::Block::Fire
        || registry_block == azalea_registry::Block::SoulFire
    {
        return false;
    }

    true
}

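/// Whether a block has a full collision box that a player can stand on.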
pub fn is_block_state_solid(block: BlockState) -> bool {
    if block.is_air() {
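        // fast path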
        return false;
    }
    block.is_collision_shape_full()
}

#[cfg(test)]
mod tests {

    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

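    // each test builds a single-chunk world with stone at (0, 0, 0) and air
    // above it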
    #[test]
    fn test_is_passable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_standable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 2, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 3, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}