1use core::f32;
2use std::{
3 array,
4 cell::{RefCell, UnsafeCell},
5 fmt::Debug,
6 mem,
7 sync::Arc,
8};
9
10use azalea_block::{
11 BlockState,
12 properties::{self, SlabKind, StairShape},
13};
14use azalea_core::{
15 bitset::FastFixedBitSet,
16 position::{BlockPos, ChunkPos, ChunkSectionBlockPos},
17};
18use azalea_physics::collision::BlockWithShape;
19use azalea_registry::builtin::BlockKind;
20use azalea_world::{World, palette::PalettedContainer};
21use parking_lot::RwLock;
22use rustc_hash::FxHashMap;
23
24use super::{mining::MiningCache, positions::RelBlockPos};
25use crate::pathfinder::positions::SmallChunkSectionPos;
26
/// Side length, in chunks, of the direct-mapped `bounded_chunk_cache`.
const MAX_VIEW_DISTANCE: usize = 32;
28
/// A cached, read-optimized view of the world for pathfinding queries.
///
/// Not thread-safe: interior mutability is implemented with
/// `RefCell`/`UnsafeCell`, so a `CachedWorld` is meant to be used from a
/// single thread.
pub struct CachedWorld {
    /// The reference point that `RelBlockPos` arguments are resolved against
    /// (via `RelBlockPos::apply`).
    origin: BlockPos,

    /// The world's minimum build height, read once at construction.
    min_y: i32,
    world_lock: Arc<RwLock<World>>,

    /// Direct-mapped chunk cache, used while `unbounded_chunk_cache` is
    /// empty. Slots hold a sentinel `ChunkPos` of `i32::MAX` until filled.
    bounded_chunk_cache: RefCell<[(ChunkPos, CachedChunk); MAX_VIEW_DISTANCE * MAX_VIEW_DISTANCE]>,
    /// Fallback hash-map chunk cache, used once the bounded cache has had a
    /// collision (see `with_section`).
    unbounded_chunk_cache: RefCell<FxHashMap<ChunkPos, CachedChunk>>,

    /// Per-section bitsets answering passable/solid/standable/water queries.
    cached_blocks: UnsafeCell<CachedSections>,

    /// Lazily-allocated direct-mapped memo table for mining costs, indexed by
    /// `calculate_cached_mining_costs_index`.
    #[allow(clippy::type_complexity)]
    cached_mining_costs: UnsafeCell<Option<Box<[(RelBlockPos, f32)]>>>,
}
49
/// A copied snapshot of a chunk's sections (each section's block-state
/// palette container), as captured by `CachedWorld::with_section`.
type CachedChunk = Box<[PalettedContainer<BlockState>]>;
53
/// Cache of per-section bitsets, split into a direct-mapped "fast" array and
/// an overflow vector for sections whose fast slot is already occupied.
pub struct CachedSections {
    // Direct-mapped by `fast_section_idx`; an entry's `pos` must still be
    // compared on lookup since distant sections alias to the same slot.
    pub fast_sections: Box<[Option<CachedSection>; FAST_SECTIONS_CACHE_SIZE]>,
    // Kept sorted by `pos` (see `insert`), scanned linearly in `get_mut`.
    pub fallback_sections: Vec<CachedSection>,
}
58
59const FAST_SECTIONS_CACHE_SIZE: usize = 16 * 16 * 16;
60fn fast_section_idx(pos: SmallChunkSectionPos) -> usize {
61 (pos.y as usize % 16) + (pos.x as usize % 16) * 16 + (pos.z as usize % 16) * 16 * 16
62}
63
64impl CachedSections {
65 pub fn get_mut(&mut self, pos: SmallChunkSectionPos) -> Option<&mut CachedSection> {
66 let idx = fast_section_idx(pos);
67
68 if let Some(fast_item) = &mut self.fast_sections[idx]
69 && fast_item.pos == pos
70 {
71 return Some(fast_item);
72 }
73
74 if let Some(item) = self.fallback_sections.iter_mut().find(|s| s.pos == pos) {
75 return Some(item);
76 }
77
78 None
79 }
80
81 #[inline]
82 pub fn insert(&mut self, section: CachedSection) {
83 let idx = fast_section_idx(section.pos);
84
85 if let item @ None = &mut self.fast_sections[idx] {
86 *item = Some(section);
87 return;
88 }
89
90 let index = self
93 .fallback_sections
94 .binary_search_by(|s| s.pos.cmp(§ion.pos))
95 .unwrap_or_else(|e| e);
96 self.fallback_sections.insert(index, section);
97 }
98}
99impl Default for CachedSections {
100 fn default() -> Self {
101 Self {
102 fast_sections: (0..FAST_SECTIONS_CACHE_SIZE)
103 .map(|_| None)
104 .collect::<Box<[_]>>()
105 .try_into()
106 .unwrap(),
107 fallback_sections: Default::default(),
108 }
109 }
110}
111
/// Precomputed pathfinding bitsets for a single chunk section.
pub struct CachedSection {
    pub pos: SmallChunkSectionPos,
    // Boxed, presumably to keep entries in the fast-sections array small —
    // the bitsets themselves are 4 x 4096 bits.
    pub bitsets: Box<SectionBitsets>,
}
impl Debug for CachedSection {
    // Manual impl so the large bitsets aren't dumped into debug output; only
    // the section position is shown.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CachedSection")
            .field("pos", &self.pos)
            .finish()
    }
}
123
/// One bit per block (16x16x16 = 4096) for each pathfinding predicate,
/// populated by `CachedWorld::calculate_bitsets_for_section`.
#[derive(Default)]
pub struct SectionBitsets {
    // Bit set where `is_block_state_passable` is true.
    pub passable: FastFixedBitSet<4096>,
    // Bit set where `is_block_state_solid` is true.
    pub solid: FastFixedBitSet<4096>,
    // Bit set where `is_block_state_standable` is true.
    pub standable: FastFixedBitSet<4096>,
    // Bit set where `is_block_state_water` is true.
    pub water: FastFixedBitSet<4096>,
}
135
136impl CachedWorld {
    /// Creates a new `CachedWorld` reading from `world_lock`, with `origin`
    /// as the reference point that `RelBlockPos` arguments are resolved
    /// against.
    pub fn new(world_lock: Arc<RwLock<World>>, origin: BlockPos) -> Self {
        let min_y = world_lock.read().chunks.min_y();
        Self {
            origin,
            min_y,
            world_lock,
            // `i32::MAX` is a sentinel chunk position that never matches a
            // real lookup, so empty slots can't produce false cache hits.
            bounded_chunk_cache: RefCell::new(array::from_fn(|_| {
                (ChunkPos::new(i32::MAX, i32::MAX), Default::default())
            })),
            unbounded_chunk_cache: Default::default(),
            cached_blocks: Default::default(),
            cached_mining_costs: UnsafeCell::new(None),
        }
    }
151
152 fn with_section<T>(
162 &self,
163 section_pos: SmallChunkSectionPos,
164 f: impl FnOnce(&azalea_world::palette::PalettedContainer<BlockState>) -> T,
165 ) -> Option<T> {
166 if section_pos.y * 16 < self.min_y {
167 return None;
169 }
170
171 let chunk_pos = ChunkPos::new(section_pos.x as i32, section_pos.z as i32);
172 let section_index =
173 azalea_world::chunk::section_index(section_pos.y * 16, self.min_y) as usize;
174
175 let mut cache_idx = 0;
176
177 let mut unbounded_chunk_cache = self.unbounded_chunk_cache.borrow_mut();
178 let mut bounded_chunk_cache = self.bounded_chunk_cache.borrow_mut();
179 if unbounded_chunk_cache.is_empty() {
180 const D: i32 = MAX_VIEW_DISTANCE as i32;
181 let cache_x = i32::rem_euclid(chunk_pos.x, D) * D;
182 let cache_z = i32::rem_euclid(chunk_pos.z, D);
183 cache_idx = (cache_x + cache_z) as usize;
184
185 if !bounded_chunk_cache[cache_idx].1.is_empty() {
187 if bounded_chunk_cache[cache_idx].0 != chunk_pos {
188 for (moving_chunk_pos, moving_chunk) in bounded_chunk_cache.iter_mut() {
191 if !moving_chunk.is_empty() {
192 unbounded_chunk_cache
193 .insert(*moving_chunk_pos, mem::take(moving_chunk));
194 }
195 }
196 }
197
198 let sections = &bounded_chunk_cache[cache_idx].1;
199 if section_index >= sections.len() {
200 return None;
202 };
203 let section = §ions[section_index];
204 return Some(f(section));
205 }
206 } else if let Some(sections) = unbounded_chunk_cache.get(&chunk_pos) {
207 if section_index >= sections.len() {
208 return None;
210 };
211 let section = §ions[section_index];
212 return Some(f(section));
213 }
214
215 let world = self.world_lock.read();
216 let chunk = world.chunks.get(&chunk_pos)?;
217 let chunk = chunk.read();
218
219 let sections = chunk
220 .sections
221 .iter()
222 .map(|section| section.states.clone())
223 .collect::<Box<[PalettedContainer<BlockState>]>>();
224
225 if section_index >= sections.len() {
226 return None;
228 };
229
230 let section = §ions[section_index];
231 let r = f(section);
232
233 if unbounded_chunk_cache.is_empty() {
235 bounded_chunk_cache[cache_idx] = (chunk_pos, sections);
236 } else {
237 unbounded_chunk_cache.insert(chunk_pos, sections);
238 }
239
240 Some(r)
241 }
242
243 fn calculate_bitsets_for_section(&self, section_pos: SmallChunkSectionPos) -> CachedSection {
244 let bitsets = self
245 .with_section(section_pos, |section| {
246 let mut bitsets = SectionBitsets {
247 passable: FastFixedBitSet::<4096>::new(),
248 solid: FastFixedBitSet::<4096>::new(),
249 standable: FastFixedBitSet::<4096>::new(),
250 water: FastFixedBitSet::<4096>::new(),
251 };
252
253 for i in 0..4096 {
254 let block_state = section.get_at_index(i);
255 if is_block_state_passable(block_state) {
256 bitsets.passable.set(i);
257 }
258 if is_block_state_solid(block_state) {
259 bitsets.solid.set(i);
260 }
261 if is_block_state_standable(block_state) {
262 bitsets.standable.set(i);
263 }
264 if is_block_state_water(block_state) {
265 bitsets.water.set(i);
266 }
267 }
268 Box::new(bitsets)
269 })
270 .unwrap_or_default();
271
272 CachedSection {
273 pos: section_pos,
274 bitsets,
275 }
276 }
277
    /// Evaluates `cb` against the cached bitsets for the section containing
    /// `pos` (with the block's index within the section), computing and
    /// caching the section's bitsets on a miss.
    fn check_bitset_for_block(
        &self,
        pos: BlockPos,
        cb: impl FnOnce(&SectionBitsets, usize) -> bool,
    ) -> bool {
        let (section_pos, section_block_pos) = (
            SmallChunkSectionPos::from(pos),
            ChunkSectionBlockPos::from(pos),
        );
        // Index of the block within its 16x16x16 section.
        let index = u16::from(section_block_pos) as usize;
        // SAFETY(review): relies on `CachedWorld` being used from one thread
        // (its `UnsafeCell`/`RefCell` fields make it !Sync) and on no other
        // borrow of `cached_blocks` being alive across this call — confirm
        // that `calculate_bitsets_for_section` never re-enters this path.
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cb(&cached.bitsets, index);
        }

        // Cache miss: build the bitsets for the whole section, answer the
        // query, then remember the section for next time.
        let cached = self.calculate_bitsets_for_section(section_pos);
        let passable = cb(&cached.bitsets, index);
        cached_blocks.insert(cached);
        passable
    }
299
    /// Whether the single block at `pos` (relative to the origin) can be
    /// moved through.
    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }
    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        self.check_bitset_for_block(pos, |bitsets, index| bitsets.passable.index(index))
    }

    /// Whether the block at `pos` (relative to the origin) is water.
    pub fn is_block_water(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_water(pos.apply(self.origin))
    }
    fn is_block_pos_water(&self, pos: BlockPos) -> bool {
        self.check_bitset_for_block(pos, |bitsets, index| bitsets.water.index(index))
    }
313
    /// Gets the block state at a position relative to the pathfinding origin.
    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
        self.get_block_state_at_pos(pos.apply(self.origin))
    }

    /// Gets the block state at an absolute position, falling back to
    /// `BlockState::default()` when the chunk isn't loaded.
    fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
        let (section_pos, section_block_pos) = (
            SmallChunkSectionPos::from(pos),
            ChunkSectionBlockPos::from(pos),
        );
        // Index of the block within its 16x16x16 section.
        let index = u16::from(section_block_pos) as usize;

        self.with_section(section_pos, |section| section.get_at_index(index))
            .unwrap_or_default()
    }
331
    /// Whether the block at `pos` (relative to the origin) counts as solid
    /// (see `is_block_state_solid`).
    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }
    /// Whether the block at `pos` (relative to the origin) can be stood on
    /// (see `is_block_state_standable`).
    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_standable(pos.apply(self.origin))
    }

    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        self.check_bitset_for_block(pos, |bitsets, index| bitsets.solid.index(index))
    }
    fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
        self.check_bitset_for_block(pos, |bitsets, index| bitsets.standable.index(index))
    }
345
    /// Returns the cost of breaking the block at `pos`, memoized in a
    /// fixed-size direct-mapped table keyed by a hash of the position.
    /// `f32::INFINITY` means the block can't or shouldn't be mined.
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        let cached_mining_costs = self.cached_mining_costs();

        let hash_index = calculate_cached_mining_costs_index(pos);
        // SAFETY: `calculate_cached_mining_costs_index` masks each axis to 6
        // bits, so the index is always < CACHED_MINING_COSTS_SIZE — the
        // table's length (it also debug_asserts this).
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        if cached_pos == pos {
            return potential_cost;
        }

        // Miss (or a different position aliased into this slot): recompute
        // and overwrite the slot.
        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        // SAFETY: same bound as above.
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }
366
367 #[allow(clippy::mut_from_ref)]
369 fn cached_mining_costs(&self) -> &mut [(RelBlockPos, f32)] {
370 let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
372 if let Some(cached_mining_costs) = cached_mining_costs {
373 return cached_mining_costs;
374 }
375 *cached_mining_costs = Some(
379 vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); CACHED_MINING_COSTS_SIZE]
380 .into(),
381 );
382
383 cached_mining_costs.as_mut().unwrap()
384 }
385
    /// Computes the cost of breaking the block at `pos` without consulting
    /// the memo table.
    ///
    /// Returns 0 for already-passable blocks, `f32::INFINITY` when mining is
    /// impossible or unsafe (unbreakable block, or a liquid adjacent to it),
    /// and otherwise the mining cost — plus, recursively, the cost of
    /// breaking any falling block resting on top.
    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // nothing to break
            return 0.;
        }

        let rel_pos = pos;
        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) = (
            SmallChunkSectionPos::from(pos),
            ChunkSectionBlockPos::from(pos),
        );

        // Neighbors inside the same 16^3 section can be read cheaply inside
        // the `with_section` closure below; the ones across a section border
        // need their own (slower) section lookup afterwards.
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let mut is_falling_block_above = false;

        let Some(mut mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // unbreakable
                return f32::INFINITY;
            }

            // A liquid on any side would flow into the hole, so adjacent
            // liquids make mining here a non-option.
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) {
                    return f32::INFINITY;
                }
                // Remember gravity-affected blocks above; their breaking cost
                // is added at the end.
                if mining_cache.is_falling_block(up_block) {
                    is_falling_block_above = true;
                }
            }

            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            mining_cost
        }) else {
            // Chunk isn't loaded: treat solid blocks as unbreakable and
            // everything else as free.
            let cost = if self.is_block_pos_solid(pos) {
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            return f32::INFINITY;
        }

        // Helper for the neighbors that sit across a section border: fetches
        // the neighbor's block state (default if unloaded) and runs `check`.
        fn check_should_avoid_this_block(
            world: &CachedWorld,
            pos: BlockPos,
            check: impl FnOnce(BlockState) -> bool,
        ) -> bool {
            let block_state = world
                .with_section(SmallChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        }

        // Cross-section neighbor checks, mirroring the in-section checks
        // above (liquids forbid mining; a falling block above adds cost).
        if !up_is_in_same_section
            && check_should_avoid_this_block(self, pos.up(1), |b| {
                if mining_cache.is_falling_block(b) {
                    is_falling_block_above = true;
                }
                mining_cache.is_liquid(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(self, pos.north(1), |b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(self, pos.east(1), |b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(self, pos.south(1), |b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(self, pos.west(1), |b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        // Breaking this block means whatever falls on it must be broken too
        // (recursion goes through the memoized entry point).
        if is_falling_block_above {
            mining_cost += self.cost_for_breaking_block(rel_pos.up(1), mining_cache);
        }

        mining_cost
    }
540
    /// Whether a two-block-tall body fits at `pos` (relative to the origin):
    /// both `pos` and the block above it must be passable.
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

    /// Cost of mining out both blocks of the body space at `pos`.
    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }
553
    /// Whether it's possible to stand at `pos` (relative to the origin): a
    /// standable block below plus two passable blocks of clearance.
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

    /// Cost of making `pos` standable by mining out the body space;
    /// `f32::INFINITY` if the supporting block below isn't standable.
    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_standable(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }
571
    /// How many blocks would be fallen when stepping off at `pos`, or
    /// `u32::MAX` when the drop continues below the world's minimum y.
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            // NOTE(review): this compares `RelBlockPos.y` directly against
            // the absolute `min_y`; that's only correct if the y component of
            // `RelBlockPos` is absolute (its x/z are i16 but y is i32, which
            // suggests it is) — confirm against `RelBlockPos`'s definition.
            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }
587
    /// The reference point that relative positions passed to this
    /// `CachedWorld` are resolved against.
    pub fn origin(&self) -> BlockPos {
        self.origin
    }
591}
592
593const CACHED_MINING_COSTS_SIZE: usize = 2usize.pow(18);
594fn calculate_cached_mining_costs_index(pos: RelBlockPos) -> usize {
595 const X_BITS: usize = 6;
598 const Y_BITS: usize = 6;
599 const Z_BITS: usize = 6;
600
601 const X_MASK: usize = (1 << X_BITS) - 1;
602 const Y_MASK: usize = (1 << Y_BITS) - 1;
603 const Z_MASK: usize = (1 << Z_BITS) - 1;
604
605 let hash_index = ((pos.x as usize & X_MASK) << (Y_BITS + Z_BITS))
606 | ((pos.z as usize & Z_MASK) << Y_BITS)
607 | (pos.y as usize & Y_MASK);
608 debug_assert!(hash_index < CACHED_MINING_COSTS_SIZE);
609 hash_index
610}
611
612pub fn is_block_state_passable(block_state: BlockState) -> bool {
614 if block_state.is_air() {
618 return true;
620 }
621 if !block_state.is_collision_shape_empty() {
622 return false;
623 }
624 let registry_block = BlockKind::from(block_state);
625 if registry_block == BlockKind::Water {
626 return false;
627 }
628 if block_state
629 .property::<azalea_block::properties::Waterlogged>()
630 .unwrap_or_default()
631 {
632 return false;
633 }
634 if registry_block == BlockKind::Lava {
635 return false;
636 }
637 if block_state == BlockKind::Seagrass.into() {
640 return false;
641 }
642
643 if registry_block == BlockKind::Fire || registry_block == BlockKind::SoulFire {
645 return false;
646 }
647
648 if registry_block == BlockKind::PowderSnow {
649 return false;
651 }
652
653 if registry_block == BlockKind::SweetBerryBush {
654 return false;
656 }
657
658 true
659}
660
661#[inline]
664pub fn is_block_state_solid(block_state: BlockState) -> bool {
665 if block_state.is_air() {
666 return false;
668 }
669
670 if block_state.is_collision_shape_full() {
671 if block_state == BlockState::from(BlockKind::MagmaBlock) {
673 return false;
674 };
675
676 return true;
677 }
678
679 if matches!(
680 block_state.property::<properties::SlabKind>(),
681 Some(properties::SlabKind::Top | properties::SlabKind::Double)
682 ) {
683 return true;
685 }
686
687 let block = BlockKind::from(block_state);
688 if matches!(block, BlockKind::DirtPath | BlockKind::Farmland) {
690 return true;
691 }
692
693 false
694}
695
696pub fn is_block_state_standable(block_state: BlockState) -> bool {
699 if block_state.is_air() {
700 return false;
702 }
703
704 if is_block_state_solid(block_state) {
705 return true;
706 }
707
708 if block_state.property::<SlabKind>().is_some()
709 || block_state.property::<StairShape>().is_some()
710 {
711 return true;
712 }
713
714 false
715}
716
/// Whether this block state is water.
///
/// NOTE(review): this compares against `BlockState::from(BlockKind::Water)`,
/// i.e. only water's default state — confirm whether non-default water
/// states (flowing levels) and waterlogged blocks should also count.
pub fn is_block_state_water(block_state: BlockState) -> bool {
    block_state == BlockState::from(BlockKind::Water)
}
721
#[cfg(test)]
mod tests {
    use azalea_world::{Chunk, ChunkStorage, PartialWorld};

    use super::*;

    /// Stone is not passable; the air above it is.
    #[test]
    fn test_is_passable() {
        let mut partial_world = PartialWorld::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            BlockPos::new(0, 0, 0),
            BlockKind::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    /// Stone is solid; air is not.
    #[test]
    fn test_is_solid() {
        let mut partial_world = PartialWorld::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            BlockPos::new(0, 0, 0),
            BlockKind::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    /// Standing at y=1 works (stone below, two air blocks of clearance);
    /// standing inside the stone or with the feet one block higher doesn't.
    #[test]
    fn test_is_standable() {
        let mut partial_world = PartialWorld::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            BlockPos::new(0, 0, 0),
            BlockKind::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 1, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 2, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 3, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}