1use std::{
2 cell::{RefCell, UnsafeCell},
3 sync::Arc,
4};
5
6use azalea_block::{BlockState, properties};
7use azalea_core::{
8 bitset::FastFixedBitSet,
9 position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
10};
11use azalea_physics::collision::BlockWithShape;
12use azalea_registry::{Block, tags};
13use azalea_world::{Instance, palette::PalettedContainer};
14use parking_lot::RwLock;
15
16use super::{mining::MiningCache, rel_block_pos::RelBlockPos};
17
/// A snapshot-style cache over an [`Instance`] used by the pathfinder, so
/// repeated block queries don't have to take the world read lock.
///
/// NOTE: the `RefCell`/`UnsafeCell` fields make this type `!Sync`; it is
/// meant to be used from a single thread.
pub struct CachedWorld {
    // the absolute position that all `RelBlockPos` queries are relative to
    origin: BlockPos,

    // lowest block y of the world, copied out of the instance at construction
    min_y: i32,
    world_lock: Arc<RwLock<Instance>>,

    // chunk sections copied out of the world on first access, keyed by chunk
    // position (grows unbounded for the lifetime of this CachedWorld)
    cached_chunks: RefCell<
        Vec<(
            ChunkPos,
            Vec<azalea_world::palette::PalettedContainer<BlockState>>,
        )>,
    >,
    // index into `cached_chunks` of the last chunk we hit, as a fast path
    last_chunk_cache_index: RefCell<Option<usize>>,

    // per-section passable/solid/standable bitsets, computed lazily
    cached_blocks: UnsafeCell<CachedSections>,

    // direct-mapped memoization table for mining costs, indexed by a 20-bit
    // hash of the position (see `cost_for_breaking_block`)
    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
}
43
/// A list of per-section bitsets, kept sorted by section position, with a
/// two-entry most-recently-used index for fast repeated lookups.
#[derive(Default)]
pub struct CachedSections {
    // index into `sections` of the most recent hit
    pub last_index: usize,
    // index into `sections` of the hit before that
    pub second_last_index: usize,
    // sorted by `pos` (maintained by `insert`) so `get_mut` can binary search
    pub sections: Vec<CachedSection>,
}
50
51impl CachedSections {
52 #[inline]
53 pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
54 if let Some(last_item) = self.sections.get(self.last_index) {
55 if last_item.pos == pos {
56 return Some(&mut self.sections[self.last_index]);
57 } else if let Some(second_last_item) = self.sections.get(self.second_last_index)
58 && second_last_item.pos == pos
59 {
60 return Some(&mut self.sections[self.second_last_index]);
61 }
62 }
63
64 let index = self
65 .sections
66 .binary_search_by(|section| section.pos.cmp(&pos))
67 .ok();
68
69 if let Some(index) = index {
70 self.second_last_index = self.last_index;
71 self.last_index = index;
72 return Some(&mut self.sections[index]);
73 }
74 None
75 }
76
77 #[inline]
78 pub fn insert(&mut self, section: CachedSection) {
79 let index = self
82 .sections
83 .binary_search_by(|s| s.pos.cmp(§ion.pos))
84 .unwrap_or_else(|e| e);
85 self.sections.insert(index, section);
86 }
87}
88
/// Precomputed pathfinding bitsets for a single 16x16x16 chunk section; one
/// bit per block (4096 total), indexed by `u16::from(ChunkSectionBlockPos)`.
pub struct CachedSection {
    pub pos: ChunkSectionPos,
    // blocks that can be moved through (see `is_block_state_passable`)
    pub passable_bitset: FastFixedBitSet<4096>,
    // blocks with a full collision shape (see `is_block_state_solid`)
    pub solid_bitset: FastFixedBitSet<4096>,
    // blocks that can be stood on, incl. slabs/stairs (see
    // `is_block_state_standable`)
    pub standable_bitset: FastFixedBitSet<4096>,
}
98
99impl CachedWorld {
    /// Create a `CachedWorld` wrapping the given instance. `origin` is the
    /// reference point that `RelBlockPos` queries are measured from.
    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
        // copy min_y out now so later queries don't need the world lock
        let min_y = world_lock.read().chunks.min_y;
        Self {
            origin,
            min_y,
            world_lock,
            cached_chunks: Default::default(),
            last_chunk_cache_index: Default::default(),
            cached_blocks: Default::default(),
            cached_mining_costs: UnsafeCell::new(
                // 2^20 slots, pre-filled with a sentinel position that a real
                // query is presumed never to match, so every slot starts as a
                // cache miss
                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
                    .into_boxed_slice(),
            ),
        }
    }
116
117 fn with_section<T>(
127 &self,
128 section_pos: ChunkSectionPos,
129 f: impl FnOnce(&azalea_world::palette::PalettedContainer<BlockState>) -> T,
130 ) -> Option<T> {
131 if section_pos.y * 16 < self.min_y {
132 return None;
134 }
135
136 let chunk_pos = ChunkPos::from(section_pos);
137 let section_index =
138 azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;
139
140 let mut cached_chunks = self.cached_chunks.borrow_mut();
141
142 if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow()
145 && cached_chunks[last_chunk_cache_index].0 == chunk_pos
146 {
147 let sections = &cached_chunks[last_chunk_cache_index].1;
149 if section_index >= sections.len() {
150 return None;
152 };
153 let section = §ions[section_index];
154 return Some(f(section));
155 }
156
157 if let Some((chunk_index, sections)) =
159 cached_chunks
160 .iter()
161 .enumerate()
162 .find_map(|(i, (pos, sections))| {
163 if *pos == chunk_pos {
164 Some((i, sections))
165 } else {
166 None
167 }
168 })
169 {
170 if section_index >= sections.len() {
171 return None;
173 };
174 *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
175 let section = §ions[section_index];
176 return Some(f(section));
177 }
178
179 let world = self.world_lock.read();
180 let chunk = world.chunks.get(&chunk_pos)?;
181 let chunk = chunk.read();
182
183 let sections = chunk
184 .sections
185 .iter()
186 .map(|section| section.states.clone())
187 .collect::<Vec<PalettedContainer<BlockState>>>();
188
189 if section_index >= sections.len() {
190 return None;
192 };
193
194 let section = §ions[section_index];
195 let r = f(section);
196
197 cached_chunks.push((chunk_pos, sections));
199
200 Some(r)
201 }
202
203 fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
204 self.with_section(section_pos, |section| {
205 let mut passable_bitset = FastFixedBitSet::<4096>::new();
206 let mut solid_bitset = FastFixedBitSet::<4096>::new();
207 let mut standable_bitset = FastFixedBitSet::<4096>::new();
208 for i in 0..4096 {
209 let block_state = section.get_at_index(i);
210 if is_block_state_passable(block_state) {
211 passable_bitset.set(i);
212 }
213 if is_block_state_solid(block_state) {
214 solid_bitset.set(i);
215 }
216 if is_block_state_standable(block_state) {
217 standable_bitset.set(i);
218 }
219 }
220 CachedSection {
221 pos: section_pos,
222 passable_bitset,
223 solid_bitset,
224 standable_bitset,
225 }
226 })
227 }
228
    /// Whether the block at the given origin-relative position can be moved
    /// through.
    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }
232
    /// Whether the block at the given absolute position is passable,
    /// computing and caching the section's bitsets on first access.
    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY(review): relies on CachedWorld not being shared across
        // threads (UnsafeCell makes it !Sync) and on no other reference to
        // cached_blocks being live while this one is held — confirm no
        // reentrancy through the closure paths
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.passable_bitset.index(index);
        }

        // section not cached yet; unloaded/out-of-range sections count as
        // not passable
        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let passable = cached.passable_bitset.index(index);
        cached_blocks.insert(cached);
        passable
    }
250
    /// The block state at the given origin-relative position (default state
    /// if the chunk isn't loaded).
    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
        self.get_block_state_at_pos(pos.apply(self.origin))
    }
257
258 fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
259 let (section_pos, section_block_pos) =
260 (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
261 let index = u16::from(section_block_pos) as usize;
262
263 self.with_section(section_pos, |section| section.get_at_index(index))
264 .unwrap_or_default()
265 }
266
    /// Whether the block at the given origin-relative position is solid
    /// (see `is_block_state_solid`).
    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }
    /// Whether the block at the given origin-relative position can be stood
    /// on (see `is_block_state_standable`).
    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_standable(pos.apply(self.origin))
    }
273
    /// Like `is_block_pos_passable` but queries the solid bitset.
    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY(review): same single-threaded, non-reentrant assumption as
        // in is_block_pos_passable
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.solid_bitset.index(index);
        }

        // unloaded/out-of-range sections count as not solid
        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.solid_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }
291 fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
292 let (section_pos, section_block_pos) =
293 (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
294 let index = u16::from(section_block_pos) as usize;
295 let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
297 if let Some(cached) = cached_blocks.get_mut(section_pos) {
298 return cached.standable_bitset.index(index);
299 }
300
301 let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
302 return false;
303 };
304 let solid = cached.standable_bitset.index(index);
305 cached_blocks.insert(cached);
306 solid
307 }
308
    /// The cost of mining the block at `pos`, memoized in a fixed-size
    /// direct-mapped table (a colliding entry is simply overwritten).
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        // SAFETY(review): same single-threaded, non-reentrant assumption as
        // the other UnsafeCell accesses in this type
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
        // pack the low bits of the coordinates into a 20-bit slot index:
        // x -> bits 12..20, y -> bits 8..12, z -> bits 0..8
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
        // SAFETY: hash_index < 2^20 by construction of the masks above, and
        // the slice was created with exactly 2^20 entries in `new`
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        // the full position is stored in each slot so hash collisions are
        // detected rather than returning a wrong cost
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        unsafe {
            // SAFETY: same bound as the read above
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }
334
    /// Compute the mining cost for `pos` without consulting the memoization
    /// table. Returns 0 for blocks that are already passable and
    /// `f32::INFINITY` for blocks that can't be mined or that have a liquid
    /// neighbor / a liquid or falling block above.
    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // nothing to break
            return 0.;
        }

        // work in absolute coordinates from here on
        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

        // neighbors inside the same section can be read from the palette we
        // already hold in the closure below; the ones that cross a section
        // boundary are checked separately afterwards
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // the block itself can't be mined
                return f32::INFINITY;
            }

            // refuse to mine when a liquid or a falling block sits on top
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            // refuse to mine next to a liquid on any horizontal side
            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            mining_cost
        }) else {
            // section unavailable: solid means unminable, anything else is
            // treated as free
            let cost = if self.is_block_pos_solid(pos) {
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            return f32::INFINITY;
        }

        // fallback lookup for the neighbors that live in adjacent sections
        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        };

        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }
470
    /// Whether both the block at `pos` and the block above it are passable
    /// (i.e. an entity two blocks tall could occupy this position).
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

    /// The combined cost of clearing both the block at `pos` and the block
    /// above it.
    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }
483
    /// Whether an entity could stand at `pos`: the block below must be
    /// standable and the two blocks from `pos` upward must be passable.
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

    /// The mining cost to make `pos` standable, or infinity if the block
    /// below can't be stood on.
    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_standable(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }
501
    /// How many passable blocks are below `pos` before something
    /// non-passable, or `u32::MAX` if the drop reaches below the world.
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            // NOTE(review): current_pos.y is origin-relative while min_y is
            // an absolute world coordinate, so this bound is only exact when
            // origin.y is ~0 — confirm this is intended
            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }

    /// The absolute position that `RelBlockPos` queries are relative to.
    pub fn origin(&self) -> BlockPos {
        self.origin
    }
520}
521
522pub fn is_block_state_passable(block_state: BlockState) -> bool {
524 if block_state.is_air() {
528 return true;
530 }
531 if !block_state.is_collision_shape_empty() {
532 return false;
533 }
534 let registry_block = Block::from(block_state);
535 if registry_block == Block::Water {
536 return false;
537 }
538 if block_state
539 .property::<azalea_block::properties::Waterlogged>()
540 .unwrap_or_default()
541 {
542 return false;
543 }
544 if registry_block == Block::Lava {
545 return false;
546 }
547 if block_state == Block::Seagrass.into() {
550 return false;
551 }
552
553 if registry_block == Block::Fire || registry_block == Block::SoulFire {
555 return false;
556 }
557
558 if registry_block == Block::PowderSnow {
559 return false;
561 }
562
563 if registry_block == Block::SweetBerryBush {
564 return false;
566 }
567
568 true
569}
570
571#[inline]
574pub fn is_block_state_solid(block_state: BlockState) -> bool {
575 if block_state.is_air() {
576 return false;
578 }
579 if block_state.is_collision_shape_full() {
580 return true;
581 }
582
583 if matches!(
584 block_state.property::<properties::Type>(),
585 Some(properties::Type::Top | properties::Type::Double)
586 ) {
587 return true;
589 }
590
591 let block = Block::from(block_state);
592 if matches!(block, Block::DirtPath | Block::Farmland) {
594 return true;
595 }
596
597 false
598}
599
600pub fn is_block_state_standable(block_state: BlockState) -> bool {
603 if is_block_state_solid(block_state) {
604 return true;
605 }
606
607 let block = Block::from(block_state);
608 if tags::blocks::SLABS.contains(&block) || tags::blocks::STAIRS.contains(&block) {
609 return true;
610 }
611
612 false
613}
614
#[cfg(test)]
mod tests {
    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

    /// Build a single-chunk world with stone at (0,0,0), air at the given
    /// heights on that column, and wrap it in a CachedWorld at the default
    /// origin.
    fn make_ctx(air_ys: &[i32]) -> CachedWorld {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world
            .chunks
            .set_block_state(BlockPos::new(0, 0, 0), Block::Stone.into(), &world);
        for &y in air_ys {
            partial_world
                .chunks
                .set_block_state(BlockPos::new(0, y, 0), BlockState::AIR, &world);
        }

        CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default())
    }

    #[test]
    fn test_is_passable() {
        let ctx = make_ctx(&[1]);
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let ctx = make_ctx(&[1]);
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_standable() {
        let ctx = make_ctx(&[1, 2, 3]);
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}
685}