// azalea/pathfinder/world.rs

1use std::{
2    cell::{RefCell, UnsafeCell},
3    sync::Arc,
4};
5
6use azalea_block::{BlockState, properties};
7use azalea_core::{
8    bitset::FastFixedBitSet,
9    position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
10};
11use azalea_physics::collision::BlockWithShape;
12use azalea_registry::{Block, tags};
13use azalea_world::{Instance, palette::PalettedContainer};
14use parking_lot::RwLock;
15
16use super::{mining::MiningCache, rel_block_pos::RelBlockPos};
17
/// An efficient representation of the world used for the pathfinder.
pub struct CachedWorld {
    /// The origin that the [`RelBlockPos`] types will be relative to. This is
    /// for an optimization that reduces the size of the block positions
    /// that are used by the pathfinder.
    origin: BlockPos,

    /// The lowest y coordinate of the world, copied from the instance when
    /// this `CachedWorld` is created.
    min_y: i32,
    /// Handle to the live world; only locked when a chunk isn't cached yet.
    world_lock: Arc<RwLock<Instance>>,

    // we store `PalettedContainer`s instead of `Chunk`s or `Section`s because it doesn't contain
    // any unnecessary data like heightmaps or biomes.
    cached_chunks: RefCell<
        Vec<(
            ChunkPos,
            Vec<azalea_world::palette::PalettedContainer<BlockState>>,
        )>,
    >,
    /// Index into `cached_chunks` of the most recently accessed chunk, used
    /// as a fast path when the same chunk is queried repeatedly.
    last_chunk_cache_index: RefCell<Option<usize>>,

    /// Per-section passable/solid/standable bitsets.
    // UnsafeCell because the pathfinder only touches this from one thread and
    // this avoids RefCell's runtime borrow tracking in a hot path.
    cached_blocks: UnsafeCell<CachedSections>,

    /// Fixed-size table caching mining costs per position; indexed by a
    /// 20-bit hash of the position (see `cost_for_breaking_block`).
    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
}
42
/// A sorted collection of [`CachedSection`]s with a two-entry
/// most-recently-used fast path.
#[derive(Default)]
pub struct CachedSections {
    /// Index in `sections` of the most recently accessed section.
    pub last_index: usize,
    /// Index in `sections` of the second most recently accessed section.
    pub second_last_index: usize,
    /// Sections kept sorted by position so lookups can binary search.
    pub sections: Vec<CachedSection>,
}
49
50impl CachedSections {
51    #[inline]
52    pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
53        if let Some(last_item) = self.sections.get(self.last_index) {
54            if last_item.pos == pos {
55                return Some(&mut self.sections[self.last_index]);
56            } else if let Some(second_last_item) = self.sections.get(self.second_last_index)
57                && second_last_item.pos == pos
58            {
59                return Some(&mut self.sections[self.second_last_index]);
60            }
61        }
62
63        let index = self
64            .sections
65            .binary_search_by(|section| section.pos.cmp(&pos))
66            .ok();
67
68        if let Some(index) = index {
69            self.second_last_index = self.last_index;
70            self.last_index = index;
71            return Some(&mut self.sections[index]);
72        }
73        None
74    }
75
76    #[inline]
77    pub fn insert(&mut self, section: CachedSection) {
78        // self.sections.push(section);
79        // self.sections.sort_unstable_by(|a, b| a.pos.cmp(&b.pos));
80        let index = self
81            .sections
82            .binary_search_by(|s| s.pos.cmp(&section.pos))
83            .unwrap_or_else(|e| e);
84        self.sections.insert(index, section);
85    }
86}
87
/// Precomputed pathability bitsets for one 16x16x16 chunk section, indexed by
/// the 12-bit packed [`ChunkSectionBlockPos`].
pub struct CachedSection {
    /// The position of this section in the world.
    pub pos: ChunkSectionPos,
    /// Blocks that we can fully pass through (like air).
    pub passable_bitset: FastFixedBitSet<4096>,
    /// Blocks that we can stand on and do parkour from.
    pub solid_bitset: FastFixedBitSet<4096>,
    /// Blocks that we can stand on but might not be able to parkour from.
    pub standable_bitset: FastFixedBitSet<4096>,
}
97
98impl CachedWorld {
99    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
100        let min_y = world_lock.read().chunks.min_y;
101        Self {
102            origin,
103            min_y,
104            world_lock,
105            cached_chunks: Default::default(),
106            last_chunk_cache_index: Default::default(),
107            cached_blocks: Default::default(),
108            // this uses about 12mb of memory. it *really* helps though.
109            cached_mining_costs: UnsafeCell::new(
110                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
111                    .into_boxed_slice(),
112            ),
113        }
114    }
115
116    // ```
117    // fn get_block_state(&self, pos: BlockPos) -> Option<BlockState> {
118    //     self.with_section(ChunkSectionPos::from(pos), |section| {
119    //         let state = section.get(pos.x as usize, pos.y as usize, pos.z as usize);
120    //         BlockState::try_from(state).unwrap_or(BlockState::AIR)
121    //     })
122    // }
123    // ```
124
    /// Run `f` on the block-state [`PalettedContainer`] of the section at
    /// `section_pos`, returning `None` if the position is out of bounds or the
    /// chunk isn't loaded.
    ///
    /// The first access to a chunk copies its sections into
    /// `self.cached_chunks`, so later lookups don't take the world lock.
    fn with_section<T>(
        &self,
        section_pos: ChunkSectionPos,
        f: impl FnOnce(&azalea_world::palette::PalettedContainer<BlockState>) -> T,
    ) -> Option<T> {
        if section_pos.y * 16 < self.min_y {
            // y position is out of bounds
            return None;
        }

        let chunk_pos = ChunkPos::from(section_pos);
        let section_index =
            azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;

        let mut cached_chunks = self.cached_chunks.borrow_mut();

        // optimization: avoid doing the iter lookup if the last chunk we looked up is
        // the same
        if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow()
            && cached_chunks[last_chunk_cache_index].0 == chunk_pos
        {
            // don't bother with the iter lookup
            let sections = &cached_chunks[last_chunk_cache_index].1;
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            let section = &sections[section_index];
            return Some(f(section));
        }

        // get section from cache
        if let Some((chunk_index, sections)) =
            cached_chunks
                .iter()
                .enumerate()
                .find_map(|(i, (pos, sections))| {
                    if *pos == chunk_pos {
                        Some((i, sections))
                    } else {
                        None
                    }
                })
        {
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            // remember this chunk for the fast path above
            *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
            let section = &sections[section_index];
            return Some(f(section));
        }

        // not cached yet; read the chunk from the actual world
        let world = self.world_lock.read();
        let chunk = world.chunks.get(&chunk_pos)?;
        let chunk = chunk.read();

        // copy out just the block states (see the comment on `cached_chunks`)
        let sections = chunk
            .sections
            .iter()
            .map(|section| section.states.clone())
            .collect::<Vec<PalettedContainer<BlockState>>>();

        if section_index >= sections.len() {
            // y position is out of bounds
            return None;
        };

        let section = &sections[section_index];
        let r = f(section);

        // add the sections to the chunk cache
        cached_chunks.push((chunk_pos, sections));

        Some(r)
    }
201
202    fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
203        self.with_section(section_pos, |section| {
204            let mut passable_bitset = FastFixedBitSet::<4096>::new();
205            let mut solid_bitset = FastFixedBitSet::<4096>::new();
206            let mut standable_bitset = FastFixedBitSet::<4096>::new();
207            for i in 0..4096 {
208                let block_state = section.get_at_index(i);
209                if is_block_state_passable(block_state) {
210                    passable_bitset.set(i);
211                }
212                if is_block_state_solid(block_state) {
213                    solid_bitset.set(i);
214                }
215                if is_block_state_standable(block_state) {
216                    standable_bitset.set(i);
217                }
218            }
219            CachedSection {
220                pos: section_pos,
221                passable_bitset,
222                solid_bitset,
223                standable_bitset,
224            }
225        })
226    }
227
228    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
229        self.is_block_pos_passable(pos.apply(self.origin))
230    }
231
232    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
233        let (section_pos, section_block_pos) =
234            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
235        let index = u16::from(section_block_pos) as usize;
236        // SAFETY: we're only accessing this from one thread
237        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
238        if let Some(cached) = cached_blocks.get_mut(section_pos) {
239            return cached.passable_bitset.index(index);
240        }
241
242        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
243            return false;
244        };
245        let passable = cached.passable_bitset.index(index);
246        cached_blocks.insert(cached);
247        passable
248    }
249
250    /// Get the block state at the given position. This is relatively slow, so
251    /// you should avoid it whenever possible.
252    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
253        self.get_block_state_at_pos(pos.apply(self.origin))
254    }
255
256    fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
257        let (section_pos, section_block_pos) =
258            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
259        let index = u16::from(section_block_pos) as usize;
260
261        self.with_section(section_pos, |section| section.get_at_index(index))
262            .unwrap_or_default()
263    }
264
265    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
266        self.is_block_pos_solid(pos.apply(self.origin))
267    }
268    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
269        self.is_block_pos_standable(pos.apply(self.origin))
270    }
271
272    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
273        let (section_pos, section_block_pos) =
274            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
275        let index = u16::from(section_block_pos) as usize;
276        // SAFETY: we're only accessing this from one thread
277        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
278        if let Some(cached) = cached_blocks.get_mut(section_pos) {
279            return cached.solid_bitset.index(index);
280        }
281
282        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
283            return false;
284        };
285        let solid = cached.solid_bitset.index(index);
286        cached_blocks.insert(cached);
287        solid
288    }
289    fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
290        let (section_pos, section_block_pos) =
291            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
292        let index = u16::from(section_block_pos) as usize;
293        // SAFETY: we're only accessing this from one thread
294        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
295        if let Some(cached) = cached_blocks.get_mut(section_pos) {
296            return cached.standable_bitset.index(index);
297        }
298
299        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
300            return false;
301        };
302        let solid = cached.standable_bitset.index(index);
303        cached_blocks.insert(cached);
304        solid
305    }
306
    /// Returns how much it costs to break this block. Returns 0 if the block is
    /// already passable.
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        // SAFETY: pathfinding is single-threaded
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
        // 20 bits total:
        // 8 bits for x, 4 bits for y, 8 bits for z
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
        // SAFETY: hash_index is masked to 20 bits above, so it's always less
        // than the table length of 2^20
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        // the slot stores the full position, so a hash collision just misses
        // (and is overwritten below) rather than returning a wrong cost
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        // SAFETY: same bound as the read above
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }
331
    /// Compute the cost of breaking the block at `pos` without consulting the
    /// mining-cost cache.
    ///
    /// Returns 0 if the block is already passable, and `f32::INFINITY` if the
    /// block is unbreakable, if breaking it would release a liquid or drop a
    /// falling block on us, or if it's solid but its chunk isn't loaded.
    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // if the block is passable then it doesn't need to be broken
            return 0.;
        }

        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

        // we use this as an optimization to avoid getting the section again if the
        // block is in the same section
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // the block is unbreakable
                return f32::INFINITY;
            }

            // if there's a falling block or liquid above this block, abort
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            // if there's a liquid to the north of this block, abort
            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the east
            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the south
            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the west
            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            // the block is probably safe to break, we'll have to check the adjacent blocks
            // that weren't in the same section next though
            mining_cost
        }) else {
            // the chunk isn't loaded
            let cost = if self.is_block_pos_solid(pos) {
                // assume it's unbreakable if it's solid and out of render distance
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            // the block is unbreakable
            return f32::INFINITY;
        }

        // slow path for neighbors in other sections: fetch the neighbor's
        // block state through with_section (defaulting to air if unloaded)
        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        };

        // check the adjacent blocks that weren't in the same section
        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }
467
468    /// Whether this block and the block above are passable
469    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
470        self.is_passable_at_block_pos(pos.apply(self.origin))
471    }
472    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
473        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
474    }
475
476    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
477        self.cost_for_breaking_block(pos, mining_cache)
478            + self.cost_for_breaking_block(pos.up(1), mining_cache)
479    }
480
481    /// Whether we can stand in this position. Checks if the block below is
482    /// solid, and that the two blocks above that are passable.
483    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
484        self.is_standable_at_block_pos(pos.apply(self.origin))
485    }
486    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
487        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
488    }
489
490    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
491        if !self.is_block_standable(pos.down(1)) {
492            return f32::INFINITY;
493        }
494        self.cost_for_passing(pos, mining_cache)
495    }
496
497    /// Get the amount of air blocks until the next solid block below this one.
498    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
499        let mut distance = 0;
500        let mut current_pos = pos.down(1);
501        while self.is_block_passable(current_pos) {
502            distance += 1;
503            current_pos = current_pos.down(1);
504
505            if current_pos.y < self.min_y {
506                return u32::MAX;
507            }
508        }
509        distance
510    }
511
    /// The origin that [`RelBlockPos`] values passed to this `CachedWorld`
    /// are relative to.
    pub fn origin(&self) -> BlockPos {
        self.origin
    }
515}
516
517/// Whether our client could pass through this block.
518pub fn is_block_state_passable(block_state: BlockState) -> bool {
519    // i already tried optimizing this by having it cache in an IntMap/FxHashMap but
520    // it wasn't measurably faster
521
522    if block_state.is_air() {
523        // fast path
524        return true;
525    }
526    if !block_state.is_collision_shape_empty() {
527        return false;
528    }
529    let registry_block = Block::from(block_state);
530    if registry_block == Block::Water {
531        return false;
532    }
533    if block_state
534        .property::<azalea_block::properties::Waterlogged>()
535        .unwrap_or_default()
536    {
537        return false;
538    }
539    if registry_block == Block::Lava {
540        return false;
541    }
542    // block.waterlogged currently doesn't account for seagrass and some other water
543    // blocks
544    if block_state == Block::Seagrass.into() {
545        return false;
546    }
547
548    // don't walk into fire
549    if registry_block == Block::Fire || registry_block == Block::SoulFire {
550        return false;
551    }
552
553    if registry_block == Block::PowderSnow {
554        // we can't jump out of powder snow
555        return false;
556    }
557
558    if registry_block == Block::SweetBerryBush {
559        // these hurt us
560        return false;
561    }
562
563    true
564}
565
566/// Whether this block has a solid hitbox at the top (i.e. we can stand on it
567/// and do parkour from it).
568#[inline]
569pub fn is_block_state_solid(block_state: BlockState) -> bool {
570    if block_state.is_air() {
571        // fast path
572        return false;
573    }
574    if block_state.is_collision_shape_full() {
575        return true;
576    }
577
578    if matches!(
579        block_state.property::<properties::Type>(),
580        Some(properties::Type::Top | properties::Type::Double)
581    ) {
582        // top slabs
583        return true;
584    }
585
586    let block = Block::from(block_state);
587    // solid enough
588    if matches!(block, Block::DirtPath | Block::Farmland) {
589        return true;
590    }
591
592    false
593}
594
595/// Whether we can stand on this block (but not necessarily do parkour jumps
596/// from it).
597pub fn is_block_state_standable(block_state: BlockState) -> bool {
598    if is_block_state_solid(block_state) {
599        return true;
600    }
601
602    let block = Block::from(block_state);
603    if tags::blocks::SLABS.contains(&block) || tags::blocks::STAIRS.contains(&block) {
604        return true;
605    }
606
607    false
608}
609
#[cfg(test)]
mod tests {
    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

    /// Build a `CachedWorld` containing a single chunk at (0, 0) with the
    /// given block states set, and the origin at (0, 0, 0).
    ///
    /// This factors out the identical world-setup boilerplate that was
    /// previously duplicated in every test.
    fn make_cached_world(blocks: &[(BlockPos, BlockState)]) -> CachedWorld {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        for &(pos, state) in blocks {
            partial_world.chunks.set_block_state(pos, state, &world);
        }

        CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default())
    }

    #[test]
    fn test_is_passable() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
        ]);
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
        ]);
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_standable() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
            (BlockPos::new(0, 2, 0), BlockState::AIR),
            (BlockPos::new(0, 3, 0), BlockState::AIR),
        ]);
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}