azalea/pathfinder/
world.rs

1use std::{
2    cell::{RefCell, UnsafeCell},
3    sync::Arc,
4};
5
6use azalea_block::{BlockState, properties};
7use azalea_core::{
8    bitset::FastFixedBitSet,
9    position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
10};
11use azalea_physics::collision::BlockWithShape;
12use azalea_registry::{Block, tags};
13use azalea_world::{Instance, palette::PalettedContainer};
14use parking_lot::RwLock;
15
16use super::{mining::MiningCache, rel_block_pos::RelBlockPos};
17
18/// An efficient representation of the world used for the pathfinder.
19pub struct CachedWorld {
20    /// The origin that the [`RelBlockPos`] types will be relative to.
21    ///
22    /// This is for an optimization that reduces the size of the block positions
23    /// that are used by the pathfinder.
24    origin: BlockPos,
25
26    min_y: i32,
27    world_lock: Arc<RwLock<Instance>>,
28
29    // we store `PalettedContainer`s instead of `Chunk`s or `Section`s because it doesn't contain
30    // any unnecessary data like heightmaps or biomes.
31    cached_chunks: RefCell<
32        Vec<(
33            ChunkPos,
34            Vec<azalea_world::palette::PalettedContainer<BlockState>>,
35        )>,
36    >,
37    last_chunk_cache_index: RefCell<Option<usize>>,
38
39    cached_blocks: UnsafeCell<CachedSections>,
40
41    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
42}
43
/// A sorted collection of [`CachedSection`]s with a two-entry
/// most-recently-used cache in front of the binary search.
#[derive(Default)]
pub struct CachedSections {
    /// Index into `sections` of the most recently returned section.
    pub last_index: usize,
    /// Index into `sections` of the section returned before `last_index`.
    pub second_last_index: usize,
    /// The cached sections, kept sorted by `pos` by `insert`.
    pub sections: Vec<CachedSection>,
}
50
51impl CachedSections {
52    #[inline]
53    pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
54        if let Some(last_item) = self.sections.get(self.last_index) {
55            if last_item.pos == pos {
56                return Some(&mut self.sections[self.last_index]);
57            } else if let Some(second_last_item) = self.sections.get(self.second_last_index)
58                && second_last_item.pos == pos
59            {
60                return Some(&mut self.sections[self.second_last_index]);
61            }
62        }
63
64        let index = self
65            .sections
66            .binary_search_by(|section| section.pos.cmp(&pos))
67            .ok();
68
69        if let Some(index) = index {
70            self.second_last_index = self.last_index;
71            self.last_index = index;
72            return Some(&mut self.sections[index]);
73        }
74        None
75    }
76
77    #[inline]
78    pub fn insert(&mut self, section: CachedSection) {
79        // self.sections.push(section);
80        // self.sections.sort_unstable_by(|a, b| a.pos.cmp(&b.pos));
81        let index = self
82            .sections
83            .binary_search_by(|s| s.pos.cmp(&section.pos))
84            .unwrap_or_else(|e| e);
85        self.sections.insert(index, section);
86    }
87}
88
/// Lazily-computed pathfinding bitsets for one 16x16x16 chunk section.
pub struct CachedSection {
    /// The position of the section these bitsets were computed for.
    pub pos: ChunkSectionPos,
    /// Blocks that we can fully pass through (like air).
    pub passable_bitset: FastFixedBitSet<4096>,
    /// Blocks that we can stand on and do parkour from.
    pub solid_bitset: FastFixedBitSet<4096>,
    /// Blocks that we can stand on but might not be able to parkour from.
    pub standable_bitset: FastFixedBitSet<4096>,
}
98
impl CachedWorld {
    /// Create a new [`CachedWorld`] wrapping `world_lock`, with all
    /// [`RelBlockPos`] arguments interpreted as relative to `origin`.
    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
        let min_y = world_lock.read().chunks.min_y;
        Self {
            origin,
            min_y,
            world_lock,
            cached_chunks: Default::default(),
            last_chunk_cache_index: Default::default(),
            cached_blocks: Default::default(),
            // this uses about 12mb of memory. it *really* helps though.
            // the sentinel position (i16::MAX, i32::MAX, i16::MAX) marks an
            // empty slot; real lookups compare the stored position first.
            cached_mining_costs: UnsafeCell::new(
                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
                    .into_boxed_slice(),
            ),
        }
    }

    // ```
    // fn get_block_state(&self, pos: BlockPos) -> Option<BlockState> {
    //     self.with_section(ChunkSectionPos::from(pos), |section| {
    //         let state = section.get(pos.x as usize, pos.y as usize, pos.z as usize);
    //         BlockState::try_from(state).unwrap_or(BlockState::AIR)
    //     })
    // }
    // ```

    /// Run `f` with the [`PalettedContainer`] for the section at
    /// `section_pos`, returning `None` if the position is out of bounds or the
    /// chunk isn't loaded.
    ///
    /// Sections are copied out of the world into `cached_chunks` on first
    /// access, so the world lock is only taken on a cache miss.
    fn with_section<T>(
        &self,
        section_pos: ChunkSectionPos,
        f: impl FnOnce(&azalea_world::palette::PalettedContainer<BlockState>) -> T,
    ) -> Option<T> {
        if section_pos.y * 16 < self.min_y {
            // y position is out of bounds
            return None;
        }

        let chunk_pos = ChunkPos::from(section_pos);
        let section_index =
            azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;

        let mut cached_chunks = self.cached_chunks.borrow_mut();

        // optimization: avoid doing the iter lookup if the last chunk we looked up is
        // the same
        if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow()
            && cached_chunks[last_chunk_cache_index].0 == chunk_pos
        {
            // don't bother with the iter lookup
            let sections = &cached_chunks[last_chunk_cache_index].1;
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            let section = &sections[section_index];
            return Some(f(section));
        }

        // get section from cache
        if let Some((chunk_index, sections)) =
            cached_chunks
                .iter()
                .enumerate()
                .find_map(|(i, (pos, sections))| {
                    if *pos == chunk_pos {
                        Some((i, sections))
                    } else {
                        None
                    }
                })
        {
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            // remember this chunk for the fast path above
            *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
            let section = &sections[section_index];
            return Some(f(section));
        }

        // cache miss: copy the chunk's section palettes out of the real world
        let world = self.world_lock.read();
        let chunk = world.chunks.get(&chunk_pos)?;
        let chunk = chunk.read();

        let sections = chunk
            .sections
            .iter()
            .map(|section| section.states.clone())
            .collect::<Vec<PalettedContainer<BlockState>>>();

        if section_index >= sections.len() {
            // y position is out of bounds
            return None;
        };

        let section = &sections[section_index];
        let r = f(section);

        // add the sections to the chunk cache
        cached_chunks.push((chunk_pos, sections));

        Some(r)
    }

    /// Compute the passable/solid/standable bitsets for every block in the
    /// section at `section_pos`, or `None` if the section isn't available.
    fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
        self.with_section(section_pos, |section| {
            let mut passable_bitset = FastFixedBitSet::<4096>::new();
            let mut solid_bitset = FastFixedBitSet::<4096>::new();
            let mut standable_bitset = FastFixedBitSet::<4096>::new();
            // classify all 16*16*16 blocks in the section
            for i in 0..4096 {
                let block_state = section.get_at_index(i);
                if is_block_state_passable(block_state) {
                    passable_bitset.set(i);
                }
                if is_block_state_solid(block_state) {
                    solid_bitset.set(i);
                }
                if is_block_state_standable(block_state) {
                    standable_bitset.set(i);
                }
            }
            CachedSection {
                pos: section_pos,
                passable_bitset,
                solid_bitset,
                standable_bitset,
            }
        })
    }

    /// Whether the block at the origin-relative `pos` can be passed through.
    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }

    /// Whether the block at the absolute `pos` can be passed through.
    /// Unloaded or out-of-bounds positions count as not passable.
    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.passable_bitset.index(index);
        }

        // not cached yet; compute the whole section's bitsets and cache them
        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let passable = cached.passable_bitset.index(index);
        cached_blocks.insert(cached);
        passable
    }

    /// Get the block state at the given position.
    ///
    /// This is relatively slow, so you should avoid it whenever possible.
    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
        self.get_block_state_at_pos(pos.apply(self.origin))
    }

    /// Get the block state at the absolute `pos`, defaulting to
    /// `BlockState::default()` when the section isn't available.
    fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;

        self.with_section(section_pos, |section| section.get_at_index(index))
            .unwrap_or_default()
    }

    /// Whether the block at the origin-relative `pos` is solid (standable and
    /// parkour-able).
    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }
    /// Whether the block at the origin-relative `pos` is standable.
    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_standable(pos.apply(self.origin))
    }

    /// Whether the block at the absolute `pos` is solid. Unloaded or
    /// out-of-bounds positions count as not solid.
    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.solid_bitset.index(index);
        }

        // not cached yet; compute the whole section's bitsets and cache them
        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.solid_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }
    /// Whether the block at the absolute `pos` is standable. Unloaded or
    /// out-of-bounds positions count as not standable.
    fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.standable_bitset.index(index);
        }

        // not cached yet; compute the whole section's bitsets and cache them
        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.standable_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }

    /// Returns how much it costs to break this block.
    ///
    /// Returns 0 if the block is already passable.
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        // SAFETY: pathfinding is single-threaded
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
        // 20 bits total:
        // 8 bits for x, 4 bits for y, 8 bits for z
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
        // SAFETY: hash_index is masked to 20 bits so it's always < 2^20, the
        // table length
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        // only trust the cached cost if it was stored for this exact position
        // (collisions overwrite each other)
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        // SAFETY: same bound as the read above
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }

    /// Compute the breaking cost for `pos` without consulting the
    /// `cached_mining_costs` table.
    ///
    /// Returns `f32::INFINITY` if the block is unbreakable or if an adjacent
    /// liquid/falling block makes mining it unsafe.
    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // if the block is passable then it doesn't need to be broken
            return 0.;
        }

        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

        // we use this as an optimization to avoid getting the section again if the
        // block is in the same section
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // the block is unbreakable
                return f32::INFINITY;
            }

            // if there's a falling block or liquid above this block, abort
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            // if there's a liquid to the north of this block, abort
            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the east
            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the south
            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the west
            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            // the block is probably safe to break, we'll have to check the adjacent blocks
            // that weren't in the same section next though
            mining_cost
        }) else {
            // the chunk isn't loaded
            let cost = if self.is_block_pos_solid(pos) {
                // assume it's unbreakable if it's solid and out of render distance
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            // the block is unbreakable
            return f32::INFINITY;
        }

        // helper for checking one neighbor block that lives in a different
        // section (defaults to the default block state if unloaded)
        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        };

        // check the adjacent blocks that weren't in the same section
        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }

    /// Whether this block and the block above are passable
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

    /// The combined cost of breaking this block and the block above it (0 if
    /// both are already passable).
    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }

    /// Whether we can stand in this position.
    ///
    /// Checks if the block below is solid, and that the two blocks above that
    /// are passable.
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

    /// The cost of making this position standable: infinite if the block
    /// below can't be stood on, otherwise the cost of clearing the two blocks
    /// at `pos` and above.
    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_standable(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }

    /// Get the amount of air blocks until the next solid block below this one.
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            // NOTE(review): `current_pos.y` is origin-relative while `min_y`
            // is absolute, so this reads as a loop guard rather than an exact
            // world-bottom check — confirm intended
            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }

    /// The origin that [`RelBlockPos`] arguments are relative to.
    pub fn origin(&self) -> BlockPos {
        self.origin
    }
}
521
522/// Whether our client could pass through this block.
523pub fn is_block_state_passable(block_state: BlockState) -> bool {
524    // i already tried optimizing this by having it cache in an IntMap/FxHashMap but
525    // it wasn't measurably faster
526
527    if block_state.is_air() {
528        // fast path
529        return true;
530    }
531    if !block_state.is_collision_shape_empty() {
532        return false;
533    }
534    let registry_block = Block::from(block_state);
535    if registry_block == Block::Water {
536        return false;
537    }
538    if block_state
539        .property::<azalea_block::properties::Waterlogged>()
540        .unwrap_or_default()
541    {
542        return false;
543    }
544    if registry_block == Block::Lava {
545        return false;
546    }
547    // block.waterlogged currently doesn't account for seagrass and some other water
548    // blocks
549    if block_state == Block::Seagrass.into() {
550        return false;
551    }
552
553    // don't walk into fire
554    if registry_block == Block::Fire || registry_block == Block::SoulFire {
555        return false;
556    }
557
558    if registry_block == Block::PowderSnow {
559        // we can't jump out of powder snow
560        return false;
561    }
562
563    if registry_block == Block::SweetBerryBush {
564        // these hurt us
565        return false;
566    }
567
568    true
569}
570
571/// Whether this block has a solid hitbox at the top (i.e. we can stand on it
572/// and do parkour from it).
573#[inline]
574pub fn is_block_state_solid(block_state: BlockState) -> bool {
575    if block_state.is_air() {
576        // fast path
577        return false;
578    }
579    if block_state.is_collision_shape_full() {
580        return true;
581    }
582
583    if matches!(
584        block_state.property::<properties::Type>(),
585        Some(properties::Type::Top | properties::Type::Double)
586    ) {
587        // top slabs
588        return true;
589    }
590
591    let block = Block::from(block_state);
592    // solid enough
593    if matches!(block, Block::DirtPath | Block::Farmland) {
594        return true;
595    }
596
597    false
598}
599
600/// Whether we can stand on this block (but not necessarily do parkour jumps
601/// from it).
602pub fn is_block_state_standable(block_state: BlockState) -> bool {
603    if is_block_state_solid(block_state) {
604        return true;
605    }
606
607    let block = Block::from(block_state);
608    if tags::blocks::SLABS.contains(&block) || tags::blocks::STAIRS.contains(&block) {
609        return true;
610    }
611
612    false
613}
614
#[cfg(test)]
mod tests {
    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

    /// Build a `CachedWorld` containing a single default chunk at (0, 0) with
    /// the given block states set, using (0, 0, 0) as the origin.
    fn make_cached_world(blocks: &[(BlockPos, BlockState)]) -> CachedWorld {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        for &(pos, state) in blocks {
            partial_world.chunks.set_block_state(pos, state, &world);
        }

        CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default())
    }

    #[test]
    fn test_is_passable() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
        ]);
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
        ]);
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_standable() {
        let ctx = make_cached_world(&[
            (BlockPos::new(0, 0, 0), Block::Stone.into()),
            (BlockPos::new(0, 1, 0), BlockState::AIR),
            (BlockPos::new(0, 2, 0), BlockState::AIR),
            (BlockPos::new(0, 3, 0), BlockState::AIR),
        ]);
        // standing on the stone block requires two passable blocks above it
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}