azalea/pathfinder/world.rs

use std::{
    cell::{RefCell, UnsafeCell},
    sync::Arc,
};

use azalea_block::{BlockState, properties};
use azalea_core::{
    bitset::FixedBitSet,
    position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
};
use azalea_physics::collision::BlockWithShape;
use azalea_world::{Instance, palette::PalettedContainer};
use parking_lot::RwLock;

use super::{mining::MiningCache, rel_block_pos::RelBlockPos};

/// An efficient representation of the world used for the pathfinder.
pub struct CachedWorld {
    /// The origin that the [`RelBlockPos`] types will be relative to. This is
    /// for an optimization that reduces the size of the block positions
    /// that are used by the pathfinder.
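    ///
    /// For example, with an origin of (100, 64, -50), the world position
    /// (102, 65, -48) would be stored as the relative position (2, 1, 2).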
    origin: BlockPos,

    min_y: i32,
    world_lock: Arc<RwLock<Instance>>,

    // we store `PalettedContainer`s instead of `Chunk`s or `Section`s because they don't contain
    // any unnecessary data like heightmaps or biomes.
    cached_chunks: RefCell<
        Vec<(
            ChunkPos,
            Vec<azalea_world::palette::PalettedContainer<BlockState>>,
        )>,
    >,
    last_chunk_cache_index: RefCell<Option<usize>>,

    cached_blocks: UnsafeCell<CachedSections>,

    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
}

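/// A list of sections sorted by position, which also keeps track of the two
/// most recently accessed entries to speed up repeated lookups.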
#[derive(Default)]
pub struct CachedSections {
    pub last_index: usize,
    pub second_last_index: usize,
    pub sections: Vec<CachedSection>,
}

impl CachedSections {
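    /// Get a mutable reference to the section at the given position, checking
    /// the two most recently used entries before falling back to a binary
    /// search over the sorted `sections` vec.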
    #[inline]
    pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
        if let Some(last_item) = self.sections.get(self.last_index) {
            if last_item.pos == pos {
                return Some(&mut self.sections[self.last_index]);
            } else if let Some(second_last_item) = self.sections.get(self.second_last_index)
                && second_last_item.pos == pos
            {
                return Some(&mut self.sections[self.second_last_index]);
            }
        }

        let index = self
            .sections
            .binary_search_by(|section| section.pos.cmp(&pos))
            .ok();

        if let Some(index) = index {
            self.second_last_index = self.last_index;
            self.last_index = index;
            return Some(&mut self.sections[index]);
        }
        None
    }

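    /// Insert a section, keeping `sections` sorted by position so `get_mut`
    /// can binary search it.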
    #[inline]
    pub fn insert(&mut self, section: CachedSection) {
        let index = self
            .sections
            .binary_search_by(|s| s.pos.cmp(&section.pos))
            .unwrap_or_else(|e| e);
        self.sections.insert(index, section);
    }
}

pub struct CachedSection {
    pub pos: ChunkSectionPos,
    /// Blocks that we can fully pass through (like air).
    pub passable_bitset: FixedBitSet<4096>,
    /// Blocks that we can stand on and do parkour from.
    pub solid_bitset: FixedBitSet<4096>,
    /// Blocks that we can stand on but might not be able to parkour from.
    pub standable_bitset: FixedBitSet<4096>,
}

impl CachedWorld {
    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
        let min_y = world_lock.read().chunks.min_y;
        Self {
            origin,
            min_y,
            world_lock,
            cached_chunks: Default::default(),
            last_chunk_cache_index: Default::default(),
            cached_blocks: Default::default(),
            // this uses about 12mb of memory. it *really* helps though.
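            // (2^20 entries × 12 bytes per (RelBlockPos, f32) entry ≈ 12 MiB,
            // assuming no padding)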
            cached_mining_costs: UnsafeCell::new(
                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
                    .into_boxed_slice(),
            ),
        }
    }

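    // for reference, a naive `get_block_state` could be written on top of
    // `with_section` like this (a sketch of the pattern the functions below
    // use):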
    // ```
    // fn get_block_state(&self, pos: BlockPos) -> Option<BlockState> {
    //     self.with_section(ChunkSectionPos::from(pos), |section| {
    //         let index = u16::from(ChunkSectionBlockPos::from(pos)) as usize;
    //         section.get_at_index(index)
    //     })
    // }
    // ```

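    /// Run the given function on the section at `section_pos`, reading the
    /// chunk from the world (and caching its sections) if it isn't cached yet.
    /// Returns `None` if the y position is out of bounds or the chunk isn't
    /// loaded.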
    fn with_section<T>(
        &self,
        section_pos: ChunkSectionPos,
        f: impl FnOnce(&azalea_world::palette::PalettedContainer<BlockState>) -> T,
    ) -> Option<T> {
        if section_pos.y * 16 < self.min_y {
            // y position is out of bounds
            return None;
        }

        let chunk_pos = ChunkPos::from(section_pos);
        let section_index =
            azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;

        let mut cached_chunks = self.cached_chunks.borrow_mut();

        // optimization: avoid doing the iter lookup if the last chunk we looked up is
        // the same
        if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow()
            && cached_chunks[last_chunk_cache_index].0 == chunk_pos
        {
            // don't bother with the iter lookup
            let sections = &cached_chunks[last_chunk_cache_index].1;
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            let section = &sections[section_index];
            return Some(f(section));
        }

        // get section from cache
        if let Some((chunk_index, sections)) =
            cached_chunks
                .iter()
                .enumerate()
                .find_map(|(i, (pos, sections))| {
                    if *pos == chunk_pos {
                        Some((i, sections))
                    } else {
                        None
                    }
                })
        {
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            };
            *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
            let section = &sections[section_index];
            return Some(f(section));
        }

        let world = self.world_lock.read();
        let chunk = world.chunks.get(&chunk_pos)?;
        let chunk = chunk.read();

        let sections = chunk
            .sections
            .iter()
            .map(|section| section.states.clone())
            .collect::<Vec<PalettedContainer<BlockState>>>();

        if section_index >= sections.len() {
            // y position is out of bounds
            return None;
        };

        let section = &sections[section_index];
        let r = f(section);

        // add the sections to the chunk cache
        cached_chunks.push((chunk_pos, sections));

        Some(r)
    }

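    /// Precompute the passable/solid/standable bitsets for every block in the
    /// given section, so later lookups are a single bit test.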
    fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
        self.with_section(section_pos, |section| {
            let mut passable_bitset = FixedBitSet::<4096>::new();
            let mut solid_bitset = FixedBitSet::<4096>::new();
            let mut standable_bitset = FixedBitSet::<4096>::new();
            for i in 0..4096 {
                let block_state = section.get_at_index(i);
                if is_block_state_passable(block_state) {
                    passable_bitset.set(i);
                }
                if is_block_state_solid(block_state) {
                    solid_bitset.set(i);
                }
                if is_block_state_standable(block_state) {
                    standable_bitset.set(i);
                }
            }
            CachedSection {
                pos: section_pos,
                passable_bitset,
                solid_bitset,
                standable_bitset,
            }
        })
    }

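    /// Whether the block at the given position can be moved through, according
    /// to [`is_block_state_passable`].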
    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }

    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.passable_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let passable = cached.passable_bitset.index(index);
        cached_blocks.insert(cached);
        passable
    }

    /// Get the block state at the given position. This is relatively slow, so
    /// you should avoid it whenever possible.
    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
        self.get_block_state_at_pos(pos.apply(self.origin))
    }

    fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;

        self.with_section(section_pos, |section| section.get_at_index(index))
            .unwrap_or_default()
    }

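    /// Whether the block at the given position can be stood on and parkoured
    /// from, according to [`is_block_state_solid`].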
    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }
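    /// Whether the block at the given position can be stood on, according to
    /// [`is_block_state_standable`].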
    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_standable(pos.apply(self.origin))
    }

    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.solid_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.solid_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }
    fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.standable_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let standable = cached.standable_bitset.index(index);
        cached_blocks.insert(cached);
        standable
    }

    /// Returns how much it costs to break this block. Returns 0 if the block is
    /// already passable.
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        // SAFETY: pathfinding is single-threaded
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
        // 20 bits total:
        // 8 bits for x, 4 bits for y, 8 bits for z
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
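        // the cache is direct-mapped, so distinct positions can share a slot;
        // storing the position next to the cost lets us detect that below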
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }

    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // if the block is passable then it doesn't need to be broken
            return 0.;
        }

        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

        // as an optimization, we check whether each adjacent block is in the same
        // section so we can avoid getting its section again
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // the block is unbreakable
                return f32::INFINITY;
            }

            // if there's a falling block or liquid above this block, abort
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            // if there's a liquid to the north of this block, abort
            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the east
            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the south
            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the west
            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            // the block is probably safe to break; we still have to check the adjacent
            // blocks that weren't in the same section, though
            mining_cost
        }) else {
            // the chunk isn't loaded
            let cost = if self.is_block_pos_solid(pos) {
                // assume it's unbreakable if it's solid and out of render distance
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            // the block is unbreakable
            return f32::INFINITY;
        }

        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        };

        // check the adjacent blocks that weren't in the same section
        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }

    /// Whether this block and the block above are passable
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

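    /// How much it costs to break the blocks at the given position and above
    /// it so that we can pass through. Returns 0 if both are already passable.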
    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }

    /// Whether we can stand in this position. Checks if the block below is
    /// standable, and that the two blocks above that are passable.
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_standable(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }

    /// Get the amount of air blocks until the next solid block below this one.
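    /// Returns [`u32::MAX`] if the fall would continue past the bottom of the
    /// world.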
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }

    pub fn origin(&self) -> BlockPos {
        self.origin
    }
}

/// whether this block is passable
pub fn is_block_state_passable(block: BlockState) -> bool {
    // i already tried optimizing this by caching it in an IntMap/FxHashMap, but
    // it wasn't measurably faster

    if block.is_air() {
        // fast path
        return true;
    }
    if !block.is_collision_shape_empty() {
        return false;
    }
    let registry_block = azalea_registry::Block::from(block);
    if registry_block == azalea_registry::Block::Water {
        return false;
    }
    if block
        .property::<azalea_block::properties::Waterlogged>()
        .unwrap_or_default()
    {
        return false;
    }
    if registry_block == azalea_registry::Block::Lava {
        return false;
    }
    // the waterlogged property currently doesn't account for seagrass and some
    // other water blocks
    if block == azalea_registry::Block::Seagrass.into() {
        return false;
    }

    // don't walk into fire
    if registry_block == azalea_registry::Block::Fire
        || registry_block == azalea_registry::Block::SoulFire
    {
        return false;
    }

    if registry_block == azalea_registry::Block::PowderSnow {
        // we can't jump out of powder snow
        return false;
    }

    if registry_block == azalea_registry::Block::SweetBerryBush {
        // these hurt us
        return false;
    }

    true
}

/// whether this block has a solid hitbox at the top (i.e. we can stand on it
/// and do parkour from it)
pub fn is_block_state_solid(block: BlockState) -> bool {
    if block.is_air() {
        // fast path
        return false;
    }
    if block.is_collision_shape_full() {
        return true;
    }

    if matches!(
        block.property::<properties::Type>(),
        Some(properties::Type::Top | properties::Type::Double)
    ) {
        // top slabs
        return true;
    }

    false
}

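/// whether we can stand on this block, even if we might not be able to
/// parkour from it (like bottom slabs, stairs, and dirt paths)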
pub fn is_block_state_standable(block: BlockState) -> bool {
    if block.is_air() {
        // fast path
        return false;
    }
    if block.is_collision_shape_full() {
        return true;
    }

    let registry_block = azalea_registry::Block::from(block);
    if azalea_registry::tags::blocks::SLABS.contains(&registry_block)
        || azalea_registry::tags::blocks::STAIRS.contains(&registry_block)
    {
        return true;
    }

    if registry_block == azalea_registry::Block::DirtPath {
        return true;
    }

    false
}

#[cfg(test)]
mod tests {
    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

    #[test]
    fn test_is_passable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_standable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 2, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 3, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
}