azalea/pathfinder/world.rs

use std::{
    cell::{RefCell, UnsafeCell},
    sync::Arc,
};

use azalea_block::{BlockState, properties};
use azalea_core::{
    bitset::FixedBitSet,
    position::{BlockPos, ChunkPos, ChunkSectionBlockPos, ChunkSectionPos},
};
use azalea_physics::collision::BlockWithShape;
use azalea_world::Instance;
use parking_lot::RwLock;

use super::{mining::MiningCache, rel_block_pos::RelBlockPos};

/// An efficient representation of the world used for the pathfinder.
pub struct CachedWorld {
    /// The origin that the [`RelBlockPos`] types will be relative to. This is
    /// for an optimization that reduces the size of the block positions
    /// that are used by the pathfinder.
    origin: BlockPos,

    min_y: i32,
    world_lock: Arc<RwLock<Instance>>,
    // we store `PalettedContainer`s instead of `Chunk`s or `Section`s because they don't
    // contain any unnecessary data like heightmaps or biomes.
    cached_chunks: RefCell<Vec<(ChunkPos, Vec<azalea_world::palette::PalettedContainer>)>>,
    last_chunk_cache_index: RefCell<Option<usize>>,

    cached_blocks: UnsafeCell<CachedSections>,

    cached_mining_costs: UnsafeCell<Box<[(RelBlockPos, f32)]>>,
}

#[derive(Default)]
pub struct CachedSections {
    pub last_index: usize,
    pub second_last_index: usize,
    pub sections: Vec<CachedSection>,
}

impl CachedSections {
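    /// Get a mutable reference to the cached section at the given position.
    ///
    /// The two most recently used indices are checked first; on a miss we
    /// fall back to a binary search over the sorted `sections` list.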
    #[inline]
    pub fn get_mut(&mut self, pos: ChunkSectionPos) -> Option<&mut CachedSection> {
        if let Some(last_item) = self.sections.get(self.last_index) {
            if last_item.pos == pos {
                return Some(&mut self.sections[self.last_index]);
            } else if let Some(second_last_item) = self.sections.get(self.second_last_index)
                && second_last_item.pos == pos
            {
                return Some(&mut self.sections[self.second_last_index]);
            }
        }

        let index = self
            .sections
            .binary_search_by(|section| section.pos.cmp(&pos))
            .ok();

        if let Some(index) = index {
            self.second_last_index = self.last_index;
            self.last_index = index;
            return Some(&mut self.sections[index]);
        }
        None
    }

    #[inline]
    pub fn insert(&mut self, section: CachedSection) {
        // `sections` must stay sorted by position so `get_mut` can binary
        // search it, so find the insertion point that keeps that order
        let index = self
            .sections
            .binary_search_by(|s| s.pos.cmp(&section.pos))
            .unwrap_or_else(|e| e);
        self.sections.insert(index, section);
    }
}

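/// Precomputed pathfinder data for a single 16x16x16 chunk section.
///
/// Each bitset stores one bit per block in the section (4096 bits, i.e. 512
/// bytes per bitset).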
pub struct CachedSection {
    pub pos: ChunkSectionPos,
    /// Blocks that we can fully pass through (like air).
    pub passable_bitset: FixedBitSet<{ 4096_usize.div_ceil(8) }>,
    /// Blocks that we can stand on and do parkour from.
    pub solid_bitset: FixedBitSet<{ 4096_usize.div_ceil(8) }>,
    /// Blocks that we can stand on but might not be able to parkour from.
    pub standable_bitset: FixedBitSet<{ 4096_usize.div_ceil(8) }>,
}

impl CachedWorld {
    pub fn new(world_lock: Arc<RwLock<Instance>>, origin: BlockPos) -> Self {
        let min_y = world_lock.read().chunks.min_y;
        Self {
            origin,
            min_y,
            world_lock,
            cached_chunks: Default::default(),
            last_chunk_cache_index: Default::default(),
            cached_blocks: Default::default(),
            // this uses about 12mb of memory. it *really* helps though.
            cached_mining_costs: UnsafeCell::new(
                vec![(RelBlockPos::new(i16::MAX, i32::MAX, i16::MAX), 0.); 2usize.pow(20)]
                    .into_boxed_slice(),
            ),
        }
    }

    // An example of how `with_section` can be used (see
    // `get_block_state_at_pos` below for the real version):
    // ```
    // fn get_block_state(&self, pos: BlockPos) -> Option<BlockState> {
    //     self.with_section(ChunkSectionPos::from(pos), |section| {
    //         section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
    //     })
    // }
    // ```

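    /// Run the given closure with the section at the given position, or
    /// return `None` if the section isn't loaded.
    ///
    /// The chunk's sections are copied out of the world and cached locally,
    /// so repeated lookups usually avoid taking the world lock.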
    fn with_section<T>(
        &self,
        section_pos: ChunkSectionPos,
        f: impl FnOnce(&azalea_world::palette::PalettedContainer) -> T,
    ) -> Option<T> {
        if section_pos.y * 16 < self.min_y {
            // y position is out of bounds
            return None;
        }

        let chunk_pos = ChunkPos::from(section_pos);
        let section_index =
            azalea_world::chunk_storage::section_index(section_pos.y * 16, self.min_y) as usize;

        let mut cached_chunks = self.cached_chunks.borrow_mut();

        // optimization: avoid doing the iter lookup if the last chunk we looked up is
        // the same
        if let Some(last_chunk_cache_index) = *self.last_chunk_cache_index.borrow()
            && cached_chunks[last_chunk_cache_index].0 == chunk_pos
        {
            // don't bother with the iter lookup
            let sections = &cached_chunks[last_chunk_cache_index].1;
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            }
            let section: &azalea_world::palette::PalettedContainer = &sections[section_index];
            return Some(f(section));
        }

        // get section from cache
        if let Some((chunk_index, sections)) =
            cached_chunks
                .iter()
                .enumerate()
                .find_map(|(i, (pos, sections))| {
                    if *pos == chunk_pos {
                        Some((i, sections))
                    } else {
                        None
                    }
                })
        {
            if section_index >= sections.len() {
                // y position is out of bounds
                return None;
            }
            *self.last_chunk_cache_index.borrow_mut() = Some(chunk_index);
            let section: &azalea_world::palette::PalettedContainer = &sections[section_index];
            return Some(f(section));
        }

        let world = self.world_lock.read();
        let chunk = world.chunks.get(&chunk_pos)?;
        let chunk = chunk.read();

        let sections: Vec<azalea_world::palette::PalettedContainer> = chunk
            .sections
            .iter()
            .map(|section| section.states.clone())
            .collect();

        if section_index >= sections.len() {
            // y position is out of bounds
            return None;
        }

        let section = &sections[section_index];
        let r = f(section);

        // add the sections to the chunk cache
        cached_chunks.push((chunk_pos, sections));

        Some(r)
    }

    fn calculate_bitsets_for_section(&self, section_pos: ChunkSectionPos) -> Option<CachedSection> {
        self.with_section(section_pos, |section| {
            let mut passable_bitset = FixedBitSet::<{ 4096_usize.div_ceil(8) }>::new();
            let mut solid_bitset = FixedBitSet::<{ 4096_usize.div_ceil(8) }>::new();
            let mut standable_bitset = FixedBitSet::<{ 4096_usize.div_ceil(8) }>::new();
            for i in 0..4096 {
                let block_state = section.get_at_index(i);
                if is_block_state_passable(block_state) {
                    passable_bitset.set(i);
                }
                if is_block_state_solid(block_state) {
                    solid_bitset.set(i);
                }
                if is_block_state_standable(block_state) {
                    standable_bitset.set(i);
                }
            }
            CachedSection {
                pos: section_pos,
                passable_bitset,
                solid_bitset,
                standable_bitset,
            }
        })
    }

    pub fn is_block_passable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_passable(pos.apply(self.origin))
    }

    fn is_block_pos_passable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.passable_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let passable = cached.passable_bitset.index(index);
        cached_blocks.insert(cached);
        passable
    }

    /// Get the block state at the given position. This is relatively slow, so
    /// you should avoid it whenever possible.
    pub fn get_block_state(&self, pos: RelBlockPos) -> BlockState {
        self.get_block_state_at_pos(pos.apply(self.origin))
    }

    fn get_block_state_at_pos(&self, pos: BlockPos) -> BlockState {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;

        self.with_section(section_pos, |section| section.get_at_index(index))
            .unwrap_or_default()
    }

    pub fn is_block_solid(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_solid(pos.apply(self.origin))
    }
    pub fn is_block_standable(&self, pos: RelBlockPos) -> bool {
        self.is_block_pos_standable(pos.apply(self.origin))
    }

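    // these follow the same pattern as `is_block_pos_passable` above: check
    // the per-section bitset cache first, and only compute the section's
    // bitsets on a miss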
    fn is_block_pos_solid(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.solid_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let solid = cached.solid_bitset.index(index);
        cached_blocks.insert(cached);
        solid
    }
    fn is_block_pos_standable(&self, pos: BlockPos) -> bool {
        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));
        let index = u16::from(section_block_pos) as usize;
        // SAFETY: we're only accessing this from one thread
        let cached_blocks = unsafe { &mut *self.cached_blocks.get() };
        if let Some(cached) = cached_blocks.get_mut(section_pos) {
            return cached.standable_bitset.index(index);
        }

        let Some(cached) = self.calculate_bitsets_for_section(section_pos) else {
            return false;
        };
        let standable = cached.standable_bitset.index(index);
        cached_blocks.insert(cached);
        standable
    }

    /// Returns how much it costs to break this block. Returns 0 if the block is
    /// already passable.
    pub fn cost_for_breaking_block(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        // SAFETY: pathfinding is single-threaded
        let cached_mining_costs = unsafe { &mut *self.cached_mining_costs.get() };
        // 20 bits total:
        // 8 bits for x, 4 bits for y, 8 bits for z
        let hash_index = ((pos.x as usize & 0xff) << 12)
            | ((pos.y as usize & 0xf) << 8)
            | (pos.z as usize & 0xff);
        debug_assert!(hash_index < 1048576);
        let &(cached_pos, potential_cost) =
            unsafe { cached_mining_costs.get_unchecked(hash_index) };
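        // the 20-bit index truncates the position, so two different blocks
        // can map to the same slot; comparing the stored position guards
        // against returning a stale cost from a colliding entry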
        if cached_pos == pos {
            return potential_cost;
        }

        let cost = self.uncached_cost_for_breaking_block(pos, mining_cache);
        unsafe {
            *cached_mining_costs.get_unchecked_mut(hash_index) = (pos, cost);
        };

        cost
    }

    fn uncached_cost_for_breaking_block(
        &self,
        pos: RelBlockPos,
        mining_cache: &MiningCache,
    ) -> f32 {
        if self.is_block_passable(pos) {
            // if the block is passable then it doesn't need to be broken
            return 0.;
        }

        let pos = pos.apply(self.origin);

        let (section_pos, section_block_pos) =
            (ChunkSectionPos::from(pos), ChunkSectionBlockPos::from(pos));

        // we use this as an optimization to avoid getting the section again if the
        // block is in the same section
        let up_is_in_same_section = section_block_pos.y != 15;
        let north_is_in_same_section = section_block_pos.z != 0;
        let east_is_in_same_section = section_block_pos.x != 15;
        let south_is_in_same_section = section_block_pos.z != 15;
        let west_is_in_same_section = section_block_pos.x != 0;

        let Some(mining_cost) = self.with_section(section_pos, |section| {
            let block_state = section.get_at_index(u16::from(section_block_pos) as usize);
            let mining_cost = mining_cache.cost_for(block_state);

            if mining_cost == f32::INFINITY {
                // the block is unbreakable
                return f32::INFINITY;
            }

            // if there's a falling block or liquid above this block, abort
            if up_is_in_same_section {
                let up_block = section.get_at_index(u16::from(section_block_pos.up(1)) as usize);
                if mining_cache.is_liquid(up_block) || mining_cache.is_falling_block(up_block) {
                    return f32::INFINITY;
                }
            }

            // if there's a liquid to the north of this block, abort
            if north_is_in_same_section {
                let north_block =
                    section.get_at_index(u16::from(section_block_pos.north(1)) as usize);
                if mining_cache.is_liquid(north_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the east
            if east_is_in_same_section {
                let east_block =
                    section.get_at_index(u16::from(section_block_pos.east(1)) as usize);
                if mining_cache.is_liquid(east_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the south
            if south_is_in_same_section {
                let south_block =
                    section.get_at_index(u16::from(section_block_pos.south(1)) as usize);
                if mining_cache.is_liquid(south_block) {
                    return f32::INFINITY;
                }
            }

            // liquid to the west
            if west_is_in_same_section {
                let west_block =
                    section.get_at_index(u16::from(section_block_pos.west(1)) as usize);
                if mining_cache.is_liquid(west_block) {
                    return f32::INFINITY;
                }
            }

            // the block is probably safe to break; we still have to check the
            // adjacent blocks that weren't in the same section, below
            mining_cost
        }) else {
            // the chunk isn't loaded
            let cost = if self.is_block_pos_solid(pos) {
                // assume it's unbreakable if it's solid and out of render distance
                f32::INFINITY
            } else {
                0.
            };
            return cost;
        };

        if mining_cost == f32::INFINITY {
            // the block is unbreakable
            return f32::INFINITY;
        }

        let check_should_avoid_this_block = |pos: BlockPos, check: &dyn Fn(BlockState) -> bool| {
            let block_state = self
                .with_section(ChunkSectionPos::from(pos), |section| {
                    section.get_at_index(u16::from(ChunkSectionBlockPos::from(pos)) as usize)
                })
                .unwrap_or_default();
            check(block_state)
        };

        // check the adjacent blocks that weren't in the same section
        if !up_is_in_same_section
            && check_should_avoid_this_block(pos.up(1), &|b| {
                mining_cache.is_liquid(b) || mining_cache.is_falling_block(b)
            })
        {
            return f32::INFINITY;
        }
        if !north_is_in_same_section
            && check_should_avoid_this_block(pos.north(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !east_is_in_same_section
            && check_should_avoid_this_block(pos.east(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !south_is_in_same_section
            && check_should_avoid_this_block(pos.south(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }
        if !west_is_in_same_section
            && check_should_avoid_this_block(pos.west(1), &|b| mining_cache.is_liquid(b))
        {
            return f32::INFINITY;
        }

        mining_cost
    }

    /// Whether this block and the block above are passable
    pub fn is_passable(&self, pos: RelBlockPos) -> bool {
        self.is_passable_at_block_pos(pos.apply(self.origin))
    }
    fn is_passable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_passable(pos) && self.is_block_pos_passable(pos.up(1))
    }

    pub fn cost_for_passing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        self.cost_for_breaking_block(pos, mining_cache)
            + self.cost_for_breaking_block(pos.up(1), mining_cache)
    }

    /// Whether we can stand in this position. Checks that the block below is
    /// standable, and that this block and the one above it are passable.
    pub fn is_standable(&self, pos: RelBlockPos) -> bool {
        self.is_standable_at_block_pos(pos.apply(self.origin))
    }
    fn is_standable_at_block_pos(&self, pos: BlockPos) -> bool {
        self.is_block_pos_standable(pos.down(1)) && self.is_passable_at_block_pos(pos)
    }

    pub fn cost_for_standing(&self, pos: RelBlockPos, mining_cache: &MiningCache) -> f32 {
        if !self.is_block_standable(pos.down(1)) {
            return f32::INFINITY;
        }
        self.cost_for_passing(pos, mining_cache)
    }

    /// Get the number of passable blocks below this one, i.e. how far we
    /// would fall from here. Returns `u32::MAX` if the fall would go below
    /// the bottom of the world.
    pub fn fall_distance(&self, pos: RelBlockPos) -> u32 {
        let mut distance = 0;
        let mut current_pos = pos.down(1);
        while self.is_block_passable(current_pos) {
            distance += 1;
            current_pos = current_pos.down(1);

            if current_pos.y < self.min_y {
                return u32::MAX;
            }
        }
        distance
    }

    pub fn origin(&self) -> BlockPos {
        self.origin
    }
}

/// whether this block is passable
pub fn is_block_state_passable(block: BlockState) -> bool {
    // i already tried optimizing this by having it cache in an IntMap/FxHashMap but
    // it wasn't measurably faster

    if block.is_air() {
        // fast path
        return true;
    }
    if !block.is_collision_shape_empty() {
        return false;
    }
    let registry_block = azalea_registry::Block::from(block);
    if registry_block == azalea_registry::Block::Water {
        return false;
    }
    if block
        .property::<azalea_block::properties::Waterlogged>()
        .unwrap_or_default()
    {
        return false;
    }
    if registry_block == azalea_registry::Block::Lava {
        return false;
    }
    // block.waterlogged currently doesn't account for seagrass and some other water
    // blocks
    if block == azalea_registry::Block::Seagrass.into() {
        return false;
    }

    // don't walk into fire
    if registry_block == azalea_registry::Block::Fire
        || registry_block == azalea_registry::Block::SoulFire
    {
        return false;
    }

    true
}

/// whether this block has a solid hitbox at the top (i.e. we can stand on it
/// and do parkour from it)
pub fn is_block_state_solid(block: BlockState) -> bool {
    if block.is_air() {
        // fast path
        return false;
    }
    if block.is_collision_shape_full() {
        return true;
    }

    if matches!(
        block.property::<properties::Type>(),
        Some(properties::Type::Top | properties::Type::Double)
    ) {
        // top slabs
        return true;
    }

    false
}

pub fn is_block_state_standable(block: BlockState) -> bool {
    if block.is_air() {
        // fast path
        return false;
    }
    if block.is_collision_shape_full() {
        return true;
    }

    let registry_block = azalea_registry::Block::from(block);
    if azalea_registry::tags::blocks::SLABS.contains(&registry_block)
        || azalea_registry::tags::blocks::STAIRS.contains(&registry_block)
    {
        return true;
    }

    if registry_block == azalea_registry::Block::DirtPath {
        return true;
    }

    false
}

#[cfg(test)]
mod tests {

    use azalea_world::{Chunk, ChunkStorage, PartialInstance};

    use super::*;

    #[test]
    fn test_is_passable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();

        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(!ctx.is_block_pos_passable(BlockPos::new(0, 0, 0)));
        assert!(ctx.is_block_pos_passable(BlockPos::new(0, 1, 0)));
    }

    #[test]
    fn test_is_solid() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_block_pos_solid(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_block_pos_solid(BlockPos::new(0, 1, 0)));
    }
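
    // A minimal extra check (a sketch added here, not from upstream azalea)
    // that exercises the pure block-state helpers directly, with no world
    // needed. The expectations follow straight from the helpers' logic above.
    #[test]
    fn test_block_state_helpers() {
        assert!(is_block_state_passable(BlockState::AIR));
        assert!(!is_block_state_passable(azalea_registry::Block::Stone.into()));
        assert!(!is_block_state_passable(azalea_registry::Block::Water.into()));
        assert!(!is_block_state_passable(azalea_registry::Block::Lava.into()));

        assert!(is_block_state_solid(azalea_registry::Block::Stone.into()));
        assert!(!is_block_state_solid(BlockState::AIR));
    }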

    #[test]
    fn test_is_standable() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 1, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 2, 0), BlockState::AIR, &world);
        partial_world
            .chunks
            .set_block_state(&BlockPos::new(0, 3, 0), BlockState::AIR, &world);

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert!(ctx.is_standable_at_block_pos(BlockPos::new(0, 1, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 0, 0)));
        assert!(!ctx.is_standable_at_block_pos(BlockPos::new(0, 2, 0)));
    }
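
    // Another sketch (not from upstream azalea): with the origin at
    // (0, 0, 0), relative and absolute coordinates coincide, so standing two
    // blocks above the stone at y=0 should give a fall distance of 1.
    #[test]
    fn test_fall_distance() {
        let mut partial_world = PartialInstance::default();
        let mut world = ChunkStorage::default();
        partial_world
            .chunks
            .set(&ChunkPos { x: 0, z: 0 }, Some(Chunk::default()), &mut world);
        partial_world.chunks.set_block_state(
            &BlockPos::new(0, 0, 0),
            azalea_registry::Block::Stone.into(),
            &world,
        );

        let ctx = CachedWorld::new(Arc::new(RwLock::new(world.into())), BlockPos::default());
        assert_eq!(ctx.fall_distance(RelBlockPos::new(0, 2, 0)), 1);
    }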
}