1use crate::types::CpuType;
7
8use super::arch::{arm64_compact, x86_64_compact};
9use super::registers::RegisterContext;
10use super::{MemoryReader, SectionRef, UnwindError};
11
/// A decoded compact unwind encoding describing how to step from a frame
/// inside one function to its caller's frame.
#[derive(Debug, Clone)]
pub enum CompactEntry {
    /// Standard frame: the frame pointer points at the saved caller FP,
    /// with callee-saved registers at fixed offsets relative to it.
    FrameBased {
        /// Pairs of (DWARF register number, signed byte offset from FP).
        saved_regs: Vec<(u16, i16)>,
    },
    /// Frameless function whose total stack size is encoded directly.
    FramelessImmediate {
        /// Total stack allocation, in bytes.
        stack_size: u64,
        /// DWARF register numbers saved near the top of the frame.
        saved_regs: Vec<u16>,
        /// Register holding the return address (e.g. ARM64 LR) when the
        /// return address is not stored on the stack.
        return_address_register: Option<u16>,
    },
    /// Frameless function whose stack size must be read from the `sub`
    /// instruction located `sub_offset` bytes into the function.
    FramelessIndirect {
        sub_offset: u32,
        stack_adjust: u32,
        saved_regs: Vec<u16>,
    },
    /// The encoding defers to a DWARF FDE at this offset.
    Dwarf { fde_offset: u32 },
    /// No unwind information (zero or unrecognized encoding).
    None,
}
42
// Byte offsets of fields in the __unwind_info section header.
const HEADER_VERSION: u64 = 0;
const HEADER_COMMON_ENC_OFFSET: u64 = 4;
const HEADER_COMMON_ENC_COUNT: u64 = 8;
const HEADER_INDEX_OFFSET: u64 = 20;
const HEADER_INDEX_COUNT: u64 = 24;

// First-level index entries are 12 bytes: function offset at +0,
// second-level page offset at +4 (a third field is unused here).
const FL_ENTRY_SIZE: u64 = 12;
const FL_FUNC_OFFSET: u64 = 0;
const FL_SECOND_LEVEL: u64 = 4;

// Second-level page kind discriminators (first u32 of the page).
const SL_REGULAR: u32 = 2;
const SL_COMPRESSED: u32 = 3;

// Regular second-level page header fields (u16 each), and the size of a
// regular entry: u32 function offset + u32 encoding.
const REG_ENTRY_PAGE_OFFSET: u64 = 4;
const REG_ENTRY_COUNT: u64 = 6;
const REG_ENTRY_SIZE: u64 = 8;

// Compressed second-level page header fields (u16 each): entry array
// offset/count, then page-local encodings array offset/count.
const COMP_ENTRY_PAGE_OFFSET: u64 = 4;
const COMP_ENTRY_COUNT: u64 = 6;
const COMP_ENC_PAGE_OFFSET: u64 = 8;
const COMP_ENC_COUNT: u64 = 10;
101
/// Look up the compact unwind encoding covering `pc`.
///
/// Returns `(encoding, function_start_offset)` where the offset is relative
/// to `image_base`, or `None` when the section version is unsupported, `pc`
/// is not covered, or any read through `reader` fails.
pub fn lookup_encoding(
    reader: &dyn MemoryReader,
    unwind_info: &SectionRef,
    pc: u64,
    image_base: u64,
) -> Option<(u32, u32)> {
    let base = unwind_info.vm_addr;

    // Only version 1 of the section format is understood.
    let version = reader.read_u32(base + HEADER_VERSION)?;
    if version != 1 {
        return None;
    }

    let common_enc_offset = reader.read_u32(base + HEADER_COMMON_ENC_OFFSET)? as u64;
    let common_enc_count = reader.read_u32(base + HEADER_COMMON_ENC_COUNT)?;
    let index_offset = reader.read_u32(base + HEADER_INDEX_OFFSET)? as u64;
    let index_count = reader.read_u32(base + HEADER_INDEX_COUNT)?;

    // The final first-level entry acts as a sentinel terminating the
    // previous entry's range, so exclude it from the search.
    // `checked_sub` also rejects index_count == 0.
    let index_count = index_count.checked_sub(1)?;
    if index_count == 0 {
        return None;
    }

    // All tables are keyed by the pc's offset from the image base;
    // a pc below the image base cannot be covered.
    let func_offset = pc.checked_sub(image_base)? as u32;

    let fl_base = base + index_offset;
    let idx = first_level_binary_search(reader, fl_base, index_count, func_offset)?;

    let entry_addr = fl_base + idx as u64 * FL_ENTRY_SIZE;
    let fl_func_offset = reader.read_u32(entry_addr + FL_FUNC_OFFSET)?;
    let sl_offset = reader.read_u32(entry_addr + FL_SECOND_LEVEL)? as u64;

    // A zero second-level offset marks an entry with no unwind info.
    if sl_offset == 0 {
        return None;
    }

    let sl_base = base + sl_offset;
    let page_kind = reader.read_u32(sl_base)?;

    match page_kind {
        SL_REGULAR => lookup_regular_page(reader, sl_base, func_offset),
        // Compressed entries store function offsets relative to the
        // first-level entry, so that base offset is threaded through.
        SL_COMPRESSED => lookup_compressed_page(
            reader,
            sl_base,
            func_offset,
            fl_func_offset,
            base + common_enc_offset,
            common_enc_count,
        ),
        _ => None,
    }
}
167
168fn first_level_binary_search(
169 reader: &dyn MemoryReader,
170 base: u64,
171 count: u32,
172 func_offset: u32,
173) -> Option<u32> {
174 if count == 0 {
175 return None;
176 }
177
178 let mut lo = 0u32;
180 let mut hi = count - 1;
181 let mut result = 0u32;
182
183 while lo <= hi {
184 let mid = lo + (hi - lo) / 2;
185 let entry_func = reader.read_u32(base + mid as u64 * FL_ENTRY_SIZE + FL_FUNC_OFFSET)?;
186
187 if entry_func <= func_offset {
188 result = mid;
189 if mid == hi {
190 break;
191 }
192 lo = mid + 1;
193 } else {
194 if mid == 0 {
195 break;
196 }
197 hi = mid - 1;
198 }
199 }
200
201 Some(result)
202}
203
204fn lookup_regular_page(
207 reader: &dyn MemoryReader,
208 page_base: u64,
209 func_offset: u32,
210) -> Option<(u32, u32)> {
211 let entry_page_offset = reader.read_u16(page_base + REG_ENTRY_PAGE_OFFSET)? as u64;
212 let entry_count = reader.read_u16(page_base + REG_ENTRY_COUNT)? as u32;
213
214 if entry_count == 0 {
215 return None;
216 }
217
218 let entries_base = page_base + entry_page_offset;
219
220 let mut lo = 0u32;
222 let mut hi = entry_count - 1;
223 let mut best: Option<(u32, u32)> = None;
224
225 while lo <= hi {
226 let mid = lo + (hi - lo) / 2;
227 let entry_addr = entries_base + mid as u64 * REG_ENTRY_SIZE;
228 let entry_func = reader.read_u32(entry_addr)?;
229
230 if entry_func <= func_offset {
231 let encoding = reader.read_u32(entry_addr + 4)?;
232 best = Some((encoding, entry_func));
233 if mid == hi {
234 break;
235 }
236 lo = mid + 1;
237 } else {
238 if mid == 0 {
239 break;
240 }
241 hi = mid - 1;
242 }
243 }
244
245 best
246}
247
/// Search a compressed second-level page for the entry covering `func_offset`.
///
/// Each compressed entry packs a 24-bit function offset (relative to the
/// first-level entry's `fl_func_offset`) and an 8-bit encoding index.
/// Encoding indices below `common_enc_count` select from the section-wide
/// common encodings table; the remainder index this page's local table.
fn lookup_compressed_page(
    reader: &dyn MemoryReader,
    page_base: u64,
    func_offset: u32,
    fl_func_offset: u32,
    common_enc_base: u64,
    common_enc_count: u32,
) -> Option<(u32, u32)> {
    let entry_page_offset = reader.read_u16(page_base + COMP_ENTRY_PAGE_OFFSET)? as u64;
    let entry_count = reader.read_u16(page_base + COMP_ENTRY_COUNT)? as u32;
    let enc_page_offset = reader.read_u16(page_base + COMP_ENC_PAGE_OFFSET)? as u64;
    let enc_count = reader.read_u16(page_base + COMP_ENC_COUNT)? as u32;

    if entry_count == 0 {
        return None;
    }

    let entries_base = page_base + entry_page_offset;

    // Inclusive-bounds binary search for the last entry whose absolute
    // function offset is <= func_offset.
    let mut lo = 0u32;
    let mut hi = entry_count - 1;
    let mut best_idx: Option<u32> = None;

    while lo <= hi {
        let mid = lo + (hi - lo) / 2;
        let entry = reader.read_u32(entries_base + mid as u64 * 4)?;
        // Low 24 bits: function offset relative to the first-level entry.
        let entry_func = fl_func_offset + (entry & 0x00FF_FFFF);

        if entry_func <= func_offset {
            best_idx = Some(mid);
            if mid == hi {
                break;
            }
            lo = mid + 1;
        } else {
            if mid == 0 {
                break;
            }
            hi = mid - 1;
        }
    }

    let idx = best_idx?;
    let entry = reader.read_u32(entries_base + idx as u64 * 4)?;
    // High 8 bits select the encoding; low 24 bits give the function start.
    let encoding_index = (entry >> 24) & 0xFF;
    let entry_func_base = fl_func_offset + (entry & 0x00FF_FFFF);

    let encoding = if encoding_index < common_enc_count {
        reader.read_u32(common_enc_base + encoding_index as u64 * 4)?
    } else {
        // Indices past the common table fall into this page's local table;
        // anything beyond that is malformed.
        let local_idx = encoding_index - common_enc_count;
        if local_idx < enc_count {
            let page_enc_base = page_base + enc_page_offset;
            reader.read_u32(page_enc_base + local_idx as u64 * 4)?
        } else {
            return None;
        }
    };

    Some((encoding, entry_func_base))
}
315
316pub fn decode_encoding(encoding: u32, cpu_type: CpuType) -> CompactEntry {
322 if encoding == 0 {
323 return CompactEntry::None;
324 }
325 if cpu_type == CpuType::ARM64 || cpu_type == CpuType::ARM {
326 decode_arm64(encoding)
327 } else {
328 decode_x86_64(encoding)
329 }
330}
331
332pub fn decode_arm64(encoding: u32) -> CompactEntry {
333 let mode = encoding & arm64_compact::MODE_MASK;
334
335 match mode {
336 arm64_compact::MODE_FRAME => {
337 let mut saved_regs = Vec::new();
339 let mut offset: i16 = -16; for (bit, &(r1, r2)) in arm64_compact::FRAME_REG_PAIRS.iter().enumerate() {
342 if encoding & (1 << bit) != 0 {
343 saved_regs.push((r1, offset));
344 saved_regs.push((r2, offset + 8));
345 offset -= 16;
346 }
347 }
348
349 CompactEntry::FrameBased { saved_regs }
352 }
353 arm64_compact::MODE_FRAMELESS => {
354 let stack_size = ((encoding & arm64_compact::FRAMELESS_STACK_SIZE_MASK)
355 >> arm64_compact::FRAMELESS_STACK_SIZE_SHIFT) as u64
356 * 16;
357 CompactEntry::FramelessImmediate {
358 stack_size,
359 saved_regs: Vec::new(),
360 return_address_register: Some(super::registers::arm64::LR),
361 }
362 }
363 arm64_compact::MODE_DWARF => {
364 let fde_offset = encoding & arm64_compact::DWARF_FDE_OFFSET_MASK;
365 CompactEntry::Dwarf { fde_offset }
366 }
367 _ => CompactEntry::None,
368 }
369}
370
371pub fn decode_x86_64(encoding: u32) -> CompactEntry {
372 let mode = encoding & x86_64_compact::MODE_MASK;
373
374 match mode {
375 x86_64_compact::MODE_FRAME => {
376 let reg_bits = encoding & x86_64_compact::FRAME_REG_MASK;
378 let frame_offset = ((encoding & x86_64_compact::FRAME_OFFSET_MASK)
379 >> x86_64_compact::FRAME_OFFSET_SHIFT) as i16;
380 let mut saved_regs = Vec::new();
381 let mut offset: i16 = -(frame_offset * 8);
382
383 for i in 0..5 {
384 let reg_enc = ((reg_bits >> (i * 3)) & 0x7) as usize;
385 if reg_enc != 0 && reg_enc < x86_64_compact::FRAME_REG_MAP.len() {
386 let dwarf_reg = x86_64_compact::FRAME_REG_MAP[reg_enc];
387 saved_regs.push((dwarf_reg, offset));
388 offset -= 8;
389 }
390 }
391
392 CompactEntry::FrameBased { saved_regs }
393 }
394 x86_64_compact::MODE_FRAMELESS_IMMEDIATE => {
395 let stack_size = ((encoding & x86_64_compact::FRAMELESS_STACK_SIZE_MASK)
396 >> x86_64_compact::FRAMELESS_STACK_SIZE_SHIFT) as u64
397 * 8;
398 let saved_regs = super::arch::x86_64_decode_permutation(encoding);
399 CompactEntry::FramelessImmediate {
400 stack_size,
401 saved_regs,
402 return_address_register: None,
403 }
404 }
405 x86_64_compact::MODE_FRAMELESS_INDIRECT => {
406 let sub_offset = (encoding & x86_64_compact::INDIRECT_STACK_OFFSET_MASK)
407 >> x86_64_compact::INDIRECT_STACK_OFFSET_SHIFT;
408 let stack_adjust = (encoding & x86_64_compact::INDIRECT_STACK_ADJUST_MASK)
409 >> x86_64_compact::INDIRECT_STACK_ADJUST_SHIFT;
410 let saved_regs = super::arch::x86_64_decode_permutation(encoding);
411 CompactEntry::FramelessIndirect {
412 sub_offset,
413 stack_adjust,
414 saved_regs,
415 }
416 }
417 x86_64_compact::MODE_DWARF => {
418 let fde_offset = encoding & x86_64_compact::DWARF_FDE_OFFSET_MASK;
419 CompactEntry::Dwarf { fde_offset }
420 }
421 _ => CompactEntry::None,
422 }
423}
424
/// Apply a decoded compact entry to `regs`, stepping to the caller's frame.
///
/// Returns `Ok(true)` when the registers were advanced, `Ok(false)` when
/// this entry cannot be applied here (`Dwarf` requires the DWARF unwinder;
/// `None` has no info). `func_start` is used only by `FramelessIndirect`
/// to locate the `sub` instruction's immediate.
///
/// Errors: `BrokenFrameChain` when a required register is absent,
/// `MemoryReadFailed` when a mandatory stack read fails. Reads of saved
/// callee registers are best-effort and silently skipped on failure.
pub fn apply_entry(
    entry: &CompactEntry,
    regs: &mut RegisterContext,
    reader: &dyn MemoryReader,
    func_start: u64,
    is_64_bit: bool,
) -> Result<bool, UnwindError> {
    match entry {
        CompactEntry::FrameBased { saved_regs } => {
            let fp = regs.fp().ok_or(UnwindError::BrokenFrameChain)?;

            let ptr_size = if is_64_bit { 8u64 } else { 4u64 };
            // [fp] holds the caller's frame pointer and [fp + ptr_size]
            // the return address.
            let prev_fp = reader
                .read_pointer(fp, is_64_bit)
                .ok_or(UnwindError::MemoryReadFailed(fp))?;
            let return_addr = reader
                .read_pointer(fp + ptr_size, is_64_bit)
                .ok_or(UnwindError::MemoryReadFailed(fp + ptr_size))?;

            // Restore callee-saved registers from their fp-relative slots;
            // a failed read leaves that register untouched.
            for &(dwarf_reg, offset) in saved_regs {
                let addr = (fp as i64 + offset as i64) as u64;
                if let Some(val) = reader.read_pointer(addr, is_64_bit) {
                    regs.set(dwarf_reg, val);
                }
            }

            regs.clear_volatile();
            regs.set_fp(prev_fp);
            // Caller's SP sits just above the saved FP/return-address pair.
            regs.set_sp(fp + 2 * ptr_size);
            regs.set_pc(return_addr);

            Ok(true)
        }
        CompactEntry::FramelessImmediate {
            stack_size,
            saved_regs,
            return_address_register,
        } => {
            let sp = regs.sp().ok_or(UnwindError::BrokenFrameChain)?;
            let ptr_size = if is_64_bit { 8u64 } else { 4u64 };

            // With a dedicated RA register (ARM64 LR) the return address
            // comes from the register and saved regs start at the top of
            // the frame; otherwise the return address is the topmost stack
            // slot and saved regs start one slot lower.
            // NOTE(review): `sp + stack_size - ptr_size` can wrap if
            // sp + stack_size < ptr_size — confirm encodings guarantee a
            // sane stack_size before relying on this in hostile input.
            let (return_addr, mut reg_addr) = if let Some(ra_reg) = return_address_register {
                let ra = regs.get(*ra_reg).ok_or(UnwindError::BrokenFrameChain)?;
                (ra, sp + stack_size - ptr_size)
            } else {
                let ra = reader
                    .read_pointer(sp + stack_size - ptr_size, is_64_bit)
                    .ok_or(UnwindError::MemoryReadFailed(sp + stack_size - ptr_size))?;
                (ra, sp + stack_size - 2 * ptr_size)
            };

            // Saved registers are laid out downward from reg_addr.
            for &dwarf_reg in saved_regs {
                if let Some(val) = reader.read_pointer(reg_addr, is_64_bit) {
                    regs.set(dwarf_reg, val);
                }
                reg_addr -= ptr_size;
            }

            regs.clear_volatile();
            regs.set_sp(sp + stack_size);
            regs.set_pc(return_addr);

            Ok(true)
        }
        CompactEntry::FramelessIndirect {
            sub_offset,
            stack_adjust,
            saved_regs,
        } => {
            let sp = regs.sp().ok_or(UnwindError::BrokenFrameChain)?;
            let ptr_size = if is_64_bit { 8u64 } else { 4u64 };

            // The real stack size lives in the `sub` instruction's 32-bit
            // immediate at func_start + sub_offset, plus the encoded
            // pointer-sized adjustment.
            let sub_addr = func_start + *sub_offset as u64;
            let sub_imm = reader
                .read_u32(sub_addr)
                .ok_or(UnwindError::MemoryReadFailed(sub_addr))?;
            let stack_size = sub_imm as u64 + (*stack_adjust as u64 * ptr_size);

            // Return address is the topmost stack slot.
            let return_addr = reader
                .read_pointer(sp + stack_size - ptr_size, is_64_bit)
                .ok_or(UnwindError::MemoryReadFailed(sp + stack_size - ptr_size))?;

            // Saved registers are laid out downward below the return slot.
            let mut reg_addr = sp + stack_size - 2 * ptr_size;
            for &dwarf_reg in saved_regs {
                if let Some(val) = reader.read_pointer(reg_addr, is_64_bit) {
                    regs.set(dwarf_reg, val);
                }
                reg_addr -= ptr_size;
            }

            regs.clear_volatile();
            regs.set_sp(sp + stack_size);
            regs.set_pc(return_addr);

            Ok(true)
        }
        // DWARF entries are handled by the caller's DWARF unwinder.
        CompactEntry::Dwarf { .. } => {
            Ok(false)
        }
        CompactEntry::None => Ok(false),
    }
}
540
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::CpuType;
    use crate::unwind::SliceMemoryReader;

    // Frame mode with the low two pair bits set saves the first two
    // register pairs (x19/x20 and x21/x22).
    #[test]
    fn decode_arm64_frame_based() {
        let encoding = arm64_compact::MODE_FRAME | 0x03;
        let entry = decode_arm64(encoding);
        match entry {
            CompactEntry::FrameBased { saved_regs } => {
                let reg_nums: Vec<u16> = saved_regs.iter().map(|(r, _)| *r).collect();
                assert!(reg_nums.contains(&19));
                assert!(reg_nums.contains(&20));
                assert!(reg_nums.contains(&21));
                assert!(reg_nums.contains(&22));
            }
            _ => panic!("expected FrameBased"),
        }
    }

    // Frameless stack size field counts 16-byte units: 3 -> 48 bytes,
    // with LR as the return-address register.
    #[test]
    fn decode_arm64_frameless() {
        let encoding =
            arm64_compact::MODE_FRAMELESS | (3 << arm64_compact::FRAMELESS_STACK_SIZE_SHIFT);
        let entry = decode_arm64(encoding);
        match entry {
            CompactEntry::FramelessImmediate {
                stack_size,
                return_address_register,
                ..
            } => {
                assert_eq!(stack_size, 48);
                assert_eq!(
                    return_address_register,
                    Some(super::super::registers::arm64::LR)
                );
            }
            _ => panic!("expected FramelessImmediate"),
        }
    }

    // DWARF mode passes the FDE offset bits through untouched.
    #[test]
    fn decode_arm64_dwarf() {
        let encoding = arm64_compact::MODE_DWARF | 0x42;
        let entry = decode_arm64(encoding);
        match entry {
            CompactEntry::Dwarf { fde_offset } => {
                assert_eq!(fde_offset, 0x42);
            }
            _ => panic!("expected Dwarf"),
        }
    }

    // One saved register (code 1 = RBX) at frame offset 1 -> rbp - 8.
    #[test]
    fn decode_x86_64_frame_based() {
        let encoding = x86_64_compact::MODE_FRAME | (1 << x86_64_compact::FRAME_OFFSET_SHIFT) | 1;
        let entry = decode_x86_64(encoding);
        match entry {
            CompactEntry::FrameBased { saved_regs } => {
                assert_eq!(saved_regs.len(), 1);
                assert_eq!(saved_regs[0].0, super::super::registers::x86_64::RBX);
                assert_eq!(saved_regs[0].1, -8);
            }
            _ => panic!("expected FrameBased"),
        }
    }

    // Frame offset 3 places the first saved register at rbp - 24.
    #[test]
    fn decode_x86_64_frame_offset() {
        let encoding = x86_64_compact::MODE_FRAME | (3 << x86_64_compact::FRAME_OFFSET_SHIFT) | 1;
        let entry = decode_x86_64(encoding);
        match entry {
            CompactEntry::FrameBased { saved_regs } => {
                assert_eq!(saved_regs.len(), 1);
                assert_eq!(saved_regs[0].0, super::super::registers::x86_64::RBX);
                assert_eq!(saved_regs[0].1, -24);
            }
            _ => panic!("expected FrameBased"),
        }
    }

    // x86_64 frameless stack size counts 8-byte units: 4 -> 32 bytes,
    // and there is no dedicated return-address register.
    #[test]
    fn decode_x86_64_frameless_immediate() {
        let encoding = x86_64_compact::MODE_FRAMELESS_IMMEDIATE
            | (4 << x86_64_compact::FRAMELESS_STACK_SIZE_SHIFT);
        let entry = decode_x86_64(encoding);
        match entry {
            CompactEntry::FramelessImmediate {
                stack_size,
                return_address_register,
                ..
            } => {
                assert_eq!(stack_size, 32);
                assert_eq!(return_address_register, None);
            }
            _ => panic!("expected FramelessImmediate"),
        }
    }

    // DWARF mode on x86_64 likewise passes the FDE offset bits through.
    #[test]
    fn decode_x86_64_dwarf() {
        let encoding = x86_64_compact::MODE_DWARF | 0x100;
        let entry = decode_x86_64(encoding);
        match entry {
            CompactEntry::Dwarf { fde_offset } => {
                assert_eq!(fde_offset, 0x100);
            }
            _ => panic!("expected Dwarf"),
        }
    }

    // Encoding 0 means "no unwind info" on both architectures.
    #[test]
    fn decode_zero_encoding() {
        assert!(matches!(decode_arm64(0), CompactEntry::None));
        assert!(matches!(decode_x86_64(0), CompactEntry::None));
    }

    // Build a synthetic FP-chained frame: [fp] = caller fp, [fp+8] = return
    // address, saved x19/x20 at fp-16/fp-8, then unwind through it.
    #[test]
    fn apply_frame_based_arm64() {
        let base = 0x5000u64;
        let mut data = vec![0u8; 0x2000];

        let fp_off = 0x800usize;
        data[fp_off..fp_off + 8].copy_from_slice(&(base + 0x900).to_le_bytes());
        data[fp_off + 8..fp_off + 16].copy_from_slice(&0xDEAD_CAFEu64.to_le_bytes());
        data[fp_off - 16..fp_off - 8].copy_from_slice(&0x1919u64.to_le_bytes());
        data[fp_off - 8..fp_off].copy_from_slice(&0x2020u64.to_le_bytes());

        let reader = SliceMemoryReader {
            data,
            base_address: base,
        };

        let mut regs = RegisterContext::new(CpuType::ARM64);
        regs.set_fp(base + fp_off as u64);
        regs.set_sp(base + fp_off as u64 - 32);
        regs.set_pc(0xAAAA);

        let entry = CompactEntry::FrameBased {
            saved_regs: vec![(19, -16), (20, -8)],
        };

        assert!(apply_entry(&entry, &mut regs, &reader, 0x1000, true).unwrap());
        assert_eq!(regs.fp(), Some(base + 0x900));
        assert_eq!(regs.pc(), Some(0xDEAD_CAFE));
        assert_eq!(regs.get(19), Some(0x1919));
        assert_eq!(regs.get(20), Some(0x2020));
    }

    // The return address is read from the topmost stack slot at
    // sp + stack_size - 8 = 0x80 + 32 - 8 = 0x98.
    #[test]
    fn apply_frameless_immediate_x86_64() {
        let base = 0x3000u64;
        let mut data = vec![0u8; 0x200];
        let stack_size = 32u64;

        data[0x98..0xA0].copy_from_slice(&0xBEEF_0001u64.to_le_bytes());

        let reader = SliceMemoryReader {
            data,
            base_address: base,
        };

        let mut regs = RegisterContext::new(CpuType::X86_64);
        regs.set_sp(base + 0x80);
        regs.set_pc(0xAAAA);

        let entry = CompactEntry::FramelessImmediate {
            stack_size,
            saved_regs: Vec::new(),
            return_address_register: None,
        };

        assert!(apply_entry(&entry, &mut regs, &reader, 0x1000, true).unwrap());
        assert_eq!(regs.pc(), Some(0xBEEF_0001));
        assert_eq!(regs.sp(), Some(base + 0x80 + stack_size));
    }

    // With an RA register (LR), the return address comes from the register
    // and no stack read is needed for it.
    #[test]
    fn apply_frameless_immediate_arm64_lr() {
        let base = 0x3000u64;
        let data = vec![0u8; 0x200];
        let stack_size = 32u64;

        let reader = SliceMemoryReader {
            data,
            base_address: base,
        };

        let mut regs = RegisterContext::new(CpuType::ARM64);
        regs.set_sp(base + 0x80);
        regs.set_pc(0xAAAA);
        regs.set(super::super::registers::arm64::LR, 0xCAFE_BABE);

        let entry = CompactEntry::FramelessImmediate {
            stack_size,
            saved_regs: Vec::new(),
            return_address_register: Some(super::super::registers::arm64::LR),
        };

        assert!(apply_entry(&entry, &mut regs, &reader, 0x1000, true).unwrap());
        assert_eq!(regs.pc(), Some(0xCAFE_BABE));
        assert_eq!(regs.sp(), Some(base + 0x80 + stack_size));
    }

    // Builds a tiny synthetic __unwind_info section:
    //   - header (version 1), 2 common encodings at offset 28,
    //   - first-level index at 36: two entries plus a sentinel,
    //   - a regular page at 72 covering funcs 0x0000 / 0x0100,
    //   - a compressed page at 104 covering funcs 0x1000 / 0x1200, where
    //     the second entry uses a page-local (DWARF) encoding.
    fn build_synthetic_unwind_info() -> (SliceMemoryReader, SectionRef) {
        let base = 0x10_0000u64;
        let mut data = vec![0u8; 256];

        let w32 = |d: &mut Vec<u8>, off: usize, val: u32| {
            d[off..off + 4].copy_from_slice(&val.to_le_bytes());
        };
        let w16 = |d: &mut Vec<u8>, off: usize, val: u16| {
            d[off..off + 2].copy_from_slice(&val.to_le_bytes());
        };

        w32(&mut data, 0, 1); // header: version = 1
        w32(&mut data, 4, 28); // common encodings array offset
        w32(&mut data, 8, 2); // common encodings count
        w32(&mut data, 12, 36); // (personality fields — unused by lookup)
        w32(&mut data, 16, 0);
        w32(&mut data, 20, 36); // first-level index offset
        w32(&mut data, 24, 3); // index count: 2 entries + sentinel
        w32(&mut data, 28, 0x0400_0001); // common encoding [0]
        w32(&mut data, 32, 0x0200_2000); // common encoding [1]
        w32(&mut data, 36, 0x0000); // index[0].func_offset = 0
        w32(&mut data, 40, 72); // index[0] -> regular page at 72
        w32(&mut data, 44, 0);
        w32(&mut data, 48, 0x1000); // index[1].func_offset = 0x1000
        w32(&mut data, 52, 104); // index[1] -> compressed page at 104
        w32(&mut data, 56, 0);
        w32(&mut data, 60, 0xFFFF); // sentinel entry (second_level = 0)
        w32(&mut data, 64, 0);
        w32(&mut data, 68, 0);
        w32(&mut data, 72, 2); // regular page kind
        w16(&mut data, 76, 8); // entries at page + 8
        w16(&mut data, 78, 2); // entry count
        w32(&mut data, 80, 0x0000); // entry[0].func
        w32(&mut data, 84, 0x0400_0001); // entry[0].encoding
        w32(&mut data, 88, 0x0100); // entry[1].func
        w32(&mut data, 92, 0x0200_2000); // entry[1].encoding
        w32(&mut data, 104, 3); // compressed page kind
        w16(&mut data, 108, 12); // entries at page + 12
        w16(&mut data, 110, 2); // entry count
        w16(&mut data, 112, 20); // local encodings at page + 20
        w16(&mut data, 114, 1); // local encoding count
        w32(&mut data, 116, (0u32 << 24) | 0x000000); // common enc 0, func +0
        w32(&mut data, 120, (2u32 << 24) | 0x000200); // local enc 0, func +0x200
        w32(&mut data, 124, 0x0300_0042); // page-local DWARF encoding
        let reader = SliceMemoryReader {
            data,
            base_address: base,
        };
        let section = SectionRef {
            vm_addr: base,
            size: 256,
        };

        (reader, section)
    }

    #[test]
    fn lookup_regular_page_first_entry() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x0050, base);
        assert_eq!(result, Some((0x0400_0001, 0x0000)));
    }

    #[test]
    fn lookup_regular_page_second_entry() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x0150, base);
        assert_eq!(result, Some((0x0200_2000, 0x0100)));
    }

    // Compressed entry 0 resolves through the common encodings table.
    #[test]
    fn lookup_compressed_page_first_entry() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x1050, base);
        assert_eq!(result, Some((0x0400_0001, 0x1000)));
    }

    // Compressed entry 1's index (2) exceeds the common count (2), so it
    // resolves through the page-local table.
    #[test]
    fn lookup_compressed_page_second_entry() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x1300, base);
        assert_eq!(result, Some((0x0300_0042, 0x1200)));
    }

    // The page-local encoding decodes as a DWARF reference.
    #[test]
    fn lookup_compressed_page_local_encoding() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x1200, base);
        let (encoding, func_base) = result.unwrap();
        assert_eq!(func_base, 0x1200);
        let entry = decode_arm64(encoding);
        match entry {
            CompactEntry::Dwarf { fde_offset } => assert_eq!(fde_offset, 0x42),
            _ => panic!("expected Dwarf, got {:?}", entry),
        }
    }

    // pc below the image base cannot be covered at all.
    #[test]
    fn lookup_pc_before_image_returns_none() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base - 1, base);
        assert!(result.is_none());
    }

    // The returned function offset identifies the function's start address.
    #[test]
    fn lookup_func_start_from_regular_page() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x01FF, base);
        let (_, func_base) = result.unwrap();
        assert_eq!(func_base, 0x0100);

        let func_start = base + func_base as u64;
        assert_eq!(func_start, base + 0x0100);
    }

    #[test]
    fn lookup_func_start_from_compressed_page() {
        let (reader, section) = build_synthetic_unwind_info();
        let base = reader.base_address;

        let result = lookup_encoding(&reader, &section, base + 0x13FF, base);
        let (_, func_base) = result.unwrap();
        assert_eq!(func_base, 0x1200);

        let func_start = base + func_base as u64;
        assert_eq!(func_start, base + 0x1200);
    }
}