retour/arch/x86/patcher.rs

use super::thunk;
use crate::error::{Error, Result};
use crate::{pic, util};
use std::{mem, slice};

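/// Patches a function with a redirect to a detour, while preserving the
/// original prolog bytes so the hook can be removed again.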
pub struct Patcher {
  patch_area: &'static mut [u8],
  original_prolog: Vec<u8>,
  detour_prolog: Vec<u8>,
}

impl Patcher {
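  /// Creates a new patcher for a target function and its detour.
  /// `prolog_size` is the number of bytes at the target that can safely be
  /// overwritten.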
  pub unsafe fn new(target: *const (), detour: *const (), prolog_size: usize) -> Result<Patcher> {
    let patch_area = Self::patch_area(target, prolog_size)?;
    let emitter = Self::hook_template(detour, patch_area);

    let patch_address = patch_area.as_ptr() as *const ();
    // Keep a copy of the original bytes so the patch can be toggled off.
    let original_prolog = patch_area.to_vec();

    Ok(Patcher {
      detour_prolog: emitter.emit(patch_address),
      original_prolog,
      patch_area,
    })
  }

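  /// Returns the target's patch area.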
  pub fn area(&self) -> &[u8] {
    self.patch_area
  }

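  /// Either applies or removes the patch, by writing the detour prolog or
  /// restoring the original bytes.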
  pub unsafe fn toggle(&mut self, enable: bool) {
    self.patch_area.copy_from_slice(if enable {
      &self.detour_prolog
    } else {
      &self.original_prolog
    });
  }

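  /// Determines the patch area for a target function. An inline 32-bit
  /// relative jump is preferred; if the prolog cannot fit one, the padding
  /// preceding the function is used instead (hot patching).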
  unsafe fn patch_area(target: *const (), prolog_size: usize) -> Result<&'static mut [u8]> {
    let jump_rel08_size = mem::size_of::<thunk::x86::JumpShort>();
    let jump_rel32_size = mem::size_of::<thunk::x86::JumpRel>();

    if !Self::is_patchable(target, prolog_size, jump_rel32_size) {
      if Self::is_patchable(target, prolog_size, jump_rel08_size) {
        // The prolog fits only a short jump, so the 32-bit jump must go in
        // the padding that precedes the function (i.e. hot patching).
        let hot_patch = target as usize - jump_rel32_size;
        let hot_patch_area = slice::from_raw_parts(hot_patch as *const u8, jump_rel32_size);

        // The padding must consist of filler bytes and be executable.
        if !Self::is_code_padding(hot_patch_area)
          || !util::is_executable_address(hot_patch_area.as_ptr() as *const _)?
        {
          Err(Error::NoPatchArea)?;
        }

        // The patch covers both the preceding padding and the short jump.
        let patch_size = jump_rel32_size + jump_rel08_size;
        Ok(slice::from_raw_parts_mut(hot_patch as *mut u8, patch_size))
      } else {
        Err(Error::NoPatchArea)
      }
    } else {
      // The prolog itself can hold the 32-bit jump.
      Ok(slice::from_raw_parts_mut(
        target as *mut u8,
        jump_rel32_size,
      ))
    }
  }

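  /// Assembles the code for a patch area: a 32-bit relative jump to the
  /// detour, a short jump at the entry point when hot patching, and NOPs
  /// for any remaining bytes.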
  fn hook_template(detour: *const (), patch_area: &[u8]) -> pic::CodeEmitter {
    let mut emitter = pic::CodeEmitter::new();

    // The base of the hook is a 32-bit relative jump to the detour.
    emitter.add_thunk(thunk::x86::jmp_rel32(detour as usize));

    // A patch area larger than a single 32-bit jump means hot patching is
    // used: the function's entry receives a short jump back to the 32-bit
    // jump placed in the preceding padding.
    let jump_rel32_size = mem::size_of::<thunk::x86::JumpRel>();
    let uses_hot_patch = patch_area.len() > jump_rel32_size;

    if uses_hot_patch {
      let displacement = -(jump_rel32_size as i8);
      emitter.add_thunk(thunk::x86::jmp_rel8(displacement));
    }

    // Fill the rest of the patch area with NOPs.
    while emitter.len() < patch_area.len() {
      emitter.add_thunk(thunk::x86::nop());
    }

    emitter
  }

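  /// Returns whether a patch of `patch_size` bytes fits at the target,
  /// taking any code padding beyond the prolog into account.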
  unsafe fn is_patchable(target: *const (), prolog_size: usize, patch_size: usize) -> bool {
    if prolog_size >= patch_size {
      return true;
    }

    // The prolog is too small on its own; check whether the bytes after it
    // are code padding that may be overwritten as well.
    let slice = slice::from_raw_parts(
      (target as usize + prolog_size) as *const u8,
      patch_size - prolog_size,
    );

    Self::is_code_padding(slice)
  }

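  /// Returns whether a buffer consists entirely of code padding bytes
  /// (`0x00`, `NOP` or `INT3`).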
  fn is_code_padding(buffer: &[u8]) -> bool {
    const PADDING: [u8; 3] = [0x00, 0x90, 0xCC];
    buffer.iter().all(|code| PADDING.contains(code))
  }
}