
Commit 668c73c (0 parents)

feat: initial commit

darfink committed Dec 3, 2016
Showing 13 changed files with 1,026 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
target
Cargo.lock
12 changes: 12 additions & 0 deletions Cargo.toml
@@ -0,0 +1,12 @@
[package]
authors = ["Elliott Linder <elliott.darfink@gmail.com>"]
name = "detour"
version = "0.1.0"

[dependencies]
error-chain = "0.7.1"
generic-array = "0.5.1"
libc = "0.2.17"
libudis86-sys = "0.1.0"
memmap = "0.5.0"
region = "0.0.4"
24 changes: 24 additions & 0 deletions src/error.rs
@@ -0,0 +1,24 @@
use region;
use std;

error_chain! {
foreign_links {
RegionFailure(region::error::Error);
AllocateFailure(std::io::Error);
}

errors {
NotExecutable { display("address is not executable") }
IsExecutable { display("address is executable") }

InvalidAddress { display("cannot read from address") }
InvalidCode { display("function contains invalid code") }

ExternalLoop { display("function contains a loop with external destination") }
UnsupportedRelativeBranch {
display("function contains unhandled relative branching")
}

NoPatchArea { display("cannot find an inline patch area") }
}
}
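
For orientation, a minimal error-handling sketch (not part of this commit) showing how code inside the crate might react to these kinds, assuming the usual error-chain accessors:

use error::{ErrorKind, Result};

fn report(result: Result<()>) {
    if let Err(error) = result {
        match *error.kind() {
            ErrorKind::NotExecutable => eprintln!("target or detour is not executable"),
            ErrorKind::NoPatchArea => eprintln!("no inline patch area could be found"),
            _ => eprintln!("detour failed: {}", error),
        }
    }
}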
132 changes: 132 additions & 0 deletions src/inline/detour.rs
@@ -0,0 +1,132 @@
use std::io::Write;

use memmap::{Mmap, Protection};

use Detour;
use super::arch;
use inline::pic;
use error::*;
use util;

pub struct InlineDetour {
patcher: arch::Patcher,
trampoline: Mmap,
}

impl InlineDetour {
/// Constructs a new inline detour patcher.
pub unsafe fn new(target: *const (), detour: *const ()) -> Result<Self> {
if !util::is_executable_address(target)? || !util::is_executable_address(detour)? {
bail!(ErrorKind::NotExecutable);
}

// Create a trampoline generator for the target function
let patch_size = arch::Patcher::patch_size(target);
let trampoline = arch::Trampoline::new(target, patch_size)?;

Ok(InlineDetour {
patcher: arch::Patcher::new(target, detour, trampoline.prolog_size())?,
trampoline: Self::allocate_trampoline(trampoline.generator())?,
})
}

// TODO: allocate a trampoline close to the target
fn allocate_trampoline(generator: &pic::Generator) -> Result<Mmap> {
// Create a memory map for the trampoline
let mut map = Mmap::anonymous(generator.len(), Protection::ReadWrite)?;

// Generate the raw instruction bytes for the specific address
let trampoline = generator.generate(map.ptr() as *const ());
unsafe { map.as_mut_slice().write(&trampoline)? };
map.set_protection(Protection::ReadExecute)?;
Ok(map)
}
}

impl Detour for InlineDetour {
unsafe fn enable(&mut self) -> Result<()> {
self.patcher.toggle(true)
}

unsafe fn disable(&mut self) -> Result<()> {
self.patcher.toggle(false)
}

fn callable_address(&self) -> *const () {
self.trampoline.ptr() as *const ()
}

fn is_hooked(&self) -> bool {
self.patcher.is_patched()
}
}

impl Drop for InlineDetour {
fn drop(&mut self) {
unsafe { self.disable().unwrap() };
}
}

#[cfg(test)]
mod tests {
use super::*;
use std::mem;

type CRet = unsafe extern "C" fn() -> i32;

#[naked]
unsafe extern "C" fn branch_ret5() -> i32 {
asm!("test sp, sp
jne ret5
mov eax, 2
jmp done
ret5:
mov eax, 5
done:
ret"
:::: "intel");
::std::intrinsics::unreachable();
}

#[naked]
unsafe extern "C" fn hotpatch_ret0() -> i32 {
asm!("nop
nop
nop
nop
nop
xor eax, eax
ret"
:::: "intel");
::std::intrinsics::unreachable();
}

unsafe extern "C" fn ret10() -> i32 {
10
}

unsafe fn detour_test(target: CRet, result: i32) {
let mut hook = InlineDetour::new(target as *const (), ret10 as *const ()).unwrap();

assert_eq!(target(), result);
hook.enable().unwrap();
{
assert_eq!(target(), 10);

let original: CRet = mem::transmute(hook.callable_address());
assert_eq!(original(), result);
}
hook.disable().unwrap();
assert_eq!(target(), result);
}

#[test]
fn detour_relative_branch() {
unsafe { detour_test(branch_ret5, 5); }
}

#[test]
fn detour_hot_patch() {
unsafe { detour_test(mem::transmute(hotpatch_ret0 as usize + 5), 0); }
}
}
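
The tests above double as a usage example. A hedged sketch of the same flow from outside the crate (the paths assume that lib.rs, which is not among the loaded diffs, exposes the `Detour` trait at the root and the `inline` module with its `Inline` re-export):

use std::mem;
use detour::Detour;

type CFn = unsafe extern "C" fn() -> i32;

unsafe fn hook_example(target: CFn, detour: CFn) {
    // Redirect `target` to `detour`; the target needs a patchable prolog
    // or a hot-patch area for this to succeed.
    let mut hook = detour::inline::Inline::new(target as *const (), detour as *const ()).unwrap();
    hook.enable().unwrap();

    // The trampoline still exposes the original implementation.
    let original: CFn = mem::transmute(hook.callable_address());
    let _ = original();

    hook.disable().unwrap();
}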
13 changes: 13 additions & 0 deletions src/inline/mod.rs
@@ -0,0 +1,13 @@
// Re-export the inline detour
pub use self::detour::InlineDetour as Inline;

// Modules
mod detour;
mod pic;

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod x86;

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use self::x86 as arch;

61 changes: 61 additions & 0 deletions src/inline/pic.rs
@@ -0,0 +1,61 @@
/// Functionality for generating PIC.
pub struct Generator {
thunks: Vec<Box<Thunkable>>,
}

impl Generator {
/// Constructs a new PIC generator.
pub fn new() -> Self {
Generator { thunks: Vec::new() }
}

/// Generates code for use at the specified address.
pub fn generate(&self, base: *const ()) -> Vec<u8> {
let mut result = Vec::with_capacity(self.len());
let mut base = base as usize;

for thunk in &self.thunks {
// Retrieve the code for the segment
let code = thunk.generate(base);
assert_eq!(code.len(), thunk.len());

// Advance the current EIP address
base += thunk.len();
result.extend(code);
}

result
}

/// Adds a position-independent code segment.
pub fn add_thunk(&mut self, thunk: Box<Thunkable>) {
self.thunks.push(thunk);
}

/// Returns the total size of all code segments.
pub fn len(&self) -> usize {
self.thunks.iter().fold(0, |sum, thunk| sum + thunk.len())
}
}

/// An interface for generating PIC thunks.
pub trait Thunkable {
/// Generates the code at the specified address.
fn generate(&self, address: usize) -> Vec<u8>;

/// Returns the size of a generated thunk.
fn len(&self) -> usize;
}

/// Thunkable implementation for static data
impl Thunkable for Vec<u8> {
/// Generates static thunks assumed to be PIC
fn generate(&self, _address: usize) -> Vec<u8> {
self.clone()
}

/// Returns the size of a generated thunk
fn len(&self) -> usize {
self.len()
}
}
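
The `Vec<u8>` implementation above covers static bytes; position-dependent thunks make use of the `address` argument instead. A hypothetical example (the commit's real jump thunks live in src/inline/x86/thunk.rs, which is not among the loaded diffs):

/// Hypothetical thunk that emits the little-endian 32-bit displacement from
/// its own end to a fixed destination, the way a rel32 operand is computed.
struct RelativeOffset {
    destination: usize,
}

impl Thunkable for RelativeOffset {
    fn generate(&self, address: usize) -> Vec<u8> {
        // The displacement is measured from the first byte after this field
        let displacement = self.destination.wrapping_sub(address + self.len()) as u32;
        vec![
            displacement as u8,
            (displacement >> 8) as u8,
            (displacement >> 16) as u8,
            (displacement >> 24) as u8,
        ]
    }

    fn len(&self) -> usize {
        4
    }
}

A `Generator` can then interleave such position-dependent thunks with plain `Vec<u8>` segments via `add_thunk`.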
10 changes: 10 additions & 0 deletions src/inline/x86/mod.rs
@@ -0,0 +1,10 @@
extern crate libudis86_sys as udis;

// Re-exports
pub use self::patcher::Patcher;
pub use self::trampoline::Trampoline;

// Modules
mod patcher;
mod trampoline;
mod thunk;
126 changes: 126 additions & 0 deletions src/inline/x86/patcher.rs
@@ -0,0 +1,126 @@
use std::{mem, slice};
use region;

use super::thunk;
use inline::pic;
use error::*;
use util;

pub struct Patcher {
patched: bool,
patch_area: &'static mut [u8],
detour_bounce: Vec<u8>,
target_backup: Vec<u8>,
}

impl Patcher {
// TODO: add relay function for x64
// TODO: allocate memory close to target
pub unsafe fn new(target: *const (), detour: *const (), prolog_size: usize) -> Result<Patcher> {
let jump_rel32_size = mem::size_of::<thunk::x86::JumpRel>();
let jump_rel08_size = mem::size_of::<thunk::x86::JumpShort>();

// Check if there isn't enough space for a relative long jump
let patch_area = if !Self::is_patchable(target, prolog_size, jump_rel32_size) {
// ... otherwise check if a relative small jump fits
if Self::is_patchable(target, prolog_size, jump_rel08_size) {
// A small jump relies on there being a hot patch area above the
// function that consists of at least 5 bytes (a rel32 jump).
let hot_patch = (target as usize).wrapping_sub(jump_rel32_size);
let hot_patch_area = slice::from_raw_parts(hot_patch as *const u8, jump_rel32_size);

// Assert that the area only contains padding
if !Self::is_code_padding(hot_patch_area) {
bail!(ErrorKind::NoPatchArea);
}

// Ensure that the hot patch area is executable
if !util::is_executable_address(hot_patch_area.as_ptr() as *const _)? {
bail!(ErrorKind::NotExecutable);
}

// The range is from the start of the hot patch to the end of the jump
let patch_size = jump_rel32_size + jump_rel08_size;
slice::from_raw_parts_mut(hot_patch as *mut u8, patch_size)
} else {
bail!(ErrorKind::NoPatchArea);
}
} else {
// The range is from the start of the function to the end of the jump
slice::from_raw_parts_mut(target as *mut u8, jump_rel32_size)
};

let mut generator = pic::Generator::new();

// Both hot patch and normal detours use a relative long jump
generator.add_thunk(thunk::x86::jmp_rel32(detour as u32));

// The hot patch relies on a small jump to land on the long jump
if patch_area.len() > jump_rel32_size {
let displacement = -(jump_rel32_size as i8);
generator.add_thunk(thunk::x86::jmp_rel8(displacement));
}

let backup = patch_area.to_vec();
let patch_address = patch_area.as_ptr() as *const ();

Ok(Patcher {
patched: false,
patch_area: patch_area,
target_backup: backup,
detour_bounce: generator.generate(patch_address),
})
}

/// Either patches or removes a patch from a function.
pub unsafe fn toggle(&mut self, enable: bool) -> Result<()> {
if self.patched == enable {
return Ok(());
}

// Runtime code is by default only read-execute
let mut region = region::View::new(self.patch_area.as_ptr(), self.patch_area.len())?;

region.exec_with_prot(region::Protection::ReadWriteExecute, || {
// Copy either the detour or the original bytes of the function
self.patch_area.copy_from_slice(if enable {
&self.detour_bounce
} else {
&self.target_backup
});
})?;

self.patched = enable;
Ok(())
}

/// Returns whether the function is patched or not.
pub fn is_patched(&self) -> bool {
self.patched
}

/// Returns the default size of a patch.
pub fn patch_size(_target: *const ()) -> usize {
mem::size_of::<thunk::x86::JumpRel>()
}

/// Returns whether an address can be inline patched or not.
unsafe fn is_patchable(target: *const(), prolog_size: usize, patch_size: usize) -> bool {
if prolog_size >= patch_size {
// If the whole patch fits it's good to go!
true
} else {
// Otherwise the inline patch relies on padding after the prolog
let slice = slice::from_raw_parts(
(target as usize + prolog_size) as *const u8,
patch_size - prolog_size);
Self::is_code_padding(slice)
}
}

/// Returns true if the slice only contains code padding.
fn is_code_padding(buffer: &[u8]) -> bool {
const PADDING: [u8; 3] = [0x00, 0x90, 0xCC];
buffer.iter().all(|code| PADDING.contains(code))
}
}
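
To summarize the two strategies implemented above: in the normal case the rel32 jump overwrites the function prolog (plus any trailing padding if the prolog itself is too short), while in the hot-patch case the rel32 jump is written into the padding directly above the function and a two-byte rel8 jump at the entry point redirects back into it. A hypothetical module-level test (not part of this commit) for the padding detection the hot patch relies on:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn detects_code_padding() {
        // INT3 (0xCC), NOP (0x90) and zero bytes all count as padding...
        assert!(Patcher::is_code_padding(&[0xCC, 0x90, 0x00]));
        // ...while an ordinary prolog does not
        assert!(!Patcher::is_code_padding(&[0x55, 0x8B, 0xEC])); // push ebp; mov ebp, esp
    }
}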
(The remaining 5 file diffs were not loaded in this view.)
