Commit ae44d79

refactor: general improvements

darfink committed Jun 15, 2017
1 parent c51d3f3 commit ae44d79
Showing 17 changed files with 416 additions and 369 deletions.
6 changes: 2 additions & 4 deletions Cargo.toml
@@ -17,10 +17,8 @@ generic-array = "0.8.2"
 lazy_static = "0.2.8"
 libc = "0.2"
 matches = "0.1.6"
-slice-pool = "0.3.3"
-
-[dependencies.region]
-git = "https://github.com/darfink/region-rs.git"
+region = "0.0.7"
+slice-pool = "0.3.4"
 
 [dependencies.mmap]
 git = "https://github.com/retep998/rust-mmap.git"
17 changes: 16 additions & 1 deletion README.md
@@ -1,4 +1,9 @@
-# `detour-rs`
+detour-rs
+=========
+[![Travis build status][travis-shield]][travis]
+[![Appveyor build status][appveyor-shield]][appveyor]
+[![crates.io version][crate-shield]][crate]
+[![Language (Rust)][rust-shield]][rust]
 
 This is a cross-platform detour library developed in Rust. Beyond the basic
 functionality, this library handles branch redirects, RIP-relative
@@ -104,3 +109,13 @@ fn basics() {
 be hooked with a 5-byte `jmp`, are supported thanks to the detection of
 code padding (`NOP/INT3` instructions). Therefore the required amount of
 trailing `NOP` instructions will be replaced, to make room for the detour.*
+
+<!-- Links -->
+[travis-shield]: https://img.shields.io/travis/darfink/detour-rs.svg?style=flat-square
+[travis]: https://travis-ci.org/darfink/detour-rs
+[appveyor-shield]: https://img.shields.io/appveyor/ci/darfink/detour-rs/master.svg?style=flat-square
+[appveyor]: https://ci.appveyor.com/project/darfink/detour-rs
+[crate-shield]: https://img.shields.io/crates/v/detour.svg?style=flat-square
+[crate]: https://crates.io/crates/detour
+[rust-shield]: https://img.shields.io/badge/powered%20by-rust-blue.svg?style=flat-square
+[rust]: https://www.rust-lang.org
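
For context on the README note above: a detour overwrites the first five bytes of the target with a relative `jmp`, and functions shorter than that are only hookable because the compiler's padding bytes (`NOP`/`INT3`) can be borrowed. A minimal sketch of that encoding, generic x86 with hypothetical names rather than code from this commit:

/// Encodes a 5-byte x86 `JMP rel32` from `source` to `target`.
/// The displacement is measured from the end of the instruction.
fn encode_jmp_rel32(source: usize, target: usize) -> [u8; 5] {
    let displacement = target.wrapping_sub(source.wrapping_add(5)) as i32;
    let d = displacement.to_le_bytes();
    [0xE9, d[0], d[1], d[2], d[3]] // 0xE9 = JMP rel32 opcode
}

A ±2 GiB displacement is all `rel32` can express, which is why the allocator changes below go to such lengths to place trampolines close to the hooked function.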
18 changes: 10 additions & 8 deletions src/alloc/mod.rs
@@ -2,21 +2,23 @@ use std::ops::{Deref, DerefMut};
 use std::sync::{Arc, Mutex};
 use error::*;
 
-mod details;
+mod proximity;
+mod search;
 
 /// A thread-safe memory pool for allocating chunks close to addresses.
-pub struct ProximityAllocator(Arc<Mutex<details::Allocator>>);
+pub struct Allocator(Arc<Mutex<proximity::ProximityAllocator>>);
 
-impl ProximityAllocator {
-    /// Creates a new proximity allocator
+// TODO: Decrease use of mutexes
+impl Allocator {
+    /// Creates a new proximity memory allocator.
     pub fn new(max_distance: usize) -> Self {
-        ProximityAllocator(Arc::new(Mutex::new(details::Allocator {
+        Allocator(Arc::new(Mutex::new(proximity::ProximityAllocator {
            max_distance: max_distance,
            pools: Vec::new(),
        })))
    }
 
-    /// Allocates a new slice close to `origin`.
+    /// Allocates read-, write- & executable memory close to `origin`.
    pub fn allocate(&mut self, origin: *const (), size: usize) -> Result<Slice> {
        let mut allocator = self.0.lock().unwrap();
        allocator.allocate(origin, size).map(|value| Slice {
@@ -29,8 +31,8 @@ impl ProximityAllocator {
 // TODO: Come up with a better name
 /// A handle for allocated proximity memory.
 pub struct Slice {
-    allocator: Arc<Mutex<details::Allocator>>,
-    value: details::Allocation,
+    allocator: Arc<Mutex<proximity::ProximityAllocator>>,
+    value: proximity::Allocation,
 }
 
 impl Drop for Slice {
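
For context: the `Drop` implementation for `Slice` is truncated in this view. Given the fields above, it presumably locks the shared allocator and returns the allocation so the backing pool can be reclaimed; a plausible sketch, where `release` is an assumed name based on the pool-release logic visible in src/alloc/proximity.rs:

impl Drop for Slice {
    fn drop(&mut self) {
        // Lock the shared allocator and hand the allocation back; once a
        // pool's last allocation is gone, the pool itself can be freed.
        self.allocator.lock().unwrap().release(&self.value);
    }
}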
100 changes: 20 additions & 80 deletions src/alloc/details.rs → src/alloc/proximity.rs
@@ -1,32 +1,30 @@
-use std::ops::Range;
 use std::slice;
+use std::ops::Range;
 
-use {region, mmap};
-use boolinator::Boolinator;
 use slice_pool::{SlicePool, PoolVal};
-use error::*;
+use boolinator::Boolinator;
+use mmap;
 
-lazy_static! {
-    static ref PAGE_SIZE: usize = region::page_size();
-}
+use error::*;
+use super::search as region_search;
 
 /// Defines the allocation type.
 pub type Allocation = PoolVal<u8>;
 
 /// Shared instance containing all pools
-pub struct Allocator {
+pub struct ProximityAllocator {
    pub max_distance: usize,
    pub pools: Vec<SlicePool<u8>>,
 }
 
-impl Allocator {
+impl ProximityAllocator {
    /// Allocates a slice in an eligible memory map.
    pub fn allocate(&mut self, origin: *const (), size: usize) -> Result<Allocation> {
        let memory_range = ((origin as usize).saturating_sub(self.max_distance))
            ..((origin as usize).saturating_add(self.max_distance));
 
        // Check if an existing pool can handle the allocation request
-        self.allocate_existing(&memory_range, size).or_else(|_| {
+        self.allocate_memory(&memory_range, size).or_else(|_| {
            // ... otherwise allocate a pool within the memory range
            self.allocate_pool(&memory_range, origin, size).map(|pool| {
                // Use the newly allocated pool for the request
@@ -46,7 +44,7 @@ impl Allocator {
 
            // Determine if this is the associated memory pool
            (lower..upper).contains(value.as_ptr() as usize)
-        }).unwrap();
+        }).expect("retrieving associated memory pool");
 
        // Release the pool if the associated allocation is unique
        if self.pools[index].allocations() == 1 {
@@ -55,12 +53,12 @@
    }
 
    /// Allocates a chunk using any of the existing pools.
-    fn allocate_existing(&mut self, range: &Range<usize>, size: usize) -> Result<Allocation> {
+    fn allocate_memory(&mut self, range: &Range<usize>, size: usize) -> Result<Allocation> {
        // Returns true if the pool's memory is within the range
        let is_pool_in_range = |pool: &SlicePool<u8>| {
-            let lower = pool.as_ptr();
-            let upper = unsafe { lower.offset(pool.len() as isize) };
-            range.contains(lower as usize) && range.contains(upper as usize - 1)
+            let lower = pool.as_ptr() as usize;
+            let upper = lower + pool.len();
+            range.contains(lower) && range.contains(upper - 1)
        };
 
        // Tries to allocate a slice within any eligible pool
@@ -75,34 +73,30 @@
                     range: &Range<usize>,
                     origin: *const (),
                     size: usize) -> Result<SlicePool<u8>> {
-        let after = RegionFreeIter::new(origin, Some(range.clone()), RegionSearch::After);
-        let before = RegionFreeIter::new(origin, Some(range.clone()), RegionSearch::Before);
+        let before = region_search::before(origin, Some(range.clone()));
+        let after = region_search::after(origin, Some(range.clone()));
 
+        // TODO: Part of the pool can be out of range
        // Try to allocate after the specified address first (mostly because
        // macOS cannot allocate memory before the process's address).
        after.chain(before).filter_map(|result| {
            match result {
-                Ok(address) => Self::allocate_region_pool(address, size).map(Ok),
+                Ok(address) => Self::allocate_fixed_pool(address, size).map(Ok),
                Err(error) => Some(Err(error)),
            }
        }).next().unwrap_or(Err(ErrorKind::OutOfMemory.into()))
    }
 
    /// Tries to allocate fixed memory at the specified address.
-    fn allocate_region_pool(address: *const (), size: usize) -> Option<SlicePool<u8>> {
+    fn allocate_fixed_pool(address: *const (), size: usize) -> Option<SlicePool<u8>> {
        // Try to allocate memory at the specified address
-        mmap::MemoryMap::new(Self::page_ceil(size), &[
+        mmap::MemoryMap::new(size, &[
            mmap::MapOption::MapReadable,
            mmap::MapOption::MapWritable,
            mmap::MapOption::MapExecutable,
            mmap::MapOption::MapAddr(address as *const _),
        ]).ok().map(SliceableMemoryMap).map(SlicePool::new)
    }
-
-    /// Rounds an address up to the closest page boundary.
-    fn page_ceil(address: usize) -> usize {
-        (address + *PAGE_SIZE - 1) & !(*PAGE_SIZE - 1)
-    }
 }
 
 // TODO: Use memmap-rs instead
@@ -127,58 +121,4 @@ impl AsMut<[u8]> for SliceableMemoryMap {
    fn as_mut(&mut self) -> &mut [u8] { self.as_mut_slice() }
 }
 
-unsafe impl Send for SliceableMemoryMap { }
-
-/// Direction for the region search.
-pub enum RegionSearch {
-    Before,
-    After,
-}
-
-/// An iterator searching for free regions.
-pub struct RegionFreeIter {
-    range: Range<usize>,
-    search: RegionSearch,
-    current: usize,
-}
-
-impl RegionFreeIter {
-    /// Creates a new iterator for free regions.
-    pub fn new(origin: *const (), range: Option<Range<usize>>, search: RegionSearch) -> Self {
-        RegionFreeIter {
-            range: range.unwrap_or(0..usize::max_value()),
-            current: origin as usize,
-            search: search,
-        }
-    }
-}
-
-impl Iterator for RegionFreeIter {
-    type Item = Result<*const ()>;
-
-    /// Returns the next free region for the current address.
-    fn next(&mut self) -> Option<Self::Item> {
-        let page_size = region::page_size();
-
-        while self.current > 0 && self.range.contains(self.current) {
-            match region::query(self.current as *const _) {
-                Ok(region) => self.current = match self.search {
-                    RegionSearch::Before => region.lower().saturating_sub(page_size),
-                    RegionSearch::After => region.upper(),
-                },
-                Err(error) => {
-                    match self.search {
-                        RegionSearch::Before => self.current -= page_size,
-                        RegionSearch::After => self.current += page_size,
-                    }
-
-                    // Check whether the region is free, otherwise return the error
-                    return Some(matches!(error.kind(), &region::error::ErrorKind::Free)
-                        .as_result(self.current as *const _, error.into()));
-                },
-            }
-        }
-
-        None
-    }
-}
+unsafe impl Send for SliceableMemoryMap { }
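
For context: a usage sketch of the allocator above, driven through the `Allocator` facade in src/alloc/mod.rs. The ±2 GiB `max_distance` is an assumed value (the reach of a `rel32` jmp), and slice access through `Deref` is inferred from the `Deref`/`DerefMut` imports in mod.rs rather than shown in this diff:

fn proximity_example(origin: *const ()) -> Result<()> {
    // Allow allocations anywhere within origin ± 2 GiB; the window is
    // computed with saturating arithmetic at the address-space edges.
    let mut allocator = Allocator::new(0x8000_0000);

    // Reuses an in-range pool when possible, otherwise maps a new
    // readable, writable & executable pool near `origin`.
    let slice = allocator.allocate(origin, 32)?;
    assert!(slice.len() >= 32);
    Ok(())
}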
76 changes: 76 additions & 0 deletions src/alloc/search.rs
@@ -0,0 +1,76 @@
use std::ops::Range;
use region;
use error::*;

/// Returns an iterator for free regions before the specified address.
pub fn before(origin: *const (), range: Option<Range<usize>>) -> RegionIter {
    RegionIter::new(origin, range, SearchDirection::Before)
}

/// Returns an iterator for free regions after the specified address.
pub fn after(origin: *const (), range: Option<Range<usize>>) -> RegionIter {
    RegionIter::new(origin, range, SearchDirection::After)
}

/// Direction for the region search.
enum SearchDirection {
    Before,
    After,
}

/// An iterator searching for free regions.
pub struct RegionIter {
    range: Range<usize>,
    search: SearchDirection,
    current: usize,
}

impl RegionIter {
    /// Creates a new iterator for free regions.
    fn new(origin: *const (), range: Option<Range<usize>>, search: SearchDirection) -> Self {
        RegionIter {
            range: range.unwrap_or(0..usize::max_value()),
            current: origin as usize,
            search,
        }
    }
}

impl Iterator for RegionIter {
    type Item = Result<*const ()>;

    /// Returns the closest free region for the current address.
    fn next(&mut self) -> Option<Self::Item> {
        let page_size = region::page_size();

        while self.current > 0 && self.range.contains(self.current) {
            match region::query(self.current as *const _) {
                Ok(region) => self.current = match self.search {
                    SearchDirection::Before => region.lower().saturating_sub(page_size),
                    SearchDirection::After => region.upper(),
                },
                Err(error) => {
                    // Check whether the region is free, otherwise return the error
                    let result = Some(match error.kind() {
                        &region::error::ErrorKind::Free => Ok(self.current as *const _),
                        _ => Err(error.into())
                    });

                    // Adjust the offset for repeated calls.
                    match self.search {
                        SearchDirection::Before => self.current = self.current.saturating_sub(page_size),
                        SearchDirection::After => self.current += page_size,
                    }

                    return result;
                },
            }
        }

        None
    }
}

#[cfg(test)]
mod tests {
}
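
For context: a minimal sketch of driving the new search module, mirroring its use in `allocate_pool` above. A `None` range means the whole address space is eligible:

// Hypothetical helper; `search` refers to this module (src/alloc/search.rs).
fn first_free_after(origin: *const ()) -> Option<Result<*const ()>> {
    // Walks region by region via `region::query` until an unmapped (free)
    // page is found or the range is exhausted.
    search::after(origin, None).next()
}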