ebpf: when an error occurs, return the value of the Err. #5

Merged
merged 1 commit into from Dec 22, 2024
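Summary of the change: every uprobe/uretprobe wrapper now propagates the `c_long` error produced by its `try_*` helper (`Err(rc) => rc as u32`) instead of collapsing every failure to a constant `1`, and the `ok_or(-1)` fallbacks become `ok_or(1)` so the value stays meaningful after the cast to `u32` (a `-1` would wrap to `u32::MAX`). In `try_posix_memalign_exit`, the explicit `match` on the map lookup is replaced by `.ok_or(0)?`, which still yields a `0` return when there is no entry for the tid. The sketch below illustrates the resulting pattern; it is not code from this diff: `example_enter`/`try_example_enter` are hypothetical names, and the `aya_ebpf` import paths are assumed to match the crate's existing setup.

```rust
// Illustrative sketch only (hypothetical `example_enter`; assumes the aya-ebpf
// ProbeContext API already used by this crate, and omits the crate-level
// #![no_std]/#![no_main] boilerplate an eBPF program needs).
use aya_ebpf::{cty::c_long, macros::uprobe, programs::ProbeContext};

#[uprobe]
pub fn example_enter(ctx: ProbeContext) -> u32 {
    match try_example_enter(ctx) {
        Ok(rc) => rc,
        // Old: `Err(_) => 1` discarded the error value.
        // New: surface the c_long error code as the probe's return value.
        Err(rc) => rc as u32,
    }
}

fn try_example_enter(ctx: ProbeContext) -> Result<u32, c_long> {
    // `ok_or(1)` keeps the error positive; the old `ok_or(-1)` would have
    // become u32::MAX after the `as u32` cast in the wrapper above.
    let _size: usize = ctx.arg(0).ok_or(1)?;
    Ok(0)
}
```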
c-memleak-ebpf/src/main.rs (107 changes: 52 additions & 55 deletions)
@@ -31,33 +31,33 @@ static TRACE_ALL: bool = false;
pub fn malloc_enter(ctx: ProbeContext) -> u32 {
match try_malloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_malloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let size: usize = ctx.arg(0).ok_or(-1)?;
+ let size: usize = ctx.arg(0).ok_or(1)?;
gen_alloc_entry(&ctx, size)
}

#[uretprobe]
pub fn malloc_exit(ctx: RetProbeContext) -> u32 {
match try_malloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_malloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn free_enter(ctx: ProbeContext) -> u32 {
match try_free_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

@@ -70,13 +70,13 @@ fn try_free_enter(ctx: ProbeContext) -> Result<u32, c_long> {
pub fn calloc_enter(ctx: ProbeContext) -> u32 {
match try_calloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_calloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let count: usize = ctx.arg(0).ok_or(-1)?;
- let size: usize = ctx.arg(1).ok_or(-1)?;
+ let count: usize = ctx.arg(0).ok_or(1)?;
+ let size: usize = ctx.arg(1).ok_or(1)?;

gen_alloc_entry(&ctx, count * size)
}
@@ -85,26 +85,26 @@ fn try_calloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
pub fn calloc_exit(ctx: RetProbeContext) -> u32 {
match try_calloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_calloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn realloc_enter(ctx: ProbeContext) -> u32 {
match try_realloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_realloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.arg(0).ok_or(-1)?;
- let size: usize = ctx.arg(1).ok_or(-1)?;
+ let ptr: u64 = ctx.arg(0).ok_or(1)?;
+ let size: usize = ctx.arg(1).ok_or(1)?;

gen_free_enter(&ctx, ptr)?;
gen_alloc_entry(&ctx, size)
@@ -114,67 +114,67 @@ fn try_realloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
pub fn realloc_exit(ctx: RetProbeContext) -> u32 {
match try_realloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_realloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn mmap_enter(ctx: ProbeContext) -> u32 {
match try_mmap_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_mmap_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- //let addr: u64 = ctx.arg(0).ok_or(-1)?;
- let len: usize = ctx.arg(1).ok_or(-1)?;
+ //let addr: u64 = ctx.arg(0).ok_or(1)?;
+ let len: usize = ctx.arg(1).ok_or(1)?;
gen_alloc_entry(&ctx, len)
}

#[uretprobe]
pub fn mmap_exit(ctx: RetProbeContext) -> u32 {
match try_mmap_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_mmap_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn munmap_enter(ctx: ProbeContext) -> u32 {
match try_munmap_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_munmap_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let addr: u64 = ctx.arg(0).ok_or(-1)?;
+ let addr: u64 = ctx.arg(0).ok_or(1)?;
gen_free_enter(&ctx, addr)
}

#[uprobe]
pub fn mremap_enter(ctx: ProbeContext) -> u32 {
match try_mremap_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_mremap_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let old_addr: u64 = ctx.arg(0).ok_or(-1)?;
- //let old_size: usize = ctx.arg(1).ok_or(-1)?;
- let new_size: usize = ctx.arg(2).ok_or(-1)?;
+ let old_addr: u64 = ctx.arg(0).ok_or(1)?;
+ //let old_size: usize = ctx.arg(1).ok_or(1)?;
+ let new_size: usize = ctx.arg(2).ok_or(1)?;

gen_free_enter(&ctx, old_addr)?;
gen_alloc_entry(&ctx, new_size)
@@ -184,27 +184,27 @@ fn try_mremap_enter(ctx: ProbeContext) -> Result<u32, c_long> {
pub fn mremap_exit(ctx: RetProbeContext) -> u32 {
match try_mremap_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_mremap_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let new_addr: u64 = ctx.ret().ok_or(-1)?;
+ let new_addr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, new_addr)
}

#[uprobe]
pub fn posix_memalign_enter(ctx: ProbeContext) -> u32 {
match try_posix_memalign_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_posix_memalign_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let memptr: u64 = ctx.arg(0).ok_or(-1)?;
- //let alignment: usize = ctx.arg(1).ok_or(-1)?;
- let size: usize = ctx.arg(2).ok_or(-1)?;
+ let memptr: u64 = ctx.arg(0).ok_or(1)?;
+ //let alignment: usize = ctx.arg(1).ok_or(1)?;
+ let size: usize = ctx.arg(2).ok_or(1)?;

let tid = bpf_get_current_pid_tgid() as u32;
MEMPTRS.insert(&tid, &memptr, 0)?;
@@ -216,17 +216,14 @@ fn try_posix_memalign_enter(ctx: ProbeContext) -> Result<u32, c_long> {
pub fn posix_memalign_exit(ctx: RetProbeContext) -> u32 {
match try_posix_memalign_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_posix_memalign_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
let tid = bpf_get_current_pid_tgid() as u32;

- let memptr = match unsafe { MEMPTRS.get(&tid) } {
- Some(ptr) => ptr,
- None => return Ok(0),
- };
+ let memptr = unsafe { MEMPTRS.get(&tid) }.ok_or(0)?;
MEMPTRS.remove(&tid)?;

let addr = match unsafe { bpf_probe_read_user(*memptr as *const c_void) } {
Expand All @@ -241,105 +238,105 @@ fn try_posix_memalign_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
pub fn aligned_alloc_enter(ctx: ProbeContext) -> u32 {
match try_aligned_alloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_aligned_alloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- //let alignment: usize = ctx.arg(0).ok_or(-1)?;
- let size: usize = ctx.arg(1).ok_or(-1)?;
+ //let alignment: usize = ctx.arg(0).ok_or(1)?;
+ let size: usize = ctx.arg(1).ok_or(1)?;
gen_alloc_entry(&ctx, size)
}

#[uretprobe]
pub fn aligned_alloc_exit(ctx: RetProbeContext) -> u32 {
match try_aligned_alloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_aligned_alloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn valloc_enter(ctx: ProbeContext) -> u32 {
match try_valloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_valloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let size: usize = ctx.arg(0).ok_or(-1)?;
+ let size: usize = ctx.arg(0).ok_or(1)?;
gen_alloc_entry(&ctx, size)
}

#[uretprobe]
pub fn valloc_exit(ctx: RetProbeContext) -> u32 {
match try_valloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_valloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn memalign_enter(ctx: ProbeContext) -> u32 {
match try_memalign_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_memalign_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- //let alignment: usize = ctx.arg(0).ok_or(-1)?;
- let size: usize = ctx.arg(1).ok_or(-1)?;
+ //let alignment: usize = ctx.arg(0).ok_or(1)?;
+ let size: usize = ctx.arg(1).ok_or(1)?;
gen_alloc_entry(&ctx, size)
}

#[uretprobe]
pub fn memalign_exit(ctx: RetProbeContext) -> u32 {
match try_memalign_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_memalign_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}

#[uprobe]
pub fn pvalloc_enter(ctx: ProbeContext) -> u32 {
match try_pvalloc_enter(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_pvalloc_enter(ctx: ProbeContext) -> Result<u32, c_long> {
- let size: usize = ctx.arg(0).ok_or(-1)?;
+ let size: usize = ctx.arg(0).ok_or(1)?;
gen_alloc_entry(&ctx, size)
}

#[uretprobe]
pub fn pvalloc_exit(ctx: RetProbeContext) -> u32 {
match try_pvalloc_exit(ctx) {
Ok(rc) => rc,
- Err(_) => 1,
+ Err(rc) => rc as u32,
}
}

fn try_pvalloc_exit(ctx: RetProbeContext) -> Result<u32, c_long> {
- let ptr: u64 = ctx.ret().ok_or(-1)?;
+ let ptr: u64 = ctx.ret().ok_or(1)?;
gen_alloc_exit(&ctx, ptr)
}
