From fb9e573e8cd98a2a3af3e5096a5f9e765650c646 Mon Sep 17 00:00:00 2001
From: Peter Zhu
Date: Wed, 18 Dec 2024 12:01:09 -0500
Subject: [PATCH] Prefix asan_poison_object with rb

---
 ext/coverage/coverage.c      |  6 +++---
 ext/objspace/objspace.c      |  6 +++---
 ext/objspace/objspace_dump.c |  6 +++---
 gc.c                         |  8 ++++----
 gc/default/default.c         | 32 ++++++++++++++++----------------
 imemo.c                      |  4 ++--
 internal/sanitizers.h        | 14 +++++++-------
 iseq.c                       | 16 ++++++++--------
 rjit_c.c                     |  4 ++--
 vm_method.c                  |  6 +++---
 yjit.c                       |  4 ++--
 11 files changed, 53 insertions(+), 53 deletions(-)

diff --git a/ext/coverage/coverage.c b/ext/coverage/coverage.c
index 9fc93bb58d734c..1519b559cdafc1 100644
--- a/ext/coverage/coverage.c
+++ b/ext/coverage/coverage.c
@@ -243,8 +243,8 @@ method_coverage_i(void *vstart, void *vend, size_t stride, void *data)
     VALUE ncoverages = *(VALUE*)data, v;
 
     for (v = (VALUE)vstart; v != (VALUE)vend; v += stride) {
-        void *poisoned = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *poisoned = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (RB_TYPE_P(v, T_IMEMO) && imemo_type(v) == imemo_ment) {
             const rb_method_entry_t *me = (rb_method_entry_t *) v;
@@ -287,7 +287,7 @@ method_coverage_i(void *vstart, void *vend, size_t stride, void *data)
         }
 
         if (poisoned) {
-            asan_poison_object(v);
+            rb_asan_poison_object(v);
         }
     }
     return 0;
diff --git a/ext/objspace/objspace.c b/ext/objspace/objspace.c
index e3269b5cd954b4..0ead94713aca10 100644
--- a/ext/objspace/objspace.c
+++ b/ext/objspace/objspace.c
@@ -81,15 +81,15 @@ heap_iter(void *vstart, void *vend, size_t stride, void *ptr)
     VALUE v;
 
     for (v = (VALUE)vstart; v != (VALUE)vend; v += stride) {
-        void *poisoned = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *poisoned = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (RBASIC(v)->flags) {
             (*ctx->cb)(v, ctx->data);
         }
 
         if (poisoned) {
-            asan_poison_object(v);
+            rb_asan_poison_object(v);
         }
     }
 
diff --git a/ext/objspace/objspace_dump.c b/ext/objspace/objspace_dump.c
index 1bcb4033cb304c..96bd521c793c94 100644
--- a/ext/objspace/objspace_dump.c
+++ b/ext/objspace/objspace_dump.c
@@ -658,15 +658,15 @@ heap_i(void *vstart, void *vend, size_t stride, void *data)
     struct dump_config *dc = (struct dump_config *)data;
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
         dc->cur_page_slot_size = stride;
 
         if (dc->full_heap || RBASIC(v)->flags)
             dump_object(v, dc);
 
         if (ptr) {
-            asan_poison_object(v);
+            rb_asan_poison_object(v);
         }
     }
     return 0;
diff --git a/gc.c b/gc.c
index 82bef38bf92078..41c81efb71ae34 100644
--- a/gc.c
+++ b/gc.c
@@ -1478,7 +1478,7 @@ internal_object_p(VALUE obj)
         }
     }
     if (ptr || !RBASIC(obj)->flags) {
-        asan_poison_object(obj);
+        rb_asan_poison_object(obj);
     }
     return 1;
 }
@@ -4310,21 +4310,21 @@ rb_raw_obj_info_buitin_type(char *const buff, const size_t buff_size, const VALU
 #undef C
 
 void
-asan_poison_object(VALUE obj)
+rb_asan_poison_object(VALUE obj)
 {
     MAYBE_UNUSED(struct RVALUE *) ptr = (void *)obj;
     asan_poison_memory_region(ptr, rb_gc_obj_slot_size(obj));
 }
 
 void
-asan_unpoison_object(VALUE obj, bool newobj_p)
+rb_asan_unpoison_object(VALUE obj, bool newobj_p)
 {
     MAYBE_UNUSED(struct RVALUE *) ptr = (void *)obj;
     asan_unpoison_memory_region(ptr, rb_gc_obj_slot_size(obj), newobj_p);
 }
 
 void *
-asan_poisoned_object_p(VALUE obj)
+rb_asan_poisoned_object_p(VALUE obj)
 {
     MAYBE_UNUSED(struct RVALUE *) ptr = (void *)obj;
     return __asan_region_is_poisoned(ptr, rb_gc_obj_slot_size(obj));
diff --git a/gc/default/default.c b/gc/default/default.c
index 4dc4776fc5c3c0..505d9d6645d7e7 100644
--- a/gc/default/default.c
+++ b/gc/default/default.c
@@ -1621,7 +1621,7 @@ static void heap_page_free(rb_objspace_t *objspace, struct heap_page *page);
 static inline void
 heap_page_add_freeobj(rb_objspace_t *objspace, struct heap_page *page, VALUE obj)
 {
-    asan_unpoison_object(obj, false);
+    rb_asan_unpoison_object(obj, false);
 
     asan_unlock_freelist(page);
 
@@ -1641,7 +1641,7 @@ heap_page_add_freeobj(rb_objspace_t *objspace, struct heap_page *page, VALUE obj
         rb_bug("heap_page_add_freeobj: %p is not rvalue.", (void *)obj);
     }
 
-    asan_poison_object(obj);
+    rb_asan_poison_object(obj);
 
     gc_report(3, objspace, "heap_page_add_freeobj: add %p to freelist\n", (void *)obj);
 }
@@ -2291,7 +2291,7 @@ ractor_cache_allocate_slot(rb_objspace_t *objspace, rb_ractor_newobj_cache_t *ca
 
     if (RB_LIKELY(p)) {
         VALUE obj = (VALUE)p;
-        asan_unpoison_object(obj, true);
+        rb_asan_unpoison_object(obj, true);
         heap_cache->freelist = p->next;
 #if RGENGC_CHECK_MODE
         GC_ASSERT(rb_gc_impl_obj_slot_size(obj) == heap_slot_size(heap_idx));
@@ -2341,9 +2341,9 @@ ractor_cache_set_page(rb_objspace_t *objspace, rb_ractor_newobj_cache_t *cache,
     page->free_slots = 0;
     page->freelist = NULL;
 
-    asan_unpoison_object((VALUE)heap_cache->freelist, false);
+    rb_asan_unpoison_object((VALUE)heap_cache->freelist, false);
     GC_ASSERT(RB_TYPE_P((VALUE)heap_cache->freelist, T_NONE));
-    asan_poison_object((VALUE)heap_cache->freelist);
+    rb_asan_poison_object((VALUE)heap_cache->freelist);
 }
 
 static inline size_t
@@ -2926,7 +2926,7 @@ finalize_list(rb_objspace_t *objspace, VALUE zombie)
     while (zombie) {
         VALUE next_zombie;
         struct heap_page *page;
-        asan_unpoison_object(zombie, false);
+        rb_asan_unpoison_object(zombie, false);
 
         next_zombie = RZOMBIE(zombie)->next;
         page = GET_HEAP_PAGE(zombie);
@@ -3247,7 +3247,7 @@ try_move(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *free_page,
     VALUE dest = (VALUE)free_page->freelist;
     asan_lock_freelist(free_page);
     if (dest) {
-        asan_unpoison_object(dest, false);
+        rb_asan_unpoison_object(dest, false);
     }
     else {
         /* if we can't get something from the freelist then the page must be
@@ -3514,7 +3514,7 @@ gc_sweep_plane(rb_objspace_t *objspace, rb_heap_t *heap, uintptr_t p, bits_t bit
         VALUE vp = (VALUE)p;
         GC_ASSERT(vp % BASE_SLOT_SIZE == 0);
 
-        asan_unpoison_object(vp, false);
+        rb_asan_unpoison_object(vp, false);
         if (bitset & 1) {
             switch (BUILTIN_TYPE(vp)) {
               default: /* majority case */
@@ -3663,9 +3663,9 @@ gc_sweep_page(rb_objspace_t *objspace, rb_heap_t *heap, struct gc_sweep_context
     struct free_slot *ptr = sweep_page->freelist;
     while (ptr) {
         freelist_len++;
-        asan_unpoison_object((VALUE)ptr, false);
+        rb_asan_unpoison_object((VALUE)ptr, false);
         struct free_slot *next = ptr->next;
-        asan_poison_object((VALUE)ptr);
+        rb_asan_poison_object((VALUE)ptr);
         ptr = next;
     }
     asan_lock_freelist(sweep_page);
@@ -3712,15 +3712,15 @@ heap_page_freelist_append(struct heap_page *page, struct free_slot *freelist)
         asan_unlock_freelist(page);
         if (page->freelist) {
             struct free_slot *p = page->freelist;
-            asan_unpoison_object((VALUE)p, false);
+            rb_asan_unpoison_object((VALUE)p, false);
             while (p->next) {
                 struct free_slot *prev = p;
                 p = p->next;
-                asan_poison_object((VALUE)prev);
-                asan_unpoison_object((VALUE)p, false);
+                rb_asan_poison_object((VALUE)prev);
+                rb_asan_unpoison_object((VALUE)p, false);
             }
             p->next = freelist;
-            asan_poison_object((VALUE)p);
+            rb_asan_poison_object((VALUE)p);
         }
         else {
             page->freelist = freelist;
@@ -5143,12 +5143,12 @@ gc_verify_heap_pages_(rb_objspace_t *objspace, struct ccan_list_head *head)
         while (p) {
             VALUE vp = (VALUE)p;
             VALUE prev = vp;
-            asan_unpoison_object(vp, false);
+            rb_asan_unpoison_object(vp, false);
             if (BUILTIN_TYPE(vp) != T_NONE) {
                 fprintf(stderr, "freelist slot expected to be T_NONE but was: %s\n", rb_obj_info(vp));
            }
             p = p->next;
-            asan_poison_object(prev);
+            rb_asan_poison_object(prev);
         }
         asan_lock_freelist(page);
 
diff --git a/imemo.c b/imemo.c
index 49b925413bb3b0..f146051d810c05 100644
--- a/imemo.c
+++ b/imemo.c
@@ -470,12 +470,12 @@ vm_ccs_free(struct rb_class_cc_entries *ccs, int alive, VALUE klass)
                 }
                 else {
                     if (ptr) {
-                        asan_poison_object((VALUE)cc);
+                        rb_asan_poison_object((VALUE)cc);
                     }
                     continue;
                 }
                 if (ptr) {
-                    asan_poison_object((VALUE)cc);
+                    rb_asan_poison_object((VALUE)cc);
                 }
             }
 
diff --git a/internal/sanitizers.h b/internal/sanitizers.h
index 5a55335e42929a..6a9f80bcc95048 100644
--- a/internal/sanitizers.h
+++ b/internal/sanitizers.h
@@ -121,7 +121,7 @@ asan_poison_memory_region(const volatile void *ptr, size_t size)
 
 #ifdef RUBY_ASAN_ENABLED
 #define asan_poison_object_if(ptr, obj) do { \
-        if (ptr) asan_poison_object(obj); \
+        if (ptr) rb_asan_poison_object(obj); \
     } while (0)
 #else
 #define asan_poison_object_if(ptr, obj) ((void)(ptr), (void)(obj))
@@ -133,7 +133,7 @@ RUBY_SYMBOL_EXPORT_BEGIN
  *
  * @param[in] obj target object.
  */
-void asan_poison_object(VALUE obj);
+void rb_asan_poison_object(VALUE obj);
 
 /**
  * This function predicates if the given object is fully addressable or not.
@@ -142,7 +142,7 @@ void asan_poison_object(VALUE obj);
 * @retval 0 the given object is fully addressable.
  * @retval otherwise pointer to first such byte who is poisoned.
  */
-void *asan_poisoned_object_p(VALUE obj);
+void *rb_asan_poisoned_object_p(VALUE obj);
 
 /**
  * This is a variant of asan_unpoison_memory_region that takes a VALUE.
@@ -150,7 +150,7 @@ void *asan_poisoned_object_p(VALUE obj);
  * @param[in] obj target object.
  * @param[in] malloc_p if the memory region is like a malloc's return value or not.
  */
-void asan_unpoison_object(VALUE obj, bool newobj_p);
+void rb_asan_unpoison_object(VALUE obj, bool newobj_p);
 
 RUBY_SYMBOL_EXPORT_END
 
@@ -184,8 +184,8 @@ asan_unpoison_memory_region(const volatile void *ptr, size_t size, bool malloc_p
 static inline void *
 asan_unpoison_object_temporary(VALUE obj)
 {
-    void *ptr = asan_poisoned_object_p(obj);
-    asan_unpoison_object(obj, false);
+    void *ptr = rb_asan_poisoned_object_p(obj);
+    rb_asan_unpoison_object(obj, false);
     return ptr;
 }
 
@@ -193,7 +193,7 @@ static inline void *
 asan_poison_object_restore(VALUE obj, void *ptr)
 {
     if (ptr) {
-        asan_poison_object(obj);
+        rb_asan_poison_object(obj);
     }
     return NULL;
 }
diff --git a/iseq.c b/iseq.c
index 195e2a4ad53702..639ca3a4cd014e 100644
--- a/iseq.c
+++ b/iseq.c
@@ -1441,8 +1441,8 @@ remove_coverage_i(void *vstart, void *vend, size_t stride, void *data)
 {
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (rb_obj_is_iseq(v)) {
             rb_iseq_t *iseq = (rb_iseq_t *)v;
@@ -4035,8 +4035,8 @@ clear_attr_ccs_i(void *vstart, void *vend, size_t stride, void *data)
 {
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
         clear_attr_cc(v);
         asan_poison_object_if(ptr, v);
     }
@@ -4054,8 +4054,8 @@ clear_bf_ccs_i(void *vstart, void *vend, size_t stride, void *data)
 {
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
         clear_bf_cc(v);
         asan_poison_object_if(ptr, v);
     }
@@ -4075,8 +4075,8 @@ trace_set_i(void *vstart, void *vend, size_t stride, void *data)
 
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (rb_obj_is_iseq(v)) {
             rb_iseq_trace_set(rb_iseq_check((rb_iseq_t *)v), turnon_events);
diff --git a/rjit_c.c b/rjit_c.c
index 9afbabb9ef7a47..e421763480b775 100644
--- a/rjit_c.c
+++ b/rjit_c.c
@@ -493,8 +493,8 @@ for_each_iseq_i(void *vstart, void *vend, size_t stride, void *data)
     VALUE block = (VALUE)data;
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (rb_obj_is_iseq(v)) {
             extern VALUE rb_rjit_iseq_new(rb_iseq_t *iseq);
diff --git a/vm_method.c b/vm_method.c
index 69a533b1827a28..670b9fe23767ab 100644
--- a/vm_method.c
+++ b/vm_method.c
@@ -314,8 +314,8 @@ invalidate_all_refinement_cc(void *vstart, void *vend, size_t stride, void *data
 {
     VALUE v = (VALUE)vstart;
     for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (RBASIC(v)->flags) { // liveness check
             if (imemo_type_p(v, imemo_callcache)) {
@@ -327,7 +327,7 @@ invalidate_all_refinement_cc(void *vstart, void *vend, size_t stride, void *data
         }
 
         if (ptr) {
-            asan_poison_object(v);
+            rb_asan_poison_object(v);
         }
     }
     return 0; // continue to iteration
diff --git a/yjit.c b/yjit.c
index bc5a8e75287374..6a1d0308b2a8e4 100644
--- a/yjit.c
+++ b/yjit.c
@@ -1096,8 +1096,8 @@ for_each_iseq_i(void *vstart, void *vend, size_t stride, void *data)
     const struct iseq_callback_data *callback_data = (struct iseq_callback_data *)data;
     VALUE v = (VALUE)vstart;
    for (; v != (VALUE)vend; v += stride) {
-        void *ptr = asan_poisoned_object_p(v);
-        asan_unpoison_object(v, false);
+        void *ptr = rb_asan_poisoned_object_p(v);
+        rb_asan_unpoison_object(v, false);
 
         if (rb_obj_is_iseq(v)) {
             rb_iseq_t *iseq = (rb_iseq_t *)v;
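
Reviewer note, not part of the patch itself: every hunk above touches the same idiom, an
object-space iteration callback that records whether a slot is poisoned, unpoisons it so it
can be inspected, does its work on live slots, and then restores the poisoned state. The
sketch below shows that calling pattern with the renamed helpers. It is illustrative only:
it assumes it is compiled inside the Ruby source tree (for VALUE, RBASIC, and
internal/sanitizers.h), and example_count_live_i plus the count parameter are names invented
for this sketch, not code from this patch.

    #include "internal/sanitizers.h"   /* rb_asan_*_object, asan_poison_object_if */

    /* Count live (non-T_NONE) slots while respecting ASAN poisoning. */
    static int
    example_count_live_i(void *vstart, void *vend, size_t stride, void *data)
    {
        size_t *count = (size_t *)data;
        for (VALUE v = (VALUE)vstart; v != (VALUE)vend; v += stride) {
            /* Remember whether this slot was poisoned, then make it readable. */
            void *ptr = rb_asan_poisoned_object_p(v);
            rb_asan_unpoison_object(v, false);

            if (RBASIC(v)->flags) {    /* liveness check, as in the callbacks above */
                (*count)++;
            }

            /* Re-poison only if the slot was poisoned when we found it. */
            asan_poison_object_if(ptr, v);
        }
        return 0;                      /* 0 means "keep iterating" */
    }

Such a callback would be driven by rb_objspace_each_objects(), the same iterator the
callbacks in this patch are registered with; when ASAN is disabled the helpers compile to
no-ops, so the pattern costs nothing in normal builds.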