add kmalloc/krealloc alloc_size attributes

Note that this is overly strict when combined with ksize users accessing
beyond the requested data size.

Signed-off-by: Daniel Micay <danielmicay@gmail.com>
Signed-off-by: anupritaisno1 <www.anuprita804@gmail.com>
commit 4e3b773c8c
parent 78d3c27fff
Author: Daniel Micay
Date: 2017-05-03 12:02:56 -04:00
Committed by: Kreciorek

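For context on the mechanism (an aside, not part of the patch): __attribute__((alloc_size(N))) tells GCC that the function returns a pointer to an object whose size in bytes is given by its Nth argument. That lets __builtin_object_size() and fortified memory/string helpers bound accesses by the requested allocation size instead of treating heap pointers as objects of unknown size. A minimal userspace sketch, with a hypothetical my_alloc() standing in for kmalloc():

#include <stdio.h>
#include <stdlib.h>

/* alloc_size(1): the returned object is 'size' bytes, as far as the
 * compiler is concerned. */
__attribute__((malloc, alloc_size(1)))
static void *my_alloc(size_t size)
{
	return malloc(size);
}

int main(void)
{
	char *p = my_alloc(8);

	if (!p)
		return 1;

	/* With optimization enabled, GCC folds this to 8 thanks to
	 * alloc_size(1); without the attribute it is (size_t)-1,
	 * i.e. "size unknown". */
	printf("%zu\n", __builtin_object_size(p, 0));

	free(p);
	return 0;
}

A write such as memset(p, 0, 16) on that pointer can then be diagnosed at build time (-Wstringop-overflow, _FORTIFY_SOURCE). That is the bounds information this patch attaches to krealloc() (alloc_size(2), since argument 1 is the old pointer), __kmalloc(), __kmalloc_node(), kmalloc(), and kmalloc_node().
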
@@ -181,7 +181,7 @@ int kmem_cache_shrink(struct kmem_cache *);
 /*
  * Common kmalloc functions provided by all allocators
  */
-void * __must_check krealloc(const void *, size_t, gfp_t);
+void * __must_check krealloc(const void *, size_t, gfp_t) __attribute__((alloc_size(2)));
 void kfree(const void *);
 void kfree_sensitive(const void *);
 size_t __ksize(const void *);
@@ -398,7 +398,7 @@ static __always_inline unsigned int kmalloc_index(size_t size)
 }
 #endif /* !CONFIG_SLOB */
 
-void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment __malloc;
+void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment __malloc __attribute__((alloc_size(1)));
 void *kmem_cache_alloc(struct kmem_cache *, gfp_t flags) __assume_slab_alignment __malloc;
 void kmem_cache_free(struct kmem_cache *, void *);
 
@@ -422,7 +422,7 @@ static __always_inline void kfree_bulk(size_t size, void **p)
 }
 
 #ifdef CONFIG_NUMA
-void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment __malloc;
+void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment __malloc __attribute__((alloc_size(1)));
 void *kmem_cache_alloc_node(struct kmem_cache *, gfp_t flags, int node) __assume_slab_alignment __malloc;
 #else
 static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node)
@@ -547,7 +547,7 @@ static __always_inline void *kmalloc_large(size_t size, gfp_t flags)
  * Try really hard to succeed the allocation but fail
  * eventually.
  */
-static __always_inline void *kmalloc(size_t size, gfp_t flags)
+static __always_inline __attribute__((alloc_size(1))) void *kmalloc(size_t size, gfp_t flags)
 {
 	if (__builtin_constant_p(size)) {
 #ifndef CONFIG_SLOB
@@ -569,7 +569,7 @@ static __always_inline void *kmalloc(size_t size, gfp_t flags)
 	return __kmalloc(size, flags);
 }
 
-static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node)
+static __always_inline __attribute__((alloc_size(1))) void *kmalloc_node(size_t size, gfp_t flags, int node)
 {
 #ifndef CONFIG_SLOB
 	if (__builtin_constant_p(size) &&
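
On the "overly strict" caveat in the commit message: ksize() reports the usable size of the backing slab object, which can exceed the size originally passed to kmalloc(), and some callers intentionally use that slack. With alloc_size() in place, the compiler believes the object ends at the requested size, so such accesses get flagged even though they are safe at runtime. A hedged userspace sketch of the mismatch, where usable_size() is a hypothetical analog of ksize() and the 16-byte bucketing is invented for illustration:

#include <stdlib.h>
#include <string.h>

#define BUCKET 16	/* pretend the allocator rounds up to 16-byte buckets */

__attribute__((malloc, alloc_size(1)))
static void *my_alloc(size_t size)
{
	return malloc((size + BUCKET - 1) & ~(size_t)(BUCKET - 1));
}

/* Hypothetical analog of ksize(): how much space really backs the object. */
static size_t usable_size(size_t requested)
{
	return (requested + BUCKET - 1) & ~(size_t)(BUCKET - 1);
}

int main(void)
{
	char *p = my_alloc(10);

	if (!p)
		return 1;

	/* 16 bytes genuinely exist, so this write is safe at runtime,
	 * but alloc_size(1) told the compiler the object is 10 bytes,
	 * so GCC's object-size checking typically warns here at -O2.
	 * This mirrors a ksize()-based access in the kernel. */
	memset(p, 0, usable_size(10));

	free(p);
	return 0;
}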