 #include "random.h"
 #include "util.h"

+#if CONFIG_BLOCK_OPS_CHECK_SIZE && !defined(HAS_ARM_MTE)
+#include "musl.h"
+#endif
+
 #ifdef USE_PKEY
 #include <sys/mman.h>
 #endif
@@ -528,7 +532,7 @@ static void set_canary(UNUSED const struct slab_metadata *metadata, UNUSED void
     }
 #endif

-    memcpy((char *)p + size - canary_size, &metadata->canary_value, canary_size);
+    h_memcpy_internal((char *)p + size - canary_size, &metadata->canary_value, canary_size);
 #endif
 }

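
The allocator's own copies and clears are switched from the libc names to h_memcpy_internal()/h_memset_internal() so that internal bookkeeping does not go through the interposed, size-checking wrappers added at the end of this file. The definitions of these helpers are not shown in this diff; a plausible shape, assuming they simply alias the bundled musl routines when the wrappers are compiled in:

/* Sketch only, not part of this commit: assumed aliases for the internal helpers. */
#if CONFIG_BLOCK_OPS_CHECK_SIZE && !defined(HAS_ARM_MTE)
#define h_memcpy_internal musl_memcpy
#define h_memset_internal musl_memset
#else
#define h_memcpy_internal memcpy
#define h_memset_internal memset
#endif
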
@@ -541,7 +545,7 @@ static void check_canary(UNUSED const struct slab_metadata *metadata, UNUSED con
 #endif

     u64 canary_value;
-    memcpy(&canary_value, (const char *)p + size - canary_size, canary_size);
+    h_memcpy_internal(&canary_value, (const char *)p + size - canary_size, canary_size);

 #ifdef HAS_ARM_MTE
     if (unlikely(canary_value == 0)) {
@@ -831,7 +835,7 @@ static inline void deallocate_small(void *p, const size_t *expected_size) {
 #endif

         if (ZERO_ON_FREE && !skip_zero) {
-            memset(p, 0, size - canary_size);
+            h_memset_internal(p, 0, size - canary_size);
         }
     }

@@ -1502,7 +1506,7 @@ EXPORT void *h_calloc(size_t nmemb, size_t size) {
     total_size = adjust_size_for_canary(total_size);
     void *p = alloc(total_size);
     if (!ZERO_ON_FREE && likely(p != NULL) && total_size && total_size <= max_slab_size_class) {
-        memset(p, 0, total_size - canary_size);
+        h_memset_internal(p, 0, total_size - canary_size);
     }
 #ifdef HAS_ARM_MTE
     // use an assert instead of adding a conditional to memset() above (freed memory is always
@@ -1624,7 +1628,7 @@ EXPORT void *h_realloc(void *old, size_t size) {
             mutex_unlock(&ra->lock);

             if (memory_remap_fixed(old, old_size, new, size)) {
-                memcpy(new, old, copy_size);
+                h_memcpy_internal(new, old, copy_size);
                 deallocate_pages(old, old_size, old_guard_size);
             } else {
                 memory_unmap((char *)old - old_guard_size, old_guard_size);
@@ -1646,7 +1650,7 @@ EXPORT void *h_realloc(void *old, size_t size) {
     if (copy_size > 0 && copy_size <= max_slab_size_class) {
         copy_size -= canary_size;
     }
-    memcpy(new, old_orig, copy_size);
+    h_memcpy_internal(new, old_orig, copy_size);
     if (old_size <= max_slab_size_class) {
         deallocate_small(old, NULL);
     } else {
@@ -1874,6 +1878,133 @@ EXPORT size_t h_malloc_object_size_fast(const void *p) {
     return SIZE_MAX;
 }

+#if CONFIG_BLOCK_OPS_CHECK_SIZE && !defined(HAS_ARM_MTE)
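+// Interpose the libc bulk memory operations: each wrapper checks the requested
+// length against the size of the allocation backing the source and/or destination
+// (via malloc_object_size()), rejects overlapping ranges where the C standard
+// forbids them, and then delegates to the bundled musl implementations.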
+EXPORT void *memcpy(void *restrict dst, const void *restrict src, size_t len) {
+    if (unlikely(dst == src || len == 0)) {
+        return dst;
+    }
+    if (unlikely(dst < (src + len) && (dst + len) > src)) {
+        fatal_error("memcpy overlap");
+    }
+    if (unlikely(len > malloc_object_size(src))) {
+        fatal_error("memcpy read overflow");
+    }
+    if (unlikely(len > malloc_object_size(dst))) {
+        fatal_error("memcpy buffer overflow");
+    }
+    return musl_memcpy(dst, src, len);
+}
+
+EXPORT void *memccpy(void *restrict dst, const void *restrict src, int value, size_t len) {
+    if (unlikely(dst == src || len == 0)) {
+        return dst;
+    }
+    if (unlikely(dst < (src + len) && (dst + len) > src)) {
+        fatal_error("memccpy overlap");
+    }
+    if (unlikely(len > malloc_object_size(src) && value != 0)) {
+        fatal_error("memccpy read overflow");
+    }
+    if (unlikely(len > malloc_object_size(dst))) {
+        fatal_error("memccpy buffer overflow");
+    }
+    return musl_memccpy(dst, src, value, len);
+}
+
+EXPORT void *memmove(void *dst, const void *src, size_t len) {
+    if (unlikely(dst == src || len == 0)) {
+        return dst;
+    }
+    if (unlikely(len > malloc_object_size(src))) {
+        fatal_error("memmove read overflow");
+    }
+    if (unlikely(len > malloc_object_size(dst))) {
+        fatal_error("memmove buffer overflow");
+    }
+    return musl_memmove(dst, src, len);
+}
+
+EXPORT void *mempcpy(void *restrict dst, const void *restrict src, size_t len) {
+    return memcpy(dst, src, len) + len;
+}
+
+EXPORT void *memset(void *dst, int value, size_t len) {
+    if (unlikely(len == 0)) {
+        return dst;
+    }
+    if (unlikely(len > malloc_object_size(dst))) {
+        fatal_error("memset buffer overflow");
+    }
+    return musl_memset(dst, value, len);
+}
+
+EXPORT void bcopy(const void *src, void *dst, size_t len) {
+    memmove(dst, src, len);
+}
+
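+// swab() takes a signed length: non-positive lengths are treated as a no-op and
+// the bounds checks below operate on the value converted to size_t.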
+EXPORT void swab(const void *restrict src, void *restrict dst, ssize_t len) {
+    if (unlikely(len <= 0)) {
+        return;
+    }
+    size_t length = len;
+    if (unlikely(dst < (src + length) && (dst + length) > src)) {
+        fatal_error("swab overlap");
+    }
+    if (unlikely(length > malloc_object_size(src))) {
+        fatal_error("swab read overflow");
+    }
+    if (unlikely(length > malloc_object_size(dst))) {
+        fatal_error("swab buffer overflow");
+    }
+    return musl_swab(src, dst, len);
+}
+
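+// The wide-character wrappers scale the element count by sizeof(wchar_t) before
+// applying the same object-size checks.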
+EXPORT wchar_t *wmemcpy(wchar_t *restrict dst, const wchar_t *restrict src, size_t len) {
+    if (unlikely(dst == src || len == 0)) {
+        return dst;
+    }
+    if (unlikely(dst < (src + len) && (dst + len) > src)) {
+        fatal_error("wmemcpy overlap");
+    }
+    size_t lenAdj = len * sizeof(wchar_t);
+    if (unlikely(lenAdj > malloc_object_size(src))) {
+        fatal_error("wmemcpy read overflow");
+    }
+    if (unlikely(lenAdj > malloc_object_size(dst))) {
+        fatal_error("wmemcpy buffer overflow");
+    }
+    return (wchar_t *)musl_memcpy((char *)dst, (const char *)src, lenAdj);
+}
+
+EXPORT wchar_t *wmemmove(wchar_t *dst, const wchar_t *src, size_t len) {
+    if (unlikely(dst == src || len == 0)) {
+        return dst;
+    }
+    size_t lenAdj = len * sizeof(wchar_t);
+    if (unlikely(lenAdj > malloc_object_size(src))) {
+        fatal_error("wmemmove read overflow");
+    }
+    if (unlikely(lenAdj > malloc_object_size(dst))) {
+        fatal_error("wmemmove buffer overflow");
+    }
+    return (wchar_t *)musl_memmove((char *)dst, (const char *)src, lenAdj);
+}
+
+EXPORT wchar_t *wmempcpy(wchar_t *restrict dst, const wchar_t *restrict src, size_t len) {
+    return wmemcpy(dst, src, len) + len;
+}
+
+EXPORT wchar_t *wmemset(wchar_t *dst, wchar_t value, size_t len) {
+    if (unlikely(len == 0)) {
+        return dst;
+    }
+    if (unlikely((len * sizeof(wchar_t)) > malloc_object_size(dst))) {
+        fatal_error("wmemset buffer overflow");
+    }
+    return musl_wmemset(dst, value, len);
+}
+#endif /* CONFIG_BLOCK_OPS_CHECK_SIZE && !defined(HAS_ARM_MTE) */
+
 EXPORT int h_mallopt(UNUSED int param, UNUSED int value) {
 #ifdef __ANDROID__
     if (param == M_PURGE) {
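
With the wrappers above compiled in (assuming CONFIG_BLOCK_OPS_CHECK_SIZE is enabled like the other CONFIG_* build switches and the allocator is linked or preloaded in place of the libc malloc), an out-of-bounds copy into a heap allocation is expected to abort via fatal_error() rather than silently corrupt adjacent memory. A minimal sketch, not part of this commit:

#include <stdlib.h>
#include <string.h>

int main(void) {
    char *dst = malloc(16);     /* small slab allocation */
    char *src = malloc(4096);   /* large enough that the read check passes */
    /* 4096 exceeds malloc_object_size(dst), so the interposed memcpy() hits
       fatal_error("memcpy buffer overflow") before copying anything. */
    memcpy(dst, src, 4096);
    free(src);
    free(dst);
    return 0;
}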