
Commit 040d11c

Author: Fox Snowpatch (committed)
1 parent: 1ecdccb

24 files changed: +472 −81 lines changed

arch/arm64/Kconfig  (+1)

@@ -21,6 +21,7 @@ config ARM64
 	select ARCH_ENABLE_THP_MIGRATION if TRANSPARENT_HUGEPAGE
 	select ARCH_HAS_CACHE_LINE_SIZE
 	select ARCH_HAS_CC_PLATFORM
+	select ARCH_HAS_COPY_MC if ACPI_APEI_GHES
 	select ARCH_HAS_CURRENT_STACK_POINTER
 	select ARCH_HAS_DEBUG_VIRTUAL
 	select ARCH_HAS_DEBUG_VM_PGTABLE

arch/arm64/include/asm/asm-extable.h  (+26 −5)

@@ -5,11 +5,13 @@
 #include <linux/bits.h>
 #include <asm/gpr-num.h>
 
-#define EX_TYPE_NONE			0
-#define EX_TYPE_BPF			1
-#define EX_TYPE_UACCESS_ERR_ZERO	2
-#define EX_TYPE_KACCESS_ERR_ZERO	3
-#define EX_TYPE_LOAD_UNALIGNED_ZEROPAD	4
+#define EX_TYPE_NONE				0
+#define EX_TYPE_BPF				1
+#define EX_TYPE_UACCESS_ERR_ZERO		2
+#define EX_TYPE_KACCESS_ERR_ZERO		3
+#define EX_TYPE_LOAD_UNALIGNED_ZEROPAD		4
+/* kernel access memory error safe */
+#define EX_TYPE_KACCESS_ERR_ZERO_MEM_ERR	5
 
 /* Data fields for EX_TYPE_UACCESS_ERR_ZERO */
 #define EX_DATA_REG_ERR_SHIFT	0
@@ -51,6 +53,17 @@
 #define _ASM_EXTABLE_UACCESS(insn, fixup)				\
 	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, wzr, wzr)
 
+#define _ASM_EXTABLE_KACCESS_ERR_ZERO_MEM_ERR(insn, fixup, err, zero)	\
+	__ASM_EXTABLE_RAW(insn, fixup,					\
+			  EX_TYPE_KACCESS_ERR_ZERO_MEM_ERR,		\
+			  (						\
+			    EX_DATA_REG(ERR, err) |			\
+			    EX_DATA_REG(ZERO, zero)			\
+			  ))
+
+#define _ASM_EXTABLE_KACCESS_MEM_ERR(insn, fixup)			\
+	_ASM_EXTABLE_KACCESS_ERR_ZERO_MEM_ERR(insn, fixup, wzr, wzr)
+
 /*
  * Create an exception table entry for uaccess `insn`, which will branch to `fixup`
  * when an unhandled fault is taken.
@@ -69,6 +82,14 @@
 	.endif
 	.endm
 
+/*
+ * Create an exception table entry for kaccess `insn`, which will branch to
+ * `fixup` when an unhandled fault is taken.
+ */
+	.macro		_asm_extable_kaccess_mem_err, insn, fixup
+	_ASM_EXTABLE_KACCESS_MEM_ERR(\insn, \fixup)
+	.endm
+
 #else /* __ASSEMBLY__ */
 
 #include <linux/stringify.h>
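
Note (outside the diff): EX_TYPE_KACCESS_ERR_ZERO_MEM_ERR reuses the EX_DATA_REG_ERR/EX_DATA_REG_ZERO encoding of the existing *_ERR_ZERO entry types, so a fixup handler can recover both registers from ex->data with FIELD_GET(). The sketch below is modeled on the in-tree ex_handler_uaccess_err_zero() in arch/arm64/mm/extable.c; the function name and placement are assumptions, not something shown in this commit.

#include <linux/bitfield.h>
#include <linux/errno.h>
#include <linux/extable.h>
#include <asm/asm-extable.h>
#include <asm/ptrace.h>

/* Sketch only: assumed handler for the new extable type. It reports the
 * failure through the recorded error register, clears the data register,
 * and resumes execution at the fixup target encoded in the entry.
 */
static bool ex_handler_kaccess_err_zero_mem_err(const struct exception_table_entry *ex,
						struct pt_regs *regs)
{
	int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
	int reg_zero = FIELD_GET(EX_DATA_REG_ZERO, ex->data);

	pt_regs_write_reg(regs, reg_err, -EFAULT);
	pt_regs_write_reg(regs, reg_zero, 0);

	/* ex->fixup is stored as an offset relative to its own address */
	regs->pc = (unsigned long)&ex->fixup + ex->fixup;
	return true;
}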

arch/arm64/include/asm/asm-uaccess.h  (+4)

@@ -57,6 +57,10 @@ alternative_else_nop_endif
 	.endm
 #endif
 
+#define KERNEL_MEM_ERR(l, x...)			\
+9999:	x;					\
+	_asm_extable_kaccess_mem_err	9999b, l
+
 #define USER(l, x...)				\
 9999:	x;					\
 	_asm_extable_uaccess	9999b, l

arch/arm64/include/asm/extable.h  (+1)

@@ -46,4 +46,5 @@ bool ex_handler_bpf(const struct exception_table_entry *ex,
 #endif /* !CONFIG_BPF_JIT */
 
 bool fixup_exception(struct pt_regs *regs);
+bool fixup_exception_me(struct pt_regs *regs);
 #endif
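
The body of fixup_exception_me() lives in arch/arm64/mm/extable.c, which is not part of this excerpt. As a hedged sketch of the intended shape: it resolves the faulting PC against the exception table and fixes up only entries of the new memory-error-safe type, so an unexpected fault anywhere else in kernel code still takes the normal (fatal) path. ex_handler_kaccess_err_zero_mem_err() is the hypothetical helper sketched under asm-extable.h above; both would live in the same file.

/* Sketch only: assumed implementation, not taken from this commit. */
bool fixup_exception_me(struct pt_regs *regs)
{
	const struct exception_table_entry *ex;

	ex = search_exception_tables(instruction_pointer(regs));
	if (!ex)
		return false;

	switch (ex->type) {
	case EX_TYPE_KACCESS_ERR_ZERO_MEM_ERR:
		return ex_handler_kaccess_err_zero_mem_err(ex, regs);
	}

	return false;	/* other extable types are not memory-error safe */
}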

arch/arm64/include/asm/mte.h  (+9)

@@ -98,6 +98,11 @@ static inline bool try_page_mte_tagging(struct page *page)
 void mte_zero_clear_page_tags(void *addr);
 void mte_sync_tags(pte_t pte, unsigned int nr_pages);
 void mte_copy_page_tags(void *kto, const void *kfrom);
+
+#ifdef CONFIG_ARCH_HAS_COPY_MC
+int mte_copy_mc_page_tags(void *kto, const void *kfrom);
+#endif
+
 void mte_thread_init_user(void);
 void mte_thread_switch(struct task_struct *next);
 void mte_cpu_setup(void);
@@ -134,6 +139,10 @@ static inline void mte_sync_tags(pte_t pte, unsigned int nr_pages)
 static inline void mte_copy_page_tags(void *kto, const void *kfrom)
 {
 }
+static inline int mte_copy_mc_page_tags(void *kto, const void *kfrom)
+{
+	return 0;
+}
 static inline void mte_thread_init_user(void)
 {
 }

arch/arm64/include/asm/page.h  (+10)

@@ -29,6 +29,16 @@ void copy_user_highpage(struct page *to, struct page *from,
 void copy_highpage(struct page *to, struct page *from);
 #define __HAVE_ARCH_COPY_HIGHPAGE
 
+#ifdef CONFIG_ARCH_HAS_COPY_MC
+int copy_mc_page(void *to, const void *from);
+int copy_mc_highpage(struct page *to, struct page *from);
+#define __HAVE_ARCH_COPY_MC_HIGHPAGE
+
+int copy_mc_user_highpage(struct page *to, struct page *from,
+		unsigned long vaddr, struct vm_area_struct *vma);
+#define __HAVE_ARCH_COPY_MC_USER_HIGHPAGE
+#endif
+
 struct folio *vma_alloc_zeroed_movable_folio(struct vm_area_struct *vma,
 						unsigned long vaddr);
 #define vma_alloc_zeroed_movable_folio vma_alloc_zeroed_movable_folio
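
The corresponding arch/arm64/mm/copypage.c change is not shown in this excerpt. Below is a minimal sketch of how copy_mc_highpage() could be composed from the pieces declared here (copy_mc_page() plus mte_copy_mc_page_tags()), mirroring the existing copy_highpage(); treat every detail as an assumption rather than the committed code.

#include <linux/mm.h>
#include <linux/errno.h>
#include <asm/mte.h>
#include <asm/page.h>

/* Sketch only: assumed composition of the memory-error-safe highpage copy. */
int copy_mc_highpage(struct page *to, struct page *from)
{
	void *kto = page_address(to);
	void *kfrom = page_address(from);
	int ret;

	ret = copy_mc_page(kto, kfrom);	/* 0 on success, -EFAULT on a memory error */
	if (ret)
		return ret;

	if (system_supports_mte() && page_mte_tagged(from)) {
		/* tag loads can hit the same poisoned lines, so use the MC variant */
		ret = mte_copy_mc_page_tags(kto, kfrom);
		if (ret)
			return -EFAULT;
		set_page_mte_tagged(to);
	}

	return 0;
}

/* copy_mc_user_highpage() would presumably wrap the above and additionally
 * flush the D-cache for the user mapping, as copy_user_highpage() does today.
 */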

arch/arm64/include/asm/string.h  (+5)

@@ -35,6 +35,10 @@ extern void *memchr(const void *, int, __kernel_size_t);
 extern void *memcpy(void *, const void *, __kernel_size_t);
 extern void *__memcpy(void *, const void *, __kernel_size_t);
 
+#define __HAVE_ARCH_MEMCPY_MC
+extern int memcpy_mc(void *, const void *, __kernel_size_t);
+extern int __memcpy_mc(void *, const void *, __kernel_size_t);
+
 #define __HAVE_ARCH_MEMMOVE
 extern void *memmove(void *, const void *, __kernel_size_t);
 extern void *__memmove(void *, const void *, __kernel_size_t);
@@ -57,6 +61,7 @@ void memcpy_flushcache(void *dst, const void *src, size_t cnt);
  */
 
 #define memcpy(dst, src, len) __memcpy(dst, src, len)
+#define memcpy_mc(dst, src, len) __memcpy_mc(dst, src, len)
 #define memmove(dst, src, len) __memmove(dst, src, len)
 #define memset(s, c, n) __memset(s, c, n)
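
Unlike memcpy(), which returns the destination pointer, memcpy_mc() returns an int so that a hardware memory error on the source side can be reported instead of consumed; per the copy_mc_to_kernel() wrapper added later in this commit, the value is the number of bytes not copied (0 on success). A hypothetical caller (the function name is illustrative) simply checks the result:

#include <linux/string.h>
#include <linux/errno.h>

/* Hypothetical caller: fetch a record from memory that may carry hardware
 * poison, failing cleanly instead of taking a fatal abort on the load.
 */
static int fetch_record(void *dst, const void *src, size_t len)
{
	return memcpy_mc(dst, src, len) ? -EFAULT : 0;	/* non-zero: bytes left uncopied */
}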

arch/arm64/include/asm/uaccess.h  (+18)

@@ -542,4 +542,22 @@ static inline void put_user_gcs(unsigned long val, unsigned long __user *addr,
 
 #endif /* CONFIG_ARM64_GCS */
 
+#ifdef CONFIG_ARCH_HAS_COPY_MC
+/**
+ * copy_mc_to_kernel - memory copy that handles source exceptions
+ *
+ * @to: destination address
+ * @from: source address
+ * @size: number of bytes to copy
+ *
+ * Return 0 for success, or bytes not copied.
+ */
+static inline unsigned long __must_check
+copy_mc_to_kernel(void *to, const void *from, unsigned long size)
+{
+	return memcpy_mc(to, from, size);
+}
+#define copy_mc_to_kernel copy_mc_to_kernel
+#endif
+
 #endif /* __ASM_UACCESS_H */
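
A short usage sketch (the helper name is hypothetical): because copy_mc_to_kernel() returns the residue rather than an errno, a caller that can live with a partial copy subtracts it from the requested length, and only a full residue means nothing was readable.

#include <linux/uaccess.h>
#include <linux/errno.h>

/* Hypothetical helper: copy out of a kernel mapping that may be backed by
 * poisoned memory and report how many bytes actually arrived.
 */
static ssize_t read_possibly_poisoned(void *dst, const void *src, size_t len)
{
	unsigned long rem = copy_mc_to_kernel(dst, src, len);

	if (rem == len)
		return -EFAULT;		/* nothing could be copied */

	return len - rem;		/* bytes successfully copied */
}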

arch/arm64/lib/Makefile  (+2)

@@ -13,6 +13,8 @@ endif
 
 lib-$(CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE) += uaccess_flushcache.o
 
+lib-$(CONFIG_ARCH_HAS_COPY_MC) += copy_mc_page.o memcpy_mc.o
+
 obj-$(CONFIG_CRC32) += crc32.o crc32-glue.o
 
 obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o

arch/arm64/lib/copy_mc_page.S  (+37, new file)

@@ -0,0 +1,37 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <linux/linkage.h>
+#include <linux/const.h>
+#include <asm/assembler.h>
+#include <asm/page.h>
+#include <asm/cpufeature.h>
+#include <asm/alternative.h>
+#include <asm/asm-extable.h>
+#include <asm/asm-uaccess.h>
+
+/*
+ * Copy a page from src to dest (both are page aligned) with memory error safe
+ *
+ * Parameters:
+ *	x0 - dest
+ *	x1 - src
+ * Returns:
+ *	x0 - Return 0 if copy success, or -EFAULT if anything goes wrong
+ *	     while copying.
+ */
+	.macro ldp1 reg1, reg2, ptr, val
+	KERNEL_MEM_ERR(9998f, ldp \reg1, \reg2, [\ptr, \val])
+	.endm
+
+SYM_FUNC_START(__pi_copy_mc_page)
+#include "copy_page_template.S"
+
+	mov x0, #0
+	ret
+
+9998:	mov x0, #-EFAULT
+	ret
+
+SYM_FUNC_END(__pi_copy_mc_page)
+SYM_FUNC_ALIAS(copy_mc_page, __pi_copy_mc_page)
+EXPORT_SYMBOL(copy_mc_page)

arch/arm64/lib/copy_page.S  (+5 −57)

@@ -17,65 +17,13 @@
  *	x0 - dest
  *	x1 - src
  */
-SYM_FUNC_START(__pi_copy_page)
-#ifdef CONFIG_AS_HAS_MOPS
-	.arch_extension mops
-alternative_if_not ARM64_HAS_MOPS
-	b	.Lno_mops
-alternative_else_nop_endif
-
-	mov	x2, #PAGE_SIZE
-	cpypwn	[x0]!, [x1]!, x2!
-	cpymwn	[x0]!, [x1]!, x2!
-	cpyewn	[x0]!, [x1]!, x2!
-	ret
-.Lno_mops:
-#endif
-	ldp	x2, x3, [x1]
-	ldp	x4, x5, [x1, #16]
-	ldp	x6, x7, [x1, #32]
-	ldp	x8, x9, [x1, #48]
-	ldp	x10, x11, [x1, #64]
-	ldp	x12, x13, [x1, #80]
-	ldp	x14, x15, [x1, #96]
-	ldp	x16, x17, [x1, #112]
-
-	add	x0, x0, #256
-	add	x1, x1, #128
-1:
-	tst	x0, #(PAGE_SIZE - 1)
 
-	stnp	x2, x3, [x0, #-256]
-	ldp	x2, x3, [x1]
-	stnp	x4, x5, [x0, #16 - 256]
-	ldp	x4, x5, [x1, #16]
-	stnp	x6, x7, [x0, #32 - 256]
-	ldp	x6, x7, [x1, #32]
-	stnp	x8, x9, [x0, #48 - 256]
-	ldp	x8, x9, [x1, #48]
-	stnp	x10, x11, [x0, #64 - 256]
-	ldp	x10, x11, [x1, #64]
-	stnp	x12, x13, [x0, #80 - 256]
-	ldp	x12, x13, [x1, #80]
-	stnp	x14, x15, [x0, #96 - 256]
-	ldp	x14, x15, [x1, #96]
-	stnp	x16, x17, [x0, #112 - 256]
-	ldp	x16, x17, [x1, #112]
-
-	add	x0, x0, #128
-	add	x1, x1, #128
-
-	b.ne	1b
-
-	stnp	x2, x3, [x0, #-256]
-	stnp	x4, x5, [x0, #16 - 256]
-	stnp	x6, x7, [x0, #32 - 256]
-	stnp	x8, x9, [x0, #48 - 256]
-	stnp	x10, x11, [x0, #64 - 256]
-	stnp	x12, x13, [x0, #80 - 256]
-	stnp	x14, x15, [x0, #96 - 256]
-	stnp	x16, x17, [x0, #112 - 256]
+	.macro ldp1 reg1, reg2, ptr, val
+	ldp	\reg1, \reg2, [\ptr, \val]
+	.endm
 
+SYM_FUNC_START(__pi_copy_page)
+#include "copy_page_template.S"
 	ret
 SYM_FUNC_END(__pi_copy_page)
 SYM_FUNC_ALIAS(copy_page, __pi_copy_page)

arch/arm64/lib/copy_page_template.S  (+70, new file)

@@ -0,0 +1,70 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * Copyright (C) 2012 ARM Ltd.
+ */
+
+/*
+ * Copy a page from src to dest (both are page aligned)
+ *
+ * Parameters:
+ *	x0 - dest
+ *	x1 - src
+ */
+
+#ifdef CONFIG_AS_HAS_MOPS
+	.arch_extension mops
+alternative_if_not ARM64_HAS_MOPS
+	b	.Lno_mops
+alternative_else_nop_endif
+
+	mov	x2, #PAGE_SIZE
+	cpypwn	[x0]!, [x1]!, x2!
+	cpymwn	[x0]!, [x1]!, x2!
+	cpyewn	[x0]!, [x1]!, x2!
+	ret
+.Lno_mops:
+#endif
+	ldp1	x2, x3, x1, #0
+	ldp1	x4, x5, x1, #16
+	ldp1	x6, x7, x1, #32
+	ldp1	x8, x9, x1, #48
+	ldp1	x10, x11, x1, #64
+	ldp1	x12, x13, x1, #80
+	ldp1	x14, x15, x1, #96
+	ldp1	x16, x17, x1, #112
+
+	add	x0, x0, #256
+	add	x1, x1, #128
+1:
+	tst	x0, #(PAGE_SIZE - 1)
+
+	stnp	x2, x3, [x0, #-256]
+	ldp1	x2, x3, x1, #0
+	stnp	x4, x5, [x0, #16 - 256]
+	ldp1	x4, x5, x1, #16
+	stnp	x6, x7, [x0, #32 - 256]
+	ldp1	x6, x7, x1, #32
+	stnp	x8, x9, [x0, #48 - 256]
+	ldp1	x8, x9, x1, #48
+	stnp	x10, x11, [x0, #64 - 256]
+	ldp1	x10, x11, x1, #64
+	stnp	x12, x13, [x0, #80 - 256]
+	ldp1	x12, x13, x1, #80
+	stnp	x14, x15, [x0, #96 - 256]
+	ldp1	x14, x15, x1, #96
+	stnp	x16, x17, [x0, #112 - 256]
+	ldp1	x16, x17, x1, #112
+
+	add	x0, x0, #128
+	add	x1, x1, #128
+
+	b.ne	1b
+
+	stnp	x2, x3, [x0, #-256]
+	stnp	x4, x5, [x0, #16 - 256]
+	stnp	x6, x7, [x0, #32 - 256]
+	stnp	x8, x9, [x0, #48 - 256]
+	stnp	x10, x11, [x0, #64 - 256]
+	stnp	x12, x13, [x0, #80 - 256]
+	stnp	x14, x15, [x0, #96 - 256]
+	stnp	x16, x17, [x0, #112 - 256]

arch/arm64/lib/copy_to_user.S  (+5 −5)

@@ -20,31 +20,31 @@
  *	x0 - bytes not copied
  */
 	.macro ldrb1 reg, ptr, val
-	ldrb  \reg, [\ptr], \val
+	KERNEL_MEM_ERR(9998f, ldrb \reg, [\ptr], \val)
 	.endm
 
 	.macro strb1 reg, ptr, val
 	user_ldst 9998f, sttrb, \reg, \ptr, \val
 	.endm
 
 	.macro ldrh1 reg, ptr, val
-	ldrh  \reg, [\ptr], \val
+	KERNEL_MEM_ERR(9998f, ldrh \reg, [\ptr], \val)
 	.endm
 
 	.macro strh1 reg, ptr, val
 	user_ldst 9997f, sttrh, \reg, \ptr, \val
 	.endm
 
 	.macro ldr1 reg, ptr, val
-	ldr \reg, [\ptr], \val
+	KERNEL_MEM_ERR(9998f, ldr \reg, [\ptr], \val)
 	.endm
 
 	.macro str1 reg, ptr, val
 	user_ldst 9997f, sttr, \reg, \ptr, \val
 	.endm
 
 	.macro ldp1 reg1, reg2, ptr, val
-	ldp \reg1, \reg2, [\ptr], \val
+	KERNEL_MEM_ERR(9998f, ldp \reg1, \reg2, [\ptr], \val)
 	.endm
 
 	.macro stp1 reg1, reg2, ptr, val
@@ -64,7 +64,7 @@ SYM_FUNC_START(__arch_copy_to_user)
 9997:	cmp	dst, dstin
 	b.ne	9998f
 	// Before being absolutely sure we couldn't copy anything, try harder
-	ldrb	tmp1w, [srcin]
+	KERNEL_MEM_ERR(9998f, ldrb tmp1w, [srcin])
 	USER(9998f, sttrb tmp1w, [dst])
 	add	dst, dst, #1
 9998:	sub	x0, end, dst			// bytes not copied
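
The effect of wrapping the kernel-side loads in KERNEL_MEM_ERR is visible only at the call sites: together with the fixup_exception_me() plumbing declared above, a memory error while reading the source buffer surfaces as a short copy instead of a fatal abort, so existing callers need no change. Illustrative caller, not from this commit:

#include <linux/uaccess.h>
#include <linux/errno.h>

/* With this series, a poisoned kernel source page behaves like a faulting
 * user destination: copy_to_user() returns the bytes left uncopied and the
 * caller's usual -EFAULT path handles it.
 */
static ssize_t push_to_user(char __user *ubuf, const char *kbuf, size_t len)
{
	if (copy_to_user(ubuf, kbuf, len))
		return -EFAULT;
	return len;
}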
