/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#ifndef _ARM64_AMCC_RORGN_H_
#define _ARM64_AMCC_RORGN_H_

#include <sys/cdefs.h>

__BEGIN_DECLS

#if defined(KERNEL_INTEGRITY_KTRR) || defined(KERNEL_INTEGRITY_CTRR)
#include <stdbool.h>

#include <libkern/section_keywords.h>

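/*
 * Discover and stash the bounds of the read-only (KTRR/CTRR) region early in
 * boot so they can be applied at lockdown time.
 */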
void rorgn_stash_range(void);
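/*
 * Program and lock the KTRR/CTRR protections for the stashed read-only
 * region; once locked, the covered range cannot be reconfigured until reset.
 */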
void rorgn_lockdown(void);
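/*
 * Return whether the range [addr, addr + size) falls within the read-only
 * region. defval is the result to fall back on when the region bounds are
 * not available (for instance, before rorgn_stash_range() has run).
 */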
bool rorgn_contains(vm_offset_t addr, vm_size_t size, bool defval);
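/*
 * Verify that the calling CPU's KTRR/CTRR registers match the expected
 * locked-down configuration.
 */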
void rorgn_validate_core(void);
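
/*
 * Illustrative (hypothetical) use of the interfaces above, for documentation
 * only; see the corresponding amcc_rorgn.c for the actual callers:
 *
 *     rorgn_stash_range();      // early boot: record the RO region bounds
 *     ...
 *     rorgn_lockdown();         // machine lockdown: lock the region
 *
 *     if (rorgn_contains(addr, size, false)) {
 *         // range overlaps the locked-down region
 *     }
 */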

/* Bounds of the CTRR/KTRR-protected read-only region stashed at boot. */
extern vm_offset_t ctrr_begin, ctrr_end;
#if CONFIG_CSR_FROM_DT
/* Set when the CSR (SIP) configuration from the device tree permits unsafe kernel text. */
extern bool csr_unsafe_kernel_text;
#endif /* CONFIG_CSR_FROM_DT */

#if KERNEL_CTRR_VERSION >= 3
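/*
 * Execute-never (XN) policy masks for the CTXR (CTRR version 3) control
 * register. Each field selects whether execution is disallowed inside or
 * outside the protected region for one translation regime.
 *
 * CTXR_XN_DISALLOW_ALL disallows execution of the region at every exception
 * level.
 */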
#define CTXR_XN_DISALLOW_ALL ( \
    /* Execute Masks for EL2&0 */ \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL2_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL0TGE1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL2_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL0TGE1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_MMUOFF_shift) | \
    /* Execute Masks for EL1&0 when Stage2 Translation is disabled */ \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL0TGE0_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL0TGE0_shift))

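/*
 * CTXR_XN_KERNEL restricts EL2/GL2 (and MMU-off) execution to the region
 * while keeping the region non-executable from EL1 and EL0.
 */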
#define CTXR_XN_KERNEL ( \
    /* Execute Masks for EL2&0 */ \
    (CTXR3_XN_disallow_outside << CTXR3_x_CTL_EL2_XN_EL2_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL0TGE1_shift) | \
    (CTXR3_XN_disallow_outside << CTXR3_x_CTL_EL2_XN_GL2_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL0TGE1_shift) | \
    (CTXR3_XN_disallow_outside << CTXR3_x_CTL_EL2_XN_MMUOFF_shift) | \
    /* Execute Masks for EL1&0 when Stage2 Translation is disabled */ \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_EL0TGE0_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL1_shift) | \
    (CTXR3_XN_disallow_inside << CTXR3_x_CTL_EL2_XN_GL0TGE0_shift))
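
/*
 * A consumer would typically OR one of the masks above into the CTXR control
 * register value during lockdown; the exact programming sequence lives in the
 * lockdown implementation (illustrative note only).
 */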
#endif /* KERNEL_CTRR_VERSION >= 3 */

#endif /* defined(KERNEL_INTEGRITY_KTRR) || defined(KERNEL_INTEGRITY_CTRR) */

__END_DECLS

#endif /* _ARM64_AMCC_RORGN_H_ */