1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in
12 * the documentation and/or other materials provided with the
13 * distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
16 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
17 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
18 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
19 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
22 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
25 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29 #include <private/bionic_ifuncs.h>
30 #include <stddef.h>
31
// Returns true when running on a Qualcomm Oryon core (variants 0x1..0x5).
// Used below to select the non-temporal memcpy/memmove/memset variants.
static inline bool __bionic_is_oryon(unsigned long hwcap) {
  // Reading MIDR_EL1 from userspace only works when the kernel advertises
  // HWCAP_CPUID (it traps and emulates the access — see the link below).
  if (!(hwcap & HWCAP_CPUID)) return false;

  // Extract the implementor and variant bits from MIDR_EL1.
  // https://www.kernel.org/doc/html/latest/arch/arm64/cpu-feature-registers.html#list-of-registers-with-visible-features
  unsigned long midr;
  __asm__ __volatile__("mrs %0, MIDR_EL1" : "=r"(midr));
  // MIDR_EL1[31:24] is the implementer and [23:20] the variant, so this
  // leaves (implementer << 4) | variant in the low 12 bits of `cpu`.
  uint16_t cpu = (midr >> 20) & 0xfff;

  auto make_cpu = [](unsigned implementor, unsigned variant) {
    return (implementor << 4) | variant;
  };

  // Check for implementor Qualcomm's variants 0x1..0x5 (Oryon).
  // ('Q' == 0x51, Qualcomm's MIDR implementer code.)
  return cpu >= make_cpu('Q', 0x1) && cpu <= make_cpu('Q', 0x5);
}
48
49 extern "C" {
50
DEFINE_IFUNC_FOR(memchr)51 DEFINE_IFUNC_FOR(memchr) {
52 if (arg->_hwcap2 & HWCAP2_MTE) {
53 RETURN_FUNC(memchr_func_t, __memchr_aarch64_mte);
54 } else {
55 RETURN_FUNC(memchr_func_t, __memchr_aarch64);
56 }
57 }
58 MEMCHR_SHIM()
59
// Load-time selector for memcmp: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(memcmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(memcmp_func_t, __memcmp_aarch64);
}
MEMCMP_SHIM()
65
DEFINE_IFUNC_FOR(memcpy)66 DEFINE_IFUNC_FOR(memcpy) {
67 if (arg->_hwcap2 & HWCAP2_MOPS) {
68 RETURN_FUNC(memcpy_func_t, __memmove_aarch64_mops);
69 } else if (__bionic_is_oryon(arg->_hwcap)) {
70 RETURN_FUNC(memcpy_func_t, __memcpy_aarch64_nt);
71 } else if (arg->_hwcap & HWCAP_ASIMD) {
72 RETURN_FUNC(memcpy_func_t, __memcpy_aarch64_simd);
73 } else {
74 RETURN_FUNC(memcpy_func_t, __memcpy_aarch64);
75 }
76 }
77 MEMCPY_SHIM()
78
DEFINE_IFUNC_FOR(memmove)79 DEFINE_IFUNC_FOR(memmove) {
80 if (arg->_hwcap2 & HWCAP2_MOPS) {
81 RETURN_FUNC(memmove_func_t, __memmove_aarch64_mops);
82 } else if (__bionic_is_oryon(arg->_hwcap)) {
83 RETURN_FUNC(memmove_func_t, __memmove_aarch64_nt);
84 } else if (arg->_hwcap & HWCAP_ASIMD) {
85 RETURN_FUNC(memmove_func_t, __memmove_aarch64_simd);
86 } else {
87 RETURN_FUNC(memmove_func_t, __memmove_aarch64);
88 }
89 }
90 MEMMOVE_SHIM()
91
// Load-time selector for memrchr: single implementation, no dispatch.
DEFINE_IFUNC_FOR(memrchr) {
  RETURN_FUNC(memrchr_func_t, __memrchr_aarch64);
}
MEMRCHR_SHIM()
96
DEFINE_IFUNC_FOR(memset)97 DEFINE_IFUNC_FOR(memset) {
98 if (arg->_hwcap2 & HWCAP2_MOPS) {
99 RETURN_FUNC(memset_func_t, __memset_aarch64_mops);
100 } else if (__bionic_is_oryon(arg->_hwcap)) {
101 RETURN_FUNC(memset_func_t, __memset_aarch64_nt);
102 } else {
103 RETURN_FUNC(memset_func_t, __memset_aarch64);
104 }
105 }
106 MEMSET_SHIM()
107
// Load-time selector for stpcpy: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(stpcpy) {
  // TODO: enable the SVE version.
  RETURN_FUNC(stpcpy_func_t, __stpcpy_aarch64);
}
STPCPY_SHIM()
113
DEFINE_IFUNC_FOR(strchr)114 DEFINE_IFUNC_FOR(strchr) {
115 if (arg->_hwcap2 & HWCAP2_MTE) {
116 RETURN_FUNC(strchr_func_t, __strchr_aarch64_mte);
117 } else {
118 RETURN_FUNC(strchr_func_t, __strchr_aarch64);
119 }
120 }
121 STRCHR_SHIM()
122
DEFINE_IFUNC_FOR(strchrnul)123 DEFINE_IFUNC_FOR(strchrnul) {
124 if (arg->_hwcap2 & HWCAP2_MTE) {
125 RETURN_FUNC(strchrnul_func_t, __strchrnul_aarch64_mte);
126 } else {
127 RETURN_FUNC(strchrnul_func_t, __strchrnul_aarch64);
128 }
129 }
130 STRCHRNUL_SHIM()
131
// Load-time selector for strcmp: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(strcmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strcmp_func_t, __strcmp_aarch64);
}
STRCMP_SHIM()
137
// Load-time selector for strcpy: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(strcpy) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strcpy_func_t, __strcpy_aarch64);
}
STRCPY_SHIM()
143
DEFINE_IFUNC_FOR(strlen)144 DEFINE_IFUNC_FOR(strlen) {
145 if (arg->_hwcap2 & HWCAP2_MTE) {
146 RETURN_FUNC(strlen_func_t, __strlen_aarch64_mte);
147 } else {
148 RETURN_FUNC(strlen_func_t, __strlen_aarch64);
149 }
150 }
151 STRLEN_SHIM()
152
// Load-time selector for strncmp: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(strncmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strncmp_func_t, __strncmp_aarch64);
}
STRNCMP_SHIM()
158
// Load-time selector for strnlen: only the generic AArch64 version for now.
DEFINE_IFUNC_FOR(strnlen) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strnlen_func_t, __strnlen_aarch64);
}
STRNLEN_SHIM()
164
DEFINE_IFUNC_FOR(strrchr)165 DEFINE_IFUNC_FOR(strrchr) {
166 if (arg->_hwcap2 & HWCAP2_MTE) {
167 RETURN_FUNC(strrchr_func_t, __strrchr_aarch64_mte);
168 } else {
169 RETURN_FUNC(strrchr_func_t, __strrchr_aarch64);
170 }
171 }
172 STRRCHR_SHIM()
173
174 } // extern "C"
175