/* SPDX-License-Identifier: GPL-2.0 */
/*
* Copyright (C) 2020-2022 Loongson Technology Corporation Limited
*/
#include <linux/export.h>
#include <asm/alternative-asm.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/cpu.h>
#include <asm/regdef.h>
#include <asm/unwind_hints.h>
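/*
* fill_to_64: replicate the low byte of \r0 across all 8 bytes by
* doubling the filled width with each bstrins.d (8 -> 16 -> 32 -> 64 bits).
*/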
.macro fill_to_64 r0
bstrins.d \r0, \r0, 15, 8
bstrins.d \r0, \r0, 31, 16
bstrins.d \r0, \r0, 63, 32
.endm
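/* placed in .noinstr.text so memset() can be called from noinstr code */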
.section .noinstr.text, "ax"
SYM_FUNC_START(memset)
/*
* Some CPUs support hardware unaligned access; the ALTERNATIVE below is
* patched at boot into a single branch to whichever implementation
* matches the CPU (CPU_FEATURE_UAL set -> __memset_fast).
*/
ALTERNATIVE "b __memset_generic", \
"b __memset_fast", CPU_FEATURE_UAL
SYM_FUNC_END(memset)
SYM_FUNC_ALIAS(__memset, memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)
_ASM_NOKPROBE(memset)
_ASM_NOKPROBE(__memset)
/*
* void *__memset_generic(void *s, int c, size_t n)
*
* a0: s
* a1: c
* a2: n
*/
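/* byte-at-a-time fallback; makes no alignment assumptions */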
SYM_FUNC_START(__memset_generic)
move a3, a0
beqz a2, 2f
1: st.b a1, a0, 0
addi.d a0, a0, 1
addi.d a2, a2, -1
bgt a2, zero, 1b
2: move a0, a3
jr ra
SYM_FUNC_END(__memset_generic)
_ASM_NOKPROBE(__memset_generic)
/*
* void *__memset_fast(void *s, int c, size_t n)
*
* a0: s
* a1: c
* a2: n
*/
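/* selected by the ALTERNATIVE in memset() when the CPU supports hardware unaligned access (CPU_FEATURE_UAL) */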
SYM_FUNC_START(__memset_fast)
/* fill a1 to 64 bits */
fill_to_64 a1
sltui t0, a2, 9
bnez t0, .Lsmall
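/*
* n >= 9: turn a2 into the end pointer, store the first 8 bytes at the
* (possibly unaligned) start, then round a3 up to the next 8-byte
* boundary so the bulk loops use aligned addresses. Overlapping stores
* are harmless because every byte written is the same.
*/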
add.d a2, a0, a2
st.d a1, a0, 0
/* align up address */
addi.d a3, a0, 8
bstrins.d a3, zero, 2, 0
addi.d a4, a2, -64
bgeu a3, a4, .Llt64
/* set 64 bytes at a time */
.Lloop64:
st.d a1, a3, 0
st.d a1, a3, 8
st.d a1, a3, 16
st.d a1, a3, 24
st.d a1, a3, 32
st.d a1, a3, 40
st.d a1, a3, 48
st.d a1, a3, 56
addi.d a3, a3, 64
bltu a3, a4, .Lloop64
/* set the remaining bytes */
.Llt64:
addi.d a4, a2, -32
bgeu a3, a4, .Llt32
st.d a1, a3, 0
st.d a1, a3, 8
st.d a1, a3, 16
st.d a1, a3, 24
addi.d a3, a3, 32
.Llt32:
addi.d a4, a2, -16
bgeu a3, a4, .Llt16
st.d a1, a3, 0
st.d a1, a3, 8
addi.d a3, a3, 16
.Llt16:
addi.d a4, a2, -8
bgeu a3, a4, .Llt8
st.d a1, a3, 0
.Llt8:
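/* store the final 8 bytes ending exactly at s + n (may overlap earlier stores) */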
st.d a1, a2, -8
/* return */
jr ra
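/*
* n <= 8: jump-table dispatch. pcaddi loads the address 16 bytes past
* .Lsmall, which is the entry for n == 0; every entry below is padded
* to 16 bytes by .align 4, and a2 (= n) is scaled by 16, so "jr t0"
* lands on the entry that stores exactly n bytes.
*/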
.align 4
.Lsmall:
pcaddi t0, 4
slli.d a2, a2, 4
add.d t0, t0, a2
jr t0
.align 4
0: jr ra
.align 4
1: st.b a1, a0, 0
jr ra
.align 4
2: st.h a1, a0, 0
jr ra
.align 4
3: st.h a1, a0, 0
st.b a1, a0, 2
jr ra
.align 4
4: st.w a1, a0, 0
jr ra
.align 4
5: st.w a1, a0, 0
st.b a1, a0, 4
jr ra
.align 4
6: st.w a1, a0, 0
st.h a1, a0, 4
jr ra
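/* 7 bytes: two overlapping word stores (offsets 0 and 3) cover bytes 0-6 */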
.align 4
7: st.w a1, a0, 0
st.w a1, a0, 3
jr ra
.align 4
8: st.d a1, a0, 0
jr ra
SYM_FUNC_END(__memset_fast)
_ASM_NOKPROBE(__memset_fast)
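/*
* The indirect branch through the .Lsmall jump table cannot be followed
* by objtool, so exempt __memset_fast from stack validation.
*/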
STACK_FRAME_NON_STANDARD __memset_fast