/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2018, Matthew Macy
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
* * $FreeBSD$ */ #include "assym.inc" #include "opt_sched.h" #include #include #include #include #include #include #ifdef _CALL_ELF .abiversion _CALL_ELF #endif #ifdef __powerpc64__ #define LOAD ld #define STORE std #define COMPARE cmpdi #define WORD 8 /* log_2(8 * WORD) */ #define LOOP_LOG 6 #else #define LOAD lwz #define STORE stw #define WORD 4 #define COMPARE cmpwi /* log_2(8 * WORD) */ #define LOOP_LOG 5 #endif #define ENTRY_DIRECT(x) ENTRY(x ## _direct) #ifdef __powerpc64__ #define PROLOGUE ;\ mflr %r0 ;\ std %r0, 16(%r1) ;\ #define EPILOGUE ;\ ld %r0, 16(%r1) ;\ mtlr %r0 ;\ blr ;\ nop #define VALIDATE_ADDR_COPY(raddr) \ srdi %r0, raddr, 52 ;\ cmpwi %r0, 1 ;\ bge- copy_fault ;\ nop #define VALIDATE_ADDR_FUSU(raddr) ;\ srdi %r0, raddr, 52 ;\ cmpwi %r0, 1 ;\ bge- fusufault ;\ nop #else #define PROLOGUE ;\ mflr %r0 ;\ stw %r0, 4(%r1) ;\ #define EPILOGUE ;\ lwz %r0, 4(%r1) ;\ mtlr %r0 ;\ blr ;\ nop #define VALIDATE_ADDR_COPY(raddr) \ mtcrf 0x80, raddr ;\ bt- 0, copy_fault ;\ nop #define VALIDATE_ADDR_FUSU(raddr) ;\ mtcrf 0x80, raddr ;\ bt- 0, fusufault ;\ nop #endif #define PCPU(reg) mfsprg reg, 0 #define SET_COPYFAULT(raddr, rpcb) \ VALIDATE_ADDR_COPY(raddr) ;\ PCPU(%r9) ;\ li %r0, COPYFAULT ;\ LOAD rpcb, PC_CURPCB(%r9) ;\ STORE %r0, PCB_ONFAULT(rpcb) ;\ #define SET_FUSUFAULT(raddr, rpcb) \ VALIDATE_ADDR_FUSU(raddr) ;\ PCPU(%r9) ;\ li %r0, FUSUFAULT ;\ LOAD rpcb, PC_CURPCB(%r9) ;\ STORE %r0, PCB_ONFAULT(rpcb) ;\ #define CLEAR_FAULT_NO_CLOBBER(rpcb) \ PCPU(%r9) ;\ LOAD rpcb, PC_CURPCB(%r9) ;\ li %r0, 0 ;\ STORE %r0, PCB_ONFAULT(rpcb) #define CLEAR_FAULT(rpcb) \ CLEAR_FAULT_NO_CLOBBER(rpcb) ;\ li %r3, 0 /* * bcopy(src, dst, len) * %r3 %r4 %r5 * * %r7 is the pcb pointer * * %r0 and %r8-%r10 are volatile * * */ #define rs %r3 #define rd %r4 #define rl %r5 #define t1 %r6 #define t2 %r7 #define t3 %r8 #define t4 %r9 #define t5 %r10 #define t6 %r11 #define t7 %r12 #define t8 %r0 #define Thresh 64 .text ENTRY(bcopy_generic) cmplwi 0, %r5, 0 beq .Lend dcbtst 0, rd dcbt 
0, rs cmplwi rl, Thresh blt .Lsmall b .Llarge /* memcpy */ /* ... */ .Lsmall: mtcrf 0x3, rl // load LSB 8 bits .Lsmall64: bf 26, .Lsmall32 // <32 bytes remain LOAD t1, 0(rs) LOAD t2, WORD*1(rs) LOAD t3, WORD*2(rs) LOAD t4, WORD*3(rs) #ifndef __powerpc64__ LOAD t5, WORD*4(rs) LOAD t6, WORD*5(rs) LOAD t7, WORD*6(rs) LOAD t8, WORD*7(rs) #endif addi rs, rs, 32 STORE t1, 0(rd) STORE t2, WORD*1(rd) STORE t3, WORD*2(rd) STORE t4, WORD*3(rd) #ifndef __powerpc64__ STORE t5, WORD*4(rd) STORE t6, WORD*5(rd) STORE t7, WORD*6(rd) STORE t8, WORD*7(rd) #endif addi rd, rd, 32 .Lsmall32: bf 27, 1f // <16 bytes remain LOAD t1, 0(rs) LOAD t2, WORD*1(rs) #ifndef __powerpc64__ LOAD t3, WORD*2(rs) LOAD t4, WORD*3(rs) #endif addi rs, rs, 16 STORE t1, 0(rd) STORE t2, WORD*1(rd) #ifndef __powerpc64__ STORE t3, WORD*2(rd) STORE t4, WORD*3(rd) #endif addi rd, rd, 16 1: bf 28, 2f // <8 bytes remain LOAD t1, 0(rs) #ifndef __powerpc64__ LOAD t2, WORD(rs) #endif addi rs, rs, 8 STORE t1, 0(rd) #ifndef __powerpc64__ STORE t2, WORD(rd) #endif addi rd, rd, 8 2: bf 29, 3f // < 4 bytes remain lwz t1, 0(rs) addi rs, rs, 4 stw t1, 0(rd) addi rd, rd, 4 3: bf 30, 4f // < 2 bytes remain lhz t1, 0(rs) addi rs, rs, 2 sth t1, 0(rd) addi rd, rd, 2 4: bf 31, .Lout // 0 bytes remain lbz t1, 0(rs) addi rs, rs, 1 stb t1, 0(rd) addi rd, rd, 1 b .Lout .align 4 .Llarge: // Preamble - byte copy until dest dword aligned. neg t3, rd // NOT(rd) + 1 andi. t6, t3, 0x7 mtctr t6 sub rl, rl, t6 beq+ .Llargealigned // Already aligned. 1: lbz t1, 0(rs) addi rs, rs, 1 stb t1, 0(rd) addi rd, rd, 1 bdnz 1b .Llargealigned: srwi. 
t2, rl, LOOP_LOG /* length >> log_2(loop_size) => 64B (32B) iterations */ mtcrf 0x3, rl #ifndef __powerpc64__ beq .Lsmall32 #else beq .Lsmall64 #endif mtctr t2 /* 64 byte (32 byte) blocks */ b 1f .align 5 1: LOAD t1, 0(rs) LOAD t2, WORD(rs) LOAD t3, WORD*2(rs) LOAD t4, WORD*3(rs) LOAD t5, WORD*4(rs) LOAD t6, WORD*5(rs) LOAD t7, WORD*6(rs) LOAD t8, WORD*7(rs) addi rs, rs, WORD*8 STORE t1, 0(rd) STORE t2, WORD*1(rd) STORE t3, WORD*2(rd) STORE t4, WORD*3(rd) STORE t5, WORD*4(rd) STORE t6, WORD*5(rd) STORE t7, WORD*6(rd) STORE t8, WORD*7(rd) addi rd, rd, WORD*8 bdnz 1b #ifndef __powerpc64__ b .Lsmall32 #else b .Lsmall64 #endif .Lout: /* done */ .Lend: blr ENTRY(pagezero) li %r0, PAGE_SIZE/512 mtctr %r0 li %r4, 128 li %r5, 256 li %r6, 384 0: dcbz 0, %r3 dcbz %r4, %r3 dcbz %r5, %r3 dcbz %r6, %r3 addi %r3, %r3, 512 bdnz+ 0b blr /* * copyout(from_kernel, to_user, len) * %r3, %r4, %r5 */ ENTRY_DIRECT(copyout) PROLOGUE SET_COPYFAULT(%r4, %r7) bl bcopy_generic nop CLEAR_FAULT(%r7) EPILOGUE ENTRY_DIRECT(copyin) PROLOGUE SET_COPYFAULT(%r3, %r7) bl bcopy_generic nop CLEAR_FAULT(%r7) EPILOGUE /* * copyinstr(const void *udaddr, void *kaddr, size_t len, size_t *done) * %r3 %r4 %r5 %r6 * */ ENTRY_DIRECT(copyinstr) PROLOGUE SET_COPYFAULT(%r3, %r7) addi %r9, %r5, 1 mtctr %r9 mr %r8, %r3 addi %r8, %r8, -1 addi %r4, %r4, -1 li %r3, ENAMETOOLONG 0: bdz- 2f lbzu %r0, 1(%r8) stbu %r0, 1(%r4) // NULL byte reached ? 
COMPARE %r0, 0 beq- 1f b 0b 1: li %r3, 0 2: /* skip storing length if done is NULL */ COMPARE %r6, 0 beq- 3f mfctr %r0 sub %r0, %r9, %r0 STORE %r0, 0(%r6) 3: CLEAR_FAULT_NO_CLOBBER(%r7) EPILOGUE ENTRY_DIRECT(subyte) PROLOGUE SET_FUSUFAULT(%r3, %r7) stb %r4, 0(%r3) CLEAR_FAULT(%r7) EPILOGUE #ifndef __powerpc64__ ENTRY_DIRECT(suword) PROLOGUE SET_FUSUFAULT(%r3, %r7) stw %r4, 0(%r3) CLEAR_FAULT(%r7) EPILOGUE #endif ENTRY_DIRECT(suword32) PROLOGUE SET_FUSUFAULT(%r3, %r7) stw %r4, 0(%r3) CLEAR_FAULT(%r7) EPILOGUE #ifdef __powerpc64__ ENTRY_DIRECT(suword64) PROLOGUE SET_FUSUFAULT(%r3, %r7) std %r4, 0(%r3) CLEAR_FAULT(%r7) EPILOGUE ENTRY_DIRECT(suword) PROLOGUE SET_FUSUFAULT(%r3, %r7) std %r4, 0(%r3) CLEAR_FAULT(%r7) EPILOGUE #endif ENTRY_DIRECT(fubyte) PROLOGUE SET_FUSUFAULT(%r3, %r7) lbz %r3, 0(%r3) CLEAR_FAULT_NO_CLOBBER(%r7) EPILOGUE ENTRY_DIRECT(fuword16) PROLOGUE SET_FUSUFAULT(%r3, %r7) lhz %r3, 0(%r3) CLEAR_FAULT_NO_CLOBBER(%r7) EPILOGUE #ifndef __powerpc64__ ENTRY_DIRECT(fueword) PROLOGUE SET_FUSUFAULT(%r3, %r7) lwz %r0, 0(%r3) stw %r0, 0(%r4) CLEAR_FAULT(%r7) EPILOGUE #endif ENTRY_DIRECT(fueword32) PROLOGUE SET_FUSUFAULT(%r3, %r7) lwz %r0, 0(%r3) stw %r0, 0(%r4) CLEAR_FAULT(%r7) EPILOGUE #ifdef __powerpc64__ ENTRY_DIRECT(fueword) PROLOGUE SET_FUSUFAULT(%r3, %r7) ld %r0, 0(%r3) std %r0, 0(%r4) CLEAR_FAULT(%r7) EPILOGUE ENTRY_DIRECT(fueword64) PROLOGUE SET_FUSUFAULT(%r3, %r7) ld %r0, 0(%r3) std %r0, 0(%r4) CLEAR_FAULT(%r7) EPILOGUE #endif /* * casueword(volatile u_long *base, u_long old, u_long *oldp, u_long new) * %r3 %r4 %r5 %r6 */ #define CASUEWORD32(raddr, rpcb) ;\ PROLOGUE ;\ SET_FUSUFAULT(raddr, rpcb) ;\ li %r8, 0 ;\ 1: ;\ lwarx %r0, 0, %r3 ;\ cmplw %r4, %r0 ;\ bne 2f ;\ stwcx. %r6, 0, %r3 ;\ bne- 3f ;\ b 4f ;\ 2: ;\ stwcx. 
%r0, 0, %r3 /* clear reservation (74xx) */ ;\ 3: ;\ li %r8, 1 ;\ 4: ;\ stw %r0, 0(%r5) ;\ CLEAR_FAULT_NO_CLOBBER(rpcb) ;\ mr %r3, %r8 ;\ EPILOGUE ENTRY_DIRECT(casueword32) CASUEWORD32(%r3, %r7) #ifdef __powerpc64__ #define CASUEWORD64(raddr, rpcb) ;\ PROLOGUE ;\ SET_FUSUFAULT(raddr, rpcb) ;\ li %r8, 0 ;\ 1: ;\ ldarx %r0, 0, %r3 ;\ cmpld %r4, %r0 ;\ bne 2f ;\ stdcx. %r6, 0, %r3 ;\ bne- 3f ;\ b 4f ;\ 2: ;\ stdcx. %r0, 0, %r3 /* clear reservation (74xx) */ ;\ 3: ;\ li %r8, 1 ;\ 4: ;\ std %r0, 0(%r5) ;\ CLEAR_FAULT_NO_CLOBBER(rpcb) ;\ mr %r3, %r8 ;\ EPILOGUE ENTRY_DIRECT(casueword) CASUEWORD64(%r3, %r7) ENTRY_DIRECT(casueword64) CASUEWORD64(%r3, %r7) #else ENTRY_DIRECT(casueword) CASUEWORD32(%r3, %r7) #endif ENTRY(fusufault) CLEAR_FAULT_NO_CLOBBER(%r7) li %r3, -1 EPILOGUE ENTRY(copy_fault) CLEAR_FAULT_NO_CLOBBER(%r7) li %r3, EFAULT EPILOGUE