From: Kenn H. <ke...@us...> - 2003-06-15 22:33:59
Update of /cvsroot/linux-vax/kernel-2.5/arch/vax/lib
In directory sc8-pr-cvs1:/tmp/cvs-serv14457/arch/vax/lib

Modified Files:
	checksum.S clear_user.S copy_tofrom_user.S string.c strncpy_user.S strnlen_user.S
Log Message:
Switch over to % prefix on register names (required for newer toolchain)

Index: checksum.S
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/checksum.S,v
retrieving revision 1.4
retrieving revision 1.5
diff -u -r1.4 -r1.5
--- checksum.S	20 May 2002 00:33:33 -0000	1.4
+++ checksum.S	15 Jun 2003 22:33:56 -0000	1.5
@@ -46,9 +46,9 @@
 	.text
 ENTRY(csum_partial)
 	.word 0x3e
-	movl 4(ap), r2		/* r2 now has buf */
-	movl 8(ap), r3		/* r3 has len */
-	movl 12(ap), r0		/* r4 has sum */
+	movl 4(%ap), %r2	/* r2 now has buf */
+	movl 8(%ap), %r3	/* r3 has len */
+	movl 12(%ap), %r0	/* r4 has sum */

 /* test stuff */
 # Check Alignment
@@ -56,54 +56,54 @@
 # Alignment uses up two bytes
 # Jump if we have two bytes
 # something < 2 deal with it
-	bbc $1, r2, 2f		# Check is bit 1 , jump if clear
-	subl2 $2, r3		# Alignment uses up 2 bytes
-	bgequ 1f		# Jump if we have at least two bytes
-	addl2 $2, r3		# Deal with it if we have not already
+	bbc $1, %r2, 2f		# Check is bit 1 , jump if clear
+	subl2 $2, %r3		# Alignment uses up 2 bytes
+	bgequ 1f		# Jump if we have at least two bytes
+	addl2 $2, %r3		# Deal with it if we have not already
 	jmp 6f
 1:	/* If here copy halfword, and checksum it, */
-	addw2 (r2), r0		# Add the half double-word to r0
-	adwc $0, r0		# Carry
-	addl2 $2, r2		# move pointer on two bytes
+	addw2 (%r2), %r0	# Add the half double-word to r0
+	adwc $0, %r0		# Carry
+	addl2 $2, %r2		# move pointer on two bytes
 2:	/* Get 32-bit word count and do 3 checksum on it */
 	/* if 0 count jump over it */
-#	divl3 $32, r3, r5
-	ashl $-5, r3, r5
+#	divl3 $32, %r3, %r5
+	ashl $-5, %r3, %r5
 	beqlu 4f
-3:	addl2 (r2), r0		# Checksum 32 bytes
-	adwc 4(r2), r0
-	adwc 8(r2), r0
-	adwc 12(r2), r0
-	adwc 16(r2), r0
-	adwc 20(r2), r0
-	adwc 24(r2), r0
-	adwc 28(r2), r0
-	adwc $0, r0
-	addl2 $32, r2
-	sobgtr r5,3b
+3:	addl2 (%r2), %r0	# Checksum 32 bytes
+	adwc 4(%r2), %r0
+	adwc 8(%r2), %r0
+	adwc 12(%r2), %r0
+	adwc 16(%r2), %r0
+	adwc 20(%r2), %r0
+	adwc 24(%r2), %r0
+	adwc 28(%r2), %r0
+	adwc $0, %r0
+	addl2 $32, %r2
+	sobgtr %r5,3b
 	/* jump not equal back to 3b*/
-4:	bicl3 $0xFFFFFFE3, r3, r5	/* D.A. My method for an AND, AND r3 with 0x1C (00011100) */
+4:	bicl3 $0xFFFFFFE3, %r3, %r5	/* D.A. My method for an AND, AND r3 with 0x1C (00011100) */
 	/* this put in r5 a value of 4, 8, 12, 16, 20, 24, 28 bytes */
 	beqlu 6f
 	/* rotate r5 by -2 gives 1, 2, 3, 4, 5, 6, 7 */
-	rotl $-2, r5, r5
-5:	addl2 (r2), r0		/* Add in long from R2 */
-	adwc $0, r0		/* Add in carry */
-	addl2 $4, r2		/* move r2 pointer along 4 bytes */
-	sobgtr r5, 5b		/* jump to 5: if r5 is > 0 */
-6:	bicl3 $0xFFFFFFFC, r3, r5	/* AND either 1 or 2 into r5 */
+	rotl $-2, %r5, %r5
+5:	addl2 (%r2), %r0	/* Add in long from R2 */
+	adwc $0, %r0		/* Add in carry */
+	addl2 $4, %r2		/* move r2 pointer along 4 bytes */
+	sobgtr %r5, 5b		/* jump to 5: if r5 is > 0 */
+6:	bicl3 $0xFFFFFFFC, %r3, %r5	/* AND either 1 or 2 into r5 */
 	beqlu 9f		/* if no further bytes we are finished */
-	cmpl $2, r5		/* compare what we have left with 2 */
+	cmpl $2, %r5		/* compare what we have left with 2 */
 	blssu 7f		/* if 2 or greater go to 7f */
-	movw (r2), r3		/* move a word into r3 */
-	addl2 $2, r2		/* move r2 on two bytes */
-	cmpl $2, r5
+	movw (%r2), %r3		/* move a word into r3 */
+	addl2 $2, %r2		/* move r2 on two bytes */
+	cmpl $2, %r5
 	beqlu 8f		/* if what are we checking here?? */
-	rotl $16, r3, r3	/* rotate r3 by a half word. */
-	bicl2 $0xFFFF, r3	/* AND off bottom half */
-7:	addb2 (r2), r3		/* ADD Byte from R2 to R3 */
-8:	addl2 r3, r0		/* Add Long R3 */
-	adwc $0, r0		/* Add in any carry */
-9:	ret
\ No newline at end of file
+	rotl $16, %r3, %r3	/* rotate r3 by a half word. */
+	bicl2 $0xFFFF, %r3	/* AND off bottom half */
+7:	addb2 (%r2), %r3	/* ADD Byte from R2 to R3 */
+8:	addl2 %r3, %r0		/* Add Long R3 */
+	adwc $0, %r0		/* Add in any carry */
+9:	ret
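(For anyone reading the checksum routine cold: csum_partial accumulates a 32-bit one's-complement sum, folding the carry back in with ADWC after each add, first in 32-byte chunks, then longwords, then a trailing word/byte. The C below is only a reading aid -- csum_partial_sketch is a made-up name, and it glosses over the byte-rotation detail for an odd trailing word; it is not the kernel's generic csum_partial.)

/*
 * Rough C shape of the csum_partial loop above.  A sketch only: the VAX
 * code folds carries with ADWC after every ADDL2, here a 64-bit
 * accumulator plays that role and the carries are folded at the end.
 */
#include <stddef.h>
#include <stdint.h>

static uint32_t csum_partial_sketch(const unsigned char *buf, size_t len,
                                    uint32_t sum)
{
	uint64_t acc = sum;

	/* halfword alignment step, mirroring the 'bbc $1, %r2, 2f' test */
	if (((uintptr_t)buf & 2) && len >= 2) {
		acc += *(const uint16_t *)buf;
		buf += 2;
		len -= 2;
	}

	/* 32-byte chunks, like the unrolled ADDL2/ADWC block at label 3 */
	while (len >= 32) {
		const uint32_t *p = (const uint32_t *)buf;
		for (int i = 0; i < 8; i++)
			acc += p[i];
		buf += 32;
		len -= 32;
	}

	/* remaining longwords (label 5) and trailing word/byte (labels 6-8) */
	while (len >= 4) {
		acc += *(const uint32_t *)buf;
		buf += 4;
		len -= 4;
	}
	if (len >= 2) {
		acc += *(const uint16_t *)buf;
		buf += 2;
		len -= 2;
	}
	if (len)
		acc += *buf;

	/* fold accumulated carries back into 32 bits (the ADWC $0 steps) */
	while (acc >> 32)
		acc = (acc & 0xffffffffu) + (acc >> 32);
	return (uint32_t)acc;
}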
Index: clear_user.S
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/clear_user.S,v
retrieving revision 1.5
retrieving revision 1.6
diff -u -r1.5 -r1.6
--- clear_user.S	16 Feb 2003 13:17:20 -0000	1.5
+++ clear_user.S	15 Jun 2003 22:33:56 -0000	1.6
@@ -23,11 +23,11 @@
 	.text
 ENTRY(__clear_user)
 	.word 0x3e
-	movl 4(ap), r1		/* r1 now has addr */
-	movl 8(ap), r0		/* r0 has size */
+	movl 4(%ap), %r1	/* r1 now has addr */
+	movl 8(%ap), %r0	/* r0 has size */
 	beql 2f
-1:	EX(movb, $0, (r1)+, fault)
-	sobgtr r0, 1b
+1:	EX(movb, $0, (%r1)+, fault)
+	sobgtr %r0, 1b
 2:	ret

 	.section .fixup, "ax"

Index: copy_tofrom_user.S
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/copy_tofrom_user.S,v
retrieving revision 1.5
retrieving revision 1.6
diff -u -r1.5 -r1.6
--- copy_tofrom_user.S	16 Feb 2003 13:17:20 -0000	1.5
+++ copy_tofrom_user.S	15 Jun 2003 22:33:56 -0000	1.6
@@ -22,13 +22,13 @@
 	.text
 ENTRY(__copy_tofrom_user)
 	.word 0x3e
-	movl 4(ap), r2		/* to in r2 */
-	movl 8(ap), r3		/* from in r3 */
-	movl 12(ap), r0		/* size in r0 */
+	movl 4(%ap), %r2	/* to in r2 */
+	movl 8(%ap), %r3	/* from in r3 */
+	movl 12(%ap), %r0	/* size in r0 */

-1:	EX(movb, (r3)+, r4, l_fixup)
-	EX(movb, r4, (r2)+, s_fixup)
-	sobgtr r0, 1b
+1:	EX(movb, (%r3)+, %r4, l_fixup)
+	EX(movb, %r4, (%r2)+, s_fixup)
+	sobgtr %r0, 1b
 	ret

 	.section .fixup,"ax"
 	.align 4

Index: string.c
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/string.c,v
retrieving revision 1.4
retrieving revision 1.5
diff -u -r1.4 -r1.5
--- string.c	20 May 2002 00:33:33 -0000	1.4
+++ string.c	15 Jun 2003 22:33:56 -0000	1.5
@@ -207,17 +207,17 @@
 void * memset(void * s, int c , __kernel_size_t count)
 {
 	asm (
-	"	movl %2, r6		\n"	/* R6 holds bytes left */
-	"	movl %0, r3		\n"	/* dest in R3 */
-	"	movl $0xffff, r7	\n"	/* R7 always holds 65535 */
+	"	movl %2, %%r6		\n"	/* R6 holds bytes left */
+	"	movl %0, %%r3		\n"	/* dest in R3 */
+	"	movl $0xffff, %%r7	\n"	/* R7 always holds 65535 */
 	"  next_chunk:			\n"
-	"	cmpl r6, r7		\n"
+	"	cmpl %%r6, %%r7		\n"
 	"	blequ last_chunk	\n"	/* < 65535 bytes left */
-	"	movc5 $0, 0, %1, r7, (r3) \n"	/* MOVC5 updates R3 for us */
-	"	subl2 r7, r6		\n"
+	"	movc5 $0, 0, %1, %%r7, (%%r3) \n"	/* MOVC5 updates R3 for us */
+	"	subl2 %%r7, %%r6	\n"
 	"	brb next_chunk		\n"
 	"last_chunk:			\n"
-	"	movc5 $0, 0, %1, r6, (r3) "
+	"	movc5 $0, 0, %1, %%r6, (%%r3) "
 	: /* no outputs */
 	: "g" (s), "g" (c), "g" (count)
 	: "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7");
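(Side note on the memset() above: MOVC5 takes 16-bit length operands, so a single instruction can fill at most 65535 bytes, which is why the inline asm loops in 0xffff-sized chunks. A rough C sketch of that chunking follows; memset_chunked, fill_chunk and MOVC5_MAX are hypothetical names standing in for the real thing.)

/*
 * Sketch of the chunking logic in the VAX memset() above.  fill_chunk()
 * stands in for one MOVC5 instruction.
 */
#include <stddef.h>
#include <string.h>

#define MOVC5_MAX 0xffffu          /* largest length one MOVC5 can handle */

static void fill_chunk(unsigned char *dst, int c, size_t n)
{
	memset(dst, c, n);         /* placeholder for a single MOVC5 */
}

void *memset_chunked(void *s, int c, size_t count)
{
	unsigned char *dst = s;

	while (count > MOVC5_MAX) {        /* 'cmpl r6, r7 / blequ last_chunk' */
		fill_chunk(dst, c, MOVC5_MAX);
		dst += MOVC5_MAX;          /* MOVC5 leaves R3 just past the chunk */
		count -= MOVC5_MAX;
	}
	fill_chunk(dst, c, count);         /* final partial chunk */
	return s;
}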
Index: strncpy_user.S
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/strncpy_user.S,v
retrieving revision 1.6
retrieving revision 1.7
diff -u -r1.6 -r1.7
--- strncpy_user.S	3 Mar 2003 00:48:53 -0000	1.6
+++ strncpy_user.S	15 Jun 2003 22:33:56 -0000	1.7
@@ -23,19 +23,19 @@
 	.text
 ENTRY(__strncpy_from_user)
 	.word 0x3e
-	movl 4(ap), r2		/* r2 now has dst */
-	movl 8(ap), r3		/* r3 now has src */
-	movl 12(ap), r0		/* r0 has count */
-	movl r0, r1		/* keep count in r1 */
+	movl 4(%ap), %r2	/* r2 now has dst */
+	movl 8(%ap), %r3	/* r3 now has src */
+	movl 12(%ap), %r0	/* r0 has count */
+	movl %r0, %r1		/* keep count in r1 */
 	beql 2f
-1:	EX(movb, (r3)+, r4, fault)
-	movb r4, (r2)+
-	cmpb $0, r4
+1:	EX(movb, (%r3)+, %r4, fault)
+	movb %r4, (%r2)+
+	cmpb $0, %r4
 	beql 2f
-	sobgtr r1, 1b
-2:	subl2 r1, r0
+	sobgtr %r1, 1b
+2:	subl2 %r1, %r0
 	ret
 	.section .fixup, "ax"
-fault:	movl $-EFAULT, r0
+fault:	movl $-EFAULT, %r0
 	ret
 	.previous

Index: strnlen_user.S
===================================================================
RCS file: /cvsroot/linux-vax/kernel-2.5/arch/vax/lib/strnlen_user.S,v
retrieving revision 1.5
retrieving revision 1.6
diff -u -r1.5 -r1.6
--- strnlen_user.S	16 Feb 2003 13:17:20 -0000	1.5
+++ strnlen_user.S	15 Jun 2003 22:33:56 -0000	1.6
@@ -22,18 +22,18 @@
 	.text
 ENTRY(__strnlen_user)
 	.word 0x3e
-	movl 4(ap), r0
-	movl 8(ap), r1
-	movl r0, r2
+	movl 4(%ap), %r0
+	movl 8(%ap), %r1
+	movl %r0, %r2

-1:	EX(movb, (r0)+, r3, fault)
-	cmpb $0, r3
+1:	EX(movb, (%r0)+, %r3, fault)
+	cmpb $0, %r3
 	beql 2f
-	sobgtr r1, 1b
-	incl r0
-2:	subl2 r2, r0
+	sobgtr %r1, 1b
+	incl %r0
+2:	subl2 %r2, %r0
 	ret

 	.section .fixup,"ax"
-fault:	movl $0, r0
+fault:	movl $0, %r0
 	ret
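(For completeness, the contract that __strncpy_from_user implements: copy at most count bytes, stop after a terminating NUL, return the length of the string copied (not counting the terminator), or -EFAULT if a user access faults through the EX() fixup. A C-level sketch, with strncpy_from_user_sketch and get_user_byte() as hypothetical stand-ins for the routine and its EX()-wrapped load:)

/*
 * C-level sketch of the __strncpy_from_user logic above.  get_user_byte()
 * stands in for the EX()-protected user-space load; here it is a plain
 * load that never faults.
 */
#include <errno.h>

static int get_user_byte(unsigned char *dst, const char *user_src)
{
	*dst = (unsigned char)*user_src;   /* real code would use the fixup table */
	return 0;                          /* 0 on success, -EFAULT on a bad access */
}

long strncpy_from_user_sketch(char *dst, const char *user_src, long count)
{
	long remaining = count;

	while (remaining > 0) {
		unsigned char ch;

		if (get_user_byte(&ch, user_src++))
			return -EFAULT;            /* the 'fault:' fixup path */
		*dst++ = ch;
		if (ch == '\0')
			break;                     /* NUL copied, stop without decrement */
		remaining--;
	}
	return count - remaining;                  /* like 'subl2 r1, r0' */
}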