kernel - Refactor copyin, copyout
author: Matthew Dillon <dillon@apollo.backplane.com>
Tue, 8 May 2018 16:33:19 +0000 (09:33 -0700)
committer: Matthew Dillon <dillon@apollo.backplane.com>
Tue, 8 May 2018 17:01:22 +0000 (10:01 -0700)
* Conditionalize movsq/movsb sequences to improve performance.

sys/platform/pc64/x86_64/support.s

index 3c88f8b..9b402e5 100644 (file)
@@ -286,15 +286,17 @@ ENTRY(std_copyout)
        ja      copyout_fault
 
        xchgq   %rdi,%rsi
+       cld
        /* bcopy(%rsi, %rdi, %rdx) */
        movq    %rdx,%rcx
 
        shrq    $3,%rcx
-       cld
+       jz      1f
        rep
        movsq
-       movb    %dl,%cl
-       andb    $7,%cl
+1:     movq    %rdx,%rcx
+       andq    $7,%rcx
+       jz      done_copyout
        rep
        movsb
 
@@ -337,14 +339,15 @@ ENTRY(std_copyin)
        ja      copyin_fault
 
        xchgq   %rdi,%rsi
+       cld
        movq    %rdx,%rcx
-       movb    %cl,%al
        shrq    $3,%rcx                         /* copy longword-wise */
-       cld
+       jz      1f
        rep
        movsq
-       movb    %al,%cl
-       andb    $7,%cl                          /* copy remaining bytes */
+1:     movq    %rdx,%rcx
+       andq    $7,%rcx                         /* copy remaining bytes */
+       jz      done_copyin
        rep
        movsb