/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/*    The views and conclusions contained in the software and        */
/*    documentation are those of the authors and should not be       */
/*    interpreted as representing official policies, either expressed */
/*    or implied, of The University of Texas at Austin.              */
/*********************************************************************/
|
kusano |
2b45e8 |
#define ASSEMBLER
#include "common.h"

/* Argument-register mapping (System V AMD64 unless WINDOWS_ABI):     */
/*   N    - number of complex elements to copy                        */
/*   X    - source vector        INCX - source stride (in elements)   */
/*   Y    - destination vector   INCY - destination stride            */
#define N	ARG1	/* rdi */
#define X	ARG2	/* rsi */
#define INCX	ARG3	/* rdx */
#define Y	ARG4	/* rcx */

#ifndef WINDOWS_ABI
#define INCY	ARG5	/* r8 */
#define FLAG	ARG6
#else
/* Windows x64 passes only four integer arguments in registers; the   */
/* fifth (INCY) is loaded from the caller's stack in the prologue.    */
#define INCY	%r10
#define FLAG	%r11
#endif

#include "l1param.h"
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
/*--------------------------------------------------------------------*/
/* Complex vector copy: y := x, using 64-bit MMX moves.               */
/*                                                                    */
/* In:   N = element count, X/INCX = source, Y/INCY = destination.    */
/* Out:  %rax = 0.                                                    */
/*                                                                    */
/* SIZE and ZBASE_SHIFT come from common.h and depend on precision:   */
/* one complex element is 2*SIZE bytes (XDOUBLE: 32, DOUBLE: 16,      */
/* single: 8).  Two paths: a 4x-unrolled unit-stride loop with        */
/* software prefetch, and a generic strided loop at .L100.            */
/*--------------------------------------------------------------------*/
	PROLOGUE
	PROFCODE

#ifdef WINDOWS_ABI
	movq	40(%rsp), INCY		# 5th argument lives on the stack on Win64
#endif

	EMMS				# enter MMX state

	salq	$ZBASE_SHIFT, INCX	# scale strides: complex elements -> bytes
	salq	$ZBASE_SHIFT, INCY

	testq	N, N			# if n <= 0 there is nothing to copy
	jle	.L999

	cmpq	$2 * SIZE, INCX		# take the fast path only when both
	jne	.L100			# vectors are contiguous (stride ==
	cmpq	$2 * SIZE, INCY		# one complex element); otherwise use
	jne	.L100			# the strided path at .L100

	movq	N, %rax			# %rax = n / 4  (4 complex elements/iter)
	sarq	$2, %rax
	jle	.L20
	ALIGN_2

/* Unit-stride main loop: copies 4 complex elements (8 * SIZE bytes)  */
/* per iteration through %mm0-%mm7, prefetching ahead on both X and Y. */
.L11:
#ifdef XDOUBLE
	/* XDOUBLE: one complex element = 32 bytes = four 8-byte moves. */

#ifdef PREFETCH
	PREFETCH	(PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)

#ifdef PREFETCHW
	PREFETCHW	(PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	movq	32(X), %mm4
	movq	%mm4, 32(Y)
	movq	40(X), %mm5
	movq	%mm5, 40(Y)
	movq	48(X), %mm6
	movq	%mm6, 48(Y)
	movq	56(X), %mm7
	movq	%mm7, 56(Y)

#ifdef PREFETCH
	PREFETCH	(PREFETCHSIZE + 64) - PREOFFSET(X)
#endif

	movq	64(X), %mm0
	movq	%mm0, 64(Y)
	movq	72(X), %mm1
	movq	%mm1, 72(Y)
	movq	80(X), %mm2
	movq	%mm2, 80(Y)
	movq	88(X), %mm3
	movq	%mm3, 88(Y)

#ifdef PREFETCHW
	PREFETCHW	(PREFETCHSIZE + 64) - PREOFFSET(Y)
#endif

	movq	 96(X), %mm4
	movq	%mm4,  96(Y)
	movq	104(X), %mm5
	movq	%mm5, 104(Y)
	movq	112(X), %mm6
	movq	%mm6, 112(Y)
	movq	120(X), %mm7
	movq	%mm7, 120(Y)
#elif defined(DOUBLE)
	/* DOUBLE: one complex element = 2 * SIZE = two 8-byte moves. */

#ifdef PREFETCH
	PREFETCH	(PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	movq	0 * SIZE(X), %mm0
	movq	1 * SIZE(X), %mm1

	movq	%mm0, 0 * SIZE(Y)
	movq	%mm1, 1 * SIZE(Y)

	movq	2 * SIZE(X), %mm2
	movq	3 * SIZE(X), %mm3

	movq	%mm2, 2 * SIZE(Y)
	movq	%mm3, 3 * SIZE(Y)

	movq	4 * SIZE(X), %mm4
	movq	5 * SIZE(X), %mm5

#ifdef PREFETCHW
	PREFETCHW	(PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	movq	%mm4, 4 * SIZE(Y)
	movq	%mm5, 5 * SIZE(Y)

	movq	6 * SIZE(X), %mm6
	movq	7 * SIZE(X), %mm7

	movq	%mm6, 6 * SIZE(Y)
	movq	%mm7, 7 * SIZE(Y)
#else
	/* Single precision: one complex element = 8 bytes = one move. */
	movq	0 * SIZE(X), %mm0
	movq	2 * SIZE(X), %mm2

#ifdef PREFETCH
	PREFETCH	(PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	movq	%mm0, 0 * SIZE(Y)
	movq	%mm2, 2 * SIZE(Y)

	movq	4 * SIZE(X), %mm4
	movq	6 * SIZE(X), %mm6

#ifdef PREFETCHW
	PREFETCHW	(PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	movq	%mm4, 4 * SIZE(Y)
	movq	%mm6, 6 * SIZE(Y)
#endif

	addq	$8 * SIZE, X		# advance past 4 complex elements
	addq	$8 * SIZE, Y
	decq	%rax
	jg	.L11
	ALIGN_2

/* Unit-stride remainder: n % 4 single-element copies. */
.L20:
	movq	N, %rax
	andq	$3, %rax
	jle	.L99
	ALIGN_2

.L21:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm1
	movq	%mm1, 1 * SIZE(Y)
#else
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
#endif

	addq	$2 * SIZE, X		# advance one complex element
	addq	$2 * SIZE, Y
	decq	%rax
	jg	.L21

.L99:
	xorq	%rax, %rax		# return 0
	EMMS				# leave MMX state before returning
	ret
	ALIGN_3

/* Strided path: same copy, advancing by the byte strides INCX/INCY.  */
/* Main loop handles 4 complex elements per iteration.                */
.L100:
	movq	N, %rax
	sarq	$2, %rax
	jle	.L120
	ALIGN_2

.L111:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
	addq	INCX, X
	addq	INCY, Y
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm1
	movq	%mm1, 1 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm2
	movq	%mm2, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm3
	movq	%mm3, 1 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm4
	movq	%mm4, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm5
	movq	%mm5, 1 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm6
	movq	%mm6, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm7
	movq	%mm7, 1 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y
#else
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm2
	movq	%mm2, 0 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm4
	movq	%mm4, 0 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y

	movq	0 * SIZE(X), %mm6
	movq	%mm6, 0 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y
#endif

	decq	%rax
	jg	.L111

/* Strided remainder: n % 4 single-element copies. */
.L120:
	movq	N, %rax
	andq	$3, %rax
	jle	.L999
	ALIGN_2

.L121:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	%mm0,  0(Y)
	movq	 8(X), %mm1
	movq	%mm1,  8(Y)
	movq	16(X), %mm2
	movq	%mm2, 16(Y)
	movq	24(X), %mm3
	movq	%mm3, 24(Y)
	addq	INCX, X
	addq	INCY, Y
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
	movq	1 * SIZE(X), %mm1
	movq	%mm1, 1 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y
#else
	movq	0 * SIZE(X), %mm0
	movq	%mm0, 0 * SIZE(Y)
	addq	INCX, X
	addq	INCY, Y
#endif

	decq	%rax
	jg	.L121

.L999:
	xorq	%rax, %rax		# return 0
	EMMS				# leave MMX state before returning
	ret

	EPILOGUE