/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/*    The views and conclusions contained in the software and        */
/*    documentation are those of the authors and should not be       */
/*    interpreted as representing official policies, either expressed */
/*    or implied, of The University of Texas at Austin.              */
/*********************************************************************/
/* Register-name macros for the ZSWAP kernel (complex vector swap).
 *
 * ARGn are the ABI argument registers defined by common.h:
 *   - System V AMD64: the BLAS zswap signature passes n in rdi (ARG1),
 *     x in ARG4, incx in ARG5, y in ARG6; incy arrives on the stack and
 *     is loaded into ARG2 (a volatile register) in the prologue below.
 *   - Windows x64: only four register args exist, so x/incx/y come from
 *     the stack in the prologue and incy is kept in %rbx (callee-saved,
 *     hence the pushq/popq pair in the function body).
 */
#define ASSEMBLER
#include "common.h"

#ifndef WINDOWS_ABI
#define N	ARG1	/* rdi */
#define X	ARG4
#define INCX	ARG5
#define Y	ARG6
#define INCY	ARG2
#else
#define N	ARG1
#define X	ARG2
#define INCX	ARG3
#define Y	ARG4
#define INCY	%rbx
#endif

/* Shadow copies of the X/Y pointers, used as the store cursors in the
 * strided path (.L29) where loads run ahead of stores. */
#define XX	%r10
#define YY	%r11

#include "l1param.h"	/* PREFETCHSIZE and related tuning constants */
/* zswap: exchange the contents of two complex vectors x and y.
 *
 * In (after prologue): N = element count, X/Y = vector pointers,
 *                      INCX/INCY = strides in elements (scaled to bytes
 *                      below via ZBASE_SHIFT; one complex element is
 *                      2 * SIZE bytes).
 * Out: %rax = 0.
 * Uses MMX registers %mm0-%mm7 for 8-byte block moves; EMMS brackets
 * the MMX section so x87 state is clean on entry and exit.
 */
	PROLOGUE
	PROFCODE

#ifndef WINDOWS_ABI
	/* SysV: incy is the 7th integer argument, passed on the stack.
	 * XDOUBLE pushes the offset out because the x87 scalar args
	 * occupy stack slots. */
#ifndef XDOUBLE
	movq	 8(%rsp), INCY
#else
	movq	40(%rsp), INCY
#endif
#else
	/* Win64: INCY lives in callee-saved %rbx, so preserve it.
	 * Args 5..8 are on the stack above the 32-byte shadow space
	 * (offsets include the pushq just done). */
	pushq	%rbx

	movq	56(%rsp), X
	movq	64(%rsp), INCX
	movq	72(%rsp), Y
	movq	80(%rsp), INCY
#endif

	EMMS

	/* Convert strides from complex elements to bytes. */
	salq	$ZBASE_SHIFT, INCX
	salq	$ZBASE_SHIFT, INCY

	/* Fast path only when both vectors are contiguous
	 * (stride == one complex element == 2 * SIZE bytes). */
	cmpq	$2 * SIZE, INCX
	jne	.L14
	cmpq	$2 * SIZE, INCY
	jne	.L14

	movq	N, %rax
	sarq	$2, %rax		/* main loop swaps 4 complex elements/iter */
	jle	.L15
	ALIGN_3

.L16:
	/* Contiguous main loop: swap 4 complex elements (8 * SIZE bytes)
	 * per iteration via 8-byte MMX transfers. */
#ifdef XDOUBLE
	/* long double complex: 32 bytes/element -> four movq per element. */
	movq	 0(X), %mm0
	movq	 8(X), %mm1
	movq	16(X), %mm2
	movq	24(X), %mm3
	movq	 0(Y), %mm4
	movq	 8(Y), %mm5
	movq	16(Y), %mm6
	movq	24(Y), %mm7

	movq	%mm4,  0(X)
	movq	%mm5,  8(X)
	movq	%mm6, 16(X)
	movq	%mm7, 24(X)
	movq	%mm0,  0(Y)
	movq	%mm1,  8(Y)
	movq	%mm2, 16(Y)
	movq	%mm3, 24(Y)

	movq	32(X), %mm0
	movq	40(X), %mm1
	movq	48(X), %mm2
	movq	56(X), %mm3
	movq	32(Y), %mm4
	movq	40(Y), %mm5
	movq	48(Y), %mm6
	movq	56(Y), %mm7

	movq	%mm4, 32(X)
	movq	%mm5, 40(X)
	movq	%mm6, 48(X)
	movq	%mm7, 56(X)
	movq	%mm0, 32(Y)
	movq	%mm1, 40(Y)
	movq	%mm2, 48(Y)
	movq	%mm3, 56(Y)

	movq	64(X), %mm0
	movq	72(X), %mm1
	movq	80(X), %mm2
	movq	88(X), %mm3
	movq	64(Y), %mm4
	movq	72(Y), %mm5
	movq	80(Y), %mm6
	movq	88(Y), %mm7

	movq	%mm4, 64(X)
	movq	%mm5, 72(X)
	movq	%mm6, 80(X)
	movq	%mm7, 88(X)
	movq	%mm0, 64(Y)
	movq	%mm1, 72(Y)
	movq	%mm2, 80(Y)
	movq	%mm3, 88(Y)

	movq	 96(X), %mm0
	movq	104(X), %mm1
	movq	112(X), %mm2
	movq	120(X), %mm3
	movq	 96(Y), %mm4
	movq	104(Y), %mm5
	movq	112(Y), %mm6
	movq	120(Y), %mm7

	movq	%mm4,  96(X)
	movq	%mm5, 104(X)
	movq	%mm6, 112(X)
	movq	%mm7, 120(X)
	movq	%mm0,  96(Y)
	movq	%mm1, 104(Y)
	movq	%mm2, 112(Y)
	movq	%mm3, 120(Y)
#elif defined(DOUBLE)
	/* double complex: one movq per real/imag part; prefetch the
	 * write-back lines ahead of the stores. */
	prefetchw PREFETCHSIZE * SIZE(X)
	MOVQ	0 * SIZE(X), %mm0
	MOVQ	1 * SIZE(X), %mm1
	MOVQ	2 * SIZE(X), %mm2
	MOVQ	3 * SIZE(X), %mm3
	prefetchw PREFETCHSIZE * SIZE(Y)
	MOVQ	0 * SIZE(Y), %mm4
	MOVQ	1 * SIZE(Y), %mm5
	MOVQ	2 * SIZE(Y), %mm6
	MOVQ	3 * SIZE(Y), %mm7

	MOVQ	%mm4, 0 * SIZE(X)
	MOVQ	%mm5, 1 * SIZE(X)
	MOVQ	%mm6, 2 * SIZE(X)
	MOVQ	%mm7, 3 * SIZE(X)
	MOVQ	%mm0, 0 * SIZE(Y)
	MOVQ	%mm1, 1 * SIZE(Y)
	MOVQ	%mm2, 2 * SIZE(Y)
	MOVQ	%mm3, 3 * SIZE(Y)

	MOVQ	4 * SIZE(X), %mm0
	MOVQ	5 * SIZE(X), %mm1
	MOVQ	6 * SIZE(X), %mm2
	MOVQ	7 * SIZE(X), %mm3
	MOVQ	4 * SIZE(Y), %mm4
	MOVQ	5 * SIZE(Y), %mm5
	MOVQ	6 * SIZE(Y), %mm6
	MOVQ	7 * SIZE(Y), %mm7

	MOVQ	%mm4, 4 * SIZE(X)
	MOVQ	%mm5, 5 * SIZE(X)
	MOVQ	%mm6, 6 * SIZE(X)
	MOVQ	%mm7, 7 * SIZE(X)
	MOVQ	%mm0, 4 * SIZE(Y)
	MOVQ	%mm1, 5 * SIZE(Y)
	MOVQ	%mm2, 6 * SIZE(Y)
	MOVQ	%mm3, 7 * SIZE(Y)
#else
	/* single-precision complex: one movq moves a whole element. */
#ifdef OPTERON
	prefetchw PREFETCHSIZE * SIZE(X)
#endif
	movq	0 * SIZE(X), %mm0
	movq	2 * SIZE(X), %mm1
	movq	4 * SIZE(X), %mm2
	movq	6 * SIZE(X), %mm3
	movq	0 * SIZE(Y), %mm4
	movq	2 * SIZE(Y), %mm5
	movq	4 * SIZE(Y), %mm6
	movq	6 * SIZE(Y), %mm7

#ifdef OPTERON
	prefetchw PREFETCHSIZE * SIZE(Y)
#endif
	movq	%mm4, 0 * SIZE(X)
	movq	%mm5, 2 * SIZE(X)
	movq	%mm6, 4 * SIZE(X)
	movq	%mm7, 6 * SIZE(X)

	movq	%mm0, 0 * SIZE(Y)
	movq	%mm1, 2 * SIZE(Y)
	movq	%mm2, 4 * SIZE(Y)
	movq	%mm3, 6 * SIZE(Y)
#endif

	addq	$8 * SIZE, X
	addq	$8 * SIZE, Y
	decq	%rax
	jg	.L16
	ALIGN_3

.L15:
	/* Contiguous remainder: N mod 4 elements, one per iteration. */
	movq	N, %rax
	andq	$3, %rax
	jle	.L27
	ALIGN_3

.L22:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	 8(X), %mm1
	movq	16(X), %mm2
	movq	24(X), %mm3
	movq	 0(Y), %mm4
	movq	 8(Y), %mm5
	movq	16(Y), %mm6
	movq	24(Y), %mm7

	movq	%mm4,  0(X)
	movq	%mm5,  8(X)
	movq	%mm6, 16(X)
	movq	%mm7, 24(X)
	movq	%mm0,  0(Y)
	movq	%mm1,  8(Y)
	movq	%mm2, 16(Y)
	movq	%mm3, 24(Y)
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	1 * SIZE(X), %mm1
	movq	0 * SIZE(Y), %mm4
	movq	1 * SIZE(Y), %mm5
	movq	%mm4, 0 * SIZE(X)
	movq	%mm5, 1 * SIZE(X)
	movq	%mm0, 0 * SIZE(Y)
	movq	%mm1, 1 * SIZE(Y)
#else
	movq	0 * SIZE(X), %mm0
	movq	0 * SIZE(Y), %mm4
	movq	%mm4, 0 * SIZE(X)
	movq	%mm0, 0 * SIZE(Y)
#endif
	addq	$2 * SIZE, X
	addq	$2 * SIZE, Y
	decq	%rax
	jg	.L22
	jmp	.L27
	ALIGN_3

/* INCX != 1 or INCY != 1 */

.L14:
	/* Strided path: X/Y advance on loads, the XX/YY shadow cursors
	 * advance on stores, so loads can run ahead of writes. Two
	 * elements per iteration. */
	movq	N, %rax
	movq	X, XX
	movq	Y, YY
	sarq	$1, %rax
	jle	.L28
	ALIGN_2

.L29:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	 8(X), %mm1
	movq	16(X), %mm2
	movq	24(X), %mm3
	addq	INCX, X
	movq	 0(Y), %mm4
	movq	 8(Y), %mm5
	movq	16(Y), %mm6
	movq	24(Y), %mm7
	addq	INCY, Y

	movq	%mm4,  0(XX)
	movq	%mm5,  8(XX)
	movq	%mm6, 16(XX)
	movq	%mm7, 24(XX)
	addq	INCX, XX
	movq	%mm0,  0(YY)
	movq	%mm1,  8(YY)
	movq	%mm2, 16(YY)
	movq	%mm3, 24(YY)
	addq	INCY, YY

	movq	 0(X), %mm0
	movq	 8(X), %mm1
	movq	16(X), %mm2
	movq	24(X), %mm3
	addq	INCX, X
	movq	 0(Y), %mm4
	movq	 8(Y), %mm5
	movq	16(Y), %mm6
	movq	24(Y), %mm7
	addq	INCY, Y

	movq	%mm4,  0(XX)
	movq	%mm5,  8(XX)
	movq	%mm6, 16(XX)
	movq	%mm7, 24(XX)
	addq	INCX, XX
	movq	%mm0,  0(YY)
	movq	%mm1,  8(YY)
	movq	%mm2, 16(YY)
	movq	%mm3, 24(YY)
	addq	INCY, YY
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	1 * SIZE(X), %mm1
	addq	INCX, X
	movq	0 * SIZE(X), %mm2
	movq	1 * SIZE(X), %mm3
	addq	INCX, X

	movq	0 * SIZE(Y), %mm4
	movq	1 * SIZE(Y), %mm5
	addq	INCY, Y
	movq	0 * SIZE(Y), %mm6
	movq	1 * SIZE(Y), %mm7
	addq	INCY, Y

	movq	%mm4, 0 * SIZE(XX)
	movq	%mm5, 1 * SIZE(XX)
	addq	INCX, XX
	movq	%mm6, 0 * SIZE(XX)
	movq	%mm7, 1 * SIZE(XX)
	addq	INCX, XX

	movq	%mm0, 0 * SIZE(YY)
	movq	%mm1, 1 * SIZE(YY)
	addq	INCY, YY
	movq	%mm2, 0 * SIZE(YY)
	movq	%mm3, 1 * SIZE(YY)
	addq	INCY, YY
#else
	movq	0 * SIZE(X), %mm0
	addq	INCX, X
	movq	0 * SIZE(X), %mm2
	addq	INCX, X

	movq	0 * SIZE(Y), %mm4
	addq	INCY, Y
	movq	0 * SIZE(Y), %mm6
	addq	INCY, Y

	movq	%mm4, 0 * SIZE(XX)
	addq	INCX, XX
	movq	%mm6, 0 * SIZE(XX)
	addq	INCX, XX

	movq	%mm0, 0 * SIZE(YY)
	addq	INCY, YY
	movq	%mm2, 0 * SIZE(YY)
	addq	INCY, YY
#endif
	decq	%rax
	jg	.L29
	ALIGN_3

.L28:
	/* Strided remainder: at most one element left (N mod 2). */
	movq	N, %rax
	andq	$1, %rax
	jle	.L27
	ALIGN_3

.L35:
#ifdef XDOUBLE
	movq	 0(X), %mm0
	movq	 8(X), %mm1
	movq	16(X), %mm2
	movq	24(X), %mm3
	movq	 0(Y), %mm4
	movq	 8(Y), %mm5
	movq	16(Y), %mm6
	movq	24(Y), %mm7

	movq	%mm4,  0(X)
	movq	%mm5,  8(X)
	movq	%mm6, 16(X)
	movq	%mm7, 24(X)
	movq	%mm0,  0(Y)
	movq	%mm1,  8(Y)
	movq	%mm2, 16(Y)
	movq	%mm3, 24(Y)
#elif defined(DOUBLE)
	movq	0 * SIZE(X), %mm0
	movq	1 * SIZE(X), %mm1
	movq	0 * SIZE(Y), %mm4
	movq	1 * SIZE(Y), %mm5

	movq	%mm4, 0 * SIZE(X)
	movq	%mm5, 1 * SIZE(X)
	movq	%mm0, 0 * SIZE(Y)
	movq	%mm1, 1 * SIZE(Y)
#else
	movq	0 * SIZE(X), %mm0
	movq	0 * SIZE(Y), %mm4
	movq	%mm4, 0 * SIZE(X)
	movq	%mm0, 0 * SIZE(Y)
#endif
	addq	INCX, X
	addq	INCY, Y

	decq	%rax
	jg	.L35
	ALIGN_3

.L27:
	/* Leave MMX state clean and return 0. */
	EMMS
	xorq	%rax, %rax

#ifdef WINDOWS_ABI
	popq	%rbx
#endif

	ret

	EPILOGUE