/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
#define ASSEMBLER
|
|
kusano |
2b45e8 |
#include "common.h"
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
#define N r3
|
|
kusano |
2b45e8 |
#define XX r4
|
|
kusano |
2b45e8 |
#define PREA r5
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
#ifdef linux
|
|
kusano |
2b45e8 |
#ifndef __64BIT__
|
|
kusano |
2b45e8 |
#define X r6
|
|
kusano |
2b45e8 |
#define INCX r7
|
|
kusano |
2b45e8 |
#else
|
|
kusano |
2b45e8 |
#define X r8
|
|
kusano |
2b45e8 |
#define INCX r9
|
|
kusano |
2b45e8 |
#endif
|
|
kusano |
2b45e8 |
#endif
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
#if defined(_AIX) || defined(__APPLE__)
|
|
kusano |
2b45e8 |
#if !defined(__64BIT__) && defined(DOUBLE)
|
|
kusano |
2b45e8 |
#define X r10
|
|
kusano |
2b45e8 |
#define INCX r8
|
|
kusano |
2b45e8 |
#else
|
|
kusano |
2b45e8 |
#define X r8
|
|
kusano |
2b45e8 |
#define INCX r9
|
|
kusano |
2b45e8 |
#endif
|
|
kusano |
2b45e8 |
#endif
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
#define FZERO f0
|
|
kusano |
2b45e8 |
#define ALPHA_R f1
|
|
kusano |
2b45e8 |
#define ALPHA_I f2
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
PROLOGUE
|
|
kusano |
2b45e8 |
PROFCODE
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
addi SP, SP, -8
|
|
kusano |
2b45e8 |
li r0, 0
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
stw r0, 0(SP)
|
|
kusano |
2b45e8 |
lfs FZERO, 0(SP)
|
|
kusano |
2b45e8 |
addi SP, SP, 8
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
#if (defined(_AIX) || defined(__APPLE__)) && !defined(__64BIT__) && defined(DOUBLE)
|
|
kusano |
2b45e8 |
lwz INCX, 56(SP)
|
|
kusano |
2b45e8 |
#endif
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
slwi INCX, INCX, ZBASE_SHIFT
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
li PREA, L1_PREFETCHSIZE
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
cmpwi cr0, N, 0
|
|
kusano |
2b45e8 |
blelr- cr0
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
fcmpu cr0, FZERO, ALPHA_R
|
|
kusano |
2b45e8 |
bne- cr0, LL(A1I1)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
fcmpu cr0, FZERO, ALPHA_I
|
|
kusano |
2b45e8 |
bne- cr0, LL(A1I1)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
cmpwi cr0, INCX, 2 * SIZE
|
|
kusano |
2b45e8 |
bne- cr0, LL(A0IN)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
srawi. r0, N, 3
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beq- cr0, LL(A0I1_Remain)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0I1_kernel):
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 2 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 3 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 4 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 5 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 6 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 7 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD FZERO, 8 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 9 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 10 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 11 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 12 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 13 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 14 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 15 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
addi X, X, 16 * SIZE
|
|
kusano |
2b45e8 |
bdnz LL(A0I1_kernel)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0I1_Remain):
|
|
kusano |
2b45e8 |
andi. r0, N, 7
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beqlr+
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0I1_RemainKernel):
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
addi X, X, 2 * SIZE
|
|
kusano |
2b45e8 |
bdnz LL(A0I1_RemainKernel)
|
|
kusano |
2b45e8 |
blr
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0IN):
|
|
kusano |
2b45e8 |
srawi. r0, N, 3
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beq- LL(A0IN_Remain)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0IN_Kernel):
|
|
kusano |
2b45e8 |
dcbtst X, PREA
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
bdnz LL(A0IN_Kernel)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0IN_Remain):
|
|
kusano |
2b45e8 |
andi. r0, N, 7
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beqlr+
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A0IN_RemainKernel):
|
|
kusano |
2b45e8 |
STFD FZERO, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD FZERO, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
bdnz LL(A0IN_RemainKernel)
|
|
kusano |
2b45e8 |
blr
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1I1):
|
|
kusano |
2b45e8 |
cmpwi cr0, INCX, 2 * SIZE
|
|
kusano |
2b45e8 |
bne- LL(A1IN)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
mr XX, X
|
|
kusano |
2b45e8 |
srawi. r0, N, 3
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beq+ LL(A1I1_Remain)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1I1_kernel):
|
|
kusano |
2b45e8 |
LFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f5, 2 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f6, 3 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f7, 4 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f8, 5 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f9, 6 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f10, 7 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f0, ALPHA_I, f4
|
|
kusano |
2b45e8 |
FMUL f4, ALPHA_R, f4
|
|
kusano |
2b45e8 |
FMUL f11, ALPHA_I, f6
|
|
kusano |
2b45e8 |
FMUL f6, ALPHA_R, f6
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f12, ALPHA_I, f8
|
|
kusano |
2b45e8 |
FMUL f8, ALPHA_R, f8
|
|
kusano |
2b45e8 |
FMUL f13, ALPHA_I, f10
|
|
kusano |
2b45e8 |
FMUL f10, ALPHA_R, f10
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f4, ALPHA_I, f3, f4
|
|
kusano |
2b45e8 |
FMSUB f3, ALPHA_R, f3, f0
|
|
kusano |
2b45e8 |
FMADD f6, ALPHA_I, f5, f6
|
|
kusano |
2b45e8 |
FMSUB f5, ALPHA_R, f5, f11
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f8, ALPHA_I, f7, f8
|
|
kusano |
2b45e8 |
FMSUB f7, ALPHA_R, f7, f12
|
|
kusano |
2b45e8 |
FMADD f10, ALPHA_I, f9, f10
|
|
kusano |
2b45e8 |
FMSUB f9, ALPHA_R, f9, f13
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f5, 2 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f6, 3 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f7, 4 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f8, 5 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f9, 6 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f10, 7 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LFD f3, 8 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f4, 9 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f5, 10 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f6, 11 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f7, 12 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f8, 13 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f9, 14 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f10,15 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f0, ALPHA_I, f4
|
|
kusano |
2b45e8 |
FMUL f4, ALPHA_R, f4
|
|
kusano |
2b45e8 |
FMUL f11, ALPHA_I, f6
|
|
kusano |
2b45e8 |
FMUL f6, ALPHA_R, f6
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f12, ALPHA_I, f8
|
|
kusano |
2b45e8 |
FMUL f8, ALPHA_R, f8
|
|
kusano |
2b45e8 |
FMUL f13, ALPHA_I, f10
|
|
kusano |
2b45e8 |
FMUL f10, ALPHA_R, f10
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f4, ALPHA_I, f3, f4
|
|
kusano |
2b45e8 |
FMSUB f3, ALPHA_R, f3, f0
|
|
kusano |
2b45e8 |
FMADD f6, ALPHA_I, f5, f6
|
|
kusano |
2b45e8 |
FMSUB f5, ALPHA_R, f5, f11
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f8, ALPHA_I, f7, f8
|
|
kusano |
2b45e8 |
FMSUB f7, ALPHA_R, f7, f12
|
|
kusano |
2b45e8 |
FMADD f10, ALPHA_I, f9, f10
|
|
kusano |
2b45e8 |
FMSUB f9, ALPHA_R, f9, f13
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD f3, 8 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f4, 9 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f5, 10 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f6, 11 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f7, 12 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f8, 13 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f9, 14 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f10,15 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
addi X, X, 16 * SIZE
|
|
kusano |
2b45e8 |
dcbtst X, PREA
|
|
kusano |
2b45e8 |
bdnz LL(A1I1_kernel)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1I1_Remain):
|
|
kusano |
2b45e8 |
andi. r0, N, 7
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beqlr+
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1I1_RemainKernel):
|
|
kusano |
2b45e8 |
LFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
LFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f5, ALPHA_I, f4
|
|
kusano |
2b45e8 |
FMUL f4, ALPHA_R, f4
|
|
kusano |
2b45e8 |
FMADD f4, ALPHA_I, f3, f4
|
|
kusano |
2b45e8 |
FMSUB f3, ALPHA_R, f3, f5
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
addi X, X, 2 * SIZE
|
|
kusano |
2b45e8 |
bdnz LL(A1I1_RemainKernel)
|
|
kusano |
2b45e8 |
blr
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1IN):
|
|
kusano |
2b45e8 |
mr XX, X
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
srawi. r0, N, 2
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beq- LL(A1IN_Remain)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1IN_Kernel):
|
|
kusano |
2b45e8 |
LFD f3, 0 * SIZE(XX)
|
|
kusano |
2b45e8 |
LFD f4, 1 * SIZE(XX)
|
|
kusano |
2b45e8 |
add XX, XX, INCX
|
|
kusano |
2b45e8 |
LFD f5, 0 * SIZE(XX)
|
|
kusano |
2b45e8 |
LFD f6, 1 * SIZE(XX)
|
|
kusano |
2b45e8 |
add XX, XX, INCX
|
|
kusano |
2b45e8 |
LFD f7, 0 * SIZE(XX)
|
|
kusano |
2b45e8 |
LFD f8, 1 * SIZE(XX)
|
|
kusano |
2b45e8 |
add XX, XX, INCX
|
|
kusano |
2b45e8 |
LFD f9, 0 * SIZE(XX)
|
|
kusano |
2b45e8 |
LFD f10, 1 * SIZE(XX)
|
|
kusano |
2b45e8 |
add XX, XX, INCX
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f0, ALPHA_I, f4
|
|
kusano |
2b45e8 |
FMUL f4, ALPHA_R, f4
|
|
kusano |
2b45e8 |
FMUL f11, ALPHA_I, f6
|
|
kusano |
2b45e8 |
FMUL f6, ALPHA_R, f6
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f12, ALPHA_I, f8
|
|
kusano |
2b45e8 |
FMUL f8, ALPHA_R, f8
|
|
kusano |
2b45e8 |
FMUL f13, ALPHA_I, f10
|
|
kusano |
2b45e8 |
FMUL f10, ALPHA_R, f10
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f4, ALPHA_I, f3, f4
|
|
kusano |
2b45e8 |
FMSUB f3, ALPHA_R, f3, f0
|
|
kusano |
2b45e8 |
FMADD f6, ALPHA_I, f5, f6
|
|
kusano |
2b45e8 |
FMSUB f5, ALPHA_R, f5, f11
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMADD f8, ALPHA_I, f7, f8
|
|
kusano |
2b45e8 |
FMSUB f7, ALPHA_R, f7, f12
|
|
kusano |
2b45e8 |
FMADD f10, ALPHA_I, f9, f10
|
|
kusano |
2b45e8 |
FMSUB f9, ALPHA_R, f9, f13
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD f5, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f6, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD f7, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f8, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
STFD f9, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f10, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
dcbtst X, PREA
|
|
kusano |
2b45e8 |
bdnz LL(A1IN_Kernel)
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1IN_Remain):
|
|
kusano |
2b45e8 |
andi. r0, N, 3
|
|
kusano |
2b45e8 |
mtspr CTR, r0
|
|
kusano |
2b45e8 |
beqlr+
|
|
kusano |
2b45e8 |
.align 4
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
LL(A1IN_RemainKernel):
|
|
kusano |
2b45e8 |
LFD f3, 0 * SIZE(XX)
|
|
kusano |
2b45e8 |
LFD f4, 1 * SIZE(XX)
|
|
kusano |
2b45e8 |
add XX, XX, INCX
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
FMUL f5, ALPHA_I, f4
|
|
kusano |
2b45e8 |
FMUL f4, ALPHA_R, f4
|
|
kusano |
2b45e8 |
FMADD f4, ALPHA_I, f3, f4
|
|
kusano |
2b45e8 |
FMSUB f3, ALPHA_R, f3, f5
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
STFD f3, 0 * SIZE(X)
|
|
kusano |
2b45e8 |
STFD f4, 1 * SIZE(X)
|
|
kusano |
2b45e8 |
add X, X, INCX
|
|
kusano |
2b45e8 |
bdnz LL(A1IN_RemainKernel)
|
|
kusano |
2b45e8 |
blr
|
|
kusano |
2b45e8 |
|
|
kusano |
2b45e8 |
EPILOGUE
|