/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT           */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,           */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF          */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT          */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,        */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES          */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE         */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR              */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF        */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT         */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT        */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE               */
/* POSSIBILITY OF SUCH DAMAGE.                                       */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/
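/* Added note (inferred from the code below, not part of the original
   header): this appears to be a GotoBLAS2/OpenBLAS-style double-precision
   GEMM/TRMM kernel for 32-bit x86 with SSE2, computing C = alpha*A*B + C
   on packed panels with a 2x4 register block: a 2x1 column of A is held
   in %xmm0/%xmm1, four B values are broadcast through %xmm2/%xmm3 with
   movddup, and the four accumulators live in %xmm4-%xmm7.              */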

#define ASSEMBLER
#include "common.h"

#define STACK   16
#define ARGS    16

#define M        4 + STACK + ARGS(%esp)
#define N        8 + STACK + ARGS(%esp)
#define K       12 + STACK + ARGS(%esp)
#define ALPHA   16 + STACK + ARGS(%esp)
#define A       24 + STACK + ARGS(%esp)
#define ARG_B   28 + STACK + ARGS(%esp)
#define C       32 + STACK + ARGS(%esp)
#define ARG_LDC 36 + STACK + ARGS(%esp)
#define OFFSET  40 + STACK + ARGS(%esp)

#define J        0 + STACK(%esp)
#define BX       4 + STACK(%esp)
#define KK       8 + STACK(%esp)
#define KKK     12 + STACK(%esp)

#ifdef PENTIUM4
#define PREFETCH_R   (8 * 4)
#define PREFETCH     prefetcht1
#define PREFETCHSIZE 84
#endif

#ifdef PENTIUMM
#define PREFETCH_R   (8 * 4)
#define PREFETCH     prefetcht1
#define PREFETCHSIZE 84
#endif

#define AA  %edx
#define BB  %ecx
#define LDC %ebp
#define B   %edi
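
/* Added note: each KERNELn(address) macro below is one software-pipelined
   k-step of the 2x4 inner product.  A 2x1 column of A sits in %xmm0 (or
   %xmm1 in the odd-numbered macros), four consecutive B values are
   broadcast one at a time into %xmm2/%xmm3 with movddup, and each
   mulpd/addpd pair accumulates into one of %xmm4-%xmm7.  Each macro ends
   by loading operands needed a step or two later, which is why some load
   displacements run ahead of the current k index.                      */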

#define KERNEL1(address) \
        mulpd   %xmm0, %xmm2; \
        PREFETCH (PREFETCHSIZE + 0) * SIZE + (address) * 1 * SIZE(AA); \
        addpd   %xmm2, %xmm4; \
        movddup  1 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        addpd   %xmm2, %xmm5; \
        movddup  2 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        addpd   %xmm2, %xmm6; \
        movddup  3 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        movapd   2 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        addpd   %xmm2, %xmm7; \
        movddup  4 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL2(address) \
        mulpd   %xmm0, %xmm2; \
        addpd   %xmm2, %xmm4; \
        movddup  5 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        addpd   %xmm2, %xmm5; \
        movddup  6 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        addpd   %xmm2, %xmm6; \
        movddup  7 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm0, %xmm2; \
        movapd   4 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        addpd   %xmm2, %xmm7; \
        movddup 16 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL3(address) \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm4; \
        movddup  9 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm5; \
        movddup 10 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm6; \
        movddup 11 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        movapd   6 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        addpd   %xmm3, %xmm7; \
        movddup 12 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL4(address) \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm4; \
        movddup 13 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm5; \
        movddup 14 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        addpd   %xmm3, %xmm6; \
        movddup 15 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm0, %xmm3; \
        movapd  16 * SIZE + (address) * 1 * SIZE(AA), %xmm0; \
        addpd   %xmm3, %xmm7; \
        movddup 24 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL5(address) \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm4; \
        movddup 17 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm5; \
        movddup 18 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm6; \
        movddup 19 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        movapd  10 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        addpd   %xmm2, %xmm7

#define KERNEL6(address) \
        movddup 20 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm4; \
        movddup 21 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm5; \
        movddup 22 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        addpd   %xmm2, %xmm6; \
        movddup 23 * SIZE + (address) * 2 * SIZE(BB), %xmm2; \
        mulpd   %xmm1, %xmm2; \
        movapd  12 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        addpd   %xmm2, %xmm7; \
        movddup 32 * SIZE + (address) * 2 * SIZE(BB), %xmm2

#define KERNEL7(address) \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm4; \
        movddup 25 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm5; \
        movddup 26 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm6; \
        movddup 27 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        movapd  14 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        addpd   %xmm3, %xmm7; \
        movddup 28 * SIZE + (address) * 2 * SIZE(BB), %xmm3

#define KERNEL8(address) \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm4; \
        movddup 29 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm5; \
        movddup 30 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        addpd   %xmm3, %xmm6; \
        movddup 31 * SIZE + (address) * 2 * SIZE(BB), %xmm3; \
        mulpd   %xmm1, %xmm3; \
        movapd  24 * SIZE + (address) * 1 * SIZE(AA), %xmm1; \
        addpd   %xmm3, %xmm7; \
        movddup 40 * SIZE + (address) * 2 * SIZE(BB), %xmm3
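
/* Added note: the PROLOGUE below saves the callee-saved registers, loads
   the B pointer and LDC (scaled to element size) from the stack frame,
   and sets J = n >> 2, the number of four-column panels of C; the tails
   for n & 2 and n & 1 are handled at .L30 and .L60 further down.       */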

        PROLOGUE

        subl    $ARGS, %esp

        pushl   %ebp
        pushl   %edi
        pushl   %esi
        pushl   %ebx

        PROFCODE

        movl    ARG_B,   B
        movl    ARG_LDC, LDC

#ifdef TRMMKERNEL
        movl    OFFSET, %eax
#ifndef LEFT
        negl    %eax
#endif
        movl    %eax, KK
#endif

        leal    (, LDC, SIZE), LDC

        movl    N, %eax
        sarl    $2, %eax
        movl    %eax, J
        jle     .L30
        ALIGN_2
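
/* Added note: .L10 heads the loop over four-column panels of C.  BX is
   set to B + 4*K elements, i.e. the start of the next packed-B panel,
   and is advanced four elements per 2x4 block so that prefetcht2 walks
   ahead of the B stream; %esi walks the C panel and AA the packed A.   */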

.L10:
#if defined(TRMMKERNEL) && defined(LEFT)
        movl    OFFSET, %eax
        movl    %eax, KK
#endif

        movl    K, %eax
        sall    $BASE_SHIFT + 2, %eax
        leal    (B, %eax), %eax
        movl    %eax, BX

        movl    C, %esi         # coffset = c
        movl    A, AA           # aoffset = a

        movl    M, %ebx
        sarl    $1, %ebx        # i = (m >> 1)
        jle     .L20
        ALIGN_4
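
/* Added note: .L11 computes one 2x4 block of C.  For the TRMM variants
   the #if below selects where BB starts inside packed B and how many
   k iterations this block runs (stored in KKK); for plain GEMM it is
   simply B and the full K.                                             */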

.L11:
#if !defined(TRMMKERNEL) || \
        (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        movl    B, BB
#else
        movl    KK, %eax
        leal    (, %eax, SIZE), %eax
        leal    (AA, %eax, 2), AA
        leal    (B,  %eax, 4), BB
#endif

        movl    BX, %eax
        prefetcht2  0 * SIZE(%eax)
        subl    $-4 * SIZE, BX

        movapd   0 * SIZE(AA), %xmm0
        pxor    %xmm4, %xmm4
        movapd   8 * SIZE(AA), %xmm1
        pxor    %xmm5, %xmm5
        movddup  0 * SIZE(BB), %xmm2
        pxor    %xmm6, %xmm6
        movddup  8 * SIZE(BB), %xmm3
        pxor    %xmm7, %xmm7

        leal    (LDC, LDC, 2), %eax

#ifdef PENTIUM4
        prefetchnta 3 * SIZE(%esi)
        prefetchnta 3 * SIZE(%esi, LDC, 1)
        prefetchnta 3 * SIZE(%esi, LDC, 2)
        prefetchnta 3 * SIZE(%esi, %eax, 1)
#endif

#ifndef TRMMKERNEL
        movl    K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl    K, %eax
        subl    KK, %eax
        movl    %eax, KKK
#else
        movl    KK, %eax
#ifdef LEFT
        addl    $2, %eax
#else
        addl    $4, %eax
#endif
        movl    %eax, KKK
#endif

#ifdef CORE_PRESCOTT
        andl    $-8, %eax
        sall    $4, %eax
        je      .L15
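
/* Added note: on Prescott cores %eax now holds (k & ~7) * 16, the number
   of unrolled k-steps scaled by the 16 bytes of A each one consumes.
   The straight-line KERNEL1..KERNEL8 sequence below runs up to 16 groups
   of 8 k-steps without a loop; the cmpl/jle pairs exit early, and .L12
   then advances AA and BB by the amount actually consumed (%eax bytes of
   A, 2 * %eax bytes of B).                                             */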

.L1X:
        KERNEL1(16 *  0)
        KERNEL2(16 *  0)
        KERNEL3(16 *  0)
        KERNEL4(16 *  0)
        KERNEL5(16 *  0)
        KERNEL6(16 *  0)
        KERNEL7(16 *  0)
        KERNEL8(16 *  0)
        cmpl    $128 *  1, %eax
        jle     .L12
        KERNEL1(16 *  1)
        KERNEL2(16 *  1)
        KERNEL3(16 *  1)
        KERNEL4(16 *  1)
        KERNEL5(16 *  1)
        KERNEL6(16 *  1)
        KERNEL7(16 *  1)
        KERNEL8(16 *  1)
        cmpl    $128 *  2, %eax
        jle     .L12
        KERNEL1(16 *  2)
        KERNEL2(16 *  2)
        KERNEL3(16 *  2)
        KERNEL4(16 *  2)
        KERNEL5(16 *  2)
        KERNEL6(16 *  2)
        KERNEL7(16 *  2)
        KERNEL8(16 *  2)
        cmpl    $128 *  3, %eax
        jle     .L12
        KERNEL1(16 *  3)
        KERNEL2(16 *  3)
        KERNEL3(16 *  3)
        KERNEL4(16 *  3)
        KERNEL5(16 *  3)
        KERNEL6(16 *  3)
        KERNEL7(16 *  3)
        KERNEL8(16 *  3)
        cmpl    $128 *  4, %eax
        jle     .L12
        KERNEL1(16 *  4)
        KERNEL2(16 *  4)
        KERNEL3(16 *  4)
        KERNEL4(16 *  4)
        KERNEL5(16 *  4)
        KERNEL6(16 *  4)
        KERNEL7(16 *  4)
        KERNEL8(16 *  4)
        cmpl    $128 *  5, %eax
        jle     .L12
        KERNEL1(16 *  5)
        KERNEL2(16 *  5)
        KERNEL3(16 *  5)
        KERNEL4(16 *  5)
        KERNEL5(16 *  5)
        KERNEL6(16 *  5)
        KERNEL7(16 *  5)
        KERNEL8(16 *  5)
        cmpl    $128 *  6, %eax
        jle     .L12
        KERNEL1(16 *  6)
        KERNEL2(16 *  6)
        KERNEL3(16 *  6)
        KERNEL4(16 *  6)
        KERNEL5(16 *  6)
        KERNEL6(16 *  6)
        KERNEL7(16 *  6)
        KERNEL8(16 *  6)
        cmpl    $128 *  7, %eax
        jle     .L12
        KERNEL1(16 *  7)
        KERNEL2(16 *  7)
        KERNEL3(16 *  7)
        KERNEL4(16 *  7)
        KERNEL5(16 *  7)
        KERNEL6(16 *  7)
        KERNEL7(16 *  7)
        KERNEL8(16 *  7)
#if 1
        cmpl    $128 *  8, %eax
        jle     .L12
        KERNEL1(16 *  8)
        KERNEL2(16 *  8)
        KERNEL3(16 *  8)
        KERNEL4(16 *  8)
        KERNEL5(16 *  8)
        KERNEL6(16 *  8)
        KERNEL7(16 *  8)
        KERNEL8(16 *  8)
        cmpl    $128 *  9, %eax
        jle     .L12
        KERNEL1(16 *  9)
        KERNEL2(16 *  9)
        KERNEL3(16 *  9)
        KERNEL4(16 *  9)
        KERNEL5(16 *  9)
        KERNEL6(16 *  9)
        KERNEL7(16 *  9)
        KERNEL8(16 *  9)
        cmpl    $128 * 10, %eax
        jle     .L12
        KERNEL1(16 * 10)
        KERNEL2(16 * 10)
        KERNEL3(16 * 10)
        KERNEL4(16 * 10)
        KERNEL5(16 * 10)
        KERNEL6(16 * 10)
        KERNEL7(16 * 10)
        KERNEL8(16 * 10)
        cmpl    $128 * 11, %eax
        jle     .L12
        KERNEL1(16 * 11)
        KERNEL2(16 * 11)
        KERNEL3(16 * 11)
        KERNEL4(16 * 11)
        KERNEL5(16 * 11)
        KERNEL6(16 * 11)
        KERNEL7(16 * 11)
        KERNEL8(16 * 11)
        cmpl    $128 * 12, %eax
        jle     .L12
        KERNEL1(16 * 12)
        KERNEL2(16 * 12)
        KERNEL3(16 * 12)
        KERNEL4(16 * 12)
        KERNEL5(16 * 12)
        KERNEL6(16 * 12)
        KERNEL7(16 * 12)
        KERNEL8(16 * 12)
        cmpl    $128 * 13, %eax
        jle     .L12
        KERNEL1(16 * 13)
        KERNEL2(16 * 13)
        KERNEL3(16 * 13)
        KERNEL4(16 * 13)
        KERNEL5(16 * 13)
        KERNEL6(16 * 13)
        KERNEL7(16 * 13)
        KERNEL8(16 * 13)
        cmpl    $128 * 14, %eax
        jle     .L12
        KERNEL1(16 * 14)
        KERNEL2(16 * 14)
        KERNEL3(16 * 14)
        KERNEL4(16 * 14)
        KERNEL5(16 * 14)
        KERNEL6(16 * 14)
        KERNEL7(16 * 14)
        KERNEL8(16 * 14)
        cmpl    $128 * 15, %eax
        jle     .L12
        KERNEL1(16 * 15)
        KERNEL2(16 * 15)
        KERNEL3(16 * 15)
        KERNEL4(16 * 15)
        KERNEL5(16 * 15)
        KERNEL6(16 * 15)
        KERNEL7(16 * 15)
        KERNEL8(16 * 15)
#else
        addl    $32 * 4 * SIZE, AA
        addl    $32 * 8 * SIZE, BB
        subl    $128 * 8, %eax
        jg      .L1X
#endif

.L12:
        leal    (AA, %eax, 1), AA       # * 16
        leal    (BB, %eax, 2), BB       # * 64
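
/* Added note: the #else branch below is the generic (non-Prescott) path:
   a conventional loop at .L12, unrolled eight k-steps deep, with the
   same multiply/broadcast pattern written out inline instead of through
   the KERNEL macros.                                                   */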
#else

        sarl    $3, %eax
        je      .L15
        ALIGN_4

.L12:
        mulpd   %xmm0, %xmm2
        PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
        addpd   %xmm2, %xmm4
        movddup  1 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm5
        movddup  2 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm6
        movddup  3 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   2 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm7
        movddup  4 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm4
        movddup  5 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm5
        movddup  6 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm6
        movddup  7 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   4 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm7
        movddup 16 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm4
        movddup  9 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm5
        movddup 10 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm6
        movddup 11 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        movapd   6 * SIZE(AA), %xmm0
        addpd   %xmm3, %xmm7
        movddup 12 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm4
        movddup 13 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm5
        movddup 14 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        addpd   %xmm3, %xmm6
        movddup 15 * SIZE(BB), %xmm3
        mulpd   %xmm0, %xmm3
        movapd  16 * SIZE(AA), %xmm0
        addpd   %xmm3, %xmm7
        movddup 24 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm4
        movddup 17 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm5
        movddup 18 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm6
        movddup 19 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        movapd  10 * SIZE(AA), %xmm1
        addpd   %xmm2, %xmm7
        movddup 20 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm4
        movddup 21 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm5
        movddup 22 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        addpd   %xmm2, %xmm6
        movddup 23 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm2
        movapd  12 * SIZE(AA), %xmm1
        addpd   %xmm2, %xmm7
        movddup 32 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm4
        movddup 25 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm5
        movddup 26 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm6
        movddup 27 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  14 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm7
        movddup 28 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm4
        movddup 29 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm5
        movddup 30 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm6
        movddup 31 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  24 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm7
        movddup 40 * SIZE(BB), %xmm3

        addl    $32 * SIZE, BB
        addl    $16 * SIZE, AA
        decl    %eax
        jne     .L12
        ALIGN_4
#endif
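
/* Added note: .L15/.L16 finish the k & 7 leftover iterations one k-step
   at a time, after broadcasting alpha into %xmm3 for the write-back.   */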

.L15:
#ifndef TRMMKERNEL
        movl    K, %eax
#else
        movl    KKK, %eax
#endif
        movddup ALPHA, %xmm3
        andl    $7, %eax        # k & 7
        BRANCH
        je      .L18
        ALIGN_3

.L16:
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm4
        movddup  1 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm5
        movddup  2 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm6
        movddup  3 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   2 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm7
        movddup  4 * SIZE(BB), %xmm2

        addl    $2 * SIZE, AA
        addl    $4 * SIZE, BB
        decl    %eax
        jg      .L16
        ALIGN_4
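
/* Added note: write-back.  The orl/testl below checks whether the C
   pointer and LDC are both 16-byte aligned; if so the movapd path at
   .L18 is used, otherwise the movsd/movhpd path at .L18x.  The SHUFPD_2
   ops do not change the accumulators (%xmm0-%xmm2 are dead and %xmm3
   holds a broadcast alpha) and appear to exist to break dependency
   chains on Pentium 4.                                                 */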

.L18:
        SHUFPD_2 %xmm0, %xmm0
        SHUFPD_2 %xmm1, %xmm1
        SHUFPD_2 %xmm2, %xmm2
        SHUFPD_2 %xmm3, %xmm3

        mulpd   %xmm3, %xmm4
        mulpd   %xmm3, %xmm5
        mulpd   %xmm3, %xmm6
        mulpd   %xmm3, %xmm7

        movl    %esi, %eax
        orl     LDC, %eax
        testl   $15, %eax
        NOBRANCH
        jne     .L18x

        leal    (LDC, LDC, 2), %eax

#ifndef TRMMKERNEL
        movapd  0 * SIZE(%esi), %xmm0
        movapd  0 * SIZE(%esi, LDC, 1), %xmm1
        movapd  0 * SIZE(%esi, LDC, 2), %xmm2
        movapd  0 * SIZE(%esi, %eax, 1), %xmm3

        addpd   %xmm0, %xmm4
        addpd   %xmm1, %xmm5
        addpd   %xmm2, %xmm6
        addpd   %xmm3, %xmm7
#endif

        movapd  %xmm4, 0 * SIZE(%esi)
        movapd  %xmm5, 0 * SIZE(%esi, LDC, 1)
        movapd  %xmm6, 0 * SIZE(%esi, LDC, 2)
        movapd  %xmm7, 0 * SIZE(%esi, %eax, 1)

#if (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl    K, %eax
        subl    KKK, %eax
        leal    (,%eax, SIZE), %eax
        leal    (AA, %eax, 2), AA
        leal    (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl    $2, KK
#endif

        addl    $2 * SIZE, %esi         # coffset += 2
        decl    %ebx                    # i --
        jg      .L11
        jmp     .L20
        ALIGN_4
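
/* Added note: .L18x is the unaligned variant of the store-back above,
   writing the same alpha-scaled 2x4 block with movsd/movhpd pairs
   instead of movapd.                                                   */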

.L18x:
        leal    (LDC, LDC, 2), %eax

#ifndef TRMMKERNEL
        movsd   0 * SIZE(%esi), %xmm0
        movhpd  1 * SIZE(%esi), %xmm0
        movsd   0 * SIZE(%esi, LDC, 1), %xmm1
        movhpd  1 * SIZE(%esi, LDC, 1), %xmm1
        movsd   0 * SIZE(%esi, LDC, 2), %xmm2
        movhpd  1 * SIZE(%esi, LDC, 2), %xmm2
        movsd   0 * SIZE(%esi, %eax, 1), %xmm3
        movhpd  1 * SIZE(%esi, %eax, 1), %xmm3

        addpd   %xmm0, %xmm4
        addpd   %xmm1, %xmm5
        addpd   %xmm2, %xmm6
        addpd   %xmm3, %xmm7
#endif

        movsd   %xmm4, 0 * SIZE(%esi)
        movhpd  %xmm4, 1 * SIZE(%esi)
        movsd   %xmm5, 0 * SIZE(%esi, LDC, 1)
        movhpd  %xmm5, 1 * SIZE(%esi, LDC, 1)
        movsd   %xmm6, 0 * SIZE(%esi, LDC, 2)
        movhpd  %xmm6, 1 * SIZE(%esi, LDC, 2)
        movsd   %xmm7, 0 * SIZE(%esi, %eax, 1)
        movhpd  %xmm7, 1 * SIZE(%esi, %eax, 1)

#if (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl    K, %eax
        subl    KKK, %eax
        leal    (,%eax, SIZE), %eax
        leal    (AA, %eax, 2), AA
        leal    (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl    $2, KK
#endif

        addl    $2 * SIZE, %esi         # coffset += 2
        decl    %ebx                    # i --
        jg      .L11
        ALIGN_3
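
/* Added note: .L20 handles an odd trailing row (m & 1) as a 1x4 block.
   Here the roles of the operand streams are swapped relative to .L11:
   single A values are broadcast with movddup while B pairs are read
   with movapd, accumulating two columns per register.                  */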

.L20:
        movl    M, %ebx
        testl   $1, %ebx        # m & 1
        jle     .L29

#if !defined(TRMMKERNEL) || \
        (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        movl    B, BB
#else
        movl    KK, %eax
        leal    (, %eax, SIZE), %eax
        leal    (AA, %eax, 1), AA
        leal    (B,  %eax, 4), BB
#endif

        movddup  0 * SIZE(AA), %xmm0
        pxor    %xmm4, %xmm4
        movddup  8 * SIZE(AA), %xmm1
        pxor    %xmm5, %xmm5
        movapd   0 * SIZE(BB), %xmm2
        pxor    %xmm6, %xmm6
        movapd   8 * SIZE(BB), %xmm3
        pxor    %xmm7, %xmm7

#ifndef TRMMKERNEL
        movl    K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl    K, %eax
        subl    KK, %eax
        movl    %eax, KKK
#else
        movl    KK, %eax
#ifdef LEFT
        addl    $1, %eax
#else
        addl    $4, %eax
#endif
        movl    %eax, KKK
#endif
        sarl    $4, %eax
        je      .L25
        ALIGN_4

.L22:
        PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
        mulpd   %xmm0, %xmm2
        mulpd    2 * SIZE(BB), %xmm0
        addpd   %xmm2, %xmm4
        movapd   4 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm5
        movddup  1 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm2
        mulpd    6 * SIZE(BB), %xmm0
        addpd   %xmm2, %xmm6
        movapd  16 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm7
        movddup  2 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm3
        mulpd   10 * SIZE(BB), %xmm0
        addpd   %xmm3, %xmm4
        movapd  12 * SIZE(BB), %xmm3
        addpd   %xmm0, %xmm5
        movddup  3 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm3
        mulpd   14 * SIZE(BB), %xmm0
        addpd   %xmm3, %xmm6
        movapd  24 * SIZE(BB), %xmm3
        addpd   %xmm0, %xmm7
        movddup  4 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm2
        mulpd   18 * SIZE(BB), %xmm0
        addpd   %xmm2, %xmm4
        movapd  20 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm5
        movddup  5 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm2
        mulpd   22 * SIZE(BB), %xmm0
        addpd   %xmm2, %xmm6
        movapd  32 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm7
        movddup  6 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm3
        mulpd   26 * SIZE(BB), %xmm0
        addpd   %xmm3, %xmm4
        movapd  28 * SIZE(BB), %xmm3
        addpd   %xmm0, %xmm5
        movddup  7 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm3
        mulpd   30 * SIZE(BB), %xmm0
        addpd   %xmm3, %xmm6
        movapd  40 * SIZE(BB), %xmm3
        addpd   %xmm0, %xmm7
        movddup 16 * SIZE(AA), %xmm0
        mulpd   %xmm1, %xmm2
        mulpd   34 * SIZE(BB), %xmm1
        addpd   %xmm2, %xmm4
        movapd  36 * SIZE(BB), %xmm2
        addpd   %xmm1, %xmm5
        movddup  9 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm2
        mulpd   38 * SIZE(BB), %xmm1
        addpd   %xmm2, %xmm6
        movapd  48 * SIZE(BB), %xmm2
        addpd   %xmm1, %xmm7
        movddup 10 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm3
        mulpd   42 * SIZE(BB), %xmm1
        addpd   %xmm3, %xmm4
        movapd  44 * SIZE(BB), %xmm3
        addpd   %xmm1, %xmm5
        movddup 11 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm3
        mulpd   46 * SIZE(BB), %xmm1
        addpd   %xmm3, %xmm6
        movapd  56 * SIZE(BB), %xmm3
        addpd   %xmm1, %xmm7
        movddup 12 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm2
        mulpd   50 * SIZE(BB), %xmm1
        addpd   %xmm2, %xmm4
        movapd  52 * SIZE(BB), %xmm2
        addpd   %xmm1, %xmm5
        movddup 13 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm2
        mulpd   54 * SIZE(BB), %xmm1
        addpd   %xmm2, %xmm6
        movapd  64 * SIZE(BB), %xmm2
        addpd   %xmm1, %xmm7
        movddup 14 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm3
        mulpd   58 * SIZE(BB), %xmm1
        addpd   %xmm3, %xmm4
        movapd  60 * SIZE(BB), %xmm3
        addpd   %xmm1, %xmm5
        movddup 15 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm3
        mulpd   62 * SIZE(BB), %xmm1
        addpd   %xmm3, %xmm6
        movapd  72 * SIZE(BB), %xmm3
        addpd   %xmm1, %xmm7
        movddup 24 * SIZE(AA), %xmm1

        addl    $16 * SIZE, AA
        addl    $64 * SIZE, BB
        decl    %eax
        jne     .L22
        ALIGN_4

.L25:
#ifndef TRMMKERNEL
        movl    K, %eax
#else
        movl    KKK, %eax
#endif
        movddup ALPHA, %xmm3
        andl    $15, %eax       # k & 15
        BRANCH
        je      .L28

.L26:
        mulpd   %xmm0, %xmm2
        mulpd    2 * SIZE(BB), %xmm0
        addpd   %xmm2, %xmm4
        movapd   4 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm5
        movddup  1 * SIZE(AA), %xmm0

        addl    $1 * SIZE, AA
        addl    $4 * SIZE, BB

        decl    %eax
        jg      .L26
        ALIGN_4
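
/* Added note: .L28 stores the 1x4 result.  %xmm4 holds the values for
   columns 0 and 1 and %xmm5 those for columns 2 and 3, so each register
   is split across two C columns with movsd/movhpd; %eax points one LDC
   past %esi.                                                           */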

.L28:
        leal    (%esi, LDC, 1), %eax

        addpd   %xmm6, %xmm4
        addpd   %xmm7, %xmm5

        mulpd   %xmm3, %xmm4
        mulpd   %xmm3, %xmm5

#ifndef TRMMKERNEL

#ifdef PENTIUM4
        SHUFPD_2 %xmm0, %xmm0
        SHUFPD_2 %xmm1, %xmm1
#endif

        movsd   0 * SIZE(%esi), %xmm0
        movhpd  0 * SIZE(%eax), %xmm0
        movsd   0 * SIZE(%esi, LDC, 2), %xmm1
        movhpd  0 * SIZE(%eax, LDC, 2), %xmm1

        addpd   %xmm0, %xmm4
        addpd   %xmm1, %xmm5
#endif

        movsd   %xmm4, 0 * SIZE(%esi)
        movhpd  %xmm4, 0 * SIZE(%eax)
        movsd   %xmm5, 0 * SIZE(%esi, LDC, 2)
        movhpd  %xmm5, 0 * SIZE(%eax, LDC, 2)

#if (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl    K, %eax
        subl    KKK, %eax
        leal    (,%eax, SIZE), %eax
        leal    (AA, %eax, 1), AA
        leal    (BB, %eax, 4), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl    $1, KK
#endif
        ALIGN_4

.L29:
#if defined(TRMMKERNEL) && !defined(LEFT)
        addl    $4, KK
#endif

        leal    (, LDC, 4), %eax
        movl    BB, B
        addl    %eax, C         # c += 4 * ldc
        decl    J               # j --
        jg      .L10
        ALIGN_4
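
/* Added note: .L30 is the n & 2 tail, one panel of two columns,
   processed as 2x2 blocks (.L41) with a 1x2 fix-up for an odd row
   (.L50).                                                              */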

.L30:
        testl   $2, N
        je      .L60

        movl    C, %esi         # coffset = c
        movl    A, AA           # aoffset = a

#if defined(TRMMKERNEL) && defined(LEFT)
        movl    OFFSET, %eax
        movl    %eax, KK
#endif

        movl    M, %ebx
        sarl    $1, %ebx        # i = (m >> 1)
        jle     .L50
        ALIGN_4

.L41:
#if !defined(TRMMKERNEL) || \
        (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        movl    B, BB
#else
        movl    KK, %eax
        leal    (, %eax, SIZE), %eax
        leal    (AA, %eax, 2), AA
        leal    (B,  %eax, 2), BB
#endif

        movapd   0 * SIZE(AA), %xmm0
        pxor    %xmm4, %xmm4
        movapd   8 * SIZE(AA), %xmm1
        pxor    %xmm5, %xmm5
        movddup  0 * SIZE(BB), %xmm2
        pxor    %xmm6, %xmm6
        movddup  8 * SIZE(BB), %xmm3
        pxor    %xmm7, %xmm7

#ifdef HAVE_3DNOW
        prefetchw   2 * SIZE(%esi)
        prefetchw   2 * SIZE(%esi, LDC)
#endif

#ifdef PENTIUM4
        prefetchnta 3 * SIZE(%esi)
        prefetchnta 3 * SIZE(%esi, LDC)
#endif

#ifndef TRMMKERNEL
        movl    K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl    K, %eax
        subl    KK, %eax
        movl    %eax, KKK
#else
        movl    KK, %eax
#ifdef LEFT
        addl    $2, %eax
#else
        addl    $2, %eax
#endif
        movl    %eax, KKK
#endif
        sarl    $3, %eax
        je      .L45
        ALIGN_4

.L42:
        PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm4
        movddup  1 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   2 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm5
        movddup  2 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm6
        movddup  3 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   4 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm7
        movddup  4 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm4
        movddup  5 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   6 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm5
        movddup  6 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm6
        movddup  7 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd  16 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm7
        movddup 16 * SIZE(BB), %xmm2
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm4
        movddup  9 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  10 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm5
        movddup 10 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm6
        movddup 11 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  12 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm7
        movddup 12 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm4
        movddup 13 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  14 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm5
        movddup 14 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        addpd   %xmm3, %xmm6
        movddup 15 * SIZE(BB), %xmm3
        mulpd   %xmm1, %xmm3
        movapd  24 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm7
        movddup 24 * SIZE(BB), %xmm3

        addl    $16 * SIZE, AA
        addl    $16 * SIZE, BB
        decl    %eax
        jne     .L42
        ALIGN_4

.L45:
#ifndef TRMMKERNEL
        movl    K, %eax
#else
        movl    KKK, %eax
#endif
        movddup ALPHA, %xmm3
        andl    $7, %eax        # k & 7
        BRANCH
        je      .L48
        ALIGN_3

.L46:
        mulpd   %xmm0, %xmm2
        addpd   %xmm2, %xmm4
        movddup  1 * SIZE(BB), %xmm2
        mulpd   %xmm0, %xmm2
        movapd   2 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm5
        movddup  2 * SIZE(BB), %xmm2

        addl    $2 * SIZE, AA
        addl    $2 * SIZE, BB
        decl    %eax
        jg      .L46
        ALIGN_4

.L48:
        addpd   %xmm6, %xmm4
        addpd   %xmm7, %xmm5

        mulpd   %xmm3, %xmm4
        mulpd   %xmm3, %xmm5

#ifndef TRMMKERNEL
#ifdef PENTIUM4
        SHUFPD_2 %xmm0, %xmm0
        SHUFPD_2 %xmm1, %xmm1
#endif

        movsd   0 * SIZE(%esi), %xmm0
        movhpd  1 * SIZE(%esi), %xmm0
        movsd   0 * SIZE(%esi, LDC, 1), %xmm1
        movhpd  1 * SIZE(%esi, LDC, 1), %xmm1

        addpd   %xmm0, %xmm4
        addpd   %xmm1, %xmm5
#endif

        movsd   %xmm4, 0 * SIZE(%esi)
        movhpd  %xmm4, 1 * SIZE(%esi)
        movsd   %xmm5, 0 * SIZE(%esi, LDC, 1)
        movhpd  %xmm5, 1 * SIZE(%esi, LDC, 1)

#if (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl    K, %eax
        subl    KKK, %eax
        leal    (,%eax, SIZE), %eax
        leal    (AA, %eax, 2), AA
        leal    (BB, %eax, 2), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl    $2, KK
#endif

        addl    $2 * SIZE, %esi         # coffset += 2
        decl    %ebx                    # i --
        jg      .L41
        ALIGN_4

.L50:
        movl    M, %ebx
        testl   $1, %ebx        # m & 1
        jle     .L59

#if !defined(TRMMKERNEL) || \
        (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

        movl    B, BB
#else
        movl    KK, %eax
        leal    (, %eax, SIZE), %eax
        leal    (AA, %eax, 1), AA
        leal    (B,  %eax, 2), BB
#endif

        movddup  0 * SIZE(AA), %xmm0
        pxor    %xmm4, %xmm4
        movddup  8 * SIZE(AA), %xmm1
        pxor    %xmm5, %xmm5
        movapd   0 * SIZE(BB), %xmm2
        pxor    %xmm6, %xmm6
        movapd   8 * SIZE(BB), %xmm3
        pxor    %xmm7, %xmm7

#ifndef TRMMKERNEL
        movl    K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
        movl    K, %eax
        subl    KK, %eax
        movl    %eax, KKK
#else
        movl    KK, %eax
#ifdef LEFT
        addl    $1, %eax
#else
        addl    $2, %eax
#endif
        movl    %eax, KKK
#endif
        sarl    $4, %eax
        je      .L55
        ALIGN_4

.L52:
        PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
        mulpd   %xmm0, %xmm2
        movddup  1 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm4
        mulpd    2 * SIZE(BB), %xmm0
        movapd  16 * SIZE(BB), %xmm2
        addpd   %xmm0, %xmm5
        movddup  2 * SIZE(AA), %xmm0
        mulpd    4 * SIZE(BB), %xmm0
        addpd   %xmm0, %xmm6
        movddup  3 * SIZE(AA), %xmm0
        mulpd    6 * SIZE(BB), %xmm0
        addpd   %xmm0, %xmm7
        movddup  4 * SIZE(AA), %xmm0
        mulpd   %xmm0, %xmm3
        movddup  5 * SIZE(AA), %xmm0
        addpd   %xmm3, %xmm4
        mulpd   10 * SIZE(BB), %xmm0
        movapd  24 * SIZE(BB), %xmm3
        addpd   %xmm0, %xmm5
        movddup  6 * SIZE(AA), %xmm0
        mulpd   12 * SIZE(BB), %xmm0
        addpd   %xmm0, %xmm6
        movddup  7 * SIZE(AA), %xmm0
        mulpd   14 * SIZE(BB), %xmm0
        addpd   %xmm0, %xmm7
        movddup 16 * SIZE(AA), %xmm0
        mulpd   %xmm1, %xmm2
        movddup  9 * SIZE(AA), %xmm1
        addpd   %xmm2, %xmm4
        mulpd   18 * SIZE(BB), %xmm1
        movapd  32 * SIZE(BB), %xmm2
        addpd   %xmm1, %xmm5
        movddup 10 * SIZE(AA), %xmm1
        mulpd   20 * SIZE(BB), %xmm1
        addpd   %xmm1, %xmm6
        movddup 11 * SIZE(AA), %xmm1
        mulpd   22 * SIZE(BB), %xmm1
        addpd   %xmm1, %xmm7
        movddup 12 * SIZE(AA), %xmm1
        mulpd   %xmm1, %xmm3
        movddup 13 * SIZE(AA), %xmm1
        addpd   %xmm3, %xmm4
        mulpd   26 * SIZE(BB), %xmm1
        movapd  40 * SIZE(BB), %xmm3
        addpd   %xmm1, %xmm5
        movddup 14 * SIZE(AA), %xmm1
        mulpd   28 * SIZE(BB), %xmm1
        addpd   %xmm1, %xmm6
        movddup 15 * SIZE(AA), %xmm1
        mulpd   30 * SIZE(BB), %xmm1
        addpd   %xmm1, %xmm7
        movddup 24 * SIZE(AA), %xmm1

        addl    $16 * SIZE, AA
        addl    $32 * SIZE, BB
        decl    %eax
        jne     .L52
        ALIGN_4

.L55:
#ifndef TRMMKERNEL
        movl    K, %eax
#else
        movl    KKK, %eax
#endif
        movddup ALPHA, %xmm3
        andl    $15, %eax       # k & 15
        BRANCH
        je      .L58

.L56:
        mulpd   %xmm0, %xmm2
        movddup  1 * SIZE(AA), %xmm0
        addpd   %xmm2, %xmm4
        movapd   2 * SIZE(BB), %xmm2

        addl    $1 * SIZE, AA
        addl    $2 * SIZE, BB
        decl    %eax
        jg      .L56
        ALIGN_4

.L58:
        addpd   %xmm5, %xmm4
        addpd   %xmm7, %xmm6
        addpd   %xmm6, %xmm4

        mulpd   %xmm3, %xmm4

#ifndef TRMMKERNEL
#ifdef PENTIUM4
        SHUFPD_2 %xmm0, %xmm0
#endif

        movsd   0 * SIZE(%esi), %xmm0
        movhpd  0 * SIZE(%esi, LDC, 1), %xmm0

        addpd   %xmm0, %xmm4
#endif

        movsd   %xmm4, 0 * SIZE(%esi)
        movhpd  %xmm4, 0 * SIZE(%esi, LDC, 1)

#if (defined(TRMMKERNEL) &&  defined(LEFT) &&  defined(TRANSA)) || \
        (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
        movl    K, %eax
        subl    KKK, %eax
        leal    (,%eax, SIZE), %eax
        leal    (AA, %eax, 1), AA
        leal    (BB, %eax, 2), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
        addl    $1, KK
#endif
        ALIGN_4

.L59:
#if defined(TRMMKERNEL) && !defined(LEFT)
        addl    $2, KK
#endif

        leal    (, LDC, 2), %eax
        movl    BB, B
        addl    %eax, C         # c += 2 * ldc
        ALIGN_4
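
/* Added note: .L60 handles the last column when n is odd.  .L71 computes
   2x1 blocks, this time reading A pairs with movapd while the single B
   value per k-step is broadcast with movddup; the remainder of the
   routine continues the same pattern.                                  */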
.L60:
	testl	$1, N
	je	.L999

	movl	C, %esi			# coffset = c
	movl	A, AA			# aoffset = a

#if defined(TRMMKERNEL) && defined(LEFT)
	movl	OFFSET, %eax
	movl	%eax, KK
#endif

	movl	M, %ebx
	sarl	$1, %ebx		# i = (m >> 1)
	jle	.L80
	ALIGN_4
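
# .L71: 2x1 sub-kernel -- two rows of A against the single remaining
# column of B.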
.L71:
#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	movl	B, BB
#else
	movl	KK, %eax
	leal	(, %eax, SIZE), %eax
	leal	(AA, %eax, 2), AA
	leal	(B, %eax, 1), BB
#endif

	movapd	0 * SIZE(AA), %xmm0
	pxor	%xmm4, %xmm4
	movapd	8 * SIZE(AA), %xmm1
	pxor	%xmm5, %xmm5
	movddup	0 * SIZE(BB), %xmm2
	pxor	%xmm6, %xmm6
	movddup	4 * SIZE(BB), %xmm3
	pxor	%xmm7, %xmm7

#ifdef PENTIUM4
	prefetchnta 3 * SIZE(%esi)
#endif

#ifndef TRMMKERNEL
	movl	K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl	K, %eax
	subl	KK, %eax
	movl	%eax, KKK
#else
	movl	KK, %eax
#ifdef LEFT
	addl	$2, %eax
#else
	addl	$1, %eax
#endif
	movl	%eax, KKK
#endif
	sarl	$3, %eax
	je	.L75
	ALIGN_4
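
# .L72: main K loop, unrolled 8x -- each pass consumes 16 doubles of A
# (2 rows x 8 k) and 8 doubles of B.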
.L72:
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
	mulpd	%xmm2, %xmm0
	movddup	1 * SIZE(BB), %xmm2
	addpd	%xmm0, %xmm4
	movapd	16 * SIZE(AA), %xmm0
	mulpd	2 * SIZE(AA), %xmm2
	addpd	%xmm2, %xmm5
	movddup	2 * SIZE(BB), %xmm2
	mulpd	4 * SIZE(AA), %xmm2
	addpd	%xmm2, %xmm6
	movddup	3 * SIZE(BB), %xmm2
	mulpd	6 * SIZE(AA), %xmm2
	addpd	%xmm2, %xmm7
	movddup	8 * SIZE(BB), %xmm2
	mulpd	%xmm3, %xmm1
	movddup	5 * SIZE(BB), %xmm3
	addpd	%xmm1, %xmm4
	movapd	24 * SIZE(AA), %xmm1
	mulpd	10 * SIZE(AA), %xmm3
	addpd	%xmm3, %xmm5
	movddup	6 * SIZE(BB), %xmm3
	mulpd	12 * SIZE(AA), %xmm3
	addpd	%xmm3, %xmm6
	movddup	7 * SIZE(BB), %xmm3
	mulpd	14 * SIZE(AA), %xmm3
	addpd	%xmm3, %xmm7
	movddup	12 * SIZE(BB), %xmm3

	addl	$16 * SIZE, AA
	addl	$ 8 * SIZE, BB
	decl	%eax
	jne	.L72
	ALIGN_4
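
# .L75: K remainder (k & 7) for the 2x1 block.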
.L75:
#ifndef TRMMKERNEL
	movl	K, %eax
#else
	movl	KKK, %eax
#endif
	movddup	ALPHA, %xmm3
	andl	$7, %eax		# if (k & 7)
	BRANCH
	je	.L78
	ALIGN_3
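
# .L76: one k step per pass -- two a values times one broadcast b value.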
.L76:
	mulpd	%xmm2, %xmm0
	movddup	1 * SIZE(BB), %xmm2
	addpd	%xmm0, %xmm4
	movapd	2 * SIZE(AA), %xmm0

	addl	$2 * SIZE, AA
	addl	$1 * SIZE, BB
	decl	%eax
	jg	.L76
	ALIGN_4
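
# .L78: reduce, scale by alpha, and update two consecutive C entries in
# the current column; then step to the next pair of rows.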
.L78:
	addpd	%xmm5, %xmm4
	addpd	%xmm7, %xmm6
	addpd	%xmm6, %xmm4

	mulpd	%xmm3, %xmm4

#ifndef TRMMKERNEL
#ifdef PENTIUM4
	SHUFPD_2 %xmm0, %xmm0
#endif

	movsd	0 * SIZE(%esi), %xmm0
	movhpd	1 * SIZE(%esi), %xmm0

	addpd	%xmm0, %xmm4
#endif

	movsd	%xmm4, 0 * SIZE(%esi)
	movhpd	%xmm4, 1 * SIZE(%esi)

#if (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))
	movl	K, %eax
	subl	KKK, %eax
	leal	(,%eax, SIZE), %eax
	leal	(AA, %eax, 2), AA
	leal	(BB, %eax, 1), BB
#endif

#if defined(TRMMKERNEL) && defined(LEFT)
	addl	$2, KK
#endif

	addl	$2 * SIZE, %esi		# coffset += 2
	decl	%ebx			# i --
	jg	.L71
	ALIGN_4
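
# .L80: last 1x1 block when M is odd.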
.L80:
	movl	M, %ebx
	testl	$1, %ebx		# if (m & 1)
	jle	.L999

#if !defined(TRMMKERNEL) || \
    (defined(TRMMKERNEL) && defined(LEFT) && defined(TRANSA)) || \
    (defined(TRMMKERNEL) && !defined(LEFT) && !defined(TRANSA))

	movl	B, BB
#else
	movl	KK, %eax
	leal	(, %eax, SIZE), %eax
	leal	(AA, %eax, 1), AA
	leal	(B, %eax, 1), BB
#endif

	movapd	0 * SIZE(AA), %xmm0
	pxor	%xmm4, %xmm4
	movapd	8 * SIZE(AA), %xmm1
	pxor	%xmm5, %xmm5
	movapd	0 * SIZE(BB), %xmm2
	pxor	%xmm6, %xmm6
	movapd	8 * SIZE(BB), %xmm3
	pxor	%xmm7, %xmm7

#ifndef TRMMKERNEL
	movl	K, %eax
#elif (defined(LEFT) && !defined(TRANSA)) || (!defined(LEFT) && defined(TRANSA))
	movl	K, %eax
	subl	KK, %eax
	movl	%eax, KKK
#else
	movl	KK, %eax
	addl	$1, %eax		# 1x1 block: increment is 1 for both LEFT and RIGHT
	movl	%eax, KKK
#endif
	sarl	$4, %eax
	je	.L85
	ALIGN_4
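
# .L82: main K loop, unrolled 16x -- 16 doubles of A against 16 doubles
# of B per pass, two k steps per packed operation.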
.L82:
	PREFETCH (PREFETCHSIZE + 0) * SIZE(AA)
	mulpd	%xmm0, %xmm2
	movapd	2 * SIZE(AA), %xmm0
	addpd	%xmm2, %xmm4
	mulpd	2 * SIZE(BB), %xmm0
	movapd	16 * SIZE(BB), %xmm2
	addpd	%xmm0, %xmm5
	movapd	4 * SIZE(AA), %xmm0
	mulpd	4 * SIZE(BB), %xmm0
	addpd	%xmm0, %xmm6
	movapd	6 * SIZE(AA), %xmm0
	mulpd	6 * SIZE(BB), %xmm0
	addpd	%xmm0, %xmm7
	movapd	16 * SIZE(AA), %xmm0
	mulpd	%xmm1, %xmm3
	movapd	10 * SIZE(AA), %xmm1
	addpd	%xmm3, %xmm4
	mulpd	10 * SIZE(BB), %xmm1
	movapd	24 * SIZE(BB), %xmm3
	addpd	%xmm1, %xmm5
	movapd	12 * SIZE(AA), %xmm1
	mulpd	12 * SIZE(BB), %xmm1
	addpd	%xmm1, %xmm6
	movapd	14 * SIZE(AA), %xmm1
	mulpd	14 * SIZE(BB), %xmm1
	addpd	%xmm1, %xmm7
	movapd	24 * SIZE(AA), %xmm1

	addl	$16 * SIZE, AA
	addl	$16 * SIZE, BB
	decl	%eax
	jne	.L82
	ALIGN_4
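
# .L85: K remainder (k & 15) for the 1x1 block, handled in scalar SSE2.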
.L85:
#ifndef TRMMKERNEL
	movl	K, %eax
#else
	movl	KKK, %eax
#endif
	movddup	ALPHA, %xmm3
	andl	$15, %eax		# if (k & 15)
	BRANCH
	je	.L88
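
# .L86: scalar remainder loop -- one multiply-add per k step.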
.L86:
	mulsd	%xmm0, %xmm2
	movsd	1 * SIZE(AA), %xmm0
	addsd	%xmm2, %xmm4
	movsd	1 * SIZE(BB), %xmm2

	addl	$1 * SIZE, AA
	addl	$1 * SIZE, BB
	decl	%eax
	jg	.L86
	ALIGN_4
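
# .L88: horizontal sum of the accumulators, scale by alpha, and update
# the single remaining C entry.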
.L88:
	addpd	%xmm5, %xmm4
	addpd	%xmm7, %xmm6
	addpd	%xmm6, %xmm4

	haddpd	%xmm4, %xmm4

	mulsd	%xmm3, %xmm4

#ifndef TRMMKERNEL
#ifdef PENTIUM4
	SHUFPD_2 %xmm0, %xmm0
#endif

	movsd	0 * SIZE(%esi), %xmm0

	addsd	%xmm0, %xmm4
#endif

	movsd	%xmm4, 0 * SIZE(%esi)
	ALIGN_4
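
# .L999: epilogue -- restore callee-saved registers and release the local frame.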
.L999:
	popl	%ebx
	popl	%esi
	popl	%edi
	popl	%ebp

	addl	$ARGS, %esp
	ret

	EPILOGUE