/* thirdparty/openblas/xianyi-OpenBLAS-e6e87a2/kernel/x86_64/ztrsm_kernel_LN_2x2_core2.S */

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/

#define ASSEMBLER
#include "common.h"

#define OLD_M	%rdi
#define OLD_N	%rsi
#define M	%r13
#define N	%r14
#define K	%rdx

#define A	%rcx
#define B	%r8
#define C	%r9
#define LDC	%r10

#define I	%r11
#define AO	%rdi
#define BO	%rsi
#define	CO1	%rbx
#define CO2	%rbp
#define BB	%r12

#ifndef WINDOWS_ABI

#define STACKSIZE 64

#define OLD_LDC		 8 + STACKSIZE(%rsp)
#define OLD_OFFSET	16 + STACKSIZE(%rsp)

#else

#define STACKSIZE 256

#define OLD_A		48 + STACKSIZE(%rsp)
#define OLD_B		56 + STACKSIZE(%rsp)
#define OLD_C		64 + STACKSIZE(%rsp)
#define OLD_LDC		72 + STACKSIZE(%rsp)
#define OLD_OFFSET	80 + STACKSIZE(%rsp)

#endif

#define POSINV	  0(%rsp)
#define J	 16(%rsp)
#define OFFSET	 24(%rsp)
#define KK	 32(%rsp)
#define KKK	 40(%rsp)
#define AORIG    48(%rsp)
#define BORIG	 56(%rsp)
#define BUFFER	128(%rsp)

#define PREFETCH_R    (8 * 4 + 0)
#define PREFETCH_W    (PREFETCH_R)

#define PREFETCHSIZE  (8 * 17 + 2)
#define PREFETCH     prefetcht0

#ifndef CONJ
#define NN
#else
#if defined(LN) || defined(LT)
#define CN
#else
#define NC
#endif
#endif

#define ADD1	  addpd
#define ADD2	  addpd

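/* Editorial summary (added by the editor, not in the original source, and
 * inferred from the #defines and the code below): M, N, K hold the problem
 * dimensions and A, B, C the matrix pointers, with LDC the leading dimension
 * of C (scaled below by ZBASE_SHIFT to complex-element bytes).  AO and BO
 * walk the current A panel and the packed copy of B kept in BUFFER on the
 * local stack frame; CO1/CO2 point at the (up to two) C columns written per
 * outer-loop iteration, and BB is a prefetch pointer into B.  OFFSET/KK/KKK
 * carry the TRSM offset bookkeeping, AORIG/BORIG the saved panel origins,
 * and POSINV is a sign mask used for the conjugation variants. */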
	PROLOGUE
	PROFCODE

	subq	$STACKSIZE, %rsp

	movq	%rbx,  0(%rsp)
	movq	%rbp,  8(%rsp)
	movq	%r12, 16(%rsp)
	movq	%r13, 24(%rsp)
	movq	%r14, 32(%rsp)
	movq	%r15, 40(%rsp)

#ifdef WINDOWS_ABI
	movq	%rdi,    48(%rsp)
	movq	%rsi,    56(%rsp)
	movups	%xmm6,   64(%rsp)
	movups	%xmm7,   80(%rsp)
	movups	%xmm8,   96(%rsp)
	movups	%xmm9,  112(%rsp)
	movups	%xmm10, 128(%rsp)
	movups	%xmm11, 144(%rsp)
	movups	%xmm12, 160(%rsp)
	movups	%xmm13, 176(%rsp)
	movups	%xmm14, 192(%rsp)
	movups	%xmm15, 208(%rsp)

	movq	ARG1,      OLD_M
	movq	ARG2,      OLD_N
	movq	ARG3,      K
	movq	OLD_A,     A
	movq	OLD_B,     B
	movq	OLD_C,     C
#endif

	movq	OLD_LDC,    LDC
	movq	OLD_OFFSET, %rax

	movq	%rsp, %r15	# save old stack
	subq	$128 + LOCAL_BUFFER_SIZE, %rsp
	andq	$-4096, %rsp	# align stack

	STACK_TOUCHING

	movq	%rax, KK
	movq	%rax, OFFSET

	movq	OLD_M, M
	movq	OLD_N, N

	subq	$-16 * SIZE, A
	subq	$-16 * SIZE, B

	pcmpeqb	%xmm15, %xmm15
	psllq	$63, %xmm15	# Generate mask
	pxor	%xmm2, %xmm2

	movlpd	  %xmm2,  0 + POSINV
	movlpd	  %xmm15, 8 + POSINV

	salq	$ZBASE_SHIFT, LDC

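/* Editorial note (not part of the original source): the pcmpeqb/psllq/movlpd
 * sequence above leaves POSINV = { +0.0, -0.0 }, i.e. a sign mask on the
 * high (imaginary) double only, so xorpd against POSINV negates just the
 * second double of a packed complex value.  Which accumulators receive this
 * sign flip is selected later by the NN/NC/CN-style conditional blocks. */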
#ifdef LN
       movq	M, %rax
       salq	$ZBASE_SHIFT, %rax
       addq	%rax, C
       imulq	K, %rax
       addq	%rax, A
#endif

#ifdef RT
       movq	N, %rax
       salq	$ZBASE_SHIFT, %rax
       imulq	K, %rax
       addq	%rax, B

       movq	N, %rax
       imulq	LDC, %rax
       addq	%rax, C
#endif

#ifdef RN
	negq	KK
#endif

#ifdef RT
       movq	N, %rax
       subq	OFFSET, %rax
       movq	%rax, KK
#endif

	movq	N,  J
	sarq	$1, J		# j = (n >> 1)
	jle	.L100
	ALIGN_4

.L01:
#ifdef LN
	movq	OFFSET, %rax
	addq	M, %rax
	movq	%rax, KK
#endif

	leaq	16 * SIZE +  BUFFER, BO

#ifdef RT
       movq	K, %rax
       salq	$1 + ZBASE_SHIFT, %rax
       subq	%rax, B
#endif

#if defined(LN) || defined(RT)
	movq	KK, %rax
	movq	B, BORIG
	salq	$ZBASE_SHIFT, %rax
	leaq	(B,  %rax, 2), B
	leaq	(BO, %rax, 4), BO
#endif

#if defined(LT)
	movq	OFFSET, %rax
	movq	%rax, KK
#endif

#if defined(LT) || defined(RN)
	movq	KK, %rax
#else
	movq	K, %rax
	subq	KK, %rax
#endif
	sarq	$2, %rax
	jle	.L03

	addq	%rax, %rax
	ALIGN_4

.L02:
	prefetcht0	(PREFETCH_R + 0) * SIZE(B)

	movddup	 -16 * SIZE(B), %xmm8
	movddup	 -15 * SIZE(B), %xmm9
	movddup	 -14 * SIZE(B), %xmm10
	movddup	 -13 * SIZE(B), %xmm11
	movddup	 -12 * SIZE(B), %xmm12
	movddup	 -11 * SIZE(B), %xmm13
	movddup	 -10 * SIZE(B), %xmm14
	movddup	  -9 * SIZE(B), %xmm15

	prefetcht0	(PREFETCH_W + 0) * SIZE(BO)

	movapd	%xmm8,  -16 * SIZE(BO)
	movapd	%xmm9,  -14 * SIZE(BO)
	movapd	%xmm10,  -12 * SIZE(BO)
	movapd	%xmm11,  -10 * SIZE(BO)

	prefetcht0	(PREFETCH_W + 8) * SIZE(BO)

	movapd	%xmm12,   -8 * SIZE(BO)
	movapd	%xmm13,   -6 * SIZE(BO)
	movapd	%xmm14,   -4 * SIZE(BO)
	movapd	%xmm15,   -2 * SIZE(BO)

	addq	$  8 * SIZE, B
	subq	$-16 * SIZE, BO
	decq	%rax
	jne	.L02
	ALIGN_4

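/* Editorial note (not part of the original source): the .L02 loop above and
 * the .L04 tail below pack the current B panel into BUFFER, using movddup so
 * that every real and imaginary scalar is duplicated across a full 16-byte
 * lane.  The compute loops can then feed mulpd straight from BO without any
 * further broadcasting. */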
.L03:
#if defined(LT) || defined(RN)
	movq	KK, %rax
#else
	movq	K, %rax
	subq	KK, %rax
#endif
	andq	$3, %rax
	BRANCH
	jle	.L05
	ALIGN_4

.L04:
	movddup	 -16 * SIZE(B), %xmm8
	movddup	 -15 * SIZE(B), %xmm9
	movddup	 -14 * SIZE(B), %xmm10
	movddup	 -13 * SIZE(B), %xmm11

	movapd	%xmm8,  -16 * SIZE(BO)
	movapd	%xmm9,  -14 * SIZE(BO)
	movapd	%xmm10,  -12 * SIZE(BO)
	movapd	%xmm11,  -10 * SIZE(BO)

	addq	$ 4 * SIZE, B
	addq	$ 8 * SIZE, BO

	decq	%rax
	jne	.L04
	ALIGN_4

.L05:
#if defined(LT) || defined(RN)
	movq	A, AO
#else
	movq	A, AORIG
#endif

#ifdef RT
       leaq	(, LDC, 2), %rax
       subq	%rax, C
#endif

	movq	C, CO1
	leaq	(C, LDC, 1), CO2

#ifndef RT
	leaq	(C, LDC, 2), C
#endif

	testq	$1, M
	jle	.L30

#ifdef LN
       movq	K, %rax
       salq	$0 + ZBASE_SHIFT, %rax
       subq	%rax, AORIG
#endif

#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	addq	%rax, AO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	salq	$1 + ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
	pxor	%xmm8, %xmm8
kusano 2b45e8
	pxor	%xmm9, %xmm9
kusano 2b45e8
	pxor	%xmm10, %xmm10
kusano 2b45e8
	pxor	%xmm11, %xmm11
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	sarq	$2, %rax
kusano 2b45e8
	je	.L42
kusano 2b45e8
kusano 2b45e8
.L41:
kusano 2b45e8
	PREFETCH (PREFETCHSIZE +  0) * SIZE(AO)
kusano 2b45e8
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	-12 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	-10 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm0, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	 -8 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 -6 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	 -4 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 -2 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm0, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	-12 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	  0 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	  2 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	  4 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	  6 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm0, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	-10 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	  8 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 10 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	 12 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 14 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm0, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	subq	$ -8 * SIZE, AO
kusano 2b45e8
	subq	$-32 * SIZE, BO
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	jne    .L41
kusano 2b45e8
kusano 2b45e8
.L42:
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	movapd	POSINV,  %xmm7
kusano 2b45e8
kusano 2b45e8
	andq	$3, %rax		# if (k & 3)
kusano 2b45e8
	BRANCH
kusano 2b45e8
	jle .L44
kusano 2b45e8
kusano 2b45e8
.L43:
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	-12 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	-10 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm0, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	addq	$2 * SIZE, AO
kusano 2b45e8
	addq	$8 * SIZE, BO
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	jg	.L43
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L44:
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
#else
kusano 2b45e8
	subq	$2, %rax
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 1), AO
kusano 2b45e8
	leaq	(B,  %rax, 2), B
kusano 2b45e8
	leaq	(BO, %rax, 4), BO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	SHUFPD_1 %xmm9, %xmm9
kusano 2b45e8
	SHUFPD_1 %xmm11, %xmm11
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
kusano 2b45e8
	xorpd	%xmm7, %xmm9
kusano 2b45e8
	xorpd	%xmm7, %xmm11
kusano 2b45e8
#else
kusano 2b45e8
	xorpd	%xmm7, %xmm8
kusano 2b45e8
	xorpd	%xmm7, %xmm10
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
kusano 2b45e8
	subpd	%xmm9, %xmm8
kusano 2b45e8
	subpd	%xmm11, %xmm10
kusano 2b45e8
#else
kusano 2b45e8
	addpd	%xmm9, %xmm8
kusano 2b45e8
	addpd	%xmm11, %xmm10
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	-16 * SIZE(B), %xmm9
kusano 2b45e8
	movapd	-14 * SIZE(B), %xmm11
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,  %xmm9
kusano 2b45e8
	subpd	%xmm10,  %xmm11
kusano 2b45e8
#else
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm9
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm11
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,  %xmm9
kusano 2b45e8
	subpd	%xmm10,  %xmm11
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef CONJ
kusano 2b45e8
	SHUFPD_1 %xmm7, %xmm7
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movddup	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(AO), %xmm1
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
	mulpd	 %xmm0, %xmm11
kusano 2b45e8
	mulpd	 %xmm1, %xmm10
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RN
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm1
kusano 2b45e8
	movddup	-14 * SIZE(B), %xmm2
kusano 2b45e8
	movddup	-13 * SIZE(B), %xmm3
kusano 2b45e8
	movddup	-10 * SIZE(B), %xmm4
kusano 2b45e8
	movddup	 -9 * SIZE(B), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm9, %xmm8
kusano 2b45e8
	pshufd	 $0x4e, %xmm9, %xmm12
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm11
kusano 2b45e8
	subpd	 %xmm12, %xmm11
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm11
kusano 2b45e8
	mulpd	 %xmm5, %xmm10
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movddup	-10 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	 -9 * SIZE(B), %xmm1
kusano 2b45e8
	movddup	-12 * SIZE(B), %xmm2
kusano 2b45e8
	movddup	-11 * SIZE(B), %xmm3
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm4
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm11
kusano 2b45e8
	mulpd	 %xmm1, %xmm10
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm11, %xmm8
kusano 2b45e8
	pshufd	 $0x4e, %xmm11, %xmm12
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm9
kusano 2b45e8
	subpd	 %xmm12, %xmm9
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm9
kusano 2b45e8
	mulpd	 %xmm5, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$2 * SIZE, CO1
kusano 2b45e8
	subq	$2 * SIZE, CO2
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movsd	%xmm9,   0 * SIZE(CO1)
kusano 2b45e8
	movhpd	%xmm9,   1 * SIZE(CO1)
kusano 2b45e8
kusano 2b45e8
	movsd	%xmm11,  0 * SIZE(CO2)
kusano 2b45e8
	movhpd	%xmm11,  1 * SIZE(CO2)
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	%xmm9,  -16 * SIZE(B)
kusano 2b45e8
	movapd	%xmm11, -14 * SIZE(B)
kusano 2b45e8
kusano 2b45e8
	movddup	%xmm9,  %xmm8
kusano 2b45e8
	unpckhpd %xmm9,  %xmm9
kusano 2b45e8
	movddup	%xmm11, %xmm10
kusano 2b45e8
	unpckhpd %xmm11, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	%xmm8,  -16 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm9,  -14 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm10, -12 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm11, -10 * SIZE(BO)
kusano 2b45e8
#else
kusano 2b45e8
	movapd	%xmm9,  -16 * SIZE(AO)
kusano 2b45e8
	movapd	%xmm11, -14 * SIZE(AO)
kusano 2b45e8
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef LN
kusano 2b45e8
	addq	$2 * SIZE, CO1
kusano 2b45e8
	addq	$2 * SIZE, CO2
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	K,  %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 1), AO
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$4 * SIZE, B
kusano 2b45e8
#endif
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$1, KK
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$1, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
	addq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L30:
kusano 2b45e8
	movq	M,  I
kusano 2b45e8
	sarq	$1, I		# i = (m >> 1)
kusano 2b45e8
	jle	.L99
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L10:
kusano 2b45e8
	leaq	(PREFETCH_R +  0) * SIZE(B), BB
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
       movq	K, %rax
kusano 2b45e8
       salq	$1 + ZBASE_SHIFT, %rax
kusano 2b45e8
       subq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 2), AO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	salq	$1 + ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
	prefetcht2	0 * SIZE(BB)
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	pxor	%xmm8, %xmm8
kusano 2b45e8
	prefetcht1    -3 * SIZE(CO1)
kusano 2b45e8
	pxor	%xmm9, %xmm9
kusano 2b45e8
	pxor	%xmm10, %xmm10
kusano 2b45e8
	prefetcht1    -3 * SIZE(CO2)
kusano 2b45e8
	pxor	%xmm11, %xmm11
kusano 2b45e8
#else
kusano 2b45e8
	pxor	%xmm8, %xmm8
kusano 2b45e8
	prefetcht1     3 * SIZE(CO1)
kusano 2b45e8
	pxor	%xmm9, %xmm9
kusano 2b45e8
	pxor	%xmm10, %xmm10
kusano 2b45e8
	prefetcht1     3 * SIZE(CO2)
kusano 2b45e8
	pxor	%xmm11, %xmm11
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	pxor	%xmm12, %xmm12
kusano 2b45e8
	pxor	%xmm13, %xmm13
kusano 2b45e8
	pxor	%xmm14, %xmm14
kusano 2b45e8
	pxor	%xmm15, %xmm15
kusano 2b45e8
kusano 2b45e8
	pxor	%xmm2, %xmm2
kusano 2b45e8
	pxor	%xmm3, %xmm3
kusano 2b45e8
	pxor	%xmm4, %xmm4
kusano 2b45e8
	pxor	%xmm5, %xmm5
kusano 2b45e8
kusano 2b45e8
	subq		$-8 * SIZE, BB
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	sarq	$2, %rax
kusano 2b45e8
	NOBRANCH
kusano 2b45e8
	jle	.L15
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L12:	
kusano 2b45e8
	PREFETCH (PREFETCHSIZE +  0) * SIZE(AO)
kusano 2b45e8
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm1
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm4
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	movapd	-12 * SIZE(BO), %xmm2
kusano 2b45e8
	ADD1	%xmm3, %xmm12
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm9
kusano 2b45e8
	movapd	-10 * SIZE(BO), %xmm4
kusano 2b45e8
	ADD2	%xmm5, %xmm13
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	movapd	-12 * SIZE(AO), %xmm0
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	movapd	 -8 * SIZE(BO), %xmm2
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	movapd	-10 * SIZE(AO), %xmm1
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
	movapd	-6 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD1	%xmm3, %xmm12
kusano 2b45e8
	movapd	-4 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm9
kusano 2b45e8
	ADD2	%xmm5, %xmm13
kusano 2b45e8
	movapd	-2 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	PREFETCH (PREFETCHSIZE +  8) * SIZE(AO)
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	movapd	-8 * SIZE(AO), %xmm0
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	movapd	 0 * SIZE(BO), %xmm2
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	movapd	-6 * SIZE(AO), %xmm1
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	movapd	 2 * SIZE(BO), %xmm4
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	movapd	 4 * SIZE(BO), %xmm2
kusano 2b45e8
	ADD1	%xmm3, %xmm12
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm9
kusano 2b45e8
	movapd	 6 * SIZE(BO), %xmm4
kusano 2b45e8
	ADD2	%xmm5, %xmm13
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	movapd	-4 * SIZE(AO), %xmm0
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	movapd	 8 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	movapd	-2 * SIZE(AO), %xmm1
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	movapd	10 * SIZE(BO), %xmm4
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
	subq	$-32 * SIZE, BO
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD1	%xmm3, %xmm12
kusano 2b45e8
	movapd	-20 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	subq	$-16 * SIZE, AO
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	ADD2	%xmm4, %xmm9
kusano 2b45e8
	ADD2	%xmm5, %xmm13
kusano 2b45e8
	movapd	-18 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	BRANCH
kusano 2b45e8
	BRANCH
kusano 2b45e8
	jg	.L12
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L15:
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	movapd	POSINV,  %xmm7
kusano 2b45e8
kusano 2b45e8
	andq	$3, %rax
kusano 2b45e8
	BRANCH
kusano 2b45e8
	BRANCH
kusano 2b45e8
	je	.L19
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L16:
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm1
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD1	%xmm3, %xmm12
kusano 2b45e8
	ADD2	%xmm4, %xmm9
kusano 2b45e8
	ADD2	%xmm5, %xmm13
kusano 2b45e8
kusano 2b45e8
	movapd	-12 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 %xmm2, %xmm3
kusano 2b45e8
	movapd	-10 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 %xmm4, %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm1, %xmm3
kusano 2b45e8
	mulpd	%xmm0, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	addq	$4 * SIZE, AO
kusano 2b45e8
	addq	$8 * SIZE, BO
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	BRANCH
kusano 2b45e8
	jg	.L16
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L19:
kusano 2b45e8
	ADD1	%xmm2, %xmm10
kusano 2b45e8
	ADD1	%xmm3, %xmm14
kusano 2b45e8
	ADD2	%xmm4, %xmm11
kusano 2b45e8
	ADD2	%xmm5, %xmm15
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$2, %rax
kusano 2b45e8
#else
kusano 2b45e8
	subq	$2, %rax
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 2), AO
kusano 2b45e8
	leaq	(B,  %rax, 2), B
kusano 2b45e8
	leaq	(BO, %rax, 4), BO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	SHUFPD_1 %xmm9,  %xmm9
kusano 2b45e8
	SHUFPD_1 %xmm11, %xmm11
kusano 2b45e8
	SHUFPD_1 %xmm13, %xmm13
kusano 2b45e8
	SHUFPD_1 %xmm15, %xmm15
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
kusano 2b45e8
	xorpd	%xmm7, %xmm9
kusano 2b45e8
	xorpd	%xmm7, %xmm11
kusano 2b45e8
	xorpd	%xmm7, %xmm13
kusano 2b45e8
	xorpd	%xmm7, %xmm15
kusano 2b45e8
#else
kusano 2b45e8
	xorpd	%xmm7, %xmm8
kusano 2b45e8
	xorpd	%xmm7, %xmm10
kusano 2b45e8
	xorpd	%xmm7, %xmm12
kusano 2b45e8
	xorpd	%xmm7, %xmm14
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
kusano 2b45e8
	subpd	%xmm9,  %xmm8
kusano 2b45e8
	subpd	%xmm11, %xmm10
kusano 2b45e8
	subpd	%xmm13, %xmm12
kusano 2b45e8
	subpd	%xmm15, %xmm14
kusano 2b45e8
#else
kusano 2b45e8
	addpd	%xmm9,  %xmm8
kusano 2b45e8
	addpd	%xmm11, %xmm10
kusano 2b45e8
	addpd	%xmm13, %xmm12
kusano 2b45e8
	addpd	%xmm15, %xmm14
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	-16 * SIZE(B), %xmm9
kusano 2b45e8
	movapd	-14 * SIZE(B), %xmm11
kusano 2b45e8
	movapd	-12 * SIZE(B), %xmm13
kusano 2b45e8
	movapd	-10 * SIZE(B), %xmm15
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,   %xmm9
kusano 2b45e8
	subpd	%xmm10,  %xmm11
kusano 2b45e8
	subpd	%xmm12,  %xmm13
kusano 2b45e8
	subpd	%xmm14,  %xmm15
kusano 2b45e8
#else
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm9
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm13
kusano 2b45e8
	movapd	-12 * SIZE(AO), %xmm11
kusano 2b45e8
	movapd	-10 * SIZE(AO), %xmm15
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,   %xmm9
kusano 2b45e8
	subpd	%xmm10,  %xmm11
kusano 2b45e8
	subpd	%xmm12,  %xmm13
kusano 2b45e8
	subpd	%xmm14,  %xmm15
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef CONJ
kusano 2b45e8
	SHUFPD_1 %xmm7, %xmm7
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	movddup	-10 * SIZE(AO), %xmm0
kusano 2b45e8
	movddup	 -9 * SIZE(AO), %xmm1
kusano 2b45e8
	movddup	-12 * SIZE(AO), %xmm2
kusano 2b45e8
	movddup	-11 * SIZE(AO), %xmm3
kusano 2b45e8
	movddup	-16 * SIZE(AO), %xmm4
kusano 2b45e8
	movddup	-15 * SIZE(AO), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm13, %xmm12
kusano 2b45e8
	pshufd	$0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm13
kusano 2b45e8
	mulpd	 %xmm1, %xmm12
kusano 2b45e8
	mulpd	 %xmm0, %xmm15
kusano 2b45e8
	mulpd	 %xmm1, %xmm14
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm12, %xmm13
kusano 2b45e8
	addpd	 %xmm14, %xmm15
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm13, %xmm8
kusano 2b45e8
	movapd	 %xmm15, %xmm10
kusano 2b45e8
	pshufd	 $0x4e, %xmm13, %xmm12
kusano 2b45e8
	pshufd	 $0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm2, %xmm10
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
	mulpd	 %xmm3, %xmm14
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm9
kusano 2b45e8
	subpd	 %xmm10, %xmm11
kusano 2b45e8
	subpd	 %xmm12, %xmm9
kusano 2b45e8
	subpd	 %xmm14, %xmm11
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm9
kusano 2b45e8
	mulpd	 %xmm5, %xmm8
kusano 2b45e8
	mulpd	 %xmm4, %xmm11
kusano 2b45e8
	mulpd	 %xmm5, %xmm10
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	movddup	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(AO), %xmm1
kusano 2b45e8
	movddup	-14 * SIZE(AO), %xmm2
kusano 2b45e8
	movddup	-13 * SIZE(AO), %xmm3
kusano 2b45e8
	movddup	-10 * SIZE(AO), %xmm4
kusano 2b45e8
	movddup	 -9 * SIZE(AO), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
	mulpd	 %xmm0, %xmm11
kusano 2b45e8
	mulpd	 %xmm1, %xmm10
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm9, %xmm8
kusano 2b45e8
	movapd	 %xmm11, %xmm10
kusano 2b45e8
	pshufd	 $0x4e, %xmm9, %xmm12
kusano 2b45e8
	pshufd	 $0x4e, %xmm11, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm2, %xmm10
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
	mulpd	 %xmm3, %xmm14
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm13
kusano 2b45e8
	subpd	 %xmm10, %xmm15
kusano 2b45e8
	subpd	 %xmm12, %xmm13
kusano 2b45e8
	subpd	 %xmm14, %xmm15
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm13, %xmm12
kusano 2b45e8
	pshufd	$0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm13
kusano 2b45e8
	mulpd	 %xmm5, %xmm12
kusano 2b45e8
	mulpd	 %xmm4, %xmm15
kusano 2b45e8
	mulpd	 %xmm5, %xmm14
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm12, %xmm13
kusano 2b45e8
	addpd	 %xmm14, %xmm15
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RN
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm1
kusano 2b45e8
	movddup	-14 * SIZE(B), %xmm2
kusano 2b45e8
	movddup	-13 * SIZE(B), %xmm3
kusano 2b45e8
	movddup	-10 * SIZE(B), %xmm4
kusano 2b45e8
	movddup	 -9 * SIZE(B), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	pshufd	$0x4e, %xmm13, %xmm12
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
	mulpd	 %xmm0, %xmm13
kusano 2b45e8
	mulpd	 %xmm1, %xmm12
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
	addpd	 %xmm12, %xmm13
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm9, %xmm8
kusano 2b45e8
	movapd	 %xmm13, %xmm10
kusano 2b45e8
	pshufd	 $0x4e, %xmm9, %xmm12
kusano 2b45e8
	pshufd	 $0x4e, %xmm13, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm2, %xmm10
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
	mulpd	 %xmm3, %xmm14
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm11
kusano 2b45e8
	subpd	 %xmm10, %xmm15
kusano 2b45e8
	subpd	 %xmm12, %xmm11
kusano 2b45e8
	subpd	 %xmm14, %xmm15
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
	pshufd	$0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm11
kusano 2b45e8
	mulpd	 %xmm5, %xmm10
kusano 2b45e8
	mulpd	 %xmm4, %xmm15
kusano 2b45e8
	mulpd	 %xmm5, %xmm14
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
	addpd	 %xmm14, %xmm15
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movddup	-10 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	 -9 * SIZE(B), %xmm1
kusano 2b45e8
	movddup	-12 * SIZE(B), %xmm2
kusano 2b45e8
	movddup	-11 * SIZE(B), %xmm3
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm4
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm5
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm11, %xmm10
kusano 2b45e8
	pshufd	$0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm10
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm11
kusano 2b45e8
	mulpd	 %xmm1, %xmm10
kusano 2b45e8
	mulpd	 %xmm0, %xmm15
kusano 2b45e8
	mulpd	 %xmm1, %xmm14
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm10, %xmm11
kusano 2b45e8
	addpd	 %xmm14, %xmm15
kusano 2b45e8
kusano 2b45e8
	movapd	 %xmm11, %xmm8
kusano 2b45e8
	movapd	 %xmm15, %xmm10
kusano 2b45e8
	pshufd	 $0x4e, %xmm11, %xmm12
kusano 2b45e8
	pshufd	 $0x4e, %xmm15, %xmm14
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
	xorpd	 %xmm7, %xmm14
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm2, %xmm8
kusano 2b45e8
	mulpd	 %xmm2, %xmm10
kusano 2b45e8
	mulpd	 %xmm3, %xmm12
kusano 2b45e8
	mulpd	 %xmm3, %xmm14
kusano 2b45e8
kusano 2b45e8
	subpd	 %xmm8, %xmm9
kusano 2b45e8
	subpd	 %xmm10, %xmm13
kusano 2b45e8
	subpd	 %xmm12, %xmm9
kusano 2b45e8
	subpd	 %xmm14, %xmm13
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	pshufd	$0x4e, %xmm13, %xmm12
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm12
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm4, %xmm9
kusano 2b45e8
	mulpd	 %xmm5, %xmm8
kusano 2b45e8
	mulpd	 %xmm4, %xmm13
kusano 2b45e8
	mulpd	 %xmm5, %xmm12
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
	addpd	 %xmm12, %xmm13
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$4 * SIZE, CO1
kusano 2b45e8
	subq	$4 * SIZE, CO2
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movsd	%xmm9,    0 * SIZE(CO1)
kusano 2b45e8
	movhpd	%xmm9,    1 * SIZE(CO1)
kusano 2b45e8
	movsd	%xmm13,   2 * SIZE(CO1)
kusano 2b45e8
	movhpd	%xmm13,   3 * SIZE(CO1)
kusano 2b45e8
kusano 2b45e8
	movsd	%xmm11,   0 * SIZE(CO2)
kusano 2b45e8
	movhpd	%xmm11,   1 * SIZE(CO2)
kusano 2b45e8
	movsd	%xmm15,   2 * SIZE(CO2)
kusano 2b45e8
	movhpd	%xmm15,   3 * SIZE(CO2)
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	%xmm9,  -16 * SIZE(B)
kusano 2b45e8
	movapd	%xmm11, -14 * SIZE(B)
kusano 2b45e8
	movapd	%xmm13, -12 * SIZE(B)
kusano 2b45e8
	movapd	%xmm15, -10 * SIZE(B)
kusano 2b45e8
kusano 2b45e8
	movddup	%xmm9,  %xmm8
kusano 2b45e8
	unpckhpd %xmm9,  %xmm9
kusano 2b45e8
	movddup	%xmm11, %xmm10
kusano 2b45e8
	unpckhpd %xmm11, %xmm11
kusano 2b45e8
	movddup	%xmm13, %xmm12
kusano 2b45e8
	unpckhpd %xmm13, %xmm13
kusano 2b45e8
	movddup	%xmm15, %xmm14
kusano 2b45e8
	unpckhpd %xmm15, %xmm15
kusano 2b45e8
kusano 2b45e8
	movapd	%xmm8,  -16 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm9,  -14 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm10, -12 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm11, -10 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm12,  -8 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm13,  -6 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm14,  -4 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm15,  -2 * SIZE(BO)
kusano 2b45e8
#else
kusano 2b45e8
	movapd	%xmm9,  -16 * SIZE(AO)
kusano 2b45e8
	movapd	%xmm13, -14 * SIZE(AO)
kusano 2b45e8
	movapd	%xmm11, -12 * SIZE(AO)
kusano 2b45e8
	movapd	%xmm15, -10 * SIZE(AO)
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef LN
kusano 2b45e8
	addq	$4 * SIZE, CO1
kusano 2b45e8
	addq	$4 * SIZE, CO2
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	K,  %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 2), AO
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$8 * SIZE, B
kusano 2b45e8
#endif
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$2, KK
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$2, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	salq	$1 + ZBASE_SHIFT, %rax
kusano 2b45e8
	addq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	decq	I			# i --
kusano 2b45e8
	jg	.L10
kusano 2b45e8
	ALIGN_4	
kusano 2b45e8
kusano 2b45e8
.L99:
kusano 2b45e8
#ifdef LN
kusano 2b45e8
       leaq	(, K, SIZE), %rax
kusano 2b45e8
       leaq	(B, %rax, 4), B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	K,  %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
	leaq	(,%rax, SIZE), %rax
kusano 2b45e8
	leaq	(B,  %rax, 2 * COMPSIZE), B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RN
kusano 2b45e8
	addq	$2, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	subq	$2, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	decq	J			# j --
kusano 2b45e8
	jg	.L01
kusano 2b45e8
kusano 2b45e8
.L100:
kusano 2b45e8
	testq	$1, N
kusano 2b45e8
	jle	.L999
kusano 2b45e8
kusano 2b45e8
.L101:
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	movq	OFFSET, %rax
kusano 2b45e8
	addq	M, %rax
kusano 2b45e8
	movq	%rax, KK
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
	leaq	BUFFER, BO
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
       movq	K, %rax
kusano 2b45e8
       salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
       subq	%rax, B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	movq	B, BORIG
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(B,  %rax, 1), B
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
#if defined(LT)
kusano 2b45e8
	movq	OFFSET, %rax
kusano 2b45e8
	movq	%rax, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	sarq	$2, %rax
kusano 2b45e8
	jle	.L103
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
	
kusano 2b45e8
.L102:
kusano 2b45e8
	movddup	 -16 * SIZE(B), %xmm8
kusano 2b45e8
	movddup	 -15 * SIZE(B), %xmm9
kusano 2b45e8
	movddup	 -14 * SIZE(B), %xmm10
kusano 2b45e8
	movddup	 -13 * SIZE(B), %xmm11
kusano 2b45e8
	movddup	 -12 * SIZE(B), %xmm12
kusano 2b45e8
	movddup	 -11 * SIZE(B), %xmm13
kusano 2b45e8
	movddup	 -10 * SIZE(B), %xmm14
kusano 2b45e8
	movddup	  -9 * SIZE(B), %xmm15
kusano 2b45e8
kusano 2b45e8
	movapd	%xmm8,  0 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm9,  2 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm10,  4 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm11,  6 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm12,  8 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm13, 10 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm14, 12 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm15, 14 * SIZE(BO)
kusano 2b45e8
kusano 2b45e8
	addq	$  8 * SIZE, B
kusano 2b45e8
	subq	$-16 * SIZE, BO
kusano 2b45e8
	decq	%rax
kusano 2b45e8
	jne	.L102
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L103:
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	andq	$3, %rax
kusano 2b45e8
	BRANCH
kusano 2b45e8
	jle	.L105
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L104:
kusano 2b45e8
	movddup	 -16 * SIZE(B), %xmm8
kusano 2b45e8
	movddup	 -15 * SIZE(B), %xmm9
kusano 2b45e8
kusano 2b45e8
	movapd	%xmm8,  0 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm9,  2 * SIZE(BO)
kusano 2b45e8
kusano 2b45e8
	addq	$4 * SIZE, BO
kusano 2b45e8
	addq	$2 * SIZE, B
kusano 2b45e8
	decq	%rax
kusano 2b45e8
	jne	.L104
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
	
kusano 2b45e8
.L105:
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	A, AO
kusano 2b45e8
#else
kusano 2b45e8
	movq	A, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
       subq	LDC, C
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movq	C, CO1
kusano 2b45e8
#ifndef RT
kusano 2b45e8
	addq	LDC, C
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	testq	$1, M
kusano 2b45e8
	jle	.L130
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L140:
kusano 2b45e8
#ifdef LN
kusano 2b45e8
       movq	K, %rax
kusano 2b45e8
       salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
       subq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 1), AO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
	pxor	%xmm8, %xmm8
kusano 2b45e8
	pxor	%xmm9, %xmm9
kusano 2b45e8
	pxor	%xmm10, %xmm10
kusano 2b45e8
	pxor	%xmm11, %xmm11
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	sarq	$2, %rax
kusano 2b45e8
	je	.L142
kusano 2b45e8
kusano 2b45e8
.L141:
kusano 2b45e8
	PREFETCH (PREFETCHSIZE +  0) * SIZE(AO)
kusano 2b45e8
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	-14 * SIZE(AO), %xmm1
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	-12 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	-10 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm1, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	movapd	-12 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	-10 * SIZE(AO), %xmm1
kusano 2b45e8
	movapd	 -8 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	 -6 * SIZE(BO), %xmm3
kusano 2b45e8
	movapd	 -4 * SIZE(BO), %xmm4
kusano 2b45e8
	movapd	 -2 * SIZE(BO), %xmm5
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
	mulpd	%xmm1, %xmm4
kusano 2b45e8
	mulpd	%xmm1, %xmm5
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
	ADD1	%xmm4, %xmm10
kusano 2b45e8
	ADD2	%xmm5, %xmm11
kusano 2b45e8
kusano 2b45e8
	subq	$ -8 * SIZE, AO
kusano 2b45e8
	subq	$-16 * SIZE, BO
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	jne    .L141
kusano 2b45e8
kusano 2b45e8
.L142:
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	movapd	POSINV, %xmm7
kusano 2b45e8
kusano 2b45e8
	andq	$3, %rax		# if (k & 3)
kusano 2b45e8
	BRANCH
kusano 2b45e8
	jle .L144
kusano 2b45e8
kusano 2b45e8
.L143:
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movapd	-16 * SIZE(BO), %xmm2
kusano 2b45e8
	movapd	-14 * SIZE(BO), %xmm3
kusano 2b45e8
kusano 2b45e8
	mulpd	%xmm0, %xmm2
kusano 2b45e8
	mulpd	%xmm0, %xmm3
kusano 2b45e8
kusano 2b45e8
	ADD1	%xmm2, %xmm8
kusano 2b45e8
	ADD2	%xmm3, %xmm9
kusano 2b45e8
kusano 2b45e8
	addq	$2 * SIZE, AO
kusano 2b45e8
	addq	$4 * SIZE, BO
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
	jg	.L143
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L144:
kusano 2b45e8
	addpd	%xmm10, %xmm8
kusano 2b45e8
	addpd	%xmm11, %xmm9
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
#else
kusano 2b45e8
	subq	$1, %rax
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 1), AO
kusano 2b45e8
	leaq	(B,  %rax, 1), B
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	SHUFPD_1 %xmm9, %xmm9
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
kusano 2b45e8
	xorpd	%xmm7, %xmm9
kusano 2b45e8
#else
kusano 2b45e8
	xorpd	%xmm7, %xmm8
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
kusano 2b45e8
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
kusano 2b45e8
	subpd	%xmm9, %xmm8
kusano 2b45e8
#else
kusano 2b45e8
	addpd	%xmm9, %xmm8
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	-16 * SIZE(B), %xmm9
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,  %xmm9
kusano 2b45e8
#else
kusano 2b45e8
	movapd	-16 * SIZE(AO), %xmm9
kusano 2b45e8
kusano 2b45e8
	subpd	%xmm8,  %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef CONJ
kusano 2b45e8
	SHUFPD_1 %xmm7, %xmm7
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	movddup	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(AO), %xmm1
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	movddup	-16 * SIZE(AO), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(AO), %xmm1
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RN
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm1
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movddup	-16 * SIZE(B), %xmm0
kusano 2b45e8
	movddup	-15 * SIZE(B), %xmm1
kusano 2b45e8
kusano 2b45e8
	pshufd	$0x4e, %xmm9, %xmm8
kusano 2b45e8
kusano 2b45e8
	xorpd	 %xmm7, %xmm8
kusano 2b45e8
kusano 2b45e8
	mulpd	 %xmm0, %xmm9
kusano 2b45e8
	mulpd	 %xmm1, %xmm8
kusano 2b45e8
kusano 2b45e8
	addpd	 %xmm8, %xmm9
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$2 * SIZE, CO1
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	movsd	%xmm9,   0 * SIZE(CO1)
kusano 2b45e8
	movhpd	%xmm9,   1 * SIZE(CO1)
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(LT)
kusano 2b45e8
	movapd	%xmm9, -16 * SIZE(B)
kusano 2b45e8
kusano 2b45e8
	movddup	%xmm9,  %xmm8
kusano 2b45e8
	unpckhpd %xmm9,  %xmm9
kusano 2b45e8
kusano 2b45e8
	movapd	%xmm8,  -16 * SIZE(BO)
kusano 2b45e8
	movapd	%xmm9,  -14 * SIZE(BO)
kusano 2b45e8
#else
kusano 2b45e8
	movapd	%xmm9, -16 * SIZE(AO)
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifndef LN
kusano 2b45e8
	addq	$2 * SIZE, CO1
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	K,  %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 1), AO
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$2 * SIZE, B
kusano 2b45e8
#endif
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LN
kusano 2b45e8
	subq	$1, KK
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef LT
kusano 2b45e8
	addq	$1, KK
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#ifdef RT
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	movq	BORIG, B
kusano 2b45e8
	salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
	addq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L130:
kusano 2b45e8
	movq	M,  I
kusano 2b45e8
	sarq	$1, I		# i = (m >> 1)
kusano 2b45e8
	jle	.L199
kusano 2b45e8
	ALIGN_4
kusano 2b45e8
kusano 2b45e8
.L110:
kusano 2b45e8
#ifdef LN
kusano 2b45e8
       movq	K, %rax
kusano 2b45e8
       salq	$1 + ZBASE_SHIFT, %rax
kusano 2b45e8
       subq	%rax, AORIG
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	movq	AORIG, AO
kusano 2b45e8
	salq	$ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(AO, %rax, 2), AO
kusano 2b45e8
#endif
kusano 2b45e8
kusano 2b45e8
	leaq	16 * SIZE + BUFFER, BO
kusano 2b45e8
kusano 2b45e8
#if defined(LN) || defined(RT)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
	salq	$0 + ZBASE_SHIFT, %rax
kusano 2b45e8
	leaq	(BO, %rax, 2), BO
kusano 2b45e8
#endif	
kusano 2b45e8
kusano 2b45e8
	pxor	%xmm8, %xmm8
kusano 2b45e8
	pxor	%xmm9, %xmm9
kusano 2b45e8
	pxor	%xmm12, %xmm12
kusano 2b45e8
	pxor	%xmm13, %xmm13
kusano 2b45e8
	prefetcht0    -3 * SIZE(CO1)
kusano 2b45e8
kusano 2b45e8
#if defined(LT) || defined(RN)
kusano 2b45e8
	movq	KK, %rax
kusano 2b45e8
#else
kusano 2b45e8
	movq	K, %rax
kusano 2b45e8
	subq	KK, %rax
kusano 2b45e8
#endif
kusano 2b45e8
	sarq	$2, %rax
kusano 2b45e8
	je	.L112
kusano 2b45e8
kusano 2b45e8
.L111:
	PREFETCH (PREFETCHSIZE +  0) * SIZE(AO)

	movapd	-16 * SIZE(AO), %xmm0
	movapd	-14 * SIZE(AO), %xmm1

	movapd	-16 * SIZE(BO), %xmm2
	movapd	 %xmm2, %xmm3
	movapd	-14 * SIZE(BO), %xmm4
	movapd	 %xmm4, %xmm5

	mulpd	%xmm0, %xmm2
	mulpd	%xmm1, %xmm3
	mulpd	%xmm0, %xmm4
	mulpd	%xmm1, %xmm5

	ADD1	%xmm2, %xmm8
	ADD1	%xmm3, %xmm12
	ADD2	%xmm4, %xmm9
	ADD2	%xmm5, %xmm13

	movapd	-12 * SIZE(AO), %xmm0
	movapd	-10 * SIZE(AO), %xmm1

	movapd	-12 * SIZE(BO), %xmm2
	movapd	 %xmm2, %xmm3
	movapd	-10 * SIZE(BO), %xmm4
	movapd	 %xmm4, %xmm5

	mulpd	%xmm0, %xmm2
	mulpd	%xmm1, %xmm3
	mulpd	%xmm0, %xmm4
	mulpd	%xmm1, %xmm5

	ADD1	%xmm2, %xmm8
	ADD1	%xmm3, %xmm12
	ADD2	%xmm4, %xmm9
	ADD2	%xmm5, %xmm13

	movapd	 -8 * SIZE(AO), %xmm0
	movapd	 -6 * SIZE(AO), %xmm1

	movapd	 -8 * SIZE(BO), %xmm2
	movapd	 %xmm2, %xmm3
	movapd	 -6 * SIZE(BO), %xmm4
	movapd	 %xmm4, %xmm5

	mulpd	%xmm0, %xmm2
	mulpd	%xmm1, %xmm3
	mulpd	%xmm0, %xmm4
	mulpd	%xmm1, %xmm5

	ADD1	%xmm2, %xmm8
	ADD1	%xmm3, %xmm12
	ADD2	%xmm4, %xmm9
	ADD2	%xmm5, %xmm13

	movapd	 -4 * SIZE(AO), %xmm0
	movapd	 -2 * SIZE(AO), %xmm1

	movapd	 -4 * SIZE(BO), %xmm2
	movapd	 %xmm2, %xmm3
	movapd	 -2 * SIZE(BO), %xmm4
	movapd	 %xmm4, %xmm5

	mulpd	%xmm0, %xmm2
	mulpd	%xmm1, %xmm3
	mulpd	%xmm0, %xmm4
	mulpd	%xmm1, %xmm5

	ADD1	%xmm2, %xmm8
	ADD1	%xmm3, %xmm12
	ADD2	%xmm4, %xmm9
	ADD2	%xmm5, %xmm13

	subq	$-16 * SIZE, AO
	subq	$-16 * SIZE, BO
	subq	$1, %rax
	jne	.L111
	ALIGN_4

.L112:
#if defined(LT) || defined(RN)
	movq	KK, %rax
#else
	movq	K, %rax
	subq	KK, %rax
#endif
	movapd	POSINV,  %xmm7
	andq	$3, %rax		# if (k & 3)
	BRANCH
	jle .L114

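/* .L113 drains the k & 3 iterations left over from the unrolled loop,
   one rank-1 update per pass, using the same ADD1/ADD2 pattern as
   above.  POSINV (loaded into xmm7) is the sign mask used by the
   fix-up and the solve that follow. */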
.L113:
	movapd	-16 * SIZE(AO), %xmm0
	movapd	-14 * SIZE(AO), %xmm1

	movapd	-16 * SIZE(BO), %xmm2
	movapd	 %xmm2, %xmm3
	movapd	-14 * SIZE(BO), %xmm4
	movapd	 %xmm4, %xmm5

	mulpd	%xmm0, %xmm2
	mulpd	%xmm1, %xmm3
	mulpd	%xmm0, %xmm4
	mulpd	%xmm1, %xmm5

	ADD1	%xmm2, %xmm8
	ADD1	%xmm3, %xmm12
	ADD2	%xmm4, %xmm9
	ADD2	%xmm5, %xmm13

	addq	$4 * SIZE, AO
	addq	$4 * SIZE, BO
	subq	$1, %rax
	jg	.L113
	ALIGN_4

.L114:
#if defined(LN) || defined(RT)
	movq	KK, %rax
#ifdef LN
	subq	$2, %rax
#else
	subq	$1, %rax
#endif

	movq	AORIG, AO
	movq	BORIG, B
	leaq	16 * SIZE + BUFFER, BO

	salq	$ZBASE_SHIFT, %rax
	leaq	(AO, %rax, 2), AO
	leaq	(B,  %rax, 1), B
	leaq	(BO, %rax, 2), BO
#endif

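/* Accumulation is done (and, for LN/RT, AO/B/BO have been re-pointed at
   this tile's packed data).  The two halves of each accumulator are now
   folded into complex values: SHUFPD_1 swaps the lanes of the Im(b)
   accumulators, the xorpd with POSINV applies the sign required by the
   conjugation variant, and the add/sub below yields a0*b and a1*b.
   Subtracting those from the previously packed values produces the
   right-hand sides for the small triangular solve. */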
	SHUFPD_1 %xmm9, %xmm9
	SHUFPD_1 %xmm13, %xmm13

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(NR) || defined(NC) || defined(TR) || defined(TC)
	xorpd	%xmm7, %xmm9
	xorpd	%xmm7, %xmm13
#else
	xorpd	%xmm7, %xmm8
	xorpd	%xmm7, %xmm12
#endif

#if defined(NN) || defined(NT) || defined(TN) || defined(TT) || \
    defined(RR) || defined(RC) || defined(CR) || defined(CC)
	subpd	%xmm9, %xmm8
	subpd	%xmm13, %xmm12
#else
	addpd	%xmm9, %xmm8
	addpd	%xmm13, %xmm12
#endif

#if defined(LN) || defined(LT)
	movapd	-16 * SIZE(B), %xmm9
	movapd	-14 * SIZE(B), %xmm13

	subpd	%xmm8,  %xmm9
	subpd	%xmm12, %xmm13
#else
	movapd	-16 * SIZE(AO), %xmm9
	movapd	-14 * SIZE(AO), %xmm13

	subpd	%xmm8,  %xmm9
	subpd	%xmm12, %xmm13
#endif

#ifndef CONJ
	SHUFPD_1 %xmm7, %xmm7
#endif
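
/* LN/LT: solve the 2x2 triangular block of A against the two right-hand
   sides.  Each complex multiply is built from pshufd $0x4e (swap
   real/imag), an xorpd with the sign mask in xmm7, two mulpd and an
   add/sub.  The code only ever multiplies by the diagonal entries, so
   the packed panel is assumed to store their complex reciprocals.

   A plain-C sketch of the LN sequence under that assumption; all names
   are illustrative and nothing below is assembled:

       #include <complex.h>

       // x0, x1:          right-hand sides on entry, solution on return
       // inv_d0, inv_d1:  assumed reciprocals of the two diagonal entries
       // off:             the off-diagonal entry coupling the two rows
       static void solve_tile_2x1(double complex *x0, double complex *x1,
                                  double complex inv_d0,
                                  double complex inv_d1,
                                  double complex off)
       {
           *x1 *= inv_d1;        // the LN path resolves the second row first
           *x0 -= off * (*x1);   // eliminate it from the first row
           *x0 *= inv_d0;        // scale by the remaining diagonal
       }
*/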

#ifdef LN
	movddup	-10 * SIZE(AO), %xmm0
	movddup	 -9 * SIZE(AO), %xmm1
	movddup	-12 * SIZE(AO), %xmm2
	movddup	-11 * SIZE(AO), %xmm3
	movddup	-16 * SIZE(AO), %xmm4
	movddup	-15 * SIZE(AO), %xmm5

	pshufd	$0x4e, %xmm13, %xmm12

	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm0, %xmm13
	mulpd	 %xmm1, %xmm12

	addpd	 %xmm12, %xmm13

	movapd	 %xmm13, %xmm8
	pshufd	 $0x4e, %xmm13, %xmm12

	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm2, %xmm8
	mulpd	 %xmm3, %xmm12

	subpd	 %xmm8, %xmm9
	subpd	 %xmm12, %xmm9

	pshufd	$0x4e, %xmm9, %xmm8

	xorpd	 %xmm7, %xmm8

	mulpd	 %xmm4, %xmm9
	mulpd	 %xmm5, %xmm8

	addpd	 %xmm8, %xmm9
#endif

#ifdef LT
	movddup	-16 * SIZE(AO), %xmm0
	movddup	-15 * SIZE(AO), %xmm1
	movddup	-14 * SIZE(AO), %xmm2
	movddup	-13 * SIZE(AO), %xmm3
	movddup	-10 * SIZE(AO), %xmm4
	movddup	 -9 * SIZE(AO), %xmm5

	pshufd	$0x4e, %xmm9, %xmm8

	xorpd	 %xmm7, %xmm8

	mulpd	 %xmm0, %xmm9
	mulpd	 %xmm1, %xmm8

	addpd	 %xmm8, %xmm9

	movapd	 %xmm9, %xmm8
	pshufd	 $0x4e, %xmm9, %xmm12

	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm2, %xmm8
	mulpd	 %xmm3, %xmm12

	subpd	 %xmm8, %xmm13
	subpd	 %xmm12, %xmm13

	pshufd	$0x4e, %xmm13, %xmm12

	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm4, %xmm13
	mulpd	 %xmm5, %xmm12

	addpd	 %xmm12, %xmm13
#endif
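
/* RN/RT: the triangular block of B is 1x1 here, so both rows of the tile
   are simply scaled by the stored diagonal entry of B (assumed, as
   above, to be its reciprocal) using the same pshufd/xorpd
   complex-multiply pattern. */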

#ifdef RN
	movddup	-16 * SIZE(B), %xmm0
	movddup	-15 * SIZE(B), %xmm1

	pshufd	$0x4e, %xmm9, %xmm8
	pshufd	$0x4e, %xmm13, %xmm12

	xorpd	 %xmm7, %xmm8
	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm0, %xmm9
	mulpd	 %xmm1, %xmm8
	mulpd	 %xmm0, %xmm13
	mulpd	 %xmm1, %xmm12

	addpd	 %xmm8, %xmm9
	addpd	 %xmm12, %xmm13
#endif

#ifdef RT
	movddup	-16 * SIZE(B), %xmm0
	movddup	-15 * SIZE(B), %xmm1

	pshufd	$0x4e, %xmm9, %xmm8
	pshufd	$0x4e, %xmm13, %xmm12

	xorpd	 %xmm7, %xmm8
	xorpd	 %xmm7, %xmm12

	mulpd	 %xmm0, %xmm9
	mulpd	 %xmm1, %xmm8
	mulpd	 %xmm0, %xmm13
	mulpd	 %xmm1, %xmm12

	addpd	 %xmm8, %xmm9
	addpd	 %xmm12, %xmm13
#endif
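
/* Write the solved 2x1 tile back: once to C through CO1, and once to the
   packed data (B plus the expanded BO copy for LN/LT, AO otherwise) so
   that later tiles of this solve see the updated values. */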

#ifdef LN
	subq	$4 * SIZE, CO1
#endif

	movsd	%xmm9,   0 * SIZE(CO1)
	movhpd	%xmm9,   1 * SIZE(CO1)
	movsd	%xmm13,  2 * SIZE(CO1)
	movhpd	%xmm13,  3 * SIZE(CO1)

#if defined(LN) || defined(LT)
	movapd	%xmm9,  -16 * SIZE(B)
	movapd	%xmm13, -14 * SIZE(B)

	movddup	%xmm9,  %xmm8
	unpckhpd %xmm9,  %xmm9
	movddup	%xmm13, %xmm12
	unpckhpd %xmm13, %xmm13

	movapd	%xmm8,  -16 * SIZE(BO)
	movapd	%xmm9,  -14 * SIZE(BO)
	movapd	%xmm12, -12 * SIZE(BO)
	movapd	%xmm13, -10 * SIZE(BO)
#else
	movapd	%xmm9,  -16 * SIZE(AO)
	movapd	%xmm13, -14 * SIZE(AO)
#endif

#ifndef LN
	addq	$4 * SIZE, CO1
#endif

#if defined(LT) || defined(RN)
	movq	K,  %rax
	subq	KK, %rax
	salq	$ZBASE_SHIFT, %rax
	leaq	(AO, %rax, 2), AO
#ifdef LT
	addq	$4 * SIZE, B
#endif
#endif

#ifdef LN
	subq	$2, KK
	movq	BORIG, B
#endif

#ifdef LT
	addq	$2, KK
#endif

#ifdef RT
	movq	K, %rax
	movq	BORIG, B
	salq	$1 + ZBASE_SHIFT, %rax
	addq	%rax, AORIG
#endif

	decq	I			# i --
	jg	.L110
	ALIGN_4

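/* .L199: end of the single-column case.  Advance B past the column and
   adjust KK for the RN/RT variants before falling through to the exit
   code. */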
.L199:
#ifdef LN
	leaq	(, K, SIZE), %rax
	leaq	(B, %rax, 2), B
#endif

#if defined(LT) || defined(RN)
	movq	K,  %rax
	subq	KK, %rax
	leaq	(,%rax, SIZE), %rax
	leaq	(B,  %rax, 1 * COMPSIZE), B
#endif

#ifdef RN
	addq	$1, KK
#endif

#ifdef RT
	subq	$1, KK
#endif
	ALIGN_4
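/* .L999: common exit.  Restore the callee-saved general-purpose
   registers (plus rdi, rsi and xmm6-xmm15 under WINDOWS_ABI), release
   the stack frame and return. */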
.L999:
	movq	%r15, %rsp

	movq	  0(%rsp), %rbx
	movq	  8(%rsp), %rbp
	movq	 16(%rsp), %r12
	movq	 24(%rsp), %r13
	movq	 32(%rsp), %r14
	movq	 40(%rsp), %r15

#ifdef WINDOWS_ABI
	movq	 48(%rsp), %rdi
	movq	 56(%rsp), %rsi
	movups	 64(%rsp), %xmm6
	movups	 80(%rsp), %xmm7
	movups	 96(%rsp), %xmm8
	movups	112(%rsp), %xmm9
	movups	128(%rsp), %xmm10
	movups	144(%rsp), %xmm11
	movups	160(%rsp), %xmm12
	movups	176(%rsp), %xmm13
	movups	192(%rsp), %xmm14
	movups	208(%rsp), %xmm15
#endif

	addq	$STACKSIZE, %rsp
	ret

	EPILOGUE