/* thirdparty/openblas/xianyi-OpenBLAS-e6e87a2/kernel/x86_64/axpy_sse2.S */

/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin.           */
/* All rights reserved.                                              */
/*                                                                   */
/* Redistribution and use in source and binary forms, with or        */
/* without modification, are permitted provided that the following   */
/* conditions are met:                                               */
/*                                                                   */
/*   1. Redistributions of source code must retain the above         */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer.                                                  */
/*                                                                   */
/*   2. Redistributions in binary form must reproduce the above      */
/*      copyright notice, this list of conditions and the following  */
/*      disclaimer in the documentation and/or other materials       */
/*      provided with the distribution.                              */
/*                                                                   */
/*    THIS  SOFTWARE IS PROVIDED  BY THE  UNIVERSITY OF  TEXAS AT    */
/*    AUSTIN  ``AS IS''  AND ANY  EXPRESS OR  IMPLIED WARRANTIES,    */
/*    INCLUDING, BUT  NOT LIMITED  TO, THE IMPLIED  WARRANTIES OF    */
/*    MERCHANTABILITY  AND FITNESS FOR  A PARTICULAR  PURPOSE ARE    */
/*    DISCLAIMED.  IN  NO EVENT SHALL THE UNIVERSITY  OF TEXAS AT    */
/*    AUSTIN OR CONTRIBUTORS BE  LIABLE FOR ANY DIRECT, INDIRECT,    */
/*    INCIDENTAL,  SPECIAL, EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES    */
/*    (INCLUDING, BUT  NOT LIMITED TO,  PROCUREMENT OF SUBSTITUTE    */
/*    GOODS  OR  SERVICES; LOSS  OF  USE,  DATA,  OR PROFITS;  OR    */
/*    BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF    */
/*    LIABILITY, WHETHER  IN CONTRACT, STRICT  LIABILITY, OR TORT    */
/*    (INCLUDING NEGLIGENCE OR OTHERWISE)  ARISING IN ANY WAY OUT    */
/*    OF  THE  USE OF  THIS  SOFTWARE,  EVEN  IF ADVISED  OF  THE    */
/*    POSSIBILITY OF SUCH DAMAGE.                                    */
/*                                                                   */
/* The views and conclusions contained in the software and           */
/* documentation are those of the authors and should not be          */
/* interpreted as representing official policies, either expressed   */
/* or implied, of The University of Texas at Austin.                 */
/*********************************************************************/

#define ASSEMBLER
#include "common.h"

#ifndef WINDOWS_ABI
#define M	ARG1
#define X	ARG4
#define INCX	ARG5
#define Y	ARG6
#define INCY	ARG2
#else
#define M	ARG1
#define X	ARG2
#define INCX	ARG3
#define Y	ARG4
#define INCY	%r10
#endif

#define	YY	%r11
#define ALPHA	%xmm15

#include "l1param.h"
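
/*
 * Overview (explanatory note): this kernel computes the double-precision
 * AXPY update
 *
 *     y[i] = alpha * x[i] + y[i],   i = 0 .. M-1,
 *
 * roughly equivalent to the following C sketch (reference only; the real
 * entry point and argument passing come from the surrounding OpenBLAS
 * build via common.h):
 *
 *     for (i = 0; i < m; i++)
 *         y[i * incy] += alpha * x[i * incx];
 *
 * M, X, INCX, Y and INCY above name the corresponding arguments,
 * ALPHA (%xmm15) holds alpha, and YY (%r11) is a scratch pointer used by
 * the strided path.  SIZE is the element size in bytes (8 for double),
 * and PREFETCH/PREFETCHW, PREFETCHSIZE and PREOFFSET are expected to be
 * supplied by the included headers, tuned per target CPU.
 */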

	PROLOGUE
	PROFCODE

#ifndef WINDOWS_ABI
#ifndef XDOUBLE
	movq	 8(%rsp), INCY
#else
	movq	24(%rsp), INCY
#endif
	movaps	%xmm0,  ALPHA
#else
	movaps	%xmm3,  ALPHA

	movq	40(%rsp), X
	movq	48(%rsp), INCX
	movq	56(%rsp), Y
	movq	64(%rsp), INCY
#endif

	SAVEREGISTERS

	unpcklpd ALPHA, ALPHA
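
/*
 * Explanatory note: unpcklpd duplicates the scalar alpha into both 64-bit
 * lanes of ALPHA so the packed mulpd instructions below scale two elements
 * of X at a time; the two leaq instructions that follow convert the element
 * strides INCX/INCY into byte strides.
 */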

	leaq	(, INCX, SIZE), INCX
	leaq	(, INCY, SIZE), INCY

	testq	M, M
	jle	.L47
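
/*
 * Fast-path selection (explanatory note): the vectorized code below is only
 * used when both increments are one element (SIZE bytes after the leaq
 * conversion above); any other stride falls through to the general code at
 * .L40.  If Y is not 16-byte aligned, a single element is processed first so
 * that the packed stores to Y can use aligned movaps.
 */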

	cmpq	$SIZE, INCX
	jne	.L40
	cmpq	$SIZE, INCY
	jne	.L40

	testq	$SIZE, Y
	je	.L10

	movsd	(X), %xmm0
	mulsd	ALPHA, %xmm0
	addsd	(Y), %xmm0
	movsd	%xmm0, (Y)
	addq	$1 * SIZE, X
	addq	$1 * SIZE, Y
	decq	M
	jle	.L19
	ALIGN_4

.L10:
	subq	$-16 * SIZE, X
	subq	$-16 * SIZE, Y

	testq	$SIZE, X
	jne	.L20
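
/*
 * Main aligned loop (explanatory note): X and Y were advanced by 16 elements
 * above (subq $-16 * SIZE) so the unrolled loop can address the current block
 * with small negative displacements.  When X is also 16-byte aligned, .L11
 * processes 16 doubles per iteration using eight xmm registers (two doubles
 * each), with software prefetch hints for upcoming blocks of X and Y; the
 * remainder is then handled in chunks of 8, 4, 2 and 1 elements (.L13-.L16).
 * If X is misaligned relative to Y, control transfers to .L20.
 */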

	movq	M,  %rax
	sarq	$4, %rax
	jle	.L13

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1
	movaps	-12 * SIZE(X), %xmm2
	movaps	-10 * SIZE(X), %xmm3

	decq	%rax
	jle .L12
	ALIGN_3

.L11:
	movaps	 -8 * SIZE(X), %xmm4
	movaps	 -6 * SIZE(X), %xmm5

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movaps	 -4 * SIZE(X), %xmm6
	movaps	 -2 * SIZE(X), %xmm7

#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	movaps	 0 * SIZE(X), %xmm0
	movaps	 2 * SIZE(X), %xmm1

#if defined(PREFETCHW) && !defined(FETCH128)
	PREFETCHW (PREFETCHSIZE +  64) - PREOFFSET(Y)
#endif

	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4, -8 * SIZE(Y)

	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5, -6 * SIZE(Y)

	movaps	 4 * SIZE(X), %xmm2
	movaps	 6 * SIZE(X), %xmm3

#if defined(PREFETCH) && !defined(FETCH128)
	PREFETCH (PREFETCHSIZE +  64) - PREOFFSET(X)
#endif

	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6, -4 * SIZE(Y)

	mulpd	ALPHA, %xmm7
	addpd	 -2 * SIZE(Y), %xmm7
	movaps	%xmm7, -2 * SIZE(Y)

	subq	$-16 * SIZE, Y
	subq	$-16 * SIZE, X
	decq	%rax
	jg	.L11
	ALIGN_3

.L12:
	movaps	 -8 * SIZE(X), %xmm4
	movaps	 -6 * SIZE(X), %xmm5

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movaps	 -4 * SIZE(X), %xmm6
	movaps	 -2 * SIZE(X), %xmm7

	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4,  -8 * SIZE(Y)

	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5,  -6 * SIZE(Y)

	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6,  -4 * SIZE(Y)

	mulpd	ALPHA, %xmm7
	addpd	 -2 * SIZE(Y), %xmm7
	movaps	%xmm7,  -2 * SIZE(Y)

	subq	$-16 * SIZE, Y
	subq	$-16 * SIZE, X
	ALIGN_3

.L13:
	movq	M,  %rax
	andq	$8, %rax
	jle	.L14
	ALIGN_3

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1
	movaps	-12 * SIZE(X), %xmm2
	movaps	-10 * SIZE(X), %xmm3

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3

	movaps	%xmm0, -16 * SIZE(Y)
	movaps	%xmm1, -14 * SIZE(Y)
	movaps	%xmm2, -12 * SIZE(Y)
	movaps	%xmm3, -10 * SIZE(Y)

	addq	$8 * SIZE, X
	addq	$8 * SIZE, Y
	ALIGN_3

.L14:
	movq	M,  %rax
	andq	$4, %rax
	jle	.L15
	ALIGN_3

	movaps	-16 * SIZE(X), %xmm0
	movaps	-14 * SIZE(X), %xmm1

	mulpd	ALPHA, %xmm0
	mulpd	ALPHA, %xmm1

	addpd	-16 * SIZE(Y), %xmm0
	addpd	-14 * SIZE(Y), %xmm1

	movaps	%xmm0, -16 * SIZE(Y)
	movaps	%xmm1, -14 * SIZE(Y)

	addq	$4 * SIZE, X
	addq	$4 * SIZE, Y
	ALIGN_3

.L15:
	movq	M,  %rax
	andq	$2, %rax
	jle	.L16
	ALIGN_3

	movaps	-16 * SIZE(X), %xmm0
	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	addq	$2 * SIZE, X
	addq	$2 * SIZE, Y
	ALIGN_3

.L16:
	movq	M,  %rax
	andq	$1, %rax
	jle	.L19
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	mulsd	ALPHA, %xmm0
	addsd	-16 * SIZE(Y), %xmm0

	movsd	%xmm0, -16 * SIZE(Y)
	ALIGN_3

.L19:
	xorq	%rax,%rax

	RESTOREREGISTERS

	ret
	ALIGN_3
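
/*
 * Misaligned X, aligned Y (explanatory note): this path is reached when Y is
 * 16-byte aligned but X is not.  With ALIGNED_ACCESS the code keeps using
 * aligned movaps loads from X and reconstructs each consecutive pair with the
 * SHUFPD_1 shuffle, which combines the high double of the previous register
 * with the low double of the next load; without ALIGNED_ACCESS (see the #else
 * branch further down) the pairs are instead loaded with movsd/movhps.  The
 * loop structure and remainder handling mirror the fully aligned case above.
 */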

.L20:
#ifdef ALIGNED_ACCESS

	movhps	-16 * SIZE(X), %xmm0

	movq	M,  %rax
	sarq	$4, %rax
	jle	.L23

	movaps	-15 * SIZE(X), %xmm1
	movaps	-13 * SIZE(X), %xmm2
	movaps	-11 * SIZE(X), %xmm3

	decq	%rax
	jle .L22
	ALIGN_4

.L21:
	movaps	 -9 * SIZE(X), %xmm4
	movaps	 -7 * SIZE(X), %xmm5

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	SHUFPD_1 %xmm1, %xmm0
	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	SHUFPD_1 %xmm2, %xmm1
	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movaps	 -5 * SIZE(X), %xmm6
	movaps	 -3 * SIZE(X), %xmm7

#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	SHUFPD_1 %xmm3, %xmm2
	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	SHUFPD_1 %xmm4, %xmm3
	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	movaps	-1 * SIZE(X), %xmm0
	movaps	 1 * SIZE(X), %xmm1

#if defined(PREFETCHW) && !defined(FETCH128)
	PREFETCHW (PREFETCHSIZE +  64) - PREOFFSET(Y)
#endif

	SHUFPD_1 %xmm5, %xmm4
	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4,  -8 * SIZE(Y)

	SHUFPD_1 %xmm6, %xmm5
	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5, -6 * SIZE(Y)

	movaps	 3 * SIZE(X), %xmm2
	movaps	 5 * SIZE(X), %xmm3

#if defined(PREFETCH) && !defined(FETCH128)
	PREFETCH (PREFETCHSIZE +  64) - PREOFFSET(X)
#endif

	SHUFPD_1 %xmm7, %xmm6
	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6, -4 * SIZE(Y)

	SHUFPD_1 %xmm0, %xmm7
	mulpd	ALPHA, %xmm7
	addpd	-2 * SIZE(Y), %xmm7
	movaps	%xmm7, -2 * SIZE(Y)

	subq	$-16 * SIZE, X
	subq	$-16 * SIZE, Y
	decq	%rax
	jg	.L21
	ALIGN_3

.L22:
	movaps	 -9 * SIZE(X), %xmm4
	movaps	 -7 * SIZE(X), %xmm5

	SHUFPD_1 %xmm1, %xmm0
	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)
	movaps	-1 * SIZE(X), %xmm0

	SHUFPD_1 %xmm2, %xmm1
	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movaps	 -5 * SIZE(X), %xmm6
	movaps	 -3 * SIZE(X), %xmm7

	SHUFPD_1 %xmm3, %xmm2
	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	SHUFPD_1 %xmm4, %xmm3
	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	SHUFPD_1 %xmm5, %xmm4
	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4,  -8 * SIZE(Y)

	SHUFPD_1 %xmm6, %xmm5
	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5,  -6 * SIZE(Y)

	SHUFPD_1 %xmm7, %xmm6
	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6,  -4 * SIZE(Y)

	SHUFPD_1 %xmm0, %xmm7
	mulpd	ALPHA, %xmm7
	addpd	 -2 * SIZE(Y), %xmm7
	movaps	%xmm7,  -2 * SIZE(Y)

	subq	$-16 * SIZE, X
	subq	$-16 * SIZE, Y
	ALIGN_3

.L23:
	movq	M,  %rax
	andq	$8, %rax
	jle	.L24
	ALIGN_3

	movaps	-15 * SIZE(X), %xmm1
	movaps	-13 * SIZE(X), %xmm2
	movaps	-11 * SIZE(X), %xmm3
	movaps	 -9 * SIZE(X), %xmm8

	SHUFPD_1 %xmm1, %xmm0
	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	SHUFPD_1 %xmm2, %xmm1
	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	SHUFPD_1 %xmm3, %xmm2
	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	SHUFPD_1 %xmm8, %xmm3
	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	movaps	%xmm8, %xmm0

	addq	$8 * SIZE, X
	addq	$8 * SIZE, Y
	ALIGN_3

.L24:
	movq	M,  %rax
	andq	$4, %rax
	jle	.L25
	ALIGN_3

	movaps	-15 * SIZE(X), %xmm1
	movaps	-13 * SIZE(X), %xmm2

	SHUFPD_1 %xmm1, %xmm0
	SHUFPD_1 %xmm2, %xmm1

	mulpd	ALPHA, %xmm0
	mulpd	ALPHA, %xmm1

	addpd	-16 * SIZE(Y), %xmm0
	addpd	-14 * SIZE(Y), %xmm1

	movaps	%xmm0, -16 * SIZE(Y)
	movaps	%xmm1, -14 * SIZE(Y)
	movaps	%xmm2, %xmm0

	addq	$4 * SIZE, X
	addq	$4 * SIZE, Y
	ALIGN_3

.L25:
	movq	M,  %rax
	andq	$2, %rax
	jle	.L26
	ALIGN_3

	movaps	-15 * SIZE(X), %xmm1
	SHUFPD_1 %xmm1, %xmm0
	mulpd	ALPHA,  %xmm0
	addpd	-16 * SIZE(Y), %xmm0

	movaps	%xmm0, -16 * SIZE(Y)

	addq	$2 * SIZE, X
	addq	$2 * SIZE, Y
	ALIGN_3

.L26:
	movq	M,  %rax
	andq	$1, %rax
	jle	.L29
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	mulsd	ALPHA, %xmm0
	addsd	-16 * SIZE(Y), %xmm0

	movsd	%xmm0, -16 * SIZE(Y)
	ALIGN_3

.L29:
	xorq	%rax,%rax

	RESTOREREGISTERS

	ret
	ALIGN_3

#else
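
/*
 * Without ALIGNED_ACCESS (explanatory note): the same 16-elements-per-
 * iteration structure is used, but each pair of doubles from the misaligned
 * X is assembled with a movsd/movhps pair instead of shifting aligned loads,
 * while Y is still accessed with aligned movaps.
 */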
	movq	M,  %rax
	sarq	$4, %rax
	jle	.L23

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1
	movsd	-12 * SIZE(X), %xmm2
	movhps	-11 * SIZE(X), %xmm2
	movsd	-10 * SIZE(X), %xmm3
	movhps	 -9 * SIZE(X), %xmm3

	decq	%rax
	jle .L22
	ALIGN_3

.L21:
	movsd	 -8 * SIZE(X), %xmm4
	movhps	 -7 * SIZE(X), %xmm4
	movsd	 -6 * SIZE(X), %xmm5
	movhps	 -5 * SIZE(X), %xmm5

#ifdef PREFETCHW
	PREFETCHW (PREFETCHSIZE +  0) - PREOFFSET(Y)
#endif

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movsd	 -4 * SIZE(X), %xmm6
	movhps	 -3 * SIZE(X), %xmm6
	movsd	 -2 * SIZE(X), %xmm7
	movhps	 -1 * SIZE(X), %xmm7

#ifdef PREFETCH
	PREFETCH (PREFETCHSIZE +  0) - PREOFFSET(X)
#endif

	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	movsd	 0 * SIZE(X), %xmm0
	movhps	 1 * SIZE(X), %xmm0
	movsd	 2 * SIZE(X), %xmm1
	movhps	 3 * SIZE(X), %xmm1

#if defined(PREFETCHW) && !defined(FETCH128)
	PREFETCHW (PREFETCHSIZE +  64) - PREOFFSET(Y)
#endif

	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4, -8 * SIZE(Y)

	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5, -6 * SIZE(Y)

	movsd	 4 * SIZE(X), %xmm2
	movhps	 5 * SIZE(X), %xmm2
	movsd	 6 * SIZE(X), %xmm3
	movhps	 7 * SIZE(X), %xmm3

#if defined(PREFETCH) && !defined(FETCH128)
	PREFETCH (PREFETCHSIZE +  64) - PREOFFSET(X)
#endif

	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6, -4 * SIZE(Y)

	mulpd	ALPHA, %xmm7
	addpd	 -2 * SIZE(Y), %xmm7
	movaps	%xmm7, -2 * SIZE(Y)

	subq	$-16 * SIZE, Y
	subq	$-16 * SIZE, X
	decq	%rax
	jg	.L21
	ALIGN_3

.L22:
	movsd	 -8 * SIZE(X), %xmm4
	movhps	 -7 * SIZE(X), %xmm4
	movsd	 -6 * SIZE(X), %xmm5
	movhps	 -5 * SIZE(X), %xmm5

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	movaps	%xmm1, -14 * SIZE(Y)

	movsd	 -4 * SIZE(X), %xmm6
	movhps	 -3 * SIZE(X), %xmm6
	movsd	 -2 * SIZE(X), %xmm7
	movhps	 -1 * SIZE(X), %xmm7

	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	movaps	%xmm2, -12 * SIZE(Y)

	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3
	movaps	%xmm3, -10 * SIZE(Y)

	mulpd	ALPHA, %xmm4
	addpd	 -8 * SIZE(Y), %xmm4
	movaps	%xmm4,  -8 * SIZE(Y)

	mulpd	ALPHA, %xmm5
	addpd	 -6 * SIZE(Y), %xmm5
	movaps	%xmm5,  -6 * SIZE(Y)

	mulpd	ALPHA, %xmm6
	addpd	 -4 * SIZE(Y), %xmm6
	movaps	%xmm6,  -4 * SIZE(Y)

	mulpd	ALPHA, %xmm7
	addpd	 -2 * SIZE(Y), %xmm7
	movaps	%xmm7,  -2 * SIZE(Y)

	subq	$-16 * SIZE, Y
	subq	$-16 * SIZE, X
	ALIGN_3

.L23:
	movq	M,  %rax
	andq	$8, %rax
	jle	.L24
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1
	movsd	-12 * SIZE(X), %xmm2
	movhps	-11 * SIZE(X), %xmm2
	movsd	-10 * SIZE(X), %xmm3
	movhps	 -9 * SIZE(X), %xmm3

	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	mulpd	ALPHA, %xmm1
	addpd	-14 * SIZE(Y), %xmm1
	mulpd	ALPHA, %xmm2
	addpd	-12 * SIZE(Y), %xmm2
	mulpd	ALPHA, %xmm3
	addpd	-10 * SIZE(Y), %xmm3

	movaps	%xmm0, -16 * SIZE(Y)
	movaps	%xmm1, -14 * SIZE(Y)
	movaps	%xmm2, -12 * SIZE(Y)
	movaps	%xmm3, -10 * SIZE(Y)

	addq	$8 * SIZE, X
	addq	$8 * SIZE, Y
	ALIGN_3

.L24:
	movq	M,  %rax
	andq	$4, %rax
	jle	.L25
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	movsd	-14 * SIZE(X), %xmm1
	movhps	-13 * SIZE(X), %xmm1

	mulpd	ALPHA, %xmm0
	mulpd	ALPHA, %xmm1

	addpd	-16 * SIZE(Y), %xmm0
	addpd	-14 * SIZE(Y), %xmm1

	movaps	%xmm0, -16 * SIZE(Y)
	movaps	%xmm1, -14 * SIZE(Y)

	addq	$4 * SIZE, X
	addq	$4 * SIZE, Y
	ALIGN_3

.L25:
	movq	M,  %rax
	andq	$2, %rax
	jle	.L26
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	movhps	-15 * SIZE(X), %xmm0
	mulpd	ALPHA, %xmm0
	addpd	-16 * SIZE(Y), %xmm0
	movaps	%xmm0, -16 * SIZE(Y)

	addq	$2 * SIZE, X
	addq	$2 * SIZE, Y
	ALIGN_3

.L26:
	movq	M,  %rax
	andq	$1, %rax
	jle	.L29
	ALIGN_3

	movsd	-16 * SIZE(X), %xmm0
	mulsd	ALPHA, %xmm0
	addsd	-16 * SIZE(Y), %xmm0

	movsd	%xmm0, -16 * SIZE(Y)
	ALIGN_3

.L29:
	xorq	%rax,%rax

	RESTOREREGISTERS

	ret
	ALIGN_3
#endif
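
/*
 * General-stride path (explanatory note): used when INCX or INCY is not one
 * element.  If either increment is zero the unrolled loop is skipped and the
 * scalar loop at .L46 is used; otherwise .L41 handles eight elements per
 * iteration, gathering X pairs with movsd/movhpd, reading Y through the
 * scratch pointer YY and writing the results back through Y.
 */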

.L40:
	movq	Y, YY
	movq	M,  %rax
// If incx == 0 || incy == 0, avoid the unrolled loop.
	cmpq	$0, INCX
	je  .L46
	cmpq	$0, INCY
	je  .L46

	sarq	$3, %rax
	jle	.L45
	ALIGN_3

.L41:
	movsd	0 * SIZE(X), %xmm0
	addq	INCX, X
	movhpd	0 * SIZE(X), %xmm0
	addq	INCX, X
	mulpd	ALPHA, %xmm0

	movsd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	movhpd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	addpd	%xmm6, %xmm0

	movsd	0 * SIZE(X), %xmm1
	addq	INCX, X
	movhpd	0 * SIZE(X), %xmm1
	addq	INCX, X
	mulpd	ALPHA, %xmm1

	movsd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	movhpd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	addpd	%xmm6, %xmm1

	movsd	0 * SIZE(X), %xmm2
	addq	INCX, X
	movhpd	0 * SIZE(X), %xmm2
	addq	INCX, X
	mulpd	ALPHA, %xmm2

	movsd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	movhpd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	addpd	%xmm6, %xmm2

	movsd	0 * SIZE(X), %xmm3
	addq	INCX, X
	movhpd	0 * SIZE(X), %xmm3
	addq	INCX, X
	mulpd	ALPHA, %xmm3

	movsd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	movhpd	0 * SIZE(YY), %xmm6
	addq	INCY, YY
	addpd	%xmm6, %xmm3

	movsd	%xmm0, 0 * SIZE(Y)
	addq	INCY, Y
	movhpd	%xmm0, 0 * SIZE(Y)
	addq	INCY, Y
	movsd	%xmm1, 0 * SIZE(Y)
	addq	INCY, Y
	movhpd	%xmm1, 0 * SIZE(Y)
	addq	INCY, Y
	movsd	%xmm2, 0 * SIZE(Y)
	addq	INCY, Y
	movhpd	%xmm2, 0 * SIZE(Y)
	addq	INCY, Y
	movsd	%xmm3, 0 * SIZE(Y)
	addq	INCY, Y
	movhpd	%xmm3, 0 * SIZE(Y)
	addq	INCY, Y

	decq	%rax
	jg	.L41
	ALIGN_3

.L45:
	movq	M,  %rax
	andq	$7, %rax
	jle	.L47
	ALIGN_3

.L46:
	movsd	(X), %xmm0
	addq	INCX, X
	mulsd	%xmm15, %xmm0
	addsd	(Y), %xmm0
	movsd	%xmm0, (Y)
	addq	INCY, Y
	decq	%rax
	jg	.L46
	ALIGN_3

.L47:
	xorq	%rax, %rax

	RESTOREREGISTERS

	ret

	EPILOGUE