diff --git a/kernel/x86_64/cgemm_kernel_8x2_haswell.S b/kernel/x86_64/cgemm_kernel_8x2_haswell.S
index 98f40054e..a608071db 100644
--- a/kernel/x86_64/cgemm_kernel_8x2_haswell.S
+++ b/kernel/x86_64/cgemm_kernel_8x2_haswell.S
@@ -227,8 +227,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPS_I(	%ymm7 ,%ymm3,%ymm1 )
-	addq	$6*SIZE, BO
-	addq	$16*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 16*SIZE, AO
 	decq	%rax
 .endm
@@ -356,8 +356,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPS_R(	%ymm4 ,%ymm2,%ymm0 )
 	VFMADDPS_I(	%ymm5 ,%ymm3,%ymm0 )
-	addq	$6*SIZE, BO
-	addq	$8*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 8*SIZE, AO
 	decq	%rax
 .endm
@@ -447,8 +447,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPS_R(	%xmm4 ,%xmm2,%xmm0 )
 	VFMADDPS_I(	%xmm5 ,%xmm3,%xmm0 )
-	addq	$6*SIZE, BO
-	addq	$4*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 4*SIZE, AO
 	decq	%rax
 .endm
@@ -540,8 +540,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPS_R(	%xmm4 ,%xmm2,%xmm0 )
 	VFMADDPS_I(	%xmm5 ,%xmm3,%xmm0 )
-	addq	$6*SIZE, BO
-	addq	$2*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 2*SIZE, AO
 	decq	%rax
 .endm
diff --git a/kernel/x86_64/sgemm_kernel_16x4_haswell.S b/kernel/x86_64/sgemm_kernel_16x4_haswell.S
index d88add02b..ef156fd27 100644
--- a/kernel/x86_64/sgemm_kernel_16x4_haswell.S
+++ b/kernel/x86_64/sgemm_kernel_16x4_haswell.S
@@ -181,8 +181,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADD231PS_(	%ymm14,%ymm3,%ymm0 )
 	VFMADD231PS_(	%ymm15,%ymm3,%ymm1 )
-	addq	$6*SIZE, BO
-	addq	$16*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 16*SIZE, AO
 	decq	%rax
 .endm
@@ -268,8 +268,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADD231PS_(	%ymm12,%ymm2,%ymm0 )
 	VFMADD231PS_(	%ymm14,%ymm3,%ymm0 )
-	addq	$6*SIZE, BO
-	addq	$8*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 8*SIZE, AO
 	decq	%rax
 .endm
@@ -327,8 +327,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADD231PS_(	%xmm12,%xmm2,%xmm0 )
 	VFMADD231PS_(	%xmm14,%xmm3,%xmm0 )
-	addq	$6*SIZE, BO
-	addq	$4*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 4*SIZE, AO
 	decq	%rax
 .endm
@@ -392,8 +392,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADD231SS_(	%xmm14,%xmm3,%xmm0 )
 	VFMADD231SS_(	%xmm15,%xmm3,%xmm1 )
-	addq	$6*SIZE, BO
-	addq	$2*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 2*SIZE, AO
 	decq	%rax
 .endm
@@ -478,8 +478,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADD231SS_(	%xmm12,%xmm2,%xmm0 )
 	VFMADD231SS_(	%xmm14,%xmm3,%xmm0 )
-	addq	$6*SIZE, BO
-	addq	$1*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 1*SIZE, AO
 	decq	%rax
 .endm
diff --git a/kernel/x86_64/zgemm_kernel_4x2_haswell.S b/kernel/x86_64/zgemm_kernel_4x2_haswell.S
index e23e09ecc..f91bfa89b 100644
--- a/kernel/x86_64/zgemm_kernel_4x2_haswell.S
+++ b/kernel/x86_64/zgemm_kernel_4x2_haswell.S
@@ -222,8 +222,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPD_I(	%ymm5 ,%ymm3,%ymm0 )
 	VFMADDPD_I(	%ymm7 ,%ymm3,%ymm1 )
-	addq	$6*SIZE, BO
-	addq	$8*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 8*SIZE, AO
 	decq	%rax
 .endm
@@ -362,8 +362,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPD_I(	%xmm5 ,%xmm3,%xmm0 )
 	VFMADDPD_I(	%xmm7 ,%xmm3,%xmm1 )
-	addq	$6*SIZE, BO
-	addq	$4*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 4*SIZE, AO
 	decq	%rax
 .endm
@@ -491,8 +491,8 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 	VFMADDPD_R(	%xmm4 ,%xmm2,%xmm0 )
 	VFMADDPD_I(	%xmm5 ,%xmm3,%xmm0 )
-	addq	$6*SIZE, BO
-	addq	$2*SIZE, AO
+	addq	$ 6*SIZE, BO
+	addq	$ 2*SIZE, AO
 	decq	%rax
 .endm