diff --git a/kernel/loongarch64/rot_lasx.S b/kernel/loongarch64/rot_lasx.S
index 5d7e3d7cc..71378e0b2 100644
--- a/kernel/loongarch64/rot_lasx.S
+++ b/kernel/loongarch64/rot_lasx.S
@@ -1036,7 +1036,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d Y, Y, INCY
     xvfmul.d VT0, VX1, VXC
     xvfmadd.d VT0, VX3, VXS, VT0
-    xvfmul.d VT1, VX0, VXS
+    xvfmul.d VT1, VX1, VXS
     xvfmsub.d VT1, VX3, VXC, VT1
     xvstelm.d VT0, XX, 0, 0
     add.d XX, XX, INCX
diff --git a/kernel/loongarch64/rot_lsx.S b/kernel/loongarch64/rot_lsx.S
index 4b0e59310..3bb77aaec 100644
--- a/kernel/loongarch64/rot_lsx.S
+++ b/kernel/loongarch64/rot_lsx.S
@@ -1142,7 +1142,6 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #ifdef DOUBLE
     vinsgr2vr.d VX0, t1, 0
     vinsgr2vr.d VX0, t2, 1
-    add.d X, X, INCX
     ld.d t1, Y, 0 * SIZE
     add.d Y, Y, INCY
     ld.d t2, Y, 0 * SIZE
@@ -1199,7 +1198,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d Y, Y, INCY
     VMUL VT0, VX1, VXC
    VFMADD VT0, VX3, VXS, VT0
-    VMUL VT1, VX0, VXS
+    VMUL VT1, VX1, VXS
     VMSUB VT1, VX3, VXC, VT1
     vstelm.d VT0, XX, 0, 0
     add.d XX, XX, INCX
@@ -1223,7 +1222,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d Y, Y, INCY
     VMUL VT0, VX1, VXC
    VFMADD VT0, VX3, VXS, VT0
-    VMUL VT1, VX0, VXS
+    VMUL VT1, VX1, VXS
     VMSUB VT1, VX3, VXC, VT1
     vstelm.d VT0, XX, 0, 0
     add.d XX, XX, INCX
@@ -1296,7 +1295,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d Y, Y, INCY
     VMUL VT0, VX1, VXC
    VFMADD VT0, VX3, VXS, VT0
-    VMUL VT1, VX0, VXS
+    VMUL VT1, VX1, VXS
     VMSUB VT1, VX3, VXC, VT1
     vstelm.w VT0, XX, 0, 0
     add.d XX, XX, INCX
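
Note on the fix: the kernels implement the BLAS plane rotation, x_new = c*x + s*y and y_new = c*y - s*x. In the strided (non-unit increment) paths touched above, VT0 is already built from VX1 and VX3, so VT1 must be built from the same freshly loaded VX1, not the stale VX0. The C sketch below is a hedged reference of that computation, not code from this patch; the function name rot_ref, the assumption that VX1/VX3 hold the current x/y elements, and the restriction to positive increments are mine.

/* Reference plane rotation, matching what the LSX/LASX strided loops compute.
 * Assumes incx, incy > 0 (the BLAS negative-increment start-offset rule is
 * omitted for brevity).
 */
#include <stddef.h>

static void rot_ref(size_t n, double *x, size_t incx,
                    double *y, size_t incy, double c, double s)
{
    for (size_t i = 0; i < n; i++) {
        double xi = x[i * incx];   /* corresponds to VX1 (assumption) */
        double yi = y[i * incy];   /* corresponds to VX3 (assumption) */
        x[i * incx] = c * xi + s * yi;   /* VT0 = VX1*VXC + VX3*VXS */
        y[i * incy] = c * yi - s * xi;   /* VT1 = VX3*VXC - VX1*VXS */
    }
}

Using VX0 in the VT1 term mixed an element from a previous load into the y update, which is why the strided double/single paths produced wrong y results before this change.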