 src/core/arm/skyeye_common/vfp/vfpsingle.cpp | 46 +++++++++++++++++-----------------
 1 file changed, 23 insertions(+), 23 deletions(-)
diff --git a/src/core/arm/skyeye_common/vfp/vfpsingle.cpp b/src/core/arm/skyeye_common/vfp/vfpsingle.cpp
index 9a7ca5c14..5a655a6f2 100644
--- a/src/core/arm/skyeye_common/vfp/vfpsingle.cpp
+++ b/src/core/arm/skyeye_common/vfp/vfpsingle.cpp
@@ -67,7 +67,7 @@ static struct vfp_single vfp_single_default_qnan = {
 
 static void vfp_single_dump(const char *str, struct vfp_single *s)
 {
-    LOG_DEBUG(Core_ARM11, "%s: sign=%d exponent=%d significand=%08x",
+    LOG_TRACE(Core_ARM11, "%s: sign=%d exponent=%d significand=%08x",
               str, s->sign != 0, s->exponent, s->significand);
 }
 
@@ -158,7 +158,7 @@ u32 vfp_single_normaliseround(ARMul_State* state, int sd, struct vfp_single *vs,
     } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vs->sign != 0))
         incr = (1 << (VFP_SINGLE_LOW_BITS + 1)) - 1;
 
-    LOG_DEBUG(Core_ARM11, "rounding increment = 0x%08x", incr);
+    LOG_TRACE(Core_ARM11, "rounding increment = 0x%08x", incr);
 
     /*
      * Is our rounding going to overflow?
@@ -213,7 +213,7 @@ pack:
     vfp_single_dump("pack: final", vs);
     {
         s32 d = vfp_single_pack(vs);
-        LOG_DEBUG(Core_ARM11, "%s: d(s%d)=%08x exceptions=%08x", func,
+        LOG_TRACE(Core_ARM11, "%s: d(s%d)=%08x exceptions=%08x", func,
                   sd, d, exceptions);
         vfp_put_float(state, d, sd);
     }
@@ -304,7 +304,7 @@ u32 vfp_estimate_sqrt_significand(u32 exponent, u32 significand)
     u32 z, a;
 
     if ((significand & 0xc0000000) != 0x40000000) {
-        LOG_DEBUG(Core_ARM11, "invalid significand");
+        LOG_TRACE(Core_ARM11, "invalid significand");
     }
 
     a = significand << 1;
@@ -394,7 +394,7 @@ sqrt_invalid:
         term = (u64)vsd.significand * vsd.significand;
         rem = ((u64)vsm.significand << 32) - term;
 
-        LOG_DEBUG(Core_ARM11, "term=%016" PRIx64 "rem=%016" PRIx64, term, rem);
+        LOG_TRACE(Core_ARM11, "term=%016" PRIx64 "rem=%016" PRIx64, term, rem);
 
         while (rem < 0) {
             vsd.significand -= 1;
@@ -626,7 +626,7 @@ static u32 vfp_single_ftoui(ARMul_State* state, int sd, int unused, s32 m, u32 f
         }
     }
 
-    LOG_DEBUG(Core_ARM11, "ftoui: d(s%d)=%08x exceptions=%08x", sd, d, exceptions);
+    LOG_TRACE(Core_ARM11, "ftoui: d(s%d)=%08x exceptions=%08x", sd, d, exceptions);
 
     vfp_put_float(state, d, sd);
 
@@ -705,7 +705,7 @@ static u32 vfp_single_ftosi(ARMul_State* state, int sd, int unused, s32 m, u32 f
         }
     }
 
-    LOG_DEBUG(Core_ARM11, "ftosi: d(s%d)=%08x exceptions=%08x", sd, d, exceptions);
+    LOG_TRACE(Core_ARM11, "ftosi: d(s%d)=%08x exceptions=%08x", sd, d, exceptions);
 
     vfp_put_float(state, (s32)d, sd);
 
@@ -873,7 +873,7 @@ vfp_single_multiply(struct vfp_single *vsd, struct vfp_single *vsn, struct vfp_s
         struct vfp_single *t = vsn;
         vsn = vsm;
         vsm = t;
-        LOG_DEBUG(Core_ARM11, "swapping M <-> N");
+        LOG_TRACE(Core_ARM11, "swapping M <-> N");
     }
 
     vsd->sign = vsn->sign ^ vsm->sign;
@@ -926,7 +926,7 @@ vfp_single_multiply_accumulate(ARMul_State* state, int sd, int sn, s32 m, u32 fp
     s32 v;
 
     v = vfp_get_float(state, sn);
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, v);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, v);
     vfp_single_unpack(&vsn, v, &fpscr);
     if (vsn.exponent == 0 && vsn.significand)
         vfp_single_normalise_denormal(&vsn);
@@ -941,7 +941,7 @@ vfp_single_multiply_accumulate(ARMul_State* state, int sd, int sn, s32 m, u32 fp
         vsp.sign = vfp_sign_negate(vsp.sign);
 
     v = vfp_get_float(state, sd);
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sd, v);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sd, v);
     vfp_single_unpack(&vsn, v, &fpscr);
     if (vsn.exponent == 0 && vsn.significand != 0)
         vfp_single_normalise_denormal(&vsn);
@@ -963,7 +963,7 @@ vfp_single_multiply_accumulate(ARMul_State* state, int sd, int sn, s32 m, u32 fp
  */
 static u32 vfp_single_fmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 {
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, sd);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd);
     return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, 0, "fmac");
 }
 
@@ -973,7 +973,7 @@ static u32 vfp_single_fmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 static u32 vfp_single_fnmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 {
     // TODO: this one has its arguments inverted, investigate.
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sd, sn);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sd, sn);
     return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_MULTIPLY, "fnmac");
 }
 
@@ -982,7 +982,7 @@ static u32 vfp_single_fnmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr
  */
 static u32 vfp_single_fmsc(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 {
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, sd);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd);
     return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_SUBTRACT, "fmsc");
 }
 
@@ -991,7 +991,7 @@ static u32 vfp_single_fmsc(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
  */
 static u32 vfp_single_fnmsc(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 {
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, sd);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd);
     return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_SUBTRACT | NEG_MULTIPLY, "fnmsc");
 }
 
@@ -1004,7 +1004,7 @@ static u32 vfp_single_fmul(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
     u32 exceptions;
     s32 n = vfp_get_float(state, sn);
 
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, n);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n);
 
     vfp_single_unpack(&vsn, n, &fpscr);
     if (vsn.exponent == 0 && vsn.significand)
@@ -1027,7 +1027,7 @@ static u32 vfp_single_fnmul(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr
     u32 exceptions;
     s32 n = vfp_get_float(state, sn);
 
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, n);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n);
 
     vfp_single_unpack(&vsn, n, &fpscr);
     if (vsn.exponent == 0 && vsn.significand)
@@ -1051,7 +1051,7 @@ static u32 vfp_single_fadd(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
     u32 exceptions;
     s32 n = vfp_get_float(state, sn);
 
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, n);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n);
 
     /*
      * Unpack and normalise denormals.
@@ -1074,7 +1074,7 @@ static u32 vfp_single_fadd(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
  */
 static u32 vfp_single_fsub(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
 {
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, sd);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd);
     /*
      * Subtraction is addition with one sign inverted.
      */
@@ -1094,7 +1094,7 @@ static u32 vfp_single_fdiv(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr)
     s32 n = vfp_get_float(state, sn);
     int tm, tn;
 
-    LOG_DEBUG(Core_ARM11, "s%u = %08x", sn, n);
+    LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n);
 
     vfp_single_unpack(&vsn, n, &fpscr);
     vfp_single_unpack(&vsm, m, &fpscr);
@@ -1241,7 +1241,7 @@ u32 vfp_single_cpdo(ARMul_State* state, u32 inst, u32 fpscr)
     else
         veclen = fpscr & FPSCR_LENGTH_MASK;
 
-    LOG_DEBUG(Core_ARM11, "vecstride=%u veclen=%u", vecstride,
+    LOG_TRACE(Core_ARM11, "vecstride=%u veclen=%u", vecstride,
               (veclen >> FPSCR_LENGTH_BIT) + 1);
 
     if (!fop->fn) {
@@ -1257,16 +1257,16 @@ u32 vfp_single_cpdo(ARMul_State* state, u32 inst, u32 fpscr)
 
         type = (fop->flags & OP_DD) ? 'd' : 's';
         if (op == FOP_EXT)
-            LOG_DEBUG(Core_ARM11, "itr%d (%c%u) = op[%u] (s%u=%08x)",
+            LOG_TRACE(Core_ARM11, "itr%d (%c%u) = op[%u] (s%u=%08x)",
                       vecitr >> FPSCR_LENGTH_BIT, type, dest, sn,
                       sm, m);
         else
-            LOG_DEBUG(Core_ARM11, "itr%d (%c%u) = (s%u) op[%u] (s%u=%08x)",
+            LOG_TRACE(Core_ARM11, "itr%d (%c%u) = (s%u) op[%u] (s%u=%08x)",
                       vecitr >> FPSCR_LENGTH_BIT, type, dest, sn,
                       FOP_TO_IDX(op), sm, m);
 
         except = fop->fn(state, dest, sn, m, fpscr);
-        LOG_DEBUG(Core_ARM11, "itr%d: exceptions=%08x",
+        LOG_TRACE(Core_ARM11, "itr%d: exceptions=%08x",
                   vecitr >> FPSCR_LENGTH_BIT, except);
 
         exceptions |= except;