diff --git a/compiler/cgobj.pas b/compiler/cgobj.pas
index 9919feaba2..acd6881153 100644
--- a/compiler/cgobj.pas
+++ b/compiler/cgobj.pas
@@ -927,8 +927,9 @@ implementation
           a_op_const_reg_reg(list,OP_SHR,sreg.subsetregsize,sreg.startbit,sreg.subsetreg,tmpreg);
           stopbit := sreg.startbit + sreg.bitlen;
           // on x86(64), 1 shl 32(64) = 1 instead of 0
-          if (stopbit - sreg.startbit <> AIntBits) then
-            bitmask := (aint(1) shl (stopbit - sreg.startbit)) - 1
+          // use aword to prevent overflow with 1 shl 31
+          if (stopbit - sreg.startbit < AIntBits) then
+            bitmask := (aword(1) shl (stopbit - sreg.startbit)) - 1
           else
             bitmask := -1;
           a_op_const_reg(list,OP_AND,sreg.subsetregsize,bitmask,tmpreg);
@@ -953,9 +954,9 @@ implementation
           stopbit := sreg.startbit + sreg.bitlen;
           // on x86(64), 1 shl 32(64) = 1 instead of 0
           if (stopbit <> AIntBits) then
-            bitmask := not(((aint(1) shl stopbit)-1) xor ((aint(1) shl sreg.startbit)-1))
+            bitmask := not(((aword(1) shl stopbit)-1) xor ((aword(1) shl sreg.startbit)-1))
           else
-            bitmask := not(-1 xor ((aint(1) shl sreg.startbit)-1));
+            bitmask := not(-1 xor ((aword(1) shl sreg.startbit)-1));
           if not(slopt in [SL_SETZERO,SL_SETMAX]) then
             begin
               tmpreg:=getintregister(list,sreg.subsetregsize);
@@ -966,13 +967,13 @@ implementation
             end;
           if (slopt <> SL_SETMAX) then
             a_op_const_reg(list,OP_AND,sreg.subsetregsize,bitmask,sreg.subsetreg);
- 
+
           case slopt of
             SL_SETZERO : ;
             SL_SETMAX :
               if (sreg.bitlen <> AIntBits) then
                 a_op_const_reg(list,OP_OR,sreg.subsetregsize,
-                  ((aint(1) shl sreg.bitlen)-1) shl sreg.startbit,
+                  ((aword(1) shl sreg.bitlen)-1) shl sreg.startbit,
                   sreg.subsetreg)
               else
                 a_load_const_reg(list,sreg.subsetregsize,-1,sreg.subsetreg);
@@ -999,9 +1000,9 @@ implementation
           stopbit := tosreg.startbit + tosreg.bitlen;
           // on x86(64), 1 shl 32(64) = 1 instead of 0
           if (stopbit <> AIntBits) then
-            bitmask := not(((aint(1) shl stopbit)-1) xor ((aint(1) shl tosreg.startbit)-1))
+            bitmask := not(((aword(1) shl stopbit)-1) xor ((aword(1) shl tosreg.startbit)-1))
           else
-            bitmask := (aint(1) shl tosreg.startbit) - 1;
+            bitmask := (aword(1) shl tosreg.startbit) - 1;
           a_op_const_reg(list,OP_AND,tosreg.subsetregsize,bitmask,tosreg.subsetreg);
           a_op_const_reg(list,OP_AND,tosreg.subsetregsize,not(bitmask),tmpreg);
           a_op_reg_reg(list,OP_OR,tosreg.subsetregsize,tmpreg,tosreg.subsetreg);
@@ -1043,9 +1044,9 @@ implementation
           stopbit := sreg.startbit + sreg.bitlen;
           // on x86(64), 1 shl 32(64) = 1 instead of 0
           if (stopbit <> AIntBits) then
-            bitmask := not(((aint(1) shl stopbit)-1) xor ((aint(1) shl sreg.startbit)-1))
+            bitmask := not(((aword(1) shl stopbit)-1) xor ((aword(1) shl sreg.startbit)-1))
           else
-            bitmask := (aint(1) shl sreg.startbit) - 1;
+            bitmask := (aword(1) shl sreg.startbit) - 1;
           if (((a shl sreg.startbit) and not bitmask) <> not bitmask) then
             a_op_const_reg(list,OP_AND,sreg.subsetregsize,bitmask,sreg.subsetreg);
           a_op_const_reg(list,OP_OR,sreg.subsetregsize,(a shl sreg.startbit) and not(bitmask),sreg.subsetreg);
@@ -1167,7 +1168,7 @@ implementation
                   a_op_const_reg(list,OP_SHL,OS_INT,restbits,valuereg);
                   { mask other bits }
                   if (sref.bitlen <> AIntBits) then
-                    a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,valuereg);
+                    a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,valuereg);
                   a_op_const_reg(list,OP_SHR,OS_INT,loadbitsize-restbits,extra_value_reg)
                 end
               else
@@ -1177,7 +1178,7 @@ implementation
                   a_op_const_reg(list,OP_SHL,OS_INT,loadbitsize-sref.startbit,extra_value_reg);
                   { mask other bits }
                   if (sref.bitlen <> AIntBits) then
-                    a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,extra_value_reg);
+                    a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,extra_value_reg);
                 end;
               { merge }
               a_op_reg_reg(list,OP_OR,OS_INT,extra_value_reg,valuereg);
@@ -1199,7 +1200,7 @@ implementation
                   a_op_const_reg(list,OP_SHR,OS_INT,loadbitsize-sref.bitlen,valuereg);
                   if (loadbitsize <> AIntBits) then
                     { mask left over bits }
-                    a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,valuereg);
+                    a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,valuereg);
                   tmpreg := getintregister(list,OS_INT);
                   { the bits in extra_value_reg (if any) start at the most significant bit => }
                   { extra_value_reg must be shr by (loadbitsize-sref.bitlen)+(loadsize-sref.bitindex) }
@@ -1239,7 +1240,7 @@ implementation
                   { merge }
                   a_op_reg_reg(list,OP_OR,OS_INT,extra_value_reg,valuereg);
                   { mask other bits }
-                  a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,valuereg);
+                  a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,valuereg);
                 end;
             end;
 
@@ -1290,7 +1291,7 @@ implementation
                 else
                   a_op_reg_reg(list,OP_SHR,OS_INT,sref.bitindexreg,valuereg);
                 { mask other bits }
-                a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,valuereg);
+                a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,valuereg);
               end
           end
         else
@@ -1396,7 +1397,7 @@ implementation
             if (slopt <> SL_SETMAX) then
               begin
                 maskreg := getintregister(list,OS_INT);
-                a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen)-1,maskreg);
+                a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen)-1,maskreg);
                 a_op_reg_reg(list,OP_SHL,OS_INT,tmpindexreg,maskreg);
                 a_op_reg_reg(list,OP_NOT,OS_INT,maskreg,maskreg);
                 a_op_reg_reg(list,OP_AND,OS_INT,maskreg,valuereg);
@@ -1409,11 +1410,11 @@ implementation
                 if (slopt <> SL_SETMAX) then
                   a_load_reg_reg(list,fromsize,OS_INT,fromreg,tmpreg)
                 else if (sref.bitlen <> AIntBits) then
-                  a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen) - 1, tmpreg)
+                  a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen) - 1, tmpreg)
                 else
                   a_load_const_reg(list,OS_INT,-1,tmpreg);
                 if (slopt <> SL_REGNOSRCMASK) then
-                  a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,tmpreg);
+                  a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,tmpreg);
                 a_op_reg_reg(list,OP_SHL,OS_INT,tmpindexreg,tmpreg);
                 a_op_reg_reg(list,OP_OR,OS_INT,tmpreg,valuereg);
               end;
@@ -1508,12 +1509,12 @@ implementation
             maskreg := getintregister(list,OS_INT);
             if (target_info.endian = endian_big) then
               begin
-                a_load_const_reg(list,OS_INT,((aint(1) shl sref.bitlen)-1) shl (loadbitsize-sref.bitlen),maskreg);
+                a_load_const_reg(list,OS_INT,((aword(1) shl sref.bitlen)-1) shl (loadbitsize-sref.bitlen),maskreg);
                 a_op_reg_reg(list,OP_SHR,OS_INT,sref.bitindexreg,maskreg);
               end
             else
               begin
-                a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen)-1,maskreg);
+                a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen)-1,maskreg);
                 a_op_reg_reg(list,OP_SHL,OS_INT,sref.bitindexreg,maskreg);
               end;
 
@@ -1528,7 +1529,7 @@ implementation
             if (slopt <> SL_SETMAX) then
               a_load_reg_reg(list,fromsize,OS_INT,fromreg,tmpreg)
             else if (sref.bitlen <> AIntBits) then
-              a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen) - 1, tmpreg)
+              a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen) - 1, tmpreg)
             else
               a_load_const_reg(list,OS_INT,-1,tmpreg);
             if (target_info.endian = endian_big) then
@@ -1537,7 +1538,7 @@ implementation
                 if not(slopt in [SL_REGNOSRCMASK,SL_SETMAX]) and
                    (loadbitsize <> AIntBits) then
                   { mask left over bits }
-                  a_op_const_reg(list,OP_AND,OS_INT,((aint(1) shl sref.bitlen)-1) shl (loadbitsize-sref.bitlen),tmpreg);
+                  a_op_const_reg(list,OP_AND,OS_INT,((aword(1) shl sref.bitlen)-1) shl (loadbitsize-sref.bitlen),tmpreg);
                 a_op_reg_reg(list,OP_SHR,OS_INT,sref.bitindexreg,tmpreg);
               end
             else
@@ -1545,7 +1546,7 @@ implementation
                 if not(slopt in [SL_REGNOSRCMASK,SL_SETMAX]) and
                    (loadbitsize <> AIntBits) then
                   { mask left over bits }
-                  a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,tmpreg);
+                  a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,tmpreg);
                 a_op_reg_reg(list,OP_SHL,OS_INT,sref.bitindexreg,tmpreg);
               end;
             a_op_reg_reg(list,OP_OR,OS_INT,tmpreg,valuereg);
@@ -1564,7 +1565,7 @@ implementation
             if (slopt <> SL_SETMAX) then
               a_load_reg_reg(list,fromsize,OS_INT,fromreg,tmpreg)
             else if (sref.bitlen <> AIntBits) then
-              a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen) - 1, tmpreg)
+              a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen) - 1, tmpreg)
             else
               a_load_const_reg(list,OS_INT,-1,tmpreg);
           end;
@@ -1577,7 +1578,7 @@ implementation
               begin
                 a_op_const_reg_reg(list,OP_ADD,OS_INT,sref.bitlen-2*loadbitsize,sref.bitindexreg,tmpindexreg);
                 a_op_reg_reg(list,OP_NEG,OS_INT,tmpindexreg,tmpindexreg);
-                a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen)-1,maskreg);
+                a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen)-1,maskreg);
                 a_op_reg_reg(list,OP_SHL,OS_INT,tmpindexreg,maskreg);
               end
             else
@@ -1585,7 +1586,7 @@ implementation
                 { Y-x = -(Y-x) }
                 a_op_const_reg_reg(list,OP_SUB,OS_INT,loadbitsize,sref.bitindexreg,tmpindexreg);
                 a_op_reg_reg(list,OP_NEG,OS_INT,tmpindexreg,tmpindexreg);
-                a_load_const_reg(list,OS_INT,(aint(1) shl sref.bitlen)-1,maskreg);
+                a_load_const_reg(list,OS_INT,(aword(1) shl sref.bitlen)-1,maskreg);
                 a_op_reg_reg(list,OP_SHR,OS_INT,tmpindexreg,maskreg);
 {$ifdef x86}
                 { on i386 "x shl 32 = x shl 0", on x86/64 "x shl 64 = x shl 0". Fix so it's 0. }
@@ -1600,10 +1601,10 @@ implementation
                     if (slopt <> SL_SETZERO) then
                       a_op_reg_reg(list,OP_AND,OS_INT,valuereg,tmpreg);
                     a_op_reg_reg(list,OP_AND,OS_INT,valuereg,maskreg);
-                  end; 
+                  end;
 {$endif x86}
               end;
- 
+
             a_op_reg_reg(list,OP_NOT,OS_INT,maskreg,maskreg);
             a_op_reg_reg(list,OP_AND,OS_INT,maskreg,extra_value_reg);
           end;
@@ -1615,7 +1616,7 @@ implementation
             else
               begin
                 if not(slopt in [SL_REGNOSRCMASK,SL_SETMAX]) then
-                  a_op_const_reg(list,OP_AND,OS_INT,(aint(1) shl sref.bitlen)-1,tmpreg);
+                  a_op_const_reg(list,OP_AND,OS_INT,(aword(1) shl sref.bitlen)-1,tmpreg);
                 a_op_reg_reg(list,OP_SHR,OS_INT,tmpindexreg,tmpreg);
               end;
             a_op_reg_reg(list,OP_OR,OS_INT,tmpreg,extra_value_reg);
@@ -1664,7 +1665,7 @@ implementation
           begin
             slopt := SL_REGNOSRCMASK;
             if (
-                (a = (aint(1) shl sref.bitlen) -1) or
+                (a = (aword(1) shl sref.bitlen) -1) or
                 { broken x86 "x shl regbitsize = x" }
                 ((sref.bitlen = AIntBits) and (a = -1))