This question is similar to this other question; however, I'd like to understand why it behaves the way it does.
The following code:
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x000000ff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x0000ff00', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x00ff0000', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0xff000000', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x000000ff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x0000ffff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x00ffffff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0xffffffff', 16)).toString(16));
Returns:
ef
be00
ad0000
-22000000
ef
beef
adbeef
-21524111
Whereas what I was expecting from .toString(16) was:
ef
be00
ad0000
de000000
ef
beef
adbeef
deadbeef
What's going on with this?
Thank you in advance for your help.
With thanks to the responders and commenters below, and also to the following sources:
- http://speakingjs.com/es5/ch11.html
- JavaScript C Style Type Cast From Signed To Unsigned
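The short version of what those sources explain: JavaScript's bitwise operators work on signed 32-bit integers, so any result whose top bit is set (such as 0xde000000 or 0xdeadbeef) comes out negative, and toString(16) on a negative Number prints a minus sign followed by the hex digits of the magnitude. A quick check (the variable name here is just for illustration):
var anded = parseInt('0xdeadbeef', 16) & parseInt('0xffffffff', 16);
console.log(anded);                      // -559038737, the signed 32-bit reading of 0xdeadbeef
console.log(anded.toString(16));         // "-21524111", a minus sign plus the hex of 559038737
console.log((anded >>> 0).toString(16)); // "deadbeef", since >>> 0 reinterprets the bits as unsigned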
Here is a solution that works by providing two utility functions to convert a radix-16 (hexadecimal) string to and from a signed 32-bit integer:
// Convert the radix-16 (hex) string 'x' to a signed 32-bit integer
// c.f. http://speakingjs.com/es5/ch11.html
function toInt32Radix16(x) {
    return (parseInt(x, 16) | 0);
}

// Convert the signed 32-bit integer 'x' to a radix-16 (hex) string
// c.f. https://stackoverflow.com/questions/14890994/javascript-c-style-type-cast-from-signed-to-unsigned
function toRadix16int32(x) {
    return ((x >>> 0).toString(16));
}
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x000000ff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x0000ff00')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x00ff0000')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0xff000000')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x000000ff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x0000ffff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x00ffffff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0xffffffff')));
Which yields the expected output:
ef
be00
ad0000
de000000
ef
beef
adbeef
deadbeef
Along the way I also learned a good deal about how JavaScript handles integers.
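For reference, the two coercions the helper functions rely on can be seen in isolation; this is just a sketch of the underlying behavior:
console.log(0xdeadbeef);                             // 3735928559: as a plain Number the value is positive
console.log(0xdeadbeef | 0);                         // -559038737: | 0 truncates to a signed 32-bit integer
console.log(((0xdeadbeef | 0) >>> 0).toString(16));  // "deadbeef": >>> 0 maps the bits back to unsigned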