This question is similar to this other question; however, I would like to understand why the code behaves the way it does.
The following code:
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x000000ff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x0000ff00', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x00ff0000', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0xff000000', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x000000ff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x0000ffff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0x00ffffff', 16)).toString(16));
console.log((parseInt('0xdeadbeef', 16) & parseInt('0xffffffff', 16)).toString(16));
Prints:
ef
be00
ad0000
-22000000
ef
beef
adbeef
-21524111
Whereas what I expected from .toString(16) was:
ef
be00
ad0000
de000000
ef
beef
adbeef
deadbeef
What is going on with this?
Thank you in advance for your help.
Thanks to the respondents and commenters below, as well as to the sources cited in the code comments.
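The root cause: JavaScript's bitwise operators (&, |, ^, <<, >>) coerce their operands to signed 32-bit integers (two's complement) before operating. 0xdeadbeef is 3735928559, which is larger than the maximum signed 32-bit value of 2147483647, so as an Int32 it wraps around to -559038737. Number.prototype.toString(16) formats a negative number as a minus sign followed by the hex digits of its absolute value, which is exactly where -21524111 and -22000000 come from. A minimal illustration (variable name is mine):

// Bitwise AND coerces both operands to signed 32-bit integers,
// so any result with bit 31 set comes out negative.
var n = parseInt('0xdeadbeef', 16);         // 3735928559, an ordinary positive Number
console.log(n & 0xffffffff);                // -559038737: the Int32 view of 0xdeadbeef
console.log((n & 0xffffffff).toString(16)); // "-21524111": minus sign + hex of the absolute value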
Here is a solution that works by providing utility functions for converting a 32-bit radix-16 string to and from a signed 32-bit integer:
// Convert 'x' to a signed 32-bit integer, treating 'x' as a radix-16 string
// cf. http://speakingjs.com/es5/ch11.html
function toInt32Radix16(x) {
    return (parseInt(x, 16) | 0);
}

// Convert a signed 32-bit integer 'x' to a radix-16 string
// cf. /questions/791168/javascript-c-style-type-cast-from-signed-to-unsigned
function toRadix16int32(x) {
    return ((x >>> 0).toString(16));
}

console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x000000ff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x0000ff00')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x00ff0000')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0xff000000')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x000000ff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x0000ffff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0x00ffffff')));
console.log(toRadix16int32(toInt32Radix16('0xdeadbeef') & toInt32Radix16('0xffffffff')));
Which gives the expected result:
ef
be00
ad0000
de000000
ef
beef
adbeef
deadbeef
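Why this works: >>> is the one bitwise operator in JavaScript that produces an unsigned 32-bit result, so x >>> 0 shifts by zero bits, leaving the 32-bit pattern untouched while reinterpreting it as a non-negative number; toString(16) then prints the digits you expect. A quick check of the round trip, using values from the output above:

// '>>> 0' keeps the 32-bit pattern intact but reads it back as unsigned.
console.log((-559038737 >>> 0).toString(16)); // "deadbeef"
console.log((-570425344 >>> 0).toString(16)); // "de000000"
console.log(toInt32Radix16('0xdeadbeef'));    // -559038737, round-tripping with the first line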
This was a good lesson for me about how JavaScript handles integers in bitwise operations.