How to determine the number of similar bits?

I need to compare two numbers and check how similar their more significant bits are. Specifically, I want to determine the number of least significant bits that differ.

10111000
10111011

184 and 187 require an offset of two, since only the two least significant bits are different.

10111011
11111011

187 and 251 require an offset of seven since the seventh least significant bit is different.

My first idea was to XOR the numbers together, and then bit-shift until the result is zero. I feel there is a cleaner solution that does not involve loops, but I have not done enough bit twiddling to come up with it.

The solution should work across all 64 bits, since my numbers are stored as UInt64. I am writing this in C#, but the solution is most likely language-agnostic.
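
For concreteness, here is the loop version I have in mind; a quick sketch (DiffOffset is just a placeholder name), worst case 64 iterations:

using System;

class Demo
{
    // XOR, then shift right until zero; counts how many low bits
    // must be dropped before the two numbers agree.
    static int DiffOffset(ulong a, ulong b)
    {
        ulong diff = a ^ b;
        int offset = 0;
        while (diff != 0)
        {
            diff >>= 1;
            offset++;
        }
        return offset;
    }

    static void Main()
    {
        Console.WriteLine(DiffOffset(184, 187)); // 2
        Console.WriteLine(DiffOffset(187, 251)); // 7
    }
}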


11101101
11010101

Here the offset would be six, since the sixth least significant bit is the highest bit that differs.

+5
#include <stdio.h>
#include <stdlib.h>

/* parse a hex string into a 64-bit value */
#define TO_L(s) (strtoull((s), NULL, 16))

/* Returns the 0-based index of the highest bit that differs between
   xa and xb; add 1 for the 1-based offset used in the question. */
int tsb(unsigned long long xa, unsigned long long xb) {
  unsigned long long v = xa ^ xb;
  static const unsigned long long b[] = {
    0x2, 0xC, 0xF0, 0xFF00, 0xFFFF0000ULL, 0xFFFFFFFF00000000ULL
  };
  static const unsigned int S[]  = { 1, 2, 4, 8, 16, 32 };
  unsigned int r = 0;

/* binary search: if any bits are set in the upper half of the
   remaining range, shift them down and record the step size */
#define STEP(i)   \
  if(v & b[i]) {  \
    int t = S[i]; \
    v >>= t;      \
    r  |= t;      \
  }
  STEP(5)
  STEP(4)
  STEP(3)
  STEP(2)
  STEP(1)
  STEP(0)
  return r;
}

int main(int ac, char **av) {
  return printf("%d\n", tsb(TO_L(av[1]), TO_L(av[2]))), 0;
}

No loops: just six constant-time steps, one for each power of two up to 64. Note that the result numbers bits from zero. Some sample runs:

so ross$ ./a.out 1f f
4
so ross$ ./a.out 471234abcdabcd 981234abcdabcd
55
so ross$ ./a.out 1deadbeef 7feedface
34
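
Since the question mentions C#: on .NET Core 3.0+ the same 0-based result is available from the System.Numerics.BitOperations intrinsic, which maps to a single LZCNT/BSR instruction where the hardware supports it. A minimal sketch (Tsb is an illustrative name):

using System;
using System.Numerics;

class Demo
{
    // 0-based index of the highest differing bit, like tsb above.
    static int Tsb(ulong a, ulong b) => BitOperations.Log2(a ^ b);

    static void Main()
    {
        Console.WriteLine(Tsb(0x1f, 0xf));                          // 4
        Console.WriteLine(Tsb(0x471234abcdabcd, 0x981234abcdabcd)); // 55
        Console.WriteLine(Tsb(0x1deadbeef, 0x7feedface));           // 34
    }
}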
+1

As you suggest, take r = x XOR y, then find the most significant set bit of r. Shifting one bit at a time is O(n) in the width of the word, but since the width is fixed (64 here), you can halve the search range at each step instead:

pos = 0
r = x XOR y
if r>>32 == 0 :
   r = r & 2^32-1
else
   pos += 32
   r = r>>32
if r>>16 == 0 :
   r = r & 2^16-1
else
   pos += 16
   r = r>>16
... etc
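
A C# rendering of this binary search might look as follows (a sketch; HighestDiffBit is an illustrative name). It returns the 1-based offset from the question, or 0 when the inputs are equal. The masking branch in the pseudocode is a no-op when the top half is already zero, so it is dropped here:

using System;

class Demo
{
    // Test the top half of the remaining range; if it is non-zero,
    // shift it down and record the step size. Six steps cover 64 bits.
    static int HighestDiffBit(ulong x, ulong y)
    {
        ulong r = x ^ y;
        if (r == 0) return 0;                      // identical inputs
        int pos = 0;
        if (r >> 32 != 0) { pos += 32; r >>= 32; }
        if (r >> 16 != 0) { pos += 16; r >>= 16; }
        if (r >> 8  != 0) { pos += 8;  r >>= 8;  }
        if (r >> 4  != 0) { pos += 4;  r >>= 4;  }
        if (r >> 2  != 0) { pos += 2;  r >>= 2;  }
        if (r >> 1  != 0) { pos += 1; }
        return pos + 1;                            // 1-based offset
    }

    static void Main()
    {
        Console.WriteLine(HighestDiffBit(184, 187)); // 2
        Console.WriteLine(HighestDiffBit(187, 251)); // 7
    }
}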
+1

A math-based one-liner:

floor( log(184 ^ 187) / log(2) ) + 1

This gives the 1-based position of the most significant bit that differs, which is exactly the offset asked for. It meets the no-loop requirement and works for all 64 bits. Since the divisor is constant, precompute its reciprocal:

double ILog2 = 1 / log(2);

and then use

floor( log(184 ^ 187) * ILog2 ) + 1
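
In C#, the same computation might look like this (a sketch; Math.Log2 requires .NET Core 3.0+, and older runtimes can use Math.Log(diff, 2)). One caveat: a double mantissa holds only 53 bits, so for differences at or above 2^53 the rounded logarithm can be off by one; the integer approaches are safer over the full 64-bit range.

using System;

class Demo
{
    // 1-based position of the highest differing bit via floating point.
    static int DiffOffsetLog(ulong a, ulong b)
    {
        ulong diff = a ^ b;
        if (diff == 0) return 0;                   // identical inputs
        return (int)Math.Floor(Math.Log2(diff)) + 1;
    }

    static void Main()
    {
        Console.WriteLine(DiffOffsetLog(184, 187)); // 2
        Console.WriteLine(DiffOffsetLog(187, 251)); // 7
    }
}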
0

If you want O(log(n)) without tables, something like this:

int findHighestSetBit(unsigned long long x) {
    int rv = 0;
    if (x == 0)
        return -1;  // no set bits
    for (int shift = 32; shift > 0; shift >>= 1) {
        if (x >> shift) {
            rv += shift;
            x >>= shift;
        }
    }
    return rv+1; // number least significant bit as '1' rather than '0'
}

Although this uses a loop, it runs a fixed number of iterations (six for 64 bits), so the compiler can unroll it completely.

0

If you can spare the memory, a lookup table is fast. For 8-bit numbers, a 256-entry LUT is enough:

static unsigned char highest_bit_num_LUT[256] = {0, 1, 2, 2, 3, 3, 3, 3 /* ... through 8 at index 255 */}; // precomputed

unsigned diff = (unsigned)a ^ (unsigned)b; // sure you need XOR and not MINUS?
unsigned highest_bit_num = highest_bit_num_LUT[diff & 0xff];

Now extend it to more bytes:

static unsigned char highest_bit_num_LUT[256] = {0, 1, 2, 2, 3, 3, 3, 3 /* ... through 8 at index 255 */}; // precomputed
unsigned long long diff = a ^ b; // 64-bit; sure you need XOR and not MINUS?
unsigned highest_bit_num = 0;
for (int i = 7; i >= 0; i--)
    if (diff >> (i*8)) { // found the most significant non-zero byte
        // higher bytes are all zero here, so the shifted value fits in one byte
        highest_bit_num = i*8 + highest_bit_num_LUT[diff >> (i*8)];
        break;
    }

So now we have no more than eight iterations.

EDIT: It would be faster to use DigitalRoss's idea for the first three halving steps (64 down to 8 bits), and then finish with the LUT.
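
For reference, a C# sketch of the same table approach (names are illustrative). Building the table programmatically avoids typing 256 entries: each entry is one more than the entry at half the index.

using System;

class Demo
{
    // Lut[i] = 1-based position of the highest set bit of i; Lut[0] = 0.
    static readonly byte[] Lut = BuildLut();

    static byte[] BuildLut()
    {
        var lut = new byte[256];                  // lut[0] stays 0
        for (int i = 1; i < 256; i++)
            lut[i] = (byte)(lut[i >> 1] + 1);     // one more bit than i/2
        return lut;
    }

    // Scan the bytes of the XOR from the most significant end down.
    static int HighestDiffBit(ulong a, ulong b)
    {
        ulong diff = a ^ b;
        for (int i = 7; i >= 0; i--)
        {
            int byteVal = (int)(diff >> (i * 8)) & 0xFF;
            if (byteVal != 0)
                return i * 8 + Lut[byteVal];      // at most 8 iterations
        }
        return 0;                                 // identical inputs
    }

    static void Main()
    {
        Console.WriteLine(HighestDiffBit(184, 187)); // 2
        Console.WriteLine(HighestDiffBit(187, 251)); // 7
    }
}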
