/* Bit set in the bytes that have a zero */ staticinlinelong prep_zero_mask(unsignedlong val, unsignedlong rhs, conststruct word_at_a_time *c)
{ unsignedlong mask = (val & c->low_bits) + c->low_bits; return ~(mask | rhs);
}
/* Identity on this configuration: the mask is already in its final form */
#define create_zero_mask(mask) (mask)
staticinlinelong find_zero(unsignedlong mask)
{ long leading_zero_bits;
/* This will give us 0xff for a NULL char and 0x00 elsewhere */ staticinlineunsignedlong has_zero(unsignedlong a, unsignedlong *bits, conststruct word_at_a_time *c)
{ unsignedlong ret; unsignedlong zero = 0;
/* * This is largely generic for little-endian machines, but the * optimal byte mask counting is probably going to be something * that is architecture-specific. If you have a reliably fast * bit count instruction, that might be better than the multiply * and shift, for example.
*/
/*
 * Carl Chatfield / Jan Achrenius G+ version for 32-bit.
 *
 * Map a little-endian zero-bytemask to the index of the first zero byte,
 * i.e. the number of nonzero bytes that precede it:
 *     0x000000 -> 0, 0x0000ff -> 1, 0x00ffff -> 2, 0xffffff -> 3
 *
 * NOTE(review): the original text was whitespace-mangled
 * ("staticinlinelong", statements jammed onto one line); only spacing
 * and layout are restored — the arithmetic is unchanged.
 */
static inline long count_masked_bytes(long mask)
{
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	long a = (0x0ff0001 + mask) >> 23;

	/* Fix the 1 for 00 case */
	return a & mask;
}
/* The mask we created is directly usable as a bytemask */
#define zero_bytemask(mask) (mask)
#endif/* CONFIG_64BIT */
#endif/* __BIG_ENDIAN__ */
/*
 * We use load_unaligned_zero() in a selftest, which builds a userspace
 * program. Some linker scripts seem to discard the .fixup section, so allow
 * the test code to use a different section name.
 *
 * NOTE(review): the original had all three preprocessor directives fused
 * onto one physical line, which is invalid C — each directive must occupy
 * its own line (restored below; values unchanged).
 */
#ifndef FIXUP_SECTION
#define FIXUP_SECTION ".fixup"
#endif
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.