Fix ASCII detection; can't use "< 0x80" on a signed char.

Howard Chu 2002-01-05 10:33:38 +00:00
parent f3b61d87b7
commit 996eb58011
2 changed files with 2 additions and 2 deletions

@@ -106,7 +106,7 @@ LDAP_F (void) ldap_pvt_hex_unescape LDAP_P(( char *s ));
  * these macros assume 'x' is an ASCII x
  * and assume the "C" locale
  */
-#define LDAP_ASCII(c) ((c) < 0x80)
+#define LDAP_ASCII(c) (!((c) & 0x80))
 #define LDAP_SPACE(c) ((c) == ' ' || (c) == '\t' || (c) == '\n')
 #define LDAP_DIGIT(c) ((c) >= '0' && (c) <= '9')
 #define LDAP_LOWER(c) ((c) >= 'a' && (c) <= 'z')

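The failure mode is easy to reproduce. The following is a minimal standalone sketch (not part of the patch), assuming a target where plain char is signed, as it is on most common ABIs: a byte with the high bit set promotes to a negative int, so the old "(c) < 0x80" test reports it as ASCII, while the "!((c) & 0x80)" form inspects only the high bit and is unaffected by the signedness of char.

#include <stdio.h>

#define OLD_LDAP_ASCII(c)	((c) < 0x80)	/* pre-patch definition */
#define NEW_LDAP_ASCII(c)	(!((c) & 0x80))	/* patched definition */

int main(void)
{
	char c = (char) 0xC3;	/* lead byte of a two-byte UTF-8 sequence */

	/* With signed char, c promotes to -61, and -61 < 0x80 is true,
	 * so the old macro misclassifies the byte as ASCII. */
	printf("old: %d\n", OLD_LDAP_ASCII(c));	/* prints 1 on signed-char targets */

	/* The bit test keeps only bit 7 after promotion, so it still
	 * sees the high bit and correctly reports non-ASCII. */
	printf("new: %d\n", NEW_LDAP_ASCII(c));	/* prints 0 */

	return 0;
}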

@@ -77,7 +77,7 @@ LDAP_F (char*) ldap_utf8_strtok( char* sp, const char* sep, char **last);
 /* Optimizations */
 LDAP_V (const char) ldap_utf8_lentab[128];
-#define LDAP_UTF8_ISASCII(p) ( *(unsigned char *)(p) ^ 0x80 )
+#define LDAP_UTF8_ISASCII(p) ( !(*(unsigned char *)(p) & 0x80 ) )
 #define LDAP_UTF8_CHARLEN(p) ( LDAP_UTF8_ISASCII(p) \
 	? 1 : ldap_utf8_lentab[*(unsigned char *)(p) ^ 0x80] )
 #define LDAP_UTF8_OFFSET(p) ( LDAP_UTF8_ISASCII(p) \
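
For the UTF-8 macros, the same high-bit test also feeds the length lookup: LDAP_UTF8_CHARLEN returns 1 for ASCII lead bytes and otherwise indexes the 128-entry ldap_utf8_lentab with the lead byte's low seven bits (byte ^ 0x80). The sketch below mirrors that scheme; the table is built in place for illustration and is not the actual contents of ldap_utf8_lentab, and it only covers two- to four-byte sequences.

#include <stdio.h>

static char lentab[128];	/* illustrative stand-in for ldap_utf8_lentab */

static void build_lentab(void)
{
	int i;
	for (i = 0; i < 128; i++) {
		unsigned char b = (unsigned char)(i | 0x80);
		if ((b & 0xE0) == 0xC0)      lentab[i] = 2;	/* 110xxxxx */
		else if ((b & 0xF0) == 0xE0) lentab[i] = 3;	/* 1110xxxx */
		else if ((b & 0xF8) == 0xF0) lentab[i] = 4;	/* 11110xxx */
		else                         lentab[i] = 0;	/* continuation or invalid lead byte */
	}
}

/* Same shape as LDAP_UTF8_CHARLEN: 1 for ASCII, else table lookup. */
static int charlen(const char *p)
{
	unsigned char b = *(const unsigned char *)p;
	return (b & 0x80) ? lentab[b ^ 0x80] : 1;
}

int main(void)
{
	build_lentab();
	printf("%d %d %d\n", charlen("A"), charlen("\xC3\xA9"), charlen("\xE2\x82\xAC"));
	/* expected output: 1 2 3 */
	return 0;
}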