Use int32_t instead of plain int with Unicode code points

On some architectures, int just isn't big enough to hold all Unicode
code points.
Petri Lehtinen 2009-12-02 23:48:50 +02:00
parent e0a88d19d1
commit d67aeb9739
3 changed files with 10 additions and 6 deletions
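
The motivation in the commit message can be made concrete: ISO C only guarantees that int is at least 16 bits wide, while Unicode code points run up to U+10FFFF, which needs 21 bits. A minimal check (illustrative, not part of the commit):

    #include <limits.h>
    #include <stdio.h>

    int main(void)
    {
        /* INT_MAX is only required to be >= 32767 (a 16-bit int),
           but the highest Unicode code point is 0x10FFFF. */
        printf("INT_MAX = %d\n", INT_MAX);
        printf("int can hold U+10FFFF: %s\n",
               INT_MAX >= 0x10FFFF ? "yes" : "no");
        return 0;
    }

int32_t from <stdint.h> is exactly 32 bits wherever it exists, hence the switch below.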

src/dump.c

@@ -9,6 +9,7 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
+#include <stdint.h>
 
 #include <jansson.h>
 #include "jansson_private.h"

src/load.c

@@ -14,6 +14,7 @@
 #include <string.h>
 #include <stdarg.h>
 #include <assert.h>
+#include <stdint.h>
 
 #include <jansson.h>
 #include "jansson_private.h"
@@ -221,10 +222,10 @@ static void lex_save_cached(lex_t *lex)
 }
 
 /* assumes that str points to 'u' plus at least 4 valid hex digits */
-static int decode_unicode_escape(const char *str)
+static int32_t decode_unicode_escape(const char *str)
 {
     int i;
-    int value = 0;
+    int32_t value = 0;
 
     assert(str[0] == 'u');
 
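The diff shows only the signature and locals of decode_unicode_escape; a sketch of what such a decoder does, assuming the usual \uXXXX form (hypothetical code, not the commit's exact body):

    #include <assert.h>
    #include <stdint.h>

    /* Parse the 4 hex digits after 'u' into a code unit.
       Returns -1 on a non-hex character. */
    static int32_t decode_unicode_escape_sketch(const char *str)
    {
        int i;
        int32_t value = 0;

        assert(str[0] == 'u');

        for(i = 1; i <= 4; i++) {
            char c = str[i];
            value <<= 4;
            if('0' <= c && c <= '9')
                value += c - '0';
            else if('a' <= c && c <= 'f')
                value += c - 'a' + 10;
            else if('A' <= c && c <= 'F')
                value += c - 'A' + 10;
            else
                return -1;
        }

        return value;
    }

For example, decoding "u0041" yields 0x41. Note that even a single escape can reach 0xFFFF, which already overflows a 16-bit signed int, and the surrogate-pair combination below goes higher still.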
@@ -325,7 +326,7 @@ static void lex_scan_string(lex_t *lex, json_error_t *error)
             if(*p == 'u') {
                 char buffer[4];
                 int length;
-                int value;
+                int32_t value;
 
                 value = decode_unicode_escape(p);
                 p += 5;
@@ -333,7 +334,7 @@ static void lex_scan_string(lex_t *lex, json_error_t *error)
                 if(0xD800 <= value && value <= 0xDBFF) {
                     /* surrogate pair */
                     if(*p == '\\' && *(p + 1) == 'u') {
-                        int value2 = decode_unicode_escape(++p);
+                        int32_t value2 = decode_unicode_escape(++p);
                         p += 5;
 
                         if(0xDC00 <= value2 && value2 <= 0xDFFF) {
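This hunk detects a UTF-16 surrogate pair: a high surrogate (U+D800 to U+DBFF) followed by a low surrogate (U+DC00 to U+DFFF) encodes a single code point above U+FFFF. The standard combining formula, as a sketch (not taken from the commit):

    #include <stdint.h>

    /* Combine a high/low surrogate pair into one code point.
       The result ranges from 0x10000 to 0x10FFFF, i.e. it can
       never fit in a 16-bit int. */
    static int32_t combine_surrogates(int32_t hi, int32_t lo)
    {
        return 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00);
    }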

src/utf.c

@@ -6,8 +6,9 @@
  */
 
 #include <string.h>
+#include <stdint.h>
 
-int utf8_encode(int codepoint, char *buffer, int *size)
+int utf8_encode(int32_t codepoint, char *buffer, int *size)
 {
     if(codepoint < 0)
         return -1;
@@ -81,7 +82,8 @@ int utf8_check_first(char byte)
 int utf8_check_full(const char *buffer, int size)
 {
-    int i, value = 0;
+    int i;
+    int32_t value = 0;
     unsigned char u = (unsigned char)buffer[0];
 
     if(size == 2)
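
The widened parameter matters in utf8_encode because the encoder branches on the code point's magnitude all the way up to U+10FFFF. A self-contained sketch of range-based UTF-8 encoding (assumed shape; the commit shows only the signature change):

    #include <stdint.h>

    /* Encode one code point as 1-4 UTF-8 bytes; a sketch, not the
       commit's exact body. Returns 0 on success, -1 on a code point
       outside the Unicode range. */
    static int utf8_encode_sketch(int32_t cp, char *buffer, int *size)
    {
        if(cp < 0 || cp > 0x10FFFF)
            return -1;

        if(cp < 0x80) {                    /* 1 byte: ASCII */
            buffer[0] = (char)cp;
            *size = 1;
        }
        else if(cp < 0x800) {              /* 2 bytes */
            buffer[0] = (char)(0xC0 | (cp >> 6));
            buffer[1] = (char)(0x80 | (cp & 0x3F));
            *size = 2;
        }
        else if(cp < 0x10000) {            /* 3 bytes */
            buffer[0] = (char)(0xE0 | (cp >> 12));
            buffer[1] = (char)(0x80 | ((cp >> 6) & 0x3F));
            buffer[2] = (char)(0x80 | (cp & 0x3F));
            *size = 3;
        }
        else {                             /* 4 bytes */
            buffer[0] = (char)(0xF0 | (cp >> 18));
            buffer[1] = (char)(0x80 | ((cp >> 12) & 0x3F));
            buffer[2] = (char)(0x80 | ((cp >> 6) & 0x3F));
            buffer[3] = (char)(0x80 | (cp & 0x3F));
            *size = 4;
        }
        return 0;
    }

The 3- and 4-byte branches are exactly the ones a 16-bit int cannot reach safely, which is what the commit message calls out.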