Kouhei Sutou
null+****@clear*****
Fri Jun 14 19:06:58 JST 2013
Kouhei Sutou	2013-06-14 19:06:58 +0900 (Fri, 14 Jun 2013)

  New Revision: 19b8ebbb07a3d4d1545e208511b40efe89589f8e
  https://github.com/groonga/groonga/commit/19b8ebbb07a3d4d1545e208511b40efe89589f8e

  Message:
    Add NULL check on finalizing tokenizer

    Tokenizer may be NULL when normalizer returns an error such as
    unsupported encoding.

  Modified files:
    lib/token.c
    plugins/tokenizers/mecab.c

  Modified: lib/token.c (+9 -0)
===================================================================
--- lib/token.c    2013-06-14 15:39:33 +0900 (1aebe06)
+++ lib/token.c    2013-06-14 19:06:58 +0900 (34f5aa2)
@@ -92,6 +92,9 @@ static grn_obj *
 uvector_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_uvector_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   GRN_FREE(tokenizer);
   return NULL;
@@ -202,6 +205,9 @@ static grn_obj *
 delimited_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_delimited_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_query_close(ctx, tokenizer->query);
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   GRN_FREE(tokenizer);
@@ -444,6 +450,9 @@ static grn_obj *
 ngram_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_ngram_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   grn_tokenizer_query_close(ctx, tokenizer->query);
   GRN_FREE(tokenizer);

  Modified: plugins/tokenizers/mecab.c (+3 -0)
===================================================================
--- plugins/tokenizers/mecab.c    2013-06-14 15:39:33 +0900 (df70ef1)
+++ plugins/tokenizers/mecab.c    2013-06-14 19:06:58 +0900 (246431f)
@@ -241,6 +241,9 @@ static grn_obj *
 mecab_fin(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 {
   grn_mecab_tokenizer *tokenizer = user_data->ptr;
+  if (!tokenizer) {
+    return NULL;
+  }
   grn_tokenizer_token_fin(ctx, &(tokenizer->token));
   grn_tokenizer_query_close(ctx, tokenizer->query);
   if (tokenizer->buf) {
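For illustration, below is a minimal, self-contained C sketch (not part of the
commit) of the failure mode these checks guard against: an init callback can
return an error before allocating its state, for example when the normalizer
rejects an unsupported encoding, yet the fin callback still runs afterwards and
must tolerate a NULL pointer. All names here (my_tokenizer, my_user_data,
my_init, my_fin) are hypothetical stand-ins, not the actual Groonga API; only
the NULL-guard pattern mirrors the patch.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical tokenizer state; stands in for grn_*_tokenizer. */
typedef struct {
  char *buf;
} my_tokenizer;

/* Plays the role of grn_user_data: ptr holds the tokenizer, if any. */
typedef struct {
  void *ptr;
} my_user_data;

/* The init callback can fail before allocating the tokenizer, e.g.
 * when the normalizer reports an unsupported encoding; in that case
 * user_data->ptr stays NULL. */
static int
my_init(my_user_data *user_data, int unsupported_encoding)
{
  my_tokenizer *tokenizer;
  user_data->ptr = NULL;
  if (unsupported_encoding) {
    return -1; /* error path: nothing was allocated */
  }
  tokenizer = malloc(sizeof(*tokenizer));
  if (!tokenizer) {
    return -1;
  }
  tokenizer->buf = NULL;
  user_data->ptr = tokenizer;
  return 0;
}

/* The fin callback runs even after a failed init, so it must check
 * for NULL first -- the same guard the commit adds to each *_fin(). */
static void
my_fin(my_user_data *user_data)
{
  my_tokenizer *tokenizer = user_data->ptr;
  if (!tokenizer) {
    return; /* init failed; nothing to free */
  }
  free(tokenizer->buf);
  free(tokenizer);
}

int
main(void)
{
  my_user_data user_data;

  /* Failure path: without the NULL check, my_fin() would
   * dereference a NULL tokenizer here. */
  my_init(&user_data, 1);
  my_fin(&user_data);

  /* Success path: state is allocated and then released. */
  my_init(&user_data, 0);
  my_fin(&user_data);

  printf("both paths finalized safely\n");
  return 0;
}

The same defensive pattern is applied uniformly to all four fin functions
patched above (uvector_fin, delimited_fin, ngram_fin, and mecab_fin).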