[Groonga-commit] groonga/groonga at 1ab7d53 [master] grn_tokenizer_query_get_token_filter_index: add a new API

Kouhei Sutou null+****@clear*****
Mon Oct 29 15:26:46 JST 2018


Kouhei Sutou	2018-10-29 15:26:46 +0900 (Mon, 29 Oct 2018)

  Revision: 1ab7d5308f87508f9feb7fd4be96c5a6a39df941
  https://github.com/groonga/groonga/commit/1ab7d5308f87508f9feb7fd4be96c5a6a39df941

  Message:
    grn_tokenizer_query_get_token_filter_index: add a new API
    
    It's for reporting that the current token filter is the Nth token filter.
    
    grn_tokenizer_query_set_token_filter_index() is an internal API.

  Modified files:
    include/groonga/tokenizer.h
    lib/grn_tokenizer.h
    lib/tokenizer.c

  Modified: include/groonga/tokenizer.h (+4 -0)
===================================================================
--- include/groonga/tokenizer.h    2018-10-29 15:26:14 +0900 (e11deb7b6)
+++ include/groonga/tokenizer.h    2018-10-29 15:26:46 +0900 (61468ef59)
@@ -141,6 +141,10 @@ grn_tokenizer_query_get_mode(grn_ctx *ctx, grn_tokenizer_query *query);
 GRN_PLUGIN_EXPORT grn_obj *
 grn_tokenizer_query_get_lexicon(grn_ctx *ctx, grn_tokenizer_query *query);
 
+GRN_PLUGIN_EXPORT unsigned int
+grn_tokenizer_query_get_token_filter_index(grn_ctx *ctx,
+                                           grn_tokenizer_query *query);
+
 /*
   grn_tokenizer_token is needed to return tokens. A grn_tokenizer_token object
   stores a token to be returned and it must be maintained until a request for

  Modified: lib/grn_tokenizer.h (+6 -0)
===================================================================
--- lib/grn_tokenizer.h    2018-10-29 15:26:14 +0900 (badf7bc45)
+++ lib/grn_tokenizer.h    2018-10-29 15:26:46 +0900 (52fcdb7e9)
@@ -1,6 +1,7 @@
 /* -*- c-basic-offset: 2 -*- */
 /*
   Copyright(C) 2018 Brazil
+  Copyright(C) 2018 Kouhei Sutou <kou****@clear*****>
 
   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
@@ -43,6 +44,7 @@ typedef struct _grn_tokenizer_query {
   /* End _grn_tokenizer_query_deprecated compatible layout. */
 
   grn_obj *lexicon;
+  unsigned int token_filter_index;
   unsigned int normalize_flags;
   grn_bool need_normalize;
   grn_bool need_delimiter_check;
@@ -71,6 +73,10 @@ grn_rc
 grn_tokenizer_query_set_lexicon(grn_ctx *ctx,
                                 grn_tokenizer_query *query,
                                 grn_obj *lexicon);
+grn_rc
+grn_tokenizer_query_set_token_filter_index(grn_ctx *ctx,
+                                           grn_tokenizer_query *query,
+                                           unsigned int index);
 
 #ifdef __cplusplus
 }

  Modified: lib/tokenizer.c (+20 -0)
===================================================================
--- lib/tokenizer.c    2018-10-29 15:26:14 +0900 (90912164c)
+++ lib/tokenizer.c    2018-10-29 15:26:46 +0900 (3cefaa4a5)
@@ -1,6 +1,7 @@
 /* -*- c-basic-offset: 2 -*- */
 /*
   Copyright(C) 2012-2018 Brazil
+  Copyright(C) 2018 Kouhei Sutou <kou****@clear*****>
 
   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
@@ -167,6 +168,7 @@ grn_tokenizer_query_init(grn_ctx *ctx, grn_tokenizer_query *query)
   query->token_mode = query->tokenize_mode;
   query->lexicon = NULL;
   query->encoding = ctx->encoding;
+  query->token_filter_index = 0;
 
   query->need_normalize = GRN_TRUE;
   query->need_delimiter_check = GRN_TRUE;
@@ -427,6 +429,24 @@ grn_tokenizer_query_get_lexicon(grn_ctx *ctx, grn_tokenizer_query *query)
   GRN_API_RETURN(query->lexicon);
 }
 
+grn_rc
+grn_tokenizer_query_set_token_filter_index(grn_ctx *ctx,
+                                           grn_tokenizer_query *query,
+                                           unsigned int index)
+{
+  GRN_API_ENTER;
+  query->token_filter_index = index;
+  GRN_API_RETURN(ctx->rc);
+}
+
+unsigned int
+grn_tokenizer_query_get_token_filter_index(grn_ctx *ctx,
+                                           grn_tokenizer_query *query)
+{
+  GRN_API_ENTER;
+  GRN_API_RETURN(query->token_filter_index);
+}
+
 void
 grn_tokenizer_token_init(grn_ctx *ctx, grn_tokenizer_token *token)
 {
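
  A minimal usage sketch of the new public API (not part of this commit):
  a token filter init callback that asks the query which position it
  occupies in the token filter chain. It assumes an init variant that
  receives grn_tokenizer_query; my_token_filter_init is a hypothetical
  name and the logging is only illustrative.

  /* Sketch only: assumes a token-filter init callback that receives
   * grn_tokenizer_query. my_token_filter_init is a hypothetical name. */
  #include <groonga/plugin.h>
  #include <groonga/tokenizer.h>

  static void *
  my_token_filter_init(grn_ctx *ctx, grn_tokenizer_query *query)
  {
    /* New public API added by this commit. The index is initialized to 0
     * by grn_tokenizer_query_init() and updated internally via
     * grn_tokenizer_query_set_token_filter_index(). */
    unsigned int index =
      grn_tokenizer_query_get_token_filter_index(ctx, query);
    GRN_PLUGIN_LOG(ctx, GRN_LOG_DEBUG,
                   "[my-token-filter] running as token filter #%u",
                   index);
    return NULL; /* no per-filter user data in this sketch */
  }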