Kouhei Sutou
null+****@clear*****
Thu Nov 6 15:36:36 JST 2014
Kouhei Sutou 2014-11-06 15:36:36 +0900 (Thu, 06 Nov 2014) New Revision: b48bde4aedafa8ec988f482cc27698ee46104452 https://github.com/ranguba/rroonga/commit/b48bde4aedafa8ec988f482cc27698ee46104452 Message: Support :token_filters for .create of key supported tables Modified files: ext/groonga/rb-grn-double-array-trie.c ext/groonga/rb-grn-hash.c ext/groonga/rb-grn-patricia-trie.c test/test-double-array-trie.rb test/test-hash.rb test/test-patricia-trie.rb Modified: ext/groonga/rb-grn-double-array-trie.c (+11 -2) =================================================================== --- ext/groonga/rb-grn-double-array-trie.c 2014-11-06 15:30:23 +0900 (b596395) +++ ext/groonga/rb-grn-double-array-trie.c 2014-11-06 15:36:36 +0900 (aa4c701) @@ -219,6 +219,10 @@ rb_grn_double_array_trie_bind (RbGrnDoubleArrayTrie *rb_grn_double_array_trie, * {Groonga::IndexColumn} を定義する場合は * @"TokenBigram"@ などを指定する必要がある。 * + * @option options [::Array<String, Groonga::Procedure>, nil] + * :token_filters (nil) The token filters to be used in the + * table. 
+ * * @option options :sub_records * +true+ を指定すると {#group} でグループ化したときに、 * {Groonga::Record#n_sub_records} でグループに含まれるレコー @@ -246,7 +250,9 @@ rb_grn_double_array_trie_s_create (int argc, VALUE *argv, VALUE klass) VALUE options, rb_context, rb_name, rb_path, rb_persistent; VALUE rb_key_normalize, rb_key_with_sis, rb_key_type; VALUE rb_value_type; - VALUE rb_default_tokenizer, rb_sub_records; + VALUE rb_default_tokenizer; + VALUE rb_token_filters; + VALUE rb_sub_records; VALUE rb_normalizer; rb_scan_args(argc, argv, "01", &options); @@ -261,6 +267,7 @@ rb_grn_double_array_trie_s_create (int argc, VALUE *argv, VALUE klass) "key_type", &rb_key_type, "value_type", &rb_value_type, "default_tokenizer", &rb_default_tokenizer, + "token_filters", &rb_token_filters, "sub_records", &rb_sub_records, "normalizer", &rb_normalizer, NULL); @@ -308,7 +315,9 @@ rb_grn_double_array_trie_s_create (int argc, VALUE *argv, VALUE klass) if (!NIL_P(rb_default_tokenizer)) rb_funcall(rb_table, rb_intern("default_tokenizer="), 1, rb_default_tokenizer); - + if (!NIL_P(rb_token_filters)) + rb_funcall(rb_table, rb_intern("token_filters="), 1, + rb_token_filters); if (!NIL_P(rb_normalizer)) rb_funcall(rb_table, rb_intern("normalizer="), 1, rb_normalizer); Modified: ext/groonga/rb-grn-hash.c (+10 -0) =================================================================== --- ext/groonga/rb-grn-hash.c 2014-11-06 15:30:23 +0900 (f50dce3) +++ ext/groonga/rb-grn-hash.c 2014-11-06 15:36:36 +0900 (9d928de) @@ -131,6 +131,11 @@ VALUE rb_cGrnHash; * デフォルトでは何も設定されていないので、テーブルに * {Groonga::IndexColumn} を定義する場合は * @"TokenBigram"@ などを指定する必要がある。 + * + * @option options [::Array<String, Groonga::Procedure>, nil] + * :token_filters (nil) The token filters to be used in the + * table. 
+ * * @option options [Groonga::Record#n_sub_records] :sub_records * +true+ を指定すると {#group} でグループ化したときに、 * {Groonga::Record#n_sub_records} でグループに含まれるレコー @@ -157,6 +162,7 @@ rb_grn_hash_s_create (int argc, VALUE *argv, VALUE klass) VALUE rb_table; VALUE options, rb_context, rb_name, rb_path, rb_persistent; VALUE rb_key_normalize, rb_key_type, rb_value_type, rb_default_tokenizer; + VALUE rb_token_filters; VALUE rb_sub_records; VALUE rb_normalizer; @@ -171,6 +177,7 @@ rb_grn_hash_s_create (int argc, VALUE *argv, VALUE klass) "key_type", &rb_key_type, "value_type", &rb_value_type, "default_tokenizer", &rb_default_tokenizer, + "token_filters", &rb_token_filters, "sub_records", &rb_sub_records, "normalizer", &rb_normalizer, NULL); @@ -215,6 +222,9 @@ rb_grn_hash_s_create (int argc, VALUE *argv, VALUE klass) if (!NIL_P(rb_default_tokenizer)) rb_funcall(rb_table, rb_intern("default_tokenizer="), 1, rb_default_tokenizer); + if (!NIL_P(rb_token_filters)) + rb_funcall(rb_table, rb_intern("token_filters="), 1, + rb_token_filters); if (!NIL_P(rb_normalizer)) rb_funcall(rb_table, rb_intern("normalizer="), 1, rb_normalizer); Modified: ext/groonga/rb-grn-patricia-trie.c (+12 -1) =================================================================== --- ext/groonga/rb-grn-patricia-trie.c 2014-11-06 15:30:23 +0900 (afe8bd2) +++ ext/groonga/rb-grn-patricia-trie.c 2014-11-06 15:36:36 +0900 (9219e2b) @@ -141,6 +141,11 @@ VALUE rb_cGrnPatriciaTrie; * デフォルトでは何も設定されていないので、テーブルに * {Groonga::IndexColumn} を定義する場合は * @"TokenBigram"@ などを指定する必要がある。 + * + * @option options [::Array<String, Groonga::Procedure>, nil] + * :token_filters (nil) The token filters to be used in the + * table. 
+ * * @option options :sub_records * +true+ を指定すると {#group} でグループ化したときに、 * {Groonga::Record#n_sub_records} でグループに含まれるレコー @@ -168,7 +173,9 @@ rb_grn_patricia_trie_s_create (int argc, VALUE *argv, VALUE klass) VALUE options, rb_context, rb_name, rb_path, rb_persistent; VALUE rb_key_normalize, rb_key_with_sis, rb_key_type; VALUE rb_value_type; - VALUE rb_default_tokenizer, rb_sub_records; + VALUE rb_default_tokenizer; + VALUE rb_token_filters; + VALUE rb_sub_records; VALUE rb_normalizer; rb_scan_args(argc, argv, "01", &options); @@ -183,6 +190,7 @@ rb_grn_patricia_trie_s_create (int argc, VALUE *argv, VALUE klass) "key_type", &rb_key_type, "value_type", &rb_value_type, "default_tokenizer", &rb_default_tokenizer, + "token_filters", &rb_token_filters, "sub_records", &rb_sub_records, "normalizer", &rb_normalizer, NULL); @@ -230,6 +238,9 @@ rb_grn_patricia_trie_s_create (int argc, VALUE *argv, VALUE klass) if (!NIL_P(rb_default_tokenizer)) rb_funcall(rb_table, rb_intern("default_tokenizer="), 1, rb_default_tokenizer); + if (!NIL_P(rb_token_filters)) + rb_funcall(rb_table, rb_intern("token_filters="), 1, + rb_token_filters); if (!NIL_P(rb_normalizer)) rb_funcall(rb_table, rb_intern("normalizer="), 1, rb_normalizer); Modified: test/test-double-array-trie.rb (+8 -0) =================================================================== --- test/test-double-array-trie.rb 2014-11-06 15:30:23 +0900 (b6824fe) +++ test/test-double-array-trie.rb 2014-11-06 15:36:36 +0900 (5918b68) @@ -64,6 +64,14 @@ class DoubleArrayTrieTest < Test::Unit::TestCase assert_equal([context["TokenFilterStopWord"]], trie.token_filters) end + + def test_create + context.register_plugin("token_filters/stop_word") + token_filters = ["TokenFilterStopWord"] + trie = Groonga::DoubleArrayTrie.create(:token_filters => token_filters) + assert_equal([context["TokenFilterStopWord"]], + trie.token_filters) + end end def test_search Modified: test/test-hash.rb (+7 -0) 
=================================================================== --- test/test-hash.rb 2014-11-06 15:30:23 +0900 (cce7f45) +++ test/test-hash.rb 2014-11-06 15:36:36 +0900 (aaa0a06) @@ -204,6 +204,13 @@ class HashTest < Test::Unit::TestCase assert_equal([context["TokenFilterStopWord"]], hash.token_filters) end + + def test_create + context.register_plugin("token_filters/stop_word") + hash = Groonga::Hash.create(:token_filters => ["TokenFilterStopWord"]) + assert_equal([context["TokenFilterStopWord"]], + hash.token_filters) + end end def test_normalizer Modified: test/test-patricia-trie.rb (+8 -0) =================================================================== --- test/test-patricia-trie.rb 2014-11-06 15:30:23 +0900 (05634bd) +++ test/test-patricia-trie.rb 2014-11-06 15:36:36 +0900 (066cd16) @@ -64,6 +64,14 @@ class PatriciaTrieTest < Test::Unit::TestCase assert_equal([context["TokenFilterStopWord"]], trie.token_filters) end + + def test_create + context.register_plugin("token_filters/stop_word") + token_filters = ["TokenFilterStopWord"] + trie = Groonga::PatriciaTrie.create(:token_filters => token_filters) + assert_equal([context["TokenFilterStopWord"]], + trie.token_filters) + end end def test_normalizer -------------- next part -------------- An HTML attachment was scrubbed.