Kouhei Sutou
null+****@clear*****
Tue Mar 22 12:18:45 JST 2016
Kouhei Sutou	2016-03-22 12:18:45 +0900 (Tue, 22 Mar 2016)

  New Revision: 2ccd7b08ffa8f607063eabf05124984e5b19a8a9
  https://github.com/ranguba/groonga-client/commit/2ccd7b08ffa8f607063eabf05124984e5b19a8a9

  Message:
    schema: fix a bug that tokenizer support doesn't work

  Modified files:
    lib/groonga/client/response/schema.rb
    test/response/test-schema.rb

  Modified: lib/groonga/client/response/schema.rb (+1 -1)
===================================================================
--- lib/groonga/client/response/schema.rb    2016-03-22 12:17:11 +0900 (0f62200)
+++ lib/groonga/client/response/schema.rb    2016-03-22 12:18:45 +0900 (1cae2ad)
@@ -202,7 +202,7 @@ module Groonga
         when :key_type
           super(key, coerce_key_type(value))
         when :tokenizer
-          super(key, coerce_tokenzer(value))
+          super(key, coerce_tokenizer(value))
         when :normalizer
           super(key, coerce_normalizer(value))
         when :columns

  Modified: test/response/test-schema.rb (+20 -0)
===================================================================
--- test/response/test-schema.rb    2016-03-22 12:17:11 +0900 (6d42f43)
+++ test/response/test-schema.rb    2016-03-22 12:18:45 +0900 (7860869)
@@ -62,6 +62,26 @@ class TestResponseSchema < Test::Unit::TestCase
                    response.tables["Users"].key_type.name)
     end
 
+    def test_tokenizer
+      body = {
+        "tokenizers" => {
+          "TokenBigram" => {
+            "name" => "TokenBigram",
+          },
+        },
+        "tables" => {
+          "Terms" => {
+            "tokenizer" => {
+              "name" => "TokenBigram",
+            },
+          }
+        }
+      }
+      response = create_response(body)
+      assert_equal(response.tokenizers["TokenBigram"],
+                   response.tables["Terms"].tokenizer)
+    end
+
     def test_normalizer
       body = {
         "normalizers" => {
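
For reference, here is a minimal sketch of how the fixed accessor is reached from client code. The host, port, table name ("Terms"), and tokenizer name ("TokenBigram") are illustrative assumptions, not part of this commit; only the schema.tables[...].tokenizer accessor and its relation to schema.tokenizers[...] come from the new test above. Before this fix, the misspelled coerce_tokenzer call meant a table's tokenizer entry could not be coerced, so tokenizer support in the schema response did not work.

require "groonga/client"

# Assumption: a Groonga HTTP server is running on localhost:10041.
Groonga::Client.open(host: "localhost", port: 10041, protocol: :http) do |client|
  schema = client.schema
  terms = schema.tables["Terms"]    # hypothetical table name
  tokenizer = terms.tokenizer       # accessor fixed by this commit
  # The table's tokenizer entry matches the schema-wide tokenizer object;
  # the new test asserts this equality against schema.tokenizers[...].
  puts tokenizer.name               # e.g. "TokenBigram"
end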