Kouhei Sutou
null+****@clear*****
Sun Dec 28 22:53:27 JST 2014
Kouhei Sutou 2014-12-28 22:53:27 +0900 (Sun, 28 Dec 2014) New Revision: 094185309da6da0d2e5bbf8f1a60f2aac53d1e23 https://github.com/groonga/groonga/commit/094185309da6da0d2e5bbf8f1a60f2aac53d1e23 Message: doc token filters: use separated database for each tokenizer Because example uses the same table names. Modified files: doc/source/example/reference/token_filters/example-table-create.log doc/source/example/reference/token_filters/stem.log doc/source/example/reference/token_filters/stop_word.log doc/source/reference/token_filters.rst Modified: doc/source/example/reference/token_filters/example-table-create.log (+3 -1) =================================================================== --- doc/source/example/reference/token_filters/example-table-create.log 2014-12-28 22:47:04 +0900 (ac93153) +++ doc/source/example/reference/token_filters/example-table-create.log 2014-12-28 22:53:27 +0900 (c61f662) @@ -1,7 +1,9 @@ Execution example:: + register token_filters/stop_word + # [[0, 1337566253.89858, 0.000355720520019531], true] table_create Terms TABLE_PAT_KEY ShortText \ --default_tokenizer TokenBigram \ --normalizer NormalizerAuto \ --token_filters TokenFilterStopWord - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] Modified: doc/source/example/reference/token_filters/stem.log (+20 -20) =================================================================== --- doc/source/example/reference/token_filters/stem.log 2014-12-28 22:47:04 +0900 (790bf4b) +++ doc/source/example/reference/token_filters/stem.log 2014-12-28 22:53:27 +0900 (9040199) @@ -1,57 +1,57 @@ Execution example:: register token_filters/stem - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] table_create Memos TABLE_NO_KEY - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] column_create Memos content COLUMN_SCALAR ShortText - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] table_create Terms 
TABLE_PAT_KEY ShortText \ --default_tokenizer TokenBigram \ --normalizer NormalizerAuto \ --token_filters TokenFilterStem - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] column_create Terms memos_content COLUMN_INDEX|WITH_POSITION Memos content - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] load --table Memos [ {"content": "I develop Groonga"}, {"content": "I'm developing Groonga"}, {"content": "I developed Groonga"} ] - # [[0,0.0,0.0],3] + # [[0, 1337566253.89858, 0.000355720520019531], 3] select Memos --match_columns content --query "develops" # [ # [ - # 0, - # 0.0, - # 0.0 - # ], + # 0, + # 1337566253.89858, + # 0.000355720520019531 + # ], # [ # [ # [ # 3 - # ], + # ], # [ # [ - # "_id", + # "_id", # "UInt32" - # ], + # ], # [ - # "content", + # "content", # "ShortText" # ] - # ], + # ], # [ - # 1, + # 1, # "I develop Groonga" - # ], + # ], # [ - # 2, + # 2, # "I'm developing Groonga" - # ], + # ], # [ - # 3, + # 3, # "I developed Groonga" # ] # ] Modified: doc/source/example/reference/token_filters/stop_word.log (+20 -20) =================================================================== --- doc/source/example/reference/token_filters/stop_word.log 2014-12-28 22:47:04 +0900 (7084ec2) +++ doc/source/example/reference/token_filters/stop_word.log 2014-12-28 22:53:27 +0900 (1599fcd) @@ -1,60 +1,60 @@ Execution example:: register token_filters/stop_word - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] table_create Memos TABLE_NO_KEY - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] column_create Memos content COLUMN_SCALAR ShortText - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] table_create Terms TABLE_PAT_KEY ShortText \ --default_tokenizer TokenBigram \ --normalizer NormalizerAuto \ --token_filters TokenFilterStopWord - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] column_create 
Terms memos_content COLUMN_INDEX|WITH_POSITION Memos content - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] column_create Terms is_stop_word COLUMN_SCALAR Bool - # [[0,0.0,0.0],true] + # [[0, 1337566253.89858, 0.000355720520019531], true] load --table Terms [ {"_key": "and", "is_stop_word": true} ] - # [[0,0.0,0.0],1] + # [[0, 1337566253.89858, 0.000355720520019531], 1] load --table Memos [ {"content": "Hello"}, {"content": "Hello and Good-bye"}, {"content": "Good-bye"} ] - # [[0,0.0,0.0],3] + # [[0, 1337566253.89858, 0.000355720520019531], 3] select Memos --match_columns content --query "Hello and" # [ # [ - # 0, - # 0.0, - # 0.0 - # ], + # 0, + # 1337566253.89858, + # 0.000355720520019531 + # ], # [ # [ # [ # 2 - # ], + # ], # [ # [ - # "_id", + # "_id", # "UInt32" - # ], + # ], # [ - # "content", + # "content", # "ShortText" # ] - # ], + # ], # [ - # 1, + # 1, # "Hello" - # ], + # ], # [ - # 2, + # 2, # "Hello and Good-bye" # ] # ] Modified: doc/source/reference/token_filters.rst (+16 -6) =================================================================== --- doc/source/reference/token_filters.rst 2014-12-28 22:47:04 +0900 (dc86cd7) +++ doc/source/reference/token_filters.rst 2014-12-28 22:53:27 +0900 (4eeefff) @@ -2,9 +2,6 @@ .. highlightlang:: none -.. groonga-command -.. database: token_filters - Token filters ============= @@ -25,8 +22,13 @@ Here is an example ``table_create`` that uses ``TokenFilterStopWord`` token filter module: .. groonga-command +.. database: token_filters_example .. include:: ../example/reference/token_filters/example-table-create.log -.. table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer NormalizerAuto --token_filters TokenFilterStopWord +.. register token_filters/stop_word +.. table_create Terms TABLE_PAT_KEY ShortText \ +.. --default_tokenizer TokenBigram \ +.. --normalizer NormalizerAuto \ +.. 
--token_filters TokenFilterStopWord Available token filters ----------------------- @@ -52,11 +54,15 @@ The stop word is specified ``is_stop_word`` column on lexicon table. Here is an example that uses ``TokenFilterStopWord`` token filter: .. groonga-command +.. database: token_filters_stop_word .. include:: ../example/reference/token_filters/stop_word.log .. register token_filters/stop_word .. table_create Memos TABLE_NO_KEY .. column_create Memos content COLUMN_SCALAR ShortText -.. table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer NormalizerAuto --token_filters TokenFilterStopWord +.. table_create Terms TABLE_PAT_KEY ShortText \ +.. --default_tokenizer TokenBigram \ +.. --normalizer NormalizerAuto \ +.. --token_filters TokenFilterStopWord .. column_create Terms memos_content COLUMN_INDEX|WITH_POSITION Memos content .. column_create Terms is_stop_word COLUMN_SCALAR Bool .. load --table Terms @@ -86,11 +92,15 @@ Here is an example that uses ``TokenFilterStopWord`` token filter: Here is an example that uses ``TokenFilterStem`` token filter: .. groonga-command +.. database: token_filters_stem .. include:: ../example/reference/token_filters/stem.log .. register token_filters/stem .. table_create Memos TABLE_NO_KEY .. column_create Memos content COLUMN_SCALAR ShortText -.. table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer NormalizerAuto --token_filters TokenFilterStem +.. table_create Terms TABLE_PAT_KEY ShortText \ +.. --default_tokenizer TokenBigram \ +.. --normalizer NormalizerAuto \ +.. --token_filters TokenFilterStem .. column_create Terms memos_content COLUMN_INDEX|WITH_POSITION Memos content .. load --table Memos .. [ -------------- next part -------------- HTMLの添付ファイルを保管しました...Download