File: test_whoosh_file.py

# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import sys
import os

# Make the local jieba checkout importable when the script is run from the
# test directory.
sys.path.append("../")

from whoosh.index import create_in
from whoosh.fields import Schema, TEXT, ID
from whoosh.qparser import QueryParser

from jieba.analyse import ChineseAnalyzer

# Use jieba's ChineseAnalyzer so Whoosh tokenizes Chinese text correctly.
analyzer = ChineseAnalyzer()
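
# A quick sanity check (a sketch, not part of the original test): the analyzer
# is callable on unicode text and yields jieba-segmented tokens, e.g.
#   for t in analyzer("我的好朋友是李明"):
#       print(t.text)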

schema = Schema(title=TEXT(stored=True),
                path=ID(stored=True),
                content=TEXT(stored=True, analyzer=analyzer))

# Build a fresh index in a local "tmp" directory.
if not os.path.exists("tmp"):
    os.mkdir("tmp")
ix = create_in("tmp", schema)
writer = ix.writer()

file_name = sys.argv[1]

# Index the file line by line: each line becomes one document. The sample
# input is GBK-encoded, so undecodable bytes are ignored.
with open(file_name, "rb") as inf:
    i = 0
    for line in inf:
        i += 1
        writer.add_document(
            title="line" + str(i),
            path="/a",
            content=line.decode('gbk', 'ignore')
        )
writer.commit()

searcher = ix.searcher()
parser = QueryParser("content", schema=ix.schema)

# Run a few sample queries (Chinese and English) and print highlighted hits.
for keyword in ("水果小姐", "你", "first", "中文", "交换机", "交换"):
    print("result of " + keyword)
    q = parser.parse(keyword)
    results = searcher.search(q)
    for hit in results:
        print(hit.highlights("content"))
    print("=" * 10)