# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2016-2024 PyThaiNLP Project
# SPDX-License-Identifier: Apache-2.0
# Tests for tokenize functions that need "compact" dependencies

import unittest

from pythainlp.tokenize import (
    pyicu,
    sent_tokenize,
    subword_tokenize,
    word_tokenize,
)

from ..core.test_tokenize import (
    SENT_1,
    SENT_1_TOKS,
    SENT_2,
    SENT_2_TOKS,
    SENT_3,
    SENT_3_TOKS,
    SENT_4,
    TEXT_1,
)

class WordTokenizeICUTestCase(unittest.TestCase):
    def test_icu(self):
        # None and empty-string input should both yield an empty list
        self.assertEqual(pyicu.segment(None), [])
        self.assertEqual(pyicu.segment(""), [])
        # "I love the Thai language because I am Thai" should segment
        # into the expected nine words
        self.assertEqual(
            word_tokenize("ฉันรักภาษาไทยเพราะฉันเป็นคนไทย", engine="icu"),
            ["ฉัน", "รัก", "ภาษา", "ไทย", "เพราะ", "ฉัน", "เป็น", "คน", "ไทย"],
        )

    def test_word_tokenize_icu(self):
        self.assertIsNotNone(word_tokenize(TEXT_1, engine="icu"))
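
# For reference: engine="icu" delegates to PyICU's BreakIterator. A rough
# sketch of the equivalent direct usage (assumes the PyICU package,
# imported as `icu`, is installed):
#
#     from icu import BreakIterator, Locale
#     text = "ฉันรักภาษาไทย"
#     bd = BreakIterator.createWordInstance(Locale("th"))
#     bd.setText(text)
#     start, words = 0, []
#     for end in bd:  # iteration yields successive boundary offsets
#         words.append(text[start:end])
#         start = end
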
class SentTokenizeCRFCutTestCase(unittest.TestCase):
    def test_sent_tokenize(self):
        # Use the default engine (crfcut)
        self.assertEqual(sent_tokenize(None), [])
        self.assertEqual(sent_tokenize(""), [])
        self.assertEqual(
            sent_tokenize(SENT_1),
            SENT_1_TOKS,
        )
        self.assertEqual(
            sent_tokenize(SENT_2),
            SENT_2_TOKS,
        )
        self.assertEqual(
            sent_tokenize(SENT_3),
            SENT_3_TOKS,
        )
        # Naming the engine explicitly should give the same results
        self.assertEqual(
            sent_tokenize(SENT_1, engine="crfcut"),
            SENT_1_TOKS,
        )
        self.assertEqual(
            sent_tokenize(SENT_2, engine="crfcut"),
            SENT_2_TOKS,
        )
        self.assertEqual(
            sent_tokenize(SENT_3, engine="crfcut"),
            SENT_3_TOKS,
        )
        self.assertEqual(
            sent_tokenize(SENT_4, engine="crfcut"),
            [["ผม", "กิน", "ข้าว", " ", "\n", "เธอ", "เล่น", "เกม"]],
        )
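
# Note: "crfcut" is PyThaiNLP's default sentence-segmentation engine (a
# CRF-based model), which is why the engine-less calls above are expected
# to return the same results as the explicit engine="crfcut" calls.
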
class SubwordTokenizeHanSoloTestCase(unittest.TestCase):
    def test_subword_tokenize(self):
        # None input should yield an empty list
        self.assertEqual(subword_tokenize(None, engine="han_solo"), [])
        # han_solo segments Thai text at the syllable level
        self.assertEqual(
            subword_tokenize("แมวกินปลา", engine="han_solo"),
            ["แมว", "กิน", "ปลา"],
        )
        # Output should contain whole syllables, never stray characters
        self.assertIn(
            "ดาว", subword_tokenize("สวัสดีดาวอังคาร", engine="han_solo")
        )
        self.assertNotIn(
            "า", subword_tokenize("สวัสดีดาวอังคาร", engine="han_solo")
        )
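
# Convenience hook so the test module can be run on its own. Note that the
# relative import above means it must be invoked as a module within its
# package, e.g. `python -m <package>.testc_tokenize` (package path assumed),
# not as a plain script.
if __name__ == "__main__":
    unittest.main()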