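# Round-trip pickling tests for spaCy's StringStore, Vocab and Doc objects,
# serialized and restored via srsly.pickle_dumps / srsly.pickle_loads.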
import pytest
import numpy
import srsly
from spacy.lang.en import English
from spacy.strings import StringStore
from spacy.tokens import Doc
from spacy.vocab import Vocab
from spacy.attrs import NORM
@pytest.mark.parametrize("text1,text2", [("hello", "bye")])
def test_pickle_string_store(text1, text2):
stringstore = StringStore()
store1 = stringstore[text1]
store2 = stringstore[text2]
data = srsly.pickle_dumps(stringstore, protocol=-1)
unpickled = srsly.pickle_loads(data)
assert unpickled[text1] == store1
assert unpickled[text2] == store2
assert len(stringstore) == len(unpickled)
@pytest.mark.parametrize("text1,text2", [("dog", "cat")])
def test_pickle_vocab(text1, text2):
vocab = Vocab(
lex_attr_getters={int(NORM): lambda string: string[:-1]},
get_noun_chunks=English.Defaults.syntax_iterators.get("noun_chunks"),
)
vocab.set_vector("dog", numpy.ones((5,), dtype="f"))
lex1 = vocab[text1]
lex2 = vocab[text2]
assert lex1.norm_ == text1[:-1]
assert lex2.norm_ == text2[:-1]
data = srsly.pickle_dumps(vocab)
unpickled = srsly.pickle_loads(data)
assert unpickled[text1].orth == lex1.orth
assert unpickled[text2].orth == lex2.orth
assert unpickled[text1].norm == lex1.norm
assert unpickled[text2].norm == lex2.norm
assert unpickled[text1].norm != unpickled[text2].norm
assert unpickled.vectors is not None
assert unpickled.get_noun_chunks is not None
assert list(vocab["dog"].vector) == [1.0, 1.0, 1.0, 1.0, 1.0]


def test_pickle_doc(en_vocab):
    # A pickled Doc should round-trip with its words, dependency labels and heads.
    words = ["a", "b", "c"]
    deps = ["dep"] * len(words)
    heads = [0] * len(words)
    doc = Doc(
        en_vocab,
        words=words,
        deps=deps,
        heads=heads,
    )
    data = srsly.pickle_dumps(doc)
    unpickled = srsly.pickle_loads(data)
    assert [t.text for t in unpickled] == words
    assert [t.dep_ for t in unpickled] == deps
    assert [t.head.i for t in unpickled] == heads
    assert list(doc.noun_chunks) == []