-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
36 lines (28 loc) · 1.02 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import pickle
from nltk.tokenize import WordPunctTokenizer
from nltk.stem import WordNetLemmatizer
from itertools import islice
def tokenize(document, stopwords):
    """Tokenize, lemmatize, and filter a document into content tokens.

    Parameters
    ----------
    document : str
        Raw text; lowercased before tokenization.
    stopwords : iterable of str
        Words to discard after lemmatization (any iterable; converted
        to a set internally).

    Returns
    -------
    list of str
        Lemmatized tokens longer than 2 characters with stopwords removed.
    """
    tokenizer = WordPunctTokenizer()
    lemmatizer = WordNetLemmatizer()
    # Build a set once so each membership test is O(1) instead of
    # scanning a stopword list for every token.
    stopset = set(stopwords)
    tokens = tokenizer.tokenize(document.lower())
    # Single pass: lemmatize, then drop short tokens and stopwords.
    return [
        lemma
        for lemma in (lemmatizer.lemmatize(t) for t in tokens)
        if len(lemma) > 2 and lemma not in stopset
    ]
def window(seq, n=2):
    """Yield overlapping tuples of width *n* over *seq*.

    Produces (s0, ..., s[n-1]), (s1, ..., sn), ... in order. If *seq*
    has fewer than *n* elements, nothing is yielded.
    """
    source = iter(seq)
    # Prime the first window; islice leaves `source` positioned just past it.
    current = tuple(islice(source, n))
    if len(current) == n:
        yield current
    # Slide one element at a time: drop the oldest, append the newest.
    for item in source:
        current = current[1:] + (item,)
        yield current
def save_obj(obj, name):
    """Pickle *obj* to obj/<name>.pkl using the highest protocol.

    Assumes the ``obj/`` directory already exists relative to the
    current working directory.
    """
    path = 'obj/' + name + '.pkl'
    with open(path, 'wb') as handle:
        pickle.dump(obj, handle, pickle.HIGHEST_PROTOCOL)
def load_obj(name):
    """Unpickle and return the object stored at obj/<name>.pkl.

    NOTE(review): ``pickle.load`` executes arbitrary code from the file;
    only load files produced by this project (e.g. via ``save_obj``).
    """
    path = 'obj/' + name + '.pkl'
    with open(path, 'rb') as handle:
        return pickle.load(handle)