6
6
7
7
<p align="center">
8
8
<img src="https://img.shields.io/badge/Python-v3-E97040?logo=python&logoColor=white" />
9
- <img alt="powered by Pytorch" src="https://img.shields.io/badge/PyTorch-❤️-F8C6B5?logo=pytorch&logoColor=white">
10
- <img src="https://img.shields.io/badge/Conda-Supported-lightgreen?style=social&logo=anaconda" />
9
+ <img alt="powered by PyTorch" src="https://img.shields.io/badge/PyTorch-❤️-F8C6B5?logo=pytorch&logoColor=white">
11
10
<a href="https://github.com/WenjieDu/SAITS/blob/main/LICENSE">
12
11
<img src="https://img.shields.io/badge/License-MIT-3C7699?logo=opensourceinitiative&logoColor=white" />
13
12
</a>
14
13
<a href="https://doi.org/10.1016/j.eswa.2023.119619">
15
14
<img src="https://img.shields.io/badge/ESWA-published-75C1C4?logo=elsevier&color=FF6C00" />
16
15
</a>
17
- <a href="https://scholar.google.com/citations?view_op=view_citation&hl=en&user=j9qvUg0AAAAJ&citation_for_view=j9qvUg0AAAAJ:Y0pCki6q_DkC">
16
+ <a href="https://scholar.google.com/citations?view_op=view_citation&hl=en&user=j9qvUg0AAAAJ&citation_for_view=j9qvUg0AAAAJ:Y0pCki6q_DkC" title="Paper citation number from Google Scholar">
18
17
<img src="https://img.shields.io/endpoint?url=https://pypots.com/figs/citation_badges/saits.json" />
19
18
</a>
19
+ <a href="https://webofscience.clarivate.cn/wos/woscc/full-record/WOS:000943170100001?SID=USW2EC0D82x89d30RifxLVxJpho5Y" title="This is a Highly Cited Paper recognized by ESI">
20
+ <img src="https://pypots.com/figs/citation_badges/ESI_highly_cited_paper.svg" />
21
+ </a>
20
22
<img src="https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2FWenjieDu%2FSAITS&count_bg=%23009A0A&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=Visits&edge_flat=false" />
21
23
</p>
22
24
@@ -62,6 +64,7 @@ for easily modeling your partially-observed time-series datasets.
62
64
<summary><b>👉 Click here to see the example 👀</b></summary>
63
65
64
66
```python
67
+ # pip install pypots>=0.4
65
68
import numpy as np
66
69
from sklearn.preprocessing import StandardScaler
67
70
from pygrinder import mcar
@@ -82,7 +85,7 @@ dataset = {"X": X} # X for model input
82
85
print(X.shape)  # (11988, 48, 37), 11988 samples and each sample has 48 time steps, 37 features
83
86
84
87
# Model training. This is PyPOTS showtime.
85
- saits = SAITS(n_steps = 48 , n_features = 37 , n_layers = 2 , d_model = 256 , d_inner = 128 , n_heads = 4 , d_k = 64 , d_v = 64 , dropout = 0.1 , epochs = 10 )
88
+ saits = SAITS(n_steps=48, n_features=37, n_layers=2, d_model=256, d_ffn=128, n_heads=4, d_k=64, d_v=64, dropout=0.1, epochs=10)
86
89
# Here I use the whole dataset as the training set because ground truth is not visible to the model, you can also split it into train/val/test sets
87
90
saits.fit(dataset)
88
91
imputation = saits.impute(dataset) # impute the originally-missing values and artificially-missing values
0 commit comments