@szha/

gluon-ts

Python

A simple time series example with GluonTS for predicting twitter volume with DeepAR

fork
loading
Files
  • main.py
  • requirements.txt

This Plugin Crashed!

Error: Error: must not create an existing file {"type":"CREATE_FILE","wid":"0.7398182465825447","path":"main.py","file":{"path":"main.py","content":{"asEncoding":{"base64":"ZnJvbSBnbHVvbnRzLmRhdGFzZXQgaW1wb3J0IGNvbW1vbgpmcm9tIGdsdW9udHMubW9kZWwgaW1wb3J0IGRlZXBhcgoKaW1wb3J0IHBhbmRhcyBhcyBwZAoKdXJsID0gImh0dHBzOi8vcmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbS9udW1lbnRhL05BQi9tYXN0ZXIvZGF0YS9yZWFsVHdlZXRzL1R3aXR0ZXJfdm9sdW1lX0FNWk4uY3N2IgpkZiA9IHBkLnJlYWRfY3N2KHVybCwgaGVhZGVyPTAsIGluZGV4X2NvbD0wKQpkYXRhID0gY29tbW9uLkxpc3REYXRhc2V0KFt7InN0YXJ0IjogZGYuaW5kZXhbMF0sCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAidGFyZ2V0IjogZGYudmFsdWVbOiIyMDE1LTA0LTA1IDAwOjAwOjAwIl19XSwKICAgICAgICAgICAgICAgICAgICAgICAgICBmcmVxPSI1bWluIikKCmVzdGltYXRvciA9IGRlZXBhci5EZWVwQVJFc3RpbWF0b3IoZnJlcT0iNW1pbiIsIHByZWRpY3Rpb25fbGVuZ3RoPTEyKQpwcmVkaWN0b3IgPSBlc3RpbWF0b3IudHJhaW4odHJhaW5pbmdfZGF0YT1kYXRhKQoKcHJlZGljdGlvbiA9IG5leHQocHJlZGljdG9yLnByZWRpY3QoZGF0YSkpCnByaW50KHByZWRpY3Rpb24ubWVhbikKcHJlZGljdGlvbi5wbG90KG91dHB1dF9maWxlPSdncmFwaC5wbmcnKQ=="},"asBuffer":null},"loaded":true}}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
from gluonts.dataset import common
from gluonts.model import deepar

import pandas as pd

# Fetch the AMZN Twitter-volume series from the Numenta Anomaly Benchmark (NAB).
# The CSV has a timestamp index column and a single "value" column.
url = "https://raw.githubusercontent.com/numenta/NAB/master/data/realTweets/Twitter_volume_AMZN.csv"
df = pd.read_csv(url, header=0, index_col=0)

# Build a one-entry GluonTS dataset, training on everything up to the cutoff.
# Bracket indexing (df["value"]) is used instead of the fragile df.value
# attribute lookup, which breaks if the column name ever collides with a
# DataFrame attribute/method.
data = common.ListDataset(
    [
        {
            "start": df.index[0],
            "target": df["value"][:"2015-04-05 00:00:00"],
        }
    ],
    freq="5min",
)

# DeepAR estimator: 5-minute frequency, forecast horizon of 12 steps (1 hour).
estimator = deepar.DeepAREstimator(freq="5min", prediction_length=12)
predictor = estimator.train(training_data=data)

# predict() yields one forecast per dataset entry; the dataset has exactly one.
prediction = next(predictor.predict(data))
print(prediction.mean)
prediction.plot(output_file='graph.png')