Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update README.md #3

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file modified Procfile
100644 → 100755
Empty file.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,5 @@ Visualization using Seaborn/Matplotlib
1. [Tanishka Gaur](https://github.com/tanishkaa31/)
2. [Vedant Chourasia](https://github.com/Vc8006/)
3. [Shriya Dixit](https://github.com/shriyaD13/)
4. [Pranjali Verma](https://github.com/pranjali1909/)
4. [Pranjali Jadkar](https://github.com/pranjali1909/)
5. [Shrish Kumar Singhal](https://github.com/shrishsinghal/)
Binary file modified __pycache__/manage.cpython-38.pyc
100644 → 100755
Binary file not shown.
Empty file modified __pycache__/ml_model.cpython-38.pyc
100644 → 100755
Empty file.
Binary file modified db.sqlite3
100644 → 100755
Binary file not shown.
156 changes: 156 additions & 0 deletions main/Ml_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from .models import Data , Data_Predicted ,Update
from datetime import date
import datetime
import statsmodels.api as sm
from pandas.tseries.offsets import DateOffset

# Module-level setup: load the observed (Data) and previously-forecast
# (Data_Predicted) tables into date-indexed DataFrames.  These queries run
# at import time, so this module must only be imported after Django is
# fully configured.
# df = pd.read_csv("./main/static/new_data.csv",parse_dates = ['Date'], index_col = ['Date'])
update = Update()          # unsaved Update record; not used below
data1 = Data_Predicted()   # record that fun() fills in and save()s
data = Data()              # unsaved Data record; not used below
q = Data.objects.all().values()
df = pd.DataFrame(q)
df['Date'] = pd.to_datetime(df['Date'])
df = df.set_index(df['Date'])

m = Data_Predicted.objects.all().values()
df_pred = pd.DataFrame(m)
df_pred['Date'] = pd.to_datetime(df_pred['Date'])
df_pred = df_pred.set_index(df_pred['Date'])

from sklearn.metrics import mean_squared_error
import statsmodels.graphics.tsaplots as sgt

# NOTE(review): statsmodels.tsa.arima_model.ARIMA was removed in
# statsmodels >= 0.13 — confirm the pinned statsmodels version, or this
# import fails.  ARIMA is only referenced from commented-out code below.
from statsmodels.tsa.arima_model import ARIMA
# NOTE(review): this rebind discards the observed-data frame loaded above;
# everything below (including the d1..d5 exports) operates on the
# *predicted* table, not on raw observations — confirm that is intended.
df = df_pred
size = int(len(df)*0.8)
df_train = df.iloc[:size]   # 80/20 split; train/test are not used by fun()
df_test = df.iloc[size:]

# u = Update.objects.get(name = "")
u = Update.objects.all().values()

# Leftover row-offset counters from a commented-out prediction approach.
date1 = [2555]
date2 = [2556]
def fun(df):
    """Fit a seasonal SARIMAX model per pollutant on *df* and persist a
    one-day-ahead forecast through the module-level ``data1`` record.

    Parameters
    ----------
    df : pandas.DataFrame
        Date-indexed frame with numeric columns 'AQI', 'SO2', 'NO2', 'O3'
        and 'PM10' (the shape produced by the module-level
        Data_Predicted query).

    Side effects: prints each forecast value, sets the pollutant fields
    and ``Date`` on ``data1`` and calls ``data1.save()``.  The forecast
    is stored under the calendar day after the newest ``df_pred`` row.
    """
    # SARIMAX cannot handle NaNs; drop incomplete rows before fitting.
    df = df.dropna()

    # The next two calendar days after the newest predicted row; index 1
    # is the date the new forecast row is stored under.
    future_dates = [df_pred.index[-1] + DateOffset(days=x) for x in range(2)]

    # Fixed model configuration, shared by every pollutant series.
    my_order = (0, 0, 0)
    my_seasonal_order = (1, 0, 1, 12)

    def _forecast(series):
        """Fit SARIMAX on one series and return its one-step-ahead forecast."""
        model = sm.tsa.SARIMAX(series, order=my_order,
                               seasonal_order=my_seasonal_order, trend='t')
        fitted = model.fit()
        # forecast() yields a true out-of-sample value; predict() with no
        # start/end returns in-sample fitted values, whose last element is
        # the fit for the last *observed* date — not a forecast.
        return fitted.forecast(steps=1).iloc[-1]

    print('############################ f_date', future_dates[0].date())
    print('############################ future dates', future_dates)

    data1.AQI = _forecast(df['AQI'])
    print('AQI :*********************** ', data1.AQI)

    data1.SO2 = _forecast(df['SO2'])
    print('so2 : ', data1.SO2)

    data1.NO2 = _forecast(df['NO2'])
    print('no2 : ', data1.NO2)

    data1.O3 = _forecast(df['O3'])
    print('o3 : ', data1.O3)

    data1.PM10 = _forecast(df['PM10'])
    print('pm10 : ', data1.PM10)

    # Persist the forecast under the first future calendar day.
    data1.Date = pd.to_datetime(future_dates[1]).date()
    data1.save()




# Most recent 30 observations of each series, exported as plain Python
# lists (d1..d5) for the chart-rendering layer.
_window = df.iloc[-30:]
d1 = list(_window['AQI'])
d2 = [ts.strftime('%Y-%m-%d') for ts in _window.index]

d3 = list(_window['SO2'])
d4 = list(_window['NO2'])
d5 = list(_window['O3'])

print(df['Date'])
print(df.tail())
50 changes: 50 additions & 0 deletions main/Scraper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import requests
from datetime import datetime
import pandas as pd
from .models import Data,Data_Predicted

# Module-level model instances: scrap() fills `data` in and saves it as a
# new row on every call.  `data1` is created here but not used in this file.
data = Data()
data1 = Data_Predicted()



def scrap():
    """Fetch the latest Delhi air-quality reading from the Ambee API and
    persist it as a new row via the module-level ``data`` record.

    Raises
    ------
    requests.HTTPError
        If the API responds with a non-2xx status.
    requests.Timeout
        If the API does not respond within the timeout.
    """
    url = "https://api.ambeedata.com/latest/by-city"
    querystring = {"city": "Delhi"}
    # SECURITY: the API key is hard-coded in source.  Move it to an
    # environment variable / Django setting and rotate the exposed key.
    headers = {
        'x-api-key': "mqoZ9hQjIi3ZqydD8W3lU7fGnXJ5ndJI44sAUusN",
        'Content-type': "application/json"
    }
    # Bounded timeout so a hung API call cannot stall the cron job forever.
    response = requests.request("GET", url, headers=headers,
                                params=querystring, timeout=30)
    response.raise_for_status()

    r = response.json()
    station = r['stations'][0]

    data.NO2 = station['NO2']
    data.O3 = station['OZONE']
    data.SO2 = station['SO2']
    data.PM10 = station['PM10']
    data.AQI = station['AQI']

    # 'updatedAt' is a timestamp string; keep only the calendar date.
    data.Date = pd.to_datetime(station['updatedAt']).date()
    data.save()

    # Debug trace of the payload actually stored.
    # NOTE(review): in the diff these prints appeared at module level but
    # reference scrap()'s locals; they have been moved inside the function.
    print('no2', station['NO2'])
    print('o3', station['OZONE'])
    print('so2', station['SO2'])
    print('pm10', station['PM10'])
    print('aqi', station['AQI'])
    print('date', station['updatedAt'])
    print('date type', type(station['updatedAt']))
    print(type(response))
    print(type(r))
    print(r['message'])
    print(r['stations'][0]['OZONE'])
    print(response.text)

# scrap()
Empty file added main/__init__.py
Empty file.
Binary file added main/__pycache__/Ml_model.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/Scraper.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/__init__.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/admin.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/cron.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/models.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/scraper.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/serializers.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/tasks.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/urls.cpython-38.pyc
Binary file not shown.
Binary file added main/__pycache__/views.cpython-38.pyc
Binary file not shown.
Empty file modified main/admin.py
100644 → 100755
Empty file.
Empty file modified main/apps.py
100644 → 100755
Empty file.
Empty file added main/apscheduler/__init__.py
Empty file.
12 changes: 12 additions & 0 deletions main/apscheduler/data_update_autojob.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Disabled APScheduler integration: when active, start() registered
# ml.fun(ml.df) to run every minute in a background scheduler.  Kept for
# reference — main/cron.py appears to be the active scheduling mechanism.
# from apscheduler.schedulers.background import BackgroundScheduler

# from . import Ml_model as ml
# from .models import Test , Data

# data = Data()
# def start():
# scheduler = BackgroundScheduler()
# scheduler.add_job(ml.fun(ml.df),"interval",minutes = 1,id = "data_001")
# scheduler.start()
# Test.objects.create(name = "test")

2 changes: 1 addition & 1 deletion main/cron.py
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ def my_scheduled_job():
# data.AQI = 2
# data.save()
ml.fun(ml.df)
sc.scrap()
# sc.scrap()


# ml.f_s(ml.df)
Expand Down
Empty file modified main/migrations/0001_initial.py
100644 → 100755
Empty file.
Empty file modified main/migrations/0002_update.py
100644 → 100755
Empty file.
Empty file added main/migrations/__init__.py
Empty file.
Binary file not shown.
Binary file not shown.
Binary file added main/migrations/__pycache__/__init__.cpython-38.pyc
Binary file not shown.
22 changes: 22 additions & 0 deletions main/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from django.db import models

# Create your models here.
class Data(models.Model):
    """One observed daily air-quality reading.

    Rows are appended by the scraper (main/Scraper.py) from the Ambee
    API response, one row per calendar date.
    """
    Date = models.DateField()      # calendar date of the reading
    PM10 = models.IntegerField()   # particulate matter (PM10)
    NO2 = models.IntegerField()    # nitrogen dioxide
    SO2 = models.IntegerField()    # sulphur dioxide
    O3 = models.IntegerField()     # ozone
    AQI = models.IntegerField()    # overall air-quality index


class Data_Predicted(models.Model):
    """One forecast air-quality row, written by the SARIMAX model code
    (main/Ml_model.py).  Mirrors the field layout of ``Data``.
    """
    Date = models.DateField()      # calendar date the forecast applies to
    PM10 = models.IntegerField()   # forecast particulate matter (PM10)
    NO2 = models.IntegerField()    # forecast nitrogen dioxide
    SO2 = models.IntegerField()    # forecast sulphur dioxide
    O3 = models.IntegerField()     # forecast ozone
    AQI = models.IntegerField()    # forecast air-quality index

class Update(models.Model):
    """Bookkeeping record.

    NOTE(review): the meaning of ``dat_up`` is not evident from this
    module — presumably an update counter or offset; verify against the
    callers before relying on it.
    """
    dat_up = models.IntegerField()
6 changes: 6 additions & 0 deletions main/resources.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from import_export import resources
from .models import Data

class DataResources(resources.ModelResource):
    """django-import-export resource for bulk import/export of ``Data``
    rows (all model fields, the library's default field set)."""
    class Meta:
        model = Data
17 changes: 17 additions & 0 deletions main/sc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import requests
from datetime import datetime

# Most recent 30 AQI values, newest first; refilled in place by scrap().
aq = []


def scrap():
    """Fetch the 30 newest AQI values from the deployed API into ``aq``.

    The module-level list is cleared and refilled in place so existing
    references to ``sc.aq`` see fresh data.  Values are ordered newest
    first (r[-1], r[-2], ...).

    Raises requests.HTTPError / requests.Timeout on API failure.
    """
    url = "http://vc8006.pythonanywhere.com/api/"
    # Bounded timeout so a hung endpoint cannot block the caller forever.
    response = requests.request("GET", url, timeout=30)
    response.raise_for_status()

    r = response.json()
    # Refill in place: the previous version only ever appended, so each
    # call accumulated duplicate readings in the global list.
    del aq[:]
    for i in range(1, 31):
        aq.append(r[-i]['AQI'])

    print(aq)


scrap()
7 changes: 7 additions & 0 deletions main/serializers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
from rest_framework import fields, serializers
from .models import Data_Predicted

class DataPredSerializers(serializers.ModelSerializer):
    """DRF serializer exposing ``Data_Predicted`` rows over the REST API."""
    class Meta:
        # Forecast date plus every pollutant column.
        fields = ('Date','PM10','NO2','SO2','O3','AQI')
        model = Data_Predicted
Loading