import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import torch
import torch.optim as optim
import warnings
warnings.filterwarnings("ignore")
%matplotlib inline
features = pd.read_csv('temps.csv')
# Take a look at what the data looks like
features.head()
|   | year | month | day | week | temp_2 | temp_1 | average | actual | friend |
|---|------|-------|-----|------|--------|--------|---------|--------|--------|
| 0 | 2016 | 1 | 1 | Fri | 45 | 45 | 45.6 | 45 | 29 |
| 1 | 2016 | 1 | 2 | Sat | 44 | 45 | 45.7 | 44 | 61 |
| 2 | 2016 | 1 | 3 | Sun | 45 | 44 | 45.8 | 41 | 56 |
| 3 | 2016 | 1 | 4 | Mon | 44 | 41 | 45.9 | 40 | 53 |
| 4 | 2016 | 1 | 5 | Tues | 41 | 40 | 46.0 | 44 | 41 |
In this table:
- year, month, day, week give the date of each record
- temp_2: the maximum temperature two days before
- temp_1: the maximum temperature the day before
- average: the historical average maximum temperature for that calendar day
- actual: the label, i.e. the true maximum temperature on that day
- friend: a friend's guess for the day; this column can safely be ignored
print('Data dimensions:', features.shape)
Data dimensions: (348, 9)
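Before going further, it is worth a quick sanity pass over the raw table; this check is not part of the original notebook, just an optional way to confirm the value ranges and that nothing is missing:

```python
# Optional sanity checks: summary statistics and missing-value counts
print(features.describe())
print(features.isna().sum())
```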
# Process the date information
import datetime
# Pull out the year, month, and day columns
years = features['year']
months = features['month']
days = features['day']
# Build datetime objects
dates = [str(int(year)) + '-' + str(int(month)) + '-' + str(int(day)) for year, month, day in zip(years, months, days)]
dates = [datetime.datetime.strptime(date, '%Y-%m-%d') for date in dates]
dates[:5]
[datetime.datetime(2016, 1, 1, 0, 0),
datetime.datetime(2016, 1, 2, 0, 0),
datetime.datetime(2016, 1, 3, 0, 0),
datetime.datetime(2016, 1, 4, 0, 0),
datetime.datetime(2016, 1, 5, 0, 0)]
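As an aside, the same conversion can be done in a single call, since pandas can assemble datetimes directly from year/month/day columns; a minimal equivalent sketch (not used in the rest of the notebook):

```python
# Equivalent one-liner: pandas builds a datetime Series from the three columns
dates_alt = pd.to_datetime(features[['year', 'month', 'day']])
print(dates_alt.head())
```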
# Prepare the plots
# Use a default style
plt.style.use('fivethirtyeight')
# Set up a 2x2 layout
fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(nrows=2, ncols=2, figsize = (10,10))
fig.autofmt_xdate(rotation = 45)  # rotate the x-axis labels by 45 degrees
# Actual max temperature (the label)
ax1.plot(dates, features['actual'])
ax1.set_xlabel(''); ax1.set_ylabel('Temperature'); ax1.set_title('Max Temp')
# Yesterday's max temperature
ax2.plot(dates, features['temp_1'])
ax2.set_xlabel(''); ax2.set_ylabel('Temperature'); ax2.set_title('Previous Max Temp')
# Max temperature two days before
ax3.plot(dates, features['temp_2'])
ax3.set_xlabel('Date'); ax3.set_ylabel('Temperature'); ax3.set_title('Two Days Prior Max Temp')
# The friend's guess
ax4.plot(dates, features['friend'])
ax4.set_xlabel('Date'); ax4.set_ylabel('Temperature'); ax4.set_title('Friend Estimate')
plt.tight_layout(pad=2)
# One-hot encode the categorical week column
features = pd.get_dummies(features)
features.head(5)
|   | year | month | day | temp_2 | temp_1 | average | actual | friend | week_Fri | week_Mon | week_Sat | week_Sun | week_Thurs | week_Tues | week_Wed |
|---|------|-------|-----|--------|--------|---------|--------|--------|----------|----------|----------|----------|------------|-----------|----------|
| 0 | 2016 | 1 | 1 | 45 | 45 | 45.6 | 45 | 29 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
| 1 | 2016 | 1 | 2 | 44 | 45 | 45.7 | 44 | 61 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
| 2 | 2016 | 1 | 3 | 45 | 44 | 45.8 | 41 | 56 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
| 3 | 2016 | 1 | 4 | 44 | 41 | 45.9 | 40 | 53 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
| 4 | 2016 | 1 | 5 | 41 | 40 | 46.0 | 44 | 41 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
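Here get_dummies expanded only the non-numeric week column. If you prefer to state explicitly which columns get encoded, the columns argument gives the same result for this dataset; a small illustration (re-reading the CSV only so the snippet is self-contained):

```python
# Explicit about which column is one-hot encoded; output matches the table above
encoded = pd.get_dummies(pd.read_csv('temps.csv'), columns=['week'])
print(encoded.columns.tolist())
```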
# Labels
labels = np.array(features['actual'])
# Drop the label column from the features
features = features.drop('actual', axis = 1)
# Keep the column names around for later use
feature_list = list(features.columns)
# Convert to a NumPy array
features = np.array(features)
features.shape
(348, 14)
from sklearn import preprocessing
input_features = preprocessing.StandardScaler().fit_transform(features)
input_features[0]
array([ 0. , -1.5678393 , -1.65682171, -1.48452388, -1.49443549,
-1.3470703 , -1.98891668, 2.44131112, -0.40482045, -0.40961596,
-0.40482045, -0.40482045, -0.41913682, -0.40482045])
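StandardScaler simply subtracts each column's mean and divides by its standard deviation; the constant year column has zero variance, which is why its scaled value in the printed row is just 0. A small check of one non-constant column, added here only for illustration:

```python
# Reproduce the scaler by hand for the 'average' column: (x - mean) / std
col = feature_list.index('average')
manual = (features[:, col] - features[:, col].mean()) / features[:, col].std()
print(np.allclose(manual, input_features[:, col]))  # expected: True
```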
Building the network model
(Figure: the network architecture, 14 input features feeding a 128-unit hidden layer and a single output.)
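Spelled out, the model built in the next two cells is a single-hidden-layer regression network:

$$
h = \mathrm{ReLU}(x W_1 + b_1), \qquad \hat{y} = h W_2 + b_2
$$

where $x \in \mathbb{R}^{348 \times 14}$ holds the standardized features, $W_1 \in \mathbb{R}^{14 \times 128}$, $b_1 \in \mathbb{R}^{128}$, $W_2 \in \mathbb{R}^{128 \times 1}$, $b_2 \in \mathbb{R}$, and training minimizes the mean squared error $\frac{1}{n}\sum_i (\hat{y}_i - y_i)^2$.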
# Convert the data to tensors
x = torch.tensor(input_features, dtype = float)
y = torch.tensor(labels, dtype = float).reshape(-1, 1)  # (348, 1) so it matches the prediction shape and the subtraction below does not broadcast
# Initialize the weight parameters
weights = torch.randn((14, 128), dtype = float, requires_grad = True)
biases = torch.randn(128, dtype = float, requires_grad = True)
weights2 = torch.randn((128, 1), dtype = float, requires_grad = True)
biases2 = torch.randn(1, dtype = float, requires_grad = True)
learning_rate = 0.001
losses = []
for i in range(1000):
    # Forward pass: hidden layer
    hidden = x.mm(weights) + biases
    # Apply the activation function
    hidden = torch.relu(hidden)
    # Predictions
    predictions = hidden.mm(weights2) + biases2
    # Compute the loss
    loss = torch.mean((predictions - y) ** 2)
    losses.append(loss.data.numpy())
    # Print the loss every 100 iterations
    if i % 100 == 0:
        print('loss:', loss)
    # Backward pass
    loss.backward()
    # Update the parameters
    weights.data.add_(- learning_rate * weights.grad.data)
    biases.data.add_(- learning_rate * biases.grad.data)
    weights2.data.add_(- learning_rate * weights2.grad.data)
    biases2.data.add_(- learning_rate * biases2.grad.data)
    # Remember to zero out the gradients after every iteration
    weights.grad.data.zero_()
    biases.grad.data.zero_()
    weights2.grad.data.zero_()
    biases2.grad.data.zero_()
loss: tensor(4238.8822, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(155.8961, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(146.9377, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(144.1912, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(142.8590, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(142.0588, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(141.5304, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(141.1626, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(140.8778, dtype=torch.float64, grad_fn=<MeanBackward0>)
loss: tensor(140.6519, dtype=torch.float64, grad_fn=<MeanBackward0>)
predictions.shape
torch.Size([348, 1])
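The losses list collected above is never plotted in the original notebook; an optional snippet makes the convergence of the hand-written loop easy to see:

```python
# Plot the per-iteration MSE of the hand-written training loop
plt.figure(figsize=(8, 4))
plt.plot(losses)
plt.xlabel('Iteration'); plt.ylabel('MSE loss'); plt.title('Hand-written loop: training loss')
plt.show()
```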
A simpler way to build the network
input_size = input_features.shape[1]
hidden_size = 128
output_size = 1
batch_size = 16
my_nn = torch.nn.Sequential(
    torch.nn.Linear(input_size, hidden_size),
    torch.nn.Sigmoid(),
    torch.nn.Linear(hidden_size, output_size),
)
cost = torch.nn.MSELoss(reduction='mean')
optimizer = torch.optim.Adam(my_nn.parameters(), lr = 0.001)
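torch.nn.Sequential is the most compact way to stack layers. The same two-layer model can also be written as an nn.Module subclass, which scales better once the forward pass is no longer a straight chain; a minimal equivalent sketch (the class name TempNet is just for illustration and is not used below):

```python
import torch.nn as nn

class TempNet(nn.Module):
    """Same 14 -> 128 -> 1 architecture as the Sequential model above."""
    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.hidden = nn.Linear(input_size, hidden_size)
        self.out = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        x = torch.sigmoid(self.hidden(x))
        return self.out(x)

# my_nn = TempNet(input_size, hidden_size, output_size)  # drop-in replacement
```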
# Train the network
losses = []
for i in range(1000):
    batch_loss = []
    # Train with mini-batches
    for start in range(0, len(input_features), batch_size):
        end = start + batch_size if start + batch_size < len(input_features) else len(input_features)
        xx = torch.tensor(input_features[start:end], dtype = torch.float)
        yy = torch.tensor(labels[start:end], dtype = torch.float).reshape(-1, 1)  # match the (batch, 1) prediction shape
        prediction = my_nn(xx)
        loss = cost(prediction, yy)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        batch_loss.append(loss.data.numpy())
    # Print the loss every 100 epochs
    if i % 100 == 0:
        losses.append(np.mean(batch_loss))
        print(i, np.mean(batch_loss))
0 3947.049
100 37.844784
200 35.660378
300 35.282845
400 35.11639
500 34.988346
600 34.87178
700 34.753754
800 34.62929
900 34.49678
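The manual start/end slicing works, but PyTorch's TensorDataset and DataLoader handle the same batching (plus optional shuffling) with less code. A sketch of the same training loop using them, assuming the my_nn, cost, and optimizer objects defined above (the names dataset and loader are just for illustration):

```python
from torch.utils.data import TensorDataset, DataLoader

# Wrap the whole training set once: (348, 14) features and (348, 1) labels
dataset = TensorDataset(torch.tensor(input_features, dtype=torch.float),
                        torch.tensor(labels, dtype=torch.float).reshape(-1, 1))
loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)

for epoch in range(1000):
    for xx, yy in loader:
        optimizer.zero_grad()
        loss = cost(my_nn(xx), yy)
        loss.backward()
        optimizer.step()
```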
Predicting with the trained network
x = torch.tensor(input_features, dtype = torch.float)
predict = my_nn(x).data.numpy()
# Rebuild the datetime objects for plotting
dates = [str(int(year)) + '-' + str(int(month)) + '-' + str(int(day)) for year, month, day in zip(years, months, days)]
dates = [datetime.datetime.strptime(date, '%Y-%m-%d') for date in dates]
# Create a dataframe holding the dates and the corresponding actual values
true_data = pd.DataFrame(data = {'date': dates, 'actual': labels})
# Likewise, create one holding the dates and the model's predictions
months = features[:, feature_list.index('month')]
days = features[:, feature_list.index('day')]
years = features[:, feature_list.index('year')]
test_dates = [str(int(year)) + '-' + str(int(month)) + '-' + str(int(day)) for year, month, day in zip(years, months, days)]
test_dates = [datetime.datetime.strptime(date, '%Y-%m-%d') for date in test_dates]
predictions_data = pd.DataFrame(data = {'date': test_dates, 'prediction': predict.reshape(-1)})
# Actual values
plt.plot(true_data['date'], true_data['actual'], 'b-', label = 'actual')
# Predicted values
plt.plot(predictions_data['date'], predictions_data['prediction'], 'ro', label = 'prediction')
plt.xticks(rotation = 60);
plt.legend()
# Axis labels and title
plt.xlabel('Date'); plt.ylabel('Maximum Temperature (F)'); plt.title('Actual and Predicted Values');
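Beyond the plot, a single summary number is handy; mean absolute error over the full dataset (note these are the same rows the model was trained on, so this is a training error, not a held-out test score):

```python
# Mean absolute error between predictions and the actual temperatures
mae = np.mean(np.abs(predict.reshape(-1) - labels))
print('MAE: {:.2f} degrees F'.format(mae))
```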