LSTM time-series forecasting

import matplotlib.pyplot as plt
from pandas import read_excel
from pandas import DataFrame
from pandas import concat
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout
from numpy import concatenate
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from math import sqrt
from sklearn.model_selection import train_test_split
import tensorflow as tf
print(tf.__version__)
2.10.0
tf.random.set_seed(2)
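Note that `tf.random.set_seed` only seeds TensorFlow's own generator. For fully repeatable runs, Python's and NumPy's RNGs can be seeded too; a minimal sketch (the value 2 simply mirrors the seed above):

```python
import os
import random
import numpy as np

# seed the other RNGs Keras may rely on; 2 mirrors tf.random.set_seed(2)
os.environ['PYTHONHASHSEED'] = '2'
random.seed(2)
np.random.seed(2)
```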
qy_data=read_excel(r'科技类(1).xlsx')
qy_data.index.name='num' # set the index column name
print(qy_data.head())
     d1
num    
0     1
1     2
2     3
3     4
4     5
# get the data out of the DataFrame as a NumPy array
values = qy_data.values
# make sure all values are float
values = values.astype('float32')
scaler = MinMaxScaler(feature_range=(0, 1))
scaled = scaler.fit_transform(values)
scaled

array([ ...,
       [1.61995797e-03],
       [2.42993678e-03],
       [2.59193266e-03],
       ...,
       [9.07500386e-01],
       [9.44921434e-01],
       [1.00000000e+00]], dtype=float32)
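Because `MinMaxScaler` stores the fitted minimum and range, anything expressed in the scaled [0, 1] space (including model predictions later on) can be mapped back to the original units. A minimal sketch:

```python
# invert the scaling; works for any array with the same column count
# the scaler was fitted on
restored = scaler.inverse_transform(scaled)
print(restored[:5])  # should match values[:5] up to float32 rounding
```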
def series_to_supervised(data, n_in=1, n_out=1, dropnan=True):
    n_vars = 1 if type(data) is list else data.shape[1]
    df = DataFrame(data)
    cols, names = list(), list()
    # input sequence (t-n, ... t-1)
    for i in range(n_in, 0, -1):
        cols.append(df.shift(i))
        names += [('var%d(t-%d)' % (j + 1, i)) for j in range(n_vars)]
    # forecast sequence (t, t+1, ... t+n)
    for i in range(0, n_out):
        cols.append(df.shift(-i))
        if i == 0:
            names += [('var%d(t)' % (j + 1)) for j in range(n_vars)]
        else:
            names += [('var%d(t+%d)' % (j + 1, i)) for j in range(n_vars)]
    # put it all together
    agg = concat(cols, axis=1)
    agg.columns = names
    # drop rows with NaN values
    if dropnan:
        agg.dropna(inplace=True)
    return agg
 
reframed = series_to_supervised(scaled, 3, 1)
x = [1,2,3,4,5,6,7,8,9]
x = DataFrame(x)
xx = series_to_supervised(x,3,1)
xx

   var1(t-3)  var1(t-2)  var1(t-1)  var1(t)
3        1.0        2.0        3.0        4
4        2.0        3.0        4.0        5
5        3.0        4.0        5.0        6
6        4.0        5.0        6.0        7
7        5.0        6.0        7.0        8
8        6.0        7.0        8.0        9
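The code below keeps only the first lag pair out of `reframed`, so the model sees a single timestep. If all three lag columns were used as a 3-step input sequence instead, the reshape would look like this (a sketch of the alternative, not what the code below does):

```python
# hypothetical alternative: feed all three lags as timesteps
vals = reframed.values
X_all = vals[:, :-1].reshape(len(vals), 3, 1)  # (samples, timesteps, features)
y_all = vals[:, -1]
# an LSTM consuming X_all would declare input_shape=(3, 1)
```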

values = reframed.values
# only the first two columns are used: one lag as input, the next value as target
X, y = values[:, 0], values[:, 1]
# reshape to the (samples, timesteps, features) layout the LSTM expects
X = X.reshape(len(X), 1, 1)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42)
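One caveat: `train_test_split` shuffles by default, so future observations leak into the training set of a time series. A chronological split avoids that; a sketch:

```python
# hypothetical chronological split: first 67% to train, the rest to test
n_train = int(len(X) * 0.67)
X_train, X_test = X[:n_train], X[n_train:]
y_train, y_test = y[:n_train], y[n_train:]
```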
# 1. define the network
model = Sequential()
model.add(LSTM(10, input_shape=(1, 1)))
model.add(Dense(1))
# 2. compile the network
model.compile(optimizer='adam', loss='mean_squared_error')
# 3. train the network (full-batch, silent)
history = model.fit(X, y, epochs=1000, batch_size=len(X), verbose=0)
# 4. evaluate the network
loss = model.evaluate(X, y, verbose=0)
print(loss)
# 5. make predictions
predictions = model.predict(X, verbose=0)
print(predictions[:, 0])
6.705456326017156e-05
[0.01086098 0.01100232 0.01114367 ... 0.9391651  0.9838463  1.027557  ]
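The metric helpers imported at the top (`mean_squared_error`, `mean_absolute_error`, `r2_score` and `sqrt`) are never called above. A minimal sketch of how they could score these in-sample predictions on the scaled data:

```python
# score the predictions against the targets (both still in scaled units)
rmse = sqrt(mean_squared_error(y, predictions[:, 0]))
mae = mean_absolute_error(y, predictions[:, 0])
r2 = r2_score(y, predictions[:, 0])
print('RMSE: %.5f  MAE: %.5f  R2: %.5f' % (rmse, mae, r2))
```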
# an alternative model, kept commented out; the training log below came from a run of it
# model = Sequential()
# model.add(LSTM(1, input_shape=(1,1)))
# model.add(Dropout(0.5))
# model.add(Dense(15,activation='relu'))  # activation function
# model.compile(loss='mae', optimizer='adam')
# history = model.fit(X_train, y_train, epochs=95, batch_size=2, validation_data=(X_test, y_test), verbose=2, shuffle=False)
Epoch 1/95
207/207 - 2s - loss: 0.1184 - val_loss: 0.1095 - 2s/epoch - 9ms/step
Epoch 2/95
207/207 - 0s - loss: 0.1086 - val_loss: 0.1091 - 413ms/epoch - 2ms/step
Epoch 3/95
207/207 - 0s - loss: 0.0766 - val_loss: 0.0689 - 323ms/epoch - 2ms/step
...
Epoch 94/95
207/207 - 0s - loss: 0.0364 - val_loss: 0.0356 - 301ms/epoch - 1ms/step
Epoch 95/95
207/207 - 0s - loss: 0.0292 - val_loss: 0.0357 - 319ms/epoch - 2ms/step
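The log shows `val_loss` flattening out around 0.035 after roughly 20 epochs, so most of the 95 epochs add nothing. An `EarlyStopping` callback would cut training off automatically; a sketch against the commented-out model above:

```python
from tensorflow.keras.callbacks import EarlyStopping

# stop once val_loss has not improved for 10 epochs, keep the best weights
stopper = EarlyStopping(monitor='val_loss', patience=10,
                        restore_best_weights=True)
# history = model.fit(X_train, y_train, epochs=95, batch_size=2,
#                     validation_data=(X_test, y_test), verbose=2,
#                     shuffle=False, callbacks=[stopper])
```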
# y_predict = model.predict(X_test)
# test_X = X_test.reshape((X_test.shape[0], X_test.shape[2]))
plt.figure(figsize=(10,8),dpi=150)
plt.plot(y,color='red',label='Original')
plt.plot(predictions,color='green',label='Predict')
plt.xlabel('sample index')
plt.ylabel('Soil moisture')
plt.legend()
plt.show()

*(figure: the original series in red against the model predictions in green)*
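Since `model.fit` returns its history, the convergence of the first model can be checked the same way; a sketch (only `'loss'` is recorded there because that fit ran without validation data):

```python
# plot the training loss recorded by the earlier 1000-epoch fit
plt.figure(figsize=(8, 5))
plt.plot(history.history['loss'], color='blue', label='train loss')
plt.xlabel('epoch')
plt.ylabel('MSE loss')
plt.legend()
plt.show()
```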


This article was published quickly using the limfx VSCode plugin.