class bondElastic(nn.Module):
    """Small MLP regressor for bond pricing: 5 features -> 1 scalar.

    Architecture: Linear(5, 5) -> ReLU6 -> Linear(5, 1).
    The forward pass flattens the (N, 1) output to shape (N,) so it can
    be compared directly against a 1-D target vector with MSELoss.
    """

    def __init__(self):
        super(bondElastic, self).__init__()
        self.net = nn.Sequential(
            nn.Linear(5, 5),
            nn.ReLU6(),
            nn.Linear(5, 1),
        )

    def forward(self, X):
        # reshape(-1): (N, 1) -> (N,) so predictions align with 1-D labels.
        return self.net(X).reshape(-1)
def train_net(net, theIter, testIter, epochs=50, lr=0.001, weight_decay=0.0005):
    """Train `net` with Adam on MSE loss and record per-epoch losses.

    Parameters
    ----------
    net : nn.Module
        Model whose output matches the shape of the targets yielded below.
    theIter, testIter : iterable of (X, y)
        Training / evaluation mini-batches; re-iterated once per epoch.
    epochs : int
        Number of passes over `theIter`.
    lr, weight_decay : float
        Adam learning rate and L2 penalty.

    Returns
    -------
    pd.DataFrame
        One row per epoch with columns ["训练集损失", "测试集损失"]
        (summed train / test batch losses).
    """
    history = pd.DataFrame(index=list(range(epochs)),
                           columns=["训练集损失", "测试集损失"])
    updater = torch.optim.Adam(net.parameters(), lr, weight_decay=weight_decay)
    loss = nn.MSELoss()
    for i in range(epochs):
        total_loss, test_loss = 0.0, 0.0
        for X, y in theIter:
            # MSELoss with default reduction already returns a scalar.
            l = loss(net(X), y)
            updater.zero_grad()
            l.backward()
            updater.step()
            # .item() extracts the Python float without keeping the graph.
            total_loss += l.item()
        # Evaluation needs no gradients — skip autograd bookkeeping.
        with torch.no_grad():
            for X, y in testIter:
                test_loss += loss(net(X), y).item()
        print(f"在第{i+1}次训练,总损失为{total_loss:.2f},测试损失{test_loss:.2f}")
        history.loc[i] = [total_loss, test_loss]
    return history
def predict(codes, date, obj, net):
    """Build the 5-feature matrix for `codes` on `date` and score with `net`.

    Parameters
    ----------
    codes : sequence
        Bond identifiers; become the index of the returned Series.
    date : hashable
        Row label looked up in `obj.DB[field]` tables.
    obj : object
        Data provider; assumed to expose `obj.DB[field]` DataFrames indexed
        by date with codes as columns — TODO confirm against caller.
    net : nn.Module
        Model taking a (N, 5) float32 tensor and returning N scores.

    Returns
    -------
    pd.Series
        Model output indexed by `codes`.
    """
    dfX = pd.DataFrame(index=codes,
                       columns=["ConvPrem", "StrbPrem", "Outstanding", "Ptm", "Vol"])
    # First four features come straight from the provider's tables.
    for field in dfX.columns[:4]:
        dfX.loc[codes, field] = obj.DB[field].loc[date, codes]
    # Rescale outstanding amount to units of 1e8 (亿).
    dfX["Outstanding"] /= 100000000.0
    # NOTE(review): `st` is defined elsewhere in this module — presumably a
    # factor-data helper; verify it is in scope at call time.
    dfX["Vol"] = st.factor(codes, "annualstdevr_100w", date)
    X = torch.tensor(dfX.astype(float).values, dtype=torch.float32)
    # Inference only — no gradients needed.
    with torch.no_grad():
        return pd.Series(net(X).detach().numpy(), index=codes)
# Instantiate the model and fit it, keeping the per-epoch loss table.
netTest = bondElastic()
# NOTE(review): train_iter / test_iter are defined outside this chunk — confirm.
测算评估 = train_net(netTest, train_iter, test_iter)