I keep getting this error over and over. I don't know what the problem is or how to fix it.
/lem_robotics$ /bin/python /home/pallabi/lem_robotics/src/control/ilqr_py/pytorch_cost_object1.py
Traceback (most recent call last):
  File "/home/pallabi/lem_robotics/src/control/ilqr_py/pytorch_cost_object1.py", line 64, in <module>
    cost = c_obj.helper_all_cost_stage(x0, u0, y0)
  File "/home/pallabi/lem_robotics/src/control/ilqr_py/pytorch_cost_object1.py", line 40, in helper_all_cost_stage
    J = self.forward(states, inputs, outputs)
  File "/home/pallabi/lem_robotics/src/control/ilqr_py/pytorch_cost_object1.py", line 20, in forward
    Jquad_x = self.helper_quadratic_cost_term(self.qx, states, self.rx)
  File "/home/pallabi/lem_robotics/src/control/ilqr_py/pytorch_cost_object1.py", line 36, in helper_quadratic_cost_term
    assert (cost.size() == torch.Size([1]))
AssertionError
Here is the actual code:
import torch
import torch.nn as nn
import torch.nn.functional as F


class CostObject(nn.Module):
    def __init__(self, dynamical_system):
        super(CostObject, self).__init__()
        self.ds = dynamical_system
        self.qx = nn.Parameter(torch.zeros(self.ds.nx))
        self.qu = nn.Parameter(torch.zeros(self.ds.nu))
        self.qy = nn.Parameter(torch.zeros(self.ds.ny))
        self.rx = nn.Parameter(torch.zeros(self.ds.nx))
        self.ru = nn.Parameter(torch.zeros(self.ds.nu))
        self.ry = nn.Parameter(torch.zeros(self.ds.ny))
        self.Qx = nn.Parameter(torch.diag(self.qx))
        self.Qu = nn.Parameter(torch.diag(self.qu))
        self.Qy = nn.Parameter(torch.diag(self.qy))

    def forward(self, states, inputs, outputs):
        Jquad_x = self.helper_quadratic_cost_term(self.qx, states, self.rx)
        Jquad_u = self.helper_quadratic_cost_term(self.qu, inputs, self.ru)
        Jquad_y = self.helper_quadratic_cost_term(self.qy, outputs, self.ry)
        J = Jquad_x + Jquad_u + Jquad_y
        return J

    def helper_quadratic_cost_term(self, weights, variables, references):
        assert (weights.size() == variables.size())
        assert (weights.size() == references.size())
        error = variables - references
        D = torch.diag(weights)
        #cost = torch.matmul(error.T, torch.matmul(D, error))
        cost = (error.permute(*torch.arange(error.ndim - 1, -1, -1)) @ D @ error).squeeze()
        #cost = (error.permute(1, 0) @ D @ error).squeeze()
        #cost = (error.T @ D @ error).squeeze()
        assert (cost.size() == torch.Size([1]))
        return cost

    def helper_all_cost_stage(self, states, inputs, outputs):
        J = self.forward(states, inputs, outputs)
        Jx = torch.autograd.grad(J, states, create_graph=True)[0]
        Ju = torch.autograd.grad(J, inputs, create_graph=True)[0]
        Jxx = torch.autograd.grad(Jx, states, create_graph=True)[0]
        Jux = torch.autograd.grad(Jx, inputs, create_graph=True)[0]
        Juu = torch.autograd.grad(Ju, inputs, create_graph=True)[0]
        return J, Jx, Ju, Jxx, Jux, Juu


if __name__ == '__main__':
    from robot_model_parameters import robot_model
    import numpy as np

    robot = robot_model('ackermann')
    c_obj = CostObject(robot)
    x0 = torch.zeros(robot.nx, requires_grad=True)
    u0 = torch.zeros(robot.nu, requires_grad=True)
    y0 = torch.zeros(robot.ny, requires_grad=True)
    qx = torch.ones(robot.nx, requires_grad=True)
    qu = torch.ones(robot.nu, requires_grad=True)
    qy = torch.ones(robot.ny, requires_grad=True)
    rx = torch.ones(robot.nx, requires_grad=True)
    ru = torch.zeros(robot.nu, requires_grad=True)
    ry = rx
    cost = c_obj.helper_all_cost_stage(x0, u0, y0)
    print("cost", cost)
The assertion fails because its condition is false: cost.size() is not torch.Size([1]). In helper_quadratic_cost_term, error is a 1-D tensor of shape (n,), so error @ D @ error contracts every dimension and produces a 0-dimensional (scalar) tensor, and .squeeze() leaves it 0-dimensional. Its size is therefore torch.Size([]), which does not equal torch.Size([1]), so the assert raises. The transpose via permute does not matter here, since transposing a 1-D tensor is a no-op. Either compare against torch.Size([]), or explicitly restore the extra dimension (for example with cost.unsqueeze(0) or cost.reshape(1)) before the assertion.
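As a minimal sketch of the second option, standalone so it does not depend on your robot_model_parameters module (n here is just a placeholder for robot.nx / robot.nu / robot.ny):

import torch

def quadratic_cost_term(weights, variables, references):
    # error is a 1-D tensor of shape (n,)
    error = variables - references
    D = torch.diag(weights)
    # (n,) @ (n, n) @ (n,) contracts every dimension, so the result is 0-dimensional
    cost = error @ D @ error
    assert cost.size() == torch.Size([])  # scalar tensor, not torch.Size([1])
    # if downstream code expects a shape-(1,) tensor, add the dimension back explicitly
    return cost.unsqueeze(0)

n = 4  # placeholder dimension
weights = torch.ones(n)
variables = torch.zeros(n, requires_grad=True)
references = torch.ones(n)

cost = quadratic_cost_term(weights, variables, references)
print(cost, cost.size())  # tensor([4.], grad_fn=...) torch.Size([1])

If you do not actually need the cost to be a shape-(1,) tensor, the simpler change is to keep the scalar and assert cost.size() == torch.Size([]) instead.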