Daily 26

Solve the unconstrained optimization problem: f(x) = (x1 + 10x2)^2 + 5(x3 - x4)^2 + (x2 - 2x3)^4 + 10(x1 - x4)^4 (Powell's function);
(2) Termination criterion: ||∇f(x)|| ≤ 1e-6;
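For reference, the gradient used throughout (a hand derivation from the function above, worth double-checking against the code), together with the steepest descent update it drives:

\nabla f(x) = \begin{pmatrix} 2(x_1 + 10x_2) + 40(x_1 - x_4)^3 \\ 20(x_1 + 10x_2) + 4(x_2 - 2x_3)^3 \\ 10(x_3 - x_4) - 8(x_2 - 2x_3)^3 \\ -10(x_3 - x_4) - 40(x_1 - x_4)^3 \end{pmatrix}, \qquad x_{k+1} = x_k - \alpha_k \nabla f(x_k)

where the step size \alpha_k comes from an exact line search along the negative gradient.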
Python (steepest descent with exact line search):
import numpy as np
from scipy.optimize import minimize_scalar

def objective(x):
    """Objective function"""
    x1, x2, x3, x4 = x
    term1 = (x1 + 10 * x2) ** 2
    term2 = 5 * (x3 - x4) ** 2
    term3 = (x2 - 2 * x3) ** 4
    term4 = 10 * (x1 - x4) ** 4
    return term1 + term2 + term3 + term4

def gradient(x):
    """Gradient function (returns the gradient vector)"""
    x1, x2, x3, x4 = x
    grad = np.zeros(4)

    # Partial derivative w.r.t. x1
    grad[0] = 2 * (x1 + 10 * x2) + 40 * (x1 - x4) ** 3
    # Partial derivative w.r.t. x2
    grad[1] = 20 * (x1 + 10 * x2) + 4 * (x2 - 2 * x3) ** 3
    # Partial derivative w.r.t. x3
    grad[2] = 10 * (x3 - x4) - 8 * (x2 - 2 * x3) ** 3
    # Partial derivative w.r.t. x4
    grad[3] = -10 * (x3 - x4) - 40 * (x1 - x4) ** 3

    return grad
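
# Optional sanity check (an addition, not from the original post): compare the
# analytic gradient above against a central finite-difference approximation,
# to catch sign or coefficient mistakes in hand-derived partial derivatives.
def check_gradient(x, eps=1e-6):
    """Max absolute difference between analytic and numeric gradients at x."""
    numeric = np.zeros(4)
    for i in range(4):
        e = np.zeros(4)
        e[i] = eps
        numeric[i] = (objective(x + e) - objective(x - e)) / (2 * eps)
    return np.max(np.abs(numeric - gradient(x)))

# Example: check_gradient(np.array([1.0, -1.0, 2.0, -2.0])) should be close to zero.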

def steepest_descent(initial_x, tol=1e-6, max_iter=1000):
    """Steepest descent method"""
    x = initial_x.astype(float).copy()
    history = [x.copy()]  # record the iterates
    iter_count = 0

    while iter_count < max_iter:
        grad = gradient(x)
        grad_norm = np.linalg.norm(grad)

        if grad_norm <= tol:
            break

        d = -grad  # search direction: negative gradient

        # Exact line search: minimize the one-dimensional function f(x + alpha*d)
        def line_search_func(alpha):
            return objective(x + alpha * d)

        # Use golden-section search to find the optimal step size alpha
        res = minimize_scalar(line_search_func, method='golden')
        alpha = res.x

        x = x + alpha * d
        history.append(x.copy())
        iter_count += 1

    return x, objective(x), iter_count, history

# Test different initial points

initial_points = [
    np.array([0, 0, 0, 0]),    # initial point 1
    np.array([1, -1, 2, -2]),  # initial point 2
    np.array([-5, 3, 1, 4])    # initial point 3
]

for idx, x0 in enumerate(initial_points):
    x_opt, f_opt, n_iter, _ = steepest_descent(x0)
    print(f"Initial point {idx+1}:")
    print(f"Optimal solution x = {x_opt.round(6)}")
    print(f"Optimal function value f(x) = {f_opt.round(6)}")
    print(f"Number of iterations: {n_iter}")
    print("-" * 50)

posted @ 2025-05-13 21:45  一如初见233