MATLAB Optimization Algorithms: Steepest Descent Method

The script below minimizes f(x1, x2) = x1^2 + 2*x2^2 with the steepest descent method, using a backtracking (Armijo) line search to choose the step length. It runs the method from several initial points, prints the solution found for each, and plots the optimization path together with the convergence history.

function steepest_descent
    % Main driver function
    clc, clear;
    
    % Test different initial points
    initial_points = [
        1.5, 1.5;
        -1.0, 1.0;
        2.0, -2.0;
        -1.5, -1.5
    ];
    
    for i = 1:size(initial_points, 1)
        x0 = initial_points(i, :)';
        fprintf('\nTest initial point %d: x0 = [%.1f, %.1f]\n', i, x0(1), x0(2));
        [x_opt, f_opt, iterations, history] = optimize(x0);
        fprintf('Optimal solution: x* = [%.6f, %.6f]\n', x_opt(1), x_opt(2));
        fprintf('Optimal value: f* = %.6f\n', f_opt);
        fprintf('Iterations: %d\n', iterations);
        plot_results(history);
    end
end

function [x, f_val, iter, history] = optimize(x0)
    % Steepest descent with a backtracking line search
    epsilon = 1e-6;    % tolerance on the gradient norm (stopping criterion)
    max_iter = 1000;   % maximum number of iterations
    
    x = x0;
    history = zeros(max_iter + 1, 3);  % each row stores [x1, x2, f(x)]
    history(1, :) = [x', objective_function(x)];
    iter = 0;
    
    while iter < max_iter
        grad = gradient_function(x);
        if norm(grad) < epsilon
            break;
        end
        
        d = -grad;                   % negative gradient (steepest descent) direction
        alpha = line_search(x, d);   % step length from the backtracking search
        x = x + alpha * d;
        
        iter = iter + 1;
        history(iter + 1, :) = [x', objective_function(x)];
    end
    
    history = history(1:iter + 1, :);  % drop unused rows
    f_val = objective_function(x);
end

function f = objective_function(x)
    % Objective function f(x1, x2) = x1^2 + 2*x2^2
    f = x(1)^2 + 2*x(2)^2;
end

function grad = gradient_function(x)
    % Analytic gradient of the objective
    grad = [2*x(1); 4*x(2)];
end
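
% Optional sanity check (not in the original script): compare the analytic
% gradient above against a central finite-difference approximation. The
% helper name check_gradient is hypothetical and is never called by the
% driver; it is only a quick way to verify gradient_function.
function check_gradient(x)
    h = 1e-6;
    g_num = zeros(2, 1);
    for k = 1:2
        e = zeros(2, 1);
        e(k) = h;
        g_num(k) = (objective_function(x + e) - objective_function(x - e)) / (2*h);
    end
    g_ana = gradient_function(x);
    fprintf('analytic: [%g, %g], finite-difference: [%g, %g]\n', ...
        g_ana(1), g_ana(2), g_num(1), g_num(2));
end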

function alpha = line_search(x, d)
    % Backtracking (Armijo) line search for the step length
    alpha = 0.1;       % initial step length
    beta = 0.8;        % shrinking factor
    t = 0.5;           % sufficient-decrease (Armijo) parameter
    
    f_x = objective_function(x);
    slope = gradient_function(x)' * d;   % directional derivative; negative for a descent direction
    alpha_min = 1e-12;                   % safeguard against an endless loop
    
    while objective_function(x + alpha * d) > f_x + t * alpha * slope
        alpha = alpha * beta;
        if alpha < alpha_min
            break;
        end
    end
end
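
% Alternative (not in the original script): because the objective is the
% quadratic f(x) = x1^2 + 2*x2^2 with Hessian diag([2, 4]), the step length
% that exactly minimizes f(x + alpha*d) along d has a closed form. The name
% exact_line_search is hypothetical; it could be swapped in for line_search.
function alpha = exact_line_search(x, d)
    H = diag([2, 4]);                  % Hessian of the quadratic objective
    g = gradient_function(x);          % gradient at the current point
    alpha = -(g' * d) / (d' * H * d);  % minimizer of the 1-D quadratic in alpha
end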

function plot_results(history)
    % Plot the optimization path and the convergence history
    figure('Position', [100, 100, 1200, 500]);
    
    % Contour plot with the iterate path
    subplot(1, 2, 1);
    [X1, X2] = meshgrid(-2:0.1:2, -2:0.1:2);
    Z = X1.^2 + 2*X2.^2;
    contour(X1, X2, Z, 20);
    hold on;
    plot(history(:,1), history(:,2), 'r.-', 'LineWidth', 1.5);
    plot(history(1,1), history(1,2), 'go', 'MarkerSize', 10, 'LineWidth', 2);
    plot(history(end,1), history(end,2), 'ro', 'MarkerSize', 10, 'LineWidth', 2);
    xlabel('x1');
    ylabel('x2');
    title('Optimization path on the contour plot');
    legend('Contours', 'Iterate path', 'Initial point', 'Optimum');
    grid on;
    
    % Objective value versus iteration count
    subplot(1, 2, 2);
    plot(0:size(history,1)-1, history(:,3), 'b.-', 'LineWidth', 1.5);
    xlabel('Iteration');
    ylabel('Objective value');
    title('Objective value vs. iteration');
    grid on;
end
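
To run the listing, save it as steepest_descent.m on the MATLAB path and call the driver from the command window (a minimal usage sketch; local functions such as optimize are only visible inside that file):

    >> steepest_descent

For f(x1, x2) = x1^2 + 2*x2^2 the unique minimizer is the origin, so each printed x* should be close to [0, 0], f* should be close to 0, and the contour plots should show the iterates converging toward the origin.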