首页 > 解决方案 > 设置全局 PHP-CURL 上传和下载速度限制?

问题描述

我在几个单独的 PHP 脚本中使用 CURL 来下载/上传文件,有没有办法设置 GLOBAL(不是 per-curl-handle)UL / DL 速率速度限制?

不幸的是,您只能在 CURL 上为单个会话设置速度限制,但这不是动态的。

服务器操作系统是 Ubuntu，是否有其他方法可以从系统层面限制 CURL 进程的速度？

谢谢

标签: php、ubuntu、curl、php-curl

解决方案


curl/libcurl 没有任何功能可以跨 curl_easy 句柄共享带宽限制，更不用说跨不同进程了。我建议使用一个 curl 守护进程来集中强制执行带宽限制。客户端代码看起来像

// Result of one daemon-mediated transfer: the captured transfer body and
// curl's verbose log, as returned over the unix socket by the daemon.
class curl_daemon_response{
    public $stdout; // transfer body captured via CURLOPT_FILE on the daemon side
    public $stderr; // curl verbose output captured via CURLOPT_STDERR
}
/**
 * Runs a curl transfer through the local curl daemon and waits for the result.
 *
 * Wire format: request is [uint64 big-endian length][serialize()d CURLOPT_* array];
 * response is [uint64 len][stdout][uint64 len][stderr].
 *
 * @param array $curl_options CURLOPT_* => value map to execute on the daemon.
 * @return curl_daemon_response captured stdout/stderr of the transfer.
 * @throws \RuntimeException if the daemon socket is unreachable or closes early.
 */
function curl_daemon(array $curl_options):curl_daemon_response{
    $from_big_uint64_t=function(string $i): int {
        $arr = unpack ( 'Juint64_t', $i );
        return $arr ['uint64_t'];
    };
    $to_big_uint64_t=function(int $i): string {
        return pack ( 'J', $i );
    };
    // fread() on a socket may return fewer bytes than requested (and warns on
    // length 0) — loop until exactly $len bytes have arrived.
    $read_exact=function($conn, int $len): string {
        $buf = '';
        while (strlen($buf) < $len) {
            $chunk = fread($conn, $len - strlen($buf));
            if ($chunk === false || $chunk === '') {
                throw new \RuntimeException('curl daemon closed the connection mid-response');
            }
            $buf .= $chunk;
        }
        return $buf;
    };
    $conn = stream_socket_client("unix:///var/run/curl_daemon", $errno, $errstr, 3);
    if (!$conn) {
        // Bug fix: \RuntimeError does not exist in PHP — RuntimeException is the SPL class.
        throw new \RuntimeException("failed to connect to /var/run/curl_daemon! $errstr ($errno)");
    }
    stream_set_blocking($conn,true);
    $curl_options=serialize($curl_options);
    fwrite($conn,$to_big_uint64_t(strlen($curl_options)).$curl_options);
    $stdoutLen=$from_big_uint64_t($read_exact($conn,8));
    $stdout=$read_exact($conn,$stdoutLen);
    $stderrLen=$from_big_uint64_t($read_exact($conn,8));
    $stderr=$read_exact($conn,$stderrLen);
    $ret=new curl_daemon_response();
    $ret->stdout=$stdout;
    $ret->stderr=$stderr;
    fclose($conn);
    return $ret;
}

和守护进程看起来像

<?php
declare(strict_types=1);
const MAX_DOWNLOAD_SPEED=1000*1024; // global cap shared by all transfers: 1,024,000 bytes/sec (1000 KiB/s)
const MINIMUM_DOWNLOAD_SPEED=100; // per-handle floor: never throttle a single transfer below 100 bytes/sec
// Per-connection state for one request being serviced by the daemon.
class Client{
    public $id;        // sequential request number; also stored in CURLOPT_PRIVATE
    public $socket;    // unix-socket stream back to the requesting PHP process
    public $curl;      // curl easy handle performing this transfer
    public $arguments; // unserialize()d CURLOPT_* array received from the client
    public $stdout;    // tmpfile() stream capturing the transfer body (CURLOPT_FILE)
    public $stderr;    // tmpfile() stream capturing verbose output (CURLOPT_STDERR)
}
$clients=[]; // Client objects keyed by request id (mirrored in CURLOPT_PRIVATE)
$mh=curl_multi_init();
$srv = stream_socket_server("unix:///var/run/curl_daemon", $errno, $errstr);
if (!$srv) {
  // Bug fix: \RuntimeError does not exist in PHP — RuntimeException is the SPL class.
  throw new \RuntimeException("failed to create unix socket /var/run/curl_daemon! $errstr ($errno)");
}
stream_set_blocking($srv,false); // accept() must never block the transfer loop
while(true){
    getNewClients();
    $cc=count($clients);
    if(!$cc){
        sleep(1); // nothing to do.
        continue;
    }
    curl_multi_exec($mh, $running);
    if($running!==$cc){
        // at least 1 of the curls finished!
        while(false!==($info=curl_multi_info_read($mh))){
            // CURLOPT_PRIVATE carries the request id back out of the multi handle.
            $key=curl_getinfo($info['handle'],CURLINFO_PRIVATE);
            curl_multi_remove_handle($mh,$clients[$key]->curl);
            curl_close($clients[$key]->curl);
            // Re-read via the file path rather than the stream handle — see linked bug.
            $stdout=file_get_contents(stream_get_meta_data($clients[$key]->stdout)['uri']); // https://bugs.php.net/bug.php?id=76268
            fclose($clients[$key]->stdout);
            $stderr=file_get_contents(stream_get_meta_data($clients[$key]->stderr)['uri']); // https://bugs.php.net/bug.php?id=76268
            fclose($clients[$key]->stderr);
            $sock=$clients[$key]->socket;
            // Reply wire format: [uint64 len][stdout][uint64 len][stderr].
            fwrite($sock,to_big_uint64_t(strlen($stdout)).$stdout.to_big_uint64_t(strlen($stderr)).$stderr);
            fclose($sock);
            echo "finished request #{$key}!\n";
            unset($clients[$key],$key,$stdout,$stderr,$sock);
        }
        // Fewer active handles now — redistribute the global cap among survivors.
        updateSpeed();
    }
    curl_multi_select($mh);
}

/**
 * Re-divides the global download cap evenly across all active curl handles.
 *
 * Called whenever a client is added or removed. Uses a static cache of the
 * last applied per-handle speed to skip redundant curl_setopt() rounds.
 */
function updateSpeed(){
    global $clients;
    static $old_speed=-1;
    if(empty($clients)){
        return;
    }
    $clientsn=count($clients);
    // Even share of the cap, but never starve a transfer below the floor.
    // Cast to int: CURLOPT_MAX_RECV_SPEED_LARGE takes an integer, and it keeps
    // the === comparison with $old_speed type-stable (int division yields float).
    $per_handle_speed=(int)max(MINIMUM_DOWNLOAD_SPEED,(MAX_DOWNLOAD_SPEED/$clientsn));
    if($per_handle_speed===$old_speed){
        return;
    }
    $old_speed=$per_handle_speed;
    echo "new per handle speed: {$per_handle_speed} - clients: {$clientsn}\n";
    foreach($clients as $client){
        /** @var Client $client */
        // Bug fix: original read $per_hande_speed (typo — undefined variable),
        // so the rate limit was never actually applied to any handle.
        curl_setopt($client->curl,CURLOPT_MAX_RECV_SPEED_LARGE,$per_handle_speed);
    }
}


/**
 * Accepts every pending connection on the daemon socket, reads its serialized
 * CURLOPT_* array, registers a new curl handle for it, and rebalances speeds.
 *
 * Wire format per connection: [uint64 big-endian length][serialize()d options].
 */
function getNewClients(){
    global $clients,$srv,$mh;
    static $counter=-1;
    $newClients=false;
    // Timeout 0: drain every already-pending connection, then returns false.
    while(false!==($new=stream_socket_accept($srv,0))){
        ++$counter;
        $newClients=true;
        echo "new client! request #{$counter}\n";
        stream_set_blocking($new,true);
        $tmp=new Client();
        $tmp->id=$counter;
        $tmp->socket=$new;
        $tmp->curl=curl_init();
        $tmp->stdout=tmpfile();
        $tmp->stderr=tmpfile();
        $size=from_big_uint64_t(fread($new,8));
        $arguments=fread($new,$size);
        // Security: this payload arrives over a socket — forbid object injection.
        // Curl options are scalars/arrays, so allowed_classes=false is safe here.
        $arguments=unserialize($arguments,['allowed_classes'=>false]);
        assert(is_array($arguments));
        $tmp->arguments=$arguments;
        curl_setopt_array($tmp->curl,$arguments);
        curl_setopt_array($tmp->curl,array(
            CURLOPT_FILE=>$tmp->stdout,
            CURLOPT_STDERR=>$tmp->stderr,
            CURLOPT_VERBOSE=>1,
            CURLOPT_PRIVATE=>$counter
        ));
        // Bug fix: the original never stored the client, leaving $clients empty —
        // the main loop then saw count($clients)===0 forever and $clients[$key]
        // lookups on completion would have fataled.
        $clients[$counter]=$tmp;
        curl_multi_add_handle($mh,$tmp->curl);
    }
    if($newClients){
        updateSpeed();
    }
}

/**
 * Decodes an 8-byte big-endian (network order) unsigned integer into a PHP int.
 *
 * @param string $i exactly 8 bytes, most significant byte first.
 * @return int the decoded value.
 */
function from_big_uint64_t(string $i): int {
    $decoded = unpack('Jvalue', $i);
    return $decoded['value'];
}
/**
 * Encodes a PHP int as an 8-byte big-endian (network order) unsigned integer.
 *
 * @param int $i value to encode.
 * @return string exactly 8 bytes, most significant byte first.
 */
function to_big_uint64_t(int $i): string {
    $encoded = pack('J', $i);
    return $encoded;
}

注意：这是完全未经测试的代码，因为我的开发环境几个小时前坏掉了，以上所有代码都是我用 Notepad++ 写的。（我的开发环境是一个虚拟机，现在根本无法启动，不确定发生了什么，还没有修好。）

此外，这段代码完全没有针对大文件传输做优化。如果您需要支持大文件（大到不适合整个放进内存，例如千兆字节以上），请修改守护程序，让它返回结果文件的路径，而不是把全部数据写入 Unix 套接字。


推荐阅读