Re: apache problems with number of cgi requests

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

 



Digvijoy Chatterjee wrote:
So there are two scripts I am using as a test. With Firefox 3.0.8, launch
test.cgi and you can see it provides a link to another cgi script
which sleeps for 60 seconds.
I launch this link again and again in a new tab/window, but at any given
time only one process has been started by Apache on the host.

And what happens when you do this from 2 *different* workstations at the same time, or, on the same workstation, with 2 entirely different browsers (I mean 1 x Firefox and 1 x IE for instance) ?

The point is :
A browser may open one single connection to the server, and use that same connection to send several requests. That is what is called "keepalive" connections. On the server side, this results in this one connection being handed over to one single Apache "child", and this single child keeps the connection open and processes all requests on that connection, one after the other. No matter how many windows or tabs you open in the same browser, it *may* still be using only that one connection, for all its windows or tabs. But two different browsers (or the same browser on two different workstations) will not share one connection.

Since you are anyway using perl for your cgi scripts, why don't you create a simple perl script, using the LWP module, to make the requests to Apache ? You could then run several instances of this script in different console windows at the same time, and check the results.

Attached is such a script, but I don't know if it will make it to the list. If not, ask and I will send it to you privately.

There also exist lots of other possibilities, such as creating a small shell script which calls wget or curl repeatedly, and run that script in several consoles.
You can also, I believe, use the "ab" program which comes with Apache.
See here : http://httpd.apache.org/docs/2.2/programs/

#!/usr/bin/perl
#
# HTTPchecker.pl : stand-alone PROGRAM to check/exercise HTTP links and produce a trace file
#
#       Initial version : AW 2009/01/28
#
# Use "(perl) HTTPchecker.pl -h" for help
#
use warnings;
use strict;
no strict 'refs';
use FileHandle;
use File::Basename;
use Getopt::Std;
use MIME::Base64;
use LWP::UserAgent;
use LWP::Debug;
use Time::HiRes qw( gettimeofday tv_interval );


# Program name (basename of $0 with any .pl/.exe suffix stripped); used in
# usage() output and in the HTTP User-Agent string.
our $PROGRAM = File::Basename::basename($0,"\.pl","\.exe");
# $VERSION comes from an RCS/CVS "$Revision$" keyword; when the keyword was
# never expanded (length <= 8) fall back to a development placeholder.
our $VERSION = q$Revision$; $VERSION = length($VERSION) > 8 ? substr $VERSION,10 : "0.0001(dev)";
our $DEBUG = 0; # set to 0 for normal processing
# Flag set by the SIGINT handler (SIG_processor) to make the main loop stop.
our $STOP_REQUEST = 0;
# -A : when 1, log every request, not only the failing ones.
our $OUTPUT_ALL = 0;
# -e : when 1, proxy settings were picked up from the environment.
our $PROXY_ENV = 0;
# Proxy URL and optional credentials (from http_proxy / http_proxy_user /
# http_proxy_pw environment variables when -e is given).
our $HTTP_PROXY = '';
our $HTTP_PROXY_USER = '';
our $HTTP_PROXY_PW = '';
# Domains that must bypass the proxy (no_proxy environment variable).
our $NO_PROXY = '';
# -t : per-request timeout in seconds.
our $HTTP_TIMEOUT = 30;
# -r : number of times each URL is requested.
our $REPEAT_URL = 1;
# -L : number of passes over the input file (0 = loop continuously).
our $MAX_LOOPS = 1;
# -w : seconds to sleep between requests.
our $PAUSE_URL = 0;
# -p : when 1, reuse one keep-alive connection for repeated requests.
our $KEEP_ALIVE = 0;

#
################  Parse command line switches  #####################
#
sub usage {
#########
# Print the program's usage/help text (name, version, options) to STDOUT.
# NOTE: everything between <<EOU and EOU is a literal heredoc printed
# verbatim to the user -- do not put comments inside it.
        print <<EOU;
$PROGRAM v $VERSION
Usage : [perl] $PROGRAM.pl [options] input_file
            (output = STDOUT)
options :
        -h : print this help message and exit
        -d level : set logging level 0-5
        -A : output for all links, not just errors
        -t n : set timeout to n sec. (default = 30)
        -L n : stop after n loops through input file
                (default = 1, 0 = loop continuously)
        -e : pick up proxy settings from environment
        -r n : repeat each URL request n times (default = 1 time)
        -p : use KeepAlive connection for repeated URLs (faster)
        -w n : wait n sec. between each request (default = 0)

EOU
}
    my (%Switches,$val);
    # BUGFIX: the original spec "d:L:t:r:w:hA" omitted "e" and "p", so the
    # documented -e and -p switches were rejected by getopts() even though
    # they are handled below; both flags are included now.
    unless (getopts("d:L:t:r:w:hAep", \%Switches)) {
        usage();
        exit;
    }
    if (exists($Switches{"h"})) { # -h : print name, version, and help
        usage();
        exit;
    }
    if (exists($Switches{"d"})) { # -d : set debug/logging level
        $DEBUG = $Switches{d};
        # anything that is not a digit string in 0..5 falls back to 0
        $DEBUG = 0 unless (($DEBUG =~ /\d+/) && ($DEBUG >= 0) && ($DEBUG <= 5));
    }
    if (exists($Switches{"L"})) { # -L : number of passes over the input file
        $val = $Switches{L};
        unless (($val =~ /\d+/) && ($val >= 0)) {
            usage();
            exit 1;
        }
        $MAX_LOOPS = $val;
    }
    if (exists($Switches{"A"})) { # -A : output all links, even correct ones
        $OUTPUT_ALL = 1;
    }
    if (exists($Switches{"e"})) { # -e : pick up proxy settings from environment
        $PROXY_ENV = 1;
        if (exists($ENV{"http_proxy"})) {
            $HTTP_PROXY = $ENV{"http_proxy"};
            # optional credentials for an authenticating proxy
            if (exists($ENV{"http_proxy_user"})) {
                $HTTP_PROXY_USER = $ENV{"http_proxy_user"};
            }
            if (exists($ENV{"http_proxy_pw"})) {
                $HTTP_PROXY_PW = $ENV{"http_proxy_pw"};
            }
        }
        if (exists($ENV{"no_proxy"})) {
            $NO_PROXY = $ENV{"no_proxy"};
        }
    }
    if (exists($Switches{"t"})) { # -t : HTTP timeout, clamped to 1..180 sec
        $HTTP_TIMEOUT = $Switches{t};
        # but be reasonable ...
        $HTTP_TIMEOUT = 1   unless ($HTTP_TIMEOUT > 1);
        $HTTP_TIMEOUT = 180 unless ($HTTP_TIMEOUT < 180);
    }
    if (exists($Switches{"r"})) { # -r : repeat each URL n times (1..9999)
        $val = $Switches{r};
        $REPEAT_URL = $val if (($val =~ /\d+/) && ($val > 0) && ($val < 10000));
    }
    if (exists($Switches{"w"})) { # -w : pause n seconds between requests
        $val = $Switches{w};
        $PAUSE_URL = $val if (($val =~ /\d+/) && ($val > 0) && ($val < 3600));
    }
    if (exists($Switches{"p"})) { # -p : keep-alive connection for repeats
        $KEEP_ALIVE = 1;
    }
    #
    # input file (list of URLs, one per line) must be there
    #
    my $InFile = $ARGV[0];
    unless (defined($InFile)) {
        # error type 1 : no argument (die terminates; the original's
        # "exit 1" after die was unreachable and has been removed)
        usage();
        die "Missing input file argument !";
    }
    unless (open(INFILE,'<',$InFile)) {
        # error type 2 : input file does not exist or not readable
        die "Input file [$InFile] cannot be opened : $!";
    }

    # Quiet LWP logging by default; enable debug tracing at level 3+
    LWP::Debug::level('-');
    if ($DEBUG > 2) {
        LWP::Debug::level('+','-debug');
    }

    my $UA;
    my $Proxy_Auth = '';
    # With -p and -r > 1, keep one connection alive across the repeats so
    # they exercise Apache's keepalive behaviour; otherwise one connection
    # per request.
    if ($KEEP_ALIVE && ($REPEAT_URL > 1)) {
        $UA = LWP::UserAgent->new(keep_alive => $REPEAT_URL);
    } else {
        $UA = LWP::UserAgent->new(keep_alive => 0);
    }
    $UA->agent("$PROGRAM/$VERSION " . $UA->agent);
    if ($HTTP_PROXY ne '') {
        $UA->proxy(http => $HTTP_PROXY);
        if ($HTTP_PROXY_USER ne '') {
            # Pre-compute the Proxy-Authorization header value (Basic scheme)
            $Proxy_Auth = "Basic " . MIME::Base64::encode("${HTTP_PROXY_USER}:${HTTP_PROXY_PW}", "");
        }
    }
    if ($NO_PROXY ne '') {
        # BUGFIX: qw() does not interpolate, so qw($NO_PROXY) passed the
        # literal string '$NO_PROXY'. Split the comma-separated env value
        # into the domain list no_proxy() expects.
        $UA->no_proxy(split(/\s*,\s*/, $NO_PROXY));
    }

    my $Loops = 999999; # effectively "forever" when -L 0 was given
    $Loops = $MAX_LOOPS if $MAX_LOOPS; # a non-zero -L overrides

    # Set up handler for interrupt signals
    # Note : the following catches "CTRL-C" when in console mode
    $SIG{INT} = \&SIG_processor;   # install handler

    # Run statistics
    my $TOTAL_reqs = 0;
    my $TOTAL_time = 0;
    my $TOTAL_req_errors = 0;
    my $TOTAL_req_success = 0;
    my $TOTAL_time_success = 0;

    # Send STDERR (LWP debug output) to the same stream as the log output;
    # three-arg dup form replaces the old open(STDERR,'>&1').
    open(STDERR, '>&', \*STDOUT) or die "could not dup STDOUT";
MAIN: while ($Loops) {
    last MAIN if $STOP_REQUEST;
    seek(INFILE,0,0); # rewind so each pass re-reads the whole URL file
    my $URLLine;
    INP: while (defined($URLLine = <INFILE>)) {
        last MAIN if $STOP_REQUEST;
        chomp $URLLine;
        $URLLine =~ s/^\s+//; $URLLine =~ s/\s+$//; # trim surrounding blanks
        next INP if $URLLine eq ''; # skip blank lines
        next INP if $URLLine =~ m/^#/; # skip comments
        # if there is no "scheme", assume http
        # BUGFIX: the original read '"http://"; . $URLLine' -- the stray
        # semicolon made this a syntax error; concatenation was intended.
        unless ($URLLine =~ m#^(\w+)://#) {
            $URLLine = "http://" . $URLLine;
        }

        my $req = HTTP::Request->new(GET => $URLLine);
        if ($Proxy_Auth ne '') {
            # BUGFIX: $$Proxy_Auth symbolically dereferenced a plain string;
            # the header value is the string itself.
            $req->header('Proxy-Authorization' => $Proxy_Auth);
        }
        $UA->timeout($HTTP_TIMEOUT); # set time-out
        #$UA->max_size($X::MAX_RESP_SIZE); # set maximum response size

        URL: for (my $rep = 0; $rep < $REPEAT_URL; $rep++) {
            last MAIN if $STOP_REQUEST;
            # process 1 URL, repeatedly if requested
            my $t0 = [gettimeofday()];
            $TOTAL_reqs++;
            # use simple_request() so redirects are reported, not followed
            my $res = $UA->simple_request($req); # send request, wait for result
            my $t1 = tv_interval($t0,[gettimeofday()]);
            $TOTAL_time += $t1;

            # 1xx/2xx/3xx all count as "OK"; only the log annotation differs
            # (the three formerly-duplicated branches are consolidated here).
            if ($res->is_success || $res->is_redirect || $res->is_info) {
                if ($OUTPUT_ALL) {
                    if ($res->is_success) {
                        my $size = length($res->content);
                        log_msg("GET [$URLLine] OK [$t1] [s=$size]");
                    } elsif ($res->is_redirect) {
                        log_msg("GET [$URLLine] OK (redirect) [$t1]");
                    } else {
                        log_msg("GET [$URLLine] OK (info) [$t1]");
                    }
                }
                $TOTAL_req_success++;
                $TOTAL_time_success += $t1;
                sleep $PAUSE_URL if $PAUSE_URL;
                next URL;
            }
            # else we have an error
            $TOTAL_req_errors++;
            my $status = $res->status_line;
            chomp($status);
            log_msg("** GET [$URLLine] NOT OK [$status] [$t1]");

            # repeat the failing request once more with full LWP tracing,
            # so the reason for the failure shows up in the output
            LWP::Debug::level('+debug','+conn');
            log_msg("  repeat GET [$URLLine], with debug info");
            # use simple_request() to skip redirects etc..
            $res = $UA->simple_request($req); # send request, wait for result
            $status = $res->status_line;
            chomp($status);
            log_msg("  repeat status [$status]");
            # restore the normal (quiet, or -d level) logging
            LWP::Debug::level('-');
            if ($DEBUG > 2) {
                LWP::Debug::level('+','-debug');
            }
            sleep $PAUSE_URL if $PAUSE_URL;
            next URL;

        } # end URL

    } # end INP

    $Loops--;
} # end MAIN

    close INFILE;

    # print final stats
    # BUGFIX: "succesful" -> "successful" in the two report lines below
    print "Total requests : ",$TOTAL_reqs,"\n";
    print "Total time : ",$TOTAL_time,"\n";
    print "Total requests with errors : ",$TOTAL_req_errors,"\n";
    print "Total successful requests : ",$TOTAL_req_success,"\n";
    if ($TOTAL_req_success) {
        print "Average time per successful request : ",$TOTAL_time_success/$TOTAL_req_success,"\n";
    }
    exit 0;

sub log_msg {
# Emit one timestamped line to STDOUT: "YYYY/MM/DD-HH:MM:SS> <message...>".
# All arguments are concatenated, unseparated, after the "> " marker.
    my @now = localtime(time);
    my $stamp = sprintf("%04d/%02d/%02d-%02d:%02d:%02d",
                        $now[5] + 1900, $now[4] + 1, $now[3],
                        $now[2], $now[1], $now[0]);
    print $stamp, "> ", @_, "\n";
}
sub SIG_processor {
# Signal handler: restore the default disposition for the signal that was
# received, then raise $STOP_REQUEST so the main loop winds down cleanly
# at its next check (a nice way to tell the daemon to stop).
# Note : as of ActivePerl build 518, this doesn't work under NT
    my ($signame) = @_;
    $SIG{$signame} = 'DEFAULT';   # further signals get default behaviour
    $STOP_REQUEST  = 1;           # picked up by the MAIN/INP/URL loops
}

---------------------------------------------------------------------
The official User-To-User support forum of the Apache HTTP Server Project.
See <URL:http://httpd.apache.org/userslist.html> for more info.
To unsubscribe, e-mail: users-unsubscribe@xxxxxxxxxxxxxxxx
   "   from the digest: users-digest-unsubscribe@xxxxxxxxxxxxxxxx
For additional commands, e-mail: users-help@xxxxxxxxxxxxxxxx

[Index of Archives]     [Open SSH Users]     [Linux ACPI]     [Linux Kernel]     [Linux Laptop]     [Kernel Newbies]     [Security]     [Netfilter]     [Bugtraq]     [Squid]     [Yosemite News]     [MIPS Linux]     [ARM Linux]     [Linux Security]     [Linux RAID]     [Samba]     [Video 4 Linux]     [Device Mapper]

  Powered by Linux