@@ -17,7 +17,7 @@ use DateTime::Format::HTTP();
 use CGI::Cookie ();
 use File::Basename();
 use IO::Compress::Gzip();
-use Time::HiRes qw{gettimeofday tv_interval};
+use Time::HiRes qw{gettimeofday tv_interval};
 use HTTP::Parser::XS qw{HEADERS_AS_HASHREF};
 use List::Util;
 
@@ -36,13 +36,13 @@ use Trog::Vars;
 
 # Import the routes
 
-my $conf = Trog::Config::get();
-my $data = Trog::Data->new($conf);
+my $conf = Trog::Config::get();
+my $data = Trog::Data->new($conf);
 my %roots = $data->routes();
 
 my %routes = %Trog::Routes::HTML::routes;
-@routes{keys(%Trog::Routes::JSON::routes)} = values(%Trog::Routes::JSON::routes);
-@routes{keys(%roots)} = values(%roots);
+@routes{ keys(%Trog::Routes::JSON::routes) } = values(%Trog::Routes::JSON::routes);
+@routes{ keys(%roots) } = values(%roots);
 
 my %aliases = $data->aliases();
 
@@ -70,23 +70,24 @@ If a path passed is not a defined route (or regex route), but exists as a file u
 =cut
 
 sub app {
+
     # Start the server timing clock
     my $start = [gettimeofday];
 
     my $env = shift;
 
-    return _toolong() if length($env->{REQUEST_URI}) > 2048;
+    return _toolong() if length( $env->{REQUEST_URI} ) > 2048;
 
     # Check eTags. If we don't know about it, just assume it's good and lazily fill the cache
     # XXX yes, this allows cache poisoning...but only for logged in users!
-    if ($env->{HTTP_IF_NONE_MATCH}) {
-        return [304, [], ['']] if $env->{HTTP_IF_NONE_MATCH} eq ($etags{$env->{REQUEST_URI}} || '');
-        $etags{$env->{REQUEST_URI}} = $env->{HTTP_IF_NONE_MATCH} unless exists $etags{$env->{REQUEST_URI}};
+    if ( $env->{HTTP_IF_NONE_MATCH} ) {
+        return [ 304, [], [''] ] if $env->{HTTP_IF_NONE_MATCH} eq ( $etags{ $env->{REQUEST_URI} } || '' );
+        $etags{ $env->{REQUEST_URI} } = $env->{HTTP_IF_NONE_MATCH} unless exists $etags{ $env->{REQUEST_URI} };
     }
 
     my $last_fetch = 0;
-    if ($env->{HTTP_IF_MODIFIED_SINCE}) {
-        $last_fetch = DateTime::Format::HTTP->parse_datetime($env->{HTTP_IF_MODIFIED_SINCE})->epoch();
+    if ( $env->{HTTP_IF_MODIFIED_SINCE} ) {
+        $last_fetch = DateTime::Format::HTTP->parse_datetime( $env->{HTTP_IF_MODIFIED_SINCE} )->epoch();
     }
 
     #XXX Don't use statics anything that has a search query
@@ -95,22 +96,22 @@ sub app {
     my $has_query = !!$env->{QUERY_STRING};
 
     my $query = {};
-    $query = URL::Encode::url_params_mixed($env->{QUERY_STRING}) if $env->{QUERY_STRING};
+    $query = URL::Encode::url_params_mixed( $env->{QUERY_STRING} ) if $env->{QUERY_STRING};
 
     #Actually parse the POSTDATA and dump it into the QUERY object if this is a POST
-    if ($env->{REQUEST_METHOD} eq 'POST') {
+    if ( $env->{REQUEST_METHOD} eq 'POST' ) {
 
         my $body = HTTP::Body->new( $env->{CONTENT_TYPE}, $env->{CONTENT_LENGTH} );
-        while ( $env->{'psgi.input'}->read(my $buf, $CHUNK_SIZE) ) {
+        while ( $env->{'psgi.input'}->read( my $buf, $CHUNK_SIZE ) ) {
             $body->add($buf);
         }
 
-        @$query{keys(%{$body->param})} = values(%{$body->param});
-        @$query{keys(%{$body->upload})} = values(%{$body->upload});
+        @$query{ keys( %{ $body->param } ) } = values( %{ $body->param } );
+        @$query{ keys( %{ $body->upload } ) } = values( %{ $body->upload } );
     }
 
     # Grab the list of ACLs we want to add to a post, if any.
-    $query->{acls} = [$query->{acls}] if ($query->{acls} && ref $query->{acls} ne 'ARRAY');
+    $query->{acls} = [ $query->{acls} ] if ( $query->{acls} && ref $query->{acls} ne 'ARRAY' );
 
     my $path = $env->{PATH_INFO};
     $path = '/index' if $path eq '/';
@@ -122,7 +123,7 @@ sub app {
     my $alist = $env->{HTTP_ACCEPT_ENCODING} || '';
     $alist =~ s/\s//g;
     my @accept_encodings;
-    @accept_encodings = split(/,/, $alist);
+    @accept_encodings = split( /,/, $alist );
     my $deflate = grep { 'gzip' eq $_ } @accept_encodings;
 
     # Collapse multiple slashes in the path
@@ -132,23 +133,23 @@ sub app {
     return $routes{default}{callback}->($query) unless -f "config/setup";
 
     my $cookies = {};
-    if ($env->{HTTP_COOKIE}) {
-        $cookies = CGI::Cookie->parse($env->{HTTP_COOKIE});
+    if ( $env->{HTTP_COOKIE} ) {
+        $cookies = CGI::Cookie->parse( $env->{HTTP_COOKIE} );
     }
 
     my $active_user = '';
-    if (exists $cookies->{tcmslogin}) {
-        $active_user = Trog::Auth::session2user($cookies->{tcmslogin}->value);
+    if ( exists $cookies->{tcmslogin} ) {
+        $active_user = Trog::Auth::session2user( $cookies->{tcmslogin}->value );
     }
     $query->{user_acls} = [];
     $query->{user_acls} = Trog::Auth::acls4user($active_user) // [] if $active_user;
 
     # Filter out passed ACLs which are naughty
-    my $is_admin = grep { $_ eq 'admin' } @{$query->{user_acls}};
-    @{$query->{acls}} = grep { $_ ne 'admin' } @{$query->{acls}} unless $is_admin;
+    my $is_admin = grep { $_ eq 'admin' } @{ $query->{user_acls} };
+    @{ $query->{acls} } = grep { $_ ne 'admin' } @{ $query->{acls} } unless $is_admin;
 
     # Disallow any paths that are naughty ( starman auto-removes .. up-traversal)
-    if (index($path,'/templates') == 0 || index($path, '/statics') == 0 || $path =~ m/.*(\.psgi|\.pm)$/i ) {
+    if ( index( $path, '/templates' ) == 0 || index( $path, '/statics' ) == 0 || $path =~ m/.*(\.psgi|\.pm)$/i ) {
         return _forbidden($query);
     }
 
@@ -158,9 +159,9 @@ sub app {
     # If we have a static render, just use it instead (These will ALWAYS be correct, data saves invalidate this)
     # TODO: make this key on admin INSTEAD of active user when we add non-admin users.
     $query->{start} = $start;
-    if (!$active_user && !$has_query) {
-        return _static("$path.z",$start, $streaming) if -f "www/statics/$path.z" && $deflate;
-        return _static($path,$start, $streaming) if -f "www/statics/$path";
+    if ( !$active_user && !$has_query ) {
+        return _static( "$path.z", $start, $streaming ) if -f "www/statics/$path.z" && $deflate;
+        return _static( $path, $start, $streaming ) if -f "www/statics/$path";
     }
 
     # Handle HTTP range/streaming requests
@@ -169,24 +170,28 @@ sub app {
     my @ranges;
     if ($range) {
         $range =~ s/bytes=//g;
-        push(@ranges, map {
-            [split(/-/, $_)];
-            #$tuples[1] //= $tuples[0] + $CHUNK_SIZE;
-            #\@tuples
-        } split(/,/, $range) );
+        push(
+            @ranges,
+            map {
+                [ split( /-/, $_ ) ];
+
+                #$tuples[1] //= $tuples[0] + $CHUNK_SIZE;
+                #\@tuples
+            } split( /,/, $range )
+        );
     }
 
-    return _serve("www/$path", $start, $streaming, \@ranges, $last_fetch, $deflate) if -f "www/$path";
-    return _serve("totp/$path", $start, $streaming, \@ranges, $last_fetch, $deflate) if -f "totp/$path" && $active_user;
+    return _serve( "www/$path", $start, $streaming, \@ranges, $last_fetch, $deflate ) if -f "www/$path";
+    return _serve( "totp/$path", $start, $streaming, \@ranges, $last_fetch, $deflate ) if -f "totp/$path" && $active_user;
 
     #Handle regex/capture routes
-    if (!exists $routes{$path}) {
+    if ( !exists $routes{$path} ) {
         my @captures;
-        foreach my $pattern (keys(%routes)) {
+        foreach my $pattern ( keys(%routes) ) {
             @captures = $path =~ m/^$pattern$/;
             if (@captures) {
                 $path = $pattern;
-                foreach my $field (@{$routes{$path}{captures}}) {
+                foreach my $field ( @{ $routes{$path}{captures} } ) {
                     $routes{$path}{data} //= {};
                     $routes{$path}{data}{$field} = shift @captures;
                 }
@@ -199,10 +204,10 @@ sub app {
     $query->{user} = $active_user;
 
     return _forbidden($query) if $routes{$path}{auth} && !$active_user;
-    return _notfound($query) unless exists $routes{$path};
-    return _badrequest($query) unless grep { $env->{REQUEST_METHOD} eq $_ } ($routes{$path}{method} || '','HEAD');
+    return _notfound($query) unless exists $routes{$path};
+    return _badrequest($query) unless grep { $env->{REQUEST_METHOD} eq $_ } ( $routes{$path}{method} || '', 'HEAD' );
 
-    @{$query}{keys(%{$routes{$path}{'data'}})} = values(%{$routes{$path}{'data'}}) if ref $routes{$path}{'data'} eq 'HASH' && %{$routes{$path}{'data'}};
+    @{$query}{ keys( %{ $routes{$path}{'data'} } ) } = values( %{ $routes{$path}{'data'} } ) if ref $routes{$path}{'data'} eq 'HASH' && %{ $routes{$path}{'data'} };
 
     #Set various things we don't want overridden
     $query->{body} = '';
@@ -219,19 +224,20 @@ sub app {
     {
         no strict 'refs';
         my $output = $routes{$path}{callback}->($query);
+
         # Append server-timing headers
         my $tot = tv_interval($start) * 1000;
-        push(@{$output->[1]}, 'Server-Timing' => "app;dur=$tot");
+        push( @{ $output->[1] }, 'Server-Timing' => "app;dur=$tot" );
         return $output;
     }
-};
+}
 
-sub _generic($type, $query) {
-    return _static("$type.z",$query->{start}, $query->{streaming}) if -f "www/statics/$type.z";
-    return _static($type, $query->{start}, $query->{streaming}) if -f "www/statics/$type";
+sub _generic ( $type, $query ) {
+    return _static( "$type.z", $query->{start}, $query->{streaming} ) if -f "www/statics/$type.z";
+    return _static( $type, $query->{start}, $query->{streaming} ) if -f "www/statics/$type";
     my %lookup = (
-        notfound => \&Trog::Routes::HTML::notfound,
-        forbidden => \&Trog::Routes::HTML::forbidden,
+        notfound  => \&Trog::Routes::HTML::notfound,
+        forbidden => \&Trog::Routes::HTML::forbidden,
         badrequest => \&Trog::Routes::HTML::badrequest,
         toolong => \&Trog::Routes::HTML::toolong,
     );
@@ -239,38 +245,39 @@ sub _generic($type, $query) {
 }
 
 sub _notfound ($query) {
-    return _generic('notfound', $query);
+    return _generic( 'notfound', $query );
 }
 
-sub _forbidden($query) {
-    return _generic('forbidden', $query);
+sub _forbidden ($query) {
+    return _generic( 'forbidden', $query );
 }
 
-sub _badrequest($query) {
-    return _generic('badrequest', $query);
+sub _badrequest ($query) {
+    return _generic( 'badrequest', $query );
 }
 
 sub _toolong() {
-    return _generic('toolong', {});
+    return _generic( 'toolong', {} );
 }
 
-sub _static($path,$start,$streaming,$last_fetch=0) {
+sub _static ( $path, $start, $streaming, $last_fetch = 0 ) {
 
     # XXX because of psgi I can't just vomit the file directly
-    if (open(my $fh, '<', "www/statics/$path")) {
+    if ( open( my $fh, '<', "www/statics/$path" ) ) {
         my $headers = '';
+
         # NOTE: this is relying on while advancing the file pointer
         while (<$fh>) {
             last if $_ eq "\n";
             $headers .= $_;
         }
-        my(undef, undef, $status, undef, $headers_parsed) = HTTP::Parser::XS::parse_http_response("$headers\n", HEADERS_AS_HASHREF);
+        my ( undef, undef, $status, undef, $headers_parsed ) = HTTP::Parser::XS::parse_http_response( "$headers\n", HEADERS_AS_HASHREF );
 
         #XXX need to put this into the file itself
-        my $mt = (stat($fh))[9];
-        my @gm = gmtime($mt);
+        my $mt = ( stat($fh) )[9];
+        my @gm = gmtime($mt);
         my $now_string = strftime( "%a, %d %b %Y %H:%M:%S GMT", @gm );
-        my $code = $mt > $last_fetch ? $status : 304;
+        my $code = $mt > $last_fetch ? $status : 304;
         $headers_parsed->{"Last-Modified"} = $now_string;
 
         # Append server-timing headers
@@ -284,44 +291,48 @@ sub _static($path,$start,$streaming,$last_fetch=0) {
 
         return sub {
             my $responder = shift;
+
             #push(@headers, 'Content-Length' => $sz);
-            my $writer = $responder->([ $code, [%$headers_parsed]]);
-            while ( $fh->read( my $buf, $CHUNK_SIZE) ) {
+            my $writer = $responder->( [ $code, [%$headers_parsed] ] );
+            while ( $fh->read( my $buf, $CHUNK_SIZE ) ) {
                 $writer->write($buf);
             }
             close $fh;
             $writer->close;
-        } if $streaming;
+        }
+          if $streaming;
 
-        return [$code, [%$headers_parsed], $fh];
+        return [ $code, [%$headers_parsed], $fh ];
     }
-    return [ 403, ['Content-Type' => $Trog::Vars::content_types{plain}], ["STAY OUT YOU RED MENACE"]];
+    return [ 403, [ 'Content-Type' => $Trog::Vars::content_types{plain} ], ["STAY OUT YOU RED MENACE"] ];
 }
 
-sub _range ($fh, $ranges, $sz, %headers) {
+sub _range ( $fh, $ranges, $sz, %headers ) {
+
     # Set mode
-    my $primary_ct = "Content-Type: $headers{'Content-type'}";
+    my $primary_ct = "Content-Type: $headers{'Content-type'}";
     my $is_multipart = scalar(@$ranges) > 1;
-    if ( $is_multipart ) {
+    if ($is_multipart) {
         $headers{'Content-type'} = "multipart/byteranges; boundary=$CHUNK_SEP";
     }
     my $code = 206;
 
     my $fc = '';
+
     # Calculate the content-length up-front. We have to fix unspecified lengths first, and reject bad requests.
     foreach my $range (@$ranges) {
-        $range->[1] //= $sz-1;
-        return [416, [%headers], ["Requested range not satisfiable"]] if $range->[0] > $sz || $range->[0] < 0 || $range->[1] < 0 || $range->[0] > $range->[1];
+        $range->[1] //= $sz - 1;
+        return [ 416, [%headers], ["Requested range not satisfiable"] ] if $range->[0] > $sz || $range->[0] < 0 || $range->[1] < 0 || $range->[0] > $range->[1];
     }
-    $headers{'Content-Length'} = List::Util::sum(map { my $arr=$_; $arr->[1]+1, -$arr->[0] } @$ranges);
+    $headers{'Content-Length'} = List::Util::sum( map { my $arr = $_; $arr->[1] + 1, -$arr->[0] } @$ranges );
 
     #XXX Add the entity header lengths to the value - should hash-ify this to DRY
     if ($is_multipart) {
         foreach my $range (@$ranges) {
-            $headers{'Content-Length'} += length("$fc--$CHUNK_SEP\n$primary_ct\nContent-Range: bytes $range->[0]-$range->[1]/$sz\n\n" );
+            $headers{'Content-Length'} += length("$fc--$CHUNK_SEP\n$primary_ct\nContent-Range: bytes $range->[0]-$range->[1]/$sz\n\n");
             $fc = "\n";
         }
-        $headers{'Content-Length'} += length( "\n--$CHUNK_SEP\--\n" );
+        $headers{'Content-Length'} += length("\n--$CHUNK_SEP\--\n");
         $fc = '';
     }
 
@@ -331,76 +342,79 @@ sub _range ($fh, $ranges, $sz, %headers) {
 
         foreach my $range (@$ranges) {
             $headers{'Content-Range'} = "bytes $range->[0]-$range->[1]/$sz" unless $is_multipart;
-            $writer //= $responder->([ $code, [%headers]]);
-            $writer->write( "$fc--$CHUNK_SEP\n$primary_ct\nContent-Range: bytes $range->[0]-$range->[1]/$sz\n\n" ) if $is_multipart;
+            $writer //= $responder->( [ $code, [%headers] ] );
+            $writer->write("$fc--$CHUNK_SEP\n$primary_ct\nContent-Range: bytes $range->[0]-$range->[1]/$sz\n\n") if $is_multipart;
             $fc = "\n";
 
-            my $len = List::Util::min($sz,$range->[1]+1) - $range->[0];
+            my $len = List::Util::min( $sz, $range->[1] + 1 ) - $range->[0];
 
-            $fh->seek( $range->[0], 0);
+            $fh->seek( $range->[0], 0 );
             while ($len) {
-                $fh->read(my $buf, List::Util::min($len,$CHUNK_SIZE) );
+                $fh->read( my $buf, List::Util::min( $len, $CHUNK_SIZE ) );
                 $writer->write($buf);
 
                 # Adjust for amount written
-                $len = List::Util::max($len - $CHUNK_SIZE, 0);
+                $len = List::Util::max( $len - $CHUNK_SIZE, 0 );
             }
         }
         $fh->close();
-        $writer->write( "\n--$CHUNK_SEP\--\n" ) if $is_multipart;
+        $writer->write("\n--$CHUNK_SEP\--\n") if $is_multipart;
         $writer->close;
     };
 }
 
-sub _serve ($path, $start, $streaming, $ranges, $last_fetch=0, $deflate=0) {
-    my $mf = Mojo::File->new($path);
-    my $ext = '.'.$mf->extname();
+sub _serve ( $path, $start, $streaming, $ranges, $last_fetch = 0, $deflate = 0 ) {
+    my $mf = Mojo::File->new($path);
+    my $ext = '.' . $mf->extname();
     my $ft;
     if ($ext) {
         $ft = Plack::MIME->mime_type($ext) if $ext;
-        $ft ||= $extra_types{$ext} if exists $extra_types{$ext};
+        $ft ||= $extra_types{$ext} if exists $extra_types{$ext};
     }
     $ft ||= $Trog::Vars::content_types{plain};
 
-    my $ct = 'Content-type';
-    my @headers = ($ct => $ft);
+    my $ct = 'Content-type';
+    my @headers = ( $ct => $ft );
+
     #TODO use static Cache-Control for everything but JS/CSS?
-    push(@headers,'Cache-control' => $Trog::Vars::cache_control{revalidate});
+    push( @headers, 'Cache-control' => $Trog::Vars::cache_control{revalidate} );
 
-    push(@headers,'Accept-Ranges' => 'bytes');
+    push( @headers, 'Accept-Ranges' => 'bytes' );
 
-    my $mt = (stat($path))[9];
-    my $sz = (stat(_))[7];
-    my @gm = gmtime($mt);
+    my $mt = ( stat($path) )[9];
+    my $sz = ( stat(_) )[7];
+    my @gm = gmtime($mt);
     my $now_string = strftime( "%a, %d %b %Y %H:%M:%S GMT", @gm );
-    my $code = $mt > $last_fetch ? 200 : 304;
+    my $code = $mt > $last_fetch ? 200 : 304;
 
-    push(@headers, "Last-Modified" => $now_string);
-    push(@headers, 'Vary' => 'Accept-Encoding');
+    push( @headers, "Last-Modified" => $now_string );
+    push( @headers, 'Vary' => 'Accept-Encoding' );
 
-    if (open(my $fh, '<', $path)) {
-        return _range($fh, $ranges, $sz, @headers) if @$ranges && $streaming;
+    if ( open( my $fh, '<', $path ) ) {
+        return _range( $fh, $ranges, $sz, @headers ) if @$ranges && $streaming;
 
         # Transfer-encoding: chunked
         return sub {
            my $responder = shift;
-            push(@headers, 'Content-Length' => $sz);
-            my $writer = $responder->([ $code, \@headers]);
-            while ( $fh->read( my $buf, $CHUNK_SIZE) ) {
+            push( @headers, 'Content-Length' => $sz );
+            my $writer = $responder->( [ $code, \@headers ] );
+            while ( $fh->read( my $buf, $CHUNK_SIZE ) ) {
                $writer->write($buf);
            }
            close $fh;
            $writer->close;
-        } if $streaming && $sz > $CHUNK_SIZE;
+        }
+          if $streaming && $sz > $CHUNK_SIZE;
 
         #Return data in the event the caller does not support deflate
-        if (!$deflate) {
+        if ( !$deflate ) {
            push( @headers, "Content-Length" => $sz );
+
            # Append server-timing headers
            my $tot = tv_interval($start) * 1000;
-            push(@headers, 'Server-Timing' => "file;dur=$tot");
+            push( @headers, 'Server-Timing' => "file;dur=$tot" );
 
-            return [ $code, \@headers, $fh];
+            return [ $code, \@headers, $fh ];
        }
 
        #Compress everything less than 1MB
@@ -412,12 +426,12 @@ sub _serve ($path, $start, $streaming, $ranges, $last_fetch=0, $deflate=0) {
 
         # Append server-timing headers
         my $tot = tv_interval($start) * 1000;
-        push(@headers, 'Server-Timing' => "file;dur=$tot");
+        push( @headers, 'Server-Timing' => "file;dur=$tot" );
 
-        return [ $code, \@headers, [$dfh]];
+        return [ $code, \@headers, [$dfh] ];
     }
 
-    return [ 403, [$ct => $Trog::Vars::content_types{plain}], ["STAY OUT YOU RED MENACE"]];
+    return [ 403, [ $ct => $Trog::Vars::content_types{plain} ], ["STAY OUT YOU RED MENACE"] ];
 }
 
 1;
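
Below is a minimal smoke-test sketch, not part of the patch above, showing one way the tidied app() entry point could be exercised with Plack::Test. The TCMS module name, the request paths, and the expected behaviour are assumptions for illustration only; they are not taken from this diff.

#!/usr/bin/env perl
# Hypothetical smoke test for the PSGI entry point formatted above.
# Assumes the application module is TCMS.pm providing app(), and that
# Plack::Test and HTTP::Request::Common are installed.
use strict;
use warnings;

use Plack::Test;
use HTTP::Request::Common qw{GET};
use TCMS;

my $test = Plack::Test->create( \&TCMS::app );

# Request URIs longer than 2048 bytes should short-circuit into _toolong().
my $res = $test->request( GET '/' . ( 'x' x 3000 ) );
print 'oversized URI status: ', $res->code, "\n";

# A normal request should carry the Server-Timing header appended in app().
$res = $test->request( GET '/index' );
print 'index status:  ', $res->code, "\n";
print 'server-timing: ', ( $res->header('Server-Timing') // '(none)' ), "\n";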