@@ -17,7 +17,6 @@ use parent qw{Trog::DataModule};
 
 our $datastore = 'data/files';
 
 sub lang { 'Perl Regex in Quotemeta' }
 
 sub help { 'https://perldoc.perl.org/functions/quotemeta.html' }
 
-our @index;
 
 =head1 Trog::Data::FlatFile
 
@@ -29,41 +28,74 @@ You can only post once per second due to it storing each post as a file named af
 
 our $parser = JSON::MaybeXS->new();
 
 sub read ($self, $query={}) {
-    @index = $self->_index() unless @index;
+    #Optimize direct ID
+    my @index;
+    if ($query->{id}) {
+        @index = ("$datastore/$query->{id}");
+    } else {
+        @index = $self->_index();
+    }
+    $query->{limit} //= 25;
+
     my @items;
     foreach my $item (@index) {
+        next unless -f $item;
         my $slurped = File::Slurper::read_text($item);
         my $parsed = $parser->decode($slurped);
-        push(@items,$parsed) if $self->filter($query,$parsed);
+
+        #XXX this imposes an inefficiency in itself, get() will filter uselessly again here
+        my @filtered = $self->filter($query,@$parsed);
+
+        push(@items,@filtered) if @filtered;
         last if scalar(@items) == $query->{limit};
     }
+
     return \@items;
 }
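
A usage sketch of the reworked read(), assuming an already-constructed Trog::Data::FlatFile instance in $data; the id value is illustrative and not part of the patch:

    # Passing an id skips the directory scan and reads "$datastore/$id" directly;
    # the -f guard above means a missing file simply yields an empty result set.
    my $one = $data->read({ id => 1636142410, limit => 1 });

    # Without an id, every file returned by _index() is scanned,
    # and limit defaults to 25 when the caller omits it.
    my $page = $data->read({ limit => 10 });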
 
 sub _index ($self) {
-    return @index if @index;
     confess "Can't find datastore!" unless -d $datastore;
     opendir(my $dh, $datastore) or confess;
-    @index = grep { -f } map { "$datastore/$_" } readdir $dh;
+    my @index = grep { -f } map { "$datastore/$_" } readdir $dh;
     closedir $dh;
     return sort { $b cmp $a } @index;
 }
 
 sub write($self,$data) {
-    my $file = "$datastore/$data->{created}";
-    open(my $fh, '>', $file) or confess;
-    print $fh $parser->encode($data);
-    close $fh;
+    foreach my $post (@$data) {
+        my $file = "$datastore/$post->{id}";
+        my $update = [$post];
+        if (-f $file) {
+            my $slurped = File::Slurper::read_text($file);
+            my $parsed = $parser->decode($slurped);
+
+            $update = [(@$parsed, $post)];
+        }
+
+        open(my $fh, '>', $file) or confess;
+        print $fh $parser->encode($update);
+        close $fh;
+    }
 }
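
With the reworked write(), each file under $datastore holds a JSON array of revisions for a single post id, newest appended last. A sketch of reading one back by hand, assuming the same File::Slurper and JSON::MaybeXS setup as the module and an illustrative $id:

    # "$datastore/$id" decodes to an arrayref of post revisions.
    my $revisions = $parser->decode( File::Slurper::read_text("$datastore/$id") );
    my $latest    = $revisions->[-1];    # write() appends the newest revision to the end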
 
 sub count ($self) {
-    @index = $self->_index() unless @index;
+    my @index = $self->_index();
     return scalar(@index);
 }
 
+sub add ($self,@posts) {
+    my $ctime = time();
+    @posts = map {
+        $_->{id} //= $ctime;
+        $_->{created} = $ctime;
+        $_
+    } @posts;
+    return $self->SUPER::add(@posts);
+}
+
 sub delete($self, @posts) {
     foreach my $update (@posts) {
-        unlink "$datastore/$update->{created}" or confess;
+        unlink "$datastore/$update->{id}" or confess;
     }
     return 0;
 }
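
A sketch of the new add()/delete() round trip, assuming $data as above and assuming SUPER::add() in Trog::DataModule ultimately hands the posts to write(); the post fields shown are illustrative:

    # add() stamps id (when absent) and created with the current epoch second,
    # so posts land in files keyed by id rather than by created time.
    $data->add({ title => 'Hello world', data => 'First post' });

    # delete() now unlinks by id, matching the new file naming.
    $data->delete({ id => 1636142410 });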