George S. Baugh 4 years ago
parent
commit
97edb9e0b5
2 changed files with 95 additions and 8 deletions
  1. lib/Trog/SQLite.pm (+42 −8)
  2. t/Trog-SQLite.t (+53 −0)

lib/Trog/SQLite.pm (+42 −8)

@@ -10,14 +10,37 @@ use POSIX qw{floor};
 
 use DBI;
 use DBD::SQLite;
-use File::Slurper qw{read_text};
+use File::Slurper();
+use List::Util qw{any};
+
+=head1 NAME
+
+Trog::SQLite - Abstracts the boilerpain away!
+
+=head1 SYNOPSIS
+
+    my $dbh = Trog::SQLite::dbh("my_schema.sql", "my_sqlite3.db");
+    ...
+
+=head1 FUNCTIONS
+
+Everything in this module throws when something goes wrong.
+
+=head2 dbh
+
+Get you a database handle with fkeys turned on, and schema consistency enforced.
+Caches the handle after the first call.
+
+Be careful when first calling; the standard fork-safety concerns with SQLite apply.
+
+=cut
 
 my $dbh = {};
 # Ensure the db schema is OK, and give us a handle
 sub dbh {
     my ($schema,$dbname) = @_;
     return $dbh->{$schema} if $dbh->{$schema};
-    my $qq = read_text($schema);
+    my $qq = File::Slurper::read_text($schema);
     my $db = DBI->connect("dbi:SQLite:dbname=$dbname","","");
     $db->{sqlite_allow_multiple_statements} = 1;
     $db->do($qq) or die "Could not ensure database consistency";
@@ -25,25 +48,36 @@ sub dbh {
     $dbh->{$schema} = $db;
 
     # Turn on fkeys
-    $db->do("PRAGMA foreign_keys = ON");
+    $db->do("PRAGMA foreign_keys = ON") or die "Could not enable foreign keys";
     return $db;
 }
 
-=head2 bulk_insert(DBI $dbh, STRING $table, ARRAYREF $keys, MIXED @values)
+=head2 bulk_insert(DBI $dbh, STRING $table, ARRAYREF $keys, STRING $action='IGNORE', MIXED @values)
+
+Insert the values into the specified table under the provided keys.
+Values must be a flat list of repeating tuples matching the number and order of the keys. Example:
 
-Upsert the values into specified table with provided keys.
-values will be N-tuples based on the number and ordering of the keys.
+    my $keys   = [qw{A B C}];
+    my @values = qw{1 2 3 4 5 6 7 8 9};
 
 Essentially works around the 999 named param limit and executes by re-using prepared statements.
 This results in a quick insert/update of lots of data, such as when building an index or importing data.
 
-Dies on failure.
+For the vast majority of in-practice usage, this will be swatting flies with an elephant gun.
+That said, it should always do the job correctly and quickly, even for trivial datasets.
 
-Doesn't escape the table name or keys, so don't be a maroon and let users pass data to this
+If you don't put fkeys in place (or simply turn them off),
+you can use REPLACE as your action to do upserts without causing destructive consequences.
+It's less code than writing an ON CONFLICT UPDATE clause, and faster.
+
+Batch your values into whatever chunk size is appropriate for your available heap.
 
 =cut
 
+
 sub bulk_insert ($dbh, $table, $keys, $ACTION='IGNORE', @values) {
+    die "unsupported insert action $ACTION" unless any { $ACTION eq $_ } qw{ROLLBACK ABORT FAIL IGNORE REPLACE};
+
     die "keys must be nonempty ARRAYREF" unless ref $keys eq 'ARRAY' && @$keys;
     die "#Values must be a multiple of #keys" if @values % @$keys;
 
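A minimal usage sketch for the dbh() documented above (not part of the commit); the schema and database filenames are invented, and the fork note simply restates the POD's caveat.

    use Trog::SQLite;

    # First call reads the schema file, applies it, turns foreign keys on,
    # and caches the handle keyed on the schema path; later calls with the
    # same schema return the cached handle.
    my $dbh = Trog::SQLite::dbh("schema.sql", "app.db");

    # The cache is per-process state: a forked child inherits the parent's
    # SQLite handle, so fork before the first dbh() call (or reconnect in
    # the child) rather than sharing it across processes.
    my $rows = $dbh->selectall_arrayref("SELECT 1");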

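Likewise, a sketch of bulk_insert() as described in the updated POD (also not part of the commit); the table and column names are made up for illustration, and the batching loop is just one way to follow the "batch to your heap" advice.

    use Trog::SQLite;

    my $dbh  = Trog::SQLite::dbh("schema.sql", "app.db");
    my $keys = [qw{id title body}];

    # Values are a flat list: one (id, title, body) tuple per row.
    my @values = (
        1, "first",  "hello",
        2, "second", "world",
    );

    # IGNORE silently skips rows that hit a constraint; REPLACE would
    # overwrite them instead (mind ON DELETE CASCADE if fkeys are on).
    Trog::SQLite::bulk_insert($dbh, 'example_posts', $keys, 'IGNORE', @values);

    # For very large imports, feed the values in slices to bound memory.
    # The slice length must stay a multiple of the number of keys.
    my @big_flat_list = map { ($_, "title $_", "body $_") } 1 .. 10_000;
    while (my @chunk = splice @big_flat_list, 0, 1_000 * @$keys) {
        Trog::SQLite::bulk_insert($dbh, 'example_posts', $keys, 'IGNORE', @chunk);
    }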
t/Trog-SQLite.t (+53 −0)

@@ -0,0 +1,53 @@
+use strict;
+use warnings;
+
+use Test::More;
+use Test::MockModule qw{strict};
+use Test::Deep;
+use Test::Fatal qw{exception};
+use FindBin;
+
+use lib "$FindBin::Bin/../lib";
+
+require_ok('Trog::SQLite') or BAIL_OUT("Can't find SUT");
+
+subtest 'dbh' => sub {
+    my $readmock = Test::MockModule->new('File::Slurper');
+    $readmock->redefine('read_text', sub { "SELECT me FROM candidates" });
+    my $dbimock = Test::MockModule->new("DBI");
+    $dbimock->redefine('connect', sub { bless({},'TrogDBD') });
+    my $works = 0;
+    no warnings qw{redefine once};
+    local *TrogDBD::do = sub { $works };
+
+    like(exception { Trog::SQLite::dbh('bogus','bogus') }, qr/ensure/i, "Failure to enforce schema throws");
+    $works = 1;
+
+    # Otherwise it works
+    isa_ok(Trog::SQLite::dbh('bogus','bogus'),'TrogDBD');
+};
+
+subtest bulk_insert => sub {
+    like(exception { Trog::SQLite::bulk_insert({},'bogus', [qw{a b c}], 'PROCRASTINATE') }, qr/unsupported/i, "unsupported INSERT OR actions are rejected");
+    like(exception { Trog::SQLite::bulk_insert({},'bogus', []) }, qr/nonempty/, "keys must be provided");
+    like(exception { Trog::SQLite::bulk_insert({},'bogus',[qw{a b c}],'IGNORE',qw{jello}) }, qr/multiple of/i, "sufficient values must be provided");
+
+    my $smt;
+    my $dbh = bless({},'TrogDBH');
+    no warnings qw{redefine once};
+    local *TrogDBH::prepare = sub { $smt .= $_[1]; return bless({},'TrogSMT') };
+    local *TrogSMT::execute = sub {};
+
+    is(exception { Trog::SQLite::bulk_insert($dbh,'bogus', [qw{moo cows}], 'IGNORE', qw{a b c d}) }, undef, "can do bulk insert");
+    is($smt, "INSERT OR IGNORE INTO bogus (moo,cows) VALUES (?,?),(?,?)", "Expected query prepared");
+
+    # Million insert
+    $smt='';
+    my $keys = [("a") x 10];
+    my @values = ("b") x (10**6);
+    Trog::SQLite::bulk_insert($dbh,'bogus', $keys, 'IGNORE', @values);
+    my $expected = "INSERT OR IGNORE INTO bogus (a,a,a,a,a,a,a,a,a,a) VALUES (?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?),(?,?,?,?,?,?,?,?,?,?)INSERT OR IGNORE INTO bogus (a,a,a,a,a,a,a,a,a,a) VALUES (?,?,?,?,?,?,?,?,?,?)";
+    is($smt,$expected, "Only two statements need to be prepared, no matter how many rows are inserted");
+};
+
+done_testing;