On 2004-09-07 08:40:33 -0600, Reidy, Ron wrote:
> I've never seen a before insert trigger being slower than doing the same
> thing in the client code.  I guess it would depend on what you are
> doing in the trigger and how many sequences are cached (but the amount
> cached would also have the same effect with client code).
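
(Both runs below create their sequence without a CACHE clause, so they use
Oracle's default cache of 20 values. A larger cache, e.g.

    create sequence insertbench_id_seq cache 1000;

should speed up both variants by roughly the same amount, as you say.)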

Here is a simple example:

--snip----snip----snip----snip----snip----snip----snip----snip--
#!/usr/bin/perl 
use warnings;
use strict;

use DBI;
use Time::HiRes qw(time);


# tunables:
# number of rows to insert
my $n = 10_000;

# read DBI credentials from a file: one line containing
# dsn, user and password, separated by whitespace
sub _read_cred {
    my ($fn) = @_;

    open(my $fh, '<', $fn) or die "cannot open $fn: $!";
    my $line = <$fh>;
    close($fh);
    my @cred = split(/\s+/, $line);
    return @cred;
}


my $dbh;

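# connect using credentials from the file named by $DBI_CREDENTIAL_FILE
# (names without a slash are looked up under ~/.dbi/, default is ~/.dbi/default)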
sub dbiconnect {
    my $cred_file = $ENV{DBI_CREDENTIAL_FILE};
    if (! defined($cred_file)) {
        $cred_file = "$ENV{HOME}/.dbi/default";
    } elsif ($cred_file !~ m{/}) { 
        $cred_file = "$ENV{HOME}/.dbi/$cred_file";
    }

    $dbh = DBI->connect(_read_cred($cred_file), {RaiseError => 0, AutoCommit => 0}); 
    return $dbh;
}

$dbh = dbiconnect();

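# run 1: reference the sequence directly in the insert statement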
$dbh->do("drop table insertbench_table");
$dbh->do("create table insertbench_table(id number, data number)");
$dbh->do("drop sequence insertbench_id_seq");
$dbh->do("create sequence insertbench_id_seq");
my $sth = $dbh->prepare("insert into insertbench_table(id, data)
                         values(insertbench_id_seq.nextval, ?)");
my $t0 = time();
for my $i (1 .. $n) {
    $sth->execute($i);
}
$dbh->commit();

my $t1 = time();

printf "insert(insertbench_id_seq.nextval): %g seconds, %g inserts/second\n",
        $t1 - $t0, $n / ($t1 - $t0);

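# run 2: let a before-insert trigger fill in the id from the sequence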
$dbh->do("drop table insertbench_table");
$dbh->do("create table insertbench_table(id number, data number)");
$dbh->do("drop sequence insertbench_id_seq");
$dbh->do("create sequence insertbench_id_seq");
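# the trigger fetches a sequence value only when the insert supplies a null id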
$dbh->do("create or replace trigger trig_ins_insertbench_table 
          before insert on insertbench_table
          for each row
          begin
            if :new.id is null
            then
                select insertbench_id_seq.nextval into :new.id from dual;
            end if;
          end trig_ins_insertbench_table;
         ");
$sth = $dbh->prepare("insert into insertbench_table(id, data)
                      values(null, ?)");
$t0 = time();
for my $i (1 .. $n) {
    $sth->execute($i);
}
$dbh->commit();

$t1 = time();

printf "insert(trigger): %g seconds, %g inserts/second\n",
        $t1 - $t0, $n / ($t1 - $t0);
--snip----snip----snip----snip----snip----snip----snip----snip--

On my system, it prints:

insert(insertbench_id_seq.nextval): 5.32295 seconds, 1878.66 inserts/second
insert(trigger): 9.54803 seconds, 1047.34 inserts/second

so the trigger is only 80% slower than accessing the sequence directly
in the insert statement. It may be possible to write a more efficient
trigger though (maybe doing the insert directly in an "instead of"
trigger).
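
A rough, untested sketch of what I mean (the view name insertbench_view is
just made up for illustration):

    create or replace view insertbench_view as
        select id, data from insertbench_table;

    create or replace trigger trig_ins_insertbench_view
        instead of insert on insertbench_view
        for each row
    begin
        insert into insertbench_table(id, data)
        values (nvl(:new.id, insertbench_id_seq.nextval), :new.data);
    end trig_ins_insertbench_view;

The benchmark would then insert into insertbench_view instead of the table.
Whether that actually avoids the per-row "select ... from dual" overhead is
something I haven't measured.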

        hp

-- 
   _  | Peter J. Holzer      | Shooting the users in the foot is bad. 
|_|_) | Sysadmin WSR / LUGA  | Giving them a gun isn't.
| |   | [EMAIL PROTECTED]        |      -- Gordon Schumacher,
__/   | http://www.hjp.at/   |     mozilla bug #84128
