Subject: [PATCH] Optimize fetchall_arrayref with hash slice
Date: Thu, 12 Apr 2012 23:16:22 +0100
To: bug-DBI [...] rt.cpan.org
From: Dagfinn Ilmari Mannsåker <ilmari [...] ilmari.org>
Bind the requested columns to a single pre-declared hash and push a
copy of it onto the return array for each row, rather than calling
fetchrow_hashref and building a new hash every time.
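
For illustration, here is the technique in isolation, outside
fetchall_arrayref. This is only a sketch: the in-memory SQLite handle,
table and column names are made up for the example and assume
DBD::SQLite is available.

    use strict;
    use warnings;
    use DBI;

    my $dbh = DBI->connect('dbi:SQLite:dbname=:memory:', '', '',
                           { RaiseError => 1 });
    $dbh->do('CREATE TABLE t (id INTEGER, name TEXT)');
    $dbh->do(q{INSERT INTO t VALUES (1, 'a')});
    $dbh->do(q{INSERT INTO t VALUES (2, 'b')});

    my $sth = $dbh->prepare('SELECT id, name FROM t');
    $sth->execute;

    # Bind every column to an entry in one lexical hash, keyed by the
    # lower-cased column name, then copy that hash once per row.
    my %row;
    $sth->bind_columns( \( @row{ @{ $sth->{NAME_lc} } } ) );

    my @rows;
    push @rows, { %row } while $sth->fetch;
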
---
DBI.pm | 25 ++++++++-----------------
1 files changed, 8 insertions(+), 17 deletions(-)
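
As a note for review: the slice branch below is fairly dense, so here
is the same logic unpacked with comments. It assumes a prepared and
executed $sth as in the sketch above; the slice keys are illustrative
and $slice stands for the caller's hash-slice argument.

    my $slice    = { Id => 1, Name => 1 };  # caller-supplied slice (illustrative)
    my $max_rows = -1;                      # -1 means "no limit", as in the patch
    my %map = map { lc($_) => $_ } keys %$slice;  # NAME_lc => caller's spelling

    # Columns named in the slice are bound into %row; every other column
    # is bound to a throwaway scalar so its value is simply discarded.
    my %row;
    $sth->bind_columns(
        map { exists $map{$_} ? \$row{ $map{$_} } : \do { my $dummy } }
            @{ $sth->{NAME_lc} }
    );

    my @rows;
    push @rows, { %row }
        while ($max_rows-- and $sth->fetch);
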
diff --git a/DBI.pm b/DBI.pm
index 035087a..9b5fc0b 100644
--- a/DBI.pm
+++ b/DBI.pm
@@ -2031,29 +2031,20 @@ sub _new_sth { # called by DBD::<drivername>::db::prepare)
     }
     elsif ($mode eq 'HASH') {
         $max_rows = -1 unless defined $max_rows;
-        # XXX both these could be made faster (and unified) by pre-binding
-        # a local hash using bind_columns and then copying it per row, so
-        # we'd be able to replace the expensive fetchrow_hashref with
-        # fetchrow_arrayref. So the main loop would end up being like:
-        #   push @rows, { %bound_hash }
-        #       while ($max_rows-- and $sth->fetchrow_arrayref);
-        # XXX Also, it would be very helpful for DBIx::Class and others
+        # XXX It would be very helpful for DBIx::Class and others
         # if a slice could 'rename' columns. Some kind of 'renaming slice'
         # could be incorporated here.
+        my %row;
         if (keys %$slice) {
-            my @o_keys = keys %$slice;
-            my @i_keys = map { lc } keys %$slice;
-            while ($max_rows-- and $row = $sth->fetchrow_hashref('NAME_lc')) {
-                my %hash;
-                @hash{@o_keys} = @{$row}{@i_keys};
-                push @rows, \%hash;
-            }
+            my %map = map { lc($_) => $_ } keys %$slice;
+            $sth->bind_columns( map { exists $map{$_} ? \$row{$map{$_}} : \do { my $dummy } } @{$sth->FETCH('NAME_lc')} );
         }
         else {
-            # XXX assumes new ref each fetchhash
-            push @rows, $row
-                while ($max_rows-- and $row = $sth->fetchrow_hashref());
+            $sth->bind_columns( \( @row{ @{$sth->FETCH($sth->FETCH('FetchHashKeyName')) } } ) );
         }
+        push @rows, { %row }
+            while ($max_rows-- and $sth->fetch);
+
     }
     else { Carp::croak("fetchall_arrayref($mode) invalid") }
     return \@rows;
--
1.7.5.4