Skip to content

Commit

Permalink
remove artificial "Long data" length limit of 1_000_000 bytes
Browse files Browse the repository at this point in the history
DBD::Firebird already honours the LongReadLen setting, there is no need
to put a hardcoded limit
  • Loading branch information
real-dam committed Dec 5, 2017
1 parent 79adfb2 commit 4310121
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 33 deletions.
19 changes: 0 additions & 19 deletions dbdimp.c
Original file line number Diff line number Diff line change
Expand Up @@ -1759,19 +1759,6 @@ AV *dbd_st_fetch(SV *sth, imp_sth_t *imp_sth)
break;
}

if (total_length >= MAX_SAFE_BLOB_LENGTH)
{
do_error(sth, 1, "Blob exceeds maximum length.");

sv_setpvn(sv, "** Blob exceeds maximum safe length **", 38);

/* I deliberately don't set FAILURE based on this. */
isc_close_blob(status, &blob_handle);
if (ib_error_check(sth, status))
return FALSE;
break;
}

/* Create a zero-length string. */
sv_setpv(sv, "");

Expand Down Expand Up @@ -1799,12 +1786,6 @@ AV *dbd_st_fetch(SV *sth, imp_sth_t *imp_sth)
/*
* As long as the fetch was successful, concatenate the segment we fetched
* into the growing Perl scalar.
*/
/*
* This is dangerous if the Blob is enormous. But Perl is supposed
* to be able to grow scalars indefinitely as far as resources allow,
* so what the heck. Besides, I limited the max length of a Blob earlier
* to MAX_SAFE_BLOB_LENGTH.
*/

sv_catpvn(sv, blob_segment_buffer, seg_length);
Expand Down
2 changes: 0 additions & 2 deletions dbdimp.h
Original file line number Diff line number Diff line change
Expand Up @@ -193,8 +193,6 @@ do { \
* If you want to fetch Blobs that are bigger, write your own Perl
*/

#define MAX_SAFE_BLOB_LENGTH (1000000)

#define MAX_EVENTS 15

typedef enum { ACTIVE, INACTIVE } IB_EVENT_STATE;
Expand Down
34 changes: 22 additions & 12 deletions t/42-blobs.t
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use strict;
use warnings;

use Test::More;
use Test::Exception;
use DBI qw(:sql_types);

use lib 't','.';
Expand All @@ -29,9 +30,6 @@ if ($error_str) {
unless ( $dbh->isa('DBI::db') ) {
plan skip_all => 'Connection to database failed, cannot continue testing';
}
else {
plan tests => 262;
}

ok($dbh, 'Connected to the database');

Expand All @@ -44,18 +42,19 @@ my $table = find_new_table($dbh);
#diag $table;
ok($table);

my $def = qq{
CREATE TABLE $table (
id INTEGER NOT NULL PRIMARY KEY,
name BLOB
)
};

# Repeat test?
foreach my $size ( 1 .. 5 ) {

#
# Create a new table
#
my $def = qq{
CREATE TABLE $table (
id INTEGER NOT NULL PRIMARY KEY,
name BLOB
)
};
ok( $dbh->do($def), qq{CREATE TABLE '$table'} );

$dbh->{AutoCommit} = 0;
Expand All @@ -69,9 +68,8 @@ CREATE TABLE $table (
for ( my $j = 0 ; $j < 256 ; $j++ ) {
$b .= chr($j);
}
for ( my $i = 0 ; $i < $size ; $i++ ) {
$blob .= $b;
}

$blob = $b x $size;

#
# Insert a row into the test table.......
Expand Down Expand Up @@ -126,4 +124,16 @@ CREATE TABLE $table (

} # repeat test

# Re-create the test table ($def is the CREATE TABLE statement used above;
# the table was dropped at the end of the repeat loop).
ok( $dbh->do($def), qq{CREATE TABLE '$table'} );

# Build a 600_000-byte random binary value — larger than DBI's default
# LongReadLen — to verify that no hardcoded size limit remains beyond
# LongReadLen itself (the old MAX_SAFE_BLOB_LENGTH was 1_000_000).
my $random_bin = join '', map { chr( int( rand(256) ) ) } 1 .. 600_000;

ok( $dbh->do( "INSERT into $table values(?, ?)", undef, 42, $random_bin ),
    "insert blob larger than LongReadLen" );

# Fetching a BLOB bigger than LongReadLen must now raise DBI's standard
# truncation error rather than the removed "Blob exceeds maximum length."
throws_ok { $dbh->selectall_arrayref("select * from $table WHERE id = 42") }
    qr/Not enough LongReadLen buffer/,
    "Fetching a BLOB larger than LongReadLen throws";

#- end test
done_testing();

0 comments on commit 4310121

Please sign in to comment.