mirror of https://github.com/git/git.git, synced 2026-01-17 14:21:57 +00:00
Git.pm: Use stream-like writing in cat_blob()
This commit fixes an issue where handling large files caused an 'Out of memory' Perl exception. Instead of reading and writing the whole blob at once, the blob is now written in small pieces.

The problem was raised and discussed in this mail to the msysGit mailing list: http://thread.gmane.org/gmane.comp.version-control.msysgit/12080

Signed-off-by: Gregor Uhlenheuer <kongo2002@googlemail.com>
Signed-off-by: Johannes Schindelin <johannes.schindelin@gmx.de>
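The fix follows the standard chunked-copy pattern: read a bounded amount, write it out, and repeat until the announced size has been consumed. Below is a minimal, self-contained Perl sketch of that pattern, not the Git.pm code itself: copy_n_bytes() is a hypothetical helper, it reads from an ordinary filehandle rather than the cat-file pipe that cat_blob() talks to, and it dies on errors where Git.pm throws Error::Simple.

#!/usr/bin/perl
# Chunked-copy sketch: move $size bytes from one filehandle to another
# in 1024-byte pieces, so only one small buffer is ever held in memory.
use strict;
use warnings;

sub copy_n_bytes {
	my ($in, $out, $size) = @_;
	my $bytesRead = 0;

	while (1) {
		my $bytesLeft = $size - $bytesRead;
		last unless $bytesLeft;

		# Read at most 1 KiB; the three-argument read() overwrites $chunk.
		my $bytesToRead = $bytesLeft < 1024 ? $bytesLeft : 1024;
		my $chunk;
		my $read = read($in, $chunk, $bytesToRead);
		die "read failed: $!" unless defined $read;
		die "unexpected end of input" if $read == 0;

		# Write the piece out immediately instead of accumulating it.
		print {$out} $chunk or die "write failed: $!";
		$bytesRead += $read;
	}

	return $bytesRead;
}

# Example: stream a file to STDOUT without loading it whole.
my $path = shift @ARGV;
die "usage: $0 <file>\n" unless defined $path;
open my $in, '<', $path or die "open $path: $!";
binmode $in;
binmode STDOUT;
copy_n_bytes($in, \*STDOUT, -s $path);
close $in;

Because each iteration holds at most 1024 bytes, peak memory stays constant no matter how large the input is.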
committed by Pat Thoyts
parent e51d5af4ef
commit c3e2378010
1 changed file: perl/Git.pm (15 lines changed)
@@ -896,22 +896,26 @@ sub cat_blob {
 	}
 
 	my $size = $1;
 
-	my $blob;
 	my $bytesRead = 0;
-
 	while (1) {
+		my $blob;
 		my $bytesLeft = $size - $bytesRead;
 		last unless $bytesLeft;
 
 		my $bytesToRead = $bytesLeft < 1024 ? $bytesLeft : 1024;
-		my $read = read($in, $blob, $bytesToRead, $bytesRead);
+		my $read = read($in, $blob, $bytesToRead);
 		unless (defined($read)) {
 			$self->_close_cat_blob();
 			throw Error::Simple("in pipe went bad");
 		}
 
 		$bytesRead += $read;
+
+		unless (print $fh $blob) {
+			$self->_close_cat_blob();
+			throw Error::Simple("couldn't write to passed in filehandle");
+		}
 	}
 
 	# Skip past the trailing newline.
@@ -926,11 +930,6 @@ sub cat_blob {
 		throw Error::Simple("didn't find newline after blob");
 	}
 
-	unless (print $fh $blob) {
-		$self->_close_cat_blob();
-		throw Error::Simple("couldn't write to passed in filehandle");
-	}
-
 	return $size;
 }
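For context, cat_blob() is the Git.pm method whose memory behaviour changes here: it writes the content of the named blob to a caller-supplied filehandle and returns the number of bytes written. A short usage sketch follows; the output path and the choice of blob are illustrative placeholders, not part of the patch.

#!/usr/bin/perl
# Usage sketch for Git.pm's cat_blob(); the output path and the blob chosen
# below are placeholders for illustration only.
use strict;
use warnings;
use Git;

my $repo = Git->repository(Directory => '.');

# Pick some blob, e.g. the version of perl/Git.pm recorded in HEAD.
my $blob_id = $repo->command_oneline('rev-parse', 'HEAD:perl/Git.pm');

open my $fh, '>', '/tmp/Git.pm.copy' or die "open: $!";
binmode $fh;

# With this patch the blob reaches $fh in 1024-byte pieces, so even very
# large blobs no longer have to fit into memory in one piece.
my $size = $repo->cat_blob($blob_id, $fh);
close $fh or die "close: $!";

print "wrote $size bytes\n";

Before the patch, the same call first built the entire blob in a Perl scalar and only then printed it, which is what triggered the out-of-memory failure on large files.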