Fix FileIO.readall() (new_buffersize()) for large files
Truncate the buffer size to PY_SSIZE_T_MAX.
parent 950468e553
commit c5af7730e3
@@ -564,7 +564,11 @@ new_buffersize(fileio *self, size_t currentsize
         */
         if (end >= SMALLCHUNK && end >= pos && pos >= 0) {
             /* Add 1 so if the file were to grow we'd notice. */
-            return currentsize + end - pos + 1;
+            Py_off_t bufsize = currentsize + end - pos + 1;
+            if (bufsize < PY_SSIZE_T_MAX)
+                return (size_t)bufsize;
+            else
+                return PY_SSIZE_T_MAX;
         }
     }
 #endif
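
For context, here is a minimal standalone sketch of the clamping idea this commit applies: compute the estimated buffer size in a wide signed type, then truncate it to the largest value a Py_ssize_t can represent before returning it as a size_t. The names estimate_buffersize and FAKE_PY_SSIZE_T_MAX, the PTRDIFF_MAX stand-in, and the fallback growth path are illustrative assumptions, not CPython's actual code.

/* Sketch only: stand-ins for CPython's new_buffersize()/PY_SSIZE_T_MAX. */
#include <stdint.h>
#include <stdio.h>

#define SMALLCHUNK (8 * 1024)
/* Stand-in for PY_SSIZE_T_MAX (largest value a Py_ssize_t can hold). */
#define FAKE_PY_SSIZE_T_MAX PTRDIFF_MAX

static size_t
estimate_buffersize(size_t currentsize, int64_t pos, int64_t end)
{
    if (end >= SMALLCHUNK && end >= pos && pos >= 0) {
        /* Add 1 so if the file were to grow we'd notice. */
        int64_t bufsize = (int64_t)currentsize + end - pos + 1;
        if (bufsize < FAKE_PY_SSIZE_T_MAX)
            return (size_t)bufsize;
        else
            /* Estimate no longer fits in a Py_ssize_t: clamp it. */
            return (size_t)FAKE_PY_SSIZE_T_MAX;
    }
    /* Fallback growth path elided; the real function keeps growing
       the buffer by fixed increments instead. */
    return currentsize + SMALLCHUNK;
}

int main(void)
{
    /* Pretend the file's reported end offset is absurdly large. */
    int64_t end = INT64_MAX - 1;
    printf("estimated buffer size: %zu bytes\n",
           estimate_buffersize(0, 0, end));
    return 0;
}

With the clamp, a pathologically large (or lying) file size can no longer produce a buffer-size request that overflows Py_ssize_t; the read loop simply stops growing the buffer at the maximum representable size.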