@@ -16,6 +16,7 @@
     RequestDataTooBig,
     SuspiciousMultipartForm,
     TooManyFieldsSent,
+    TooManyFilesSent,
 )
 from django.core.files.uploadhandler import SkipFile, StopFutureHandlers, StopUpload
 from django.utils.datastructures import MultiValueDict
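For orientation, the new import pairs with a project-level setting alongside the existing field limit. A minimal, hypothetical settings.py excerpt (the values shown are assumptions for illustration, not taken from this diff):

# Hypothetical settings.py excerpt; values are illustrative assumptions.
# Setting either value to None disables the corresponding check.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000  # limit on form fields per request
DATA_UPLOAD_MAX_NUMBER_FILES = 100    # limit on uploaded files per request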
@@ -39,6 +40,7 @@ class InputStreamExhausted(Exception):
 RAW = "raw"
 FILE = "file"
 FIELD = "field"
+FIELD_TYPES = frozenset([FIELD, RAW])
 
 
 class MultiPartParser:
@@ -109,6 +111,22 @@ def __init__(self, META, input_data, upload_handlers, encoding=None):
         self._upload_handlers = upload_handlers
 
     def parse(self):
+        # Call the actual parse routine and close all open files in case of
+        # errors. This is needed because if exceptions are thrown the
+        # MultiPartParser will not be garbage collected immediately and
+        # resources would be kept alive. This is only needed for errors because
+        # the Request object closes all uploaded files at the end of the
+        # request.
+        try:
+            return self._parse()
+        except Exception:
+            if hasattr(self, "_files"):
+                for _, files in self._files.lists():
+                    for fileobj in files:
+                        fileobj.close()
+            raise
+
+    def _parse(self):
         """
         Parse the POST data and break it into a FILES MultiValueDict and a POST
         MultiValueDict.
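The new parse()/_parse() split exists purely for cleanup on failure. A self-contained sketch of the same pattern, using hypothetical names rather than Django's internals:

import tempfile

# Standalone sketch of the close-on-error wrapper; class and attribute
# names here are hypothetical, not Django's.
class SketchParser:
    def __init__(self):
        self._open_files = []

    def parse(self):
        try:
            return self._parse()
        except Exception:
            # On failure, close everything opened so far before re-raising,
            # instead of waiting for the parser to be garbage collected.
            for fileobj in self._open_files:
                fileobj.close()
            raise

    def _parse(self):
        self._open_files.append(tempfile.TemporaryFile())
        raise ValueError("simulated mid-parse failure")

Calling SketchParser().parse() still propagates the ValueError, but the temporary file is already closed by the time it reaches the caller.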
@@ -154,6 +172,8 @@ def parse(self):
         num_bytes_read = 0
         # To count the number of keys in the request.
         num_post_keys = 0
+        # To count the number of files in the request.
+        num_files = 0
         # To limit the amount of data read from the request.
         read_size = None
         # Whether a file upload is finished.
@@ -169,6 +189,20 @@ def parse(self):
                     old_field_name = None
                     uploaded_file = True
 
+                if (
+                    item_type in FIELD_TYPES
+                    and settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
+                ):
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
+                    num_post_keys += 1
+                    # 2 accounts for empty raw fields before and after the
+                    # last boundary.
+                    if settings.DATA_UPLOAD_MAX_NUMBER_FIELDS + 2 < num_post_keys:
+                        raise TooManyFieldsSent(
+                            "The number of GET/POST parameters exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
+                        )
+
                 try:
                     disposition = meta_data["content-disposition"][1]
                     field_name = disposition["name"].strip()
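One way to read the "+ 2" allowance: the counter now also sees the empty RAW items the parser emits around the final boundary, so the threshold is shifted by two. A tiny illustrative helper (my own, not part of the patch):

# Illustrative helper mirroring the guard above (not part of the patch).
def too_many_fields(num_post_keys, limit):
    # The two empty RAW parts around the last boundary are counted too,
    # hence the "+ 2" slack before the limit trips.
    return limit is not None and limit + 2 < num_post_keys

assert not too_many_fields(1002, 1000)  # limit + 2 items still pass
assert too_many_fields(1003, 1000)      # one more field trips the check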
@@ -181,17 +215,6 @@ def parse(self):
                 field_name = force_str(field_name, encoding, errors="replace")
 
                 if item_type == FIELD:
-                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
-                    num_post_keys += 1
-                    if (
-                        settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
-                        and settings.DATA_UPLOAD_MAX_NUMBER_FIELDS < num_post_keys
-                    ):
-                        raise TooManyFieldsSent(
-                            "The number of GET/POST parameters exceeded "
-                            "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
-                        )
-
                     # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
                     if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
                         read_size = (
@@ -226,6 +249,16 @@ def parse(self):
                             field_name, force_str(data, encoding, errors="replace")
                         )
                 elif item_type == FILE:
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FILES.
+                    num_files += 1
+                    if (
+                        settings.DATA_UPLOAD_MAX_NUMBER_FILES is not None
+                        and num_files > settings.DATA_UPLOAD_MAX_NUMBER_FILES
+                    ):
+                        raise TooManyFilesSent(
+                            "The number of files exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FILES."
+                        )
                     # This is a file, use the handler...
                     file_name = disposition.get("filename")
                     if file_name:
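The file-count guard has the same shape as the field-count one, without any boundary slack. Sketched as a standalone predicate (again my own helper, not Django API):

# Illustrative helper mirroring the file-count guard above (not Django API).
def too_many_files(num_files, limit):
    return limit is not None and num_files > limit

assert not too_many_files(100, 100)  # exactly at the limit is allowed
assert too_many_files(101, 100)      # one file over the limit is rejected

Since TooManyFilesSent, like TooManyFieldsSent, is a SuspiciousOperation subclass, exceeding the limit should surface as a 400 (bad request) response in a normal request cycle rather than a server error.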
@@ -303,8 +336,13 @@ def parse(self):
                     # Handle file upload completions on next iteration.
                     old_field_name = field_name
                 else:
-                    # If this is neither a FIELD or a FILE, just exhaust the stream.
-                    exhaust(stream)
+                    # If this is neither a FIELD nor a FILE, exhaust the field
+                    # stream. Note: There could be an error here at some point,
+                    # but there will be at least two RAW types (before and
+                    # after the other boundaries). This branch is usually not
+                    # reached at all, because a missing content-disposition
+                    # header will skip the whole boundary.
+                    exhaust(field_stream)
         except StopUpload as e:
             self._close_files()
             if not e.connection_reset: