
Source: https://github.com/django/django/commit/a665ed5179f5bbd3db95ce67286d0192eff041d8

[3.2.x] Fixed CVE-2023-24580 -- Prevented DoS with too many uploaded … · django/django@a665ed5
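Alongside the parser changes below, the fix introduces a DATA_UPLOAD_MAX_NUMBER_FILES setting (the fixed releases document a default of 100). As a minimal sketch, assuming a Django release that includes this backport, a project can tune both upload-count limits in its settings module; the values shown are illustrative, not recommendations, and either check is disabled by setting the value to None (the parser only counts when the setting is not None).

# settings.py (illustrative values only)

# Cap on form fields per request. This setting existed before the fix; the
# patch below moves its enforcement earlier in the parse loop and also counts
# RAW parts, allowing two extra for the empty parts around the last boundary.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000

# Cap on file parts per request, added with this fix; None disables the check.
DATA_UPLOAD_MAX_NUMBER_FILES = 100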

django/http/multipartparser.py

@@ -14,6 +14,7 @@
 from django.conf import settings
 from django.core.exceptions import (
     RequestDataTooBig, SuspiciousMultipartForm, TooManyFieldsSent,
+    TooManyFilesSent,
 )
 from django.core.files.uploadhandler import (
     SkipFile, StopFutureHandlers, StopUpload,

@@ -38,6 +39,7 @@ class InputStreamExhausted(Exception):

 RAW = "raw"
 FILE = "file"
 FIELD = "field"
+FIELD_TYPES = frozenset([FIELD, RAW])
 
 
 class MultiPartParser:

@@ -102,6 +104,22 @@ def __init__(self, META, input_data, upload_handlers, encoding=None):

         self._upload_handlers = upload_handlers
 
     def parse(self):
+        # Call the actual parse routine and close all open files in case of
+        # errors. This is needed because if exceptions are thrown the
+        # MultiPartParser will not be garbage collected immediately and
+        # resources would be kept alive. This is only needed for errors because
+        # the Request object closes all uploaded files at the end of the
+        # request.
+        try:
+            return self._parse()
+        except Exception:
+            if hasattr(self, "_files"):
+                for _, files in self._files.lists():
+                    for fileobj in files:
+                        fileobj.close()
+            raise
+
+    def _parse(self):
         """
         Parse the POST data and break it into a FILES MultiValueDict and a POST
         MultiValueDict.

@@ -147,6 +165,8 @@ def parse(self):

         num_bytes_read = 0
         # To count the number of keys in the request.
         num_post_keys = 0
+        # To count the number of files in the request.
+        num_files = 0
         # To limit the amount of data read from the request.
         read_size = None
         # Whether a file upload is finished.

@@ -162,6 +182,20 @@ def parse(self):

                     old_field_name = None
                     uploaded_file = True
 
+                if (
+                    item_type in FIELD_TYPES and
+                    settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
+                ):
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
+                    num_post_keys += 1
+                    # 2 accounts for empty raw fields before and after the
+                    # last boundary.
+                    if settings.DATA_UPLOAD_MAX_NUMBER_FIELDS + 2 < num_post_keys:
+                        raise TooManyFieldsSent(
+                            "The number of GET/POST parameters exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
+                        )
+
                 try:
                     disposition = meta_data['content-disposition'][1]
                     field_name = disposition['name'].strip()

@@ -174,15 +208,6 @@ def parse(self):

                 field_name = force_str(field_name, encoding, errors='replace')
 
                 if item_type == FIELD:
-                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
-                    num_post_keys += 1
-                    if (settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None and
-                            settings.DATA_UPLOAD_MAX_NUMBER_FIELDS < num_post_keys):
-                        raise TooManyFieldsSent(
-                            'The number of GET/POST parameters exceeded '
-                            'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.'
-                        )
-
                     # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
                     if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
                         read_size = settings.DATA_UPLOAD_MAX_MEMORY_SIZE - num_bytes_read

@@ -208,6 +233,16 @@ def parse(self):

 
                     self._post.appendlist(field_name, force_str(data, encoding, errors='replace'))
                 elif item_type == FILE:
+                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FILES.
+                    num_files += 1
+                    if (
+                        settings.DATA_UPLOAD_MAX_NUMBER_FILES is not None and
+                        num_files > settings.DATA_UPLOAD_MAX_NUMBER_FILES
+                    ):
+                        raise TooManyFilesSent(
+                            "The number of files exceeded "
+                            "settings.DATA_UPLOAD_MAX_NUMBER_FILES."
+                        )
                     # This is a file, use the handler...
                     file_name = disposition.get('filename')
                     if file_name:

@@ -276,8 +311,13 @@ def parse(self):

                     # Handle file upload completions on next iteration.
                     old_field_name = field_name
                 else:
-                    # If this is neither a FIELD or a FILE, just exhaust the stream.
-                    exhaust(stream)
+                    # If this is neither a FIELD nor a FILE, exhaust the field
+                    # stream. Note: There could be an error here at some point,
+                    # but there will be at least two RAW types (before and
+                    # after the other boundaries). This branch is usually not
+                    # reached at all, because a missing content-disposition
+                    # header will skip the whole boundary.
+                    exhaust(field_stream)
         except StopUpload as e:
             self._close_files()
             if not e.connection_reset:
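The following is a self-contained sketch, not part of the commit, of how the new limit behaves: it hand-builds a multipart body with three file parts against a deliberately tiny DATA_UPLOAD_MAX_NUMBER_FILES of 2 and feeds it directly to MultiPartParser. The boundary string, field names, and the limit value are made up for the demo, and it assumes a Django release that includes this fix.

import io

from django.conf import settings

# Standalone configuration for the demo; a real project relies on its
# settings module instead. The tiny limit is purely illustrative.
settings.configure(DATA_UPLOAD_MAX_NUMBER_FILES=2)

from django.core.exceptions import TooManyFilesSent
from django.core.files.uploadhandler import MemoryFileUploadHandler
from django.http.multipartparser import MultiPartParser

boundary = "demoboundary"
part = (
    "--%s\r\n"
    'Content-Disposition: form-data; name="f%d"; filename="f%d.txt"\r\n'
    "Content-Type: text/plain\r\n"
    "\r\n"
    "x\r\n"
)
# Three file parts, one more than the limit configured above.
body = "".join(part % (boundary, i, i) for i in range(3))
body += "--%s--\r\n" % boundary
payload = body.encode()

META = {
    "CONTENT_TYPE": "multipart/form-data; boundary=%s" % boundary,
    "CONTENT_LENGTH": str(len(payload)),
}

parser = MultiPartParser(META, io.BytesIO(payload), [MemoryFileUploadHandler()])
try:
    parser.parse()
except TooManyFilesSent as exc:
    print("rejected:", exc)

In a running project the same condition surfaces when a view accesses request.POST or request.FILES; TooManyFilesSent, like TooManyFieldsSent, is treated as a suspicious operation, so it ends up as a 400 Bad Request rather than a server error, and the parse() wrapper added above closes any files already parsed before the exception propagates.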

