diff options
Diffstat (limited to 'meta-python/recipes-devtools')
51 files changed, 3416 insertions, 73 deletions
diff --git a/meta-python/recipes-devtools/python/python3-aiohttp-jinja2_1.5.bb b/meta-python/recipes-devtools/python/python3-aiohttp-jinja2_1.5.bb index c86ec092a6..871eb7cae9 100644 --- a/meta-python/recipes-devtools/python/python3-aiohttp-jinja2_1.5.bb +++ b/meta-python/recipes-devtools/python/python3-aiohttp-jinja2_1.5.bb @@ -11,5 +11,3 @@ RDEPENDS:${PN} += " \ ${PYTHON_PN}-jinja2 \ ${PYTHON_PN}-aiohttp \ " - -BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-aiohttp/CVE-2024-23334.patch b/meta-python/recipes-devtools/python/python3-aiohttp/CVE-2024-23334.patch new file mode 100644 index 0000000000..29909529aa --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-aiohttp/CVE-2024-23334.patch @@ -0,0 +1,222 @@ +From 1c335944d6a8b1298baf179b7c0b3069f10c514b +From: Sam Bull <git@sambull.org> +Date: Sun Jan 28 18:13:06 2024 +0000 +Subject: [PATCH] python3-aiohttp: Validate static paths (#8079) + +Co-authored-by: J. Nick Koston <nick@koston.org> + +CVE: CVE-2024-23334 + +Upstream-Status: Backport [https://github.com/aio-libs/aiohttp/commit/1c335944d6a8b1298baf179b7c0b3069f10c514b] + +Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + CHANGES/8079.bugfix.rst | 1 + + aiohttp/web_urldispatcher.py | 18 +++++-- + docs/web_advanced.rst | 16 ++++-- + docs/web_reference.rst | 12 +++-- + tests/test_web_urldispatcher.py | 91 +++++++++++++++++++++++++++++++++ + 5 files changed, 128 insertions(+), 10 deletions(-) + create mode 100644 CHANGES/8079.bugfix.rst + +diff --git a/CHANGES/8079.bugfix.rst b/CHANGES/8079.bugfix.rst +new file mode 100644 +index 0000000..57bc8bf +--- /dev/null ++++ b/CHANGES/8079.bugfix.rst +@@ -0,0 +1 @@ ++Improved validation of paths for static resources -- by :user:`bdraco`. 
+diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py +index 5942e35..e8a8023 100644 +--- a/aiohttp/web_urldispatcher.py ++++ b/aiohttp/web_urldispatcher.py +@@ -593,9 +593,14 @@ class StaticResource(PrefixResource): + url = url / filename + + if append_version: ++ unresolved_path = self._directory.joinpath(filename) + try: +- filepath = self._directory.joinpath(filename).resolve() +- if not self._follow_symlinks: ++ if self._follow_symlinks: ++ normalized_path = Path(os.path.normpath(unresolved_path)) ++ normalized_path.relative_to(self._directory) ++ filepath = normalized_path.resolve() ++ else: ++ filepath = unresolved_path.resolve() + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink +@@ -660,8 +665,13 @@ class StaticResource(PrefixResource): + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() +- filepath = self._directory.joinpath(filename).resolve() +- if not self._follow_symlinks: ++ unresolved_path = self._directory.joinpath(filename) ++ if self._follow_symlinks: ++ normalized_path = Path(os.path.normpath(unresolved_path)) ++ normalized_path.relative_to(self._directory) ++ filepath = normalized_path.resolve() ++ else: ++ filepath = unresolved_path.resolve() + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError) as error: + # relatively safe +diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst +index 3a98b78..5129397 100644 +--- a/docs/web_advanced.rst ++++ b/docs/web_advanced.rst +@@ -136,12 +136,22 @@ instead could be enabled with ``show_index`` parameter set to ``True``:: + + web.static('/prefix', path_to_static_folder, show_index=True) + +-When a symlink from the static directory is accessed, the server responses to +-client with ``HTTP/404 Not Found`` by default. 
To allow the server to follow +-symlinks, parameter ``follow_symlinks`` should be set to ``True``:: ++When a symlink that leads outside the static directory is accessed, the server ++responds to the client with ``HTTP/404 Not Found`` by default. To allow the server to ++follow symlinks that lead outside the static root, the parameter ``follow_symlinks`` ++should be set to ``True``:: + + web.static('/prefix', path_to_static_folder, follow_symlinks=True) + ++.. caution:: ++ ++ Enabling ``follow_symlinks`` can be a security risk, and may lead to ++ a directory transversal attack. You do NOT need this option to follow symlinks ++ which point to somewhere else within the static directory, this option is only ++ used to break out of the security sandbox. Enabling this option is highly ++ discouraged, and only expected to be used for edge cases in a local ++ development setting where remote users do not have access to the server. ++ + When you want to enable cache busting, + parameter ``append_version`` can be set to ``True`` + +diff --git a/docs/web_reference.rst b/docs/web_reference.rst +index a156f47..b100676 100644 +--- a/docs/web_reference.rst ++++ b/docs/web_reference.rst +@@ -1836,9 +1836,15 @@ Router is any object that implements :class:`~aiohttp.abc.AbstractRouter` interf + by default it's not allowed and HTTP/403 will + be returned on directory access. + +- :param bool follow_symlinks: flag for allowing to follow symlinks from +- a directory, by default it's not allowed and +- HTTP/404 will be returned on access. ++ :param bool follow_symlinks: flag for allowing to follow symlinks that lead ++ outside the static root directory, by default it's not allowed and ++ HTTP/404 will be returned on access. Enabling ``follow_symlinks`` ++ can be a security risk, and may lead to a directory transversal attack. 
++ You do NOT need this option to follow symlinks which point to somewhere ++ else within the static directory, this option is only used to break out ++ of the security sandbox. Enabling this option is highly discouraged, ++ and only expected to be used for edge cases in a local development ++ setting where remote users do not have access to the server. + + :param bool append_version: flag for adding file version (hash) + to the url query string, this value will +diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py +index f24f451..f40f6a5 100644 +--- a/tests/test_web_urldispatcher.py ++++ b/tests/test_web_urldispatcher.py +@@ -123,6 +123,97 @@ async def test_follow_symlink(tmp_dir_path, aiohttp_client) -> None: + assert (await r.text()) == data + + ++async def test_follow_symlink_directory_traversal( ++ tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ++) -> None: ++ # Tests that follow_symlinks does not allow directory transversal ++ data = "private" ++ ++ private_file = tmp_path / "private_file" ++ private_file.write_text(data) ++ ++ safe_path = tmp_path / "safe_dir" ++ safe_path.mkdir() ++ ++ app = web.Application() ++ ++ # Register global static route: ++ app.router.add_static("/", str(safe_path), follow_symlinks=True) ++ client = await aiohttp_client(app) ++ ++ await client.start_server() ++ # We need to use a raw socket to test this, as the client will normalize ++ # the path before sending it to the server. 
++ reader, writer = await asyncio.open_connection(client.host, client.port) ++ writer.write(b"GET /../private_file HTTP/1.1\r\n\r\n") ++ response = await reader.readuntil(b"\r\n\r\n") ++ assert b"404 Not Found" in response ++ writer.close() ++ await writer.wait_closed() ++ await client.close() ++ ++ ++async def test_follow_symlink_directory_traversal_after_normalization( ++ tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ++) -> None: ++ # Tests that follow_symlinks does not allow directory transversal ++ # after normalization ++ # ++ # Directory structure ++ # |-- secret_dir ++ # | |-- private_file (should never be accessible) ++ # | |-- symlink_target_dir ++ # | |-- symlink_target_file (should be accessible via the my_symlink symlink) ++ # | |-- sandbox_dir ++ # | |-- my_symlink -> symlink_target_dir ++ # ++ secret_path = tmp_path / "secret_dir" ++ secret_path.mkdir() ++ ++ # This file is below the symlink target and should not be reachable ++ private_file = secret_path / "private_file" ++ private_file.write_text("private") ++ ++ symlink_target_path = secret_path / "symlink_target_dir" ++ symlink_target_path.mkdir() ++ ++ sandbox_path = symlink_target_path / "sandbox_dir" ++ sandbox_path.mkdir() ++ ++ # This file should be reachable via the symlink ++ symlink_target_file = symlink_target_path / "symlink_target_file" ++ symlink_target_file.write_text("readable") ++ ++ my_symlink_path = sandbox_path / "my_symlink" ++ pathlib.Path(str(my_symlink_path)).symlink_to(str(symlink_target_path), True) ++ ++ app = web.Application() ++ ++ # Register global static route: ++ app.router.add_static("/", str(sandbox_path), follow_symlinks=True) ++ client = await aiohttp_client(app) ++ ++ await client.start_server() ++ # We need to use a raw socket to test this, as the client will normalize ++ # the path before sending it to the server. 
++ reader, writer = await asyncio.open_connection(client.host, client.port) ++ writer.write(b"GET /my_symlink/../private_file HTTP/1.1\r\n\r\n") ++ response = await reader.readuntil(b"\r\n\r\n") ++ assert b"404 Not Found" in response ++ writer.close() ++ await writer.wait_closed() ++ ++ reader, writer = await asyncio.open_connection(client.host, client.port) ++ writer.write(b"GET /my_symlink/symlink_target_file HTTP/1.1\r\n\r\n") ++ response = await reader.readuntil(b"\r\n\r\n") ++ assert b"200 OK" in response ++ response = await reader.readuntil(b"readable") ++ assert response == b"readable" ++ writer.close() ++ await writer.wait_closed() ++ await client.close() ++ ++ + @pytest.mark.parametrize( + "dir_name,filename,data", + [ +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-aiohttp_3.8.1.bb b/meta-python/recipes-devtools/python/python3-aiohttp_3.8.6.bb index f2b8d52a72..c805e17d86 100644 --- a/meta-python/recipes-devtools/python/python3-aiohttp_3.8.1.bb +++ b/meta-python/recipes-devtools/python/python3-aiohttp_3.8.6.bb @@ -2,9 +2,12 @@ SUMMARY = "Async http client/server framework" DESCRIPTION = "Asynchronous HTTP client/server framework for asyncio and Python" HOMEPAGE = "https://github.com/aio-libs/aiohttp" LICENSE = "Apache-2.0" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=8074d6c6e217873b2a018a4522243ea3" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=748073912af33aa59430d3702aa32d41" -SRC_URI[sha256sum] = "fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578" +SRC_URI += "file://CVE-2024-23334.patch \ + " + +SRC_URI[sha256sum] = "b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c" PYPI_PACKAGE = "aiohttp" inherit python_setuptools_build_meta pypi diff --git a/meta-python/recipes-devtools/python/python3-autobahn_22.3.2.bb b/meta-python/recipes-devtools/python/python3-autobahn_22.3.2.bb index 78514a412f..afb798bd71 100644 --- a/meta-python/recipes-devtools/python/python3-autobahn_22.3.2.bb +++ 
b/meta-python/recipes-devtools/python/python3-autobahn_22.3.2.bb @@ -19,5 +19,3 @@ RDEPENDS:${PN} += " \ ${PYTHON_PN}-txaio \ ${PYTHON_PN}-six \ " - -BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-can_4.0.0.bb b/meta-python/recipes-devtools/python/python3-can_4.0.0.bb index 2cd2e624b9..79aa3e19ec 100644 --- a/meta-python/recipes-devtools/python/python3-can_4.0.0.bb +++ b/meta-python/recipes-devtools/python/python3-can_4.0.0.bb @@ -11,16 +11,19 @@ inherit pypi setuptools3 RDEPENDS:${PN}:class-target += "\ ${PYTHON_PN}-aenum \ - ${PYTHON_PN}-ctypes \ ${PYTHON_PN}-codecs \ ${PYTHON_PN}-compression \ + ${PYTHON_PN}-ctypes \ ${PYTHON_PN}-fcntl \ ${PYTHON_PN}-logging \ ${PYTHON_PN}-misc \ ${PYTHON_PN}-netserver \ + ${PYTHON_PN}-packaging \ + ${PYTHON_PN}-pkg-resources \ + ${PYTHON_PN}-setuptools \ ${PYTHON_PN}-sqlite3 \ + ${PYTHON_PN}-typing-extensions \ ${PYTHON_PN}-wrapt \ - ${PYTHON_PN}-pkg-resources \ " BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2023-31047.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2023-31047.patch new file mode 100644 index 0000000000..ab29a2ed97 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2023-31047.patch @@ -0,0 +1,352 @@ +From fd3215dec5d50aa1f09cb1f8eba193524e7379f3 Mon Sep 17 00:00:00 2001 +From: Mariusz Felisiak <felisiak.mariusz@gmail.com> +Date: Thu, 25 May 2023 14:49:15 +0000 +Subject: [PATCH] Fixed CVE-2023-31047, Fixed #31710 + +-- Prevented potential bypass of validation when uploading multiple files using one form field. + +Thanks Moataz Al-Sharida and nawaik for reports. 
+ +Co-authored-by: Shai Berger <shai@platonix.com> +Co-authored-by: nessita <124304+nessita@users.noreply.github.com> + +CVE: CVE-2023-31047 + +Upstream-Status: Backport [https://github.com/django/django/commit/fb4c55d9ec4bb812a7fb91fa20510d91645e411b] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + django/forms/widgets.py | 26 ++++++- + docs/releases/2.2.28.txt | 18 +++++ + docs/topics/http/file-uploads.txt | 65 ++++++++++++++++-- + .../forms_tests/field_tests/test_filefield.py | 68 ++++++++++++++++++- + .../widget_tests/test_clearablefileinput.py | 5 ++ + .../widget_tests/test_fileinput.py | 44 ++++++++++++ + 6 files changed, 218 insertions(+), 8 deletions(-) + +diff --git a/django/forms/widgets.py b/django/forms/widgets.py +index e37036c..d0cc131 100644 +--- a/django/forms/widgets.py ++++ b/django/forms/widgets.py +@@ -372,17 +372,41 @@ class MultipleHiddenInput(HiddenInput): + + + class FileInput(Input): ++ allow_multiple_selected = False + input_type = 'file' + needs_multipart_form = True + template_name = 'django/forms/widgets/file.html' + ++ def __init__(self, attrs=None): ++ if ( ++ attrs is not None ++ and not self.allow_multiple_selected ++ and attrs.get("multiple", False) ++ ): ++ raise ValueError( ++ "%s doesn't support uploading multiple files." 
++ % self.__class__.__qualname__ ++ ) ++ if self.allow_multiple_selected: ++ if attrs is None: ++ attrs = {"multiple": True} ++ else: ++ attrs.setdefault("multiple", True) ++ super().__init__(attrs) ++ + def format_value(self, value): + """File input never renders a value.""" + return + + def value_from_datadict(self, data, files, name): + "File widgets take data from FILES, not POST" +- return files.get(name) ++ getter = files.get ++ if self.allow_multiple_selected: ++ try: ++ getter = files.getlist ++ except AttributeError: ++ pass ++ return getter(name) + + def value_omitted_from_data(self, data, files, name): + return name not in files +diff --git a/docs/releases/2.2.28.txt b/docs/releases/2.2.28.txt +index 43270fc..854c6b0 100644 +--- a/docs/releases/2.2.28.txt ++++ b/docs/releases/2.2.28.txt +@@ -20,3 +20,21 @@ CVE-2022-28347: Potential SQL injection via ``QuerySet.explain(**options)`` on P + :meth:`.QuerySet.explain` method was subject to SQL injection in option names, + using a suitably crafted dictionary, with dictionary expansion, as the + ``**options`` argument. ++ ++Backporting the CVE-2023-31047 fix on Django 2.2.28. ++ ++CVE-2023-31047: Potential bypass of validation when uploading multiple files using one form field ++================================================================================================= ++ ++Uploading multiple files using one form field has never been supported by ++:class:`.forms.FileField` or :class:`.forms.ImageField` as only the last ++uploaded file was validated. Unfortunately, :ref:`uploading_multiple_files` ++topic suggested otherwise. ++ ++In order to avoid the vulnerability, :class:`~django.forms.ClearableFileInput` ++and :class:`~django.forms.FileInput` form widgets now raise ``ValueError`` when ++the ``multiple`` HTML attribute is set on them. To prevent the exception and ++keep the old behavior, set ``allow_multiple_selected`` to ``True``. 
++ ++For more details on using the new attribute and handling of multiple files ++through a single field, see :ref:`uploading_multiple_files`. +diff --git a/docs/topics/http/file-uploads.txt b/docs/topics/http/file-uploads.txt +index 21a6f06..c1ffb80 100644 +--- a/docs/topics/http/file-uploads.txt ++++ b/docs/topics/http/file-uploads.txt +@@ -127,19 +127,54 @@ field in the model:: + form = UploadFileForm() + return render(request, 'upload.html', {'form': form}) + ++.. _uploading_multiple_files: ++ + Uploading multiple files + ------------------------ + +-If you want to upload multiple files using one form field, set the ``multiple`` +-HTML attribute of field's widget: ++.. ++ Tests in tests.forms_tests.field_tests.test_filefield.MultipleFileFieldTest ++ should be updated after any changes in the following snippets. ++ ++If you want to upload multiple files using one form field, create a subclass ++of the field's widget and set the ``allow_multiple_selected`` attribute on it ++to ``True``. ++ ++In order for such files to be all validated by your form (and have the value of ++the field include them all), you will also have to subclass ``FileField``. See ++below for an example. ++ ++.. admonition:: Multiple file field ++ ++ Django is likely to have a proper multiple file field support at some point ++ in the future. + + .. 
code-block:: python + :caption: forms.py + + from django import forms + ++ ++ class MultipleFileInput(forms.ClearableFileInput): ++ allow_multiple_selected = True ++ ++ ++ class MultipleFileField(forms.FileField): ++ def __init__(self, *args, **kwargs): ++ kwargs.setdefault("widget", MultipleFileInput()) ++ super().__init__(*args, **kwargs) ++ ++ def clean(self, data, initial=None): ++ single_file_clean = super().clean ++ if isinstance(data, (list, tuple)): ++ result = [single_file_clean(d, initial) for d in data] ++ else: ++ result = single_file_clean(data, initial) ++ return result ++ ++ + class FileFieldForm(forms.Form): +- file_field = forms.FileField(widget=forms.ClearableFileInput(attrs={'multiple': True})) ++ file_field = MultipleFileField() + + Then override the ``post`` method of your + :class:`~django.views.generic.edit.FormView` subclass to handle multiple file +@@ -159,14 +194,32 @@ uploads: + def post(self, request, *args, **kwargs): + form_class = self.get_form_class() + form = self.get_form(form_class) +- files = request.FILES.getlist('file_field') + if form.is_valid(): +- for f in files: +- ... # Do something with each file. + return self.form_valid(form) + else: + return self.form_invalid(form) + ++ def form_valid(self, form): ++ files = form.cleaned_data["file_field"] ++ for f in files: ++ ... # Do something with each file. ++ return super().form_valid() ++ ++.. warning:: ++ ++ This will allow you to handle multiple files at the form level only. Be ++ aware that you cannot use it to put multiple files on a single model ++ instance (in a single field), for example, even if the custom widget is used ++ with a form field related to a model ``FileField``. ++ ++.. backportedfix:: 2.2.28 ++ ++ In previous versions, there was no support for the ``allow_multiple_selected`` ++ class attribute, and users were advised to create the widget with the HTML ++ attribute ``multiple`` set through the ``attrs`` argument. 
However, this ++ caused validation of the form field to be applied only to the last file ++ submitted, which could have adverse security implications. ++ + Upload Handlers + =============== + +diff --git a/tests/forms_tests/field_tests/test_filefield.py b/tests/forms_tests/field_tests/test_filefield.py +index 3357444..ba559ee 100644 +--- a/tests/forms_tests/field_tests/test_filefield.py ++++ b/tests/forms_tests/field_tests/test_filefield.py +@@ -1,7 +1,8 @@ + import pickle + + from django.core.files.uploadedfile import SimpleUploadedFile +-from django.forms import FileField, ValidationError ++from django.core.validators import validate_image_file_extension ++from django.forms import FileField, FileInput, ValidationError + from django.test import SimpleTestCase + + +@@ -82,3 +83,68 @@ class FileFieldTest(SimpleTestCase): + + def test_file_picklable(self): + self.assertIsInstance(pickle.loads(pickle.dumps(FileField())), FileField) ++ ++ ++class MultipleFileInput(FileInput): ++ allow_multiple_selected = True ++ ++ ++class MultipleFileField(FileField): ++ def __init__(self, *args, **kwargs): ++ kwargs.setdefault("widget", MultipleFileInput()) ++ super().__init__(*args, **kwargs) ++ ++ def clean(self, data, initial=None): ++ single_file_clean = super().clean ++ if isinstance(data, (list, tuple)): ++ result = [single_file_clean(d, initial) for d in data] ++ else: ++ result = single_file_clean(data, initial) ++ return result ++ ++ ++class MultipleFileFieldTest(SimpleTestCase): ++ def test_file_multiple(self): ++ f = MultipleFileField() ++ files = [ ++ SimpleUploadedFile("name1", b"Content 1"), ++ SimpleUploadedFile("name2", b"Content 2"), ++ ] ++ self.assertEqual(f.clean(files), files) ++ ++ def test_file_multiple_empty(self): ++ f = MultipleFileField() ++ files = [ ++ SimpleUploadedFile("empty", b""), ++ SimpleUploadedFile("nonempty", b"Some Content"), ++ ] ++ msg = "'The submitted file is empty.'" ++ with self.assertRaisesMessage(ValidationError, msg): ++ f.clean(files) 
++ with self.assertRaisesMessage(ValidationError, msg): ++ f.clean(files[::-1]) ++ ++ def test_file_multiple_validation(self): ++ f = MultipleFileField(validators=[validate_image_file_extension]) ++ ++ good_files = [ ++ SimpleUploadedFile("image1.jpg", b"fake JPEG"), ++ SimpleUploadedFile("image2.png", b"faux image"), ++ SimpleUploadedFile("image3.bmp", b"fraudulent bitmap"), ++ ] ++ self.assertEqual(f.clean(good_files), good_files) ++ ++ evil_files = [ ++ SimpleUploadedFile("image1.sh", b"#!/bin/bash -c 'echo pwned!'\n"), ++ SimpleUploadedFile("image2.png", b"faux image"), ++ SimpleUploadedFile("image3.jpg", b"fake JPEG"), ++ ] ++ ++ evil_rotations = ( ++ evil_files[i:] + evil_files[:i] # Rotate by i. ++ for i in range(len(evil_files)) ++ ) ++ msg = "File extension “sh” is not allowed. Allowed extensions are: " ++ for rotated_evil_files in evil_rotations: ++ with self.assertRaisesMessage(ValidationError, msg): ++ f.clean(rotated_evil_files) +diff --git a/tests/forms_tests/widget_tests/test_clearablefileinput.py b/tests/forms_tests/widget_tests/test_clearablefileinput.py +index 2ba376d..8d9e38a 100644 +--- a/tests/forms_tests/widget_tests/test_clearablefileinput.py ++++ b/tests/forms_tests/widget_tests/test_clearablefileinput.py +@@ -161,3 +161,8 @@ class ClearableFileInputTest(WidgetTest): + self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True) + self.assertIs(widget.value_omitted_from_data({}, {'field': 'x'}, 'field'), False) + self.assertIs(widget.value_omitted_from_data({'field-clear': 'y'}, {}, 'field'), False) ++ ++ def test_multiple_error(self): ++ msg = "ClearableFileInput doesn't support uploading multiple files." 
++ with self.assertRaisesMessage(ValueError, msg): ++ ClearableFileInput(attrs={"multiple": True}) +diff --git a/tests/forms_tests/widget_tests/test_fileinput.py b/tests/forms_tests/widget_tests/test_fileinput.py +index bbd7c7f..24daf5d 100644 +--- a/tests/forms_tests/widget_tests/test_fileinput.py ++++ b/tests/forms_tests/widget_tests/test_fileinput.py +@@ -1,4 +1,6 @@ ++from django.core.files.uploadedfile import SimpleUploadedFile + from django.forms import FileInput ++from django.utils.datastructures import MultiValueDict + + from .base import WidgetTest + +@@ -18,3 +20,45 @@ class FileInputTest(WidgetTest): + def test_value_omitted_from_data(self): + self.assertIs(self.widget.value_omitted_from_data({}, {}, 'field'), True) + self.assertIs(self.widget.value_omitted_from_data({}, {'field': 'value'}, 'field'), False) ++ ++ def test_multiple_error(self): ++ msg = "FileInput doesn't support uploading multiple files." ++ with self.assertRaisesMessage(ValueError, msg): ++ FileInput(attrs={"multiple": True}) ++ ++ def test_value_from_datadict_multiple(self): ++ class MultipleFileInput(FileInput): ++ allow_multiple_selected = True ++ ++ file_1 = SimpleUploadedFile("something1.txt", b"content 1") ++ file_2 = SimpleUploadedFile("something2.txt", b"content 2") ++ # Uploading multiple files is allowed. ++ widget = MultipleFileInput(attrs={"multiple": True}) ++ value = widget.value_from_datadict( ++ data={"name": "Test name"}, ++ files=MultiValueDict({"myfile": [file_1, file_2]}), ++ name="myfile", ++ ) ++ self.assertEqual(value, [file_1, file_2]) ++ # Uploading multiple files is not allowed. 
++ widget = FileInput() ++ value = widget.value_from_datadict( ++ data={"name": "Test name"}, ++ files=MultiValueDict({"myfile": [file_1, file_2]}), ++ name="myfile", ++ ) ++ self.assertEqual(value, file_2) ++ ++ def test_multiple_default(self): ++ class MultipleFileInput(FileInput): ++ allow_multiple_selected = True ++ ++ tests = [ ++ (None, True), ++ ({"class": "myclass"}, True), ++ ({"multiple": False}, False), ++ ] ++ for attrs, expected in tests: ++ with self.subTest(attrs=attrs): ++ widget = MultipleFileInput(attrs=attrs) ++ self.assertIs(widget.attrs["multiple"], expected) +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2023-36053.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2023-36053.patch new file mode 100644 index 0000000000..2ad38d8e95 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2023-36053.patch @@ -0,0 +1,263 @@ +From a0b2eeeb7350d0c3a9b9be191783ff15daeffec5 Mon Sep 17 00:00:00 2001 +From: Mariusz Felisiak <felisiak.mariusz@gmail.com> +Date: Thu, 27 Jul 2023 14:51:48 +0000 +Subject: [PATCH] Fixed CVE-2023-36053 + +-- Prevented potential ReDoS in EmailValidator and URLValidator. + +Thanks Seokchan Yoon for reports. 
+ +CVE: CVE-2023-36053 + +Upstream-Status: Backport [https://github.com/django/django/commit/454f2fb93437f98917283336201b4048293f7582] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + django/core/validators.py | 9 +++++++-- + django/forms/fields.py | 3 +++ + docs/ref/forms/fields.txt | 4 ++++ + docs/ref/validators.txt | 19 ++++++++++++++++++- + docs/releases/2.2.28.txt | 9 +++++++++ + .../field_tests/test_emailfield.py | 5 ++++- + tests/forms_tests/tests/test_forms.py | 19 +++++++++++++------ + tests/validators/tests.py | 11 +++++++++++ + 8 files changed, 69 insertions(+), 10 deletions(-) + +diff --git a/django/core/validators.py b/django/core/validators.py +index 2da0688..2dbd3bf 100644 +--- a/django/core/validators.py ++++ b/django/core/validators.py +@@ -102,6 +102,7 @@ class URLValidator(RegexValidator): + message = _('Enter a valid URL.') + schemes = ['http', 'https', 'ftp', 'ftps'] + unsafe_chars = frozenset('\t\r\n') ++ max_length = 2048 + + def __init__(self, schemes=None, **kwargs): + super().__init__(**kwargs) +@@ -109,7 +110,9 @@ class URLValidator(RegexValidator): + self.schemes = schemes + + def __call__(self, value): +- if isinstance(value, str) and self.unsafe_chars.intersection(value): ++ if not isinstance(value, str) or len(value) > self.max_length: ++ raise ValidationError(self.message, code=self.code) ++ if self.unsafe_chars.intersection(value): + raise ValidationError(self.message, code=self.code) + # Check if the scheme is valid. + scheme = value.split('://')[0].lower() +@@ -190,7 +193,9 @@ class EmailValidator: + self.domain_whitelist = whitelist + + def __call__(self, value): +- if not value or '@' not in value: ++ # The maximum length of an email is 320 characters per RFC 3696 ++ # section 3. 
++ if not value or '@' not in value or len(value) > 320: + raise ValidationError(self.message, code=self.code) + + user_part, domain_part = value.rsplit('@', 1) +diff --git a/django/forms/fields.py b/django/forms/fields.py +index a977256..f939338 100644 +--- a/django/forms/fields.py ++++ b/django/forms/fields.py +@@ -542,6 +542,9 @@ class FileField(Field): + def __init__(self, *, max_length=None, allow_empty_file=False, **kwargs): + self.max_length = max_length + self.allow_empty_file = allow_empty_file ++ # The default maximum length of an email is 320 characters per RFC 3696 ++ # section 3. ++ kwargs.setdefault("max_length", 320) + super().__init__(**kwargs) + + def to_python(self, data): +diff --git a/docs/ref/forms/fields.txt b/docs/ref/forms/fields.txt +index 6f76d0d..3a888ef 100644 +--- a/docs/ref/forms/fields.txt ++++ b/docs/ref/forms/fields.txt +@@ -592,6 +592,10 @@ For each field, we describe the default widget used if you don't specify + Has two optional arguments for validation, ``max_length`` and ``min_length``. + If provided, these arguments ensure that the string is at most or at least the + given length. ++ ``empty_value`` which work just as they do for :class:`CharField`. The ++ ``max_length`` argument defaults to 320 (see :rfc:`3696#section-3`). ++ ++ The default value for ``max_length`` was changed to 320 characters. + + ``FileField`` + ------------- +diff --git a/docs/ref/validators.txt b/docs/ref/validators.txt +index 75d1394..4178a1f 100644 +--- a/docs/ref/validators.txt ++++ b/docs/ref/validators.txt +@@ -125,6 +125,11 @@ to, or in lieu of custom ``field.clean()`` methods. + :param code: If not ``None``, overrides :attr:`code`. + :param whitelist: If not ``None``, overrides :attr:`whitelist`. + ++ An :class:`EmailValidator` ensures that a value looks like an email, and ++ raises a :exc:`~django.core.exceptions.ValidationError` with ++ :attr:`message` and :attr:`code` if it doesn't. 
Values longer than 320 ++ characters are always considered invalid. ++ + .. attribute:: message + + The error message used by +@@ -145,13 +150,17 @@ to, or in lieu of custom ``field.clean()`` methods. + ``['localhost']``. Other domains that don't contain a dot won't pass + validation, so you'd need to whitelist them as necessary. + ++ In older versions, values longer than 320 characters could be ++ considered valid. ++ + ``URLValidator`` + ---------------- + + .. class:: URLValidator(schemes=None, regex=None, message=None, code=None) + + A :class:`RegexValidator` that ensures a value looks like a URL, and raises +- an error code of ``'invalid'`` if it doesn't. ++ an error code of ``'invalid'`` if it doesn't. Values longer than ++ :attr:`max_length` characters are always considered invalid. + + Loopback addresses and reserved IP spaces are considered valid. Literal + IPv6 addresses (:rfc:`3986#section-3.2.2`) and unicode domains are both +@@ -168,6 +177,14 @@ to, or in lieu of custom ``field.clean()`` methods. + + .. _valid URI schemes: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml + ++ .. attribute:: max_length ++ ++ The maximum length of values that could be considered valid. Defaults ++ to 2048 characters. ++ ++ In older versions, values longer than 2048 characters could be ++ considered valid. ++ + ``validate_email`` + ------------------ + +diff --git a/docs/releases/2.2.28.txt b/docs/releases/2.2.28.txt +index 854c6b0..ab4884b 100644 +--- a/docs/releases/2.2.28.txt ++++ b/docs/releases/2.2.28.txt +@@ -38,3 +38,12 @@ keep the old behavior, set ``allow_multiple_selected`` to ``True``. + + For more details on using the new attribute and handling of multiple files + through a single field, see :ref:`uploading_multiple_files`. ++ ++Backporting the CVE-2023-36053 fix on Django 2.2.28. 
++ ++CVE-2023-36053: Potential regular expression denial of service vulnerability in ``EmailValidator``/``URLValidator`` ++=================================================================================================================== ++ ++``EmailValidator`` and ``URLValidator`` were subject to potential regular ++expression denial of service attack via a very large number of domain name ++labels of emails and URLs. +diff --git a/tests/forms_tests/field_tests/test_emailfield.py b/tests/forms_tests/field_tests/test_emailfield.py +index 826524a..fe5b644 100644 +--- a/tests/forms_tests/field_tests/test_emailfield.py ++++ b/tests/forms_tests/field_tests/test_emailfield.py +@@ -8,7 +8,10 @@ class EmailFieldTest(FormFieldAssertionsMixin, SimpleTestCase): + + def test_emailfield_1(self): + f = EmailField() +- self.assertWidgetRendersTo(f, '<input type="email" name="f" id="id_f" required>') ++ self.assertEqual(f.max_length, 320) ++ self.assertWidgetRendersTo( ++ f, '<input type="email" name="f" id="id_f" maxlength="320" required>' ++ ) + with self.assertRaisesMessage(ValidationError, "'This field is required.'"): + f.clean('') + with self.assertRaisesMessage(ValidationError, "'This field is required.'"): +diff --git a/tests/forms_tests/tests/test_forms.py b/tests/forms_tests/tests/test_forms.py +index d4e421d..8893f89 100644 +--- a/tests/forms_tests/tests/test_forms.py ++++ b/tests/forms_tests/tests/test_forms.py +@@ -422,11 +422,18 @@ class FormsTestCase(SimpleTestCase): + get_spam = BooleanField() + + f = SignupForm(auto_id=False) +- self.assertHTMLEqual(str(f['email']), '<input type="email" name="email" required>') ++ self.assertHTMLEqual( ++ str(f["email"]), ++ '<input type="email" name="email" maxlength="320" required>', ++ ) + self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" required>') + + f = SignupForm({'email': 'test@example.com', 'get_spam': True}, auto_id=False) +- self.assertHTMLEqual(str(f['email']), '<input type="email" 
name="email" value="test@example.com" required>') ++ self.assertHTMLEqual( ++ str(f["email"]), ++ '<input type="email" name="email" maxlength="320" value="test@example.com" ' ++ "required>", ++ ) + self.assertHTMLEqual( + str(f['get_spam']), + '<input checked type="checkbox" name="get_spam" required>', +@@ -2780,7 +2787,7 @@ Good luck picking a username that doesn't already exist.</p> + <option value="true">Yes</option> + <option value="false">No</option> + </select></li> +-<li><label for="id_email">Email:</label> <input type="email" name="email" id="id_email"></li> ++<li><label for="id_email">Email:</label> <input type="email" name="email" id="id_email" maxlength="320"></li> + <li class="required error"><ul class="errorlist"><li>This field is required.</li></ul> + <label class="required" for="id_age">Age:</label> <input type="number" name="age" id="id_age" required></li>""" + ) +@@ -2796,7 +2803,7 @@ Good luck picking a username that doesn't already exist.</p> + <option value="true">Yes</option> + <option value="false">No</option> + </select></p> +-<p><label for="id_email">Email:</label> <input type="email" name="email" id="id_email"></p> ++<p><label for="id_email">Email:</label> <input type="email" name="email" id="id_email" maxlength="320"></p> + <ul class="errorlist"><li>This field is required.</li></ul> + <p class="required error"><label class="required" for="id_age">Age:</label> + <input type="number" name="age" id="id_age" required></p>""" +@@ -2815,7 +2822,7 @@ Good luck picking a username that doesn't already exist.</p> + <option value="false">No</option> + </select></td></tr> + <tr><th><label for="id_email">Email:</label></th><td> +-<input type="email" name="email" id="id_email"></td></tr> ++<input type="email" name="email" id="id_email" maxlength="320"></td></tr> + <tr class="required error"><th><label class="required" for="id_age">Age:</label></th> + <td><ul class="errorlist"><li>This field is required.</li></ul> + <input type="number" name="age" 
id="id_age" required></td></tr>""" +@@ -3428,7 +3435,7 @@ Good luck picking a username that doesn't already exist.</p> + f = CommentForm(data, auto_id=False, error_class=DivErrorList) + self.assertHTMLEqual(f.as_p(), """<p>Name: <input type="text" name="name" maxlength="50"></p> + <div class="errorlist"><div class="error">Enter a valid email address.</div></div> +-<p>Email: <input type="email" name="email" value="invalid" required></p> ++<p>Email: <input type="email" name="email" value="invalid" maxlength="320" required></p> + <div class="errorlist"><div class="error">This field is required.</div></div> + <p>Comment: <input type="text" name="comment" required></p>""") + +diff --git a/tests/validators/tests.py b/tests/validators/tests.py +index 1f09fb5..8204f00 100644 +--- a/tests/validators/tests.py ++++ b/tests/validators/tests.py +@@ -58,6 +58,7 @@ TEST_DATA = [ + + (validate_email, 'example@atm.%s' % ('a' * 64), ValidationError), + (validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError), ++ (validate_email, "example@%scom" % (("a" * 63 + ".") * 100), ValidationError), + (validate_email, None, ValidationError), + (validate_email, '', ValidationError), + (validate_email, 'abc', ValidationError), +@@ -242,6 +243,16 @@ TEST_DATA = [ + (URLValidator(EXTENDED_SCHEMES), 'git+ssh://git@github.com/example/hg-git.git', None), + + (URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError), ++ ( ++ URLValidator(), ++ "http://example." + ("a" * 63 + ".") * 1000 + "com", ++ ValidationError, ++ ), ++ ( ++ URLValidator(), ++ "http://userid:password" + "d" * 2000 + "@example.aaaaaaaaaaaaa.com", ++ None, ++ ), + # Newlines and tabs are not accepted. 
+ (URLValidator(), 'http://www.djangoproject.com/\n', ValidationError), + (URLValidator(), 'http://[::ffff:192.9.5.5]\n', ValidationError), +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2023-41164.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2023-41164.patch new file mode 100644 index 0000000000..9bc38b0cca --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2023-41164.patch @@ -0,0 +1,105 @@ +From 9c95e8fec62153f8dfcc45a70b8a68d74333a66f Mon Sep 17 00:00:00 2001 +From: Mariusz Felisiak <felisiak.mariusz@gmail.com> +Date: Tue, 26 Sep 2023 10:23:30 +0000 +Subject: [PATCH] Fixed CVE-2023-41164 -- Fixed potential DoS in + django.utils.encoding.uri_to_iri(). + +Thanks MProgrammer (https://hackerone.com/mprogrammer) for the report. + +Co-authored-by: nessita <124304+nessita@users.noreply.github.com> + +CVE: CVE-2023-41164 + +Upstream-Status: Backport [https://github.com/django/django/commit/3f41d6d62929dfe53eda8109b3b836f26645bdce] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + django/utils/encoding.py | 6 ++++-- + docs/releases/2.2.28.txt | 9 +++++++++ + tests/utils_tests/test_encoding.py | 21 ++++++++++++++++++++- + 3 files changed, 33 insertions(+), 3 deletions(-) + +diff --git a/django/utils/encoding.py b/django/utils/encoding.py +index 98da647..3769702 100644 +--- a/django/utils/encoding.py ++++ b/django/utils/encoding.py +@@ -225,6 +225,7 @@ def repercent_broken_unicode(path): + repercent-encode any octet produced that is not part of a strictly legal + UTF-8 octet sequence. 
+ """ ++ changed_parts = [] + while True: + try: + path.decode() +@@ -232,9 +233,10 @@ def repercent_broken_unicode(path): + # CVE-2019-14235: A recursion shouldn't be used since the exception + # handling uses massive amounts of memory + repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~") +- path = path[:e.start] + force_bytes(repercent) + path[e.end:] ++ changed_parts.append(path[: e.start] + repercent.encode()) ++ path = path[e.end :] + else: +- return path ++ return b"".join(changed_parts) + path + + + def filepath_to_uri(path): +diff --git a/docs/releases/2.2.28.txt b/docs/releases/2.2.28.txt +index ab4884b..40eb230 100644 +--- a/docs/releases/2.2.28.txt ++++ b/docs/releases/2.2.28.txt +@@ -47,3 +47,12 @@ CVE-2023-36053: Potential regular expression denial of service vulnerability in + ``EmailValidator`` and ``URLValidator`` were subject to potential regular + expression denial of service attack via a very large number of domain name + labels of emails and URLs. ++ ++Backporting the CVE-2023-41164 fix on Django 2.2.28. ++ ++CVE-2023-41164: Potential denial of service vulnerability in ``django.utils.encoding.uri_to_iri()`` ++=================================================================================================== ++ ++``django.utils.encoding.uri_to_iri()`` was subject to potential denial of ++service attack via certain inputs with a very large number of Unicode ++characters. 
+diff --git a/tests/utils_tests/test_encoding.py b/tests/utils_tests/test_encoding.py +index ea7ba5f..93a3162 100644 +--- a/tests/utils_tests/test_encoding.py ++++ b/tests/utils_tests/test_encoding.py +@@ -1,8 +1,9 @@ + import datetime ++import inspect + import sys + import unittest + from unittest import mock +-from urllib.parse import quote_plus ++from urllib.parse import quote, quote_plus + + from django.test import SimpleTestCase + from django.utils.encoding import ( +@@ -100,6 +101,24 @@ class TestEncodingUtils(SimpleTestCase): + except RecursionError: + self.fail('Unexpected RecursionError raised.') + ++ def test_repercent_broken_unicode_small_fragments(self): ++ data = b"test\xfctest\xfctest\xfc" ++ decoded_paths = [] ++ ++ def mock_quote(*args, **kwargs): ++ # The second frame is the call to repercent_broken_unicode(). ++ decoded_paths.append(inspect.currentframe().f_back.f_locals["path"]) ++ return quote(*args, **kwargs) ++ ++ with mock.patch("django.utils.encoding.quote", mock_quote): ++ self.assertEqual(repercent_broken_unicode(data), b"test%FCtest%FCtest%FC") ++ ++ # decode() is called on smaller fragment of the path each time. ++ self.assertEqual( ++ decoded_paths, ++ [b"test\xfctest\xfctest\xfc", b"test\xfctest\xfc", b"test\xfc"], ++ ) ++ + + class TestRFC3987IEncodingUtils(unittest.TestCase): + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2023-43665.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2023-43665.patch new file mode 100644 index 0000000000..dbfb9b68a8 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2023-43665.patch @@ -0,0 +1,199 @@ +From b269a0063e9b10a6c88c92b24d1b92c7421950de Mon Sep 17 00:00:00 2001 +From: Natalia <124304+nessita@users.noreply.github.com> +Date: Wed, 29 Nov 2023 12:20:01 +0000 +Subject: [PATCH 1/2] Fixed CVE-2023-43665 -- Mitigated potential DoS in + django.utils.text.Truncator when truncating HTML text. 
+ +Thanks Wenchao Li of Alibaba Group for the report. + +CVE: CVE-2023-43665 + +Upstream-Status: Backport [https://github.com/django/django/commit/ccdade1a0262537868d7ca64374de3d957ca50c5] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + django/utils/text.py | 18 ++++++++++++++++- + docs/ref/templates/builtins.txt | 20 +++++++++++++++++++ + docs/releases/2.2.28.txt | 20 +++++++++++++++++++ + tests/utils_tests/test_text.py | 35 ++++++++++++++++++++++++--------- + 4 files changed, 83 insertions(+), 10 deletions(-) + +diff --git a/django/utils/text.py b/django/utils/text.py +index 1fae7b2..06a377b 100644 +--- a/django/utils/text.py ++++ b/django/utils/text.py +@@ -57,7 +57,14 @@ def wrap(text, width): + class Truncator(SimpleLazyObject): + """ + An object used to truncate text, either by characters or words. ++ ++ When truncating HTML text (either chars or words), input will be limited to ++ at most `MAX_LENGTH_HTML` characters. + """ ++ ++ # 5 million characters are approximately 4000 text pages or 3 web pages. 
++ MAX_LENGTH_HTML = 5_000_000 ++ + def __init__(self, text): + super().__init__(lambda: str(text)) + +@@ -154,6 +161,11 @@ class Truncator(SimpleLazyObject): + if words and length <= 0: + return '' + ++ size_limited = False ++ if len(text) > self.MAX_LENGTH_HTML: ++ text = text[: self.MAX_LENGTH_HTML] ++ size_limited = True ++ + html4_singlets = ( + 'br', 'col', 'link', 'base', 'img', + 'param', 'area', 'hr', 'input' +@@ -203,10 +215,14 @@ class Truncator(SimpleLazyObject): + # Add it to the start of the open tags list + open_tags.insert(0, tagname) + ++ truncate_text = self.add_truncation_text("", truncate) ++ + if current_len <= length: ++ if size_limited and truncate_text: ++ text += truncate_text + return text ++ + out = text[:end_text_pos] +- truncate_text = self.add_truncation_text('', truncate) + if truncate_text: + out += truncate_text + # Close any tags still open +diff --git a/docs/ref/templates/builtins.txt b/docs/ref/templates/builtins.txt +index c4b0fa3..4faab38 100644 +--- a/docs/ref/templates/builtins.txt ++++ b/docs/ref/templates/builtins.txt +@@ -2318,6 +2318,16 @@ If ``value`` is ``"<p>Joel is a slug</p>"``, the output will be + + Newlines in the HTML content will be preserved. + ++.. admonition:: Size of input string ++ ++ Processing large, potentially malformed HTML strings can be ++ resource-intensive and impact service performance. ``truncatechars_html`` ++ limits input to the first five million characters. ++ ++.. versionchanged:: 2.2.28 ++ ++ In older versions, strings over five million characters were processed. ++ + .. templatefilter:: truncatewords + + ``truncatewords`` +@@ -2356,6 +2366,16 @@ If ``value`` is ``"<p>Joel is a slug</p>"``, the output will be + + Newlines in the HTML content will be preserved. + ++.. admonition:: Size of input string ++ ++ Processing large, potentially malformed HTML strings can be ++ resource-intensive and impact service performance. 
``truncatewords_html`` ++ limits input to the first five million characters. ++ ++.. versionchanged:: 2.2.28 ++ ++ In older versions, strings over five million characters were processed. ++ + .. templatefilter:: unordered_list + + ``unordered_list`` +diff --git a/docs/releases/2.2.28.txt b/docs/releases/2.2.28.txt +index 40eb230..6a38e9c 100644 +--- a/docs/releases/2.2.28.txt ++++ b/docs/releases/2.2.28.txt +@@ -56,3 +56,23 @@ CVE-2023-41164: Potential denial of service vulnerability in ``django.utils.enco + ``django.utils.encoding.uri_to_iri()`` was subject to potential denial of + service attack via certain inputs with a very large number of Unicode + characters. ++ ++Backporting the CVE-2023-43665 fix on Django 2.2.28. ++ ++CVE-2023-43665: Denial-of-service possibility in ``django.utils.text.Truncator`` ++================================================================================ ++ ++Following the fix for :cve:`2019-14232`, the regular expressions used in the ++implementation of ``django.utils.text.Truncator``'s ``chars()`` and ``words()`` ++methods (with ``html=True``) were revised and improved. However, these regular ++expressions still exhibited linear backtracking complexity, so when given a ++very long, potentially malformed HTML input, the evaluation would still be ++slow, leading to a potential denial of service vulnerability. ++ ++The ``chars()`` and ``words()`` methods are used to implement the ++:tfilter:`truncatechars_html` and :tfilter:`truncatewords_html` template ++filters, which were thus also vulnerable. ++ ++The input processed by ``Truncator``, when operating in HTML mode, has been ++limited to the first five million characters in order to avoid potential ++performance and memory issues. 
+diff --git a/tests/utils_tests/test_text.py b/tests/utils_tests/test_text.py +index 27e440b..cb3063d 100644 +--- a/tests/utils_tests/test_text.py ++++ b/tests/utils_tests/test_text.py +@@ -1,5 +1,6 @@ + import json + import sys ++from unittest.mock import patch + + from django.core.exceptions import SuspiciousFileOperation + from django.test import SimpleTestCase +@@ -87,11 +88,17 @@ class TestUtilsText(SimpleTestCase): + # lazy strings are handled correctly + self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…') + +- def test_truncate_chars_html(self): ++ @patch("django.utils.text.Truncator.MAX_LENGTH_HTML", 10_000) ++ def test_truncate_chars_html_size_limit(self): ++ max_len = text.Truncator.MAX_LENGTH_HTML ++ bigger_len = text.Truncator.MAX_LENGTH_HTML + 1 ++ valid_html = "<p>Joel is a slug</p>" # 14 chars + perf_test_values = [ +- (('</a' + '\t' * 50000) + '//>', None), +- ('&' * 50000, '&' * 9 + '…'), +- ('_X<<<<<<<<<<<>', None), ++ ("</a" + "\t" * (max_len - 6) + "//>", None), ++ ("</p" + "\t" * bigger_len + "//>", "</p" + "\t" * 6 + "…"), ++ ("&" * bigger_len, "&" * 9 + "…"), ++ ("_X<<<<<<<<<<<>", None), ++ (valid_html * bigger_len, "<p>Joel is a…</p>"), # 10 chars + ] + for value, expected in perf_test_values: + with self.subTest(value=value): +@@ -149,15 +156,25 @@ class TestUtilsText(SimpleTestCase): + truncator = text.Truncator('<p>I <3 python, what about you?</p>') + self.assertEqual('<p>I <3 python,…</p>', truncator.words(3, html=True)) + ++ @patch("django.utils.text.Truncator.MAX_LENGTH_HTML", 10_000) ++ def test_truncate_words_html_size_limit(self): ++ max_len = text.Truncator.MAX_LENGTH_HTML ++ bigger_len = text.Truncator.MAX_LENGTH_HTML + 1 ++ valid_html = "<p>Joel is a slug</p>" # 4 words + perf_test_values = [ +- ('</a' + '\t' * 50000) + '//>', +- '&' * 50000, +- '_X<<<<<<<<<<<>', ++ ("</a" + "\t" * (max_len - 6) + "//>", None), ++ ("</p" + "\t" * bigger_len + "//>", "</p" + "\t" * (max_len - 3) + "…"), ++ ("&" 
* max_len, None), # no change ++ ("&" * bigger_len, "&" * max_len + "…"), ++ ("_X<<<<<<<<<<<>", None), ++ (valid_html * bigger_len, valid_html * 12 + "<p>Joel is…</p>"), # 50 words + ] +- for value in perf_test_values: ++ for value, expected in perf_test_values: + with self.subTest(value=value): + truncator = text.Truncator(value) +- self.assertEqual(value, truncator.words(50, html=True)) ++ self.assertEqual( ++ expected if expected else value, truncator.words(50, html=True) ++ ) + + def test_wrap(self): + digits = '1234 67 9' +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2023-46695.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2023-46695.patch new file mode 100644 index 0000000000..b7dda41f8f --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2023-46695.patch @@ -0,0 +1,90 @@ +From 32bc7fa517be1d50239827520cc13f3112d3d748 Mon Sep 17 00:00:00 2001 +From: Mariusz Felisiak <felisiak.mariusz@gmail.com> +Date: Wed, 29 Nov 2023 12:49:41 +0000 +Subject: [PATCH 2/2] Fixed CVE-2023-46695 -- Fixed potential DoS in + UsernameField on Windows. + +Thanks MProgrammer (https://hackerone.com/mprogrammer) for the report. 
+ +CVE: CVE-2023-46695 + +Upstream-Status: Backport [https://github.com/django/django/commit/f9a7fb8466a7ba4857eaf930099b5258f3eafb2b] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + django/contrib/auth/forms.py | 10 +++++++++- + docs/releases/2.2.28.txt | 14 ++++++++++++++ + tests/auth_tests/test_forms.py | 8 +++++++- + 3 files changed, 30 insertions(+), 2 deletions(-) + +diff --git a/django/contrib/auth/forms.py b/django/contrib/auth/forms.py +index e6f73fe..26d3ca7 100644 +--- a/django/contrib/auth/forms.py ++++ b/django/contrib/auth/forms.py +@@ -68,7 +68,15 @@ class ReadOnlyPasswordHashField(forms.Field): + + class UsernameField(forms.CharField): + def to_python(self, value): +- return unicodedata.normalize('NFKC', super().to_python(value)) ++ value = super().to_python(value) ++ if self.max_length is not None and len(value) > self.max_length: ++ # Normalization can increase the string length (e.g. ++ # "ff" -> "ff", "½" -> "1⁄2") but cannot reduce it, so there is no ++ # point in normalizing invalid data. Moreover, Unicode ++ # normalization is very slow on Windows and can be a DoS attack ++ # vector. ++ return value ++ return unicodedata.normalize("NFKC", value) + + + class UserCreationForm(forms.ModelForm): +diff --git a/docs/releases/2.2.28.txt b/docs/releases/2.2.28.txt +index 6a38e9c..c653cb6 100644 +--- a/docs/releases/2.2.28.txt ++++ b/docs/releases/2.2.28.txt +@@ -76,3 +76,17 @@ filters, which were thus also vulnerable. + The input processed by ``Truncator``, when operating in HTML mode, has been + limited to the first five million characters in order to avoid potential + performance and memory issues. ++ ++Backporting the CVE-2023-46695 fix on Django 2.2.28. ++ ++CVE-2023-46695: Potential denial of service vulnerability in ``UsernameField`` on Windows ++========================================================================================= ++ ++The :func:`NFKC normalization <python:unicodedata.normalize>` is slow on ++Windows. 
As a consequence, ``django.contrib.auth.forms.UsernameField`` was ++subject to a potential denial of service attack via certain inputs with a very ++large number of Unicode characters. ++ ++In order to avoid the vulnerability, invalid values longer than ++``UsernameField.max_length`` are no longer normalized, since they cannot pass ++validation anyway. +diff --git a/tests/auth_tests/test_forms.py b/tests/auth_tests/test_forms.py +index bed23af..e73d4b8 100644 +--- a/tests/auth_tests/test_forms.py ++++ b/tests/auth_tests/test_forms.py +@@ -6,7 +6,7 @@ from django import forms + from django.contrib.auth.forms import ( + AdminPasswordChangeForm, AuthenticationForm, PasswordChangeForm, + PasswordResetForm, ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget, +- SetPasswordForm, UserChangeForm, UserCreationForm, ++ SetPasswordForm, UserChangeForm, UserCreationForm, UsernameField, + ) + from django.contrib.auth.models import User + from django.contrib.auth.signals import user_login_failed +@@ -132,6 +132,12 @@ class UserCreationFormTest(TestDataMixin, TestCase): + self.assertNotEqual(user.username, ohm_username) + self.assertEqual(user.username, 'testΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA + ++ def test_invalid_username_no_normalize(self): ++ field = UsernameField(max_length=254) ++ # Usernames are not normalized if they are too long. 
++ self.assertEqual(field.to_python("½" * 255), "½" * 255) ++ self.assertEqual(field.to_python("ff" * 254), "ff" * 254) ++ + def test_duplicate_normalized_unicode(self): + """ + To prevent almost identical usernames, visually identical but differing +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django/CVE-2024-24680.patch b/meta-python/recipes-devtools/python/python3-django/CVE-2024-24680.patch new file mode 100644 index 0000000000..aec67453ae --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-django/CVE-2024-24680.patch @@ -0,0 +1,48 @@ +From 572ea07e84b38ea8de0551f4b4eda685d91d09d2 +From: Adam Johnson <me@adamj.eu> +Date: Mon Jan 22 13:21:13 2024 +0000 +Subject: [PATCH] Fixed CVE-2024-24680 -- Mitigated potential DoS in intcomma + template filter + +Thanks Seokchan Yoon for the report. + +Co-authored-by: Mariusz Felisiak <felisiak.mariusz@gmail.com> +Co-authored-by: Natalia <124304+nessita@users.noreply.github.com> +Co-authored-by: Shai Berger <shai@platonix.com> + +CVE: CVE-2024-24680 + +Upstream-Status: Backport [https://github.com/django/django/commit/572ea07e84b38ea8de0551f4b4eda685d91d09d2] + +Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + django/contrib/humanize/templatetags/humanize.py | 13 +++++++------ + 1 file changed, 7 insertions(+), 6 deletions(-) + +diff --git a/django/contrib/humanize/templatetags/humanize.py b/django/contrib/humanize/templatetags/humanize.py +index 194c7e8..ee22a45 100644 +--- a/django/contrib/humanize/templatetags/humanize.py ++++ b/django/contrib/humanize/templatetags/humanize.py +@@ -71,13 +71,14 @@ def intcomma(value, use_l10n=True): + return intcomma(value, False) + else: + return number_format(value, force_grouping=True) +- orig = str(value) +- new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig) +- if orig == new: +- return new +- else: +- return intcomma(new, use_l10n) + ++ result = str(value) ++ match = re.match(r"-?\d+", result) ++ if match: ++ prefix = 
match[0] ++ prefix_with_commas = re.sub(r"\d{3}", r"\g<0>,", prefix[::-1])[::-1] ++ result = prefix_with_commas + result[len(prefix) :] ++ return result + + # A tuple of standard large number to their converters + intword_converters = ( +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-django_2.2.28.bb b/meta-python/recipes-devtools/python/python3-django_2.2.28.bb index 9ef988176e..cbd2c69c05 100644 --- a/meta-python/recipes-devtools/python/python3-django_2.2.28.bb +++ b/meta-python/recipes-devtools/python/python3-django_2.2.28.bb @@ -5,6 +5,14 @@ UPSTREAM_CHECK_REGEX = "/${PYPI_PACKAGE}/(?P<pver>(2\.2\.\d*)+)/" inherit setuptools3 +SRC_URI += "file://CVE-2023-31047.patch \ + file://CVE-2023-36053.patch \ + file://CVE-2023-41164.patch \ + file://CVE-2023-43665.patch \ + file://CVE-2023-46695.patch \ + file://CVE-2024-24680.patch \ + " + SRC_URI[sha256sum] = "0200b657afbf1bc08003845ddda053c7641b9b24951e52acd51f6abda33a7413" RDEPENDS:${PN} += "\ diff --git a/meta-python/recipes-devtools/python/python3-django_3.2.12.bb b/meta-python/recipes-devtools/python/python3-django_3.2.23.bb index adbc498bdf..beecaa607c 100644 --- a/meta-python/recipes-devtools/python/python3-django_3.2.12.bb +++ b/meta-python/recipes-devtools/python/python3-django_3.2.23.bb @@ -1,7 +1,7 @@ require python-django.inc inherit setuptools3 -SRC_URI[sha256sum] = "9772e6935703e59e993960832d66a614cf0233a1c5123bc6224ecc6ad69e41e2" +SRC_URI[sha256sum] = "82968f3640e29ef4a773af2c28448f5f7a08d001c6ac05b32d02aeee6509508b" RDEPENDS:${PN} += "\ ${PYTHON_PN}-sqlparse \ @@ -9,5 +9,5 @@ RDEPENDS:${PN} += "\ # Set DEFAULT_PREFERENCE so that the LTS version of django is built by # default. 
To build the 3.x branch, -# PREFERRED_VERSION_python3-django = "3.2.2" can be added to local.conf +# PREFERRED_VERSION_python3-django = "3.2.23" can be added to local.conf DEFAULT_PREFERENCE = "-1" diff --git a/meta-python/recipes-devtools/python/python3-django_4.0.2.bb b/meta-python/recipes-devtools/python/python3-django_4.2.10.bb index 690b9809dc..a9f25ac2b3 100644 --- a/meta-python/recipes-devtools/python/python3-django_4.0.2.bb +++ b/meta-python/recipes-devtools/python/python3-django_4.2.10.bb @@ -1,7 +1,7 @@ require python-django.inc inherit setuptools3 -SRC_URI[sha256sum] = "110fb58fb12eca59e072ad59fc42d771cd642dd7a2f2416582aa9da7a8ef954a" +SRC_URI[sha256sum] = "b1260ed381b10a11753c73444408e19869f3241fc45c985cd55a30177c789d13" RDEPENDS:${PN} += "\ ${PYTHON_PN}-sqlparse \ @@ -9,5 +9,5 @@ RDEPENDS:${PN} += "\ # Set DEFAULT_PREFERENCE so that the LTS version of django is built by # default. To build the 4.x branch, -# PREFERRED_VERSION_python3-django = "4.0.2" can be added to local.conf +# PREFERRED_VERSION_python3-django = "4.2.7" can be added to local.conf DEFAULT_PREFERENCE = "-1" diff --git a/meta-python/recipes-devtools/python/python3-gcovr/0001-Fix-parsing-of-gcov-metadata-601.patch b/meta-python/recipes-devtools/python/python3-gcovr/0001-Fix-parsing-of-gcov-metadata-601.patch new file mode 100644 index 0000000000..5530a39857 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-gcovr/0001-Fix-parsing-of-gcov-metadata-601.patch @@ -0,0 +1,84 @@ +From c4f53f28c4c537b75b5912a44083c41262807504 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Michael=20F=C3=B6rderer?= <michael.foerderer@gmx.de> +Date: Sun, 3 Apr 2022 22:58:33 +0200 +Subject: [PATCH] Fix parsing of gcov metadata (#601) + +gcc-11 has metadata line "-: 0:Source is newer than graph" which throws an error. 
+ +Upstream-Status: Backport [https://github.com/gcovr/gcovr/commit/7b6947bd4b6fd28a477606313fff3c13fcea8d3d] + +Signed-off-by: Jasper Orschulko <jasper@fancydomain.eu> +--- + gcovr/gcov.py | 5 ++++- + gcovr/gcov_parser.py | 24 ++++++++++++++++++++---- + 2 files changed, 24 insertions(+), 5 deletions(-) + +diff --git a/gcovr/gcov.py b/gcovr/gcov.py +index cc7a9af4..ff4cdb0b 100644 +--- a/gcovr/gcov.py ++++ b/gcovr/gcov.py +@@ -98,8 +98,11 @@ def process_gcov_data(data_fname, covdata, source_fname, options, currdir=None): + # Find the source file + # TODO: instead of heuristics, use "working directory" if available + metadata = parse_metadata(lines) ++ source = metadata.get("Source") ++ if source is None: ++ raise RuntimeError("Unexpected value 'None' for metadata 'Source'.") + fname = guess_source_file_name( +- metadata["Source"].strip(), ++ source, + data_fname, + source_fname, + root_dir=options.root_dir, +diff --git a/gcovr/gcov_parser.py b/gcovr/gcov_parser.py +index 391ecd78..523ea406 100644 +--- a/gcovr/gcov_parser.py ++++ b/gcovr/gcov_parser.py +@@ -121,7 +121,7 @@ class _MetadataLine(NamedTuple): + """A gcov line with metadata: ``-: 0:KEY:VALUE``""" + + key: str +- value: str ++ value: Optional[str] + + + class _BlockLine(NamedTuple): +@@ -214,7 +214,19 @@ def parse_metadata(lines: List[str]) -> Dict[str, str]: + ... -: 0:Foo:bar + ... -: 0:Key:123 + ... '''.splitlines()) +- {'Foo': 'bar', 'Key': '123'} ++ Traceback (most recent call last): ++ ... ++ RuntimeError: Missing key 'Source' in metadata. GCOV data was >> ++ -: 0:Foo:bar ++ -: 0:Key:123<< End of GCOV data ++ >>> parse_metadata('-: 0:Source: file \n -: 0:Foo: bar \n -: 0:Key: 123 '.splitlines()) ++ {'Source': 'file', 'Foo': 'bar', 'Key': '123'} ++ >>> parse_metadata(''' ++ ... -: 0:Source:file ++ ... -: 0:Foo:bar ++ ... -: 0:Key ++ ... 
'''.splitlines()) ++ {'Source': 'file', 'Foo': 'bar', 'Key': None} + """ + collected = {} + for line in lines: +@@ -721,8 +733,12 @@ def _parse_line(line: str) -> _Line: + + # METADATA (key, value) + if count_str == "-" and lineno == "0": +- key, value = source_code.split(":", 1) +- return _MetadataLine(key, value) ++ if ":" in source_code: ++ key, value = source_code.split(":", 1) ++ return _MetadataLine(key, value.strip()) ++ else: ++ # Add a syntethic metadata with no value ++ return _MetadataLine(source_code, None) + + if count_str == "-": + count = 0 +-- +2.41.0 + diff --git a/meta-python/recipes-devtools/python/python3-gcovr_5.1.bb b/meta-python/recipes-devtools/python/python3-gcovr_5.1.bb index 995f3b779b..5dcd9496c5 100644 --- a/meta-python/recipes-devtools/python/python3-gcovr_5.1.bb +++ b/meta-python/recipes-devtools/python/python3-gcovr_5.1.bb @@ -4,7 +4,8 @@ SECTION = "devel/python" LICENSE = "BSD-3-Clause" LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=08208c66520e8d69d5367483186d94ed" -SRC_URI = "git://github.com/gcovr/gcovr.git;branch=master;protocol=https" +SRC_URI = "git://github.com/gcovr/gcovr.git;branch=main;protocol=https" +SRC_URI += "file://0001-Fix-parsing-of-gcov-metadata-601.patch" SRCREV = "e71e883521b78122c49016eb4e510e6da06c6916" S = "${WORKDIR}/git" @@ -12,6 +13,6 @@ S = "${WORKDIR}/git" inherit setuptools3 PIP_INSTALL_PACKAGE = "gcovr" -RDEPENDS:${PN} += "${PYTHON_PN}-jinja2 ${PYTHON_PN}-lxml ${PYTHON_PN}-setuptools ${PYTHON_PN}-pygments" +RDEPENDS:${PN} += "${PYTHON_PN}-jinja2 ${PYTHON_PN}-lxml ${PYTHON_PN}-setuptools ${PYTHON_PN}-pygments ${PYTHON_PN}-multiprocessing" BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-gevent/CVE-2023-41419.patch b/meta-python/recipes-devtools/python/python3-gevent/CVE-2023-41419.patch new file mode 100644 index 0000000000..c92ba876a8 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-gevent/CVE-2023-41419.patch @@ -0,0 +1,673 @@ +From 
f80ee15e27b67b6fdd101d5f91cf584d19b2b26e Mon Sep 17 00:00:00 2001 +From: Jason Madden <jamadden@gmail.com> +Date: Fri, 6 Oct 2023 12:41:59 +0000 +Subject: [PATCH] gevent.pywsgi: Much improved handling of chunk trailers. + Validation is much stricter to the specification. + +Fixes #1989 + +CVE: CVE-2023-41419 + +Upstream-Status: Backport [https://github.com/gevent/gevent/commit/2f53c851eaf926767fbac62385615efd4886221c] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + docs/changes/1989.bugfix | 26 ++++ + src/gevent/pywsgi.py | 229 ++++++++++++++++++++++++------- + src/gevent/subprocess.py | 7 +- + src/gevent/testing/testcase.py | 2 +- + src/gevent/tests/test__pywsgi.py | 193 ++++++++++++++++++++++++-- + 5 files changed, 390 insertions(+), 67 deletions(-) + create mode 100644 docs/changes/1989.bugfix + +diff --git a/docs/changes/1989.bugfix b/docs/changes/1989.bugfix +new file mode 100644 +index 0000000..7ce4a93 +--- /dev/null ++++ b/docs/changes/1989.bugfix +@@ -0,0 +1,26 @@ ++Make ``gevent.pywsgi`` comply more closely with the HTTP specification ++for chunked transfer encoding. In particular, we are much stricter ++about trailers, and trailers that are invalid (too long or featuring ++disallowed characters) forcibly close the connection to the client ++*after* the results have been sent. ++ ++Trailers otherwise continue to be ignored and are not available to the ++WSGI application. ++ ++Previously, carefully crafted invalid trailers in chunked requests on ++keep-alive connections might appear as two requests to ++``gevent.pywsgi``. Because this was handled exactly as a normal ++keep-alive connection with two requests, the WSGI application should ++handle it normally. However, if you were counting on some upstream ++server to filter incoming requests based on paths or header fields, ++and the upstream server simply passed trailers through without ++validating them, then this embedded second request would bypass those ++checks. 
(If the upstream server validated that the trailers meet the ++HTTP specification, this could not occur, because characters that are ++required in an HTTP request, like a space, are not allowed in ++trailers.) CVE-2023-41419 was reserved for this. ++ ++Our thanks to the original reporters, Keran Mu ++(mkr22@mails.tsinghua.edu.cn) and Jianjun Chen ++(jianjun@tsinghua.edu.cn), from Tsinghua University and Zhongguancun ++Laboratory. +diff --git a/src/gevent/pywsgi.py b/src/gevent/pywsgi.py +index 0ebe095..078398a 100644 +--- a/src/gevent/pywsgi.py ++++ b/src/gevent/pywsgi.py +@@ -1,13 +1,28 @@ + # Copyright (c) 2005-2009, eventlet contributors + # Copyright (c) 2009-2018, gevent contributors + """ +-A pure-Python, gevent-friendly WSGI server. ++A pure-Python, gevent-friendly WSGI server implementing HTTP/1.1. + + The server is provided in :class:`WSGIServer`, but most of the actual + WSGI work is handled by :class:`WSGIHandler` --- a new instance is + created for each request. The server can be customized to use + different subclasses of :class:`WSGIHandler`. + ++.. important:: ++ This server is intended primarily for development and testing, and ++ secondarily for other "safe" scenarios where it will not be exposed to ++ potentially malicious input. The code has not been security audited, ++ and is not intended for direct exposure to the public Internet. For production ++ usage on the Internet, either choose a production-strength server such as ++ gunicorn, or put a reverse proxy between gevent and the Internet. ++.. versionchanged:: NEXT ++ Complies more closely with the HTTP specification for chunked transfer encoding. ++ In particular, we are much stricter about trailers, and trailers that ++ are invalid (too long or featuring disallowed characters) forcibly close ++ the connection to the client *after* the results have been sent. ++ Trailers otherwise continue to be ignored and are not available to the ++ WSGI application. 
++ + """ + from __future__ import absolute_import + +@@ -22,10 +37,7 @@ import time + import traceback + from datetime import datetime + +-try: +- from urllib import unquote +-except ImportError: +- from urllib.parse import unquote # python 2 pylint:disable=import-error,no-name-in-module ++from urllib.parse import unquote + + from gevent import socket + import gevent +@@ -53,29 +65,52 @@ __all__ = [ + + MAX_REQUEST_LINE = 8192 + # Weekday and month names for HTTP date/time formatting; always English! +-_WEEKDAYNAME = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] +-_MONTHNAME = [None, # Dummy so we can use 1-based month numbers ++_WEEKDAYNAME = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") ++_MONTHNAME = (None, # Dummy so we can use 1-based month numbers + "Jan", "Feb", "Mar", "Apr", "May", "Jun", +- "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] ++ "Jul", "Aug", "Sep", "Oct", "Nov", "Dec") + + # The contents of the "HEX" grammar rule for HTTP, upper and lowercase A-F plus digits, + # in byte form for comparing to the network. + _HEX = string.hexdigits.encode('ascii') + ++# The characters allowed in "token" rules. ++ ++# token = 1*tchar ++# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" ++# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" ++# / DIGIT / ALPHA ++# ; any VCHAR, except delimiters ++# ALPHA = %x41-5A / %x61-7A ; A-Z / a-z ++_ALLOWED_TOKEN_CHARS = frozenset( ++ # Remember we have to be careful because bytestrings ++ # inexplicably iterate as integers, which are not equal to bytes. 
++ ++ # explicit chars then DIGIT ++ (c.encode('ascii') for c in "!#$%&'*+-.^_`|~0123456789") ++ # Then we add ALPHA ++) | {c.encode('ascii') for c in string.ascii_letters} ++assert b'A' in _ALLOWED_TOKEN_CHARS ++ ++ + # Errors + _ERRORS = {} + _INTERNAL_ERROR_STATUS = '500 Internal Server Error' + _INTERNAL_ERROR_BODY = b'Internal Server Error' +-_INTERNAL_ERROR_HEADERS = [('Content-Type', 'text/plain'), +- ('Connection', 'close'), +- ('Content-Length', str(len(_INTERNAL_ERROR_BODY)))] ++_INTERNAL_ERROR_HEADERS = ( ++ ('Content-Type', 'text/plain'), ++ ('Connection', 'close'), ++ ('Content-Length', str(len(_INTERNAL_ERROR_BODY))) ++) + _ERRORS[500] = (_INTERNAL_ERROR_STATUS, _INTERNAL_ERROR_HEADERS, _INTERNAL_ERROR_BODY) + + _BAD_REQUEST_STATUS = '400 Bad Request' + _BAD_REQUEST_BODY = '' +-_BAD_REQUEST_HEADERS = [('Content-Type', 'text/plain'), +- ('Connection', 'close'), +- ('Content-Length', str(len(_BAD_REQUEST_BODY)))] ++_BAD_REQUEST_HEADERS = ( ++ ('Content-Type', 'text/plain'), ++ ('Connection', 'close'), ++ ('Content-Length', str(len(_BAD_REQUEST_BODY))) ++) + _ERRORS[400] = (_BAD_REQUEST_STATUS, _BAD_REQUEST_HEADERS, _BAD_REQUEST_BODY) + + _REQUEST_TOO_LONG_RESPONSE = b"HTTP/1.1 414 Request URI Too Long\r\nConnection: close\r\nContent-length: 0\r\n\r\n" +@@ -204,23 +239,32 @@ class Input(object): + # Read and return the next integer chunk length. If no + # chunk length can be read, raises _InvalidClientInput. + +- # Here's the production for a chunk: +- # (http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html) +- # chunk = chunk-size [ chunk-extension ] CRLF +- # chunk-data CRLF +- # chunk-size = 1*HEX +- # chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +- # chunk-ext-name = token +- # chunk-ext-val = token | quoted-string +- +- # To cope with malicious or broken clients that fail to send valid +- # chunk lines, the strategy is to read character by character until we either reach +- # a ; or newline. 
If at any time we read a non-HEX digit, we bail. If we hit a +- # ;, indicating an chunk-extension, we'll read up to the next +- # MAX_REQUEST_LINE characters +- # looking for the CRLF, and if we don't find it, we bail. If we read more than 16 hex characters, +- # (the number needed to represent a 64-bit chunk size), we bail (this protects us from +- # a client that sends an infinite stream of `F`, for example). ++ # Here's the production for a chunk (actually the whole body): ++ # (https://www.rfc-editor.org/rfc/rfc7230#section-4.1) ++ ++ # chunked-body = *chunk ++ # last-chunk ++ # trailer-part ++ # CRLF ++ # ++ # chunk = chunk-size [ chunk-ext ] CRLF ++ # chunk-data CRLF ++ # chunk-size = 1*HEXDIG ++ # last-chunk = 1*("0") [ chunk-ext ] CRLF ++ # trailer-part = *( header-field CRLF ) ++ # chunk-data = 1*OCTET ; a sequence of chunk-size octets ++ ++ # To cope with malicious or broken clients that fail to send ++ # valid chunk lines, the strategy is to read character by ++ # character until we either reach a ; or newline. If at any ++ # time we read a non-HEX digit, we bail. If we hit a ;, ++ # indicating an chunk-extension, we'll read up to the next ++ # MAX_REQUEST_LINE characters ("A server ought to limit the ++ # total length of chunk extensions received") looking for the ++ # CRLF, and if we don't find it, we bail. If we read more than ++ # 16 hex characters, (the number needed to represent a 64-bit ++ # chunk size), we bail (this protects us from a client that ++ # sends an infinite stream of `F`, for example). + + buf = BytesIO() + while 1: +@@ -228,16 +272,20 @@ class Input(object): + if not char: + self._chunked_input_error = True + raise _InvalidClientInput("EOF before chunk end reached") +- if char == b'\r': +- break +- if char == b';': ++ ++ if char in ( ++ b'\r', # Beginning EOL ++ b';', # Beginning extension ++ ): + break + +- if char not in _HEX: ++ if char not in _HEX: # Invalid data. 
+ self._chunked_input_error = True + raise _InvalidClientInput("Non-hex data", char) ++ + buf.write(char) +- if buf.tell() > 16: ++ ++ if buf.tell() > 16: # Too many hex bytes + self._chunked_input_error = True + raise _InvalidClientInput("Chunk-size too large.") + +@@ -257,11 +305,72 @@ class Input(object): + if char == b'\r': + # We either got here from the main loop or from the + # end of an extension ++ self.__read_chunk_size_crlf(rfile, newline_only=True) ++ result = int(buf.getvalue(), 16) ++ if result == 0: ++ # The only time a chunk size of zero is allowed is the final ++ # chunk. It is either followed by another \r\n, or some trailers ++ # which are then followed by \r\n. ++ while self.__read_chunk_trailer(rfile): ++ pass ++ return result ++ ++ # Trailers have the following production (they are a header-field followed by CRLF) ++ # See above for the definition of "token". ++ # ++ # header-field = field-name ":" OWS field-value OWS ++ # field-name = token ++ # field-value = *( field-content / obs-fold ) ++ # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] ++ # field-vchar = VCHAR / obs-text ++ # obs-fold = CRLF 1*( SP / HTAB ) ++ # ; obsolete line folding ++ # ; see Section 3.2.4 ++ ++ ++ def __read_chunk_trailer(self, rfile, ): ++ # With rfile positioned just after a \r\n, read a trailer line. ++ # Return a true value if a non-empty trailer was read, and ++ # return false if an empty trailer was read (meaning the trailers are ++ # done). ++ # If a single line exceeds the MAX_REQUEST_LINE, raise an exception. ++ # If the field-name portion contains invalid characters, raise an exception. ++ ++ i = 0 ++ empty = True ++ seen_field_name = False ++ while i < MAX_REQUEST_LINE: ++ char = rfile.read(1) ++ if char == b'\r': ++ # Either read the next \n or raise an error. ++ self.__read_chunk_size_crlf(rfile, newline_only=True) ++ break ++ # Not a \r, so we are NOT an empty chunk. 
++ empty = False ++ if char == b':' and i > 0: ++ # We're ending the field-name part; stop validating characters. ++ # Unless : was the first character... ++ seen_field_name = True ++ if not seen_field_name and char not in _ALLOWED_TOKEN_CHARS: ++ raise _InvalidClientInput('Invalid token character: %r' % (char,)) ++ i += 1 ++ else: ++ # We read too much ++ self._chunked_input_error = True ++ raise _InvalidClientInput("Too large chunk trailer") ++ return not empty ++ ++ def __read_chunk_size_crlf(self, rfile, newline_only=False): ++ # Also for safety, correctly verify that we get \r\n when expected. ++ if not newline_only: + char = rfile.read(1) +- if char != b'\n': ++ if char != b'\r': + self._chunked_input_error = True +- raise _InvalidClientInput("Line didn't end in CRLF") +- return int(buf.getvalue(), 16) ++ raise _InvalidClientInput("Line didn't end in CRLF: %r" % (char,)) ++ char = rfile.read(1) ++ if char != b'\n': ++ self._chunked_input_error = True ++ raise _InvalidClientInput("Line didn't end in LF: %r" % (char,)) + + def _chunked_read(self, length=None, use_readline=False): + # pylint:disable=too-many-branches +@@ -294,7 +403,7 @@ class Input(object): + + self.position += datalen + if self.chunk_length == self.position: +- rfile.readline() ++ self.__read_chunk_size_crlf(rfile) + + if length is not None: + length -= datalen +@@ -307,9 +416,9 @@ class Input(object): + # determine the next size to read + self.chunk_length = self.__read_chunk_length(rfile) + self.position = 0 +- if self.chunk_length == 0: +- # Last chunk. Terminates with a CRLF. +- rfile.readline() ++ # If chunk_length was 0, we already read any trailers and ++ # validated that we have ended with \r\n\r\n. 
++ + return b''.join(response) + + def read(self, length=None): +@@ -532,7 +641,8 @@ class WSGIHandler(object): + elif len(words) == 2: + self.command, self.path = words + if self.command != "GET": +- raise _InvalidClientRequest('Expected GET method: %r' % (raw_requestline,)) ++ raise _InvalidClientRequest('Expected GET method; Got command=%r; path=%r; raw=%r' % ( ++ self.command, self.path, raw_requestline,)) + self.request_version = "HTTP/0.9" + # QQQ I'm pretty sure we can drop support for HTTP/0.9 + else: +@@ -1000,14 +1110,28 @@ class WSGIHandler(object): + finally: + try: + self.wsgi_input._discard() +- except (socket.error, IOError): +- # Don't let exceptions during discarding ++ except _InvalidClientInput: ++ # This one is deliberately raised to the outer ++ # scope, because, with the incoming stream in some bad state, ++ # we can't be sure we can synchronize and properly parse the next ++ # request. ++ raise ++ except socket.error: ++ # Don't let socket exceptions during discarding + # input override any exception that may have been + # raised by the application, such as our own _InvalidClientInput. + # In the general case, these aren't even worth logging (see the comment + # just below) + pass +- except _InvalidClientInput: ++ except _InvalidClientInput as ex: ++ # DO log this one because: ++ # - Some of the data may have been read and acted on by the ++ # application; ++ # - The response may or may not have been sent; ++ # - It's likely that the client is bad, or malicious, and ++ # users might wish to take steps to block the client. ++ self._handle_client_error(ex) ++ self.close_connection = True + self._send_error_response_if_possible(400) + except socket.error as ex: + if ex.args[0] in self.ignored_socket_errors: +@@ -1054,17 +1178,22 @@ class WSGIHandler(object): + def _handle_client_error(self, ex): + # Called for invalid client input + # Returns the appropriate error response. 
+- if not isinstance(ex, ValueError): ++ if not isinstance(ex, (ValueError, _InvalidClientInput)): + # XXX: Why not self._log_error to send it through the loop's + # handle_error method? ++ # _InvalidClientRequest is a ValueError; _InvalidClientInput is an IOError. + traceback.print_exc() + if isinstance(ex, _InvalidClientRequest): + # No formatting needed, that's already been handled. In fact, because the + # formatted message contains user input, it might have a % in it, and attempting + # to format that with no arguments would be an error. +- self.log_error(ex.formatted_message) ++ # However, the error messages do not include the requesting IP ++ # necessarily, so we do add that. ++ self.log_error('(from %s) %s', self.client_address, ex.formatted_message) + else: +- self.log_error('Invalid request: %s', str(ex) or ex.__class__.__name__) ++ self.log_error('Invalid request (from %s): %s', ++ self.client_address, ++ str(ex) or ex.__class__.__name__) + return ('400', _BAD_REQUEST_RESPONSE) + + def _headers(self): +diff --git a/src/gevent/subprocess.py b/src/gevent/subprocess.py +index 38c9bd3..8a8ccad 100644 +--- a/src/gevent/subprocess.py ++++ b/src/gevent/subprocess.py +@@ -352,10 +352,11 @@ def check_output(*popenargs, **kwargs): + + To capture standard error in the result, use ``stderr=STDOUT``:: + +- >>> print(check_output(["/bin/sh", "-c", ++ >>> output = check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], +- ... stderr=STDOUT).decode('ascii').strip()) +- ls: non_existent_file: No such file or directory ++ ... stderr=STDOUT).decode('ascii').strip() ++ >>> print(output.rsplit(':', 1)[1].strip()) ++ No such file or directory + + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. 
If you use this argument +diff --git a/src/gevent/testing/testcase.py b/src/gevent/testing/testcase.py +index cd5db80..aa86dcf 100644 +--- a/src/gevent/testing/testcase.py ++++ b/src/gevent/testing/testcase.py +@@ -225,7 +225,7 @@ class TestCaseMetaClass(type): + classDict.pop(key) + # XXX: When did we stop doing this? + #value = wrap_switch_count_check(value) +- value = _wrap_timeout(timeout, value) ++ #value = _wrap_timeout(timeout, value) + error_fatal = getattr(value, 'error_fatal', error_fatal) + if error_fatal: + value = errorhandler.wrap_error_fatal(value) +diff --git a/src/gevent/tests/test__pywsgi.py b/src/gevent/tests/test__pywsgi.py +index d2125a8..d46030b 100644 +--- a/src/gevent/tests/test__pywsgi.py ++++ b/src/gevent/tests/test__pywsgi.py +@@ -25,21 +25,11 @@ from gevent import monkey + monkey.patch_all() + + from contextlib import contextmanager +-try: +- from urllib.parse import parse_qs +-except ImportError: +- # Python 2 +- from urlparse import parse_qs ++from urllib.parse import parse_qs + import os + import sys +-try: +- # On Python 2, we want the C-optimized version if +- # available; it has different corner-case behaviour than +- # the Python implementation, and it used by socket.makefile +- # by default. +- from cStringIO import StringIO +-except ImportError: +- from io import BytesIO as StringIO ++from io import BytesIO as StringIO ++ + import weakref + import unittest + from wsgiref.validate import validator +@@ -156,6 +146,10 @@ class Response(object): + @classmethod + def read(cls, fd, code=200, reason='default', version='1.1', + body=None, chunks=None, content_length=None): ++ """ ++ Read an HTTP response, optionally perform assertions, ++ and return the Response object. 
++ """ + # pylint:disable=too-many-branches + _status_line, headers = read_headers(fd) + self = cls(_status_line, headers) +@@ -716,7 +710,14 @@ class TestNegativeReadline(TestCase): + + class TestChunkedPost(TestCase): + ++ calls = 0 ++ ++ def setUp(self): ++ super().setUp() ++ self.calls = 0 ++ + def application(self, env, start_response): ++ self.calls += 1 + self.assertTrue(env.get('wsgi.input_terminated')) + start_response('200 OK', [('Content-Type', 'text/plain')]) + if env['PATH_INFO'] == '/a': +@@ -730,6 +731,8 @@ class TestChunkedPost(TestCase): + if env['PATH_INFO'] == '/c': + return list(iter(lambda: env['wsgi.input'].read(1), b'')) + ++ return [b'We should not get here', env['PATH_INFO'].encode('ascii')] ++ + def test_014_chunked_post(self): + data = (b'POST /a HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n' + b'Transfer-Encoding: chunked\r\n\r\n' +@@ -797,6 +800,170 @@ class TestChunkedPost(TestCase): + fd.write(data) + read_http(fd, code=400) + ++ def test_trailers_keepalive_ignored(self): ++ # Trailers after a chunk are ignored. ++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: keep-alive\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0\r\n' # last-chunk ++ # Normally the final CRLF would go here, but if you put in a ++ # trailer, it doesn't. ++ b'trailer1: value1\r\n' ++ b'trailer2: value2\r\n' ++ b'\r\n' # Really terminate the chunk. ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: close\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n bye\r\n' ++ b'0\r\n' # last-chunk ++ ) ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ read_http(fd, body='oh bye') ++ ++ self.assertEqual(self.calls, 2) ++ ++ def test_trailers_too_long(self): ++ # Trailers after a chunk are ignored. 
++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: keep-alive\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0\r\n' # last-chunk ++ # Normally the final CRLF would go here, but if you put in a ++ # trailer, it doesn't. ++ b'trailer2: value2' # not lack of \r\n ++ ) ++ data += b't' * pywsgi.MAX_REQUEST_LINE ++ # No termination, because we detect the trailer as being too ++ # long and abort the connection. ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ with self.assertRaises(ConnectionClosed): ++ read_http(fd, body='oh bye') ++ ++ def test_trailers_request_smuggling_missing_last_chunk_keep_alive(self): ++ # When something that looks like a request line comes in the trailer ++ # as the first line, immediately after an invalid last chunk. ++ # We detect this and abort the connection, because the ++ # whitespace in the GET line isn't a legal part of a trailer. ++ # If we didn't abort the connection, then, because we specified ++ # keep-alive, the server would be hanging around waiting for more input. ++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: keep-alive\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0' # last-chunk, but missing the \r\n ++ # Normally the final CRLF would go here, but if you put in a ++ # trailer, it doesn't. ++ # b'\r\n' ++ b'GET /path2?a=:123 HTTP/1.1\r\n' ++ b'Host: a.com\r\n' ++ b'Connection: close\r\n' ++ b'\r\n' ++ ) ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ with self.assertRaises(ConnectionClosed): ++ read_http(fd) ++ ++ self.assertEqual(self.calls, 1) ++ ++ def test_trailers_request_smuggling_missing_last_chunk_close(self): ++ # Same as the above, except the trailers are actually valid ++ # and since we ask to close the connection we don't get stuck ++ # waiting for more input. 
++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: close\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0\r\n' # last-chunk ++ # Normally the final CRLF would go here, but if you put in a ++ # trailer, it doesn't. ++ # b'\r\n' ++ b'GETpath2a:123 HTTP/1.1\r\n' ++ b'Host: a.com\r\n' ++ b'Connection: close\r\n' ++ b'\r\n' ++ ) ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ with self.assertRaises(ConnectionClosed): ++ read_http(fd) ++ ++ def test_trailers_request_smuggling_header_first(self): ++ # When something that looks like a header comes in the first line. ++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: keep-alive\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0\r\n' # last-chunk, but only one CRLF ++ b'Header: value\r\n' ++ b'GET /path2?a=:123 HTTP/1.1\r\n' ++ b'Host: a.com\r\n' ++ b'Connection: close\r\n' ++ b'\r\n' ++ ) ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ with self.assertRaises(ConnectionClosed): ++ read_http(fd, code=400) ++ ++ self.assertEqual(self.calls, 1) ++ ++ def test_trailers_request_smuggling_request_terminates_then_header(self): ++ data = ( ++ b'POST /a HTTP/1.1\r\n' ++ b'Host: localhost\r\n' ++ b'Connection: keep-alive\r\n' ++ b'Transfer-Encoding: chunked\r\n' ++ b'\r\n' ++ b'2\r\noh\r\n' ++ b'4\r\n hai\r\n' ++ b'0\r\n' # last-chunk ++ b'\r\n' ++ b'Header: value' ++ b'GET /path2?a=:123 HTTP/1.1\r\n' ++ b'Host: a.com\r\n' ++ b'Connection: close\r\n' ++ b'\r\n' ++ ) ++ with self.makefile() as fd: ++ fd.write(data) ++ read_http(fd, body='oh hai') ++ read_http(fd, code=400) ++ ++ self.assertEqual(self.calls, 1) ++ + + class TestUseWrite(TestCase): + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-gevent_21.12.0.bb b/meta-python/recipes-devtools/python/python3-gevent_21.12.0.bb index 
9efeec4d9f..fd6b0f531a 100644 --- a/meta-python/recipes-devtools/python/python3-gevent_21.12.0.bb +++ b/meta-python/recipes-devtools/python/python3-gevent_21.12.0.bb @@ -13,6 +13,8 @@ RDEPENDS:${PN} = "${PYTHON_PN}-greenlet \ SRC_URI[sha256sum] = "f48b64578c367b91fa793bf8eaaaf4995cb93c8bc45860e473bf868070ad094e" +SRC_URI += "file://CVE-2023-41419.patch" + inherit pypi setuptools3 # Don't embed libraries, link to the system instead diff --git a/meta-python/recipes-devtools/python/python3-kivy_2.1.0..bb b/meta-python/recipes-devtools/python/python3-kivy_2.1.0..bb index 684bca03e1..b02c55a85b 100644 --- a/meta-python/recipes-devtools/python/python3-kivy_2.1.0..bb +++ b/meta-python/recipes-devtools/python/python3-kivy_2.1.0..bb @@ -43,7 +43,9 @@ export KIVY_GRAPHICS KIVY_CROSS_SYSROOT="${RECIPE_SYSROOT}" export KIVY_CROSS_SYSROOT -REQUIRED_DISTRO_FEATURES += "x11 opengl" +REQUIRED_DISTRO_FEATURES += "opengl gobject-introspection-data" + +ANY_OF_DISTRO_FEATURES = "x11 wayland" DEPENDS += " \ gstreamer1.0 \ diff --git a/meta-python/recipes-devtools/python/python3-lxml/CVE-2022-2309.patch b/meta-python/recipes-devtools/python/python3-lxml/CVE-2022-2309.patch new file mode 100644 index 0000000000..5ec55dfd2a --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-lxml/CVE-2022-2309.patch @@ -0,0 +1,99 @@ +From 86368e9cf70a0ad23cccd5ee32de847149af0c6f Mon Sep 17 00:00:00 2001 +From: Stefan Behnel <stefan_ml@behnel.de> +Date: Fri, 1 Jul 2022 21:06:10 +0200 +Subject: [PATCH] Fix a crash when incorrect parser input occurs together with + usages of iterwalk() on trees generated by the same parser. 
+ +CVE: CVE-2022-2309 + +Upstream-Status: Backport +[https://github.com/lxml/lxml/commit/86368e9cf70a0ad23cccd5ee32de847149af0c6f] + +Signed-off-by: Yue Tao <yue.tao@windriver.com> + +--- + src/lxml/apihelpers.pxi | 7 ++++--- + src/lxml/iterparse.pxi | 11 ++++++----- + src/lxml/tests/test_etree.py | 20 ++++++++++++++++++++ + 3 files changed, 30 insertions(+), 8 deletions(-) + +diff --git a/src/lxml/apihelpers.pxi b/src/lxml/apihelpers.pxi +index c1662762..9fae9fb1 100644 +--- a/src/lxml/apihelpers.pxi ++++ b/src/lxml/apihelpers.pxi +@@ -246,9 +246,10 @@ cdef dict _build_nsmap(xmlNode* c_node): + while c_node is not NULL and c_node.type == tree.XML_ELEMENT_NODE: + c_ns = c_node.nsDef + while c_ns is not NULL: +- prefix = funicodeOrNone(c_ns.prefix) +- if prefix not in nsmap: +- nsmap[prefix] = funicodeOrNone(c_ns.href) ++ if c_ns.prefix or c_ns.href: ++ prefix = funicodeOrNone(c_ns.prefix) ++ if prefix not in nsmap: ++ nsmap[prefix] = funicodeOrNone(c_ns.href) + c_ns = c_ns.next + c_node = c_node.parent + return nsmap +diff --git a/src/lxml/iterparse.pxi b/src/lxml/iterparse.pxi +index 138c23a6..a7299da6 100644 +--- a/src/lxml/iterparse.pxi ++++ b/src/lxml/iterparse.pxi +@@ -420,7 +420,7 @@ cdef int _countNsDefs(xmlNode* c_node): + count = 0 + c_ns = c_node.nsDef + while c_ns is not NULL: +- count += 1 ++ count += (c_ns.href is not NULL) + c_ns = c_ns.next + return count + +@@ -431,9 +431,10 @@ cdef int _appendStartNsEvents(xmlNode* c_node, list event_list) except -1: + count = 0 + c_ns = c_node.nsDef + while c_ns is not NULL: +- ns_tuple = (funicode(c_ns.prefix) if c_ns.prefix is not NULL else '', +- funicode(c_ns.href)) +- event_list.append( (u"start-ns", ns_tuple) ) +- count += 1 ++ if c_ns.href: ++ ns_tuple = (funicodeOrEmpty(c_ns.prefix), ++ funicode(c_ns.href)) ++ event_list.append( (u"start-ns", ns_tuple) ) ++ count += 1 + c_ns = c_ns.next + return count +diff --git a/src/lxml/tests/test_etree.py b/src/lxml/tests/test_etree.py +index e5f08469..285313f6 100644 
+--- a/src/lxml/tests/test_etree.py ++++ b/src/lxml/tests/test_etree.py +@@ -1460,6 +1460,26 @@ class ETreeOnlyTestCase(HelperTestCase): + [1,2,1,4], + counts) + ++ def test_walk_after_parse_failure(self): ++ # This used to be an issue because libxml2 can leak empty namespaces ++ # between failed parser runs. iterwalk() failed to handle such a tree. ++ try: ++ etree.XML('''<anot xmlns="1">''') ++ except etree.XMLSyntaxError: ++ pass ++ else: ++ assert False, "invalid input did not fail to parse" ++ ++ et = etree.XML('''<root> </root>''') ++ try: ++ ns = next(etree.iterwalk(et, events=('start-ns',))) ++ except StopIteration: ++ # This would be the expected result, because there was no namespace ++ pass ++ else: ++ # This is a bug in libxml2 ++ assert not ns, repr(ns) ++ + def test_itertext_comment_pi(self): + # https://bugs.launchpad.net/lxml/+bug/1844674 + XML = self.etree.XML +-- +2.17.1 + diff --git a/meta-python/recipes-devtools/python/python3-lxml_4.8.0.bb b/meta-python/recipes-devtools/python/python3-lxml_4.8.0.bb index c4d4df383a..0c78d97abd 100644 --- a/meta-python/recipes-devtools/python/python3-lxml_4.8.0.bb +++ b/meta-python/recipes-devtools/python/python3-lxml_4.8.0.bb @@ -20,7 +20,8 @@ DEPENDS += "libxml2 libxslt" SRC_URI[sha256sum] = "f63f62fc60e6228a4ca9abae28228f35e1bd3ce675013d1dfb828688d50c6e23" -SRC_URI += "${PYPI_SRC_URI}" +SRC_URI += "${PYPI_SRC_URI} \ + file://CVE-2022-2309.patch " inherit pkgconfig pypi setuptools3 # {standard input}: Assembler messages: diff --git a/meta-python/recipes-devtools/python/python3-m2crypto/CVE-2020-25657.patch b/meta-python/recipes-devtools/python/python3-m2crypto/CVE-2020-25657.patch new file mode 100644 index 0000000000..cc915f1478 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-m2crypto/CVE-2020-25657.patch @@ -0,0 +1,175 @@ +From 2fa92e048b76fcc7bf2d4f4443478c8292d17470 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Mat=C4=9Bj=20Cepl?= <mcepl@cepl.eu> +Date: Thu, 1 Jun 2023 14:56:34 +0000 +Subject: 
[PATCH] Mitigate the Bleichenbacher timing attacks in the RSA + decryption API (CVE-2020-25657) + +Fixes #282 + +CVE: CVE-2020-25657 + +Upstream-Status: Backport [https://gitlab.com/m2crypto/m2crypto/-/commit/84c53958def0f510e92119fca14d74f94215827a] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + src/SWIG/_m2crypto_wrap.c | 20 ++++++++++++-------- + src/SWIG/_rsa.i | 20 ++++++++++++-------- + tests/test_rsa.py | 15 +++++++-------- + 3 files changed, 31 insertions(+), 24 deletions(-) + +diff --git a/src/SWIG/_m2crypto_wrap.c b/src/SWIG/_m2crypto_wrap.c +index 3db88b9..6aafe1f 100644 +--- a/src/SWIG/_m2crypto_wrap.c ++++ b/src/SWIG/_m2crypto_wrap.c +@@ -7129,9 +7129,10 @@ PyObject *rsa_private_encrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_private_encrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -7159,9 +7160,10 @@ PyObject *rsa_public_decrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_public_decrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -7186,9 +7188,10 @@ PyObject *rsa_public_encrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_public_encrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -7213,9 +7216,10 @@ PyObject *rsa_private_decrypt(RSA *rsa, PyObject *from, int padding) { + tlen = 
RSA_private_decrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); + +diff --git a/src/SWIG/_rsa.i b/src/SWIG/_rsa.i +index bc714e0..1377b8b 100644 +--- a/src/SWIG/_rsa.i ++++ b/src/SWIG/_rsa.i +@@ -239,9 +239,10 @@ PyObject *rsa_private_encrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_private_encrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -269,9 +270,10 @@ PyObject *rsa_public_decrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_public_decrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -296,9 +298,10 @@ PyObject *rsa_public_encrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_public_encrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + + ret = PyBytes_FromStringAndSize((const char *)tbuf, tlen); +@@ -323,9 +326,10 @@ PyObject *rsa_private_decrypt(RSA *rsa, PyObject *from, int padding) { + tlen = RSA_private_decrypt(flen, (unsigned char *)fbuf, + (unsigned char *)tbuf, rsa, padding); + if (tlen == -1) { +- m2_PyErr_Msg(_rsa_err); ++ ERR_clear_error(); ++ PyErr_Clear(); + PyMem_Free(tbuf); +- return NULL; ++ Py_RETURN_NONE; + } + ret = PyBytes_FromStringAndSize((const 
char *)tbuf, tlen); + +diff --git a/tests/test_rsa.py b/tests/test_rsa.py +index 7bb3af7..5e75d68 100644 +--- a/tests/test_rsa.py ++++ b/tests/test_rsa.py +@@ -109,8 +109,9 @@ class RSATestCase(unittest.TestCase): + # The other paddings. + for padding in self.s_padding_nok: + p = getattr(RSA, padding) +- with self.assertRaises(RSA.RSAError): +- priv.private_encrypt(self.data, p) ++ # Exception disabled as a part of mitigation against CVE-2020-25657 ++ # with self.assertRaises(RSA.RSAError): ++ priv.private_encrypt(self.data, p) + # Type-check the data to be encrypted. + with self.assertRaises(TypeError): + priv.private_encrypt(self.gen_callback, RSA.pkcs1_padding) +@@ -127,10 +128,12 @@ class RSATestCase(unittest.TestCase): + self.assertEqual(ptxt, self.data) + + # no_padding +- with six.assertRaisesRegex(self, RSA.RSAError, 'data too small'): +- priv.public_encrypt(self.data, RSA.no_padding) ++ # Exception disabled as a part of mitigation against CVE-2020-25657 ++ # with six.assertRaisesRegex(self, RSA.RSAError, 'data too small'): ++ priv.public_encrypt(self.data, RSA.no_padding) + + # Type-check the data to be encrypted. 
++ # Exception disabled as a part of mitigation against CVE-2020-25657 + with self.assertRaises(TypeError): + priv.public_encrypt(self.gen_callback, RSA.pkcs1_padding) + +@@ -146,10 +149,6 @@ class RSATestCase(unittest.TestCase): + b'\000\000\000\003\001\000\001') # aka 65537 aka 0xf4 + with self.assertRaises(RSA.RSAError): + setattr(rsa, 'e', '\000\000\000\003\001\000\001') +- with self.assertRaises(RSA.RSAError): +- rsa.private_encrypt(1) +- with self.assertRaises(RSA.RSAError): +- rsa.private_decrypt(1) + assert rsa.check_key() + + def test_loadpub_bad(self): +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-m2crypto_0.38.0.bb b/meta-python/recipes-devtools/python/python3-m2crypto_0.38.0.bb index 51a0dd676e..155a9066ca 100644 --- a/meta-python/recipes-devtools/python/python3-m2crypto_0.38.0.bb +++ b/meta-python/recipes-devtools/python/python3-m2crypto_0.38.0.bb @@ -10,6 +10,7 @@ SRC_URI += "file://0001-setup.py-link-in-sysroot-not-in-host-directories.patch \ file://cross-compile-platform.patch \ file://avoid-host-contamination.patch \ file://0001-setup.py-address-openssl-3.x-build-issue.patch \ + file://CVE-2020-25657.patch \ " SRC_URI[sha256sum] = "99f2260a30901c949a8dc6d5f82cd5312ffb8abc92e76633baf231bbbcb2decb" diff --git a/meta-python/recipes-devtools/python/python3-matplotlib_3.5.1.bb b/meta-python/recipes-devtools/python/python3-matplotlib_3.5.1.bb index b9eab3c931..cd05b455d4 100644 --- a/meta-python/recipes-devtools/python/python3-matplotlib_3.5.1.bb +++ b/meta-python/recipes-devtools/python/python3-matplotlib_3.5.1.bb @@ -50,6 +50,7 @@ RDEPENDS:${PN} = "\ ${PYTHON_PN}-dateutil \ ${PYTHON_PN}-kiwisolver \ ${PYTHON_PN}-pytz \ + ${PYTHON_PN}-pillow \ " ENABLELTO:toolchain-clang:riscv64 = "echo enable_lto = False >> ${S}/mplsetup.cfg" diff --git a/meta-python/recipes-devtools/python/python3-oauthlib_3.2.0.bb b/meta-python/recipes-devtools/python/python3-oauthlib_3.2.2.bb index e7f7f0b47b..566279d71c 100644 --- 
a/meta-python/recipes-devtools/python/python3-oauthlib_3.2.0.bb +++ b/meta-python/recipes-devtools/python/python3-oauthlib_3.2.2.bb @@ -4,7 +4,7 @@ HOMEPAGE = "https://github.com/idan/oauthlib" LICENSE = "BSD-3-Clause" LIC_FILES_CHKSUM = "file://LICENSE;md5=abd2675e944a2011aed7e505290ba482" -SRC_URI[sha256sum] = "23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2" +SRC_URI[sha256sum] = "9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918" inherit pypi setuptools3 diff --git a/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-44271.patch b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-44271.patch new file mode 100644 index 0000000000..ad51f17288 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-44271.patch @@ -0,0 +1,156 @@ +From 1fe1bb49c452b0318cad12ea9d97c3bef188e9a7 Mon Sep 17 00:00:00 2001 +From: Andrew Murray <radarhere@users.noreply.github.com> +Date: Fri, 30 Jun 2023 23:32:26 +1000 +Subject: [PATCH] Added ImageFont.MAX_STRING_LENGTH + +Upstream-status: Backport [https://github.com/python-pillow/Pillow/commit/1fe1bb49c452b0318cad12ea9d97c3bef188e9a7] +CVE: CVE-2023-44271 +Comment: Refresh hunk for test_imagefont.py, ImageFont.py and +Remove hunk 10.0.0.rst because in our version it is 9.4.0 + +Signed-off-by: Pawan Badganchi <Pawan.Badganchi@kpit.com> +Signed-off-by: Dnyandev Padalkar <padalkards17082001@gmail.com> +--- + Tests/test_imagefont.py | 19 +++++++++++++++++++ + docs/reference/ImageFont.rst | 18 ++++++++++++++++++ + src/PIL/ImageFont.py | 15 +++++++++++++++ + 3 files changed, 52 insertions(+) + +diff --git a/Tests/test_imagefont.py b/Tests/test_imagefont.py +index 7fa8ff8cbfd..c50447a153d 100644 +--- a/Tests/test_imagefont.py ++++ b/Tests/test_imagefont.py +@@ -1107,6 +1107,25 @@ + assert_image_equal_tofile(im, "Tests/images/text_mono.gif") + + ++def test_too_many_characters(font): ++ with pytest.raises(ValueError): ++ font.getlength("A" * 1000001) ++ with 
pytest.raises(ValueError): ++ font.getbbox("A" * 1000001) ++ with pytest.raises(ValueError): ++ font.getmask2("A" * 1000001) ++ ++ transposed_font = ImageFont.TransposedFont(font) ++ with pytest.raises(ValueError): ++ transposed_font.getlength("A" * 1000001) ++ ++ default_font = ImageFont.load_default() ++ with pytest.raises(ValueError): ++ default_font.getlength("A" * 1000001) ++ with pytest.raises(ValueError): ++ default_font.getbbox("A" * 1000001) ++ ++ + @pytest.mark.parametrize( + "test_file", + [ +diff --git a/docs/reference/ImageFont.rst b/docs/reference/ImageFont.rst +index 946bd3c4bed..2abfa0cc997 100644 +--- a/docs/reference/ImageFont.rst ++++ b/docs/reference/ImageFont.rst +@@ -18,6 +18,15 @@ OpenType fonts (as well as other font formats supported by the FreeType + library). For earlier versions, TrueType support is only available as part of + the imToolkit package. + ++.. warning:: ++ To protect against potential DOS attacks when using arbitrary strings as ++ text input, Pillow will raise a ``ValueError`` if the number of characters ++ is over a certain limit, :py:data:`MAX_STRING_LENGTH`. ++ ++ This threshold can be changed by setting ++ :py:data:`MAX_STRING_LENGTH`. It can be disabled by setting ++ ``ImageFont.MAX_STRING_LENGTH = None``. ++ + Example + ------- + +@@ -73,3 +82,12 @@ Constants + + Requires Raqm, you can check support using + :py:func:`PIL.features.check_feature` with ``feature="raqm"``. ++ ++Constants ++--------- ++ ++.. data:: MAX_STRING_LENGTH ++ ++ Set to 1,000,000, to protect against potential DOS attacks. Pillow will ++ raise a ``ValueError`` if the number of characters is over this limit. The ++ check can be disabled by setting ``ImageFont.MAX_STRING_LENGTH = None``. 
+diff --git a/src/PIL/ImageFont.py b/src/PIL/ImageFont.py +index 3ddc1aaad64..1030985ebc4 100644 +--- a/src/PIL/ImageFont.py ++++ b/src/PIL/ImageFont.py +@@ -43,6 +43,9 @@ + RAQM = 1 + + ++MAX_STRING_LENGTH = 1000000 ++ ++ + def __getattr__(name): + for enum, prefix in {Layout: "LAYOUT_"}.items(): + if name.startswith(prefix): +@@ -67,6 +67,12 @@ + core = _ImagingFtNotInstalled() + + ++def _string_length_check(text): ++ if MAX_STRING_LENGTH is not None and len(text) > MAX_STRING_LENGTH: ++ msg = "too many characters in string" ++ raise ValueError(msg) ++ ++ + _UNSPECIFIED = object() + + +@@ -192,6 +192,7 @@ + + :return: ``(left, top, right, bottom)`` bounding box + """ ++ _string_length_check(text) + width, height = self.font.getsize(text) + return 0, 0, width, height + +@@ -202,6 +202,7 @@ + + .. versionadded:: 9.2.0 + """ ++ _string_length_check(text) + width, height = self.font.getsize(text) + return width + +@@ -359,6 +359,7 @@ + + :return: Width for horizontal, height for vertical text. 
+ """ ++ _string_length_check(text) + return self.font.getlength(text, mode, direction, features, language) / 64 + + def getbbox( +@@ -418,6 +418,7 @@ + + :return: ``(left, top, right, bottom)`` bounding box + """ ++ _string_length_check(text) + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) +@@ -762,6 +762,7 @@ + :py:mod:`PIL.Image.core` interface module, and the text offset, the + gap between the starting coordinate and the first marking + """ ++ _string_length_check(text) + if fill is _UNSPECIFIED: + fill = Image.core.fill + else: +@@ -924,6 +924,7 @@ + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + msg = "text length is undefined for text rotated by 90 or 270 degrees" + raise ValueError(msg) ++ _string_length_check(text) + return self.font.getlength(text, *args, **kwargs) + + diff --git a/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-1.patch b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-1.patch new file mode 100644 index 0000000000..7de12be5d5 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-1.patch @@ -0,0 +1,29 @@ +From 3652f431c2d8b9c10bf20b70f284d300d12e814a +From: Andrew Murray <radarhere@users.noreply.github.com> +Date: Sat Oct 28 14:22:39 2023 +1100 +Subject: [PATCH] python3-pillow: Simplified code + +CVE: CVE-2023-50447 + +Upstream-Status: Backport [https://github.com/python-pillow/Pillow/commit/3652f431c2d8b9c10bf20b70f284d300d12e814a] + +Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + src/PIL/ImageMath.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/PIL/ImageMath.py b/src/PIL/ImageMath.py +index ac7d36b69..71872a3fb 100644 +--- a/src/PIL/ImageMath.py ++++ b/src/PIL/ImageMath.py +@@ -239,7 +239,7 @@ def eval(expression, _dict={}, **kw): + args = ops.copy() + args.update(_dict) + args.update(kw) +- for k, v in list(args.items()): ++ for k, v 
in args.items(): + if hasattr(v, "im"): + args[k] = _Operand(v) + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-2.patch b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-2.patch new file mode 100644 index 0000000000..13fbaf6d78 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-2.patch @@ -0,0 +1,31 @@ +From 45c726fd4daa63236a8f3653530f297dc87b160a +From: Eric Soroos <eric-github@soroos.net> +Date: Fri Oct 27 11:21:18 2023 +0200 +Subject: [PATCH] python3-pillow: Don't allow __ or builtins in env dictionarys + +CVE: CVE-2023-50447 + +Upstream-Status: Backport [https://github.com/python-pillow/Pillow/commit/45c726fd4daa63236a8f3653530f297dc87b160a] + +Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + src/PIL/ImageMath.py | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/src/PIL/ImageMath.py b/src/PIL/ImageMath.py +index 71872a3fb..923a8eeae 100644 +--- a/src/PIL/ImageMath.py ++++ b/src/PIL/ImageMath.py +@@ -240,6 +240,10 @@ def eval(expression, _dict={}, **kw): + args.update(_dict) + args.update(kw) + for k, v in args.items(): ++ if '__' in k or hasattr(__builtins__, k): ++ msg = f"'{k}' not allowed" ++ raise ValueError(msg) ++ + if hasattr(v, "im"): + args[k] = _Operand(v) + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-3.patch b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-3.patch new file mode 100644 index 0000000000..bbfc32a6c7 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-3.patch @@ -0,0 +1,56 @@ +From 0ca3c33c59927e1c7e0c14dbc1eea1dfb2431a80 +From: Andrew Murray <radarhere@users.noreply.github.com> +Date: Sat, 28 Oct 2023 15:58:52 +1100 +Subject: [PATCH] python3-pillow: Allow ops + +CVE: CVE-2023-50447 + +Upstream-Status: Backport [https://github.com/python-pillow/Pillow/commit/0ca3c33c59927e1c7e0c14dbc1eea1dfb2431a80] + 
+Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + Tests/test_imagemath.py | 5 +++++ + src/PIL/ImageMath.py | 9 +++++---- + 2 files changed, 10 insertions(+), 4 deletions(-) + +diff --git a/Tests/test_imagemath.py b/Tests/test_imagemath.py +index fe7ac9a7a..ded8c0011 100644 +--- a/Tests/test_imagemath.py ++++ b/Tests/test_imagemath.py +@@ -63,6 +63,11 @@ def test_prevent_exec(expression): + ImageMath.eval(expression) + + ++def test_prevent_double_underscores(): ++ with pytest.raises(ValueError): ++ ImageMath.eval("1", {"__": None}) ++ ++ + def test_logical(): + assert pixel(ImageMath.eval("not A", images)) == 0 + assert pixel(ImageMath.eval("A and B", images)) == "L 2" +diff --git a/src/PIL/ImageMath.py b/src/PIL/ImageMath.py +index 923a8eeae..c14598a4c 100644 +--- a/src/PIL/ImageMath.py ++++ b/src/PIL/ImageMath.py +@@ -237,13 +237,14 @@ def eval(expression, _dict={}, **kw): + + # build execution namespace + args = ops.copy() +- args.update(_dict) +- args.update(kw) +- for k, v in args.items(): +- if '__' in k or hasattr(__builtins__, k): ++ for k in list(_dict.keys()) + list(kw.keys()): ++ if "__" in k or hasattr(__builtins__, k): + msg = f"'{k}' not allowed" + raise ValueError(msg) + ++ args.update(_dict) ++ args.update(kw) ++ for k, v in args.items(): + if hasattr(v, "im"): + args[k] = _Operand(v) + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-4.patch b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-4.patch new file mode 100644 index 0000000000..da3e2c1974 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/CVE-2023-50447-4.patch @@ -0,0 +1,66 @@ +From 557ba59d13de919d04b3fd4cdef8634f7d4b3348 +From: Andrew Murray <radarhere@users.noreply.github.com> +Date: Sat Dec 30 09:30:12 2023 +1100 +Subject: [PATCH] python3-pillow: Include further builtins + +CVE: CVE-2023-50447 + +Upstream-Status: Backport 
[https://github.com/python-pillow/Pillow/commit/557ba59d13de919d04b3fd4cdef8634f7d4b3348] + +Signed-off-by: Rahul Janani Pandi <RahulJanani.Pandi@windriver.com> +--- + Tests/test_imagemath.py | 5 +++++ + docs/releasenotes/9.4.0.rst | 8 ++++++++ + src/PIL/ImageMath.py | 2 +- + 3 files changed, 14 insertions(+), 1 deletion(-) + +diff --git a/Tests/test_imagemath.py b/Tests/test_imagemath.py +index ded8c0011..124687478 100644 +--- a/Tests/test_imagemath.py ++++ b/Tests/test_imagemath.py +@@ -67,6 +67,11 @@ def test_prevent_double_underscores(): + with pytest.raises(ValueError): + ImageMath.eval("1", {"__": None}) + ++def test_prevent_builtins(): ++ with pytest.raises(ValueError): ++ ImageMath.eval("(lambda: exec('exit()'))()", {"exec": None}) ++ ++ + + def test_logical(): + assert pixel(ImageMath.eval("not A", images)) == 0 +diff --git a/docs/releasenotes/9.4.0.rst b/docs/releasenotes/9.4.0.rst +index 0af5bc8ca..9ca7c9f6f 100644 +--- a/docs/releasenotes/9.4.0.rst ++++ b/docs/releasenotes/9.4.0.rst +@@ -88,6 +88,14 @@ Pillow attempted to dereference a null pointer in ``ImageFont``, leading to a + crash. An error is now raised instead. This has been present since + Pillow 8.0.0. + ++Restricted environment keys for ImageMath.eval ++^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ++ ++:cve:`2023-50447`: If an attacker has control over the keys passed to the ++``environment`` argument of :py:meth:`PIL.ImageMath.eval`, they may be able to execute ++arbitrary code. To prevent this, keys matching the names of builtins and keys ++containing double underscores will now raise a :py:exc:`ValueError`. 
++ + Other Changes + ============= + +diff --git a/src/PIL/ImageMath.py b/src/PIL/ImageMath.py +index c14598a4c..b2c50bc5b 100644 +--- a/src/PIL/ImageMath.py ++++ b/src/PIL/ImageMath.py +@@ -238,7 +238,7 @@ def eval(expression, _dict={}, **kw): + # build execution namespace + args = ops.copy() + for k in list(_dict.keys()) + list(kw.keys()): +- if "__" in k or hasattr(__builtins__, k): ++ if "__" in k or hasattr(builtins, k): + msg = f"'{k}' not allowed" + raise ValueError(msg) + +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-pillow/run-ptest b/meta-python/recipes-devtools/python/python3-pillow/run-ptest new file mode 100644 index 0000000000..3385d68939 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow/run-ptest @@ -0,0 +1,3 @@ +#!/bin/sh + +pytest -o log_cli=true -o log_cli_level=INFO | sed -e 's/\[...%\]//g'| sed -e 's/PASSED/PASS/g'| sed -e 's/FAILED/FAIL/g'|sed -e 's/SKIPED/SKIP/g'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS"){printf "%s: %s\n", $NF, $0}else{print}}'| awk '{if ($NF=="PASS" || $NF=="FAIL" || $NF=="SKIP" || $NF=="XFAIL" || $NF=="XPASS") {$NF="";print $0}else{print}}' diff --git a/meta-python/recipes-devtools/python/python3-pillow_9.0.1.bb b/meta-python/recipes-devtools/python/python3-pillow_9.0.1.bb deleted file mode 100644 index fb86322f77..0000000000 --- a/meta-python/recipes-devtools/python/python3-pillow_9.0.1.bb +++ /dev/null @@ -1,42 +0,0 @@ -SUMMARY = "Python Imaging Library (Fork). Pillow is the friendly PIL fork by Alex \ -Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and \ -Contributors." 
-HOMEPAGE = "https://pillow.readthedocs.io" -LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://LICENSE;md5=ad081a0aede51e89f8da13333a8fb849" - -SRC_URI = "git://github.com/python-pillow/Pillow.git;branch=9.0.x;protocol=https \ - file://0001-support-cross-compiling.patch \ - file://0001-explicitly-set-compile-options.patch \ -" -SRCREV ?= "82541b6dec8452cb612067fcebba1c5a1a2bfdc8" - -inherit setuptools3 - -PIP_INSTALL_PACKAGE = "Pillow" -PIP_INSTALL_DIST_PATH = "${S}/dist" - -DEPENDS += " \ - zlib \ - jpeg \ - tiff \ - freetype \ - lcms \ - openjpeg \ -" - -RDEPENDS:${PN} += " \ - ${PYTHON_PN}-misc \ - ${PYTHON_PN}-logging \ - ${PYTHON_PN}-numbers \ -" - -CVE_PRODUCT = "pillow" - -S = "${WORKDIR}/git" - -RPROVIDES:${PN} += "python3-imaging" - -BBCLASSEXTEND = "native" - -SRCREV = "6deac9e3a23caffbfdd75c00d3f0a1cd36cdbd5d" diff --git a/meta-python/recipes-devtools/python/python3-pillow_9.4.0.bb b/meta-python/recipes-devtools/python/python3-pillow_9.4.0.bb new file mode 100644 index 0000000000..e1d0b30860 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pillow_9.4.0.bb @@ -0,0 +1,69 @@ +SUMMARY = "Python Imaging Library (Fork). Pillow is the friendly PIL fork by Alex \ +Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and \ +Contributors." 
+HOMEPAGE = "https://pillow.readthedocs.io" +LICENSE = "MIT" +LIC_FILES_CHKSUM = "file://LICENSE;md5=bc416d18f294943285560364be7cbec1" + +SRC_URI = "git://github.com/python-pillow/Pillow.git;branch=main;protocol=https \ + file://0001-support-cross-compiling.patch \ + file://0001-explicitly-set-compile-options.patch \ + file://run-ptest \ + file://CVE-2023-44271.patch \ + file://CVE-2023-50447-1.patch \ + file://CVE-2023-50447-2.patch \ + file://CVE-2023-50447-3.patch \ + file://CVE-2023-50447-4.patch \ + " +SRCREV ?= "82541b6dec8452cb612067fcebba1c5a1a2bfdc8" + +inherit setuptools3 ptest + +PIP_INSTALL_PACKAGE = "Pillow" +PIP_INSTALL_DIST_PATH = "${S}/dist" + +DEPENDS += " \ + zlib \ + jpeg \ + tiff \ + freetype \ + lcms \ + openjpeg \ +" + +RDEPENDS:${PN} += " \ + ${PYTHON_PN}-misc \ + ${PYTHON_PN}-logging \ + ${PYTHON_PN}-numbers \ +" + +RDEPENDS:${PN}-ptest += " \ + bash \ + ghostscript \ + jpeg-tools \ + libwebp \ + ${PYTHON_PN}-core \ + ${PYTHON_PN}-distutils \ + ${PYTHON_PN}-image \ + ${PYTHON_PN}-mmap \ + ${PYTHON_PN}-pytest \ + ${PYTHON_PN}-pytest-timeout \ + ${PYTHON_PN}-resource \ + ${PYTHON_PN}-unixadmin\ + ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'tk', '', d)} \ +" + +CVE_PRODUCT = "pillow" + +S = "${WORKDIR}/git" + +RPROVIDES:${PN} += "python3-imaging" + +do_install_ptest() { + install -d ${D}${PTEST_PATH}/Tests + cp -rf ${S}/Tests ${D}${PTEST_PATH}/ +} + +BBCLASSEXTEND = "native" + +SRCREV = "a5bbab1c1e63b439de191ef2040173713b26d2da" diff --git a/meta-python/recipes-devtools/python/python3-protobuf_3.20.0.bb b/meta-python/recipes-devtools/python/python3-protobuf_3.20.3.bb index 5c4de4ac2b..76b48e1ffc 100644 --- a/meta-python/recipes-devtools/python/python3-protobuf_3.20.0.bb +++ b/meta-python/recipes-devtools/python/python3-protobuf_3.20.3.bb @@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=53dbfa56f61b90215a inherit pypi setuptools3 -SRC_URI[sha256sum] = 
"71b2c3d1cd26ed1ec7c8196834143258b2ad7f444efff26fdc366c6f5e752702" +SRC_URI[sha256sum] = "2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2" # http://errors.yoctoproject.org/Errors/Details/184715/ # Can't find required file: ../src/google/protobuf/descriptor.proto diff --git a/meta-python/recipes-devtools/python/python3-pybluez/0001-Use-Py_ssize_t-when-parsing-buffer-length-fix-426-42.patch b/meta-python/recipes-devtools/python/python3-pybluez/0001-Use-Py_ssize_t-when-parsing-buffer-length-fix-426-42.patch new file mode 100644 index 0000000000..9126aba8d3 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pybluez/0001-Use-Py_ssize_t-when-parsing-buffer-length-fix-426-42.patch @@ -0,0 +1,153 @@ +From aa8ee5e5e934908f0357364f6ec90a3ecda62880 Mon Sep 17 00:00:00 2001 +From: Nicolas Schodet <nico@ni.fr.eu.org> +Date: Mon, 3 Jan 2022 02:37:01 +0100 +Subject: [PATCH] Use Py_ssize_t when parsing buffer length, fix #426 (#427) + +From python 3.9 documentation: + +> For all # variants of formats (s#, y#, etc.), the macro +> PY_SSIZE_T_CLEAN must be defined before including Python.h. On Python +> 3.9 and older, the type of the length argument is Py_ssize_t if the +> PY_SSIZE_T_CLEAN macro is defined, or int otherwise. + +From python 3.8 changes: + +> Use of # variants of formats in parsing or building value (e.g. +> PyArg_ParseTuple(), Py_BuildValue(), PyObject_CallFunction(), etc.) +> without PY_SSIZE_T_CLEAN defined raises DeprecationWarning now. It +> will be removed in 3.10 or 4.0. Read Parsing arguments and building +> values for detail. (Contributed by Inada Naoki in bpo-36381.) 
+ +Fixes https://github.com/pybluez/pybluez/issues/426 +--- +Upstream-Status: Accepted + + bluez/btmodule.c | 23 ++++++++++++++--------- + msbt/_msbt.c | 6 ++++-- + 2 files changed, 18 insertions(+), 11 deletions(-) + +diff --git a/bluez/btmodule.c b/bluez/btmodule.c +index 518b723..912a489 100644 +--- a/bluez/btmodule.c ++++ b/bluez/btmodule.c +@@ -16,7 +16,8 @@ Local naming conventions: + - names starting with bt_ are module-level functions + + */ +- ++#define PY_SSIZE_T_CLEAN 1 ++#include "Python.h" + #include "btmodule.h" + #include "structmember.h" + +@@ -732,7 +733,7 @@ sock_setsockopt(PySocketSockObject *s, PyObject *args) + int optname; + int res; + void *buf; +- int buflen; ++ Py_ssize_t buflen; + int flag; + + if (PyArg_ParseTuple(args, "iii:setsockopt", &level, &optname, &flag)) { +@@ -2001,7 +2002,8 @@ static PyObject * + bt_hci_send_cmd(PyObject *self, PyObject *args) + { + PySocketSockObject *socko = NULL; +- int err, plen = 0; ++ int err; ++ Py_ssize_t plen = 0; + uint16_t ogf, ocf; + char *param = NULL; + int dd = 0; +@@ -2036,6 +2038,7 @@ bt_hci_send_req(PyObject *self, PyObject *args, PyObject *kwds) + int err; + int to=0; + char rparam[256]; ++ Py_ssize_t req_clen; + struct hci_request req = { 0 }; + int dd = 0; + +@@ -2043,9 +2046,10 @@ bt_hci_send_req(PyObject *self, PyObject *args, PyObject *kwds) + "timeout", 0 }; + + if( !PyArg_ParseTupleAndKeywords(args, kwds, "OHHii|s#i", keywords, +- &socko, &req.ogf, &req.ocf, &req.event, &req.rlen, +- &req.cparam, &req.clen, &to) ) ++ &socko, &req.ogf, &req.ocf, &req.event, &req.rlen, ++ &req.cparam, &req_clen, &to) ) + return 0; ++ req.clen = req_clen; + + req.rparam = rparam; + dd = socko->sock_fd; +@@ -2274,7 +2278,8 @@ Returns the name of the device, or raises an error on failure"); + static PyObject * bt_hci_filter_ ## name (PyObject *self, PyObject *args )\ + { \ + char *param; \ +- int len, arg; \ ++ Py_ssize_t len; \ ++ int arg; \ + if( !PyArg_ParseTuple(args,"s#i", ¶m, &len, &arg) ) \ + return 
0; \ + if( len != sizeof(struct hci_filter) ) { \ +@@ -2303,7 +2308,7 @@ DECL_HCI_FILTER_OP_1(test_opcode, "test opcode!") + static PyObject * bt_hci_filter_ ## name (PyObject *self, PyObject *args )\ + { \ + char *param; \ +- int len; \ ++ Py_ssize_t len; \ + if( !PyArg_ParseTuple(args,"s#", ¶m, &len) ) \ + return 0; \ + if( len != sizeof(struct hci_filter) ) { \ +@@ -2364,7 +2369,7 @@ static PyObject * + bt_ba2str(PyObject *self, PyObject *args) + { + char *data=NULL; +- int len=0; ++ Py_ssize_t len=0; + char ba_str[19] = {0}; + if (!PyArg_ParseTuple(args, "s#", &data, &len)) return 0; + ba2str((bdaddr_t*)data, ba_str); +@@ -2579,7 +2584,7 @@ bt_sdp_advertise_service( PyObject *self, PyObject *args ) + *provider = NULL, + *description = NULL; + PyObject *service_classes, *profiles, *protocols; +- int namelen = 0, provlen = 0, desclen = 0; ++ Py_ssize_t namelen = 0, provlen = 0, desclen = 0; + uuid_t svc_uuid = { 0 }; + int i; + char addrbuf[256] = { 0 }; +diff --git a/msbt/_msbt.c b/msbt/_msbt.c +index b3d27ff..81f5ee9 100644 +--- a/msbt/_msbt.c ++++ b/msbt/_msbt.c +@@ -2,6 +2,8 @@ + #define UNICODE + #endif + ++#define PY_SSIZE_T_CLEAN 1 ++ + #include <winsock2.h> + #include <ws2bth.h> + #include <BluetoothAPIs.h> +@@ -155,7 +157,7 @@ static PyObject * + msbt_bind(PyObject *self, PyObject *args) + { + wchar_t *addrstr = NULL; +- int addrstrlen = -1; ++ Py_ssize_t addrstrlen = -1; + int sockfd = -1; + int port = -1; + char buf[100] = { 0 }; +@@ -765,7 +767,7 @@ msbt_set_service_raw(PyObject *self, PyObject *args) + WSAESETSERVICEOP op; + + char *record = NULL; +- int reclen = -1; ++ Py_ssize_t reclen = -1; + BTH_SET_SERVICE *si = NULL; + int silen = -1; + ULONG sdpVersion = BTH_SDP_VERSION; +-- +2.34.1 + diff --git a/meta-python/recipes-devtools/python/python3-pybluez_0.23.bb b/meta-python/recipes-devtools/python/python3-pybluez_0.23.bb index b32f3a3627..6a1df273a2 100644 --- a/meta-python/recipes-devtools/python/python3-pybluez_0.23.bb +++ 
b/meta-python/recipes-devtools/python/python3-pybluez_0.23.bb @@ -7,6 +7,7 @@ DEPENDS = "bluez5" LICENSE = "GPL-2.0-only" LIC_FILES_CHKSUM = "file://COPYING;md5=8a71d0475d08eee76d8b6d0c6dbec543" +SRC_URI += "file://0001-Use-Py_ssize_t-when-parsing-buffer-length-fix-426-42.patch" SRC_URI[md5sum] = "afbe8429bb82d2c46a3d0f5f4f898f9d" SRC_URI[sha256sum] = "c8f04d2e78951eaa9de486b4d49381704e8943d0a6e6e58f55fcd7b8582e90de" diff --git a/meta-python/recipes-devtools/python/python3-pyudev_0.23.2.bb b/meta-python/recipes-devtools/python/python3-pyudev_0.23.2.bb index 4c4c959eba..035e149518 100644 --- a/meta-python/recipes-devtools/python/python3-pyudev_0.23.2.bb +++ b/meta-python/recipes-devtools/python/python3-pyudev_0.23.2.bb @@ -21,4 +21,4 @@ RDEPENDS:${PN} = "\ libudev \ " -BBCLASSEXTEND = "native nativesdk" +BBCLASSEXTEND = "native" diff --git a/meta-python/recipes-devtools/python/python3-pyyaml-include/run-ptest b/meta-python/recipes-devtools/python/python3-pyyaml-include/run-ptest new file mode 100755 index 0000000000..8d2017d39c --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pyyaml-include/run-ptest @@ -0,0 +1,3 @@ +#!/bin/sh + +pytest --automake diff --git a/meta-python/recipes-devtools/python/python3-pyyaml-include_1.4.1.bb b/meta-python/recipes-devtools/python/python3-pyyaml-include_1.4.1.bb new file mode 100644 index 0000000000..6768e959f4 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-pyyaml-include_1.4.1.bb @@ -0,0 +1,29 @@ +SUMMARY = "Extending PyYAML with a custom constructor for including YAML files within YAML files" +HOMEPAGE = "https://github.com/tanbro/pyyaml-include" +LICENSE = "GPL-3.0-only" +LIC_FILES_CHKSUM = "file://LICENSE;md5=d32239bcb673463ab874e80d47fae504" +SRCREV = "0f86bf16343d2ad52b53b793e0b35bb7ed7cd85b" + +SRC_URI = " \ + git://github.com/tanbro/pyyaml-include;protocol=https;branch=1.x \ + file://run-ptest \ + " + +S = "${WORKDIR}/git" + +inherit python_setuptools_build_meta ptest + +do_install_ptest() { + 
install -d ${D}${PTEST_PATH}/tests + cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/ +} + +RDEPENDS:${PN} += " \ + python3-pyyaml \ + python3-toml \ +" +RDEPENDS:${PN}-ptest += " \ + python3-pytest \ + python3-unittest-automake-output \ +" + diff --git a/meta-python/recipes-devtools/python/python3-requests-toolbelt/0001-Fix-collections.abc-deprecation-warning-in-downloadu.patch b/meta-python/recipes-devtools/python/python3-requests-toolbelt/0001-Fix-collections.abc-deprecation-warning-in-downloadu.patch new file mode 100644 index 0000000000..baa833b6d2 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-requests-toolbelt/0001-Fix-collections.abc-deprecation-warning-in-downloadu.patch @@ -0,0 +1,41 @@ +From 7188b06330e5260be20bce8cbcf0d5ae44e34eaf Mon Sep 17 00:00:00 2001 +From: Jon Dufresne <jon.dufresne@gmail.com> +Date: Fri, 1 Feb 2019 16:30:01 -0800 +Subject: [PATCH] Fix collections.abc deprecation warning in downloadutils + +Warning appears as: + +tests/test_downloadutils.py::test_stream_response_to_specific_filename + requests_toolbelt/downloadutils/stream.py:161: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working + if path and isinstance(getattr(path, 'write', None), collections.Callable): + +Upstream-Status: Backport [https://github.com/requests/toolbelt/commit/7188b06330e5260be20bce8cbcf0d5ae44e34eaf] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + requests_toolbelt/downloadutils/stream.py | 3 +-- + 1 file changed, 1 insertion(+), 2 deletions(-) + +diff --git a/requests_toolbelt/downloadutils/stream.py b/requests_toolbelt/downloadutils/stream.py +index eed60a7..1d1c31b 100644 +--- a/requests_toolbelt/downloadutils/stream.py ++++ b/requests_toolbelt/downloadutils/stream.py +@@ -1,6 +1,5 @@ + # -*- coding: utf-8 -*- + """Utilities for dealing with streamed requests.""" +-import collections + import os.path + import re + +@@ -158,7 +157,7 
@@ def stream_response_to_file(response, path=None, chunksize=_DEFAULT_CHUNKSIZE): + pre_opened = False + fd = None + filename = None +- if path and isinstance(getattr(path, 'write', None), collections.Callable): ++ if path and callable(getattr(path, 'write', None)): + pre_opened = True + fd = path + filename = getattr(fd, 'name', None) +-- +2.25.1 + diff --git a/meta-python/recipes-devtools/python/python3-requests-toolbelt_0.9.1.bb b/meta-python/recipes-devtools/python/python3-requests-toolbelt_0.9.1.bb index 366f41ca81..72ad7a6180 100644 --- a/meta-python/recipes-devtools/python/python3-requests-toolbelt_0.9.1.bb +++ b/meta-python/recipes-devtools/python/python3-requests-toolbelt_0.9.1.bb @@ -6,7 +6,8 @@ LICENSE = "Apache-2.0" LIC_FILES_CHKSUM = "file://LICENSE;md5=71760e0f1dda8cff91b0bc9246caf571" SRC_URI = "file://run-ptest \ - " + file://0001-Fix-collections.abc-deprecation-warning-in-downloadu.patch \ + " SRC_URI[md5sum] = "b1509735c4b4cf95df2619facbc3672e" SRC_URI[sha256sum] = "968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" @@ -31,4 +32,4 @@ do_install_ptest() { # remove test test_multipart_encoder.py as it fails, # downloaded file is not supported rm -f ${D}${PTEST_PATH}/tests/test_multipart_encoder.py -} +} diff --git a/meta-python/recipes-devtools/python/python3-robotframework-seriallibrary_0.3.1.bb b/meta-python/recipes-devtools/python/python3-robotframework-seriallibrary_0.3.1.bb index d9465af081..ecc15499cf 100644 --- a/meta-python/recipes-devtools/python/python3-robotframework-seriallibrary_0.3.1.bb +++ b/meta-python/recipes-devtools/python/python3-robotframework-seriallibrary_0.3.1.bb @@ -16,5 +16,3 @@ RDEPENDS:${PN} += " \ ${PYTHON_PN}-pyserial \ ${PYTHON_PN}-robotframework \ " - -BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-snappy_0.6.1.bb b/meta-python/recipes-devtools/python/python3-snappy_0.6.1.bb index 8a30f7cb78..bd0979d0b4 100644 --- 
a/meta-python/recipes-devtools/python/python3-snappy_0.6.1.bb +++ b/meta-python/recipes-devtools/python/python3-snappy_0.6.1.bb @@ -11,5 +11,3 @@ inherit pypi setuptools3 PYPI_PACKAGE = "python-snappy" RDEPENDS:${PN} += "snappy" - -BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-soupsieve_2.3.1.bb b/meta-python/recipes-devtools/python/python3-soupsieve_2.3.1.bb index 7cb76b426f..631a45c99e 100644 --- a/meta-python/recipes-devtools/python/python3-soupsieve_2.3.1.bb +++ b/meta-python/recipes-devtools/python/python3-soupsieve_2.3.1.bb @@ -12,10 +12,6 @@ SRC_URI += " \ file://run-ptest \ " -RDEPENDS:${PN} += "\ - ${PYTHON_PN}-beautifulsoup4 \ -" - RDEPENDS:${PN}-ptest += " \ ${PYTHON_PN}-pytest \ ${PYTHON_PN}-beautifulsoup4 \ diff --git a/meta-python/recipes-devtools/python/python3-sqlparse/CVE-2023-30608.patch b/meta-python/recipes-devtools/python/python3-sqlparse/CVE-2023-30608.patch new file mode 100644 index 0000000000..41dbf088e1 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-sqlparse/CVE-2023-30608.patch @@ -0,0 +1,75 @@ +From fa1cc25e1967228e5d47b9ddb626cc82dba92d7e Mon Sep 17 00:00:00 2001 +From: Andi Albrecht <albrecht.andi@gmail.com> +Date: Wed, 31 May 2023 12:29:07 +0000 +Subject: [PATCH] Remove unnecessary parts in regex for bad escaping. + +The regex tried to deal with situations where escaping in the +SQL to be parsed was suspicious. 
+ +CVE: CVE-2023-30608 + +Upstream-Status: Backport [https://github.com/andialbrecht/sqlparse/commit/c457abd5f097dd13fb21543381e7cfafe7d31cfb] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + CHANGELOG | 15 +++++++++++++++ + sqlparse/keywords.py | 4 ++-- + tests/test_split.py | 4 ++-- + 3 files changed, 19 insertions(+), 4 deletions(-) + +diff --git a/CHANGELOG b/CHANGELOG +index 65e03fc..a584003 100644 +--- a/CHANGELOG ++++ b/CHANGELOG +@@ -1,3 +1,18 @@ ++Backport CVE-2023-30608 Fix ++--------------------------- ++ ++Notable Changes ++ ++* IMPORTANT: This release fixes a security vulnerability in the ++ parser where a regular expression vulnerable to ReDOS (Regular ++ Expression Denial of Service) was used. See the security advisory ++ for details: https://github.com/andialbrecht/sqlparse/security/advisories/GHSA-rrm6-wvj7-cwh2 ++ The vulnerability was discovered by @erik-krogh from GitHub ++ Security Lab (GHSL). Thanks for reporting! ++ ++* Fix regular expressions for string parsing. ++ ++ + Release 0.4.2 (Sep 10, 2021) + ---------------------------- + +diff --git a/sqlparse/keywords.py b/sqlparse/keywords.py +index 6850628..4e97477 100644 +--- a/sqlparse/keywords.py ++++ b/sqlparse/keywords.py +@@ -66,9 +66,9 @@ SQL_REGEX = { + (r'(?![_A-ZÀ-Ü])-?(\d+(\.\d*)|\.\d+)(?![_A-ZÀ-Ü])', + tokens.Number.Float), + (r'(?![_A-ZÀ-Ü])-?\d+(?![_A-ZÀ-Ü])', tokens.Number.Integer), +- (r"'(''|\\\\|\\'|[^'])*'", tokens.String.Single), ++ (r"'(''|\\'|[^'])*'", tokens.String.Single), + # not a real string literal in ANSI SQL: +- (r'"(""|\\\\|\\"|[^"])*"', tokens.String.Symbol), ++ (r'"(""|\\"|[^"])*"', tokens.String.Symbol), + (r'(""|".*?[^\\]")', tokens.String.Symbol), + # sqlite names can be escaped with [square brackets]. 
left bracket + # cannot be preceded by word character or a right bracket -- +diff --git a/tests/test_split.py b/tests/test_split.py +index a9d7576..e79750e 100644 +--- a/tests/test_split.py ++++ b/tests/test_split.py +@@ -18,8 +18,8 @@ def test_split_semicolon(): + + + def test_split_backslash(): +- stmts = sqlparse.parse(r"select '\\'; select '\''; select '\\\'';") +- assert len(stmts) == 3 ++ stmts = sqlparse.parse("select '\'; select '\'';") ++ assert len(stmts) == 2 + + + @pytest.mark.parametrize('fn', ['function.sql', +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-sqlparse_0.4.2.bb b/meta-python/recipes-devtools/python/python3-sqlparse_0.4.2.bb index 0980ff9c24..b5cc41e730 100644 --- a/meta-python/recipes-devtools/python/python3-sqlparse_0.4.2.bb +++ b/meta-python/recipes-devtools/python/python3-sqlparse_0.4.2.bb @@ -6,6 +6,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=2b136f573f5386001ea3b7b9016222fc" SRC_URI += "file://0001-sqlparse-change-shebang-to-python3.patch \ file://run-ptest \ + file://CVE-2023-30608.patch \ " SRC_URI[sha256sum] = "0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae" diff --git a/meta-python/recipes-devtools/python/python3-twisted_22.2.0.bb b/meta-python/recipes-devtools/python/python3-twisted_22.2.0.bb index f4575c9c2e..c55c86ea50 100644 --- a/meta-python/recipes-devtools/python/python3-twisted_22.2.0.bb +++ b/meta-python/recipes-devtools/python/python3-twisted_22.2.0.bb @@ -57,6 +57,7 @@ RDEPENDS:${PN} = "\ " RDEPENDS:${PN}-core = "${PYTHON_PN}-appdirs \ + ${PYTHON_PN}-asyncio \ ${PYTHON_PN}-automat \ ${PYTHON_PN}-constantly \ ${PYTHON_PN}-core \ @@ -65,6 +66,7 @@ RDEPENDS:${PN}-core = "${PYTHON_PN}-appdirs \ ${PYTHON_PN}-incremental \ ${PYTHON_PN}-pyhamcrest \ ${PYTHON_PN}-pyserial \ + ${PYTHON_PN}-typing-extensions \ ${PYTHON_PN}-unixadmin \ ${PYTHON_PN}-zopeinterface \ " diff --git a/meta-python/recipes-devtools/python/python3-txaio_22.2.1.bb 
b/meta-python/recipes-devtools/python/python3-txaio_22.2.1.bb index e2102695ec..50f14b17fd 100644 --- a/meta-python/recipes-devtools/python/python3-txaio_22.2.1.bb +++ b/meta-python/recipes-devtools/python/python3-txaio_22.2.1.bb @@ -10,5 +10,3 @@ inherit pypi setuptools3 RDEPENDS:${PN} += " \ ${PYTHON_PN}-twisted \ " - -BBCLASSEXTEND = "native nativesdk" diff --git a/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-23934.patch b/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-23934.patch new file mode 100644 index 0000000000..3a0f4324a1 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-23934.patch @@ -0,0 +1,117 @@ +From db1457abec7fe27148673f5f8bfdf5c52eb7f29f Mon Sep 17 00:00:00 2001 +From: David Lord <davidism@gmail.com> +Date: Wed, 10 May 2023 11:33:18 +0000 +Subject: [PATCH] Merge pull request from GHSA-px8h-6qxv-m22q + +don't strip leading `=` when parsing cookie + +"src/werkzeug/sansio/http.py" file is not available in the current recipe +version 2.1.1 and this has been introduced from 2.2.0 version. Before 2.2.0 +version, this http.py file was only available in the "src/werkzeug/http.py" +and we could see the same functions available there which are getting modified +in the CVE fix commit. Hence, modifying the same at "src/werkzeug/http.py" file. + +CVE: CVE-2023-23934 + +Upstream-Status: Backport [https://github.com/pallets/werkzeug/commit/cf275f42acad1b5950c50ffe8ef58fe62cdce028] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + CHANGES.rst | 3 +++ + src/werkzeug/_internal.py | 13 +++++++++---- + src/werkzeug/http.py | 4 ---- + tests/test_http.py | 4 +++- + 4 files changed, 15 insertions(+), 9 deletions(-) + +diff --git a/CHANGES.rst b/CHANGES.rst +index 6e809ba..13ef75b 100644 +--- a/CHANGES.rst ++++ b/CHANGES.rst +@@ -4,6 +4,9 @@ + ``RequestEntityTooLarge`` exception is raised on parsing. 
This mitigates a DoS + attack where a larger number of form/file parts would result in disproportionate + resource use. ++- A cookie header that starts with ``=`` is treated as an empty key and discarded, ++ rather than stripping the leading ``==``. ++ + + Version 2.1.1 + ------------- +diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py +index a8b3523..d6290ba 100644 +--- a/src/werkzeug/_internal.py ++++ b/src/werkzeug/_internal.py +@@ -34,7 +34,7 @@ _quote_re = re.compile(rb"[\\].") + _legal_cookie_chars_re = rb"[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]" + _cookie_re = re.compile( + rb""" +- (?P<key>[^=;]+) ++ (?P<key>[^=;]*) + (?:\s*=\s* + (?P<val> + "(?:[^\\"]|\\.)*" | +@@ -382,16 +382,21 @@ def _cookie_parse_impl(b: bytes) -> t.Iterator[t.Tuple[bytes, bytes]]: + """Lowlevel cookie parsing facility that operates on bytes.""" + i = 0 + n = len(b) ++ b += b";" + + while i < n: +- match = _cookie_re.search(b + b";", i) ++ match = _cookie_re.match(b, i) ++ + if not match: + break + +- key = match.group("key").strip() +- value = match.group("val") or b"" + i = match.end(0) ++ key = match.group("key").strip() ++ ++ if not key: ++ continue + ++ value = match.group("val") or b"" + yield key, _cookie_unquote(value) + + +diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py +index 9369900..ae133e3 100644 +--- a/src/werkzeug/http.py ++++ b/src/werkzeug/http.py +@@ -1205,10 +1205,6 @@ def parse_cookie( + def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]: + for key, val in _cookie_parse_impl(header): # type: ignore + key_str = _to_str(key, charset, errors, allow_none_charset=True) +- +- if not key_str: +- continue +- + val_str = _to_str(val, charset, errors, allow_none_charset=True) + yield key_str, val_str + +diff --git a/tests/test_http.py b/tests/test_http.py +index 5936bfa..59cc179 100644 +--- a/tests/test_http.py ++++ b/tests/test_http.py +@@ -427,7 +427,8 @@ class TestHTTPUtility: + def test_parse_cookie(self): + cookies = http.parse_cookie( 
+ "dismiss-top=6; CP=null*; PHPSESSID=0a539d42abc001cdc762809248d4beed;" +- 'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d' ++ 'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d;' ++ "==__Host-eq=bad;__Host-eq=good;" + ) + assert cookies.to_dict() == { + "CP": "null*", +@@ -438,6 +439,7 @@ class TestHTTPUtility: + "fo234{": "bar", + "blub": "Blah", + '"__Secure-c"': "d", ++ "__Host-eq": "good", + } + + def test_dump_cookie(self): +-- +2.40.0 + diff --git a/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-25577.patch b/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-25577.patch new file mode 100644 index 0000000000..61551d8fca --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-werkzeug/CVE-2023-25577.patch @@ -0,0 +1,231 @@ +From 5a56cdcbaec2153cd67596c6c2c8056e1ea5ed56 Mon Sep 17 00:00:00 2001 +From: David Lord <davidism@gmail.com> +Date: Tue, 2 May 2023 11:31:10 +0000 +Subject: [PATCH] Merge pull request from GHSA-xg9f-g7g7-2323 + +limit the maximum number of multipart form parts + +CVE: CVE-2023-25577 + +Upstream-Status: Backport [https://github.com/pallets/werkzeug/commit/517cac5a804e8c4dc4ed038bb20dacd038e7a9f1] + +Signed-off-by: Narpat Mali <narpat.mali@windriver.com> +--- + CHANGES.rst | 5 +++++ + docs/request_data.rst | 37 +++++++++++++++++--------------- + src/werkzeug/formparser.py | 12 ++++++++++- + src/werkzeug/sansio/multipart.py | 8 +++++++ + src/werkzeug/wrappers/request.py | 8 +++++++ + tests/test_formparser.py | 9 ++++++++ + 6 files changed, 61 insertions(+), 18 deletions(-) + +diff --git a/CHANGES.rst b/CHANGES.rst +index a351d7c..6e809ba 100644 +--- a/CHANGES.rst ++++ b/CHANGES.rst +@@ -1,5 +1,10 @@ + .. currentmodule:: werkzeug + ++- Specify a maximum number of multipart parts, default 1000, after which a ++ ``RequestEntityTooLarge`` exception is raised on parsing. This mitigates a DoS ++ attack where a larger number of form/file parts would result in disproportionate ++ resource use. 
++ + Version 2.1.1 + ------------- + +diff --git a/docs/request_data.rst b/docs/request_data.rst +index 83c6278..e55841e 100644 +--- a/docs/request_data.rst ++++ b/docs/request_data.rst +@@ -73,23 +73,26 @@ read the stream *or* call :meth:`~Request.get_data`. + Limiting Request Data + --------------------- + +-To avoid being the victim of a DDOS attack you can set the maximum +-accepted content length and request field sizes. The :class:`Request` +-class has two attributes for that: :attr:`~Request.max_content_length` +-and :attr:`~Request.max_form_memory_size`. +- +-The first one can be used to limit the total content length. For example +-by setting it to ``1024 * 1024 * 16`` the request won't accept more than +-16MB of transmitted data. +- +-Because certain data can't be moved to the hard disk (regular post data) +-whereas temporary files can, there is a second limit you can set. The +-:attr:`~Request.max_form_memory_size` limits the size of `POST` +-transmitted form data. By setting it to ``1024 * 1024 * 2`` you can make +-sure that all in memory-stored fields are not more than 2MB in size. +- +-This however does *not* affect in-memory stored files if the +-`stream_factory` used returns a in-memory file. ++The :class:`Request` class provides a few attributes to control how much data is ++processed from the request body. This can help mitigate DoS attacks that craft the ++request in such a way that the server uses too many resources to handle it. Each of ++these limits will raise a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` if they are ++exceeded. ++ ++- :attr:`~Request.max_content_length` Stop reading request data after this number ++ of bytes. It's better to configure this in the WSGI server or HTTP server, rather ++ than the WSGI application. ++- :attr:`~Request.max_form_memory_size` Stop reading request data if any form part is ++ larger than this number of bytes. 
While file parts can be moved to disk, regular ++ form field data is stored in memory only. ++- :attr:`~Request.max_form_parts` Stop reading request data if more than this number ++ of parts are sent in multipart form data. This is useful to stop a very large number ++ of very small parts, especially file parts. The default is 1000. ++ ++Using Werkzeug to set these limits is only one layer of protection. WSGI servers ++and HTTPS servers should set their own limits on size and timeouts. The operating system ++or container manager should set limits on memory and processing time for server ++processes. + + + How to extend Parsing? +diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py +index 10d58ca..bebb2fc 100644 +--- a/src/werkzeug/formparser.py ++++ b/src/werkzeug/formparser.py +@@ -179,6 +179,8 @@ class FormDataParser: + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. ++ :param max_form_parts: The maximum number of parts to be parsed. If this is ++ exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised. 
+ """ + + def __init__( +@@ -190,6 +192,8 @@ class FormDataParser: + max_content_length: t.Optional[int] = None, + cls: t.Optional[t.Type[MultiDict]] = None, + silent: bool = True, ++ *, ++ max_form_parts: t.Optional[int] = None, + ) -> None: + if stream_factory is None: + stream_factory = default_stream_factory +@@ -199,6 +203,7 @@ class FormDataParser: + self.errors = errors + self.max_form_memory_size = max_form_memory_size + self.max_content_length = max_content_length ++ self.max_form_parts = max_form_parts + + if cls is None: + cls = MultiDict +@@ -281,6 +286,7 @@ class FormDataParser: + self.errors, + max_form_memory_size=self.max_form_memory_size, + cls=self.cls, ++ max_form_parts=self.max_form_parts, + ) + boundary = options.get("boundary", "").encode("ascii") + +@@ -346,10 +352,12 @@ class MultiPartParser: + max_form_memory_size: t.Optional[int] = None, + cls: t.Optional[t.Type[MultiDict]] = None, + buffer_size: int = 64 * 1024, ++ max_form_parts: t.Optional[int] = None, + ) -> None: + self.charset = charset + self.errors = errors + self.max_form_memory_size = max_form_memory_size ++ self.max_form_parts = max_form_parts + + if stream_factory is None: + stream_factory = default_stream_factory +@@ -409,7 +417,9 @@ class MultiPartParser: + [None], + ) + +- parser = MultipartDecoder(boundary, self.max_form_memory_size) ++ parser = MultipartDecoder( ++ boundary, self.max_form_memory_size, max_parts=self.max_form_parts ++ ) + + fields = [] + files = [] +diff --git a/src/werkzeug/sansio/multipart.py b/src/werkzeug/sansio/multipart.py +index 2d54422..e7d742b 100644 +--- a/src/werkzeug/sansio/multipart.py ++++ b/src/werkzeug/sansio/multipart.py +@@ -83,10 +83,13 @@ class MultipartDecoder: + self, + boundary: bytes, + max_form_memory_size: Optional[int] = None, ++ *, ++ max_parts: Optional[int] = None, + ) -> None: + self.buffer = bytearray() + self.complete = False + self.max_form_memory_size = max_form_memory_size ++ self.max_parts = max_parts + self.state = 
State.PREAMBLE + self.boundary = boundary + +@@ -113,6 +116,7 @@ class MultipartDecoder: + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) ++ self._parts_decoded = 0 + + def last_newline(self) -> int: + try: +@@ -177,6 +181,10 @@ class MultipartDecoder: + name=name, + ) + self.state = State.DATA ++ self._parts_decoded += 1 ++ ++ if self.max_parts is not None and self._parts_decoded > self.max_parts: ++ raise RequestEntityTooLarge() + + elif self.state == State.DATA: + if self.buffer.find(b"--" + self.boundary) == -1: +diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py +index 57b739c..a6d5429 100644 +--- a/src/werkzeug/wrappers/request.py ++++ b/src/werkzeug/wrappers/request.py +@@ -83,6 +83,13 @@ class Request(_SansIORequest): + #: .. versionadded:: 0.5 + max_form_memory_size: t.Optional[int] = None + ++ #: The maximum number of multipart parts to parse, passed to ++ #: :attr:`form_data_parser_class`. Parsing form data with more than this ++ #: many parts will raise :exc:`~.RequestEntityTooLarge`. ++ #: ++ #: .. versionadded:: 2.2.3 ++ max_form_parts = 1000 ++ + #: The form data parser that should be used. Can be replaced to customize + #: the form date parsing. 
+ form_data_parser_class: t.Type[FormDataParser] = FormDataParser +@@ -246,6 +253,7 @@ class Request(_SansIORequest): + self.max_form_memory_size, + self.max_content_length, + self.parameter_storage_class, ++ max_form_parts=self.max_form_parts, + ) + + def _load_form_data(self) -> None: +diff --git a/tests/test_formparser.py b/tests/test_formparser.py +index 5fc803e..834324f 100644 +--- a/tests/test_formparser.py ++++ b/tests/test_formparser.py +@@ -127,6 +127,15 @@ class TestFormParser: + req.max_form_memory_size = 400 + assert req.form["foo"] == "Hello World" + ++ req = Request.from_values( ++ input_stream=io.BytesIO(data), ++ content_length=len(data), ++ content_type="multipart/form-data; boundary=foo", ++ method="POST", ++ ) ++ req.max_form_parts = 1 ++ pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"]) ++ + def test_missing_multipart_boundary(self): + data = ( + b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n" +-- +2.40.0 diff --git a/meta-python/recipes-devtools/python/python3-werkzeug_2.1.1.bb b/meta-python/recipes-devtools/python/python3-werkzeug_2.1.1.bb index 476a3a5964..fc0789a73e 100644 --- a/meta-python/recipes-devtools/python/python3-werkzeug_2.1.1.bb +++ b/meta-python/recipes-devtools/python/python3-werkzeug_2.1.1.bb @@ -12,6 +12,9 @@ LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=5dc88300786f1c214c1e9827a5229462" PYPI_PACKAGE = "Werkzeug" +SRC_URI += "file://CVE-2023-25577.patch \ + file://CVE-2023-23934.patch" + SRC_URI[sha256sum] = "f8e89a20aeabbe8a893c24a461d3ee5dad2123b05cc6abd73ceed01d39c3ae74" inherit pypi setuptools3 |