Diffstat (limited to 'target/linux/generic/backport-5.4/042-v5.5-arm64-csum-Fix-pathological-zero-length-calls.patch')
-rw-r--r-- | target/linux/generic/backport-5.4/042-v5.5-arm64-csum-Fix-pathological-zero-length-calls.patch | 28
1 file changed, 28 insertions, 0 deletions
diff --git a/target/linux/generic/backport-5.4/042-v5.5-arm64-csum-Fix-pathological-zero-length-calls.patch b/target/linux/generic/backport-5.4/042-v5.5-arm64-csum-Fix-pathological-zero-length-calls.patch
new file mode 100644
index 0000000000..50b210e14f
--- /dev/null
+++ b/target/linux/generic/backport-5.4/042-v5.5-arm64-csum-Fix-pathological-zero-length-calls.patch
@@ -0,0 +1,28 @@
+From: Robin Murphy <robin.murphy@arm.com>
+Date: Fri, 17 Jan 2020 15:48:39 +0000
+Subject: [PATCH] arm64: csum: Fix pathological zero-length calls
+
+In validating the checksumming results of the new routine, I sadly
+neglected to test its not-checksumming results. Thus it slipped through
+that the one case where @buff is already dword-aligned and @len = 0
+manages to defeat the tail-masking logic and behave as if @len = 8.
+For a zero length it doesn't make much sense to dereference @buff anyway,
+so just add an early return (which has essentially zero impact on
+performance).
+
+Signed-off-by: Robin Murphy <robin.murphy@arm.com>
+Signed-off-by: Will Deacon <will@kernel.org>
+---
+
+--- a/arch/arm64/lib/csum.c
++++ b/arch/arm64/lib/csum.c
+@@ -20,6 +20,9 @@ unsigned int do_csum(const unsigned char
+ 	const u64 *ptr;
+ 	u64 data, sum64 = 0;
+
++	if (unlikely(len == 0))
++		return 0;
++
+ 	offset = (unsigned long)buff & 7;
+ 	/*
+ 	 * This is to all intents and purposes safe, since rounding down cannot
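
For readers curious how a zero-length call slips through, below is a minimal userspace sketch of the tail-masking arithmetic described in the commit message. It is a simplified model, not the kernel source: the tail_mask() helper is illustrative, and the explicit mod-64 shift emulates how AArch64 variable shift instructions use only the low six bits of the shift amount.

#include <stdint.h>
#include <stdio.h>

/*
 * Simplified model of the tail masking in arch/arm64/lib/csum.c.
 * The routine adjusts the length to len = len + offset - 8 and later
 * masks off over-read tail bytes with a shift of len * -8 bits. In
 * the one case where @buff is dword-aligned (offset = 0) and @len = 0,
 * the adjusted length is -8, so the shift amount becomes 64. On
 * AArch64, variable shifts wrap modulo 64, so a shift by 64 acts as a
 * shift by 0 and the whole dword survives, as if @len were 8.
 */
static uint64_t tail_mask(uint64_t data, int len)
{
	unsigned int shift = len * -8;	/* len = -8  =>  shift = 64 */

	/* Model the AArch64 mod-64 shift behaviour portably. */
	shift &= 63;
	return (data << shift) >> shift;	/* little-endian variant */
}

int main(void)
{
	uint64_t data = 0x1122334455667788ULL;
	int len = 0 + 0 - 8;	/* @len = 0, offset = 0: adjusted len = -8 */

	/*
	 * All eight bytes should be masked away for a zero-length buffer,
	 * but the wrapped shift keeps them all.
	 */
	printf("masked data = %#018llx\n",
	       (unsigned long long)tail_mask(data, len));
	return 0;
}

With the early "if (unlikely(len == 0)) return 0;" added by the patch, do_csum() never reaches the masking step for a zero length, so @buff is never dereferenced at all.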