From: Steve Best <sbest@redhat.com>
Date: Wed, 3 Feb 2010 14:09:31 -0500
Subject: [ppc] fix sched while atomic error in alignment handler
Message-id: <20100203140223.7502.99370.sendpatchset@squad5-lp1.lab.bos.redhat.com>
Patchwork-id: 23117
O-Subject: [PATCH RHEL5.5 BZ543637 V2] oops with error 'BUG: scheduling while atomic'
Bugzilla: 543637
RH-Acked-by: David S. Miller <davem@redhat.com>
RH-Acked-by: David Howells <dhowells@redhat.com>

RHBZ#:
======
https://bugzilla.redhat.com/show_bug.cgi?id=543637

Description:
============
Alignment exception uses __get/put_user_inatomic

Make the alignment exception handler use the new _inatomic variants
of __get/put_user. The handler can be entered while page faults must
not be taken (for example, when the unaligned access was performed
from atomic context), and the plain __get_user()/__put_user() accessors
can end up sleeping while the page is faulted in, which produces the
"BUG: scheduling while atomic" oops reported in the bug.

V2 - fix spacing issue pointed out by Prarit, patch now matches upstream
commit
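
For context, here is a minimal illustrative sketch (not part of the
patch) of the access pattern the change enforces in align.c. The
helper name read_insn_word() is made up for illustration; the
__get_user_inatomic()/__put_user_inatomic() accessors are assumed to
be available from include/asm-powerpc/uaccess.h in this kernel:

#include <linux/compiler.h>	/* unlikely() */
#include <asm/ptrace.h>		/* struct pt_regs */
#include <asm/uaccess.h>	/* __get_user_inatomic() */

/*
 * Illustration only: fetch the instruction word that triggered the
 * alignment exception.  __get_user() may sleep while the page is
 * faulted in, which is not allowed when the exception was taken from
 * atomic context; __get_user_inatomic() is the non-sleeping variant,
 * so a failed access simply returns -EFAULT and the handler can bail
 * out cleanly.
 */
static int read_insn_word(struct pt_regs *regs, unsigned int *instr)
{
	unsigned long pc = regs->nip;

	if (unlikely(__get_user_inatomic(*instr, (unsigned int __user *)pc)))
		return -EFAULT;
	return 0;
}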

RHEL Version Found:
===================
RHEL 5.4

kABI Status:
============
No symbols were harmed.

Brew:
=====
https://brewweb.devel.redhat.com/taskinfo?taskID=2240385

Upstream Status:
================
http://www.mail-archive.com/git-commits-head@vger.kernel.org/msg10115.html

Test Status:
============
Tested by Ken Werner. Test case included in the bz.

Signed-off-by: Jarod Wilson <jarod@redhat.com>

diff --git a/arch/powerpc/kernel/align.c b/arch/powerpc/kernel/align.c
index bc99107..37c73a9 100644
--- a/arch/powerpc/kernel/align.c
+++ b/arch/powerpc/kernel/align.c
@@ -241,7 +241,7 @@ static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
 	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
 		return -EFAULT;
 	for (i = 0; i < size / sizeof(long); ++i)
-		if (__put_user(0, p+i))
+		if (__put_user_inatomic(0, p+i))
 			return -EFAULT;
 	return 1;
 }
@@ -288,7 +288,8 @@ static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
 		} else {
 			unsigned long pc = regs->nip ^ (swiz & 4);
 
-			if (__get_user(instr, (unsigned int __user *)pc))
+			if (__get_user_inatomic(instr,
+						(unsigned int __user *)pc))
 				return -EFAULT;
 			if (swiz == 0 && (flags & SW))
 				instr = cpu_to_le32(instr);
@@ -324,27 +325,31 @@ static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
 			       ((nb0 + 3) / 4) * sizeof(unsigned long));
 
 		for (i = 0; i < nb; ++i, ++p)
-			if (__get_user(REG_BYTE(rptr, i ^ bswiz), SWIZ_PTR(p)))
+			if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
+						SWIZ_PTR(p)))
 				return -EFAULT;
 		if (nb0 > 0) {
 			rptr = &regs->gpr[0];
 			addr += nb;
 			for (i = 0; i < nb0; ++i, ++p)
-				if (__get_user(REG_BYTE(rptr, i ^ bswiz),
-					       SWIZ_PTR(p)))
+				if (__get_user_inatomic(REG_BYTE(rptr,
+								 i ^ bswiz),
+							SWIZ_PTR(p)))
 					return -EFAULT;
 		}
 
 	} else {
 		for (i = 0; i < nb; ++i, ++p)
-			if (__put_user(REG_BYTE(rptr, i ^ bswiz), SWIZ_PTR(p)))
+			if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
+						SWIZ_PTR(p)))
 				return -EFAULT;
 		if (nb0 > 0) {
 			rptr = &regs->gpr[0];
 			addr += nb;
 			for (i = 0; i < nb0; ++i, ++p)
-				if (__put_user(REG_BYTE(rptr, i ^ bswiz),
-					       SWIZ_PTR(p)))
+				if (__put_user_inatomic(REG_BYTE(rptr,
+								 i ^ bswiz),
+							SWIZ_PTR(p)))
 					return -EFAULT;
 		}
 	}
@@ -434,7 +439,8 @@ int fix_alignment(struct pt_regs *regs)
 
 		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
 			pc ^= 4;
-		if (unlikely(__get_user(instr, (unsigned int __user *)pc)))
+		if (unlikely(__get_user_inatomic(instr,
+						 (unsigned int __user *)pc)))
 			return -EFAULT;
 		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
 			instr = cpu_to_le32(instr);
@@ -514,16 +520,16 @@ int fix_alignment(struct pt_regs *regs)
 		p = (unsigned long) addr;
 		switch (nb) {
 		case 8:
-			ret |= __get_user(data.v[0], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[1], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[2], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[3], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[0], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[1], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[2], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[3], SWIZ_PTR(p++));
 		case 4:
-			ret |= __get_user(data.v[4], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[5], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[4], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[5], SWIZ_PTR(p++));
 		case 2:
-			ret |= __get_user(data.v[6], SWIZ_PTR(p++));
-			ret |= __get_user(data.v[7], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[6], SWIZ_PTR(p++));
+			ret |= __get_user_inatomic(data.v[7], SWIZ_PTR(p++));
 			if (unlikely(ret))
 				return -EFAULT;
 		}
@@ -592,16 +598,16 @@ int fix_alignment(struct pt_regs *regs)
 		p = (unsigned long) addr;
 		switch (nb) {
 		case 8:
-			ret |= __put_user(data.v[0], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[1], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[2], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[3], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[0], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[1], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[2], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[3], SWIZ_PTR(p++));
 		case 4:
-			ret |= __put_user(data.v[4], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[5], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[4], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[5], SWIZ_PTR(p++));
 		case 2:
-			ret |= __put_user(data.v[6], SWIZ_PTR(p++));
-			ret |= __put_user(data.v[7], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[6], SWIZ_PTR(p++));
+			ret |= __put_user_inatomic(data.v[7], SWIZ_PTR(p++));
 		}
 		if (unlikely(ret))
 			return -EFAULT;
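
As a reading aid for the fix_alignment() hunks above: the switch on
the access size falls through from case 8 to case 4 to case 2, and the
return codes of the non-sleeping accessors are OR-ed into ret, so a
single -EFAULT check covers every byte of the emulated access. Below
is a compressed, self-contained sketch of that error-accumulation
pattern (the helper name and parameters are illustrative, not taken
from align.c):

#include <linux/compiler.h>	/* unlikely() */
#include <asm/uaccess.h>	/* __get_user_inatomic() */

/*
 * Illustration only: read nb bytes from user space without sleeping,
 * OR-ing the per-byte error codes together and reporting a single
 * -EFAULT at the end, as the hunks above do.
 */
static int copy_bytes_inatomic(unsigned char *dst,
			       const unsigned char __user *src,
			       unsigned int nb)
{
	unsigned int i;
	int ret = 0;

	for (i = 0; i < nb; i++)
		ret |= __get_user_inatomic(dst[i], src + i);

	return unlikely(ret) ? -EFAULT : 0;
}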