from include/linux/kernel.h:
#define ALIGN(x,a) (((x)+(a)-1)&~((a)-1))
from crypto/cipher.c:
unsigned int alignmask = ...
u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
...
unsigned int alignmask = ...
u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
...
unsigned int align;
addr = ALIGN(addr, align);
addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
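For reference, a minimal standalone sketch (illustrative only, not from the
kernel tree) of what these call sites expect ALIGN to do: round an address
up to the next multiple of alignmask + 1, which must be a power of two.

#include <stdio.h>

#define ALIGN(x,a) (((x)+(a)-1)&~((a)-1))

int main(void)
{
	unsigned long alignmask = 15;	/* 16-byte alignment, mask = 16 - 1 */
	unsigned char buffer[64];

	/* round the buffer address up to the next 16-byte boundary,
	   the same pattern as the crypto/cipher.c call sites */
	unsigned char *src =
		(unsigned char *)ALIGN((unsigned long)buffer, alignmask + 1);

	printf("%p -> %p\n", (void *)buffer, (void *)src);
	return 0;
}

Note that alignmask is unsigned long here, so the macro behaves. The
breakage described below only appears when the alignment argument is a
32-bit type.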
The compiler first evaluates ~((a)-1) as unsigned int and only then
zero-extends it to unsigned long for the & operation. So on 64-bit systems
we end up with only the lower 32 bits of the address. Who was smoking what
when they wrote this? Patch attached.
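A minimal standalone demonstration of the truncation (illustrative, not
kernel code); on an LP64 system the first result loses the upper address
bits:

#include <stdio.h>

#define ALIGN(x,a) (((x)+(a)-1)&~((a)-1))

int main(void)
{
	unsigned long addr = 0x123456789aUL;	/* address with bits above bit 31 */
	unsigned int mask = 15;			/* 32-bit alignmask */
	unsigned long lmask = 15;		/* 64-bit alignmask */

	/* ~((a)-1) is evaluated as 32-bit unsigned int (0xfffffff0) and
	   zero-extended for the &, wiping the upper 32 address bits */
	printf("broken:  %#lx\n", ALIGN(addr, mask + 1));

	/* with an unsigned long mask the complement keeps all 64 bits set */
	printf("correct: %#lx\n", ALIGN(addr, lmask + 1));
	return 0;
}

This prints broken: 0x345678a0 and correct: 0x12345678a0.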
--
Andreas Steinmetz SPAMmers use [email protected]
--- linux.orig/crypto/cipher.c 2005-07-17 13:35:15.000000000 +0200
+++ linux/crypto/cipher.c 2005-07-17 14:04:00.000000000 +0200
@@ -41,7 +41,7 @@
struct scatter_walk *in,
struct scatter_walk *out, unsigned int bsize)
{
- unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
+ unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
u8 buffer[bsize * 2 + alignmask];
u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
u8 *dst = src + bsize;
@@ -160,7 +160,7 @@
unsigned int nbytes)
{
struct crypto_tfm *tfm = desc->tfm;
- unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
+ unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
u8 *iv = desc->info;
if (unlikely(((unsigned long)iv & alignmask))) {
@@ -424,7 +424,7 @@
}
if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
- unsigned int align;
+ unsigned long align;
unsigned long addr;
switch (crypto_tfm_alg_blocksize(tfm)) {
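An alternative, untested sketch (not part of the attached patch): instead
of widening every caller's alignmask, make the macro itself promotion-safe
by forcing the alignment to x's type before complementing. This is the
style later kernels adopted (__ALIGN_KERNEL_MASK):

#define ALIGN(x,a) (((x) + ((typeof(x))(a) - 1)) & ~((typeof(x))(a) - 1))

With that, the ~ is computed at the full width of x, and 32-bit alignmask
variables at the call sites stay harmless.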