author	otto <>	2011-05-05 12:11:20 +0000
committer	otto <>	2011-05-05 12:11:20 +0000
commit	ad527ff4137ec2b21fca81a6af20dd16c170ea59 (patch)
tree	c677fe9275cd7c858ab19904f151d7ac9ca8097b /src
parent	8153c96e134ad79b74adeb4f92f16ced96b28c78 (diff)
Up until now, malloc scanned the bits of the chunk bitmap from position zero, skipping a random number of free slots and then picking the next free one. This slowed things down, especially as the number of full slots increased.

This changes the scanning to start at a random position in the bitmap and take the first available free slot, wrapping around if the end of the bitmap is reached. Of course we'll still scan more if the bitmap becomes more full, but the extra iterations spent skipping free slots and then some full slots are avoided.

The random starting position is derived from a global, which is incremented by a few random bits every time a chunk is needed (with a small optimization if only one free slot is left). Thanks to the testers!
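For illustration, here is a minimal standalone sketch of the new scanning strategy. This is not the actual malloc.c code: the name find_free_slot, the BITS_PER_WORD macro and the use of unsigned long words are hypothetical, and a set bit is taken to mean a free slot, as in the chunk bitmaps. It assumes the bitmap covers a power-of-two number of slots, that bits beyond "total" are zero, and that at least one slot is free.

#include <limits.h>
#include <stddef.h>

#define BITS_PER_WORD	(sizeof(unsigned long) * CHAR_BIT)

/*
 * Return the index of a free slot (set bit), scanning from "start"
 * and wrapping at "total".  Loops forever if no bit is set.
 */
static size_t
find_free_slot(const unsigned long *bits, size_t total, size_t start)
{
	size_t i = start & (total - 1);		/* total must be a power of two */

	for (;;) {
		const unsigned long *lp = &bits[i / BITS_PER_WORD];

		if (*lp == 0) {
			/* whole word is full: jump to the start of the next word */
			i += BITS_PER_WORD;
			i &= ~(BITS_PER_WORD - 1);
			if (i >= total)
				i = 0;
			continue;
		}
		if (*lp & (1UL << (i % BITS_PER_WORD)))
			return i;		/* found a free slot */
		if (++i >= total)
			i = 0;
	}
}

In malloc.c itself, the starting position comes from d->chunk_start plus a few random bits from getrnibble(), and the bit that is found is then cleared with *lp ^= u to mark the slot as allocated.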
Diffstat (limited to 'src')
-rw-r--r--	src/lib/libc/stdlib/malloc.c	56
1 file changed, 24 insertions, 32 deletions
diff --git a/src/lib/libc/stdlib/malloc.c b/src/lib/libc/stdlib/malloc.c
index 84ca2be718..0d1b2290be 100644
--- a/src/lib/libc/stdlib/malloc.c
+++ b/src/lib/libc/stdlib/malloc.c
@@ -1,4 +1,4 @@
-/*	$OpenBSD: malloc.c,v 1.129 2011/04/30 15:46:46 otto Exp $	*/
+/*	$OpenBSD: malloc.c,v 1.130 2011/05/05 12:11:20 otto Exp $	*/
 /*
  * Copyright (c) 2008 Otto Moerbeek <otto@drijf.net>
  *
@@ -113,6 +113,7 @@ struct dir_info {
 	struct region_info free_regions[MALLOC_MAXCACHE];
 	/* delayed free chunk slots */
 	void *delayed_chunks[MALLOC_DELAYED_CHUNKS + 1];
+	u_short chunk_start;
 #ifdef MALLOC_STATS
 	size_t inserts;
 	size_t insert_collisions;
@@ -1013,40 +1014,31 @@ malloc_bytes(struct dir_info *d, size_t size)
 
 	if (bp->canary != d->canary1)
 		wrterror("chunk info corrupted", NULL);
-	/* Find first word of bitmap which isn't empty */
-	for (lp = bp->bits; !*lp; lp++)
-		/* EMPTY */;
-
-	/* Find that bit, and tweak it */
-	u = 1;
-	k = 0;
-	while (!(*lp & u)) {
-		u += u;
-		k++;
-	}
 
-	/* advance a random # of positions */
-	if (bp->free > 1) {
-		i = getrnibble() % bp->free;
-		while (i > 0) {
-			u += u;
-			k++;
-			if (k >= MALLOC_BITS) {
-				while (!*++lp)
-					/* EMPTY */;
-				u = 1;
-				k = 0;
-				if (lp - bp->bits > (bp->total - 1) /
-				    MALLOC_BITS) {
-					wrterror("chunk overflow", NULL);
-					errno = EFAULT;
-					return (NULL);
-				}
-			}
-			if (*lp & u)
-				i--;
-		}
-	}
+	i = d->chunk_start;
+	if (bp->free > 1)
+		i += getrnibble();
+	if (i >= bp->total)
+		i &= bp->total - 1;
+	for (;;) {
+		for (;;) {
+			lp = &bp->bits[i / MALLOC_BITS];
+			if (!*lp) {
+				i += MALLOC_BITS;
+				i &= ~(MALLOC_BITS - 1);
+				if (i >= bp->total)
+					i = 0;
+			} else
+				break;
+		}
+		k = i % MALLOC_BITS;
+		u = 1 << k;
+		if (*lp & u)
+			break;
+		if (++i >= bp->total)
+			i = 0;
+	}
+	d->chunk_start += i + 1;
 
 	*lp ^= u;
 