summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorYorhel <git@yorhel.nl>2010-04-26 15:56:47 +0200
committerYorhel <git@yorhel.nl>2010-04-26 16:04:47 +0200
commit7aaacf7f48c01ca5ce7c3ab369403279b8b86866 (patch)
tree98353fabe9481adb4aeed9645a5973cb09bbf36f
parent2023fe624160af43b5295c20d11c0aedbd0f9d80 (diff)
Implemented free(), node merging, changed back to a doubly linked list
A link to the previous node in the unused linked list was required in order to merge nodes together. I also added a 3rd field to the end of each unused node indicating the size of the node. This is required to find the offset of the previous unused node when doing a backward merge. This increases the minimum node size to 6 bytes. Not all functionality has been tested yet. This code is bound to have bugs.
-rw-r--r--compll.c222
1 file changed, 140 insertions, 82 deletions
diff --git a/compll.c b/compll.c
index 8bb74af..3745960 100644
--- a/compll.c
+++ b/compll.c
@@ -36,9 +36,9 @@
/* minimum length of the data part of a node, setting this too low can badly
* increase the length of the unused node list and slow down operations of
- * alloc() and free(). Setting this too high would waste memory.
- * Must be at least 2, in order to hold the unused nodes linked list. */
-#define MIN_NODE_SIZE 4
+ * alloc() and free(). Setting this too high would waste memory. Must be at
+ * least 6, in order to hold the unused nodes linked list and node size. */
+#define MIN_NODE_SIZE 6
unsigned int conf_alignment; /* in bit count, actual alignment bytes = 2^a */
@@ -109,6 +109,9 @@ unsigned char *compress_buf = NULL;
#define node_head(ch) datshort((ch) - 2)
#define node_foot(ch) datshort((ch) + ((node_head(ch) >> 1) << conf_alignment) - 2)
#define node_fnext(ch) datshort(ch)
+#define node_fprev(ch) datshort((ch) + 2)
+#define node_usize(ch) datshort((ch) + ((node_head(ch) >> 1) << conf_alignment) - 4)
+#define node_psize(ch) datshort((ch) - 4)
/* update the 'lasta' field in an uncompressed block and increase the
* instruction counter. */
@@ -237,51 +240,6 @@ static int block_load(int ab) {
}
-/* find a memory block which has a node free of at least the specified (packed)
- * size, creating a new block if all others are full. The block will be
- * available in ublocks.
- * returns the ablocks index or -1 on error*/
-static int block_getfree(short size) {
- int ab, start, length;
- unsigned char *dat;
-
- /* find a suitable memory block, load it, and return.
- * TODO: this loops through the whole ablocks array in the worst case, which
- * is not really fast. Maybe we'll have to do some caching? */
- for(ab=0; ab<ablocks_size; ab++)
- if((ablocks[ab].data != NULL || ablocks[ab].ublock != -1) && ablocks[ab].free >= size) {
- if(block_load(ab) == -1)
- return -1;
- return ab;
- }
-
- /* none found, create a new block and initialize the block structure */
- if((ab = block_alloc()) == -1)
- return -1;
- dat = ublocks[ablocks[ab].ublock].data;
-
- /* the first two bytes contain the offset to the first unused node, this one
- * can be found at offset align(2+2) - 2 bytes for this pointer and 2 for the
- * metadata of the unused node */
- start = align(4);
- datshort(dat) = start >> conf_alignment;
-
- /* metadata 2 bytes before the start of the unused node, this indicates the
- * length of the node and that the previous node should be considered as "in
- * use" */
- length = (conf_blocksize - start) & ~conf_align_mask;
- datshort(dat+start-2) = ((length >> conf_alignment) << 1) | 1;
-
- /* the metadata of the last node can be found at (dat+start+length-2) and
- * should be 0 (length = 0, not in use), which it already is so we don't have
- * to do anything. */
-
- ablocks[ab].free = length >> conf_alignment;
- return ab;
-}
-
-
-
/*
* The following is the result of a brainstorm session on how to store the node
@@ -318,12 +276,14 @@ static int block_getfree(short size) {
*
* 0: [ 2 - offset of first node]
* fm: [ 2 - size of the first node, previous node = used]
- * fd: [ns - unallocated data]
+ * fd: [ns - unallocated data (zeros)]
+ * fs: [ 2 - ns]
* lm: [ 2 - next node size = 0, previous node = unused]
*
* fm = metadata of first node = fd - 2
* fd = offset of first node = align(4)
* ns = size of the first node = floor((blocksize - fd) / alignment) = (blocksize - fd) & ~conf_align_mask
+ * fs = offset of the duplicated size of the first node = fd+ns-4
* lm = metadata of last node = fd+ns-2
*
* This means that the largest node *contents* within a block can be calculated with:
@@ -354,36 +314,107 @@ static int block_getfree(short size) {
+/* find a memory block which has a node free of at least the specified (packed)
+ * size, creating a new block if all others are full. The block will be
+ * available in ublocks.
+ * returns the ablocks index or -1 on error*/
+static int block_getfree(short size) {
+ int ab, start, length;
+ unsigned char *dat;
+
+ /* find a suitable memory block, load it, and return.
+ * TODO: this loops through the whole ablocks array in the worst case, which
+ * is not really fast. Maybe we'll have to do some caching? */
+ for(ab=0; ab<ablocks_size; ab++)
+ if((ablocks[ab].data != NULL || ablocks[ab].ublock != -1) && ablocks[ab].free >= size) {
+ if(block_load(ab) == -1)
+ return -1;
+ return ab;
+ }
+
+ /* none found, create a new block and initialize the block structure */
+ if((ab = block_alloc()) == -1)
+ return -1;
+ dat = ublocks[ablocks[ab].ublock].data;
+
+ /* the first two bytes contain the offset to the first unused node, this one
+ * can be found at offset align(2+2) - 2 bytes for this pointer and 2 for the
+ * metadata of the unused node */
+ start = align(4);
+ datshort(dat) = start >> conf_alignment;
+
+ /* metadata 2 bytes before the start of the unused node, this indicates the
+ * length of the node and that the previous node should be considered as "in
+ * use" */
+ length = (conf_blocksize - start) & ~conf_align_mask;
+ datshort(dat+start-2) = ((length >> conf_alignment) << 1) | 1;
+
+ /* the duplicated size of the next node. While this field is not really used
+ * in the case of this node, every unused node should have this */
+ datshort(dat+start+length-4) = length >> conf_alignment;
+
+  /* the metadata of the last node can be found at (dat+start+length-2) and
+   * should be 0 (length = 0, not in use), which it already is so we don't have
+   * to do anything. */
+
+ ablocks[ab].free = length >> conf_alignment;
+ return ab;
+}
+
+
/* Add the specified node at the correct position in the unused nodes list and
* update ablock.free if necessary */
static void node_addfree(int ab, unsigned short off) {
unsigned char *dat = ublocks[ablocks[ab].ublock].data;
- unsigned short psize = node_head(dat+off);
- int cur;
+ unsigned short psize = node_head(dat+off) >> 1;
+ int next, cur = 0;
/* find the node within the free nodes list after which this node should be
* inserted. (that is, of which the next node is larger than or equal to this
* one) */
- cur = datshort(dat) << conf_alignment;
- if(cur) {
- while(1) {
- if(!node_fnext(dat+cur) || node_head(dat + (node_fnext(dat+cur) << conf_alignment)) >= psize)
- break;
- cur = node_fnext(dat+cur) << conf_alignment;
- }
+ next = datshort(dat) << conf_alignment;
+ while(next) {
+ if((node_head(dat+next)>>1) >= psize)
+ break;
+ cur = next;
+ next = node_fnext(dat+cur) << conf_alignment;
}
/* if this is the last node in the list (thus the largest free node in this
* block), update ablock.free */
- if(!cur || !node_fnext(dat+cur))
- ablocks[ab].free = psize >> 1;
+ if(!next)
+ ablocks[ab].free = psize;
/* update references */
- node_fnext(dat+off) = cur ? node_fnext(dat+cur) : 0;
+ node_fnext(dat+off) = next ? (next >> conf_alignment) : 0;
+ node_fprev(dat+off) = cur ? (cur >> conf_alignment) : 0;
if(cur)
node_fnext(dat+cur) = off >> conf_alignment;
else
datshort(dat) = off >> conf_alignment;
+ if(next)
+ node_fprev(dat+next) = off >> conf_alignment;
+}
+
+
+/* removes a node from the unused node list and updates ablock.free if this was
+ * the largest node. This function does not touch (meta-)data of the node itself */
+static void node_rmfree(int ab, int off) {
+ unsigned char *dat = ublocks[ablocks[ab].ublock].data;
+ int prev = node_fprev(dat+off) << conf_alignment,
+ next = node_fnext(dat+off) << conf_alignment;
+
+ /* update references */
+ if(prev)
+ node_fnext(dat+prev) = node_fnext(dat+off);
+ else
+ datshort(dat) = node_fnext(dat+off);
+ if(next)
+ node_fprev(dat+next) = node_fprev(dat+off);
+
+ /* if this was the largest free node, update ablock.free */
+ if(!next)
+ ablocks[ab].free = prev ? (node_head(dat+prev)>>1) : 0;
}
@@ -396,42 +427,27 @@ static int node_alloc(int ab, unsigned short size) {
unsigned short psize = size << 1;
/* find the best fitting unused node by looping through the unused nodes
- * linked list, while keeping track of the previous node in the list (off2)
- * in order to update it's fnext reference */
+ * linked list */
off = datshort(dat) << conf_alignment;
assert(off > 0);
while(1) {
if(!node_fnext(dat+off) || node_head(dat+off) >= psize)
break;
- off2 = off;
off = node_fnext(dat+off) << conf_alignment;
}
- /* mark this node as used and remove it from the free nodes list */
node_foot(dat+off) |= 1;
- if(off2)
- node_fnext(dat + off2) = node_fnext(dat+off);
- else
- datshort(dat) = node_fnext(dat+off);
-
- /* if this was the largest free node, update ablock.free */
- if(!node_fnext(dat + off))
- ablocks[ab].free = off2 ? (node_head(dat+off2)>>1) : 0;
- /* reset the next pointer to make sure the node data contains only zeros */
- else
- node_fnext(dat+off) = 0;
+ node_rmfree(ab, off);
+ node_fnext(dat+off) = node_fprev(dat+off) = node_usize(dat+off) = 0;
/* split this node into another empty node if it is large enough */
free = ((node_head(dat+off) >> 1) - size) << conf_alignment;
- if(free > MIN_NODE_SIZE) {
- /* decrease size of the large node to the specified size */
+ if(free > MIN_NODE_SIZE+2) {
node_head(dat+off) = psize | (node_head(dat+off) & 1);
- /* calculate offset of the free node */
off2 = off + (size << conf_alignment);
- /* update metadata of this and next node */
node_head(dat+off2) = ((free >> conf_alignment) << 1) | 1;
+ node_usize(dat+off2) = free >> conf_alignment;
node_foot(dat+off2) &= ~1;
- /* add to the free nodes list */
node_addfree(ab, off2);
}
@@ -470,6 +486,42 @@ compll_t compll_alloc(unsigned int size) {
+void compll_free(compll_t node) {
+ int ab = node >> 24,
+ off = node & 0xFFFFFF;
+ int ub, off2;
+ unsigned char *dat;
+
+ if((ub = block_load(ab)) < 0)
+ return;
+ dat = ublocks[ub].data;
+
+ node_foot(dat+off) &= ~1;
+ memset(dat+off, 0, ((node_head(dat+off)>>1)<<conf_alignment)-2);
+
+ /* merge with above node if it's free */
+ if(!(node_head(dat+off) & 1)) {
+ off2 = off - (node_psize(dat+off) << conf_alignment);
+ node_head(dat+off2) += node_head(dat+off);
+ node_head(dat+off) = node_psize(dat+off) = 0;
+ node_rmfree(ab, off2);
+ off = off2;
+ }
+
+ /* and with the node below */
+ off2 = off + ((node_head(dat+off)>>1) << conf_alignment);
+ if(node_head(dat+off2) && !(node_foot(dat+off2) & 1)) {
+ node_rmfree(ab, off2);
+ node_head(dat+off) += node_head(dat+off2);
+ node_head(dat+off2) = node_fnext(dat+off2) = node_fprev(dat+off2) = 0;
+ }
+
+ node_usize(dat+off) = node_head(dat+off)>>1;
+ node_addfree(ab, off);
+}
+
+
+
int compll_init(unsigned int block_size,
unsigned short alignment,
unsigned short uncomp_count,
@@ -566,15 +618,21 @@ void dbg_decompress(const unsigned char *src, unsigned int srclen, unsigned char
int main() {
int i, off;
+ compll_t m;
FILE *f;
printf("compll_init = %d\n", compll_init(1<<16, 8, 10, dbg_compress, dbg_decompress));
for(i=4; i<=16; i+=4) {
off = compll_alloc(i);
- printf("block_getfree(%d) = %d (free = %d)\n", i, off, ablocks[0].free);
+ printf("compll_alloc(%d) = %d (free = %d)\n", i, off, ablocks[0].free);
}
+ m = compll_alloc(500);
+ compll_alloc(10);
+ compll_free(m);
+ compll_alloc(50);
+
f = fopen("dump", "w");
fwrite(ublocks[ablocks[0].ublock].data, conf_blocksize, 1, f);
fclose(f);