import { hash } from "internal/hash";

// A deterministic hash map based on CloseTable from https://github.com/jorendorff/dht

const INITIAL_CAPACITY = 4;
const FILL_FACTOR: f64 = 8 / 3;
const FREE_FACTOR: f64 = 3 / 4;

/** Structure of a map entry. */
@unmanaged class MapEntry<K,V> {
  key: K;
  value: V;
  taggedNext: usize; // LSB=1 indicates EMPTY
}

/** Empty bit. */
const EMPTY: usize = 1 << 0;

/** Size of a bucket. */
const BUCKET_SIZE = sizeof<usize>();

/** Computes the alignment of an entry. */
@inline function ENTRY_ALIGN<K,V>(): usize {
  // can align to 4 instead of 8 if 32-bit and K/V is <= 32-bits
  const maxkv = sizeof<K>() > sizeof<V>() ? sizeof<K>() : sizeof<V>();
  const align = (maxkv > sizeof<usize>() ? maxkv : sizeof<usize>()) - 1;
  return align;
}

/** Computes the aligned size of an entry. */
@inline function ENTRY_SIZE<K,V>(): usize {
  const align = ENTRY_ALIGN<K,V>();
  const size = (offsetof<MapEntry<K,V>>() + align) & ~align;
  return size;
}

class Map<K,V> {

  // buckets holding references to the respective first entry within
  private buckets: ArrayBuffer; // usize[bucketsMask + 1]
  private bucketsMask: u32;

  // entries in insertion order
  private entries: ArrayBuffer; // MapEntry<K,V>[entriesCapacity]
  private entriesCapacity: i32;
  private entriesOffset: i32;
  private entriesCount: i32;

  get size(): i32 { return this.entriesCount; }

  constructor() { this.clear(); }

  clear(): void {
    const bucketsSize = INITIAL_CAPACITY * <i32>BUCKET_SIZE;
    this.buckets = new ArrayBuffer(bucketsSize);
    this.bucketsMask = INITIAL_CAPACITY - 1;
    const entriesSize = INITIAL_CAPACITY * <i32>ENTRY_SIZE<K,V>();
    this.entries = new ArrayBuffer(entriesSize, true);
    this.entriesCapacity = INITIAL_CAPACITY;
    this.entriesOffset = 0;
    this.entriesCount = 0;
  }

  private find(key: K, hashCode: u32): MapEntry<K,V> | null {
    var entry = this.buckets.load<MapEntry<K,V>>(hashCode & this.bucketsMask);
    while (entry) {
      if (!(entry.taggedNext & EMPTY) && entry.key == key) return entry;
      entry = changetype<MapEntry<K,V>>(entry.taggedNext & ~EMPTY);
    }
    return null;
  }

  has(key: K): bool {
    return this.find(key, hash(key)) !== null;
  }

  get(key: K): V {
    var entry = this.find(key, hash(key));
    return entry ? entry.value : <V>unreachable();
  }

  set(key: K, value: V): void {
    var hashCode = hash(key);
    var entry = this.find(key, hashCode);
    if (entry) {
      entry.value = value;
    } else {
      // check if rehashing is necessary
      if (this.entriesOffset == this.entriesCapacity) {
        this.rehash(
          this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
            ? this.bucketsMask             // just rehash if 1/4+ entries are empty
            : (this.bucketsMask << 1) | 1  // grow capacity to next 2^N
        );
      }
      // append new entry
      let entries = this.entries;
      entry = changetype<MapEntry<K,V>>(
        changetype<usize>(entries) + ArrayBuffer.HEADER_SIZE + this.entriesOffset++ * ENTRY_SIZE<K,V>()
      );
      entry.key = key;
      entry.value = value;
      ++this.entriesCount;
      // link with previous entry in bucket
      let bucketIndex = hashCode & this.bucketsMask;
      entry.taggedNext = this.buckets.load<usize>(bucketIndex);
      this.buckets.store<usize>(bucketIndex, changetype<usize>(entry));
    }
  }

  delete(key: K): bool {
    var entry = this.find(key, hash(key));
    if (!entry) return false;
    entry.taggedNext |= EMPTY;
    --this.entriesCount;
    // check if rehashing is appropriate
    var halfBucketsMask = this.bucketsMask >> 1;
    if (
      halfBucketsMask + 1 >= max<u32>(INITIAL_CAPACITY, this.entriesCount) &&
      this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
    ) this.rehash(halfBucketsMask);
    return true;
  }

  private rehash(newBucketsMask: u32): void {
    var newBucketsCapacity = <i32>(newBucketsMask + 1);
    var newBuckets = new ArrayBuffer(newBucketsCapacity * <i32>BUCKET_SIZE);
    var newEntriesCapacity = <i32>(newBucketsCapacity * FILL_FACTOR);
    var newEntries = new ArrayBuffer(newEntriesCapacity * <i32>ENTRY_SIZE<K,V>(), true);

    // copy old entries to new entries
    var oldPtr = changetype<usize>(this.entries) + ArrayBuffer.HEADER_SIZE;
    var oldEnd = oldPtr + this.entriesOffset * ENTRY_SIZE<K,V>();
    var newPtr = changetype<usize>(newEntries) + ArrayBuffer.HEADER_SIZE;
    while (oldPtr != oldEnd) {
      let oldEntry = changetype<MapEntry<K,V>>(oldPtr);
      if (!(oldEntry.taggedNext & EMPTY)) {
        let newEntry = changetype<MapEntry<K,V>>(newPtr);
        newEntry.key = oldEntry.key;
        newEntry.value = oldEntry.value;
        let newBucketIndex = hash(oldEntry.key) & newBucketsMask;
        let newBucketPtr = changetype<usize>(newBuckets) + newBucketIndex * BUCKET_SIZE;
        newEntry.taggedNext = load<usize>(newBucketPtr, ArrayBuffer.HEADER_SIZE);
        store<usize>(newBucketPtr, newPtr, ArrayBuffer.HEADER_SIZE);
        newPtr += ENTRY_SIZE<K,V>();
      }
      oldPtr += ENTRY_SIZE<K,V>();
    }

    this.buckets = newBuckets;
    this.bucketsMask = newBucketsMask;
    this.entries = newEntries;
    this.entriesCapacity = newEntriesCapacity;
    this.entriesOffset = this.entriesCount;
  }
}

import "allocator/arena";

function test<K>(): void {
  var map = new Map<K,i32>();

  // insert new
  for (let k: K = 0; k < 100; ++k) {
    assert(!map.has(k));
    map.set(k, 10 + <i32>k);
    assert(map.has(k));
    assert(map.get(k) == 10 + <i32>k);
  }
  assert(map.size == 100);

  // insert duplicate
  for (let k: K = 0; k < 100; ++k) {
    assert(map.has(k));
    assert(map.get(k) == 10 + <i32>k);
    map.set(k, 20 + <i32>k);
    assert(map.has(k));
    assert(map.get(k) == 20 + <i32>k);
  }
  assert(map.size == 100);

  // delete
  for (let k: K = 0; k < 50; ++k) {
    assert(map.has(k));
    assert(map.get(k) == 20 + <i32>k);
    map.delete(k);
    assert(!map.has(k));
  }
  assert(map.size == 50);

  // insert + delete
  for (let k: K = 0; k < 50; ++k) {
    assert(!map.has(k));
    map.set(k, 10 + <i32>k);
    assert(map.has(k));
    map.delete(k);
    assert(!map.has(k));
  }
  assert(map.size == 50);

  // clear
  map.clear();
  assert(map.size == 0);
}

// exercise all numeric key types
test<i8>();
test<u8>();
test<i16>();
test<u16>();
test<i32>();
test<u32>();
test<i64>();
test<u64>();
test<f32>();
test<f64>();
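
// A minimal usage sketch, not part of the original test suite: the tests above
// always pin the value type to i32, so this instantiation uses an f64 value to
// exercise the 8-byte alignment path in ENTRY_ALIGN/ENTRY_SIZE. The function
// and variable names below are illustrative assumptions only.
function exampleWideValues(): void {
  var prices = new Map<i32,f64>();
  prices.set(1, 9.5);     // insert two entries with exactly representable values
  prices.set(2, 19.25);
  assert(prices.size == 2);
  assert(prices.has(1));
  assert(prices.get(2) == 19.25);
  prices.delete(1);       // deletion marks the entry EMPTY and may shrink buckets
  assert(!prices.has(1));
  assert(prices.size == 1);
}
exampleWideValues();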