Mirror of https://github.com/fluencelabs/assemblyscript (synced 2025-04-25 15:12:12 +00:00)
Add initial Set implementation to tests, see #17; Fix check when shrinking a Map
This commit is contained in:
parent dd4be7b693
commit c44cbec2c7
dist/asc.js (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
dist/asc.js.map (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
@@ -32,11 +32,11 @@ export class ArrayBuffer {
   static readonly HEADER_SIZE: usize = HEADER_SIZE;

-  @inline load<T>(index: i32): T {
+  @inline load<T>(index: usize): T {
     return load<T>(changetype<usize>(this) + index * sizeof<T>(), HEADER_SIZE);
   }

-  @inline store<T>(index: i32, value: T): void {
+  @inline store<T>(index: usize, value: T): void {
     store<T>(changetype<usize>(this) + index * sizeof<T>(), value, HEADER_SIZE);
   }
 }
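The switch from `i32` to `usize` indexes keeps the offset computation in pointer-sized arithmetic. Each access resolves to `this + index * sizeof<T>()`, with the constant `HEADER_SIZE` added as the load/store offset. A minimal plain-TypeScript sketch of that arithmetic, with a DataView standing in for linear memory and `HEADER_SIZE = 8` assumed for illustration:

```ts
const HEADER_SIZE = 8; // assumed header size in bytes

// effective byte offset = index * sizeof<i32>() + HEADER_SIZE
function loadI32(buf: ArrayBuffer, index: number): number {
  return new DataView(buf).getInt32(HEADER_SIZE + index * 4, true);
}

function storeI32(buf: ArrayBuffer, index: number, value: number): void {
  new DataView(buf).setInt32(HEADER_SIZE + index * 4, value, true);
}
```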
File diff suppressed because it is too large
@@ -2,7 +2,7 @@ import {
   hash
 } from "internal/hash";

-// A deterministic hash table based on CloseTable from https://github.com/jorendorff/dht
+// A deterministic hash map based on CloseTable from https://github.com/jorendorff/dht

 const INITIAL_CAPACITY = 4;
 const FILL_FACTOR: f64 = 8 / 3;
@@ -19,6 +19,9 @@ class MapEntry<K,V> {
 /** Empty bit. */
 const EMPTY: usize = 1 << 0;

+/** Size of a bucket. */
+const BUCKET_SIZE = sizeof<usize>();
+
 /** Computes the alignment of an entry. */
 @inline function ENTRY_ALIGN<K,V>(): usize {
   // can align to 4 instead of 8 if 32-bit and K/V is <= 32-bits
@@ -51,7 +54,7 @@ class Map<K,V> {
   constructor() { this.clear(); }

   clear(): void {
-    const bucketsSize = INITIAL_CAPACITY * <i32>sizeof<usize>();
+    const bucketsSize = INITIAL_CAPACITY * <i32>BUCKET_SIZE;
     this.buckets = new ArrayBuffer(bucketsSize);
     this.bucketsMask = INITIAL_CAPACITY - 1;
     const entriesSize = INITIAL_CAPACITY * <i32>ENTRY_SIZE<K,V>();
@@ -71,29 +74,27 @@ class Map<K,V> {
   }

   has(key: K): bool {
-    return this.find(key, hash(key)) !== null;
+    return this.find(key, hash<K>(key)) !== null;
   }

   get(key: K): V {
-    var entry = this.find(key, hash(key));
+    var entry = this.find(key, hash<K>(key));
     return entry ? entry.value : <V>unreachable();
   }

   set(key: K, value: V): void {
-    var hashCode = hash(key);
+    var hashCode = hash<K>(key);
     var entry = this.find(key, hashCode);
     if (entry) {
       entry.value = value;
     } else {
       // check if rehashing is necessary
-      let capacity = this.entriesCapacity;
-      if (this.entriesOffset == capacity) {
+      if (this.entriesOffset == this.entriesCapacity) {
         this.rehash(
-          this.entriesCount >= <i32>(capacity * FREE_FACTOR)
-            ? (this.bucketsMask << 1) | 1 // grow capacity to next 2^N
-            : this.bucketsMask // just rehash if 1/4+ entries are empty
+          this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
+            ? this.bucketsMask // just rehash if 1/4+ entries are empty
+            : (this.bucketsMask << 1) | 1 // grow capacity to next 2^N
         );
-        capacity = this.entriesCapacity;
       }
       // append new entry
       let entries = this.entries;
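When the entries array is full, the map either grows the bucket mask to the next 2^N or just rehashes at the same size, since rehashing drops tombstones and frees slots. A hedged sketch of that decision under the constants above (`FILL_FACTOR = 8/3`, `FREE_FACTOR = 3/4`):

```ts
// Sketch of the grow-vs-compact decision; bucketsMask is bucket count - 1
// (a power of two minus one), entriesCount counts live entries only.
const FREE_FACTOR = 3 / 4;

function nextBucketsMask(bucketsMask: number, entriesCapacity: number, entriesCount: number): number {
  return entriesCount < Math.trunc(entriesCapacity * FREE_FACTOR)
    ? bucketsMask             // 1/4+ of the slots are tombstones: compact in place
    : (bucketsMask << 1) | 1; // otherwise grow to the next power of two
}

// e.g. mask 3 (4 buckets), entriesCapacity = trunc(4 * 8/3) = 10:
// 10 live entries -> grow to mask 7; 6 live (4 tombstones) -> stay at mask 3.
```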
@@ -102,48 +103,51 @@ class Map<K,V> {
       );
       entry.key = key;
       entry.value = value;
+      ++this.entriesCount;
       // link with previous entry in bucket
       let bucketIndex = hashCode & this.bucketsMask;
       entry.taggedNext = this.buckets.load<usize>(bucketIndex);
       this.buckets.store<usize>(bucketIndex, changetype<usize>(entry));
-      ++this.entriesCount;
     }
   }
   delete(key: K): bool {
-    var entry = this.find(key, hash(key));
+    var entry = this.find(key, hash<K>(key));
     if (!entry) return false;
     entry.taggedNext |= EMPTY;
     --this.entriesCount;
     // check if rehashing is appropriate
+    var halfBucketsMask = this.bucketsMask >> 1;
     if (
-      this.bucketsMask > <u32>INITIAL_CAPACITY &&
-      this.entriesCount < <i32>(this.entriesOffset * FREE_FACTOR)
-    ) this.rehash(this.bucketsMask >> 1);
+      halfBucketsMask + 1 >= max<u32>(INITIAL_CAPACITY, this.entriesCount) &&
+      this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
+    ) this.rehash(halfBucketsMask);
     return true;
   }
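This is the "fix check when shrinking" from the commit message: the old condition measured occupancy against `entriesOffset`, which still counts tombstones, and compared the mask itself (capacity - 1) against INITIAL_CAPACITY. The new condition uses the real `entriesCapacity` and halves the table only while the halved bucket count stays at or above both INITIAL_CAPACITY and the live entry count. A sketch of the corrected check:

```ts
// Sketch of the corrected shrink check in delete(). INITIAL_CAPACITY = 4 and
// FREE_FACTOR = 3/4 as above; entriesCount excludes tombstoned entries.
const INITIAL_CAPACITY = 4;
const FREE_FACTOR = 3 / 4;

function shouldShrink(bucketsMask: number, entriesCapacity: number, entriesCount: number): boolean {
  const halfBucketsMask = bucketsMask >> 1;
  return (
    halfBucketsMask + 1 >= Math.max(INITIAL_CAPACITY, entriesCount) && // room left after halving
    entriesCount < Math.trunc(entriesCapacity * FREE_FACTOR)           // sparse enough to bother
  );
}
```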

-  private rehash(newBucketsMask: i32): void {
-    var newBucketsCapacity = newBucketsMask + 1;
-    var newBuckets = new ArrayBuffer(newBucketsCapacity * sizeof<usize>());
+  private rehash(newBucketsMask: u32): void {
+    var newBucketsCapacity = <i32>(newBucketsMask + 1);
+    var newBuckets = new ArrayBuffer(newBucketsCapacity * <i32>BUCKET_SIZE);
     var newEntriesCapacity = <i32>(newBucketsCapacity * FILL_FACTOR);
-    var newEntries = new ArrayBuffer(newEntriesCapacity * ENTRY_SIZE<K,V>(), true);
+    var newEntries = new ArrayBuffer(newEntriesCapacity * <i32>ENTRY_SIZE<K,V>(), true);
     // copy old entries to new entries
-    var p = changetype<usize>(this.entries) + ArrayBuffer.HEADER_SIZE;
-    var q = changetype<usize>(newEntries) + ArrayBuffer.HEADER_SIZE;
-    var k = p + this.entriesOffset * ENTRY_SIZE<K,V>();
-    while (p != k) {
-      let pEntry = changetype<MapEntry<K,V>>(p);
-      let qEntry = changetype<MapEntry<K,V>>(q);
-      if (!(pEntry.taggedNext & EMPTY)) {
-        qEntry.key = pEntry.key;
-        qEntry.value = pEntry.value;
-        let bucketIndex = hash(pEntry.key) & newBucketsMask;
-        qEntry.taggedNext = newBuckets.load<usize>(bucketIndex);
-        newBuckets.store<MapEntry<K,V>>(bucketIndex, qEntry);
-        q += ENTRY_SIZE<K,V>();
+    var oldPtr = changetype<usize>(this.entries) + ArrayBuffer.HEADER_SIZE;
+    var oldEnd = oldPtr + <usize>this.entriesOffset * ENTRY_SIZE<K,V>();
+    var newPtr = changetype<usize>(newEntries) + ArrayBuffer.HEADER_SIZE;
+    while (oldPtr != oldEnd) {
+      let oldEntry = changetype<MapEntry<K,V>>(oldPtr);
+      if (!(oldEntry.taggedNext & EMPTY)) {
+        let newEntry = changetype<MapEntry<K,V>>(newPtr);
+        newEntry.key = oldEntry.key;
+        newEntry.value = oldEntry.value;
+        let newBucketIndex = hash<K>(oldEntry.key) & newBucketsMask;
+        let newBucketPtr = changetype<usize>(newBuckets) + <usize>newBucketIndex * BUCKET_SIZE;
+        newEntry.taggedNext = load<usize>(newBucketPtr, ArrayBuffer.HEADER_SIZE);
+        store<usize>(newBucketPtr, newPtr, ArrayBuffer.HEADER_SIZE);
+        newPtr += ENTRY_SIZE<K,V>();
       }
-      p += ENTRY_SIZE<K,V>();
+      oldPtr += ENTRY_SIZE<K,V>();
     }

     this.buckets = newBuckets;
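The copy loop walks every appended slot up to `entriesOffset` but writes only entries without the EMPTY bit, so rehashing doubles as tombstone compaction: afterwards the entries are dense and `entriesOffset == entriesCount`. A simplified array-based sketch of the same idea (illustrative only, not the module's API):

```ts
// Live entries are copied in insertion order and relinked into their new
// bucket chains; tombstones are simply skipped.
interface Entry<K, V> { key: K; value: V; deleted: boolean; next: number }

function rehash<K, V>(old: Entry<K, V>[], hashFn: (k: K) => number, newBucketsMask: number) {
  const buckets = new Array<number>(newBucketsMask + 1).fill(-1); // -1 = empty bucket
  const entries: Entry<K, V>[] = [];
  for (const e of old) {
    if (e.deleted) continue;                  // skip tombstones (EMPTY bit)
    const b = hashFn(e.key) & newBucketsMask;
    entries.push({ ...e, next: buckets[b] }); // link to the previous chain head
    buckets[b] = entries.length - 1;          // entry becomes the new head
  }
  return { buckets, entries };                // entries.length == live count
}
```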
@@ -160,54 +164,55 @@ function test<K,V>(): void {
   var map = new Map<K,V>();

   // insert new
-  for (let k: K = 1; k <= 200; ++k) {
-    map.set(k, 100 + <V>k);
+  for (let k: K = 0; k < 100; ++k) {
+    assert(!map.has(k));
+    map.set(k, 10 + <V>k);
     assert(map.has(k));
-    assert(!map.has(k + 1));
-    assert(map.get(k) == 100 + k);
+    assert(map.get(k) == 10 + <V>k);
   }
-  assert(map.size == 200);
+  assert(map.size == 100);

   // insert duplicate
-  for (let k: K = 50; k <= 100; ++k) {
+  for (let k: K = 0; k < 100; ++k) {
     assert(map.has(k));
-    assert(map.get(k) == 100 + <V>k);
-    map.set(k, 100 + <V>k);
+    assert(map.get(k) == 10 + <V>k);
+    map.set(k, 20 + <V>k);
     assert(map.has(k));
-    assert(map.get(k) == 100 + <V>k);
+    assert(map.get(k) == 20 + <V>k);
   }
-  assert(map.size == 200);
+  assert(map.size == 100);

   // delete
-  for (let k: K = 1; k <= 100; ++k) {
+  for (let k: K = 0; k < 50; ++k) {
     assert(map.has(k));
-    assert(map.get(k) == 100 + <V>k);
+    assert(map.get(k) == 20 + <V>k);
     map.delete(k);
     assert(!map.has(k));
-    assert(map.has(k + 1));
   }
-  assert(map.size == 100);
+  assert(map.size == 50);

   // insert + delete
-  for (let k: K = 1; k <= 50; ++k) {
-    map.set(k, 100 + <V>k);
+  for (let k: K = 0; k < 50; ++k) {
+    assert(!map.has(k));
+    map.set(k, 10 + <V>k);
     assert(map.has(k));
     map.delete(k);
     assert(!map.has(k));
   }
-  assert(map.size == 100);
+  assert(map.size == 50);

   // clear
   map.clear();
   assert(map.size == 0);
 }

-test<i32,i32>();
-test<i64,i32>();
-test<i64,i64>();
-test<i32,i64>();
-test<i16,i64>();
-test<i32,i16>();
-test<i64,i16>();
+test<i8,i32>();
+test<u8,i32>();
+test<i16,i32>();
+test<u16,i32>();
+test<i32,i32>();
+test<u32,i32>();
+test<i64,i32>();
+test<u64,i32>();
+test<f32,i32>();
+test<f64,i32>();
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,30 +1,204 @@
+import {
+  hash
+} from "internal/hash";
+
+// A deterministic hash set based on CloseTable from https://github.com/jorendorff/dht
+
+const INITIAL_CAPACITY = 4;
+const FILL_FACTOR: f64 = 8 / 3;
+const FREE_FACTOR: f64 = 3 / 4;
+
+/** Structure of a set entry. */
+@unmanaged
+class SetEntry<K> {
+  key: K;
+  taggedNext: usize; // LSB=1 indicates EMPTY
+}
+
+/** Empty bit. */
+const EMPTY: usize = 1 << 0;
+
+/** Size of a bucket. */
+const BUCKET_SIZE = sizeof<usize>();
+
+/** Computes the alignment of an entry. */
+@inline function ENTRY_ALIGN<K>(): usize {
+  // can align to 4 instead of 8 if 32-bit and K is <= 32-bits
+  const align = (sizeof<K>() > sizeof<usize>() ? sizeof<K>() : sizeof<usize>()) - 1;
+  return align;
+}
+
+/** Computes the aligned size of an entry. */
+@inline function ENTRY_SIZE<K>(): usize {
+  const align = ENTRY_ALIGN<K>();
+  const size = (offsetof<SetEntry<K>>() + align) & ~align;
+  return size;
+}
+
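Note that `ENTRY_ALIGN` actually returns a mask (alignment - 1), and `ENTRY_SIZE` rounds the raw struct size up to that alignment with `(size + mask) & ~mask`. A hedged worked example of the rounding, assuming a 4-byte `usize` (wasm32) and ignoring field-order padding:

```ts
// entry = key + taggedNext; align to the larger of sizeof<K>() and sizeof<usize>()
function entrySize(sizeofK: number, sizeofUsize: number): number {
  const mask = Math.max(sizeofK, sizeofUsize) - 1; // alignment - 1
  const raw = sizeofK + sizeofUsize;               // assumed unpadded struct size
  return (raw + mask) & ~mask;                     // round up to the alignment
}

// entrySize(4, 4) == 8   // i32 key on wasm32
// entrySize(8, 4) == 16  // f64 key: raw 12 rounds up to the next multiple of 8
```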
+class Set<K> {
+
+  // buckets holding references to the respective first entry within
+  private buckets: ArrayBuffer; // usize[bucketsMask + 1]
+  private bucketsMask: u32;
+
+  // entries in insertion order
+  private entries: ArrayBuffer; // SetEntry<K>[entriesCapacity]
+  private entriesCapacity: i32;
+  private entriesOffset: i32;
+  private entriesCount: i32;
+
+  get size(): i32 { return this.entriesCount; }
+
+  constructor() { this.clear(); }
+
+  clear(): void {
+    const bucketsSize = INITIAL_CAPACITY * <i32>BUCKET_SIZE;
+    this.buckets = new ArrayBuffer(bucketsSize);
+    this.bucketsMask = INITIAL_CAPACITY - 1;
+    const entriesSize = INITIAL_CAPACITY * <i32>ENTRY_SIZE<K>();
+    this.entries = new ArrayBuffer(entriesSize, true);
+    this.entriesCapacity = INITIAL_CAPACITY;
+    this.entriesOffset = 0;
+    this.entriesCount = 0;
+  }
+
+  private find(key: K, hashCode: u32): SetEntry<K> | null {
+    var entry = this.buckets.load<SetEntry<K>>(hashCode & this.bucketsMask);
+    var i = 0;
+    while (entry) {
+      if (!(entry.taggedNext & EMPTY) && entry.key == key) return entry;
+      entry = changetype<SetEntry<K>>(entry.taggedNext & ~EMPTY);
+    }
+    return null;
+  }
+
+  has(key: K): bool {
+    return this.find(key, hash(key)) !== null;
+  }
+
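Bucket chains store raw entry pointers, and deletion only sets the pointer's low bit (EMPTY) instead of unlinking, which is why `find` masks the bit off before following `taggedNext`. A small sketch of the tagging scheme, with plain numbers standing in for pointers:

```ts
// The low bit marks a tombstone; the remaining bits are the address of the
// next entry. Entries are usize-aligned, so real addresses never set bit 0.
const EMPTY = 1 << 0;

const isDeleted   = (taggedNext: number): boolean => (taggedNext & EMPTY) !== 0;
const nextPtr     = (taggedNext: number): number  => taggedNext & ~EMPTY;
const markDeleted = (taggedNext: number): number  => taggedNext | EMPTY;

// A chain terminates at 0 (null), matching the while (entry) loop above.
```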
+  add(key: K): void {
+    var hashCode = hash(key);
+    var entry = this.find(key, hashCode);
+    if (!entry) {
+      // check if rehashing is necessary
+      if (this.entriesOffset == this.entriesCapacity) {
+        this.rehash(
+          this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
+            ? this.bucketsMask // just rehash if 1/4+ entries are empty
+            : (this.bucketsMask << 1) | 1 // grow capacity to next 2^N
+        );
+      }
+      // append new entry
+      let entries = this.entries;
+      entry = changetype<SetEntry<K>>(
+        changetype<usize>(entries) + ArrayBuffer.HEADER_SIZE + this.entriesOffset++ * ENTRY_SIZE<K>()
+      );
+      entry.key = key;
+      ++this.entriesCount;
+      // link with previous entry in bucket
+      let bucketIndex = hashCode & this.bucketsMask;
+      entry.taggedNext = this.buckets.load<usize>(bucketIndex);
+      this.buckets.store<usize>(bucketIndex, changetype<usize>(entry));
+    }
+  }
+
+  delete(key: K): bool {
+    var entry = this.find(key, hash<K>(key));
+    if (!entry) return false;
+    entry.taggedNext |= EMPTY;
+    --this.entriesCount;
+    // check if rehashing is appropriate
+    var halfBucketsMask = this.bucketsMask >> 1;
+    if (
+      halfBucketsMask + 1 >= max<u32>(INITIAL_CAPACITY, this.entriesCount) &&
+      this.entriesCount < <i32>(this.entriesCapacity * FREE_FACTOR)
+    ) this.rehash(halfBucketsMask);
+    return true;
+  }
+
+  private rehash(newBucketsMask: u32): void {
+    var newBucketsCapacity = <i32>(newBucketsMask + 1);
+    var newBuckets = new ArrayBuffer(newBucketsCapacity * <i32>BUCKET_SIZE);
+    var newEntriesCapacity = <i32>(newBucketsCapacity * FILL_FACTOR);
+    var newEntries = new ArrayBuffer(newEntriesCapacity * <i32>ENTRY_SIZE<K>(), true);
+
+    // copy old entries to new entries
+    var oldPtr = changetype<usize>(this.entries) + ArrayBuffer.HEADER_SIZE;
+    var oldEnd = oldPtr + <usize>this.entriesOffset * ENTRY_SIZE<K>();
+    var newPtr = changetype<usize>(newEntries) + ArrayBuffer.HEADER_SIZE;
+    while (oldPtr != oldEnd) {
+      let oldEntry = changetype<SetEntry<K>>(oldPtr);
+      if (!(oldEntry.taggedNext & EMPTY)) {
+        let newEntry = changetype<SetEntry<K>>(newPtr);
+        newEntry.key = oldEntry.key;
+        let newBucketIndex = hash<K>(oldEntry.key) & newBucketsMask;
+        let newBucketPtr = changetype<usize>(newBuckets) + <usize>newBucketIndex * BUCKET_SIZE;
+        newEntry.taggedNext = load<usize>(newBucketPtr, ArrayBuffer.HEADER_SIZE);
+        store<usize>(newBucketPtr, newPtr, ArrayBuffer.HEADER_SIZE);
+        newPtr += ENTRY_SIZE<K>();
+      }
+      oldPtr += ENTRY_SIZE<K>();
+    }
+
+    this.buckets = newBuckets;
+    this.bucketsMask = newBucketsMask;
+    this.entries = newEntries;
+    this.entriesCapacity = newEntriesCapacity;
+    this.entriesOffset = this.entriesCount;
+  }
+}
+
import "allocator/arena";
|
||||
|
||||
// note that this doesn't test a real set implementation yet, see std/assembly/set.ts
|
||||
function test<K>(): void {
|
||||
var set = new Set<K>();
|
||||
|
||||
var set = changetype<Set<i32>>(allocate_memory(sizeof<usize>() + 2 * sizeof<i32>()));
|
||||
// insert new
|
||||
for (let k: K = 0; k < 100; ++k) {
|
||||
assert(!set.has(k));
|
||||
set.add(k);
|
||||
assert(set.has(k));
|
||||
}
|
||||
assert(set.size == 100);
|
||||
|
||||
assert(set.size == 0);
|
||||
// insert duplicate
|
||||
for (let k: K = 50; k < 100; ++k) {
|
||||
assert(set.has(k));
|
||||
set.add(k);
|
||||
assert(set.has(k));
|
||||
}
|
||||
assert(set.size == 100);
|
||||
|
||||
set.add(1);
|
||||
set.add(0);
|
||||
set.add(2);
|
||||
// delete
|
||||
for (let k: K = 0; k < 50; ++k) {
|
||||
assert(set.has(k));
|
||||
set.delete(k);
|
||||
assert(!set.has(k));
|
||||
}
|
||||
assert(set.size == 50);
|
||||
|
||||
assert(set.size == 3);
|
||||
// insert + delete
|
||||
for (let k: K = 0; k < 50; ++k) {
|
||||
assert(!set.has(k));
|
||||
set.add(k);
|
||||
assert(set.has(k));
|
||||
set.delete(k);
|
||||
assert(!set.has(k));
|
||||
}
|
||||
assert(set.size == 50);
|
||||
|
||||
assert(set.has(1));
|
||||
assert(set.has(0));
|
||||
assert(set.has(2));
|
||||
assert(!set.has(3));
|
||||
// clear
|
||||
set.clear();
|
||||
assert(set.size == 0);
|
||||
}
|
||||
|
||||
set.delete(0);
|
||||
|
||||
assert(set.size == 2);
|
||||
assert(set.has(1));
|
||||
assert(!set.has(0));
|
||||
assert(set.has(2));
|
||||
|
||||
set.clear();
|
||||
|
||||
assert(set.size == 0);
|
||||
assert(!set.has(1));
|
||||
test<i8>();
|
||||
test<u8>();
|
||||
test<i16>();
|
||||
test<u16>();
|
||||
test<i32>();
|
||||
test<u32>();
|
||||
test<i64>();
|
||||
test<u64>();
|
||||
test<f32>();
|
||||
test<f64>();
|
||||
|
File diff suppressed because it is too large