Source: $(PHOBOSSRC std/experimental/allocator/building_blocks/kernighan_ritchie.d)
*/
module std.experimental.allocator.building_blocks.kernighan_ritchie;
-import std.experimental.allocator.building_blocks.null_allocator :
-    NullAllocator;
+import std.experimental.allocator.building_blocks.null_allocator;

// debug = KRRegion;
debug (KRRegion) import std.stdio;
@@ -111,18 +110,21 @@ struct KRRegion(ParentAllocator = NullAllocator)

        this(this) @disable;

+        pure nothrow @trusted @nogc
        void[] payload() inout
        {
            return (cast(ubyte*) &this)[0 .. size];
        }

+        pure nothrow @trusted @nogc
        bool adjacent(in Node* right) const
        {
            assert(right);
            auto p = payload;
            return p.ptr < right && right < p.ptr + p.length + Node.sizeof;
        }

+        pure nothrow @trusted @nogc
        bool coalesce(void* memoryEnd = null)
        {
            // Coalesce the last node before the memory end with any possible gap
@@ -139,6 +141,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
            return true;
        }

+        @safe
        Tuple!(void[], Node*) allocateHere(size_t bytes)
        {
            assert(bytes >= Node.sizeof);
@@ -152,7 +155,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
            if (leftover >= Node.sizeof)
            {
                // There's room for another node
-                auto newNode = cast(Node*) ((cast(ubyte*) &this) + bytes);
+                auto newNode = (() @trusted => cast(Node*) ((cast(ubyte*) &this) + bytes))();
                newNode.size = leftover;
                newNode.next = next == &this ? newNode : next;
                assert(next);
@@ -358,7 +361,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
    /// Ditto
    static if (!is(ParentAllocator == NullAllocator)
        && hasMember!(ParentAllocator, "deallocate"))
-    ~this()
+    @trusted ~this()
    {
        parent.deallocate(payload);
    }
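
Aside (not part of the diff): a minimal sketch of what this destructor does for a caller when KRRegion is backed by a parent allocator that defines deallocate. The choice of Mallocator and the 1 MB size are assumptions for illustration.

// Sketch only: a KRRegion whose parent has deallocate returns its whole
// payload to that parent in ~this(), so the region cleans up at scope exit.
@system unittest
{
    import std.experimental.allocator.building_blocks.kernighan_ritchie : KRRegion;
    import std.experimental.allocator.mallocator : Mallocator;

    {
        auto alloc = KRRegion!Mallocator(1024 * 1024); // payload comes from Mallocator
        auto b = alloc.allocate(100);
        assert(b.length == 100);
    } // ~this() hands the 1 MB payload back to Mallocator here

}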
@@ -396,6 +399,7 @@ struct KRRegion(ParentAllocator = NullAllocator)

    Returns: A word-aligned buffer of `n` bytes, or `null`.
    */
+    @safe
    void[] allocate(size_t n)
    {
        if (!n || !root) return null;
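
Aside (not part of the diff): with allocate now @safe, a @safe lambda can call it directly; construction of the region itself stays outside @safe here, mirroring the pattern the updated tests use. The buffer size is an assumption for illustration.

// Sketch only: calling the now-@safe allocate from a @safe context.
@system unittest
{
    import std.experimental.allocator.building_blocks.kernighan_ritchie : KRRegion;

    ubyte[4096] storage;
    auto alloc = KRRegion!()(storage);            // construction is still @system
    auto b = (() @safe => alloc.allocate(64))();  // the call itself is @safe now
    assert(b.length == 64);
}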
@@ -413,7 +417,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
                immutable balance = root.size - actualBytes;
                if (balance >= Node.sizeof)
                {
-                    auto newRoot = cast(Node*) (result + actualBytes);
+                    auto newRoot = (() @trusted => cast(Node*) (result + actualBytes))();
                    newRoot.next = root.next;
                    newRoot.size = balance;
                    root = newRoot;
@@ -423,7 +427,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
                    root = null;
                    switchToFreeList;
                }
-                return result[0 .. n];
+                return (() @trusted => result[0 .. n])();
            }

            // Not enough memory, switch to freelist mode and fall through
@@ -554,6 +558,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
    at the front of the free list. These blocks get coalesced, whether
    `allocateAll` succeeds or fails due to fragmentation.
    */
+
    void[] allocateAll()
    {
        if (regionMode) switchToFreeList;
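
Aside (not part of the diff): a sketch of the allocateAll round trip the documentation above describes; the buffer and request sizes are assumptions for illustration.

// Sketch only: allocateAll hands out all remaining memory in one block,
// and returning that block makes the region usable again.
@system unittest
{
    import std.experimental.allocator.building_blocks.kernighan_ritchie : KRRegion;

    ubyte[4096] storage;
    auto alloc = KRRegion!()(storage);
    auto everything = alloc.allocateAll();
    assert(everything.length > 0);                       // got one big block
    () nothrow @nogc { alloc.deallocate(everything); }();
    auto b = alloc.allocate(32);                         // memory is reusable again
    assert(b.length == 32);
}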
@@ -647,7 +652,7 @@ fronting the GC allocator.
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.typecons : Ternary;
    // KRRegion fronting a general-purpose allocator
-    align(KRRegion!().alignment) ubyte[1024 * 128] buf;
+    ubyte[1024 * 128] buf;
    auto alloc = fallbackAllocator(KRRegion!()(buf), GCAllocator.instance);
    auto b = alloc.allocate(100);
    assert(b.length == 100);
@@ -669,6 +674,7 @@ it actually returns memory to the operating system when possible.
    import std.algorithm.comparison : max;
    import std.experimental.allocator.building_blocks.allocator_list
        : AllocatorList;
+    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mmap_allocator : MmapAllocator;
    AllocatorList!(n => KRRegion!MmapAllocator(max(n * 16, 1024 * 1024))) alloc;
}
@@ -678,6 +684,7 @@ it actually returns memory to the operating system when possible.
    import std.algorithm.comparison : max;
    import std.experimental.allocator.building_blocks.allocator_list
        : AllocatorList;
+    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mallocator : Mallocator;
    import std.typecons : Ternary;
    /*
@@ -710,6 +717,7 @@ it actually returns memory to the operating system when possible.
    import std.algorithm.comparison : max;
    import std.experimental.allocator.building_blocks.allocator_list
        : AllocatorList;
+    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mmap_allocator : MmapAllocator;
    import std.typecons : Ternary;
    /*
@@ -742,7 +750,6 @@ it actually returns memory to the operating system when possible.
    }
}

-version (StdUnittest)
@system unittest
{
    import std.algorithm.comparison : max;
@@ -754,16 +761,17 @@ version (StdUnittest)
        n => KRRegion!GCAllocator(max(n * 16, 1024 * 1024)))());
}

-@system unittest
+@trusted unittest
{
    import std.experimental.allocator.gc_allocator : GCAllocator;
-
    auto alloc = KRRegion!GCAllocator(1024 * 1024);

+
+
    void[][] array;
    foreach (i; 1 .. 4)
    {
-        array ~= alloc.allocate(i);
+        array ~= (() nothrow @safe => alloc.allocate(i))();
        assert(array[$ - 1].length == i);
    }
    () nothrow @nogc { alloc.deallocate(array[1]); }();
@@ -778,11 +786,11 @@ version (StdUnittest)
    import std.typecons : Ternary;
    auto alloc = KRRegion!()(
        cast(ubyte[])(GCAllocator.instance.allocate(1024 * 1024)));
-    const store = alloc.allocate(KRRegion!().sizeof);
+    const store = (() pure nothrow @safe @nogc => alloc.allocate(KRRegion!().sizeof))();
    auto p = cast(KRRegion!()*) store.ptr;
-    import core.lifetime : emplace;
    import core.stdc.string : memcpy;
-    import std.conv : text;
+    import std.algorithm.mutation : move;
+    import std.conv : text, emplace;

    memcpy(p, &alloc, alloc.sizeof);
    emplace(&alloc);
@@ -791,7 +799,7 @@ version (StdUnittest)
    foreach (i; 0 .. array.length)
    {
        auto length = 100 * i + 1;
-        array[i] = p.allocate(length);
+        array[i] = (() pure nothrow @safe @nogc => p.allocate(length))();
        assert(array[i].length == length, text(array[i].length));
        assert((() pure nothrow @safe @nogc => p.owns(array[i]))() == Ternary.yes);
    }
@@ -820,14 +828,16 @@ version (StdUnittest)
    assert(p.length == 1024 * 1024);
}

+
@system unittest
{
-    import std.random : randomCover;
+    import std.experimental.allocator.building_blocks;
+    import std.random;
    import std.typecons : Ternary;

    // Both sequences must work on either system

-    // A sequence of allocs which generates the error described in https://issues.dlang.org/show_bug.cgi?id=16564
+    // A sequence of allocs which generates the error described in issue 16564
    // that is a gap at the end of buf from the perspective of the allocator

    // for 64 bit systems (leftover balance = 8 bytes < 16)
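
Aside (not part of the diff): the arithmetic behind the gap this comment describes, written out for illustration; the Node layout (a next pointer plus a size) is an assumption matching the 16-byte figure on 64-bit.

// Sketch only: if the last allocation leaves fewer bytes than Node.sizeof
// (a Node* plus a size_t, i.e. 16 on 64-bit, 8 on 32-bit), no free-list node
// fits there, so the tail of buf becomes a gap that coalescing must handle.
@safe unittest
{
    enum assumedNodeSize = 2 * size_t.sizeof;  // assumption: Node { Node* next; size_t size; }
    enum leftover = size_t.sizeof;             // the 8-byte leftover on 64-bit from the failing sequence
    static assert(leftover < assumedNodeSize); // too small to host another free-list node
}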
@@ -837,16 +847,16 @@ version (StdUnittest)
    int[] sizes32 = [81412, 107068, 49892, 23768];


-    void test(int[] sizes)
+    @system void test(int[] sizes)
    {
        align(size_t.sizeof) ubyte[256 * 1024] buf;
-        auto a = KRRegion!()(buf);
+        auto a = (() @trusted => createAllocator(buf))();

        void[][] bufs;

        foreach (size; sizes)
        {
-            bufs ~= a.allocate(size);
+            bufs ~= (() pure nothrow @safe @nogc => a.allocate(size))();
        }

        foreach (b; bufs.randomCover)
@@ -857,12 +867,22 @@ version (StdUnittest)
        assert((() pure nothrow @safe @nogc => a.empty)() == Ternary.yes);
    }

-    test(sizes64);
-    test(sizes32);
+    () @trusted {
+        test(sizes64);
+        test(sizes32);
+    }();
}

-@system unittest
+@system KRRegion!NullAllocator createAllocator(ubyte[] buf)
+{
+    return KRRegion!NullAllocator(buf);
+}
+
+
+@safe unittest
{
+    import std.experimental.allocator.building_blocks;
+    import std.random;
    import std.typecons : Ternary;

    // For 64 bits, we allocate in multiples of 8, but the minimum alloc size is 16.
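
Aside (not part of the diff): the sizing rule this comment states, written out for illustration; the rounding helper below is an assumption, not the allocator's own goodAllocSize.

// Sketch only: requests round up to the word size, with a floor of one
// free-list node (16 bytes on 64-bit, 8 on 32-bit).
@safe unittest
{
    enum word = size_t.sizeof;
    enum minAlloc = 2 * word;
    static size_t rounded(size_t n)
    {
        auto r = (n + word - 1) / word * word;  // round up to a multiple of word
        return r < minAlloc ? minAlloc : r;     // but never below the minimum
    }
    assert(rounded(1) == minAlloc);             // a 1-byte request costs a full minimum block
    assert(rounded(minAlloc + 1) == minAlloc + word);
}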
@@ -886,11 +906,11 @@ version (StdUnittest)

    foreach (size; sizes)
    {
-        bufs ~= a.allocate(size);
+        bufs ~= (() pure nothrow @safe @nogc => a.allocate(size))();
    }

    () nothrow @nogc { a.deallocate(bufs[1]); }();
-    bufs ~= a.allocate(sizes[1] - word);
+    bufs ~= (() pure nothrow @safe @nogc => a.allocate(sizes[1] - word))();

    () nothrow @nogc { a.deallocate(bufs[0]); }();
    foreach (i; 2 .. bufs.length)
@@ -916,7 +936,7 @@ version (StdUnittest)
@system unittest
{ import std.typecons : Ternary;

-    align(KRRegion!().alignment) ubyte[1024] b;
+    ubyte[1024] b;
    auto alloc = KRRegion!()(b);

    auto k = alloc.allocate(128);