-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathsv.c
12849 lines (11185 loc) · 349 KB
/
sv.c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/* sv.c
*
* Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
* 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 by Larry Wall
* and others
*
* You may distribute under the terms of either the GNU General Public
* License or the Artistic License, as specified in the README file.
*
*/
/*
* 'I wonder what the Entish is for "yes" and "no",' he thought.
* --Pippin
*
* [p.480 of _The Lord of the Rings_, III/iv: "Treebeard"]
*/
/*
*
*
* This file contains the code that creates, manipulates and destroys
* scalar values (SVs). The other types (AV, HV, GV, etc.) reuse the
* structure of an SV, so their creation and destruction is handled
* here; higher-level functions are in av.c, hv.c, and so on. Opcode
* level functions (eg. substr, split, join) for each of the types are
* in the pp*.c files.
*/
#include "EXTERN.h"
#define PERL_IN_SV_C
#include "perl.h"
#include "regcomp.h"
#define FCALL *f
#ifdef __Lynx__
/* Missing proto on LynxOS */
char *gconvert(double, int, int, char *);
#endif
#ifdef PERL_UTF8_CACHE_ASSERT
/* if adding more checks watch out for the following tests:
* t/op/index.t t/op/length.t t/op/pat.t t/op/substr.t
* lib/utf8.t lib/Unicode/Collate/t/index.t
* --jhi
*/
# define ASSERT_UTF8_CACHE(cache) \
STMT_START { if (cache) { assert((cache)[0] <= (cache)[1]); \
assert((cache)[2] <= (cache)[3]); \
assert((cache)[3] <= (cache)[1]);} \
} STMT_END
#else
# define ASSERT_UTF8_CACHE(cache) NOOP
#endif
#ifdef PERL_OLD_COPY_ON_WRITE
#define SV_COW_NEXT_SV(sv) INT2PTR(SV *,SvUVX(sv))
#define SV_COW_NEXT_SV_SET(current,next) SvUV_set(current, PTR2UV(next))
/* This is a pessimistic view. Scalar must be purely a read-write PV to copy-
on-write. */
#endif
/* ============================================================================
=head1 Allocation and deallocation of SVs.
An SV (or AV, HV, etc.) is allocated in two parts: the head (struct
sv, av, hv...) contains type and reference count information, and for
many types, a pointer to the body (struct xrv, xpv, xpviv...), which
contains fields specific to each type. Some types store all they need
in the head, so don't have a body.
In all but the most memory-paranoid configurations (ex: PURIFY), heads
and bodies are allocated out of arenas, which by default are
approximately 4K chunks of memory parcelled up into N heads or bodies.
Sv-bodies are allocated by their sv-type, guaranteeing size
consistency needed to allocate safely from arrays.
For SV-heads, the first slot in each arena is reserved, and holds a
link to the next arena, some flags, and a note of the number of slots.
Snaked through each arena chain is a linked list of free items; when
this becomes empty, an extra arena is allocated and divided up into N
items which are threaded into the free list.
SV-bodies are similar, but they use arena-sets by default, which
separate the link and info from the arena itself, and reclaim the 1st
slot in the arena. SV-bodies are further described later.
The following global variables are associated with arenas:
PL_sv_arenaroot pointer to list of SV arenas
PL_sv_root pointer to list of free SV structures
PL_body_arenas head of linked-list of body arenas
PL_body_roots[] array of pointers to list of free bodies of svtype
arrays are indexed by the svtype needed
A few special SV heads are not allocated from an arena, but are
instead directly created in the interpreter structure, eg PL_sv_undef.
The size of arenas can be changed from the default by setting
PERL_ARENA_SIZE appropriately at compile time.
The SV arena serves the secondary purpose of allowing still-live SVs
to be located and destroyed during final cleanup.
At the lowest level, the macros new_SV() and del_SV() grab and free
an SV head. (If debugging with -DD, del_SV() calls the function S_del_sv()
to return the SV to the free list with error checking.) new_SV() calls
more_sv() / sv_add_arena() to add an extra arena if the free list is empty.
SVs in the free list have their SvTYPE field set to all ones.
At the time of very final cleanup, sv_free_arenas() is called from
perl_destruct() to physically free all the arenas allocated since the
start of the interpreter.
The function visit() scans the SV arenas list, and calls a specified
function for each SV it finds which is still live - ie which has an SvTYPE
other than all 1's, and a non-zero SvREFCNT. visit() is used by the
following functions (specified as [function that calls visit()] / [function
called by visit() for each SV]):
sv_report_used() / do_report_used()
dump all remaining SVs (debugging aid)
sv_clean_objs() / do_clean_objs(),do_clean_named_objs()
Attempt to free all objects pointed to by RVs,
and, unless DISABLE_DESTRUCTOR_KLUDGE is defined,
try to do the same for all objects indirectly
referenced by typeglobs too. Called once from
perl_destruct(), prior to calling sv_clean_all()
below.
sv_clean_all() / do_clean_all()
SvREFCNT_dec(sv) each remaining SV, possibly
triggering an sv_free(). It also sets the
SVf_BREAK flag on the SV to indicate that the
refcnt has been artificially lowered, and thus
stopping sv_free() from giving spurious warnings
about SVs which unexpectedly have a refcnt
of zero. called repeatedly from perl_destruct()
until there are no SVs left.
=head2 Arena allocator API Summary
Private API to rest of sv.c
new_SV(), del_SV(),
new_XIV(), del_XIV(),
new_XNV(), del_XNV(),
etc
Public API:
sv_report_used(), sv_clean_objs(), sv_clean_all(), sv_free_arenas()
=cut
* ========================================================================= */
/*
* "A time to plant, and a time to uproot what was planted..."
*/
void
Perl_offer_nice_chunk(pTHX_ void *chunk, U32 chunk_size)
{
    dVAR;

    PERL_ARGS_ASSERT_OFFER_NICE_CHUNK;

    /* Keep at most one spare chunk (PL_nice_chunk) for later reuse as an
     * SV arena by S_more_sv().  If the offered chunk is no bigger than the
     * one already cached, just release it; otherwise the cached one is
     * released and the offered chunk takes its place. */
    if (chunk_size <= PL_nice_chunk_size) {
	Safefree(chunk);
	return;
    }

    Safefree(PL_nice_chunk);
    PL_nice_chunk = (char *) chunk;
    PL_nice_chunk_size = chunk_size;
}
/* Memory-logging hooks: record creation/destruction of SV heads when perl
 * is built with PERL_MEM_LOG; otherwise they compile away to NOOP. */
#ifdef PERL_MEM_LOG
# define MEM_LOG_NEW_SV(sv, file, line, func) \
    Perl_mem_log_new_sv(sv, file, line, func)
# define MEM_LOG_DEL_SV(sv, file, line, func) \
    Perl_mem_log_del_sv(sv, file, line, func)
#else
# define MEM_LOG_NEW_SV(sv, file, line, func) NOOP
# define MEM_LOG_DEL_SV(sv, file, line, func) NOOP
#endif

/* Under DEBUG_LEAKING_SCALARS each SV head carries extra bookkeeping
 * (sv_debug_file, sv_debug_serial); release/trace it when a head is
 * planted back on the free list. */
#ifdef DEBUG_LEAKING_SCALARS
# define FREE_SV_DEBUG_FILE(sv) Safefree((sv)->sv_debug_file)
# define DEBUG_SV_SERIAL(sv) \
    DEBUG_m(PerlIO_printf(Perl_debug_log, "0x%"UVxf": (%05ld) del_SV\n", \
	    PTR2UV(sv), (long)(sv)->sv_debug_serial))
#else
# define FREE_SV_DEBUG_FILE(sv)
# define DEBUG_SV_SERIAL(sv) NOOP
#endif

/* Under PERL_POISON the SvANY and SvREFCNT words of a freed head are
 * overwritten (PoisonNew), so the free-list chain pointer is kept in
 * sv_u.svu_rv instead of SvANY. */
#ifdef PERL_POISON
# define SvARENA_CHAIN(sv) ((sv)->sv_u.svu_rv)
# define SvARENA_CHAIN_SET(sv,val) (sv)->sv_u.svu_rv = MUTABLE_SV((val))
/* Whilst I'd love to do this, it seems that things like to check on
   unreferenced scalars
# define POSION_SV_HEAD(sv) PoisonNew(sv, 1, struct STRUCT_SV)
*/
# define POSION_SV_HEAD(sv) PoisonNew(&SvANY(sv), 1, void *), \
				PoisonNew(&SvREFCNT(sv), 1, U32)
#else
# define SvARENA_CHAIN(sv) SvANY(sv)
# define SvARENA_CHAIN_SET(sv,val) SvANY(sv) = (void *)(val)
# define POSION_SV_HEAD(sv)
#endif

/* Mark an SV head as unused, and add to free list.
 *
 * If SVf_BREAK is set, skip adding it to the free list, as this SV had
 * its refcount artificially decremented during global destruction, so
 * there may be dangling pointers to it. The last thing we want in that
 * case is for it to be reused. */
#define plant_SV(p) \
    STMT_START {					\
	const U32 old_flags = SvFLAGS(p);		\
	MEM_LOG_DEL_SV(p, __FILE__, __LINE__, FUNCTION__);  \
	DEBUG_SV_SERIAL(p);				\
	FREE_SV_DEBUG_FILE(p);				\
	POSION_SV_HEAD(p);				\
	SvFLAGS(p) = SVTYPEMASK;			\
	if (!(old_flags & SVf_BREAK)) {			\
	    SvARENA_CHAIN_SET(p, PL_sv_root);		\
	    PL_sv_root = (p);				\
	}						\
	--PL_sv_count;					\
    } STMT_END

/* Pop the next free SV head off the free list rooted at PL_sv_root.
 * Callers must have checked that the free list is non-empty. */
#define uproot_SV(p) \
    STMT_START {					\
	(p) = PL_sv_root;				\
	PL_sv_root = MUTABLE_SV(SvARENA_CHAIN(p));	\
	++PL_sv_count;					\
    } STMT_END
/* make some more SVs by adding another arena */

STATIC SV*
S_more_sv(pTHX)
{
    dVAR;
    SV* sv;

    if (!PL_nice_chunk) {
	/* No previously-offered chunk is cached: allocate a brand new
	 * arena.  Newx must be used here so that sv_free_arenas() can
	 * later release it with Safefree(). */
	char *fresh;
	Newx(fresh, PERL_ARENA_SIZE, char);
	sv_add_arena(fresh, PERL_ARENA_SIZE, 0);
    }
    else {
	/* Recycle the chunk stashed by Perl_offer_nice_chunk(). */
	sv_add_arena(PL_nice_chunk, PL_nice_chunk_size, 0);
	PL_nice_chunk = NULL;
	PL_nice_chunk_size = 0;
    }

    /* The freshly added arena guarantees the free list is non-empty. */
    uproot_SV(sv);
    return sv;
}
/* new_SV(): return a new, empty SV head */

#ifdef DEBUG_LEAKING_SCALARS
/* provide a real function for a debugger to play with */
STATIC SV*
S_new_SV(pTHX_ const char *file, int line, const char *func)
{
    SV* sv;

    /* Take a head from the free list, replenishing from a fresh arena
     * when it is empty. */
    if (PL_sv_root)
	uproot_SV(sv);
    else
	sv = S_more_sv(aTHX);
    SvANY(sv) = 0;
    SvREFCNT(sv) = 1;
    SvFLAGS(sv) = 0;
    /* Record where this SV was created, for later leak reports. */
    sv->sv_debug_optype = PL_op ? PL_op->op_type : 0;
    sv->sv_debug_line = (U16) (PL_parser && PL_parser->copline != NOLINE
		? PL_parser->copline
		: PL_curcop
		? CopLINE(PL_curcop)
		: 0
	    );
    sv->sv_debug_inpad = 0;
    sv->sv_debug_cloned = 0;
    sv->sv_debug_file = PL_curcop ? savepv(CopFILE(PL_curcop)): NULL;
    sv->sv_debug_serial = PL_sv_serial++;

    MEM_LOG_NEW_SV(sv, file, line, func);
    DEBUG_m(PerlIO_printf(Perl_debug_log, "0x%"UVxf": (%05ld) new_SV (from %s:%d [%s])\n",
	    PTR2UV(sv), (long)sv->sv_debug_serial, file, line, func));

    return sv;
}
# define new_SV(p) (p)=S_new_SV(aTHX_ __FILE__, __LINE__, FUNCTION__)

#else
/* Non-debugging builds inline the same head allocation as a macro. */
# define new_SV(p) \
    STMT_START {					\
	if (PL_sv_root)					\
	    uproot_SV(p);				\
	else						\
	    (p) = S_more_sv(aTHX);			\
	SvANY(p) = 0;					\
	SvREFCNT(p) = 1;				\
	SvFLAGS(p) = 0;					\
	MEM_LOG_NEW_SV(p, __FILE__, __LINE__, FUNCTION__); \
    } STMT_END
#endif
/* del_SV(): return an empty SV head to the free list */

#ifdef DEBUGGING
/* Under -DD, route through S_del_sv() so the pointer can be validated
 * against the arenas before being planted on the free list. */
#define del_SV(p) \
    STMT_START {					\
	if (DEBUG_D_TEST)				\
	    del_sv(p);					\
	else						\
	    plant_SV(p);				\
    } STMT_END

STATIC void
S_del_sv(pTHX_ SV *p)
{
    dVAR;

    PERL_ARGS_ASSERT_DEL_SV;

    if (DEBUG_D_TEST) {
	SV* sva;
	bool ok = 0;
	/* Walk every SV arena and check that p lies inside one of them.
	 * Slot 0 of each arena is the header (see Perl_sv_add_arena), so
	 * real SVs run from sva+1 up to sva[slot-count]. */
	for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
	    const SV * const sv = sva + 1;
	    const SV * const svend = &sva[SvREFCNT(sva)];
	    if (p >= sv && p < svend) {
		ok = 1;
		break;
	    }
	}
	if (!ok) {
	    /* Not inside any arena: warn and refuse to free it. */
	    if (ckWARN_d(WARN_INTERNAL))
		Perl_warner(aTHX_ packWARN(WARN_INTERNAL),
			    "Attempt to free non-arena SV: 0x%"UVxf
			    pTHX__FORMAT, PTR2UV(p) pTHX__VALUE);
	    return;
	}
    }
    plant_SV(p);
}

#else /* ! DEBUGGING */

#define del_SV(p)   plant_SV(p)

#endif /* DEBUGGING */
/*
=head1 SV Manipulation Functions
=for apidoc sv_add_arena
Given a chunk of memory, link it to the head of the list of arenas,
and split it into a list of free SVs.
=cut
*/
void
Perl_sv_add_arena(pTHX_ char *ptr, U32 size, U32 flags)
{
    dVAR;
    SV *const sva = MUTABLE_SV(ptr);
    register SV* sv;
    register SV* svend;

    PERL_ARGS_ASSERT_SV_ADD_ARENA;

    /* The first SV in an arena isn't an SV: it is the arena header,
     * holding the link to the next arena, the slot count, and flags. */
    SvANY(sva) = (void *) PL_sv_arenaroot;	/* ptr to next arena */
    SvREFCNT(sva) = size / sizeof(SV);		/* number of SV slots */
    SvFLAGS(sva) = flags;			/* FAKE if not to be freed */

    PL_sv_arenaroot = sva;
    PL_sv_root = sva + 1;

    /* Thread every remaining slot into the free list, each chained to
     * the next; the final slot's chain pointer is 0 (end of list). */
    svend = &sva[SvREFCNT(sva) - 1];
    sv = sva + 1;
    while (sv < svend) {
	SvARENA_CHAIN_SET(sv, (sv + 1));
#ifdef DEBUGGING
	SvREFCNT(sv) = 0;
#endif
	/* Must always set typemask because it's always checked in on cleanup
	   when the arenas are walked looking for objects. */
	SvFLAGS(sv) = SVTYPEMASK;
	sv++;
    }
    SvARENA_CHAIN_SET(sv, 0);
#ifdef DEBUGGING
    SvREFCNT(sv) = 0;
#endif
    SvFLAGS(sv) = SVTYPEMASK;
}
/* visit(): call the named function for each non-free SV in the arenas
 * whose flags field matches the flags/mask args.  Returns the number of
 * SVs visited. */

STATIC I32
S_visit(pTHX_ SVFUNC_t f, U32 flags, U32 mask)
{
    dVAR;
    SV* sva;
    I32 visited = 0;

    PERL_ARGS_ASSERT_VISIT;

    /* Outer loop: every SV arena.  Inner loop: every slot after the
     * arena header.  A slot is live iff its type isn't SVTYPEMASK (the
     * free-slot marker) and its refcount is non-zero. */
    for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
	register const SV * const svend = &sva[SvREFCNT(sva)];
	register SV* sv;
	for (sv = sva + 1; sv < svend; ++sv) {
	    if (SvTYPE(sv) != SVTYPEMASK
		    && (sv->sv_flags & mask) == flags
		    && SvREFCNT(sv))
	    {
		(FCALL)(aTHX_ sv);	/* FCALL expands to *f */
		++visited;
	    }
	}
    }
    return visited;
}
#ifdef DEBUGGING
/* called by sv_report_used() for each live SV */
static void
do_report_used(pTHX_ SV *sv)
{
    /* Freed arena slots have all type bits set (SVTYPEMASK); skip those
     * and dump anything else, since it is a still-live SV. */
    if (SvTYPE(sv) == SVTYPEMASK)
	return;
    PerlIO_printf(Perl_debug_log, "****\n");
    sv_dump(sv);
}
#endif
/*
=for apidoc sv_report_used
Dump the contents of all SVs not yet freed. (Debugging aid).
=cut
*/
void
Perl_sv_report_used(pTHX)
{
#ifdef DEBUGGING
    /* flags/mask of 0/0 matches every live SV in the arenas. */
    visit(do_report_used, 0, 0);
#else
    PERL_UNUSED_CONTEXT;
#endif
}
/* called by sv_clean_objs() for each live SV that is an RV */

static void
do_clean_objs(pTHX_ SV *ref)
{
    dVAR;
    assert (SvROK(ref));
    {
	SV * const target = SvRV(ref);
	if (SvOBJECT(target)) {
	    DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning object ref:\n "), sv_dump(ref)));
	    if (SvWEAKREF(ref)) {
		/* A weak ref holds no refcount on its target: just
		 * remove the backref entry and clear the reference. */
		sv_del_backref(target, ref);
		SvWEAKREF_off(ref);
		SvRV_set(ref, NULL);
	    } else {
		/* Break the link first, then release the refcount the
		 * link held, which may destroy the object. */
		SvROK_off(ref);
		SvRV_set(ref, NULL);
		SvREFCNT_dec(target);
	    }
	}
    }

    /* XXX Might want to check arrays, etc. */
}
/* called by sv_clean_objs() for each live SV */

#ifndef DISABLE_DESTRUCTOR_KLUDGE
static void
do_clean_named_objs(pTHX_ SV *sv)
{
    dVAR;
    assert(SvTYPE(sv) == SVt_PVGV);
    assert(isGV_with_GP(sv));
    if (GvGP(sv)) {
	/* If any of the glob's slots (scalar, array, hash, io, code)
	 * holds a blessed object, drop a reference to the glob so those
	 * objects can be freed. */
	if ((
#ifdef PERL_DONT_CREATE_GVSV
	     GvSV(sv) &&
#endif
	     SvOBJECT(GvSV(sv))) ||
	     (GvAV(sv) && SvOBJECT(GvAV(sv))) ||
	     (GvHV(sv) && SvOBJECT(GvHV(sv))) ||
	     /* In certain rare cases GvIOp(sv) can be NULL, which would make SvOBJECT(GvIO(sv)) dereference NULL. */
	     (GvIO(sv) ? (SvFLAGS(GvIOp(sv)) & SVs_OBJECT) : 0) ||
	     (GvCV(sv) && SvOBJECT(GvCV(sv))) )
	{
	    DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning named glob object:\n "), sv_dump(sv)));
	    /* SVf_BREAK marks the refcount as artificially lowered, which
	     * suppresses spurious warnings from sv_free() (see the comment
	     * above plant_SV). */
	    SvFLAGS(sv) |= SVf_BREAK;
	    SvREFCNT_dec(sv);
	}
    }
}
#endif
/*
=for apidoc sv_clean_objs
Attempt to destroy all objects not yet freed
=cut
*/
void
Perl_sv_clean_objs(pTHX)
{
    dVAR;
    /* Flag that object cleanup is in progress (consulted elsewhere in
     * the core during destruction). */
    PL_in_clean_objs = TRUE;
    /* Visit every SV whose ROK flag is set, ie every reference. */
    visit(do_clean_objs, SVf_ROK, SVf_ROK);
#ifndef DISABLE_DESTRUCTOR_KLUDGE
    /* some barnacles may yet remain, clinging to typeglobs */
    visit(do_clean_named_objs, SVt_PVGV|SVpgv_GP, SVTYPEMASK|SVp_POK|SVpgv_GP);
#endif
    PL_in_clean_objs = FALSE;
}
/* called by sv_clean_all() for each live SV */

static void
do_clean_all(pTHX_ SV *sv)
{
    dVAR;
    /* don't clean pid table and strtab */
    const bool protected_sv = (sv == (const SV *) PL_fdpid)
			   || (sv == (const SV *) PL_strtab);

    if (protected_sv)
	return;

    DEBUG_D((PerlIO_printf(Perl_debug_log, "Cleaning loops: SV at 0x%"UVxf"\n", PTR2UV(sv)) ));
    /* Mark the refcount as artificially lowered, then drop it. */
    SvFLAGS(sv) |= SVf_BREAK;
    SvREFCNT_dec(sv);
}
/*
=for apidoc sv_clean_all
Decrement the refcnt of each remaining SV, possibly triggering a
cleanup. This function may have to be called multiple times to free
SVs which are in complex self-referential hierarchies.
=cut
*/
I32
Perl_sv_clean_all(pTHX)
{
    dVAR;
    I32 nvisited;

    /* Bracket the sweep with PL_in_clean_all so the rest of the core
     * knows final cleanup is underway. */
    PL_in_clean_all = TRUE;
    nvisited = visit(do_clean_all, 0, 0);
    PL_in_clean_all = FALSE;

    return nvisited;
}
/*
ARENASETS: a meta-arena implementation which separates arena-info
into struct arena_set, which contains an array of struct
arena_descs, each holding info for a single arena. By separating
the meta-info from the arena, we recover the 1st slot, formerly
borrowed for list management. The arena_set is about the size of an
arena, avoiding the needless malloc overhead of a naive linked-list.
The cost is 1 arena-set malloc per ~320 arena-mallocs, + the unused
memory in the last arena-set (1/2 on average). In trade, we get
back the 1st slot in each arena (ie 1.7% of a CV-arena, less for
smaller types). The recovery of the wasted space allows use of
small arenas for large, rare body types, by changing array* fields
in body_details_by_type[] below.
*/
/* Descriptor for a single body arena. */
struct arena_desc {
    char       *arena;		/* the raw storage, allocated aligned */
    size_t      size;		/* its size ~4k typ */
    U32		misc;		/* type, and in future other things. */
};

struct arena_set;

/* Get the maximum number of elements in set[] such that struct arena_set
   will fit within PERL_ARENA_SIZE, which is probably just under 4K, and
   therefore likely to be 1 aligned memory page.  The "2 * sizeof(int)"
   accounts for the set_size and curr members. */

#define ARENAS_PER_SET  ((PERL_ARENA_SIZE - sizeof(struct arena_set*) \
			  - 2 * sizeof(int)) / sizeof (struct arena_desc))

struct arena_set {
    struct arena_set* next;
    unsigned int   set_size;	/* ie ARENAS_PER_SET */
    unsigned int   curr;	/* index of next available arena-desc */
    struct arena_desc set[ARENAS_PER_SET];
};
/*
=for apidoc sv_free_arenas
Deallocate the memory used by all arenas. Note that all the individual SV
heads and bodies within the arenas must already have been freed.
=cut
*/
void
Perl_sv_free_arenas(pTHX)
{
    dVAR;
    SV* sva;
    SV* svanext;
    unsigned int i;

    /* Free arenas here, but be careful about fake ones.  (We assume
       contiguity of the fake ones with the corresponding real ones.) */

    for (sva = PL_sv_arenaroot; sva; sva = svanext) {
	svanext = MUTABLE_SV(SvANY(sva));
	/* FAKE arenas are part of the same allocation as the preceding
	 * real arena, so skip over them rather than freeing them. */
	while (svanext && SvFAKE(svanext))
	    svanext = MUTABLE_SV(SvANY(svanext));

	if (!SvFAKE(sva))
	    Safefree(sva);
    }

    {
	/* Free every body arena recorded in the arena-sets, then the
	 * arena-set bookkeeping structures themselves. */
	struct arena_set *aroot = (struct arena_set*) PL_body_arenas;

	while (aroot) {
	    struct arena_set *current = aroot;
	    i = aroot->curr;
	    while (i--) {
		assert(aroot->set[i].arena);
		Safefree(aroot->set[i].arena);
	    }
	    aroot = aroot->next;
	    Safefree(current);
	}
    }
    PL_body_arenas = 0;

    /* Reset every body free-list root... */
    i = PERL_ARENA_ROOTS_SIZE;
    while (i--)
	PL_body_roots[i] = 0;

    /* ...and the cached nice chunk and SV-head roots. */
    Safefree(PL_nice_chunk);
    PL_nice_chunk = NULL;
    PL_nice_chunk_size = 0;
    PL_sv_arenaroot = 0;
    PL_sv_root = 0;
}
/*
Here are mid-level routines that manage the allocation of bodies out
of the various arenas. There are 5 kinds of arenas:
1. SV-head arenas, which are discussed and handled above
2. regular body arenas
3. arenas for reduced-size bodies
4. Hash-Entry arenas
5. pte arenas (thread related)
Arena types 2 & 3 are chained by body-type off an array of
arena-root pointers, which is indexed by svtype. Some of the
larger/less used body types are malloced singly, since a large
unused block of them is wasteful. Also, several svtypes don't have
bodies; the data fits into the sv-head itself. The arena-root
pointer thus has a few unused root-pointers (which may be hijacked
later for arena types 4,5)
3 differs from 2 as an optimization; some body types have several
unused fields in the front of the structure (which are kept in-place
for consistency). These bodies can be allocated in smaller chunks,
because the leading fields arent accessed. Pointers to such bodies
are decremented to point at the unused 'ghost' memory, knowing that
the pointers are used with offsets to the real memory.
HE, HEK arenas are managed separately, with separate code, but may
be merge-able later..
PTE arenas are not sv-bodies, but they share these mid-level
mechanics, so are considered here. The new mid-level mechanics rely
on the sv_type of the body being allocated, so we just reserve one
of the unused body-slots for PTEs, then use it in those (2) PTE
contexts below (line ~10k)
*/
/* get_arena(size): this creates custom-sized arenas
   TBD: export properly for hv.c: S_more_he().

   Allocates one raw arena of arena_size bytes and registers it in an
   arena_set so sv_free_arenas() can find and free it later.  'misc' is
   a caller-supplied tag stored in the descriptor (per arena_desc: the
   type, and possibly other things in future).
*/
void*
Perl_get_arena(pTHX_ size_t arena_size, U32 misc)
{
    dVAR;
    struct arena_desc* adesc;
    struct arena_set *aroot = (struct arena_set*) PL_body_arenas;
    unsigned int curr;

    /* shouldnt need this
    if (!arena_size)	arena_size = PERL_ARENA_SIZE;
    */

    /* may need new arena-set to hold new arena */
    if (!aroot || aroot->curr >= aroot->set_size) {
	struct arena_set *newroot;
	Newxz(newroot, 1, struct arena_set);
	newroot->set_size = ARENAS_PER_SET;
	newroot->next = aroot;
	aroot = newroot;
	PL_body_arenas = (void *) newroot;
	DEBUG_m(PerlIO_printf(Perl_debug_log, "new arenaset %p\n", (void*)aroot));
    }

    /* ok, now have arena-set with at least 1 empty/available arena-desc */
    curr = aroot->curr++;
    adesc = &(aroot->set[curr]);
    assert(!adesc->arena);

    Newx(adesc->arena, arena_size, char);
    adesc->size = arena_size;
    adesc->misc = misc;
    DEBUG_m(PerlIO_printf(Perl_debug_log, "arena %d added: %p size %"UVuf"\n",
			  curr, (void*)adesc->arena, (UV)arena_size));

    return adesc->arena;
}
/* return a thing to the free list */
#define del_body(thing, root) \
STMT_START { \
void ** const thing_copy = (void **)thing;\
*thing_copy = *root; \
*root = (void*)thing_copy; \
} STMT_END
/*
=head1 SV-Body Allocation
Allocation of SV-bodies is similar to SV-heads, differing as follows;
the allocation mechanism is used for many body types, so is somewhat
more complicated, it uses arena-sets, and has no need for still-live
SV detection.
At the outermost level, (new|del)_X*V macros return bodies of the
appropriate type. These macros call either (new|del)_body_type or
(new|del)_body_allocated macro pairs, depending on specifics of the
type. Most body types use the former pair, the latter pair is used to
allocate body types with "ghost fields".
"ghost fields" are fields that are unused in certain types, and
consequently don't need to actually exist. They are declared because
they're part of a "base type", which allows use of functions as
methods. The simplest examples are AVs and HVs, 2 aggregate types
which don't use the fields which support SCALAR semantics.
For these types, the arenas are carved up into appropriately sized
chunks, we thus avoid wasted memory for those unaccessed members.
When bodies are allocated, we adjust the pointer back in memory by the
size of the part not allocated, so it's as if we allocated the full
structure. (But things will all go boom if you write to the part that
is "not there", because you'll be overwriting the last members of the
preceding structure in memory.)
We calculate the correction using the STRUCT_OFFSET macro on the first
member present. If the allocated structure is smaller (no initial NV
actually allocated) then the net effect is to subtract the size of the NV
from the pointer, to return a new pointer as if an initial NV were actually
allocated. (We were using structures named *_allocated for this, but
this turned out to be a subtle bug, because a structure without an NV
could have a lower alignment constraint, but the compiler is allowed to
optimised accesses based on the alignment constraint of the actual pointer
to the full structure, for example, using a single 64 bit load instruction
because it "knows" that two adjacent 32 bit members will be 8-byte aligned.)
This is the same trick as was used for NV and IV bodies. Ironically it
doesn't need to be used for NV bodies any more, because NV is now at
the start of the structure. IV bodies don't need it either, because
they are no longer allocated.
In turn, the new_body_* allocators call S_new_body(), which invokes
new_body_inline macro, which takes a lock, and takes a body off the
linked list at PL_body_roots[sv_type], calling S_more_bodies() if
necessary to refresh an empty list. Then the lock is released, and
the body is returned.
S_more_bodies calls get_arena(), and carves it up into an array of N
bodies, which it strings into a linked list. It looks up arena-size
and body-size from the body_details table described below, thus
supporting the multiple body-types.
If PURIFY is defined, or PERL_ARENA_SIZE=0, arenas are not used, and
the (new|del)_X*V macros are mapped directly to malloc/free.
*/
/*
For each sv-type, struct body_details bodies_by_type[] carries
parameters which control these aspects of SV handling:
Arena_size determines whether arenas are used for this body type, and if
so, how big they are. PURIFY or PERL_ARENA_SIZE=0 set this field to
zero, forcing individual mallocs and frees.
Body_size determines how big a body is, and therefore how many fit into
each arena. Offset carries the body-pointer adjustment needed for
"ghost fields", and is used in *_allocated macros.
But its main purpose is to parameterize info needed in
Perl_sv_upgrade(). The info here dramatically simplifies the function
vs the implementation in 5.8.8, making it table-driven. All fields
are used for this, except for arena_size.
For the sv-types that have no bodies, arenas are not used, so those
PL_body_roots[sv_type] are unused, and can be overloaded. In
something of a special case, SVt_NULL is borrowed for HE arenas;
PL_body_roots[HE_SVSLOT=SVt_NULL] is filled by S_more_he, but the
bodies_by_type[SVt_NULL] slot is not used, as the table is not
available in hv.c.
PTEs also use arenas, but are never seen in Perl_sv_upgrade. Nonetheless,
they get their own slot in bodies_by_type[PTE_SVSLOT =SVt_IV], so they can
just use the same allocation semantics. At first, PTEs were also
overloaded to a non-body sv-type, but this yielded hard-to-find malloc
bugs, so was simplified by claiming a new slot. This choice has no
consequence at this time.
*/
struct body_details {
    U8 body_size;	/* Size to allocate  */
    U8 copy;		/* Size of structure to copy (may be shorter)  */
    U8 offset;		/* Body-pointer adjustment for "ghost fields"  */
    unsigned int type : 4;	    /* We have space for a sanity check.  */
    unsigned int cant_upgrade : 1;  /* Cannot upgrade this type */
    unsigned int zero_nv : 1;	    /* zero the NV when upgrading from this */
    unsigned int arena : 1;	    /* Allocated from an arena */
    size_t arena_size;		    /* Size of arena to allocate */
};

/* Values for the zero_nv field: a NONV body has no NV slot, so the NV
   must be zeroed on upgrade; a HADNV body already has one.  */
#define HADNV FALSE
#define NONV TRUE

#ifdef PURIFY
/* With -DPURIFY we allocate everything directly, and don't use arenas.
   This seems a rather elegant way to simplify some of the code below.  */
#define HASARENA FALSE
#else
#define HASARENA TRUE
#endif
#define NOARENA FALSE

/* Size the arenas to exactly fit a given number of bodies.  A count
   of 0 fits the max number of bodies into a PERL_ARENA_SIZE block,
   simplifying the default.  If count > 0, the arena is sized to fit
   only that many bodies, allowing arenas to be used for large, rare
   bodies (XPVFM, XPVIO) without undue waste.  The arena size is
   limited by PERL_ARENA_SIZE, so we can safely oversize the
   declarations.
 */
#define FIT_ARENA0(body_size)				\
    ((size_t)(PERL_ARENA_SIZE / body_size) * body_size)
#define FIT_ARENAn(count,body_size)			\
    ( count * body_size <= PERL_ARENA_SIZE)		\
    ? count * body_size					\
    : FIT_ARENA0 (body_size)
#define FIT_ARENA(count,body_size)			\
    count						\
    ? FIT_ARENAn (count, body_size)			\
    : FIT_ARENA0 (body_size)

/* Calculate the length to copy. Specifically work out the length less any
   final padding the compiler needed to add.  See the comment in sv_upgrade
   for why copying the padding proved to be a bug.  */

#define copy_length(type, last_member) \
	STRUCT_OFFSET(type, last_member) \
	+ sizeof (((type*)SvANY((const SV *)0))->last_member)
/* Per-svtype allocation parameters, indexed by svtype; see the long
 * comment above struct body_details for the meaning of each field.
 * Entry order must match the svtype enumeration. */
static const struct body_details bodies_by_type[] = {
    /* SVt_NULL slot is borrowed for HE arenas (filled by S_more_he). */
    { sizeof(HE), 0, 0, SVt_NULL,
      FALSE, NONV, NOARENA, FIT_ARENA(0, sizeof(HE)) },

    /* The bind placeholder pretends to be an RV for now.
       Also it's marked as "can't upgrade" to stop anyone using it before it's
       implemented.  */
    { 0, 0, 0, SVt_BIND, TRUE, NONV, NOARENA, 0 },

    /* IVs are in the head, so the allocation size is 0.
       However, the slot is overloaded for PTEs.  */
    { sizeof(struct ptr_tbl_ent), /* This is used for PTEs.  */
      sizeof(IV), /* This is used to copy out the IV body.  */
      STRUCT_OFFSET(XPVIV, xiv_iv), SVt_IV, FALSE, NONV,
      NOARENA /* IVS don't need an arena  */,
      /* But PTEs need to know the size of their arena  */
      FIT_ARENA(0, sizeof(struct ptr_tbl_ent))
    },

    /* 8 bytes on most ILP32 with IEEE doubles */
    { sizeof(NV), sizeof(NV), 0, SVt_NV, FALSE, HADNV, HASARENA,
      FIT_ARENA(0, sizeof(NV)) },

    /* RVs are in the head now.  */
    { 0, 0, 0, SVt_RV, FALSE, NONV, NOARENA, 0 },

    /* 8 bytes on most ILP32 with IEEE doubles */
    { sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur),
      copy_length(XPV, xpv_len) - STRUCT_OFFSET(XPV, xpv_cur),
      + STRUCT_OFFSET(XPV, xpv_cur),
      SVt_PV, FALSE, NONV, HASARENA,
      FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },

    /* 12 */
    { sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur),
      copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur),
      + STRUCT_OFFSET(XPVIV, xpv_cur),
      SVt_PVIV, FALSE, NONV, HASARENA,
      FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },

    /* 20 */
    { sizeof(XPVNV), copy_length(XPVNV, xiv_u), 0, SVt_PVNV, FALSE, HADNV,
      HASARENA, FIT_ARENA(0, sizeof(XPVNV)) },

    /* 28 */
    { sizeof(XPVMG), copy_length(XPVMG, xmg_stash), 0, SVt_PVMG, FALSE, HADNV,
      HASARENA, FIT_ARENA(0, sizeof(XPVMG)) },

    /* 48 */
    { sizeof(XPVGV), sizeof(XPVGV), 0, SVt_PVGV, TRUE, HADNV,
      HASARENA, FIT_ARENA(0, sizeof(XPVGV)) },

    /* 64 */
    { sizeof(XPVLV), sizeof(XPVLV), 0, SVt_PVLV, TRUE, HADNV,
      HASARENA, FIT_ARENA(0, sizeof(XPVLV)) },

    { sizeof(XPVAV) - STRUCT_OFFSET(XPVAV, xav_fill),
      copy_length(XPVAV, xmg_stash) - STRUCT_OFFSET(XPVAV, xav_fill),
      + STRUCT_OFFSET(XPVAV, xav_fill),
      SVt_PVAV, TRUE, NONV, HASARENA,
      FIT_ARENA(0, sizeof(XPVAV) - STRUCT_OFFSET(XPVAV, xav_fill)) },

    { sizeof(XPVHV) - STRUCT_OFFSET(XPVHV, xhv_fill),
      copy_length(XPVHV, xmg_stash) - STRUCT_OFFSET(XPVHV, xhv_fill),
      + STRUCT_OFFSET(XPVHV, xhv_fill),
      SVt_PVHV, TRUE, NONV, HASARENA,
      FIT_ARENA(0, sizeof(XPVHV) - STRUCT_OFFSET(XPVHV, xhv_fill)) },

    /* 56 */
    { sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur),
      sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur),
      + STRUCT_OFFSET(XPVCV, xpv_cur),
      SVt_PVCV, TRUE, NONV, HASARENA,
      FIT_ARENA(0, sizeof(XPVCV) - STRUCT_OFFSET(XPVCV, xpv_cur)) },

    { sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur),
      sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur),
      + STRUCT_OFFSET(XPVFM, xpv_cur),
      SVt_PVFM, TRUE, NONV, NOARENA,
      FIT_ARENA(20, sizeof(XPVFM) - STRUCT_OFFSET(XPVFM, xpv_cur)) },

    /* XPVIO is 84 bytes, fits 48x */
    { sizeof(XPVIO), sizeof(XPVIO), 0, SVt_PVIO, TRUE, HADNV,
      HASARENA, FIT_ARENA(24, sizeof(XPVIO)) },
};