/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/* JS Mark-and-Sweep Garbage Collector. */

#include "mozilla/Attributes.h"
#include "mozilla/Util.h"

/*
 * This code implements a mark-and-sweep garbage collector. The mark phase is
 * incremental. Most sweeping is done on a background thread. A GC is divided
 * into slices as follows:
 *
 * Slice 1: Roots pushed onto the mark stack. The mark stack is processed by
 * popping an element, marking it, and pushing its children.
 *          ... JS code runs ...
 * Slice 2: More mark stack processing.
 *          ... JS code runs ...
 * Slice n-1: More mark stack processing.
 *          ... JS code runs ...
 * Slice n: Mark stack is completely drained. Some sweeping is done.
 *          ... JS code runs, remaining sweeping done on background thread ...
 *
 * When background sweeping finishes, the GC is complete.
 *
 * Incremental GC requires close collaboration with the mutator (i.e., JS code):
 *
 * 1. During an incremental GC, if a memory location (except a root) is written
 * to, then the value it previously held must be marked. Write barriers ensure
 * this.
 * 2. Any object that is allocated during incremental GC must start out marked.
 * 3. Roots are special memory locations that don't need write
 * barriers. However, they must be marked in the first slice. Roots are things
 * like the C stack and the VM stack, since it would be too expensive to put
 * barriers on them.
 */
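
/*
 * Illustrative sketch of point 1 above. This is not engine code:
 * BarrieredSet is a hypothetical helper, and the marking call stands in for
 * the engine's actual barrier plumbing; only needsBarrier() is the real
 * predicate used elsewhere in this file.
 *
 *     void
 *     BarrieredSet(JSCompartment *comp, Value *slot, const Value &next)
 *     {
 *         // During an incremental GC the old value must be marked before
 *         // it is overwritten, or the collector could miss it.
 *         if (comp->needsBarrier())
 *             MarkValue(comp->barrierTracer(), *slot, "write barrier");
 *         *slot = next;
 *     }
 */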

#include <math.h>
#include <string.h>     /* for memset used when DEBUG */

#include "jstypes.h"
#include "jsutil.h"
#include "jshash.h"
#include "jsclist.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jscompartment.h"
#include "jscrashreport.h"
#include "jscrashformat.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsdbgapi.h"
#include "jsexn.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcmark.h"
#include "jsinterp.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
#include "jsprobes.h"
#include "jsproxy.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jswatchpoint.h"
#include "jsweakmap.h"
#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "frontend/Parser.h"
#include "gc/Memory.h"
#include "methodjit/MethodJIT.h"
#include "vm/Debugger.h"
#include "vm/String.h"

#include "jsinterpinlines.h"
#include "jsobjinlines.h"

#include "vm/ScopeObject-inl.h"
#include "vm/String-inl.h"

#ifdef MOZ_VALGRIND
# define JS_VALGRIND
#endif
#ifdef JS_VALGRIND
# include <valgrind/memcheck.h>
#endif

#ifdef XP_WIN
# include "jswin.h"
#else
# include <unistd.h>
#endif

using namespace mozilla;
using namespace js;
using namespace js::gc;

namespace js {
namespace gc {

/*
 * Lower limit after which we limit the growth of the heap.
 */
const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;

/*
 * A GC is triggered once the number of newly allocated arenas reaches
 * GC_HEAP_GROWTH_FACTOR times the number of live arenas left after the last
 * GC, once the heap passes the GC_ALLOCATION_THRESHOLD lower limit. This
 * factor is used for non-incremental GCs.
 */
const float GC_HEAP_GROWTH_FACTOR = 3.0f;

/* Perform a full GC every 20 seconds if MaybeGC is called. */
static const uint64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;

#ifdef JS_GC_ZEAL
static void
StartVerifyBarriers(JSContext *cx);

static void
EndVerifyBarriers(JSContext *cx);

void
FinishVerifier(JSRuntime *rt);
#endif

/* This array should be const, but that doesn't link right under GCC. */
AllocKind slotsToThingKind[] = {
    /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
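
/*
 * For example, an object with 5 fixed slots maps through slotsToThingKind[5]
 * to FINALIZE_OBJECT8: requested slot counts are rounded up to the next
 * supported object allocation kind.
 */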

const uint32_t Arena::ThingSizes[] = {
    sizeof(JSObject),           /* FINALIZE_OBJECT0             */
    sizeof(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
    sizeof(JSScript),           /* FINALIZE_SCRIPT              */
    sizeof(Shape),              /* FINALIZE_SHAPE               */
    sizeof(BaseShape),          /* FINALIZE_BASE_SHAPE          */
    sizeof(types::TypeObject),  /* FINALIZE_TYPE_OBJECT         */
#if JS_HAS_XML_SUPPORT
    sizeof(JSXML),              /* FINALIZE_XML                 */
#endif
    sizeof(JSShortString),      /* FINALIZE_SHORT_STRING        */
    sizeof(JSString),           /* FINALIZE_STRING              */
    sizeof(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
};

#define OFFSET(type) uint32_t(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))

const uint32_t Arena::FirstThingOffsets[] = {
    OFFSET(JSObject),           /* FINALIZE_OBJECT0             */
    OFFSET(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
    OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
    OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
    OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
    OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
    OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
    OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
    OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
    OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
    OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
    OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
    OFFSET(JSScript),           /* FINALIZE_SCRIPT              */
    OFFSET(Shape),              /* FINALIZE_SHAPE               */
    OFFSET(BaseShape),          /* FINALIZE_BASE_SHAPE          */
    OFFSET(types::TypeObject),  /* FINALIZE_TYPE_OBJECT         */
#if JS_HAS_XML_SUPPORT
    OFFSET(JSXML),              /* FINALIZE_XML                 */
#endif
    OFFSET(JSShortString),      /* FINALIZE_SHORT_STRING        */
    OFFSET(JSString),           /* FINALIZE_STRING              */
    OFFSET(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
};

#undef OFFSET
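
/*
 * A worked example of OFFSET, with assumed (not actual) sizes: if
 * ArenaSize == 4096, sizeof(ArenaHeader) == 32, and sizeof(type) == 24, then
 * (4096 - 32) % 24 == 8, so the first thing starts at offset 32 + 8 == 40 and
 * the remaining (4096 - 40) / 24 == 169 things of 24 bytes exactly fill the
 * arena. The modulo term pushes any leftover space to the front so that
 * things are aligned against the end of the arena.
 */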

class GCCompartmentsIter {
  private:
    JSCompartment **it, **end;

  public:
    GCCompartmentsIter(JSRuntime *rt) {
        if (rt->gcCurrentCompartment) {
            it = &rt->gcCurrentCompartment;
            end = &rt->gcCurrentCompartment + 1;
        } else {
            it = rt->compartments.begin();
            end = rt->compartments.end();
        }
    }

    bool done() const { return it == end; }

    void next() {
        JS_ASSERT(!done());
        it++;
    }

    JSCompartment *get() const {
        JS_ASSERT(!done());
        return *it;
    }

    operator JSCompartment *() const { return get(); }
    JSCompartment *operator->() const { return get(); }
};
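
/*
 * Typical usage (illustrative; ProcessCompartment is a hypothetical helper):
 * the iterator visits only the single compartment during a compartmental GC
 * and every compartment otherwise, and the conversion operators above let it
 * be passed wherever a JSCompartment * is expected.
 *
 *     for (GCCompartmentsIter c(rt); !c.done(); c.next())
 *         ProcessCompartment(c);
 */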

#ifdef DEBUG
void
ArenaHeader::checkSynchronizedWithFreeList() const
{
    /*
     * Do not allow access to the free list when its real head is still stored
     * in FreeLists and is not synchronized with this one.
     */
    JS_ASSERT(allocated());

    /*
     * We can be called from the background finalization thread when the free
     * list in the compartment can mutate at any moment. We cannot do any
     * checks in this case.
     */
    if (!compartment->rt->gcRunning)
        return;

    FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
    if (firstSpan.isEmpty())
        return;
    const FreeSpan *list = compartment->arenas.getFreeList(getAllocKind());
    if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
        return;

    /*
     * Here this arena has free things, FreeList::lists[thingKind] is not
     * empty and also points to this arena. Thus they must be the same.
     */
    JS_ASSERT(firstSpan.isSameNonEmptySpan(list));
}
#endif

/* static */ void
Arena::staticAsserts()
{
    JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize);
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(ThingSizes) == FINALIZE_LIMIT);
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(FirstThingOffsets) == FINALIZE_LIMIT);
}

template<typename T>
inline bool
Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background)
{
    /* Enforce requirements on size of T. */
    JS_ASSERT(thingSize % Cell::CellSize == 0);
    JS_ASSERT(thingSize <= 255);

    JS_ASSERT(aheader.allocated());
    JS_ASSERT(thingKind == aheader.getAllocKind());
    JS_ASSERT(thingSize == aheader.getThingSize());
    JS_ASSERT(!aheader.hasDelayedMarking);
    JS_ASSERT(!aheader.markOverflow);
    JS_ASSERT(!aheader.allocatedDuringIncremental);

    uintptr_t thing = thingsStart(thingKind);
    uintptr_t lastByte = thingsEnd() - 1;

    FreeSpan nextFree(aheader.getFirstFreeSpan());
    nextFree.checkSpan();

    FreeSpan newListHead;
    FreeSpan *newListTail = &newListHead;
    uintptr_t newFreeSpanStart = 0;
    bool allClear = true;
    DebugOnly<size_t> nmarked = 0;
    for (;; thing += thingSize) {
        JS_ASSERT(thing <= lastByte + 1);
        if (thing == nextFree.first) {
            JS_ASSERT(nextFree.last <= lastByte);
            if (nextFree.last == lastByte)
                break;
            JS_ASSERT(Arena::isAligned(nextFree.last, thingSize));
            if (!newFreeSpanStart)
                newFreeSpanStart = thing;
            thing = nextFree.last;
            nextFree = *nextFree.nextSpan();
            nextFree.checkSpan();
        } else {
            T *t = reinterpret_cast<T *>(thing);
            if (t->isMarked()) {
                allClear = false;
                nmarked++;
                if (newFreeSpanStart) {
                    JS_ASSERT(thing >= thingsStart(thingKind) + thingSize);
                    newListTail->first = newFreeSpanStart;
                    newListTail->last = thing - thingSize;
                    newListTail = newListTail->nextSpanUnchecked(thingSize);
                    newFreeSpanStart = 0;
                }
            } else {
                if (!newFreeSpanStart)
                    newFreeSpanStart = thing;
                t->finalize(cx, background);
                JS_POISON(t, JS_FREE_PATTERN, thingSize);
            }
        }
    }

    if (allClear) {
        JS_ASSERT(newListTail == &newListHead);
        JS_ASSERT(newFreeSpanStart == thingsStart(thingKind));
        return true;
    }

    newListTail->first = newFreeSpanStart ? newFreeSpanStart : nextFree.first;
    JS_ASSERT(Arena::isAligned(newListTail->first, thingSize));
    newListTail->last = lastByte;

#ifdef DEBUG
    size_t nfree = 0;
    for (const FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
        span->checkSpan();
        JS_ASSERT(Arena::isAligned(span->first, thingSize));
        JS_ASSERT(Arena::isAligned(span->last, thingSize));
        nfree += (span->last - span->first) / thingSize + 1;
        JS_ASSERT(nfree + nmarked <= thingsPerArena(thingSize));
    }
    nfree += (newListTail->last + 1 - newListTail->first) / thingSize;
    JS_ASSERT(nfree + nmarked == thingsPerArena(thingSize));
#endif
    aheader.setFirstFreeSpan(&newListHead);

    return false;
}
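
/*
 * To illustrate the sweep above with a made-up arena holding five things
 * T0..T4: if only T1 and T3 are marked, finalize() finalizes and poisons T0,
 * T2, and T4 and rebuilds the free list as three spans, [T0, T0], [T2, T2],
 * and [T4, lastByte], with the final span extending to the end of the arena.
 * If nothing is marked, the method instead returns true and the caller
 * releases the whole arena.
 */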

template<typename T>
inline void
FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
{
    /*
     * Release empty arenas and move non-full arenas with some free things
     * into a separate list that we append to al after the loop to ensure
     * that any arena before al->cursor is full.
     */
    JS_ASSERT_IF(!al->head, al->cursor == &al->head);
    ArenaLists::ArenaList available;
    ArenaHeader **ap = &al->head;
    size_t thingSize = Arena::thingSize(thingKind);
    while (ArenaHeader *aheader = *ap) {
        bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize, background);
        if (allClear) {
            *ap = aheader->next;
            aheader->chunk()->releaseArena(aheader);
        } else if (aheader->hasFreeThings()) {
            *ap = aheader->next;
            *available.cursor = aheader;
            available.cursor = &aheader->next;
        } else {
            ap = &aheader->next;
        }
    }

    /* Terminate the available list and append it to al. */
    *available.cursor = NULL;
    *ap = available.head;
    al->cursor = ap;
    JS_ASSERT_IF(!al->head, al->cursor == &al->head);
}

/*
 * Finalize the list. On return al->cursor points to the first non-empty arena
 * after al->head.
 */
static void
FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        FinalizeTypedArenas<JSObject>(cx, al, thingKind, background);
        break;
      case FINALIZE_SCRIPT:
        FinalizeTypedArenas<JSScript>(cx, al, thingKind, background);
        break;
      case FINALIZE_SHAPE:
        FinalizeTypedArenas<Shape>(cx, al, thingKind, background);
        break;
      case FINALIZE_BASE_SHAPE:
        FinalizeTypedArenas<BaseShape>(cx, al, thingKind, background);
        break;
      case FINALIZE_TYPE_OBJECT:
        FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind, background);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        FinalizeTypedArenas<JSXML>(cx, al, thingKind, background);
        break;
#endif
      case FINALIZE_STRING:
        FinalizeTypedArenas<JSString>(cx, al, thingKind, background);
        break;
      case FINALIZE_SHORT_STRING:
        FinalizeTypedArenas<JSShortString>(cx, al, thingKind, background);
        break;
      case FINALIZE_EXTERNAL_STRING:
        FinalizeTypedArenas<JSExternalString>(cx, al, thingKind, background);
        break;
    }
}

static inline Chunk *
AllocChunk() {
    return static_cast<Chunk *>(MapAlignedPages(ChunkSize, ChunkSize));
}

static inline void
FreeChunk(Chunk *p) {
    UnmapPages(static_cast<void *>(p), ChunkSize);
}

#ifdef JS_THREADSAFE
inline bool
ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const
{
    /*
     * To minimize memory waste we do not want to run the background chunk
     * allocation if we have empty chunks or when the runtime needs just a few
     * of them.
     */
    return rt->gcHelperThread.canBackgroundAllocate() &&
           emptyCount == 0 &&
           rt->gcChunkSet.count() >= 4;
}
#endif

/* Must be called with the GC lock taken. */
inline Chunk *
ChunkPool::get(JSRuntime *rt)
{
    JS_ASSERT(this == &rt->gcChunkPool);

    Chunk *chunk = emptyChunkListHead;
    if (chunk) {
        JS_ASSERT(emptyCount);
        emptyChunkListHead = chunk->info.next;
        --emptyCount;
    } else {
        JS_ASSERT(!emptyCount);
        chunk = Chunk::allocate(rt);
        if (!chunk)
            return NULL;
        JS_ASSERT(chunk->info.numArenasFreeCommitted == ArenasPerChunk);
        rt->gcNumArenasFreeCommitted += ArenasPerChunk;
    }
    JS_ASSERT(chunk->unused());
    JS_ASSERT(!rt->gcChunkSet.has(chunk));

#ifdef JS_THREADSAFE
    if (wantBackgroundAllocation(rt))
        rt->gcHelperThread.startBackgroundAllocationIfIdle();
#endif

    return chunk;
}

/* Must be called either during the GC or with the GC lock taken. */
inline void
ChunkPool::put(Chunk *chunk)
{
    chunk->info.age = 0;
    chunk->info.next = emptyChunkListHead;
    emptyChunkListHead = chunk;
    emptyCount++;
}

/* Must be called either during the GC or with the GC lock taken. */
Chunk *
ChunkPool::expire(JSRuntime *rt, bool releaseAll)
{
    JS_ASSERT(this == &rt->gcChunkPool);

    /*
     * Return old empty chunks to the system while preserving the order of
     * other chunks in the list. This way, if the GC runs several times
     * without emptying the list, the older chunks will stay at the tail
     * and are more likely to reach the max age.
     */
    Chunk *freeList = NULL;
    for (Chunk **chunkp = &emptyChunkListHead; *chunkp; ) {
        JS_ASSERT(emptyCount);
        Chunk *chunk = *chunkp;
        JS_ASSERT(chunk->unused());
        JS_ASSERT(!rt->gcChunkSet.has(chunk));
        JS_ASSERT(chunk->info.age <= MAX_EMPTY_CHUNK_AGE);
        if (releaseAll || chunk->info.age == MAX_EMPTY_CHUNK_AGE) {
            *chunkp = chunk->info.next;
            --emptyCount;
            chunk->prepareToBeFreed(rt);
            chunk->info.next = freeList;
            freeList = chunk;
        } else {
            /* Keep the chunk but increase its age. */
            ++chunk->info.age;
            chunkp = &chunk->info.next;
        }
    }
    JS_ASSERT_IF(releaseAll, !emptyCount);
    return freeList;
}

static void
FreeChunkList(Chunk *chunkListHead)
{
    while (Chunk *chunk = chunkListHead) {
        JS_ASSERT(!chunk->info.numArenasFreeCommitted);
        chunkListHead = chunk->info.next;
        FreeChunk(chunk);
    }
}

void
ChunkPool::expireAndFree(JSRuntime *rt, bool releaseAll)
{
    FreeChunkList(expire(rt, releaseAll));
}

JS_FRIEND_API(int64_t)
ChunkPool::countCleanDecommittedArenas(JSRuntime *rt)
{
    JS_ASSERT(this == &rt->gcChunkPool);

    int64_t numDecommitted = 0;
    Chunk *chunk = emptyChunkListHead;
    while (chunk) {
        for (uint32_t i = 0; i < ArenasPerChunk; ++i)
            if (chunk->decommittedArenas.get(i))
                ++numDecommitted;
        chunk = chunk->info.next;
    }
    return numDecommitted;
}

/* static */ Chunk *
Chunk::allocate(JSRuntime *rt)
{
    Chunk *chunk = static_cast<Chunk *>(AllocChunk());
    if (!chunk)
        return NULL;
    chunk->init();
    rt->gcStats.count(gcstats::STAT_NEW_CHUNK);
    return chunk;
}

/* Must be called with the GC lock taken. */
/* static */ inline void
Chunk::release(JSRuntime *rt, Chunk *chunk)
{
    JS_ASSERT(chunk);
    chunk->prepareToBeFreed(rt);
    FreeChunk(chunk);
}

inline void
Chunk::prepareToBeFreed(JSRuntime *rt)
{
    JS_ASSERT(rt->gcNumArenasFreeCommitted >= info.numArenasFreeCommitted);
    rt->gcNumArenasFreeCommitted -= info.numArenasFreeCommitted;
    rt->gcStats.count(gcstats::STAT_DESTROY_CHUNK);

#ifdef DEBUG
    /*
     * Let FreeChunkList detect a missing prepareToBeFreed call before it
     * frees the chunk.
     */
    info.numArenasFreeCommitted = 0;
#endif
}

void
Chunk::init()
{
    JS_POISON(this, JS_FREE_PATTERN, ChunkSize);

    /*
     * We clear the bitmap to guard against xpc_IsGrayGCThing being called on
     * uninitialized data, which would happen before the first GC cycle.
     */
    bitmap.clear();

    /* Initialize the arena tracking bitmap. */
    decommittedArenas.clear(false);

    /* Initialize the chunk info. */
    info.freeArenasHead = &arenas[0].aheader;
    info.lastDecommittedArenaOffset = 0;
    info.numArenasFree = ArenasPerChunk;
    info.numArenasFreeCommitted = ArenasPerChunk;
    info.age = 0;

    /* Initialize the arena header state. */
    for (unsigned i = 0; i < ArenasPerChunk; i++) {
        arenas[i].aheader.setAsNotAllocated();
        arenas[i].aheader.next = (i + 1 < ArenasPerChunk)
                                 ? &arenas[i + 1].aheader
                                 : NULL;
    }

    /* The rest of the info fields are initialized in PickChunk. */
}

inline Chunk **
GetAvailableChunkList(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    return comp->isSystemCompartment
           ? &rt->gcSystemAvailableChunkListHead
           : &rt->gcUserAvailableChunkListHead;
}

inline void
Chunk::addToAvailableList(JSCompartment *comp)
{
    insertToAvailableList(GetAvailableChunkList(comp));
}

inline void
Chunk::insertToAvailableList(Chunk **insertPoint)
{
    JS_ASSERT(hasAvailableArenas());
    JS_ASSERT(!info.prevp);
    JS_ASSERT(!info.next);
    info.prevp = insertPoint;
    Chunk *insertBefore = *insertPoint;
    if (insertBefore) {
        JS_ASSERT(insertBefore->info.prevp == insertPoint);
        insertBefore->info.prevp = &info.next;
    }
    info.next = insertBefore;
    *insertPoint = this;
}

inline void
Chunk::removeFromAvailableList()
{
    JS_ASSERT(info.prevp);
    *info.prevp = info.next;
    if (info.next) {
        JS_ASSERT(info.next->info.prevp == &info.next);
        info.next->info.prevp = info.prevp;
    }
    info.prevp = NULL;
    info.next = NULL;
}

/*
 * Search for and return the next decommitted Arena. Our goal is to keep
 * lastDecommittedArenaOffset "close" to a free arena. We do this by setting
 * it to the most recently freed arena when we free, and forcing it to
 * the last alloc + 1 when we allocate.
 */
uint32_t
Chunk::findDecommittedArenaOffset()
{
    /* Note: lastDecommittedArenaOffset can be past the end of the list. */
    for (unsigned i = info.lastDecommittedArenaOffset; i < ArenasPerChunk; i++)
        if (decommittedArenas.get(i))
            return i;
    for (unsigned i = 0; i < info.lastDecommittedArenaOffset; i++)
        if (decommittedArenas.get(i))
            return i;
    JS_NOT_REACHED("No decommitted arenas found.");
    return -1;
}

ArenaHeader *
Chunk::fetchNextDecommittedArena()
{
    JS_ASSERT(info.numArenasFreeCommitted == 0);
    JS_ASSERT(info.numArenasFree > 0);

    unsigned offset = findDecommittedArenaOffset();
    info.lastDecommittedArenaOffset = offset + 1;
    --info.numArenasFree;
    decommittedArenas.unset(offset);

    Arena *arena = &arenas[offset];
    MarkPagesInUse(arena, ArenaSize);
    arena->aheader.setAsNotAllocated();

    return &arena->aheader;
}

inline ArenaHeader *
Chunk::fetchNextFreeArena(JSRuntime *rt)
{
    JS_ASSERT(info.numArenasFreeCommitted > 0);
    JS_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree);
    JS_ASSERT(info.numArenasFreeCommitted <= rt->gcNumArenasFreeCommitted);

    ArenaHeader *aheader = info.freeArenasHead;
    info.freeArenasHead = aheader->next;
    --info.numArenasFreeCommitted;
    --info.numArenasFree;
    --rt->gcNumArenasFreeCommitted;

    return aheader;
}

ArenaHeader *
Chunk::allocateArena(JSCompartment *comp, AllocKind thingKind)
{
    JS_ASSERT(hasAvailableArenas());

    JSRuntime *rt = comp->rt;
    JS_ASSERT(rt->gcBytes <= rt->gcMaxBytes);
    if (rt->gcMaxBytes - rt->gcBytes < ArenaSize)
        return NULL;

    ArenaHeader *aheader = JS_LIKELY(info.numArenasFreeCommitted > 0)
                           ? fetchNextFreeArena(rt)
                           : fetchNextDecommittedArena();
    aheader->init(comp, thingKind);
    if (JS_UNLIKELY(!hasAvailableArenas()))
        removeFromAvailableList();

    Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes + ArenaSize);
    rt->gcBytes += ArenaSize;
    comp->gcBytes += ArenaSize;
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp, gcreason::ALLOC_TRIGGER);

    return aheader;
}

inline void
Chunk::addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader)
{
    JS_ASSERT(!aheader->allocated());
    aheader->next = info.freeArenasHead;
    info.freeArenasHead = aheader;
    ++info.numArenasFreeCommitted;
    ++info.numArenasFree;
    ++rt->gcNumArenasFreeCommitted;
}

void
Chunk::releaseArena(ArenaHeader *aheader)
{
    JS_ASSERT(aheader->allocated());
    JS_ASSERT(!aheader->hasDelayedMarking);
    JSCompartment *comp = aheader->compartment;
    JSRuntime *rt = comp->rt;
#ifdef JS_THREADSAFE
    AutoLockGC maybeLock;
    if (rt->gcHelperThread.sweeping())
        maybeLock.lock(rt);
#endif

    Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
    JS_ASSERT(rt->gcBytes >= ArenaSize);
    JS_ASSERT(comp->gcBytes >= ArenaSize);
#ifdef JS_THREADSAFE
    if (rt->gcHelperThread.sweeping())
        comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
#endif
    rt->gcBytes -= ArenaSize;
    comp->gcBytes -= ArenaSize;

    aheader->setAsNotAllocated();
    addArenaToFreeList(rt, aheader);

    if (info.numArenasFree == 1) {
        JS_ASSERT(!info.prevp);
        JS_ASSERT(!info.next);
        addToAvailableList(comp);
    } else if (!unused()) {
        JS_ASSERT(info.prevp);
    } else {
        rt->gcChunkSet.remove(this);
        removeFromAvailableList();
        rt->gcChunkPool.put(this);
    }
}
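
/*
 * To summarize the three outcomes of releaseArena above: a chunk that just
 * gained its first free arena goes back on the available list; a chunk that
 * still holds other allocated arenas stays where it is; and a chunk whose
 * last arena was released is removed from the chunk set and the available
 * list and handed to the empty-chunk pool, from which ChunkPool::expire may
 * later return it to the system.
 */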

} /* namespace gc */
} /* namespace js */

/* The caller must hold the GC lock. */
static Chunk *
PickChunk(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    Chunk **listHeadp = GetAvailableChunkList(comp);
    Chunk *chunk = *listHeadp;
    if (chunk)
        return chunk;

    chunk = rt->gcChunkPool.get(rt);
    if (!chunk)
        return NULL;

    rt->gcChunkAllocationSinceLastGC = true;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        Chunk::release(rt, chunk);
        return NULL;
    }

    chunk->info.prevp = NULL;
    chunk->info.next = NULL;
    chunk->addToAvailableList(comp);

    return chunk;
}

JS_FRIEND_API(bool)
IsAboutToBeFinalized(const Cell *thing)
{
    JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
    JSRuntime *rt = thingCompartment->rt;
    if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
        return false;
    return !reinterpret_cast<const Cell *>(thing)->isMarked();
}

bool
IsAboutToBeFinalized(const Value &v)
{
    JS_ASSERT(v.isMarkable());
    return IsAboutToBeFinalized((Cell *)v.toGCThing());
}

/* Lifetime for type sets attached to scripts containing observed types. */
static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;

JSBool
js_InitGC(JSRuntime *rt, uint32_t maxbytes)
{
    if (!rt->gcChunkSet.init(INITIAL_CHUNK_CAPACITY))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    rt->gcLock = PR_NewLock();
    if (!rt->gcLock)
        return false;
    if (!rt->gcHelperThread.init())
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
    return true;
}

namespace js {

inline bool
InFreeList(ArenaHeader *aheader, uintptr_t addr)
{
    if (!aheader->hasFreeThings())
        return false;

    FreeSpan firstSpan(aheader->getFirstFreeSpan());

    for (const FreeSpan *span = &firstSpan;;) {
        /* If the thing comes before the current span, it's not free. */
        if (addr < span->first)
            return false;

        /*
         * If we find it inside the span, it's dead. We use "<=" here rather
         * than "<" even for the last span, as we know that the thing is
         * inside the arena. Thus for the last span, thing < span->end.
         */
        if (addr <= span->last)
            return true;

        /*
         * The last possible empty span is at the end of the arena. Here
         * span->end < thing < thingsEnd, so we must have more spans.
         */
        span = span->nextSpan();
    }
}
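
/*
 * An illustrative walk through InFreeList with made-up addresses: given free
 * spans [0x100, 0x118] and [0x160, 0x178] (first..last), a query for 0x90
 * returns false at the first span (it precedes span->first), a query for
 * 0x110 returns true inside the first span, and a query for 0x140 skips past
 * the first span and returns false at the second.
 */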

enum ConservativeGCTest
{
    CGCT_VALID,
    CGCT_LOWBITSET, /* excluded because one of the low bits was set */
    CGCT_NOTARENA,  /* not within arena range in a chunk */
    CGCT_OTHERCOMPARTMENT,  /* in another compartment */
    CGCT_NOTCHUNK,  /* not within a valid chunk */
    CGCT_FREEARENA, /* within arena containing only free things */
    CGCT_NOTLIVE,   /* gcthing is not allocated */
    CGCT_END
};

/*
 * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
 * details about the thing if so. On failure, returns the reason for rejection.
 */
inline ConservativeGCTest
IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                     gc::AllocKind *thingKindPtr, ArenaHeader **arenaHeader, void **thing)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    uintptr_t addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    /* If the arena is not currently allocated, don't access the header. */
    size_t arenaOffset = Chunk::arenaIndex(addr);
    if (chunk->decommittedArenas.get(arenaOffset))
        return CGCT_FREEARENA;

    ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;

    if (!aheader->allocated())
        return CGCT_FREEARENA;

    JSCompartment *curComp = rt->gcCurrentCompartment;
    if (curComp && curComp != aheader->compartment)
        return CGCT_OTHERCOMPARTMENT;

    AllocKind thingKind = aheader->getAllocKind();
    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
    addr -= shift;

    if (thing)
        *thing = reinterpret_cast<void *>(addr);
    if (arenaHeader)
        *arenaHeader = aheader;
    if (thingKindPtr)
        *thingKindPtr = thingKind;
    return CGCT_VALID;
}
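
/*
 * For example (with made-up values), the stack word 0xdeadbeef has both low
 * bits set and is rejected immediately as CGCT_LOWBITSET, while a word whose
 * masked address lands in a chunk in rt->gcChunkSet proceeds through the
 * arena and alignment checks; if it points into the middle of a 32-byte
 * thing, the final modulo step rounds it down to the thing's start before it
 * is reported as CGCT_VALID.
 */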

/*
 * Returns CGCT_VALID and marks the thing if w can be a live GC thing, setting
 * thingKind accordingly. Otherwise returns the reason for rejection.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
{
    void *thing;
    ArenaHeader *aheader;
    AllocKind thingKind;
    ConservativeGCTest status = IsAddressableGCThing(trc->runtime, w, &thingKind, &aheader, &thing);
    if (status != CGCT_VALID)
        return status;

    /*
     * Check if the thing is free. We must use the list of free spans as at
     * this point we no longer have the mark bits from the previous GC run and
     * we must account for newly allocated things.
     */
    if (InFreeList(aheader, uintptr_t(thing)))
        return CGCT_NOTLIVE;

    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
#ifdef DEBUG
    const char pattern[] = "machine_stack %p";
    char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
    JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
    JS_SET_TRACING_NAME(trc, nameBuf);
#endif
    MarkKind(trc, thing, traceKind);

#ifdef DEBUG
    if (trc->runtime->gcIncrementalState == MARK_ROOTS)
        trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind));
#endif

    return CGCT_VALID;
}

static void
MarkWordConservatively(JSTracer *trc, uintptr_t w)
{
    /*
     * The conservative scanner may access words that valgrind considers
     * undefined. To avoid false positives and to avoid altering valgrind's
     * view of the memory, we mark the argument, a copy of the original word,
     * as memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
#endif

    MarkIfGCThingWord(trc, w);
}

static void
MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
{
    JS_ASSERT(begin <= end);
    for (const uintptr_t *i = begin; i < end; ++i)
        MarkWordConservatively(trc, *i);
}

static JS_NEVER_INLINE void
MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime;

#ifdef DEBUG
    if (useSavedRoots) {
        for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin();
             root != rt->gcSavedRoots.end();
             root++)
        {
            JS_SET_TRACING_NAME(trc, "cstack");
            MarkKind(trc, root->thing, root->kind);
        }
        return;
    }

    if (rt->gcIncrementalState == MARK_ROOTS)
        rt->gcSavedRoots.clearAndFree();
#endif

    ConservativeGCData *cgcd = &rt->conservativeGC;
    if (!cgcd->hasStackToScan()) {
#ifdef JS_THREADSAFE
        JS_ASSERT(!rt->suspendCount);
        JS_ASSERT(rt->requestDepth <= cgcd->requestThreshold);
#endif
        return;
    }

    uintptr_t *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = rt->nativeStackBase;
    stackEnd = cgcd->nativeStackTop;
#else
    stackMin = cgcd->nativeStackTop + 1;
    stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
#endif

    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
                            ArrayEnd(cgcd->registerSnapshot.words));
}

void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    /*
     * Normally, the drainMarkStack phase of marking will never trace outside
     * of the compartment currently being collected. However, conservative
     * scanning during drainMarkStack (as is done for generators) can break
     * this invariant. So we disable the compartment assertions in this
     * situation.
     */
    struct AutoSkipChecking {
        JSRuntime *runtime;
        JSCompartment *savedCompartment;

        AutoSkipChecking(JSRuntime *rt)
          : runtime(rt), savedCompartment(rt->gcCheckCompartment) {
            rt->gcCheckCompartment = NULL;
        }
        ~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
    } as(trc->runtime);

    const uintptr_t *begin = beginv->payloadWord();
    const uintptr_t *end = endv->payloadWord();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}

JS_NEVER_INLINE void
ConservativeGCData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    uintptr_t dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}

static void
RecordNativeStackTopForGC(JSRuntime *rt)
{
    ConservativeGCData *cgcd = &rt->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(rt->requestDepth >= cgcd->requestThreshold);
    if (rt->requestDepth == cgcd->requestThreshold)
        return;
#endif
    cgcd->recordStackTop();
}

} /* namespace js */

bool
js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, gc::AllocKind *thingKind, void **thing)
{
    return js::IsAddressableGCThing(rt, w, thingKind, NULL, thing) == CGCT_VALID;
}

#ifdef DEBUG
static void
CheckLeakedRoots(JSRuntime *rt);
#endif

void
js_FinishGC(JSRuntime *rt)
{
    /*
     * Wait until the background finalization stops and the helper thread
     * shuts down before we forcefully release any remaining GC memory.
     */
#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish();
#endif

#ifdef JS_GC_ZEAL
    /* Free memory associated with GC verification. */
    FinishVerifier(rt);
#endif

    /* Delete all remaining Compartments. */
    for (CompartmentsIter c(rt); !c.done(); c.next())
        Foreground::delete_(c.get());
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    rt->gcSystemAvailableChunkListHead = NULL;
    rt->gcUserAvailableChunkListHead = NULL;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        Chunk::release(rt, r.front());
    rt->gcChunkSet.clear();

    rt->gcChunkPool.expireAndFree(rt, true);

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}

JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, vp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}

JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}

JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}

typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;

#ifdef DEBUG

static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32_t leakedroots = 0;

    /* In debug builds, warn (but don't assert) about any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
                    "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
                    "                   This root may point to freed memory. Objects reachable\n"
                    "                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
                    "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
                    "                   These roots may point to freed memory. Objects reachable\n"
                    "                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}

void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}

#endif /* DEBUG */

uint32_t
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    int ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        int mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}

static size_t
ComputeTriggerBytes(size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
{
    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
    float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
    return size_t(Min(float(maxBytes), trigger));
}

void
JSCompartment::setGCLastBytes(size_t lastBytes, size_t lastMallocBytes, JSGCInvocationKind gckind)
{
    gcTriggerBytes = ComputeTriggerBytes(lastBytes, rt->gcMaxBytes, gckind);
    gcTriggerMallocAndFreeBytes = ComputeTriggerBytes(lastMallocBytes, SIZE_MAX, gckind);
}

void
JSCompartment::reduceGCTriggerBytes(size_t amount)
{
    JS_ASSERT(amount > 0);
    /* Written as ">=" because "gcTriggerBytes - amount >= 0" is vacuous for unsigned types. */
    JS_ASSERT(gcTriggerBytes >= amount);
    if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
        return;
    gcTriggerBytes -= amount;
}

namespace js {
namespace gc {

inline void
ArenaLists::prepareForIncrementalGC(JSRuntime *rt)
{
    for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
        FreeSpan *headSpan = &freeLists[i];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            aheader->allocatedDuringIncremental = true;
            rt->gcMarker.delayMarkingArena(aheader);
        }
    }
}

inline void *
ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind)
{
    Chunk *chunk = NULL;

    ArenaList *al = &arenaLists[thingKind];
    AutoLockGC maybeLock;

#ifdef JS_THREADSAFE
    volatile uintptr_t *bfs = &backgroundFinalizeState[thingKind];
    if (*bfs != BFS_DONE) {
        /*
         * We cannot search the arena list for free things while the
         * background finalization runs and can modify head or cursor at any
         * moment. So we always allocate a new arena in that case.
         */
        maybeLock.lock(comp->rt);
        if (*bfs == BFS_RUN) {
            JS_ASSERT(!*al->cursor);
            chunk = PickChunk(comp);
            if (!chunk) {
                /*
                 * Let the caller wait for the background allocation to
                 * finish and restart the allocation attempt.
                 */
                return NULL;
            }
        } else if (*bfs == BFS_JUST_FINISHED) {
            /* See comments before BackgroundFinalizeState definition. */
            *bfs = BFS_DONE;
        } else {
            JS_ASSERT(*bfs == BFS_DONE);
        }
    }
#endif /* JS_THREADSAFE */

    if (!chunk) {
        if (ArenaHeader *aheader = *al->cursor) {
            JS_ASSERT(aheader->hasFreeThings());

            /*
             * Empty arenas are returned to the chunk and should not be
             * present on the list.
             */
            JS_ASSERT(!aheader->isEmpty());
            al->cursor = &aheader->next;

            /*
             * Move the free span stored in the arena to the free list and
             * allocate from it.
             */
            freeLists[thingKind] = aheader->getFirstFreeSpan();
            aheader->setAsFullyUsed();
            if (JS_UNLIKELY(comp->needsBarrier())) {
                aheader->allocatedDuringIncremental = true;
                comp->rt->gcMarker.delayMarkingArena(aheader);
            }
            return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
        }

        /* Make sure we hold the GC lock before we call PickChunk. */
        if (!maybeLock.locked())
            maybeLock.lock(comp->rt);
        chunk = PickChunk(comp);
        if (!chunk)
            return NULL;
    }

    /*
     * While we still hold the GC lock, get an arena from some chunk, mark it
     * as full as its single free span is moved to the free lists, and insert
     * it into the list as a fully allocated arena.
     *
     * We add the arena before the head, not after the tail pointed to by the
     * cursor, so that after the GC the most recently added arena will be used
     * first for allocations, improving cache locality.
     */
    JS_ASSERT(!*al->cursor);
    ArenaHeader *aheader = chunk->allocateArena(comp, thingKind);
    if (!aheader)
        return NULL;

    if (JS_UNLIKELY(comp->needsBarrier())) {
        aheader->allocatedDuringIncremental = true;
        comp->rt->gcMarker.delayMarkingArena(aheader);
    }
    aheader->next = al->head;
    if (!al->head) {
        JS_ASSERT(al->cursor == &al->head);
        al->cursor = &aheader->next;
    }
    al->head = aheader;

    /* See comments before allocateFromNewArena about this assert. */
    JS_ASSERT(!aheader->hasFreeThings());
    uintptr_t arenaAddr = aheader->arenaAddress();
    return freeLists[thingKind].allocateFromNewArena(arenaAddr,
                                                     Arena::firstThingOffset(thingKind),
                                                     Arena::thingSize(thingKind));
}

void
ArenaLists::finalizeNow(JSContext *cx, AllocKind thingKind)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
#endif
    FinalizeArenas(cx, &arenaLists[thingKind], thingKind, false);
}

inline void
ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
{
    JS_ASSERT(thingKind == FINALIZE_OBJECT0_BACKGROUND ||
              thingKind == FINALIZE_OBJECT2_BACKGROUND ||
              thingKind == FINALIZE_OBJECT4_BACKGROUND ||
              thingKind == FINALIZE_OBJECT8_BACKGROUND ||
              thingKind == FINALIZE_OBJECT12_BACKGROUND ||
              thingKind == FINALIZE_OBJECT16_BACKGROUND ||
              thingKind == FINALIZE_SHORT_STRING ||
              thingKind == FINALIZE_STRING);

#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->runtime->gcHelperThread.sweeping());

    ArenaList *al = &arenaLists[thingKind];
    if (!al->head) {
        JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
        JS_ASSERT(al->cursor == &al->head);
        return;
    }

    /*
     * The state can be just-finished if we have not allocated any GC things
     * from the arena list after the previous background finalization.
     */
    JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE ||
              backgroundFinalizeState[thingKind] == BFS_JUST_FINISHED);

    if (cx->gcBackgroundFree) {
        /*
         * To ensure the finalization order even during the background GC we
         * must use infallibleAppend so arenas scheduled for background
         * finalization would not be finalized now if the append fails.
         */
        cx->gcBackgroundFree->finalizeVector.infallibleAppend(al->head);
        al->clear();
        backgroundFinalizeState[thingKind] = BFS_RUN;
    } else {
        FinalizeArenas(cx, al, thingKind, false);
        backgroundFinalizeState[thingKind] = BFS_DONE;
    }

#else /* !JS_THREADSAFE */

    finalizeNow(cx, thingKind);

#endif
}
1600 :
1601 : #ifdef JS_THREADSAFE
1602 : /*static*/ void
1603 236513 : ArenaLists::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
1604 : {
1605 236513 : JS_ASSERT(listHead);
1606 236513 : AllocKind thingKind = listHead->getAllocKind();
1607 236513 : JSCompartment *comp = listHead->compartment;
1608 236513 : ArenaList finalized;
1609 236513 : finalized.head = listHead;
1610 236513 : FinalizeArenas(cx, &finalized, thingKind, true);
1611 :
1612 : /*
1613 :  * After we finish the finalization, al->cursor must point to the end of
1614 :  * the head list: we emptied the list before the background finalization,
1615 :  * and allocation adds new arenas before the cursor.
1616 : */
1617 236513 : ArenaLists *lists = &comp->arenas;
1618 236513 : ArenaList *al = &lists->arenaLists[thingKind];
1619 :
1620 473026 : AutoLockGC lock(cx->runtime);
1621 236513 : JS_ASSERT(lists->backgroundFinalizeState[thingKind] == BFS_RUN);
1622 236513 : JS_ASSERT(!*al->cursor);
1623 :
1624 : /*
1625 :  * We must set the state to BFS_JUST_FINISHED if we touch the arena list,
1626 : * even if we add to the list only fully allocated arenas without any free
1627 : * things. It ensures that the allocation thread takes the GC lock and all
1628 : * writes to the free list elements are propagated. As we always take the
1629 : * GC lock when allocating new arenas from the chunks we can set the state
1630 : * to BFS_DONE if we have released all finalized arenas back to their
1631 : * chunks.
1632 : */
1633 236513 : if (finalized.head) {
1634 218783 : *al->cursor = finalized.head;
1635 218783 : if (finalized.cursor != &finalized.head)
1636 61819 : al->cursor = finalized.cursor;
1637 218783 : lists->backgroundFinalizeState[thingKind] = BFS_JUST_FINISHED;
1638 : } else {
1639 17730 : lists->backgroundFinalizeState[thingKind] = BFS_DONE;
1640 : }
1641 236513 : }
1642 : #endif /* JS_THREADSAFE */
1643 :
1644 : void
1645 121869 : ArenaLists::finalizeObjects(JSContext *cx)
1646 : {
1647 121869 : finalizeNow(cx, FINALIZE_OBJECT0);
1648 121869 : finalizeNow(cx, FINALIZE_OBJECT2);
1649 121869 : finalizeNow(cx, FINALIZE_OBJECT4);
1650 121869 : finalizeNow(cx, FINALIZE_OBJECT8);
1651 121869 : finalizeNow(cx, FINALIZE_OBJECT12);
1652 121869 : finalizeNow(cx, FINALIZE_OBJECT16);
1653 :
1654 : #ifdef JS_THREADSAFE
1655 121869 : finalizeLater(cx, FINALIZE_OBJECT0_BACKGROUND);
1656 121869 : finalizeLater(cx, FINALIZE_OBJECT2_BACKGROUND);
1657 121869 : finalizeLater(cx, FINALIZE_OBJECT4_BACKGROUND);
1658 121869 : finalizeLater(cx, FINALIZE_OBJECT8_BACKGROUND);
1659 121869 : finalizeLater(cx, FINALIZE_OBJECT12_BACKGROUND);
1660 121869 : finalizeLater(cx, FINALIZE_OBJECT16_BACKGROUND);
1661 : #endif
1662 :
1663 : #if JS_HAS_XML_SUPPORT
1664 121869 : finalizeNow(cx, FINALIZE_XML);
1665 : #endif
1666 121869 : }
1667 :
1668 : void
1669 121869 : ArenaLists::finalizeStrings(JSContext *cx)
1670 : {
1671 121869 : finalizeLater(cx, FINALIZE_SHORT_STRING);
1672 121869 : finalizeLater(cx, FINALIZE_STRING);
1673 :
1674 121869 : finalizeNow(cx, FINALIZE_EXTERNAL_STRING);
1675 121869 : }
1676 :
1677 : void
1678 121869 : ArenaLists::finalizeShapes(JSContext *cx)
1679 : {
1680 121869 : finalizeNow(cx, FINALIZE_SHAPE);
1681 121869 : finalizeNow(cx, FINALIZE_BASE_SHAPE);
1682 121869 : finalizeNow(cx, FINALIZE_TYPE_OBJECT);
1683 121869 : }
1684 :
1685 : void
1686 121869 : ArenaLists::finalizeScripts(JSContext *cx)
1687 : {
1688 121869 : finalizeNow(cx, FINALIZE_SCRIPT);
1689 121869 : }
1690 :
1691 : static void
1692 9043 : RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
1693 : {
1694 9043 : JSRuntime *rt = cx->runtime;
1695 :
1696 : /* The last ditch GC preserves all atoms. */
1697 18086 : AutoKeepAtoms keep(rt);
1698 9043 : GC(cx, rt->gcTriggerCompartment, GC_NORMAL, reason);
1699 9043 : }
1700 :
1701 : /* static */ void *
1702 2340684 : ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
1703 : {
1704 2340684 : JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
1705 :
1706 2340684 : JSCompartment *comp = cx->compartment;
1707 2340684 : JSRuntime *rt = comp->rt;
1708 2340684 : JS_ASSERT(!rt->gcRunning);
1709 :
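     : /*
     :  * We run the last-ditch GC at most once per call: either immediately,
     :  * because the compartment is already over its trigger while an
     :  * incremental GC is in progress, or after the first allocation failure.
     :  * If allocation still fails after the GC, we report OOM below.
     :  */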
1710 2340684 : bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
1711 10 : for (;;) {
1712 2340694 : if (JS_UNLIKELY(runGC)) {
1713 10 : RunLastDitchGC(cx, gcreason::LAST_DITCH);
1714 :
1715 : /*
1716 : * The JSGC_END callback can legitimately allocate new GC
1717 : * things and populate the free list. If that happens, just
1718 : * return that list head.
1719 : */
1720 10 : size_t thingSize = Arena::thingSize(thingKind);
1721 10 : if (void *thing = comp->arenas.allocateFromFreeList(thingKind, thingSize))
1722 0 : return thing;
1723 : }
1724 :
1725 : /*
1726 :  * allocateFromArena may fail while background finalization is still
1727 :  * running. In that case we want to wait for it to finish and restart.
1728 :  * However, checking for that is racy: background finalization could
1729 :  * free some things after allocateFromArena decided to fail, yet by the
1730 :  * time we check it may have already stopped. To avoid this race we
1731 :  * always try to allocate twice.
1732 : */
1733 2340714 : for (bool secondAttempt = false; ; secondAttempt = true) {
1734 2340714 : void *thing = comp->arenas.allocateFromArena(comp, thingKind);
1735 2340714 : if (JS_LIKELY(!!thing))
1736 2340674 : return thing;
1737 40 : if (secondAttempt)
1738 : break;
1739 :
1740 40 : AutoLockGC lock(rt);
1741 : #ifdef JS_THREADSAFE
1742 20 : rt->gcHelperThread.waitBackgroundSweepEnd();
1743 : #endif
1744 : }
1745 :
1746 : /*
1747 : * We failed to allocate. Run the GC if we haven't done it already.
1748 : * Otherwise report OOM.
1749 : */
1750 20 : if (runGC)
1751 : break;
1752 10 : runGC = true;
1753 : }
1754 :
1755 10 : js_ReportOutOfMemory(cx);
1756 10 : return NULL;
1757 : }
1758 :
1759 : } /* namespace gc */
1760 : } /* namespace js */
1761 :
1762 : JSGCTraceKind
1763 3404448 : js_GetGCThingTraceKind(void *thing)
1764 : {
1765 3404448 : return GetGCThingTraceKind(thing);
1766 : }
1767 :
1768 : JSBool
1769 0 : js_LockGCThingRT(JSRuntime *rt, void *thing)
1770 : {
1771 0 : if (!thing)
1772 0 : return true;
1773 :
1774 0 : if (GCLocks::Ptr p = rt->gcLocksHash.lookupWithDefault(thing, 0)) {
1775 0 : p->value++;
1776 0 : return true;
1777 : }
1778 :
1779 0 : return false;
1780 : }
1781 :
1782 : void
1783 0 : js_UnlockGCThingRT(JSRuntime *rt, void *thing)
1784 : {
1785 0 : if (!thing)
1786 0 : return;
1787 :
1788 0 : if (GCLocks::Ptr p = rt->gcLocksHash.lookup(thing)) {
1789 0 : rt->gcPoke = true;
1790 0 : if (--p->value == 0)
1791 0 : rt->gcLocksHash.remove(p);
1792 : }
1793 : }
1794 :
1795 : namespace js {
1796 :
1797 : void
1798 911614 : InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback)
1799 : {
1800 911614 : trc->runtime = rt;
1801 911614 : trc->callback = callback;
1802 911614 : trc->debugPrinter = NULL;
1803 911614 : trc->debugPrintArg = NULL;
1804 911614 : trc->debugPrintIndex = size_t(-1);
1805 911614 : trc->eagerlyTraceWeakMaps = true;
1806 911614 : }
1807 :
1808 : /* static */ int64_t
1809 50 : SliceBudget::TimeBudget(int64_t millis)
1810 : {
1811 50 : return millis * PRMJ_USEC_PER_MSEC;
1812 : }
1813 :
1814 : /* static */ int64_t
1815 0 : SliceBudget::WorkBudget(int64_t work)
1816 : {
1817 0 : return -work;
1818 : }
1819 :
1820 101970 : SliceBudget::SliceBudget()
1821 : : deadline(INT64_MAX),
1822 101970 : counter(INTPTR_MAX)
1823 : {
1824 101970 : }
1825 :
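     : /*
     :  * Sign convention assumed by this constructor: TimeBudget above encodes
     :  * a time budget as a positive microsecond count, while WorkBudget
     :  * encodes a work budget as a negative value, so the two cases can be
     :  * distinguished here.
     :  */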
1826 0 : SliceBudget::SliceBudget(int64_t budget)
1827 : {
1828 0 : if (budget == Unlimited) {
1829 0 : deadline = INT64_MAX;
1830 0 : counter = INTPTR_MAX;
1831 0 : } else if (budget > 0) {
1832 0 : deadline = PRMJ_Now() + budget;
1833 0 : counter = CounterReset;
1834 : } else {
1835 0 : deadline = 0;
1836 0 : counter = -budget;
1837 : }
1838 0 : }
1839 :
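     : /*
     :  * If the time deadline has not passed yet, refill the work counter so
     :  * the relatively expensive PRMJ_Now() call is amortized over
     :  * CounterReset units of work.
     :  */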
1840 : bool
1841 0 : SliceBudget::checkOverBudget()
1842 : {
1843 0 : bool over = PRMJ_Now() > deadline;
1844 0 : if (!over)
1845 0 : counter = CounterReset;
1846 0 : return over;
1847 : }
1848 :
1849 19869 : GCMarker::GCMarker()
1850 : : stack(size_t(-1)),
1851 : color(BLACK),
1852 : started(false),
1853 : unmarkedArenaStackTop(NULL),
1854 : markLaterArenas(0),
1855 19869 : grayFailed(false)
1856 : {
1857 19869 : }
1858 :
1859 : bool
1860 19869 : GCMarker::init()
1861 : {
1862 19869 : return stack.init(MARK_STACK_LENGTH);
1863 : }
1864 :
1865 : void
1866 52324 : GCMarker::start(JSRuntime *rt)
1867 : {
1868 52324 : InitTracer(this, rt, NULL);
1869 52324 : JS_ASSERT(!started);
1870 52324 : started = true;
1871 52324 : color = BLACK;
1872 :
1873 52324 : JS_ASSERT(!unmarkedArenaStackTop);
1874 52324 : JS_ASSERT(markLaterArenas == 0);
1875 :
1876 52324 : JS_ASSERT(grayRoots.empty());
1877 52324 : JS_ASSERT(!grayFailed);
1878 :
1879 : /*
1880 : * The GC is recomputing the liveness of WeakMap entries, so we delay
1881 :  * visiting entries.
1882 : */
1883 52324 : eagerlyTraceWeakMaps = JS_FALSE;
1884 52324 : }
1885 :
1886 : void
1887 52224 : GCMarker::stop()
1888 : {
1889 52224 : JS_ASSERT(isDrained());
1890 :
1891 52224 : JS_ASSERT(started);
1892 52224 : started = false;
1893 :
1894 52224 : JS_ASSERT(!unmarkedArenaStackTop);
1895 52224 : JS_ASSERT(markLaterArenas == 0);
1896 :
1897 52224 : JS_ASSERT(grayRoots.empty());
1898 52224 : grayFailed = false;
1899 :
1900 : /* Free non-ballast stack memory. */
1901 52224 : stack.reset();
1902 52224 : grayRoots.clearAndFree();
1903 52224 : }
1904 :
1905 : void
1906 1399 : GCMarker::reset()
1907 : {
1908 1399 : color = BLACK;
1909 :
1910 1399 : stack.reset();
1911 1399 : JS_ASSERT(isMarkStackEmpty());
1912 :
1913 54808 : while (unmarkedArenaStackTop) {
1914 52010 : ArenaHeader *aheader = unmarkedArenaStackTop;
1915 52010 : JS_ASSERT(aheader->hasDelayedMarking);
1916 52010 : JS_ASSERT(markLaterArenas);
1917 52010 : unmarkedArenaStackTop = aheader->getNextDelayedMarking();
1918 52010 : aheader->hasDelayedMarking = 0;
1919 52010 : aheader->markOverflow = 0;
1920 52010 : aheader->allocatedDuringIncremental = 0;
1921 52010 : markLaterArenas--;
1922 : }
1923 1399 : JS_ASSERT(isDrained());
1924 1399 : JS_ASSERT(!markLaterArenas);
1925 :
1926 1399 : grayRoots.clearAndFree();
1927 1399 : grayFailed = false;
1928 1399 : }
1929 :
1930 : /*
1931 : * When the native stack is low, the GC does not call JS_TraceChildren to mark
1932 :  * the reachable "children" of the thing. Rather, the thing is put aside and
1933 : * JS_TraceChildren is called later with more space on the C stack.
1934 : *
1935 : * To implement such delayed marking of the children with minimal overhead for
1936 : * the normal case of sufficient native stack, the code adds a field per
1937 :  * arena, accessed via setNextDelayedMarking. This field links all arenas
1938 :  * with delayed things into a stack whose top pointer is kept in
1939 : * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
1940 : * arenas to the stack as necessary while markDelayedChildren pops the arenas
1941 : * from the stack until it empties.
1942 : */
1943 :
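     : /*
     :  * An informal sketch of the resulting intrusive stack, with links stored
     :  * on each ArenaHeader via setNextDelayedMarking/getNextDelayedMarking:
     :  *
     :  *   unmarkedArenaStackTop -> [arena] -> [arena] -> ... -> NULL
     :  */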
1944 : inline void
1945 52010 : GCMarker::delayMarkingArena(ArenaHeader *aheader)
1946 : {
1947 52010 : if (aheader->hasDelayedMarking) {
1948 : /* Arena already scheduled to be marked later */
1949 0 : return;
1950 : }
1951 52010 : aheader->setNextDelayedMarking(unmarkedArenaStackTop);
1952 52010 : unmarkedArenaStackTop = aheader;
1953 52010 : markLaterArenas++;
1954 : }
1955 :
1956 : void
1957 0 : GCMarker::delayMarkingChildren(const void *thing)
1958 : {
1959 0 : const Cell *cell = reinterpret_cast<const Cell *>(thing);
1960 0 : cell->arenaHeader()->markOverflow = 1;
1961 0 : delayMarkingArena(cell->arenaHeader());
1962 0 : }
1963 :
1964 : void
1965 0 : GCMarker::markDelayedChildren(ArenaHeader *aheader)
1966 : {
1967 0 : if (aheader->markOverflow) {
1968 0 : bool always = aheader->allocatedDuringIncremental;
1969 0 : aheader->markOverflow = 0;
1970 :
1971 0 : for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
1972 0 : Cell *t = i.getCell();
1973 0 : if (always || t->isMarked()) {
1974 0 : t->markIfUnmarked();
1975 0 : JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
1976 : }
1977 : }
1978 : } else {
1979 0 : JS_ASSERT(aheader->allocatedDuringIncremental);
1980 0 : PushArena(this, aheader);
1981 : }
1982 0 : aheader->allocatedDuringIncremental = 0;
1983 0 : }
1984 :
1985 : bool
1986 0 : GCMarker::markDelayedChildren(SliceBudget &budget)
1987 : {
1988 0 : gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_MARK_DELAYED);
1989 :
1990 0 : JS_ASSERT(unmarkedArenaStackTop);
1991 0 : do {
1992 : /*
1993 : * If marking gets delayed at the same arena again, we must repeat
1994 :  * marking of its things. For that we pop the arena from the stack and
1995 : * clear its hasDelayedMarking flag before we begin the marking.
1996 : */
1997 0 : ArenaHeader *aheader = unmarkedArenaStackTop;
1998 0 : JS_ASSERT(aheader->hasDelayedMarking);
1999 0 : JS_ASSERT(markLaterArenas);
2000 0 : unmarkedArenaStackTop = aheader->getNextDelayedMarking();
2001 0 : aheader->hasDelayedMarking = 0;
2002 0 : markLaterArenas--;
2003 0 : markDelayedChildren(aheader);
2004 :
2005 0 : if (budget.checkOverBudget())
2006 0 : return false;
2007 : } while (unmarkedArenaStackTop);
2008 0 : JS_ASSERT(!markLaterArenas);
2009 :
2010 0 : return true;
2011 : }
2012 :
2013 : #ifdef DEBUG
2014 : void
2015 65448709 : GCMarker::checkCompartment(void *p)
2016 : {
2017 65448709 : JS_ASSERT(started);
2018 :
2019 65448709 : Cell *cell = static_cast<Cell *>(p);
2020 65448709 : if (runtime->gcRunning && runtime->gcCurrentCompartment)
2021 123066 : JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment);
2022 65325643 : else if (runtime->gcIncrementalCompartment)
2023 0 : JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment);
2024 65448709 : }
2025 : #endif
2026 :
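     : /*
     :  * Gray roots are buffered during root marking; buffering fails only when
     :  * appendGrayRoot below cannot grow the grayRoots vector, which sets
     :  * grayFailed.
     :  */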
2027 : bool
2028 50825 : GCMarker::hasBufferedGrayRoots() const
2029 : {
2030 50825 : return !grayFailed;
2031 : }
2032 :
2033 : void
2034 14509 : GCMarker::startBufferingGrayRoots()
2035 : {
2036 14509 : JS_ASSERT(!callback);
2037 14509 : callback = GrayCallback;
2038 14509 : JS_ASSERT(IS_GC_MARKING_TRACER(this));
2039 14509 : }
2040 :
2041 : void
2042 14509 : GCMarker::endBufferingGrayRoots()
2043 : {
2044 14509 : JS_ASSERT(callback == GrayCallback);
2045 14509 : callback = NULL;
2046 14509 : JS_ASSERT(IS_GC_MARKING_TRACER(this));
2047 14509 : }
2048 :
2049 : void
2050 50825 : GCMarker::markBufferedGrayRoots()
2051 : {
2052 50825 : JS_ASSERT(!grayFailed);
2053 :
2054 246363 : for (GrayRoot *elem = grayRoots.begin(); elem != grayRoots.end(); elem++) {
2055 : #ifdef DEBUG
2056 195538 : debugPrinter = elem->debugPrinter;
2057 195538 : debugPrintArg = elem->debugPrintArg;
2058 195538 : debugPrintIndex = elem->debugPrintIndex;
2059 : #endif
2060 195538 : MarkKind(this, elem->thing, elem->kind);
2061 : }
2062 :
2063 50825 : grayRoots.clearAndFree();
2064 50825 : }
2065 :
2066 : void
2067 195538 : GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
2068 : {
2069 195538 : JS_ASSERT(started);
2070 :
2071 195538 : if (grayFailed)
2072 0 : return;
2073 :
2074 195538 : GrayRoot root(thing, kind);
2075 : #ifdef DEBUG
2076 195538 : root.debugPrinter = debugPrinter;
2077 195538 : root.debugPrintArg = debugPrintArg;
2078 195538 : root.debugPrintIndex = debugPrintIndex;
2079 : #endif
2080 :
2081 195538 : if (!grayRoots.append(root)) {
2082 0 : grayRoots.clearAndFree();
2083 0 : grayFailed = true;
2084 : }
2085 : }
2086 :
2087 : void
2088 195538 : GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
2089 : {
2090 195538 : GCMarker *gcmarker = static_cast<GCMarker *>(trc);
2091 195538 : gcmarker->appendGrayRoot(*thingp, kind);
2092 195538 : }
2093 :
2094 : size_t
2095 3 : GCMarker::sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const
2096 : {
2097 3 : return stack.sizeOfExcludingThis(mallocSizeOf) +
2098 3 : grayRoots.sizeOfExcludingThis(mallocSizeOf);
2099 : }
2100 :
2101 : void
2102 0 : SetMarkStackLimit(JSRuntime *rt, size_t limit)
2103 : {
2104 0 : JS_ASSERT(!rt->gcRunning);
2105 0 : rt->gcMarker.setSizeLimit(limit);
2106 0 : }
2107 :
2108 : } /* namespace js */
2109 :
2110 : #ifdef DEBUG
2111 : static void
2112 134957 : EmptyMarkCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
2113 : {
2114 134957 : }
2115 : #endif
2116 :
2117 : static void
2118 135329 : gc_root_traversal(JSTracer *trc, const RootEntry &entry)
2119 : {
2120 : #ifdef DEBUG
2121 : void *ptr;
2122 135329 : if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
2123 135081 : ptr = *reinterpret_cast<void **>(entry.key);
2124 : } else {
2125 248 : Value *vp = reinterpret_cast<Value *>(entry.key);
2126 248 : ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
2127 : }
2128 :
2129 135329 : if (ptr && !trc->runtime->gcCurrentCompartment) {
2130 : /*
2131 : * Use conservative machinery to find if ptr is a valid GC thing.
2132 : * We only do this during global GCs, to preserve the invariant
2133 : * that mark callbacks are not in place during compartment GCs.
2134 : */
2135 : JSTracer checker;
2136 134957 : JS_TracerInit(&checker, trc->runtime, EmptyMarkCallback);
2137 134957 : ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<uintptr_t>(ptr));
2138 134957 : if (test != CGCT_VALID && entry.value.name) {
2139 : fprintf(stderr,
2140 : "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
2141 : "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
2142 : "The root's name is \"%s\".\n",
2143 0 : entry.value.name);
2144 : }
2145 134957 : JS_ASSERT(test == CGCT_VALID);
2146 : }
2147 : #endif
2148 135329 : const char *name = entry.value.name ? entry.value.name : "root";
2149 135329 : if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
2150 135081 : MarkGCThingRoot(trc, *reinterpret_cast<void **>(entry.key), name);
2151 : else
2152 248 : MarkValueRoot(trc, reinterpret_cast<Value *>(entry.key), name);
2153 135329 : }
2154 :
2155 : static void
2156 0 : gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
2157 : {
2158 0 : JS_ASSERT(entry.value >= 1);
2159 0 : MarkGCThingRoot(trc, entry.key, "locked object");
2160 0 : }
2161 :
2162 : namespace js {
2163 :
2164 : void
2165 0 : MarkCompartmentActive(StackFrame *fp)
2166 : {
2167 0 : if (fp->isScriptFrame())
2168 0 : fp->script()->compartment()->active = true;
2169 0 : }
2170 :
2171 : } /* namespace js */
2172 :
2173 : void
2174 : AutoIdArray::trace(JSTracer *trc)
2175 : {
2176 : JS_ASSERT(tag == IDARRAY);
2177 : gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
2178 : }
2179 :
2180 : void
2181 0 : AutoEnumStateRooter::trace(JSTracer *trc)
2182 : {
2183 0 : gc::MarkObjectRoot(trc, &obj, "JS::AutoEnumStateRooter.obj");
2184 0 : }
2185 :
2186 : inline void
2187 21720 : AutoGCRooter::trace(JSTracer *trc)
2188 : {
2189 21720 : switch (tag) {
2190 : case JSVAL:
2191 1583 : MarkValueRoot(trc, &static_cast<AutoValueRooter *>(this)->val, "JS::AutoValueRooter.val");
2192 1583 : return;
2193 :
2194 : case PARSER:
2195 9 : static_cast<Parser *>(this)->trace(trc);
2196 9 : return;
2197 :
2198 : case ENUMERATOR:
2199 0 : static_cast<AutoEnumStateRooter *>(this)->trace(trc);
2200 0 : return;
2201 :
2202 : case IDARRAY: {
2203 0 : JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
2204 0 : MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
2205 0 : return;
2206 : }
2207 :
2208 : case DESCRIPTORS: {
2209 : PropDescArray &descriptors =
2210 9 : static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
2211 18 : for (size_t i = 0, len = descriptors.length(); i < len; i++) {
2212 9 : PropDesc &desc = descriptors[i];
2213 9 : MarkValueRoot(trc, &desc.pd, "PropDesc::pd");
2214 9 : MarkValueRoot(trc, &desc.value, "PropDesc::value");
2215 9 : MarkValueRoot(trc, &desc.get, "PropDesc::get");
2216 9 : MarkValueRoot(trc, &desc.set, "PropDesc::set");
2217 : }
2218 9 : return;
2219 : }
2220 :
2221 : case DESCRIPTOR : {
2222 9 : PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
2223 9 : if (desc.obj)
2224 9 : MarkObjectRoot(trc, &desc.obj, "Descriptor::obj");
2225 9 : MarkValueRoot(trc, &desc.value, "Descriptor::value");
2226 9 : if ((desc.attrs & JSPROP_GETTER) && desc.getter) {
2227 0 : JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.getter);
2228 0 : MarkObjectRoot(trc, &tmp, "Descriptor::get");
2229 0 : desc.getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
2230 : }
2231 9 : if (desc.attrs & JSPROP_SETTER && desc.setter) {
2232 0 : JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.setter);
2233 0 : MarkObjectRoot(trc, &tmp, "Descriptor::set");
2234 0 : desc.setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
2235 : }
2236 9 : return;
2237 : }
2238 :
2239 : case NAMESPACES: {
2240 0 : JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
2241 0 : MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
2242 0 : js_XMLArrayCursorTrace(trc, array.cursors);
2243 0 : return;
2244 : }
2245 :
2246 : case XML:
2247 0 : js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
2248 0 : return;
2249 :
2250 : case OBJECT:
2251 10201 : if (static_cast<AutoObjectRooter *>(this)->obj)
2252 : MarkObjectRoot(trc, &static_cast<AutoObjectRooter *>(this)->obj,
2253 10201 : "JS::AutoObjectRooter.obj");
2254 10201 : return;
2255 :
2256 : case ID:
2257 0 : MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
2258 0 : return;
2259 :
2260 : case VALVECTOR: {
2261 6486 : AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
2262 6486 : MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
2263 6486 : return;
2264 : }
2265 :
2266 : case STRING:
2267 647 : if (static_cast<AutoStringRooter *>(this)->str)
2268 : MarkStringRoot(trc, &static_cast<AutoStringRooter *>(this)->str,
2269 647 : "JS::AutoStringRooter.str");
2270 647 : return;
2271 :
2272 : case IDVECTOR: {
2273 1597 : AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
2274 1597 : MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
2275 1597 : return;
2276 : }
2277 :
2278 : case SHAPEVECTOR: {
2279 587 : AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
2280 587 : MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
2281 587 : "js::AutoShapeVector.vector");
2282 587 : return;
2283 : }
2284 :
2285 : case OBJVECTOR: {
2286 9 : AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
2287 9 : MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
2288 9 : return;
2289 : }
2290 :
2291 : case VALARRAY: {
2292 0 : AutoValueArray *array = static_cast<AutoValueArray *>(this);
2293 0 : MarkValueRootRange(trc, array->length(), array->start(), "js::AutoValueArray");
2294 0 : return;
2295 : }
2296 :
2297 : case SCRIPTVECTOR: {
2298 0 : AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
2299 0 : for (size_t i = 0; i < vector.length(); i++)
2300 0 : MarkScriptRoot(trc, &vector[i], "AutoScriptVector element");
2301 0 : return;
2302 : }
2303 : }
2304 :
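     : /* A non-negative tag means an AutoArrayRooter; the tag is the array length. */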
2305 583 : JS_ASSERT(tag >= 0);
2306 : MarkValueRootRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array,
2307 583 : "JS::AutoArrayRooter.array");
2308 : }
2309 :
2310 : /* static */ void
2311 55580 : AutoGCRooter::traceAll(JSTracer *trc)
2312 : {
2313 77300 : for (js::AutoGCRooter *gcr = trc->runtime->autoGCRooters; gcr; gcr = gcr->down)
2314 21720 : gcr->trace(trc);
2315 55580 : }
2316 :
2317 : namespace js {
2318 :
2319 : static void
2320 54181 : MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
2321 : {
2322 54181 : JSRuntime *rt = trc->runtime;
2323 54181 : JS_ASSERT(trc->callback != GCMarker::GrayCallback);
2324 54181 : if (rt->gcCurrentCompartment) {
2325 756 : for (CompartmentsIter c(rt); !c.done(); c.next())
2326 540 : c->markCrossCompartmentWrappers(trc);
2327 216 : Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
2328 : }
2329 :
2330 54181 : AutoGCRooter::traceAll(trc);
2331 :
2332 54181 : if (rt->hasContexts())
2333 34113 : MarkConservativeStackRoots(trc, useSavedRoots);
2334 :
2335 189510 : for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
2336 135329 : gc_root_traversal(trc, r.front());
2337 :
2338 54181 : for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
2339 0 : gc_lock_traversal(r.front(), trc);
2340 :
2341 54181 : if (rt->scriptPCCounters) {
2342 0 : ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
2343 0 : for (size_t i = 0; i < vec.length(); i++)
2344 0 : MarkScriptRoot(trc, &vec[i].script, "scriptPCCounters");
2345 : }
2346 :
2347 54181 : js_TraceAtomState(trc);
2348 54181 : rt->staticStrings.trace(trc);
2349 :
2350 108600 : for (ContextIter acx(rt); !acx.done(); acx.next())
2351 54419 : acx->mark(trc);
2352 :
2353 187215 : for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
2354 133034 : if (c->activeAnalysis)
2355 18 : c->markTypes(trc);
2356 :
2357 : /* During a GC, these are treated as weak pointers. */
2358 133034 : if (!IS_GC_MARKING_TRACER(trc)) {
2359 11165 : if (c->watchpointMap)
2360 0 : c->watchpointMap->markAll(trc);
2361 : }
2362 :
2363 : /* Do not discard scripts with counters while profiling. */
2364 133034 : if (rt->profilingScripts) {
2365 0 : for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
2366 0 : JSScript *script = i.get<JSScript>();
2367 0 : if (script->pcCounters) {
2368 0 : MarkScriptRoot(trc, &script, "profilingScripts");
2369 0 : JS_ASSERT(script == i.get<JSScript>());
2370 : }
2371 : }
2372 : }
2373 : }
2374 :
2375 : #ifdef JS_METHODJIT
2376 : /* We need to expand inline frames before stack scanning. */
2377 187539 : for (CompartmentsIter c(rt); !c.done(); c.next())
2378 133358 : mjit::ExpandInlineFrames(c);
2379 : #endif
2380 :
2381 54181 : rt->stackSpace.mark(trc);
2382 :
2383 : /* The embedding can register additional roots here. */
2384 54181 : if (JSTraceDataOp op = rt->gcBlackRootsTraceOp)
2385 14509 : (*op)(trc, rt->gcBlackRootsData);
2386 :
2387 : /* During GC, this buffers up the gray roots and doesn't mark them. */
2388 54181 : if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
2389 14509 : if (IS_GC_MARKING_TRACER(trc)) {
2390 14509 : GCMarker *gcmarker = static_cast<GCMarker *>(trc);
2391 14509 : gcmarker->startBufferingGrayRoots();
2392 14509 : (*op)(trc, rt->gcGrayRootsData);
2393 14509 : gcmarker->endBufferingGrayRoots();
2394 : } else {
2395 0 : (*op)(trc, rt->gcGrayRootsData);
2396 : }
2397 : }
2398 54181 : }
2399 :
2400 : void
2401 175 : TriggerGC(JSRuntime *rt, gcreason::Reason reason)
2402 : {
2403 175 : JS_ASSERT(rt->onOwnerThread());
2404 :
2405 175 : if (rt->gcRunning || rt->gcIsNeeded)
2406 102 : return;
2407 :
2408 : /* Trigger the GC when it is safe to call an operation callback. */
2409 73 : rt->gcIsNeeded = true;
2410 73 : rt->gcTriggerCompartment = NULL;
2411 73 : rt->gcTriggerReason = reason;
2412 73 : rt->triggerOperationCallback();
2413 : }
2414 :
2415 : void
2416 97697 : TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
2417 : {
2418 97697 : JSRuntime *rt = comp->rt;
2419 97697 : JS_ASSERT(!rt->gcRunning);
2420 :
2421 97697 : if (rt->gcZeal() == ZealAllocValue) {
2422 0 : TriggerGC(rt, reason);
2423 0 : return;
2424 : }
2425 :
2426 97697 : if (rt->gcMode == JSGC_MODE_GLOBAL || comp == rt->atomsCompartment) {
2427 : /* We can't do a compartmental GC of the atoms compartment. */
2428 29 : TriggerGC(rt, reason);
2429 29 : return;
2430 : }
2431 :
2432 97668 : if (rt->gcIsNeeded) {
2433 : /* If we need to GC more than one compartment, run a full GC. */
2434 97560 : if (rt->gcTriggerCompartment != comp)
2435 97560 : rt->gcTriggerCompartment = NULL;
2436 97560 : return;
2437 : }
2438 :
2439 : /*
2440 : * Trigger the GC when it is safe to call an operation callback on any
2441 : * thread.
2442 : */
2443 108 : rt->gcIsNeeded = true;
2444 108 : rt->gcTriggerCompartment = comp;
2445 108 : rt->gcTriggerReason = reason;
2446 108 : rt->triggerOperationCallback();
2447 : }
2448 :
2449 : void
2450 0 : MaybeGC(JSContext *cx)
2451 : {
2452 0 : JSRuntime *rt = cx->runtime;
2453 0 : JS_ASSERT(rt->onOwnerThread());
2454 :
2455 0 : if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
2456 0 : GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
2457 0 : return;
2458 : }
2459 :
2460 0 : JSCompartment *comp = cx->compartment;
2461 0 : if (rt->gcIsNeeded) {
2462 : GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL,
2463 0 : GC_NORMAL, gcreason::MAYBEGC);
2464 0 : return;
2465 : }
2466 :
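     : /*
     :  * Start a GC when a non-trivial heap (over 8K) has grown to within 25%
     :  * of its trigger threshold and no incremental GC is in progress.
     :  */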
2467 0 : if (comp->gcBytes > 8192 &&
2468 : comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
2469 : rt->gcIncrementalState == NO_INCREMENTAL)
2470 : {
2471 0 : GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
2472 0 : return;
2473 : }
2474 :
2475 0 : if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
2476 0 : GCSlice(cx, rt->gcMode == JSGC_MODE_GLOBAL ? NULL : comp, GC_NORMAL, gcreason::MAYBEGC);
2477 0 : return;
2478 : }
2479 :
2480 : /*
2481 : * Access to the counters and, on 32 bit, setting gcNextFullGCTime below
2482 : * is not atomic and a race condition could trigger or suppress the GC. We
2483 : * tolerate this.
2484 : */
2485 0 : int64_t now = PRMJ_Now();
2486 0 : if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
2487 0 : if (rt->gcChunkAllocationSinceLastGC ||
2488 : rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
2489 : {
2490 0 : GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
2491 : } else {
2492 0 : rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
2493 : }
2494 : }
2495 : }
2496 :
2497 : static void
2498 18 : DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
2499 : {
2500 18 : Chunk *chunk = *availableListHeadp;
2501 18 : if (!chunk)
2502 0 : return;
2503 :
2504 : /*
2505 : * Decommit is expensive so we avoid holding the GC lock while calling it.
2506 : *
2507 : * We decommit from the tail of the list to minimize interference with the
2508 : * main thread that may start to allocate things at this point.
2509 : *
2510 :  * The arena that is being decommitted outside the GC lock must not be
2511 :  * available for allocations either via the free list or via the
2512 :  * decommittedArenas bitmap. For that we just fetch the arena from the
2513 :  * free list before the decommit, pretending it was allocated. If this
2514 :  * arena is also the single free arena in the chunk, then we must remove
2515 :  * the chunk from the available list before we release the lock so the
2516 :  * allocation thread does not see chunks with no free arenas there.
2517 :  *
2518 :  * After we retake the lock, we mark the arena as free and decommitted if
2519 :  * the decommit was successful. We must also add the chunk back to the
2520 :  * available list if we removed it previously or when the main thread
2521 :  * has allocated all remaining free arenas in the chunk.
2522 : *
2523 : * We also must make sure that the aheader is not accessed again after we
2524 : * decommit the arena.
2525 : */
2526 18 : JS_ASSERT(chunk->info.prevp == availableListHeadp);
2527 18 : while (Chunk *next = chunk->info.next) {
2528 0 : JS_ASSERT(next->info.prevp == &chunk->info.next);
2529 0 : chunk = next;
2530 : }
2531 :
2532 0 : for (;;) {
2533 4176 : while (chunk->info.numArenasFreeCommitted != 0) {
2534 4140 : ArenaHeader *aheader = chunk->fetchNextFreeArena(rt);
2535 :
2536 4140 : Chunk **savedPrevp = chunk->info.prevp;
2537 4140 : if (!chunk->hasAvailableArenas())
2538 0 : chunk->removeFromAvailableList();
2539 :
2540 4140 : size_t arenaIndex = Chunk::arenaIndex(aheader->arenaAddress());
2541 : bool ok;
2542 : {
2543 : /*
2544 : * If the main thread waits for the decommit to finish, skip
2545 : * potentially expensive unlock/lock pair on the contested
2546 : * lock.
2547 : */
2548 8280 : Maybe<AutoUnlockGC> maybeUnlock;
2549 4140 : if (!rt->gcRunning)
2550 4140 : maybeUnlock.construct(rt);
2551 4140 : ok = MarkPagesUnused(aheader->getArena(), ArenaSize);
2552 : }
2553 :
2554 4140 : if (ok) {
2555 4140 : ++chunk->info.numArenasFree;
2556 4140 : chunk->decommittedArenas.set(arenaIndex);
2557 : } else {
2558 0 : chunk->addArenaToFreeList(rt, aheader);
2559 : }
2560 4140 : JS_ASSERT(chunk->hasAvailableArenas());
2561 4140 : JS_ASSERT(!chunk->unused());
2562 4140 : if (chunk->info.numArenasFree == 1) {
2563 : /*
2564 : * Put the chunk back to the available list either at the
2565 : * point where it was before to preserve the available list
2566 : * that we enumerate, or, when the allocation thread has fully
2567 : * used all the previous chunks, at the beginning of the
2568 : * available list.
2569 : */
2570 0 : Chunk **insertPoint = savedPrevp;
2571 0 : if (savedPrevp != availableListHeadp) {
2572 0 : Chunk *prev = Chunk::fromPointerToNext(savedPrevp);
2573 0 : if (!prev->hasAvailableArenas())
2574 0 : insertPoint = availableListHeadp;
2575 : }
2576 0 : chunk->insertToAvailableList(insertPoint);
2577 : } else {
2578 4140 : JS_ASSERT(chunk->info.prevp);
2579 : }
2580 :
2581 4140 : if (rt->gcChunkAllocationSinceLastGC) {
2582 : /*
2583 : * The allocator thread has started to get new chunks. We should stop
2584 :  * to avoid decommitting arenas in newly allocated chunks.
2585 : */
2586 0 : return;
2587 : }
2588 : }
2589 :
2590 : /*
2591 : * chunk->info.prevp becomes null when the allocator thread consumed
2592 : * all chunks from the available list.
2593 : */
2594 18 : JS_ASSERT_IF(chunk->info.prevp, *chunk->info.prevp == chunk);
2595 18 : if (chunk->info.prevp == availableListHeadp || !chunk->info.prevp)
2596 18 : break;
2597 :
2598 : /*
2599 : * prevp exists and is not the list head. It must point to the next
2600 : * field of the previous chunk.
2601 : */
2602 0 : chunk = chunk->getPrevious();
2603 : }
2604 : }
2605 :
2606 : static void
2607 9 : DecommitArenas(JSRuntime *rt)
2608 : {
2609 9 : DecommitArenasFromAvailableList(rt, &rt->gcSystemAvailableChunkListHead);
2610 9 : DecommitArenasFromAvailableList(rt, &rt->gcUserAvailableChunkListHead);
2611 9 : }
2612 :
2613 : /* Must be called with the GC lock taken. */
2614 : static void
2615 30966 : ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink)
2616 : {
2617 30966 : if (Chunk *toFree = rt->gcChunkPool.expire(rt, shouldShrink)) {
2618 76 : AutoUnlockGC unlock(rt);
2619 38 : FreeChunkList(toFree);
2620 : }
2621 :
2622 30966 : if (shouldShrink)
2623 9 : DecommitArenas(rt);
2624 30966 : }
2625 :
2626 : #ifdef JS_THREADSAFE
2627 :
2628 : static unsigned
2629 19869 : GetCPUCount()
2630 : {
2631 : static unsigned ncpus = 0;
2632 19869 : if (ncpus == 0) {
2633 : # ifdef XP_WIN
2634 : SYSTEM_INFO sysinfo;
2635 : GetSystemInfo(&sysinfo);
2636 : ncpus = unsigned(sysinfo.dwNumberOfProcessors);
2637 : # else
2638 19770 : long n = sysconf(_SC_NPROCESSORS_ONLN);
2639 19770 : ncpus = (n > 0) ? unsigned(n) : 1;
2640 : # endif
2641 : }
2642 19869 : return ncpus;
2643 : }
2644 :
2645 : bool
2646 19869 : GCHelperThread::init()
2647 : {
2648 19869 : if (!(wakeup = PR_NewCondVar(rt->gcLock)))
2649 0 : return false;
2650 19869 : if (!(done = PR_NewCondVar(rt->gcLock)))
2651 0 : return false;
2652 :
2653 : thread = PR_CreateThread(PR_USER_THREAD, threadMain, this, PR_PRIORITY_NORMAL,
2654 19869 : PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
2655 19869 : if (!thread)
2656 0 : return false;
2657 :
2658 19869 : backgroundAllocation = (GetCPUCount() >= 2);
2659 19869 : return true;
2660 : }
2661 :
2662 : void
2663 19868 : GCHelperThread::finish()
2664 : {
2665 19868 : PRThread *join = NULL;
2666 : {
2667 39736 : AutoLockGC lock(rt);
2668 19868 : if (thread && state != SHUTDOWN) {
2669 : /*
2670 : * We cannot be in the ALLOCATING or CANCEL_ALLOCATION states as
2671 : * the allocations should have been stopped during the last GC.
2672 : */
2673 19868 : JS_ASSERT(state == IDLE || state == SWEEPING);
2674 19868 : if (state == IDLE)
2675 19868 : PR_NotifyCondVar(wakeup);
2676 19868 : state = SHUTDOWN;
2677 19868 : join = thread;
2678 : }
2679 : }
2680 19868 : if (join) {
2681 : /* PR_DestroyThread is not necessary. */
2682 19868 : PR_JoinThread(join);
2683 : }
2684 19868 : if (wakeup)
2685 19868 : PR_DestroyCondVar(wakeup);
2686 19868 : if (done)
2687 19868 : PR_DestroyCondVar(done);
2688 19868 : }
2689 :
2690 : /* static */
2691 : void
2692 19869 : GCHelperThread::threadMain(void *arg)
2693 : {
2694 19869 : static_cast<GCHelperThread *>(arg)->threadLoop();
2695 19868 : }
2696 :
2697 : void
2698 19869 : GCHelperThread::threadLoop()
2699 : {
2700 39737 : AutoLockGC lock(rt);
2701 :
2702 : /*
2703 :  * Even on the first iteration the state can be SHUTDOWN or SWEEPING if
2704 :  * the stop request, or a GC with its corresponding startBackgroundSweep
2705 :  * call, happens before this thread has a chance to run.
2706 : */
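     : /*
     :  * An informal sketch of the transitions this loop handles (the states
     :  * are set by the methods below while the GC lock is held):
     :  *
     :  *   IDLE          -> SWEEPING           startBackgroundSweep/startBackgroundShrink
     :  *   IDLE          -> ALLOCATING         startBackgroundAllocationIfIdle
     :  *   ALLOCATING    -> CANCEL_ALLOCATION  waitBackgroundSweepOrAllocEnd
     :  *   IDLE/SWEEPING -> SHUTDOWN           finish
     :  *
     :  * SWEEPING, ALLOCATING and CANCEL_ALLOCATION return to IDLE when their
     :  * work is done or cancelled.
     :  */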
2707 79537 : for (;;) {
2708 99406 : switch (state) {
2709 : case SHUTDOWN:
2710 : return;
2711 : case IDLE:
2712 48572 : PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
2713 48571 : break;
2714 : case SWEEPING:
2715 30966 : doSweep();
2716 30966 : if (state == SWEEPING)
2717 30966 : state = IDLE;
2718 30966 : PR_NotifyAllCondVar(done);
2719 30966 : break;
2720 : case ALLOCATING:
2721 0 : do {
2722 : Chunk *chunk;
2723 : {
2724 0 : AutoUnlockGC unlock(rt);
2725 0 : chunk = Chunk::allocate(rt);
2726 : }
2727 :
2728 : /* OOM stops the background allocation. */
2729 0 : if (!chunk)
2730 0 : break;
2731 0 : JS_ASSERT(chunk->info.numArenasFreeCommitted == ArenasPerChunk);
2732 0 : rt->gcNumArenasFreeCommitted += ArenasPerChunk;
2733 0 : rt->gcChunkPool.put(chunk);
2734 0 : } while (state == ALLOCATING && rt->gcChunkPool.wantBackgroundAllocation(rt));
2735 0 : if (state == ALLOCATING)
2736 0 : state = IDLE;
2737 0 : break;
2738 : case CANCEL_ALLOCATION:
2739 0 : state = IDLE;
2740 0 : PR_NotifyAllCondVar(done);
2741 0 : break;
2742 : }
2743 : }
2744 : }
2745 :
2746 : bool
2747 30957 : GCHelperThread::prepareForBackgroundSweep()
2748 : {
2749 30957 : JS_ASSERT(state == IDLE);
2750 30957 : size_t maxArenaLists = MAX_BACKGROUND_FINALIZE_KINDS * rt->compartments.length();
2751 30957 : return finalizeVector.reserve(maxArenaLists);
2752 : }
2753 :
2754 : /* Must be called with the GC lock taken. */
2755 : void
2756 30957 : GCHelperThread::startBackgroundSweep(JSContext *cx, bool shouldShrink)
2757 : {
2758 : /* The caller takes the GC lock. */
2759 30957 : JS_ASSERT(state == IDLE);
2760 30957 : JS_ASSERT(cx);
2761 30957 : JS_ASSERT(!finalizationContext);
2762 30957 : finalizationContext = cx;
2763 30957 : shrinkFlag = shouldShrink;
2764 30957 : state = SWEEPING;
2765 30957 : PR_NotifyCondVar(wakeup);
2766 30957 : }
2767 :
2768 : /* Must be called with the GC lock taken. */
2769 : void
2770 9 : GCHelperThread::startBackgroundShrink()
2771 : {
2772 9 : switch (state) {
2773 : case IDLE:
2774 9 : JS_ASSERT(!finalizationContext);
2775 9 : shrinkFlag = true;
2776 9 : state = SWEEPING;
2777 9 : PR_NotifyCondVar(wakeup);
2778 9 : break;
2779 : case SWEEPING:
2780 0 : shrinkFlag = true;
2781 0 : break;
2782 : case ALLOCATING:
2783 : case CANCEL_ALLOCATION:
2784 : /*
2785 : * If we have started background allocation there is nothing to
2786 : * shrink.
2787 : */
2788 0 : break;
2789 : case SHUTDOWN:
2790 0 : JS_NOT_REACHED("No shrink on shutdown");
2791 : }
2792 9 : }
2793 :
2794 : /* Must be called with the GC lock taken. */
2795 : void
2796 46519 : GCHelperThread::waitBackgroundSweepEnd()
2797 : {
2798 94430 : while (state == SWEEPING)
2799 1392 : PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
2800 46519 : }
2801 :
2802 : /* Must be called with the GC lock taken. */
2803 : void
2804 53738 : GCHelperThread::waitBackgroundSweepOrAllocEnd()
2805 : {
2806 53738 : if (state == ALLOCATING)
2807 0 : state = CANCEL_ALLOCATION;
2808 109470 : while (state == SWEEPING || state == CANCEL_ALLOCATION)
2809 1994 : PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
2810 53738 : }
2811 :
2812 : /* Must be called with the GC lock taken. */
2813 : inline void
2814 0 : GCHelperThread::startBackgroundAllocationIfIdle()
2815 : {
2816 0 : if (state == IDLE) {
2817 0 : state = ALLOCATING;
2818 0 : PR_NotifyCondVar(wakeup);
2819 : }
2820 0 : }
2821 :
2822 : JS_FRIEND_API(void)
2823 22655 : GCHelperThread::replenishAndFreeLater(void *ptr)
2824 : {
2825 22655 : JS_ASSERT(freeCursor == freeCursorEnd);
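     : /*
     :  * The do/while (false) below is a single-pass loop: any failure breaks
     :  * out to the synchronous Foreground::free_ fallback at the end.
     :  */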
2826 : do {
2827 22655 : if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
2828 0 : break;
2829 22655 : freeCursor = (void **) OffTheBooks::malloc_(FREE_ARRAY_SIZE);
2830 22655 : if (!freeCursor) {
2831 0 : freeCursorEnd = NULL;
2832 0 : break;
2833 : }
2834 22655 : freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
2835 22655 : *freeCursor++ = ptr;
2836 22655 : return;
2837 : } while (false);
2838 0 : Foreground::free_(ptr);
2839 : }
2840 :
2841 : /* Must be called with the GC lock taken. */
2842 : void
2843 30966 : GCHelperThread::doSweep()
2844 : {
2845 30966 : if (JSContext *cx = finalizationContext) {
2846 30957 : finalizationContext = NULL;
2847 61914 : AutoUnlockGC unlock(rt);
2848 :
2849 : /*
2850 :  * We must finalize in the insertion order; see the comments in
2851 :  * finalizeObjects.
2852 : */
2853 267470 : for (ArenaHeader **i = finalizeVector.begin(); i != finalizeVector.end(); ++i)
2854 236513 : ArenaLists::backgroundFinalize(cx, *i);
2855 30957 : finalizeVector.resize(0);
2856 :
2857 30957 : if (freeCursor) {
2858 22651 : void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
2859 22651 : freeElementsAndArray(array, freeCursor);
2860 22651 : freeCursor = freeCursorEnd = NULL;
2861 : } else {
2862 8306 : JS_ASSERT(!freeCursorEnd);
2863 : }
2864 30961 : for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
2865 4 : void **array = *iter;
2866 4 : freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
2867 : }
2868 30957 : freeVector.resize(0);
2869 : }
2870 :
2871 30966 : bool shrinking = shrinkFlag;
2872 30966 : ExpireChunksAndArenas(rt, shrinking);
2873 :
2874 : /*
2875 : * The main thread may have called ShrinkGCBuffers while
2876 : * ExpireChunksAndArenas(rt, false) was running, so we recheck the flag
2877 : * afterwards.
2878 : */
2879 30966 : if (!shrinking && shrinkFlag) {
2880 0 : shrinkFlag = false;
2881 0 : ExpireChunksAndArenas(rt, true);
2882 : }
2883 30966 : }
2884 :
2885 : #endif /* JS_THREADSAFE */
2886 :
2887 : } /* namespace js */
2888 :
2889 : static bool
2890 50609 : ReleaseObservedTypes(JSRuntime *rt)
2891 : {
2892 50609 : bool releaseTypes = false;
2893 50609 : int64_t now = PRMJ_Now();
2894 50609 : if (now >= rt->gcJitReleaseTime) {
2895 10 : releaseTypes = true;
2896 10 : rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
2897 : }
2898 :
2899 50609 : return releaseTypes;
2900 : }
2901 :
2902 : static void
2903 50609 : SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
2904 : {
2905 50609 : JSRuntime *rt = cx->runtime;
2906 50609 : JSCompartmentCallback callback = rt->compartmentCallback;
2907 :
2908 : /* Skip the atomsCompartment. */
2909 50609 : JSCompartment **read = rt->compartments.begin() + 1;
2910 50609 : JSCompartment **end = rt->compartments.end();
2911 50609 : JSCompartment **write = read;
2912 50609 : JS_ASSERT(rt->compartments.length() >= 1);
2913 50609 : JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
2914 :
2915 172262 : while (read < end) {
2916 71044 : JSCompartment *compartment = *read++;
2917 :
2918 187548 : if (!compartment->hold &&
2919 116504 : (compartment->arenas.arenaListsAreEmpty() || !rt->hasContexts()))
2920 : {
2921 25594 : compartment->arenas.checkEmptyFreeLists();
2922 25594 : if (callback)
2923 3579 : JS_ALWAYS_TRUE(callback(cx, compartment, JSCOMPARTMENT_DESTROY));
2924 25594 : if (compartment->principals)
2925 3305 : JS_DropPrincipals(rt, compartment->principals);
2926 25594 : cx->delete_(compartment);
2927 25594 : continue;
2928 : }
2929 45450 : *write++ = compartment;
2930 : }
2931 50609 : rt->compartments.resize(write - rt->compartments.begin());
2932 50609 : }
2933 :
2934 : static void
2935 52324 : PurgeRuntime(JSRuntime *rt)
2936 : {
2937 179471 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
2938 127147 : c->purge();
2939 :
2940 52324 : rt->tempLifoAlloc.freeUnused();
2941 52324 : rt->gsnCache.purge();
2942 :
2943 : /* FIXME: bug 506341 */
2944 52324 : rt->propertyCache.purge(rt);
2945 :
2946 104986 : for (ContextIter acx(rt); !acx.done(); acx.next())
2947 52662 : acx->purge();
2948 52324 : }
2949 :
2950 : static void
2951 50825 : BeginMarkPhase(JSRuntime *rt)
2952 : {
2953 50825 : GCMarker *gcmarker = &rt->gcMarker;
2954 :
2955 50825 : rt->gcStartNumber = rt->gcNumber;
2956 :
2957 : /* Reset weak map list. */
2958 50825 : WeakMapBase::resetWeakMapList(rt);
2959 :
2960 : /*
2961 : * We must purge the runtime at the beginning of an incremental GC. The
2962 : * danger if we purge later is that the snapshot invariant of incremental
2963 : * GC will be broken, as follows. If some object is reachable only through
2964 : * some cache (say the dtoaCache) then it will not be part of the snapshot.
2965 : * If we purge after root marking, then the mutator could obtain a pointer
2966 : * to the object and start using it. This object might never be marked, so
2967 : * a GC hazard would exist.
2968 : */
2969 50825 : PurgeRuntime(rt);
2970 :
2971 : /*
2972 : * Mark phase.
2973 : */
2974 101650 : gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
2975 101650 : gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS);
2976 :
2977 161769 : for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
2978 110944 : r.front()->bitmap.clear();
2979 :
2980 50825 : MarkRuntime(gcmarker);
2981 50825 : }
2982 :
2983 : void
2984 101650 : MarkWeakReferences(GCMarker *gcmarker)
2985 : {
2986 101650 : JS_ASSERT(gcmarker->isDrained());
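     : /*
     :  * Iterate to a fixed point: each of the markers below may make more
     :  * things reachable, so we re-drain the mark stack until none of them
     :  * reports new work.
     :  */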
2987 407312 : while (WatchpointMap::markAllIteratively(gcmarker) ||
2988 101970 : WeakMapBase::markAllIteratively(gcmarker) ||
2989 101722 : Debugger::markAllIteratively(gcmarker))
2990 : {
2991 320 : SliceBudget budget;
2992 320 : gcmarker->drainMarkStack(budget);
2993 : }
2994 101650 : JS_ASSERT(gcmarker->isDrained());
2995 101650 : }
2996 :
2997 : static void
2998 50825 : MarkGrayAndWeak(JSRuntime *rt)
2999 : {
3000 50825 : GCMarker *gcmarker = &rt->gcMarker;
3001 :
3002 50825 : JS_ASSERT(gcmarker->isDrained());
3003 50825 : MarkWeakReferences(gcmarker);
3004 :
3005 50825 : gcmarker->setMarkColorGray();
3006 50825 : if (gcmarker->hasBufferedGrayRoots()) {
3007 50825 : gcmarker->markBufferedGrayRoots();
3008 : } else {
3009 0 : if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
3010 0 : (*op)(gcmarker, rt->gcGrayRootsData);
3011 : }
3012 50825 : SliceBudget budget;
3013 50825 : gcmarker->drainMarkStack(budget);
3014 50825 : MarkWeakReferences(gcmarker);
3015 50825 : JS_ASSERT(gcmarker->isDrained());
3016 50825 : }
3017 :
3018 : #ifdef DEBUG
3019 : static void
3020 : ValidateIncrementalMarking(JSContext *cx);
3021 : #endif
3022 :
3023 : static void
3024 50825 : EndMarkPhase(JSContext *cx)
3025 : {
3026 50825 : JSRuntime *rt = cx->runtime;
3027 :
3028 : {
3029 101650 : gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
3030 101650 : gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER);
3031 50825 : MarkGrayAndWeak(rt);
3032 : }
3033 :
3034 50825 : JS_ASSERT(rt->gcMarker.isDrained());
3035 :
3036 : #ifdef DEBUG
3037 50825 : if (rt->gcIncrementalState != NO_INCREMENTAL)
3038 0 : ValidateIncrementalMarking(cx);
3039 : #endif
3040 :
3041 : #ifdef DEBUG
3042 : /* Make sure that we didn't mark an object in another compartment */
3043 50825 : if (rt->gcCurrentCompartment) {
3044 756 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
3045 972 : JS_ASSERT_IF(c != rt->gcCurrentCompartment && c != rt->atomsCompartment,
3046 972 : c->arenas.checkArenaListAllUnmarked());
3047 : }
3048 : }
3049 : #endif
3050 50825 : }
3051 :
3052 : #ifdef DEBUG
3053 : static void
3054 0 : ValidateIncrementalMarking(JSContext *cx)
3055 : {
3056 : typedef HashMap<Chunk *, uintptr_t *, GCChunkHasher, SystemAllocPolicy> BitmapMap;
3057 0 : BitmapMap map;
3058 0 : if (!map.init())
3059 : return;
3060 :
3061 0 : JSRuntime *rt = cx->runtime;
3062 0 : GCMarker *gcmarker = &rt->gcMarker;
3063 :
3064 : /* Save existing mark bits. */
3065 0 : for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
3066 0 : ChunkBitmap *bitmap = &r.front()->bitmap;
3067 0 : uintptr_t *entry = (uintptr_t *)js_malloc(sizeof(bitmap->bitmap));
3068 0 : if (!entry)
3069 : return;
3070 :
3071 0 : memcpy(entry, bitmap->bitmap, sizeof(bitmap->bitmap));
3072 0 : if (!map.putNew(r.front(), entry))
3073 : return;
3074 : }
3075 :
3076 : /* Save the existing weakmaps. */
3077 0 : WeakMapVector weakmaps;
3078 0 : if (!WeakMapBase::saveWeakMapList(rt, weakmaps))
3079 : return;
3080 :
3081 : /*
3082 : * After this point, the function should run to completion, so we shouldn't
3083 : * do anything fallible.
3084 : */
3085 :
3086 : /* Re-do all the marking, but non-incrementally. */
3087 0 : js::gc::State state = rt->gcIncrementalState;
3088 0 : rt->gcIncrementalState = NO_INCREMENTAL;
3089 :
3090 : /* As we're re-doing marking, we need to reset the weak map list. */
3091 0 : WeakMapBase::resetWeakMapList(rt);
3092 :
3093 0 : JS_ASSERT(gcmarker->isDrained());
3094 0 : gcmarker->reset();
3095 :
3096 0 : for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
3097 0 : r.front()->bitmap.clear();
3098 :
3099 0 : MarkRuntime(gcmarker, true);
3100 0 : SliceBudget budget;
3101 0 : rt->gcMarker.drainMarkStack(budget);
3102 0 : MarkGrayAndWeak(rt);
3103 :
3104 : /* Now verify that we have the same mark bits as before. */
3105 0 : for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
3106 0 : Chunk *chunk = r.front();
3107 0 : ChunkBitmap *bitmap = &chunk->bitmap;
3108 0 : uintptr_t *entry = map.lookup(r.front())->value;
3109 : ChunkBitmap incBitmap;
3110 :
3111 0 : memcpy(incBitmap.bitmap, entry, sizeof(incBitmap.bitmap));
3112 0 : js_free(entry);
3113 :
3114 0 : for (size_t i = 0; i < ArenasPerChunk; i++) {
3115 0 : Arena *arena = &chunk->arenas[i];
3116 0 : if (!arena->aheader.allocated())
3117 0 : continue;
3118 0 : if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment)
3119 0 : continue;
3120 0 : if (arena->aheader.allocatedDuringIncremental)
3121 0 : continue;
3122 :
3123 0 : AllocKind kind = arena->aheader.getAllocKind();
3124 0 : uintptr_t thing = arena->thingsStart(kind);
3125 0 : uintptr_t end = arena->thingsEnd();
3126 0 : while (thing < end) {
3127 0 : Cell *cell = (Cell *)thing;
3128 0 : if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
3129 0 : JS_DumpHeap(rt, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
3130 0 : printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
3131 : }
3132 0 : JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
3133 0 : thing += Arena::thingSize(kind);
3134 : }
3135 : }
3136 :
3137 0 : memcpy(bitmap->bitmap, incBitmap.bitmap, sizeof(incBitmap.bitmap));
3138 : }
3139 :
3140 : /* Restore the weak map list. */
3141 0 : WeakMapBase::resetWeakMapList(rt);
3142 0 : WeakMapBase::restoreWeakMapList(rt, weakmaps);
3143 :
3144 0 : rt->gcIncrementalState = state;
3145 : }
3146 : #endif
3147 :
3148 : static void
3149 50825 : SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
3150 : {
3151 50825 : JSRuntime *rt = cx->runtime;
3152 :
3153 : #ifdef JS_THREADSAFE
3154 50825 : if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
3155 30957 : cx->gcBackgroundFree = &rt->gcHelperThread;
3156 : #endif
3157 :
3158 : /* Purge the ArenaLists before sweeping. */
3159 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3160 121869 : c->arenas.purge();
3161 :
3162 50825 : if (rt->gcFinalizeCallback)
3163 14511 : rt->gcFinalizeCallback(cx, JSFINALIZE_START);
3164 :
3165 : /*
3166 : * Sweep phase.
3167 : *
3168 : * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
3169 : * so that any attempt to allocate a GC-thing from a finalizer will fail,
3170 : * rather than nest badly and leave the unmarked newborn to be swept.
3171 : *
3172 :  * We first sweep the atom state so we can use IsAboutToBeFinalized on
3173 :  * JSStrings held in a hashtable to check if the hashtable entry can be
3174 : * freed. Note that even after the entry is freed, JSObject finalizers can
3175 : * continue to access the corresponding JSString* assuming that they are
3176 : * unique. This works since the atomization API must not be called during
3177 : * the GC.
3178 : */
3179 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
3180 :
3181 : /* Finalize unreachable (key,value) pairs in all weak maps. */
3182 50825 : WeakMapBase::sweepAll(&rt->gcMarker);
3183 :
3184 50825 : js_SweepAtomState(rt);
3185 :
3186 : /* Collect watch points associated with unreachable objects. */
3187 50825 : WatchpointMap::sweepAll(rt);
3188 :
3189 50825 : if (!rt->gcCurrentCompartment)
3190 50609 : Debugger::sweepAll(cx);
3191 :
3192 50825 : bool releaseTypes = !rt->gcCurrentCompartment && ReleaseObservedTypes(rt);
3193 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3194 121869 : c->sweep(cx, releaseTypes);
3195 :
3196 : {
3197 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_OBJECT);
3198 :
3199 : /*
3200 : * We finalize objects before other GC things to ensure that the object's
3201 : * finalizer can access the other things even if they will be freed.
3202 : */
3203 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3204 121869 : c->arenas.finalizeObjects(cx);
3205 : }
3206 :
3207 : {
3208 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_STRING);
3209 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3210 121869 : c->arenas.finalizeStrings(cx);
3211 : }
3212 :
3213 : {
3214 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_SCRIPT);
3215 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3216 121869 : c->arenas.finalizeScripts(cx);
3217 : }
3218 :
3219 : {
3220 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_SHAPE);
3221 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3222 121869 : c->arenas.finalizeShapes(cx);
3223 : }
3224 :
3225 : #ifdef DEBUG
3226 50825 : PropertyTree::dumpShapes(cx);
3227 : #endif
3228 :
3229 : {
3230 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DESTROY);
3231 :
3232 : /*
3233 : * Sweep script filenames after sweeping functions in the generic loop
3234 : * above. In this way when a scripted function's finalizer destroys the
3235 : * script and calls rt->destroyScriptHook, the hook can still access the
3236 : * script's filename. See bug 323267.
3237 : */
3238 172694 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3239 121869 : js_SweepScriptFilenames(c);
3240 :
3241 : /*
3242 :  * This removes compartments from rt->compartments, so we do it last to make
3243 : * sure we don't miss sweeping any compartments.
3244 : */
3245 50825 : if (!rt->gcCurrentCompartment)
3246 50609 : SweepCompartments(cx, gckind);
3247 :
3248 : #ifndef JS_THREADSAFE
3249 : /*
3250 :  * Destroy arenas after we have finished sweeping so finalizers can safely
3251 : * use IsAboutToBeFinalized().
3252 : * This is done on the GCHelperThread if JS_THREADSAFE is defined.
3253 : */
3254 : ExpireChunksAndArenas(rt, gckind == GC_SHRINK);
3255 : #endif
3256 : }
3257 :
3258 : {
3259 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_XPCONNECT);
3260 50825 : if (rt->gcFinalizeCallback)
3261 14511 : rt->gcFinalizeCallback(cx, JSFINALIZE_END);
3262 : }
3263 :
3264 147424 : for (CompartmentsIter c(rt); !c.done(); c.next())
3265 96599 : c->setGCLastBytes(c->gcBytes, c->gcMallocAndFreeBytes, gckind);
3266 50825 : }
3267 :
3268 : /* Perform mark-and-sweep GC. If rt->gcCurrentCompartment is set, this is a single-compartment GC. */
3269 : static void
3270 50825 : MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
3271 : {
3272 50825 : JSRuntime *rt = cx->runtime;
3273 :
3274 101650 : AutoUnlockGC unlock(rt);
3275 :
3276 50825 : rt->gcMarker.start(rt);
3277 50825 : JS_ASSERT(!rt->gcMarker.callback);
3278 :
3279 50825 : BeginMarkPhase(rt);
3280 : {
3281 101650 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
3282 50825 : SliceBudget budget;
3283 50825 : rt->gcMarker.drainMarkStack(budget);
3284 : }
3285 50825 : EndMarkPhase(cx);
3286 50825 : SweepPhase(cx, gckind);
3287 :
3288 50825 : rt->gcMarker.stop();
3289 50825 : }
3290 :
3291 : /*
3292 :  * This class should be used by any code that needs exclusive access to the
3293 : * heap in order to trace through it...
3294 : */
3295 : class AutoHeapSession {
3296 : public:
3297 : explicit AutoHeapSession(JSRuntime *rt);
3298 : ~AutoHeapSession();
3299 :
3300 : protected:
3301 : JSRuntime *runtime;
3302 :
3303 : private:
3304 : AutoHeapSession(const AutoHeapSession&) MOZ_DELETE;
3305 : void operator=(const AutoHeapSession&) MOZ_DELETE;
3306 : };
3307 :
3308 : /* ...while this class is to be used only for garbage collection. */
3309 : class AutoGCSession : AutoHeapSession {
3310 : public:
3311 : explicit AutoGCSession(JSRuntime *rt, JSCompartment *comp);
3312 : ~AutoGCSession();
3313 : };
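
/*
 * Illustrative only (a sketch, not part of this file's API): the RAII
 * pattern these sessions enforce. A hypothetical caller needing exclusive
 * heap access would look like:
 *
 *     void TraceHeapExclusively(JSRuntime *rt, JSTracer *trc) {
 *         AutoLockGC lock(rt);
 *         AutoHeapSession session(rt);  // asserts !gcRunning, then sets it
 *         AutoUnlockGC unlock(rt);
 *         MarkRuntime(trc);
 *     }                                 // ~AutoHeapSession clears gcRunning
 *
 * TraceRuntime and the Iterate* helpers below follow exactly this shape;
 * AutoGCSession additionally records the target compartment and bumps
 * rt->gcNumber.
 */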
3314 :
3315 : /* Start a new heap session. */
3316 54409 : AutoHeapSession::AutoHeapSession(JSRuntime *rt)
3317 54409 : : runtime(rt)
3318 : {
3319 54409 : JS_ASSERT(!rt->noGCOrAllocationCheck);
3320 54409 : JS_ASSERT(!rt->gcRunning);
3321 54409 : rt->gcRunning = true;
3322 54409 : }
3323 :
3324 54409 : AutoHeapSession::~AutoHeapSession()
3325 : {
3326 54409 : JS_ASSERT(runtime->gcRunning);
3327 54409 : runtime->gcRunning = false;
3328 54409 : }
3329 :
3330 50825 : AutoGCSession::AutoGCSession(JSRuntime *rt, JSCompartment *comp)
3331 50825 : : AutoHeapSession(rt)
3332 : {
3333 50825 : JS_ASSERT(!runtime->gcCurrentCompartment);
3334 50825 : runtime->gcCurrentCompartment = comp;
3335 :
3336 50825 : runtime->gcIsNeeded = false;
3337 50825 : runtime->gcTriggerCompartment = NULL;
3338 50825 : runtime->gcInterFrameGC = true;
3339 :
3340 50825 : runtime->gcNumber++;
3341 :
3342 50825 : runtime->resetGCMallocBytes();
3343 :
3344 : /* Clear gcMallocBytes for all compartments */
3345 173018 : for (CompartmentsIter c(runtime); !c.done(); c.next())
3346 122193 : c->resetGCMallocBytes();
3347 50825 : }
3348 :
3349 101650 : AutoGCSession::~AutoGCSession()
3350 : {
3351 50825 : runtime->gcCurrentCompartment = NULL;
3352 50825 : runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
3353 50825 : runtime->gcChunkAllocationSinceLastGC = false;
3354 50825 : }
3355 :
3356 : static void
3357 50825 : ResetIncrementalGC(JSRuntime *rt, const char *reason)
3358 : {
3359 50825 : if (rt->gcIncrementalState == NO_INCREMENTAL)
3360 50825 : return;
3361 :
3362 0 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
3363 0 : if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c)
3364 0 : c->needsBarrier_ = false;
3365 :
3366 0 : JS_ASSERT(!c->needsBarrier_);
3367 : }
3368 :
3369 0 : rt->gcIncrementalCompartment = NULL;
3370 0 : rt->gcMarker.reset();
3371 0 : rt->gcMarker.stop();
3372 0 : rt->gcIncrementalState = NO_INCREMENTAL;
3373 :
3374 0 : rt->gcStats.reset(reason);
3375 : }
3376 :
3377 : class AutoGCSlice {
3378 : public:
3379 : AutoGCSlice(JSContext *cx);
3380 : ~AutoGCSlice();
3381 :
3382 : private:
3383 : JSContext *context;
3384 : };
3385 :
3386 0 : AutoGCSlice::AutoGCSlice(JSContext *cx)
3387 0 : : context(cx)
3388 : {
3389 0 : JSRuntime *rt = context->runtime;
3390 :
3391 : /*
3392 : * During incremental GC, the compartment's active flag determines whether
3393 : * there are stack frames active for any of its scripts. Normally this flag
3394 : * is set at the beginning of the mark phase. During incremental GC, we also
3395 : * set it at the start of every slice.
3396 : */
3397 0 : rt->stackSpace.markActiveCompartments();
3398 :
3399 0 : for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
3400 : /* Clear this early so we don't do any write barriers during GC. */
3401 0 : if (rt->gcIncrementalState == MARK)
3402 0 : c->needsBarrier_ = false;
3403 : else
3404 0 : JS_ASSERT(!c->needsBarrier_);
3405 : }
3406 0 : }
3407 :
3408 0 : AutoGCSlice::~AutoGCSlice()
3409 : {
3410 0 : JSRuntime *rt = context->runtime;
3411 :
3412 0 : for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
3413 0 : if (rt->gcIncrementalState == MARK) {
3414 0 : c->needsBarrier_ = true;
3415 0 : c->arenas.prepareForIncrementalGC(rt);
3416 : } else {
3417 0 : JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
3418 :
3419 0 : c->needsBarrier_ = false;
3420 : }
3421 : }
3422 0 : }
3423 :
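/*
 * Descriptive note (inferred from the surrounding code, not original
 * documentation): heap iteration and marking expect every cell to be
 * either live or covered by a free span recorded in its arena header.
 * This RAII helper copies each compartment's active free lists into the
 * arena headers for the scope of a GC or heap traversal, and clears them
 * again on exit.
 */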
3424 : class AutoCopyFreeListToArenas {
3425 : JSRuntime *rt;
3426 :
3427 : public:
3428 54275 : AutoCopyFreeListToArenas(JSRuntime *rt)
3429 54275 : : rt(rt) {
3430 188112 : for (CompartmentsIter c(rt); !c.done(); c.next())
3431 133837 : c->arenas.copyFreeListsToArenas();
3432 54275 : }
3433 :
3434 54275 : ~AutoCopyFreeListToArenas() {
3435 162518 : for (CompartmentsIter c(rt); !c.done(); c.next())
3436 108243 : c->arenas.clearFreeListsInArenas();
3437 54275 : }
3438 : };
3439 :
3440 : static void
3441 0 : IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
3442 : {
3443 0 : JSRuntime *rt = cx->runtime;
3444 :
3445 0 : AutoUnlockGC unlock(rt);
3446 0 : AutoGCSlice slice(cx);
3447 :
3448 0 : gc::State initialState = rt->gcIncrementalState;
3449 :
3450 0 : if (rt->gcIncrementalState == NO_INCREMENTAL) {
3451 0 : JS_ASSERT(!rt->gcIncrementalCompartment);
3452 0 : rt->gcIncrementalCompartment = rt->gcCurrentCompartment;
3453 0 : rt->gcIncrementalState = MARK_ROOTS;
3454 0 : rt->gcLastMarkSlice = false;
3455 : }
3456 :
3457 0 : if (rt->gcIncrementalState == MARK_ROOTS) {
3458 0 : rt->gcMarker.start(rt);
3459 0 : JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
3460 :
3461 0 : for (GCCompartmentsIter c(rt); !c.done(); c.next())
3462 0 : c->discardJitCode(cx);
3463 :
3464 0 : BeginMarkPhase(rt);
3465 :
3466 0 : rt->gcIncrementalState = MARK;
3467 : }
3468 :
3469 0 : if (rt->gcIncrementalState == MARK) {
3470 0 : gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
3471 0 : SliceBudget sliceBudget(budget);
3472 :
3473 : /* If the gray roots were not buffered, drop the budget limit so that marking finishes in this slice. */
3474 0 : if (!rt->gcMarker.hasBufferedGrayRoots())
3475 0 : sliceBudget.reset();
3476 :
3477 0 : bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
3478 :
3479 0 : if (finished) {
3480 0 : JS_ASSERT(rt->gcMarker.isDrained());
3481 0 : if (initialState == MARK && !rt->gcLastMarkSlice)
3482 0 : rt->gcLastMarkSlice = true;
3483 : else
3484 0 : rt->gcIncrementalState = SWEEP;
3485 : }
3486 : }
3487 :
3488 0 : if (rt->gcIncrementalState == SWEEP) {
3489 0 : EndMarkPhase(cx);
3490 0 : SweepPhase(cx, gckind);
3491 :
3492 0 : rt->gcMarker.stop();
3493 :
3494 : /* JIT code was already discarded during sweeping. */
3495 :
3496 0 : rt->gcIncrementalCompartment = NULL;
3497 :
3498 0 : rt->gcIncrementalState = NO_INCREMENTAL;
3499 : }
3500 0 : }
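
/*
 * A sketch of the state machine driven above, in the style of the slice
 * diagram at the top of this file:
 *
 *   NO_INCREMENTAL -> MARK_ROOTS -> MARK -> ... -> MARK -> SWEEP -> NO_INCREMENTAL
 *
 * A single slice may advance through several states. When the mark stack
 * is drained by a slice that began in MARK, gcLastMarkSlice defers the
 * transition once, so the sweep runs in a later slice rather than
 * extending the slice that finished marking.
 */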
3501 :
3502 : class IncrementalSafety
3503 : {
3504 : const char *reason_;
3505 :
3506 3023 : IncrementalSafety(const char *reason) : reason_(reason) {}
3507 :
3508 : public:
3509 2807 : static IncrementalSafety Safe() { return IncrementalSafety(NULL); }
3510 216 : static IncrementalSafety Unsafe(const char *reason) { return IncrementalSafety(reason); }
3511 :
3512 : typedef void (IncrementalSafety::* ConvertibleToBool)();
3513 0 : void nonNull() {}
3514 :
3515 3023 : operator ConvertibleToBool() const {
3516 3023 : return reason_ == NULL ? &IncrementalSafety::nonNull : 0;
3517 : }
3518 :
3519 0 : const char *reason() {
3520 0 : JS_ASSERT(reason_);
3521 0 : return reason_;
3522 : }
3523 : };
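
/*
 * The member-pointer conversion above is the pre-C++11 "safe bool" idiom:
 * it lets callers test an IncrementalSafety in boolean context without
 * allowing accidental arithmetic or comparisons against unrelated types.
 * Illustrative usage (a sketch; BudgetIncrementalGC below is the real
 * caller):
 *
 *     IncrementalSafety safe = IsIncrementalGCSafe(rt);
 *     if (!safe)
 *         fprintf(stderr, "nonincremental GC: %s\n", safe.reason());
 */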
3524 :
3525 : static IncrementalSafety
3526 3023 : IsIncrementalGCSafe(JSRuntime *rt)
3527 : {
3528 3023 : if (rt->gcCompartmentCreated) {
3529 48 : rt->gcCompartmentCreated = false;
3530 48 : return IncrementalSafety::Unsafe("compartment created");
3531 : }
3532 :
3533 2975 : if (rt->gcKeepAtoms)
3534 168 : return IncrementalSafety::Unsafe("gcKeepAtoms set");
3535 :
3536 12874 : for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
3537 10067 : if (c->activeAnalysis)
3538 0 : return IncrementalSafety::Unsafe("activeAnalysis set");
3539 : }
3540 :
3541 2807 : if (!rt->gcIncrementalEnabled)
3542 0 : return IncrementalSafety::Unsafe("incremental permanently disabled");
3543 :
3544 2807 : return IncrementalSafety::Safe();
3545 : }
3546 :
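/*
 * Decide whether the requested slice may run incrementally. A digest of
 * the policy below (inferred, not original documentation): fall back to an
 * unlimited, nonincremental budget when incremental GC is unsafe, when the
 * runtime is not in JSGC_MODE_INCREMENTAL, always on Android, and when a
 * compartment has exceeded its allocation trigger; an in-progress
 * incremental GC is reset if the target compartment changes.
 */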
3547 : static void
3548 0 : BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
3549 : {
3550 0 : IncrementalSafety safe = IsIncrementalGCSafe(rt);
3551 0 : if (!safe) {
3552 0 : ResetIncrementalGC(rt, safe.reason());
3553 0 : *budget = SliceBudget::Unlimited;
3554 0 : rt->gcStats.nonincremental(safe.reason());
3555 0 : return;
3556 : }
3557 :
3558 0 : if (rt->gcMode != JSGC_MODE_INCREMENTAL) {
3559 0 : ResetIncrementalGC(rt, "GC mode change");
3560 0 : *budget = SliceBudget::Unlimited;
3561 0 : rt->gcStats.nonincremental("GC mode");
3562 0 : return;
3563 : }
3564 :
3565 : #ifdef ANDROID
3566 : JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
3567 : *budget = SliceBudget::Unlimited;
3568 : rt->gcStats.nonincremental("Android");
3569 : return;
3570 : #endif
3571 :
3572 0 : if (rt->gcIncrementalState != NO_INCREMENTAL &&
3573 : rt->gcCurrentCompartment != rt->gcIncrementalCompartment)
3574 : {
3575 0 : ResetIncrementalGC(rt, "compartment change");
3576 0 : return;
3577 : }
3578 :
3579 0 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
3580 0 : if (c->gcBytes > c->gcTriggerBytes) {
3581 0 : *budget = SliceBudget::Unlimited;
3582 0 : rt->gcStats.nonincremental("allocation trigger");
3583 0 : return;
3584 : }
3585 : }
3586 : }
3587 :
3588 : /*
3589 : * GC, repeatedly if necessary, until we think we have not created any new
3590 : * garbage. We disable inlining to ensure that the bottom of the stack with
3591 : * possible GC roots recorded in js_GC excludes any pointers we use during the
3592 : * marking implementation.
3593 : */
3594 : static JS_NEVER_INLINE void
3595 50825 : GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind)
3596 : {
3597 50825 : JSRuntime *rt = cx->runtime;
3598 :
3599 50825 : JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
3600 50825 : JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL);
3601 :
3602 : /* Recursive GC is a no-op. */
3603 50825 : if (rt->gcRunning)
3604 0 : return;
3605 :
3606 101650 : AutoGCSession gcsession(rt, comp);
3607 :
3608 : /* Don't GC if we are reporting an OOM. */
3609 50825 : if (rt->inOOMReport)
3610 : return;
3611 :
3612 : #ifdef JS_THREADSAFE
3613 : /*
3614 : * As we are about to purge caches and clear the mark bits, we must wait
3615 : * for any background finalization to finish. We must also wait for
3616 : * background allocation to finish so that we can avoid taking the GC lock
3617 : * when manipulating the chunks during the GC.
3618 : */
3619 50825 : JS_ASSERT(!cx->gcBackgroundFree);
3620 50825 : rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
3621 : #endif
3622 :
3623 50825 : if (budget == SliceBudget::Unlimited) {
3624 : /* If non-incremental GC was requested, reset incremental GC. */
3625 50825 : ResetIncrementalGC(rt, "requested");
3626 50825 : rt->gcStats.nonincremental("requested");
3627 : } else {
3628 0 : BudgetIncrementalGC(rt, &budget);
3629 : }
3630 :
3631 101650 : AutoCopyFreeListToArenas copy(rt);
3632 :
3633 50825 : if (budget == SliceBudget::Unlimited && rt->gcIncrementalState == NO_INCREMENTAL)
3634 50825 : MarkAndSweep(cx, gckind);
3635 : else
3636 0 : IncrementalGCSlice(cx, budget, gckind);
3637 :
3638 : #ifdef DEBUG
3639 50825 : if (rt->gcIncrementalState == NO_INCREMENTAL) {
3640 147424 : for (CompartmentsIter c(rt); !c.done(); c.next())
3641 96599 : JS_ASSERT(!c->needsBarrier_);
3642 : }
3643 : #endif
3644 : #ifdef JS_THREADSAFE
3645 50825 : if (rt->gcIncrementalState == NO_INCREMENTAL) {
3646 50825 : if (cx->gcBackgroundFree) {
3647 30957 : JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
3648 30957 : cx->gcBackgroundFree = NULL;
3649 30957 : rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
3650 : }
3651 : }
3652 : #endif
3653 : }
3654 :
3655 : #ifdef JS_GC_ZEAL
3656 : static bool
3657 0 : IsDeterministicGCReason(gcreason::Reason reason)
3658 : {
3659 0 : if (reason > gcreason::DEBUG_GC && reason != gcreason::CC_FORCED)
3660 0 : return false;
3661 :
3662 0 : if (reason == gcreason::MAYBEGC)
3663 0 : return false;
3664 :
3665 0 : return true;
3666 : }
3667 : #endif
3668 :
3669 : static void
3670 50825 : Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
3671 : JSGCInvocationKind gckind, gcreason::Reason reason)
3672 : {
3673 50825 : JSRuntime *rt = cx->runtime;
3674 50825 : JS_AbortIfWrongThread(rt);
3675 :
3676 : #ifdef JS_GC_ZEAL
3677 50825 : if (rt->gcDeterministicOnly && !IsDeterministicGCReason(reason))
3678 0 : return;
3679 : #endif
3680 :
3681 : JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
3682 :
3683 : #ifdef JS_GC_ZEAL
3684 : struct AutoVerifyBarriers {
3685 : JSContext *cx;
3686 : bool inVerify;
3687 50825 : AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
3688 50825 : if (inVerify) EndVerifyBarriers(cx);
3689 50825 : }
3690 50825 : ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
3691 101650 : } av(cx);
3692 : #endif
3693 :
3694 50825 : RecordNativeStackTopForGC(rt);
3695 :
3696 : /* This is a heuristic to avoid resets. */
3697 50825 : if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment)
3698 0 : comp = NULL;
3699 :
3700 101650 : gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
3701 :
3702 50825 : do {
3703 : /*
3704 : * Let the API user decide to defer a GC if it wants to (unless this
3705 : * is the last context). Invoke the callback regardless.
3706 : */
3707 50825 : if (rt->gcIncrementalState == NO_INCREMENTAL) {
3708 50825 : if (JSGCCallback callback = rt->gcCallback)
3709 14509 : callback(rt, JSGC_BEGIN);
3710 : }
3711 :
3712 : {
3713 : /* Lock out other GC allocator and collector invocations. */
3714 101650 : AutoLockGC lock(rt);
3715 50825 : rt->gcPoke = false;
3716 50825 : GCCycle(cx, comp, budget, gckind);
3717 : }
3718 :
3719 50825 : if (rt->gcIncrementalState == NO_INCREMENTAL) {
3720 50825 : if (JSGCCallback callback = rt->gcCallback)
3721 14509 : callback(rt, JSGC_END);
3722 : }
3723 :
3724 : /*
3725 : * On shutdown, iterate until finalizers or the JSGC_END callback
3726 : * stop creating garbage.
3727 : */
3728 50825 : } while (!rt->hasContexts() && rt->gcPoke);
3729 : }
3730 :
3731 : namespace js {
3732 :
3733 : void
3734 50648 : GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
3735 : {
3736 50648 : Collect(cx, comp, SliceBudget::Unlimited, gckind, reason);
3737 50648 : }
3738 :
3739 : void
3740 177 : GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
3741 : {
3742 177 : Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason);
3743 177 : }
3744 :
3745 : void
3746 0 : GCDebugSlice(JSContext *cx, int64_t objCount)
3747 : {
3748 0 : Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
3749 0 : }
3750 :
3751 : void
3752 9 : ShrinkGCBuffers(JSRuntime *rt)
3753 : {
3754 18 : AutoLockGC lock(rt);
3755 9 : JS_ASSERT(!rt->gcRunning);
3756 : #ifndef JS_THREADSAFE
3757 : ExpireChunksAndArenas(rt, true);
3758 : #else
3759 9 : rt->gcHelperThread.startBackgroundShrink();
3760 : #endif
3761 9 : }
3762 :
3763 : void
3764 549 : TraceRuntime(JSTracer *trc)
3765 : {
3766 549 : JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
3767 :
3768 : #ifdef JS_THREADSAFE
3769 : {
3770 549 : JSRuntime *rt = trc->runtime;
3771 549 : if (!rt->gcRunning) {
3772 1098 : AutoLockGC lock(rt);
3773 1098 : AutoHeapSession session(rt);
3774 :
3775 549 : rt->gcHelperThread.waitBackgroundSweepEnd();
3776 1098 : AutoUnlockGC unlock(rt);
3777 :
3778 1098 : AutoCopyFreeListToArenas copy(rt);
3779 549 : RecordNativeStackTopForGC(rt);
3780 549 : MarkRuntime(trc);
3781 : return;
3782 : }
3783 : }
3784 : #else
3785 : AutoCopyFreeListToArenas copy(trc->runtime);
3786 : RecordNativeStackTopForGC(trc->runtime);
3787 : #endif
3788 :
3789 : /*
3790 : * Calls from inside a normal GC, as well as recursive calls, are OK and do
3791 : * not require session setup.
3792 : */
3793 0 : MarkRuntime(trc);
3794 : }
3795 :
3796 : struct IterateArenaCallbackOp
3797 : {
3798 : JSRuntime *rt;
3799 : void *data;
3800 : IterateArenaCallback callback;
3801 : JSGCTraceKind traceKind;
3802 : size_t thingSize;
3803 180 : IterateArenaCallbackOp(JSRuntime *rt, void *data, IterateArenaCallback callback,
3804 : JSGCTraceKind traceKind, size_t thingSize)
3805 180 : : rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
3806 180 : {}
3807 1084 : void operator()(Arena *arena) { (*callback)(rt, data, arena, traceKind, thingSize); }
3808 : };
3809 :
3810 : struct IterateCellCallbackOp
3811 : {
3812 : JSRuntime *rt;
3813 : void *data;
3814 : IterateCellCallback callback;
3815 : JSGCTraceKind traceKind;
3816 : size_t thingSize;
3817 180 : IterateCellCallbackOp(JSRuntime *rt, void *data, IterateCellCallback callback,
3818 : JSGCTraceKind traceKind, size_t thingSize)
3819 180 : : rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
3820 180 : {}
3821 142337 : void operator()(Cell *cell) { (*callback)(rt, data, cell, traceKind, thingSize); }
3822 : };
3823 :
3824 : void
3825 3 : IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
3826 : JSIterateCompartmentCallback compartmentCallback,
3827 : IterateArenaCallback arenaCallback,
3828 : IterateCellCallback cellCallback)
3829 : {
3830 3 : JS_ASSERT(!rt->gcRunning);
3831 :
3832 6 : AutoLockGC lock(rt);
3833 6 : AutoHeapSession session(rt);
3834 : #ifdef JS_THREADSAFE
3835 3 : rt->gcHelperThread.waitBackgroundSweepEnd();
3836 : #endif
3837 6 : AutoUnlockGC unlock(rt);
3838 :
3839 6 : AutoCopyFreeListToArenas copy(rt);
3840 12 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
3841 9 : (*compartmentCallback)(rt, data, c);
3842 :
3843 189 : for (size_t thingKind = 0; thingKind != FINALIZE_LIMIT; thingKind++) {
3844 180 : JSGCTraceKind traceKind = MapAllocToTraceKind(AllocKind(thingKind));
3845 180 : size_t thingSize = Arena::thingSize(AllocKind(thingKind));
3846 180 : IterateArenaCallbackOp arenaOp(rt, data, arenaCallback, traceKind, thingSize);
3847 180 : IterateCellCallbackOp cellOp(rt, data, cellCallback, traceKind, thingSize);
3848 180 : ForEachArenaAndCell(c, AllocKind(thingKind), arenaOp, cellOp);
3849 : }
3850 : }
3851 3 : }
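
/*
 * Illustrative only: a minimal sketch of driving the iterator above with
 * hypothetical callbacks (the names and exact signatures are assumptions
 * matching how the callbacks are invoked in this file):
 *
 *     static void CountCompartment(JSRuntime *, void *, JSCompartment *) {}
 *     static void CountArena(JSRuntime *, void *, Arena *,
 *                            JSGCTraceKind, size_t) {}
 *     static void CountCell(JSRuntime *, void *data, Cell *,
 *                           JSGCTraceKind, size_t) {
 *         ++*static_cast<size_t *>(data);
 *     }
 *
 *     size_t cells = 0;
 *     IterateCompartmentsArenasCells(rt, &cells, CountCompartment,
 *                                    CountArena, CountCell);
 */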
3852 :
3853 : void
3854 3 : IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
3855 : {
3856 : /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
3857 3 : JS_ASSERT(!rt->gcRunning);
3858 :
3859 6 : AutoLockGC lock(rt);
3860 6 : AutoHeapSession session(rt);
3861 : #ifdef JS_THREADSAFE
3862 3 : rt->gcHelperThread.waitBackgroundSweepEnd();
3863 : #endif
3864 6 : AutoUnlockGC unlock(rt);
3865 :
3866 12 : for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
3867 9 : chunkCallback(rt, data, r.front());
3868 3 : }
3869 :
3870 : void
3871 0 : IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
3872 : void *data, IterateCellCallback cellCallback)
3873 : {
3874 : /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
3875 0 : JS_ASSERT(!rt->gcRunning);
3876 :
3877 0 : AutoLockGC lock(rt);
3878 0 : AutoHeapSession session(rt);
3879 : #ifdef JS_THREADSAFE
3880 0 : rt->gcHelperThread.waitBackgroundSweepEnd();
3881 : #endif
3882 0 : AutoUnlockGC unlock(rt);
3883 :
3884 0 : AutoCopyFreeListToArenas copy(rt);
3885 :
3886 0 : JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
3887 0 : size_t thingSize = Arena::thingSize(thingKind);
3888 :
3889 0 : if (compartment) {
3890 0 : for (CellIterUnderGC i(compartment, thingKind); !i.done(); i.next())
3891 0 : cellCallback(rt, data, i.getCell(), traceKind, thingSize);
3892 : } else {
3893 0 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
3894 0 : for (CellIterUnderGC i(c, thingKind); !i.done(); i.next())
3895 0 : cellCallback(rt, data, i.getCell(), traceKind, thingSize);
3896 : }
3897 : }
3898 0 : }
3899 :
3900 : namespace gc {
3901 :
3902 : JSCompartment *
3903 25596 : NewCompartment(JSContext *cx, JSPrincipals *principals)
3904 : {
3905 25596 : JSRuntime *rt = cx->runtime;
3906 25596 : JS_AbortIfWrongThread(rt);
3907 :
3908 25596 : JSCompartment *compartment = cx->new_<JSCompartment>(rt);
3909 25596 : if (compartment && compartment->init(cx)) {
3910 : // Any compartment with the trusted principals -- and there can be
3911 : // multiple -- is a system compartment.
3912 25596 : compartment->isSystemCompartment = principals && rt->trustedPrincipals() == principals;
3913 25596 : if (principals) {
3914 3307 : compartment->principals = principals;
3915 3307 : JS_HoldPrincipals(principals);
3916 : }
3917 :
3918 25596 : compartment->setGCLastBytes(8192, 8192, GC_NORMAL);
3919 :
3920 : /*
3921 : * Before reporting the OOM condition, |lock| needs to be cleaned up,
3922 : * hence the scoping.
3923 : */
3924 : {
3925 51192 : AutoLockGC lock(rt);
3926 :
3927 : /*
3928 : * If we're in the middle of an incremental GC, we cancel
3929 : * it. Otherwise we might fail to mark the newly created
3930 : * compartment fully.
3931 : */
3932 25596 : if (rt->gcIncrementalState == MARK)
3933 81 : rt->gcCompartmentCreated = true;
3934 :
3935 25596 : if (rt->compartments.append(compartment))
3936 25596 : return compartment;
3937 : }
3938 :
3939 0 : js_ReportOutOfMemory(cx);
3940 : }
3941 0 : Foreground::delete_(compartment);
3942 0 : return NULL;
3943 : }
3944 :
3945 : void
3946 9033 : RunDebugGC(JSContext *cx)
3947 : {
3948 : #ifdef JS_GC_ZEAL
3949 9033 : JSRuntime *rt = cx->runtime;
3950 :
3951 : /*
3952 : * If rt->gcDebugCompartmentGC is true, only GC the current
3953 : * compartment. But don't GC the atoms compartment.
3954 : */
3955 9033 : rt->gcTriggerCompartment = rt->gcDebugCompartmentGC ? cx->compartment : NULL;
3956 9033 : if (rt->gcTriggerCompartment == rt->atomsCompartment)
3957 0 : rt->gcTriggerCompartment = NULL;
3958 :
3959 9033 : RunLastDitchGC(cx, gcreason::DEBUG_GC);
3960 : #endif
3961 9033 : }
3962 :
3963 : void
3964 0 : SetDeterministicGC(JSContext *cx, bool enabled)
3965 : {
3966 : #ifdef JS_GC_ZEAL
3967 0 : JSRuntime *rt = cx->runtime;
3968 0 : rt->gcDeterministicOnly = enabled;
3969 : #endif
3970 0 : }
3971 :
3972 : #if defined(DEBUG) && defined(JSGC_ROOT_ANALYSIS) && !defined(JS_THREADSAFE)
3973 :
3974 : static void
3975 : CheckStackRoot(JSTracer *trc, uintptr_t *w)
3976 : {
3977 : /* Mark memory as defined for valgrind, as in MarkWordConservatively. */
3978 : #ifdef JS_VALGRIND
3979 : VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
3980 : #endif
3981 :
3982 : ConservativeGCTest test = MarkIfGCThingWord(trc, *w, DONT_MARK_THING);
3983 :
3984 : if (test == CGCT_VALID) {
3985 : JSContext *iter = NULL;
3986 : bool matched = false;
3987 : JSRuntime *rt = trc->runtime;
3988 : for (unsigned i = 0; i < THING_ROOT_COUNT; i++) {
3989 : Root<Cell*> *rooter = rt->thingGCRooters[i];
3990 : while (rooter) {
3991 : if (rooter->address() == (Cell **) w)
3992 : matched = true;
3993 : rooter = rooter->previous();
3994 : }
3995 : }
3996 : CheckRoot *check = rt->checkGCRooters;
3997 : while (check) {
3998 : if (check->contains(static_cast<uint8_t*>(w), sizeof(w)))
3999 : matched = true;
4000 : check = check->previous();
4001 : }
4002 : if (!matched) {
4003 : /*
4004 : * Only poison the last byte in the word. It is easy to get
4005 : * accidental collisions when a value that does not occupy a full
4006 : * word is used to overwrite a now-dead GC thing pointer. In this
4007 : * case we want to avoid damaging the smaller value.
4008 : */
4009 : PoisonPtr(w);
4010 : }
4011 : }
4012 : }
4013 :
4014 : static void
4015 : CheckStackRootsRange(JSTracer *trc, uintptr_t *begin, uintptr_t *end)
4016 : {
4017 : JS_ASSERT(begin <= end);
4018 : for (uintptr_t *i = begin; i != end; ++i)
4019 : CheckStackRoot(trc, i);
4020 : }
4021 :
4022 : void
4023 : CheckStackRoots(JSContext *cx)
4024 : {
4025 : AutoCopyFreeListToArenas copy(cx->runtime);
4026 :
4027 : JSTracer checker;
4028 : JS_TracerInit(&checker, cx, EmptyMarkCallback);
4029 :
4030 : ThreadData *td = JS_THREAD_DATA(cx);
4031 :
4032 : ConservativeGCThreadData *ctd = &td->conservativeGC;
4033 : ctd->recordStackTop();
4034 :
4035 : JS_ASSERT(ctd->hasStackToScan());
4036 : uintptr_t *stackMin, *stackEnd;
4037 : #if JS_STACK_GROWTH_DIRECTION > 0
4038 : stackMin = td->nativeStackBase;
4039 : stackEnd = ctd->nativeStackTop;
4040 : #else
4041 : stackMin = ctd->nativeStackTop + 1;
4042 : stackEnd = td->nativeStackBase;
4043 : #endif
4044 :
4045 : JS_ASSERT(stackMin <= stackEnd);
4046 : CheckStackRootsRange(&checker, stackMin, stackEnd);
4047 : CheckStackRootsRange(&checker, ctd->registerSnapshot.words,
4048 : ArrayEnd(ctd->registerSnapshot.words));
4049 : }
4050 :
4051 : #endif /* DEBUG && JSGC_ROOT_ANALYSIS && !JS_THREADSAFE */
4052 :
4053 : #ifdef JS_GC_ZEAL
4054 :
4055 : /*
4056 : * Write barrier verification
4057 : *
4058 : * The next few functions are for incremental write barrier verification. When
4059 : * StartVerifyBarriers is called, a snapshot is taken of all objects in the GC
4060 : * heap and saved in an explicit graph data structure. Later, EndVerifyBarriers
4061 : * traverses the heap again. Any pointer values that were in the snapshot and
4062 : * are no longer found must be marked; otherwise an assertion triggers. Note
4063 : * that we must not GC in between starting and finishing a verification phase.
4064 : *
4065 : * The VerifyBarriers function is a shorthand. It checks whether a verification
4066 : * phase is currently running; if so, it ends that phase, and otherwise it
4067 : * starts a new one.
4068 : *
4069 : * The user can adjust the frequency of verifications, which causes
4070 : * MaybeVerifyBarriers to be a no-op for all but one out of every N calls.
4071 : * However, if its |always| parameter is true, it restarts the phase no matter what.
4072 : */
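
/*
 * A sketch of the overall check in assumed pseudocode (the real traversal
 * is in StartVerifyBarriers and EndVerifyBarriers below):
 *
 *     start:  snapshot = graph of (node, edges) reachable from the roots
 *     ...mutator runs with write barriers enabled...
 *     end:    for each node in snapshot:
 *                 re-trace node->thing, cancelling edges still present
 *                 for each snapshot edge not cancelled:
 *                     assert its target is marked or was allocated during
 *                     the verification phase
 */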
4073 :
4074 : struct EdgeValue
4075 : {
4076 : void *thing;
4077 : JSGCTraceKind kind;
4078 : char *label;
4079 : };
4080 :
4081 : struct VerifyNode
4082 : {
4083 : void *thing;
4084 : JSGCTraceKind kind;
4085 : uint32_t count;
4086 : EdgeValue edges[1];
4087 : };
4088 :
4089 : typedef HashMap<void *, VerifyNode *, DefaultHasher<void *>, SystemAllocPolicy> NodeMap;
4090 :
4091 : /*
4092 : * The verifier data structures are simple. The entire graph is stored in a
4093 : * single block of memory. At the beginning is a VerifyNode for the root
4094 : * node. It is followed by a sequence of EdgeValues--the exact number is given
4095 : * in the node. After the edges come more nodes and their edges.
4096 : *
4097 : * The edgeptr and term fields are used to allocate out of the block of memory
4098 : * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
4099 : * we just abandon the verification.
4100 : *
4101 : * The nodemap field is a hashtable that maps from the address of the GC thing
4102 : * to the VerifyNode that represents it.
4103 : */
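/*
 * Assumed illustration of that single-block layout (not to scale):
 *
 *   [VerifyNode root][EdgeValue x count][VerifyNode][EdgeValue x count]...
 *   ^ trc->root                            trc->edgeptr grows toward trc->term
 */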
4104 : struct VerifyTracer : JSTracer {
4105 : /* The gcNumber when the verification began. */
4106 : uint64_t number;
4107 :
4108 : /* This counts up to the zeal frequency to decide whether to verify. */
4109 : uint32_t count;
4110 :
4111 : /* This graph represents the initial GC "snapshot". */
4112 : VerifyNode *curnode;
4113 : VerifyNode *root;
4114 : char *edgeptr;
4115 : char *term;
4116 : NodeMap nodemap;
4117 :
4118 1499 : VerifyTracer() : root(NULL) {}
4119 1499 : ~VerifyTracer() { js_free(root); }
4120 : };
4121 :
4122 : /*
4123 : * This function builds up the heap snapshot by adding edges to the current
4124 : * node.
4125 : */
4126 : static void
4127 18879676 : AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
4128 : {
4129 18879676 : VerifyTracer *trc = (VerifyTracer *)jstrc;
4130 :
4131 18879676 : trc->edgeptr += sizeof(EdgeValue);
4132 18879676 : if (trc->edgeptr >= trc->term) {
4133 0 : trc->edgeptr = trc->term;
4134 0 : return;
4135 : }
4136 :
4137 18879676 : VerifyNode *node = trc->curnode;
4138 18879676 : uint32_t i = node->count;
4139 :
4140 18879676 : node->edges[i].thing = *thingp;
4141 18879676 : node->edges[i].kind = kind;
4142 18879676 : node->edges[i].label = trc->debugPrinter ? NULL : (char *)trc->debugPrintArg;
4143 18879676 : node->count++;
4144 : }
4145 :
4146 : static VerifyNode *
4147 18881175 : MakeNode(VerifyTracer *trc, void *thing, JSGCTraceKind kind)
4148 : {
4149 18881175 : NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing);
4150 18881175 : if (!p) {
4151 11393826 : VerifyNode *node = (VerifyNode *)trc->edgeptr;
4152 11393826 : trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
4153 11393826 : if (trc->edgeptr >= trc->term) {
4154 0 : trc->edgeptr = trc->term;
4155 0 : return NULL;
4156 : }
4157 :
4158 11393826 : node->thing = thing;
4159 11393826 : node->count = 0;
4160 11393826 : node->kind = kind;
4161 11393826 : trc->nodemap.add(p, thing, node);
4162 11393826 : return node;
4163 : }
4164 7487349 : return NULL;
4165 : }
4166 :
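/*
 * Note on the size arithmetic here and in MakeNode: VerifyNode declares
 * edges[1], so a node with N edges occupies
 * sizeof(VerifyNode) + (N - 1) * sizeof(EdgeValue) bytes, while a node
 * with no edges occupies sizeof(VerifyNode) - sizeof(EdgeValue).
 */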
4167 : static VerifyNode *
4169 21558511 : NextNode(VerifyNode *node)
4170 : {
4171 21558511 : if (node->count == 0)
4172 14505676 : return (VerifyNode *)((char *)node + sizeof(VerifyNode) - sizeof(EdgeValue));
4173 : else
4174 : return (VerifyNode *)((char *)node + sizeof(VerifyNode) +
4175 7052835 : sizeof(EdgeValue)*(node->count - 1));
4176 : }
4177 :
4178 : static void
4179 1624 : StartVerifyBarriers(JSContext *cx)
4180 : {
4181 1624 : JSRuntime *rt = cx->runtime;
4182 :
4183 1624 : if (rt->gcVerifyData || rt->gcIncrementalState != NO_INCREMENTAL)
4184 0 : return;
4185 :
4186 3248 : AutoLockGC lock(rt);
4187 3248 : AutoHeapSession session(rt);
4188 :
4189 1624 : if (!IsIncrementalGCSafe(rt))
4190 : return;
4191 :
4192 : #ifdef JS_THREADSAFE
4193 1499 : rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
4194 : #endif
4195 :
4196 2998 : AutoUnlockGC unlock(rt);
4197 :
4198 2998 : AutoCopyFreeListToArenas copy(rt);
4199 1499 : RecordNativeStackTopForGC(rt);
4200 :
4201 4397 : for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
4202 2898 : r.front()->bitmap.clear();
4203 :
4204 6777 : for (CompartmentsIter c(rt); !c.done(); c.next())
4205 5278 : c->discardJitCode(cx);
4206 :
4207 1499 : PurgeRuntime(rt);
4208 :
4209 1499 : VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer;
4210 :
4211 1499 : rt->gcNumber++;
4212 1499 : trc->number = rt->gcNumber;
4213 1499 : trc->count = 0;
4214 :
4215 1499 : JS_TracerInit(trc, rt, AccumulateEdge);
4216 :
4217 1499 : const size_t size = 64 * 1024 * 1024;
4218 1499 : trc->root = (VerifyNode *)js_malloc(size);
4219 1499 : JS_ASSERT(trc->root);
4220 1499 : trc->edgeptr = (char *)trc->root;
4221 1499 : trc->term = trc->edgeptr + size;
4222 :
4223 1499 : trc->nodemap.init();
4224 :
4225 : /* Create the root node. */
4226 1499 : trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0));
4227 :
4228 : /* We want MarkRuntime to save the roots to gcSavedRoots. */
4229 1499 : rt->gcIncrementalState = MARK_ROOTS;
4230 :
4231 : /* Make all the roots be edges emanating from the root node. */
4232 1499 : MarkRuntime(trc);
4233 :
4234 1499 : VerifyNode *node = trc->curnode;
4235 1499 : if (trc->edgeptr == trc->term)
4236 0 : goto oom;
4237 :
4238 : /* For each edge, make a node for it if one doesn't already exist. */
4239 11396824 : while ((char *)node < trc->edgeptr) {
4240 30273502 : for (uint32_t i = 0; i < node->count; i++) {
4241 18879676 : EdgeValue &e = node->edges[i];
4242 18879676 : VerifyNode *child = MakeNode(trc, e.thing, e.kind);
4243 18879676 : if (child) {
4244 11392327 : trc->curnode = child;
4245 11392327 : JS_TraceChildren(trc, e.thing, e.kind);
4246 : }
4247 18879676 : if (trc->edgeptr == trc->term)
4248 0 : goto oom;
4249 : }
4250 :
4251 11393826 : node = NextNode(node);
4252 : }
4253 :
4254 1499 : rt->gcVerifyData = trc;
4255 1499 : rt->gcIncrementalState = MARK;
4256 1499 : rt->gcMarker.start(rt);
4257 6777 : for (CompartmentsIter c(rt); !c.done(); c.next()) {
4258 5278 : c->needsBarrier_ = true;
4259 5278 : c->arenas.prepareForIncrementalGC(rt);
4260 : }
4261 :
4262 : return;
4263 :
4264 : oom:
4265 0 : rt->gcIncrementalState = NO_INCREMENTAL;
4266 0 : trc->~VerifyTracer();
4267 1499 : js_free(trc);
4268 : }
4269 :
4270 : static void
4271 759 : MarkFromAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
4272 : {
4273 759 : static_cast<Cell *>(*thingp)->markIfUnmarked();
4274 759 : }
4275 :
4276 : static bool
4277 2519 : IsMarkedOrAllocated(Cell *cell)
4278 : {
4279 2519 : return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
4280 : }
4281 :
4282 : static const uint32_t MAX_VERIFIER_EDGES = 1000;
4283 :
4284 : /*
4285 : * This function is called by EndVerifyBarriers for every heap edge. If the edge
4286 : * already existed in the original snapshot, we "cancel it out" by overwriting
4287 : * it with NULL. EndVerifyBarriers later asserts that the remaining non-NULL
4288 : * edges (i.e., the ones from the original snapshot that must have been
4289 : * modified) must point to marked objects.
4290 : */
4291 : static void
4292 10642191 : CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
4293 : {
4294 10642191 : VerifyTracer *trc = (VerifyTracer *)jstrc;
4295 10642191 : VerifyNode *node = trc->curnode;
4296 :
4297 : /* Avoid n^2 behavior. */
4298 10642191 : if (node->count > MAX_VERIFIER_EDGES)
4299 0 : return;
4300 :
4301 49198330 : for (uint32_t i = 0; i < node->count; i++) {
4302 49196490 : if (node->edges[i].thing == *thingp) {
4303 10640351 : JS_ASSERT(node->edges[i].kind == kind);
4304 10640351 : node->edges[i].thing = NULL;
4305 10640351 : return;
4306 : }
4307 : }
4308 :
4309 : /*
4310 : * Anything that is reachable now should have been reachable before, or else
4311 : * it should be marked.
4312 : */
4313 1840 : NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
4314 1840 : JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
4315 : }
4316 :
4317 : static void
4318 6403163 : CheckReachable(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
4319 : {
4320 6403163 : VerifyTracer *trc = (VerifyTracer *)jstrc;
4321 6403163 : NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
4322 6403163 : JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
4323 6403163 : }
4324 :
4325 : static void
4326 1399 : EndVerifyBarriers(JSContext *cx)
4327 : {
4328 1399 : JSRuntime *rt = cx->runtime;
4329 :
4330 2798 : AutoLockGC lock(rt);
4331 2798 : AutoHeapSession session(rt);
4332 :
4333 : #ifdef JS_THREADSAFE
4334 1399 : rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
4335 : #endif
4336 :
4337 2798 : AutoUnlockGC unlock(rt);
4338 :
4339 2798 : AutoCopyFreeListToArenas copy(rt);
4340 1399 : RecordNativeStackTopForGC(rt);
4341 :
4342 1399 : VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
4343 :
4344 1399 : if (!trc)
4345 : return;
4346 :
4347 : /*
4348 : * We need to bump gcNumber so that the methodjit knows that jitcode has
4349 : * been discarded.
4350 : */
4351 1399 : JS_ASSERT(trc->number == rt->gcNumber);
4352 1399 : rt->gcNumber++;
4353 :
4354 : /* We need to disable barriers before tracing, which may invoke barriers. */
4355 6658 : for (CompartmentsIter c(rt); !c.done(); c.next())
4356 5259 : c->needsBarrier_ = false;
4357 :
4358 6658 : for (CompartmentsIter c(rt); !c.done(); c.next())
4359 5259 : c->discardJitCode(cx);
4360 :
4361 1399 : rt->gcVerifyData = NULL;
4362 1399 : rt->gcIncrementalState = NO_INCREMENTAL;
4363 :
4364 1399 : JS_TracerInit(trc, rt, MarkFromAutorooter);
4365 :
4366 1399 : AutoGCRooter::traceAll(trc);
4367 :
4368 1399 : if (IsIncrementalGCSafe(rt)) {
4369 : /*
4370 : * Verify that all the current roots were reachable previously, or else
4371 : * are marked.
4372 : */
4373 1308 : JS_TracerInit(trc, rt, CheckReachable);
4374 1308 : MarkRuntime(trc, true);
4375 :
4376 1308 : JS_TracerInit(trc, rt, CheckEdge);
4377 :
4378 : /* Start after the roots. */
4379 1308 : VerifyNode *node = NextNode(trc->root);
4380 10165993 : while ((char *)node < trc->edgeptr) {
4381 10163377 : trc->curnode = node;
4382 10163377 : JS_TraceChildren(trc, node->thing, node->kind);
4383 :
4384 10163377 : if (node->count <= MAX_VERIFIER_EDGES) {
4385 20804620 : for (uint32_t i = 0; i < node->count; i++) {
4386 10641243 : void *thing = node->edges[i].thing;
4387 10641243 : JS_ASSERT_IF(thing, IsMarkedOrAllocated(static_cast<Cell *>(thing)));
4388 : }
4389 : }
4390 :
4391 10163377 : node = NextNode(node);
4392 : }
4393 : }
4394 :
4395 1399 : rt->gcMarker.reset();
4396 1399 : rt->gcMarker.stop();
4397 :
4398 1399 : trc->~VerifyTracer();
4399 2798 : js_free(trc);
4400 : }
4401 :
4402 : void
4403 19868 : FinishVerifier(JSRuntime *rt)
4404 : {
4405 19868 : if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
4406 100 : trc->~VerifyTracer();
4407 100 : js_free(trc);
4408 : }
4409 19868 : }
4410 :
4411 : void
4412 18 : VerifyBarriers(JSContext *cx)
4413 : {
4414 18 : JSRuntime *rt = cx->runtime;
4415 18 : if (rt->gcVerifyData)
4416 9 : EndVerifyBarriers(cx);
4417 : else
4418 9 : StartVerifyBarriers(cx);
4419 18 : }
4420 :
4421 : void
4422 2047139784 : MaybeVerifyBarriers(JSContext *cx, bool always)
4423 : {
4424 2047139784 : if (cx->runtime->gcZeal() != ZealVerifierValue)
4425 2047130922 : return;
4426 :
4427 8862 : uint32_t freq = cx->runtime->gcZealFrequency;
4428 :
4429 8862 : JSRuntime *rt = cx->runtime;
4430 8862 : if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
4431 8637 : if (++trc->count < freq && !always)
4432 7372 : return;
4433 :
4434 1265 : EndVerifyBarriers(cx);
4435 : }
4436 1490 : StartVerifyBarriers(cx);
4437 : }
4438 :
4439 : #endif /* JS_GC_ZEAL */
4440 :
4441 : } /* namespace gc */
4442 :
4443 0 : static void ReleaseAllJITCode(JSContext *cx)
4444 : {
4445 : #ifdef JS_METHODJIT
4446 0 : for (GCCompartmentsIter c(cx->runtime); !c.done(); c.next()) {
4447 0 : mjit::ClearAllFrames(c);
4448 0 : for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
4449 0 : JSScript *script = i.get<JSScript>();
4450 0 : mjit::ReleaseScriptCode(cx, script);
4451 : }
4452 : }
4453 : #endif
4454 0 : }
4455 :
4456 : /*
4457 : * There are three possible PCCount profiling states:
4458 : *
4459 : * 1. None: Neither scripts nor the runtime have counter information.
4460 : * 2. Profile: Active scripts have counter information, the runtime does not.
4461 : * 3. Query: Scripts do not have counter information, the runtime does.
4462 : *
4463 : * When starting to profile scripts, counting begins immediately, with all JIT
4464 : * code discarded and recompiled with counters as necessary. Active interpreter
4465 : * frames will not begin profiling until they begin executing another script
4466 : * (via a call or return).
4467 : *
4468 : * The below API functions manage transitions to new states, according
4469 : * to the table below.
4470 : *
4471 : * Old State
4472 : * -------------------------
4473 : * Function None Profile Query
4474 : * --------
4475 : * StartPCCountProfiling Profile Profile Profile
4476 : * StopPCCountProfiling None Query Query
4477 : * PurgePCCounts None None None
4478 : */
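
/*
 * Illustrative only: the transition table above as a hypothetical embedder
 * would drive it:
 *
 *     StartPCCountProfiling(cx);  // None -> Profile: scripts get counters
 *     ... run scripts ...
 *     StopPCCountProfiling(cx);   // Profile -> Query: counters move to
 *                                 //   rt->scriptPCCounters
 *     ... inspect the counters ...
 *     PurgePCCounts(cx);          // Query -> None
 */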
4479 :
4480 : static void
4481 0 : ReleaseScriptPCCounters(JSContext *cx)
4482 : {
4483 0 : JSRuntime *rt = cx->runtime;
4484 0 : JS_ASSERT(rt->scriptPCCounters);
4485 :
4486 0 : ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
4487 :
4488 0 : for (size_t i = 0; i < vec.length(); i++)
4489 0 : vec[i].counters.destroy(cx);
4490 :
4491 0 : cx->delete_(rt->scriptPCCounters);
4492 0 : rt->scriptPCCounters = NULL;
4493 0 : }
4494 :
4495 : JS_FRIEND_API(void)
4496 0 : StartPCCountProfiling(JSContext *cx)
4497 : {
4498 0 : JSRuntime *rt = cx->runtime;
4499 :
4500 0 : if (rt->profilingScripts)
4501 0 : return;
4502 :
4503 0 : if (rt->scriptPCCounters)
4504 0 : ReleaseScriptPCCounters(cx);
4505 :
4506 0 : ReleaseAllJITCode(cx);
4507 :
4508 0 : rt->profilingScripts = true;
4509 : }
4510 :
4511 : JS_FRIEND_API(void)
4512 0 : StopPCCountProfiling(JSContext *cx)
4513 : {
4514 0 : JSRuntime *rt = cx->runtime;
4515 :
4516 0 : if (!rt->profilingScripts)
4517 0 : return;
4518 0 : JS_ASSERT(!rt->scriptPCCounters);
4519 :
4520 0 : ReleaseAllJITCode(cx);
4521 :
4522 0 : ScriptOpcodeCountsVector *vec = cx->new_<ScriptOpcodeCountsVector>(SystemAllocPolicy());
4523 0 : if (!vec)
4524 0 : return;
4525 :
4526 0 : for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
4527 0 : for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
4528 0 : JSScript *script = i.get<JSScript>();
4529 0 : if (script->pcCounters && script->types) {
4530 0 : ScriptOpcodeCountsPair info;
4531 0 : info.script = script;
4532 0 : info.counters.steal(script->pcCounters);
4533 0 : if (!vec->append(info))
4534 0 : info.counters.destroy(cx);
4535 : }
4536 : }
4537 : }
4538 :
4539 0 : rt->profilingScripts = false;
4540 0 : rt->scriptPCCounters = vec;
4541 : }
4542 :
4543 : JS_FRIEND_API(void)
4544 0 : PurgePCCounts(JSContext *cx)
4545 : {
4546 0 : JSRuntime *rt = cx->runtime;
4547 :
4548 0 : if (!rt->scriptPCCounters)
4549 0 : return;
4550 0 : JS_ASSERT(!rt->profilingScripts);
4551 :
4552 0 : ReleaseScriptPCCounters(cx);
4553 : }
4554 :
4555 : } /* namespace js */
4556 :
4557 : JS_PUBLIC_API(void)
4558 6 : JS_IterateCompartments(JSRuntime *rt, void *data,
4559 : JSIterateCompartmentCallback compartmentCallback)
4560 : {
4561 6 : JS_ASSERT(!rt->gcRunning);
4562 :
4563 12 : AutoLockGC lock(rt);
4564 12 : AutoHeapSession session(rt);
4565 : #ifdef JS_THREADSAFE
4566 6 : rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
4567 : #endif
4568 12 : AutoUnlockGC unlock(rt);
4569 :
4570 24 : for (CompartmentsIter c(rt); !c.done(); c.next())
4571 18 : (*compartmentCallback)(rt, data, c);
4572 6 : }
4573 :
4574 : #if JS_HAS_XML_SUPPORT
4575 : extern size_t sE4XObjectsCreated;
4576 :
4577 : JSXML *
4578 4727724 : js_NewGCXML(JSContext *cx)
4579 : {
4580 4727724 : if (!cx->runningWithTrustedPrincipals())
4581 4727602 : ++sE4XObjectsCreated;
4582 :
4583 4727724 : return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
4584 : }
4585 : #endif
4586 :