1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : * David Mandelin <dmandelin@mozilla.com>
26 : * Jan de Mooij <jandemooij@gmail.com>
27 : *
28 : * Alternatively, the contents of this file may be used under the terms of
29 : * either of the GNU General Public License Version 2 or later (the "GPL"),
30 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 : * in which case the provisions of the GPL or the LGPL are applicable instead
32 : * of those above. If you wish to allow use of your version of this file only
33 : * under the terms of either the GPL or the LGPL, and not to allow others to
34 : * use your version of this file under the terms of the MPL, indicate your
35 : * decision by deleting the provisions above and replace them with the notice
36 : * and other provisions required by the GPL or the LGPL. If you do not delete
37 : * the provisions above, a recipient may use your version of this file under
38 : * the terms of any one of the MPL, the GPL or the LGPL.
39 : *
40 : * ***** END LICENSE BLOCK ***** */
41 :
42 : #include "MethodJIT.h"
43 : #include "jsnum.h"
44 : #include "jsbool.h"
45 : #include "jsiter.h"
46 : #include "Compiler.h"
47 : #include "StubCalls.h"
48 : #include "MonoIC.h"
49 : #include "PolyIC.h"
50 : #include "ICChecker.h"
51 : #include "Retcon.h"
52 : #include "assembler/jit/ExecutableAllocator.h"
53 : #include "assembler/assembler/LinkBuffer.h"
54 : #include "FrameState-inl.h"
55 : #include "jsobjinlines.h"
56 : #include "jsscriptinlines.h"
57 : #include "InlineFrameAssembler.h"
58 : #include "jscompartment.h"
59 : #include "jsopcodeinlines.h"
60 :
61 : #include "builtin/RegExp.h"
62 : #include "frontend/BytecodeEmitter.h"
63 : #include "vm/RegExpStatics.h"
64 : #include "vm/RegExpObject.h"
65 :
66 : #include "jsautooplen.h"
67 : #include "jstypedarrayinlines.h"
68 : #include "vm/RegExpObject-inl.h"
69 :
70 : using namespace js;
71 : using namespace js::mjit;
72 : #if defined(JS_POLYIC) || defined(JS_MONOIC)
73 : using namespace js::mjit::ic;
74 : #endif
75 : using namespace js::analyze;
76 :
/*
 * Bail out of the enclosing compiler method with 'retval' if any OOM has been
 * signalled, either by a CompilerAllocPolicy vector (oomInVector) or by one of
 * the two assemblers (main path 'masm', out-of-line path 'stubcc.masm').
 */
77 : #define RETURN_IF_OOM(retval) \
78 : JS_BEGIN_MACRO \
79 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
80 : return retval; \
81 : JS_END_MACRO
82 :
83 : /*
84 : * Number of times a script must be called or had a backedge before we try to
85 : * inline its calls.
86 : */
87 : static const size_t USES_BEFORE_INLINING = 10000;
88 :
/*
 * Construct a compiler for one chunk of 'outerScript'. Most members are
 * vectors parameterized on CompilerAllocPolicy, which records allocation
 * failure in 'oomInVector' rather than failing immediately; OOM is checked
 * later via RETURN_IF_OOM / CHECK_STATUS. thisFromCtor() is presumably a
 * helper that lets the initializer list pass the partially-constructed
 * Compiler to members — TODO confirm against its declaration.
 */
89 95862 : mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript,
90 : unsigned chunkIndex, bool isConstructing)
91 : : BaseCompiler(cx),
92 : outerScript(outerScript),
93 : chunkIndex(chunkIndex),
94 : isConstructing(isConstructing),
95 95862 : outerChunk(outerJIT()->chunkDescriptor(chunkIndex)),
96 : ssa(cx, outerScript),
/* Global object and its slots are only available for scripts with a global. */
97 95862 : globalObj(outerScript->hasGlobal() ? outerScript->global() : NULL),
98 92168 : globalSlots(globalObj ? globalObj->getRawSlots() : NULL),
99 95862 : frame(cx, *thisFromCtor(), masm, stubcc),
100 : a(NULL), outer(NULL), script(NULL), PC(NULL), loop(NULL),
101 95862 : inlineFrames(CompilerAllocPolicy(cx, *thisFromCtor())),
102 95862 : branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
103 : #if defined JS_MONOIC
104 95862 : getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
105 95862 : setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
106 95862 : callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
107 95862 : equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
108 : #endif
109 : #if defined JS_POLYIC
110 95862 : pics(CompilerAllocPolicy(cx, *thisFromCtor())),
111 95862 : getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
112 95862 : setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
113 : #endif
114 95862 : callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
115 95862 : callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
116 95862 : doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
117 95862 : fixedIntToDoubleEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
118 95862 : fixedDoubleToAnyEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
119 95862 : jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
120 95862 : jumpTableEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
121 95862 : loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
122 95862 : chunkEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
123 95862 : stubcc(cx, *thisFromCtor(), frame),
124 95862 : debugMode_(cx->compartment->debugMode()),
125 : inlining_(false),
126 : hasGlobalReallocation(false),
127 : oomInVector(false),
128 : overflowICSpace(false),
/* Snapshot the GC number; presumably used later to detect an intervening GC. */
129 : gcNumber(cx->runtime->gcNumber),
130 : applyTricks(NoApplyTricks),
131 2392856 : pcLengths(NULL)
132 : {
133 : /* Once a script starts getting really hot we will inline calls in it. */
/*
 * Inlining requires: no debug mode, type inference on, a known global, and
 * either a hot script (>= USES_BEFORE_INLINING uses) or the always-JIT option.
 */
134 148466 : if (!debugMode() && cx->typeInferenceEnabled() && globalObj &&
135 26401 : (outerScript->getUseCount() >= USES_BEFORE_INLINING ||
136 26203 : cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS))) {
137 25048 : inlining_ = true;
138 : }
139 95862 : }
140 :
/*
 * Top-level entry: run performCompilation() and, on a hard failure (anything
 * other than Compile_Okay or Compile_Retry), release any JIT code for this
 * script/constructing pair, mark it unjittable so we never retry, and flag
 * the script's function as uninlineable so other compilations stop trying
 * to inline it.
 */
141 : CompileStatus
142 95862 : mjit::Compiler::compile()
143 : {
144 95862 : JS_ASSERT(!outerChunkRef().chunk);
145 :
146 95862 : CompileStatus status = performCompilation();
147 95862 : if (status != Compile_Okay && status != Compile_Retry) {
148 2131 : JSScript::JITScriptHandle *jith = outerScript->jitHandle(isConstructing);
149 2131 : JSScript::ReleaseCode(cx->runtime->defaultFreeOp(), jith);
150 2131 : jith->setUnjittable();
151 :
152 2131 : if (outerScript->function()) {
153 1482 : outerScript->uninlineable = true;
154 1482 : types::MarkTypeObjectFlags(cx, outerScript->function(),
155 1482 : types::OBJECT_FLAG_UNINLINEABLE);
156 : }
157 : }
158 :
159 95862 : return status;
160 : }
161 :
/*
 * Ensure 'script' has been analyzed (and type-inferred, when inference is
 * enabled) and is compileable. Returns Compile_Abort for scripts we will
 * never compile (cleared global, uncompileable opcodes, failed analysis)
 * and Compile_Error when the analysis itself could not be run (e.g. OOM).
 */
162 : CompileStatus
163 103394 : mjit::Compiler::checkAnalysis(JSScript *script)
164 : {
165 103394 : if (script->hasClearedGlobal()) {
166 0 : JaegerSpew(JSpew_Abort, "script has a cleared global\n");
167 0 : return Compile_Abort;
168 : }
169 :
170 103394 : if (!script->ensureRanAnalysis(cx, NULL))
171 0 : return Compile_Error;
172 :
173 103394 : if (!script->analysis()->compileable()) {
174 2131 : JaegerSpew(JSpew_Abort, "script has uncompileable opcodes\n");
175 2131 : return Compile_Abort;
176 : }
177 :
178 101263 : if (cx->typeInferenceEnabled() && !script->ensureRanInference(cx))
179 0 : return Compile_Error;
180 :
181 101263 : ScriptAnalysis *analysis = script->analysis();
182 101263 : analysis->assertMatchingDebugMode();
183 101263 : if (analysis->failed()) {
184 0 : JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
185 0 : return Compile_Abort;
186 : }
187 :
188 101263 : return Compile_Okay;
189 : }
190 :
/*
 * Register 'script' as an inline frame in the cross-script SSA, rooted at
 * call site 'parentpc' in frame 'parent' and sitting at stack depth 'depth'.
 * Recursively scans the newly added frame for further inlineable calls.
 */
191 : CompileStatus
192 2942 : mjit::Compiler::addInlineFrame(JSScript *script, uint32_t depth,
193 : uint32_t parent, jsbytecode *parentpc)
194 : {
195 2942 : JS_ASSERT(inlining());
196 :
197 2942 : CompileStatus status = checkAnalysis(script);
198 2942 : if (status != Compile_Okay)
199 0 : return status;
200 :
201 2942 : if (!ssa.addInlineFrame(script, depth, parent, parentpc))
202 0 : return Compile_Error;
203 :
/* The frame just appended is the last one; recurse into its call sites. */
204 2942 : uint32_t index = ssa.iterFrame(ssa.numFrames() - 1).index;
205 2942 : return scanInlineCalls(index, depth);
206 : }
207 :
/*
 * Walk the bytecode of SSA frame 'index' (at stack depth 'depth') looking for
 * JSOP_CALL sites where every possible callee is a known, inlineable,
 * interpreted function. For each such site, freeze the callee type set and
 * add all callees as inline frames via addInlineFrame (which recurses).
 * Sites failing any heuristic are simply skipped (the loop 'continue's);
 * only checkAnalysis failures propagate an error out of this function.
 */
208 : CompileStatus
209 27677 : mjit::Compiler::scanInlineCalls(uint32_t index, uint32_t depth)
210 : {
211 : /* Maximum number of calls we will inline at the same site. */
212 : static const uint32_t INLINE_SITE_LIMIT = 5;
213 :
214 27677 : JS_ASSERT(inlining() && globalObj);
215 :
216 : /* Not inlining yet from 'new' scripts. */
217 27677 : if (isConstructing)
218 295 : return Compile_Okay;
219 :
220 27382 : JSScript *script = ssa.getFrame(index).script;
221 27382 : ScriptAnalysis *analysis = script->analysis();
222 :
223 : /* Don't inline from functions which could have a non-global scope object. */
224 142615 : if (!script->hasGlobal() ||
225 27381 : script->global() != globalObj ||
226 43926 : (script->function() && script->function()->getParent() != globalObj) ||
227 43926 : (script->function() && script->function()->isHeavyweight()) ||
228 : script->isActiveEval) {
229 4523 : return Compile_Okay;
230 : }
231 :
/* For the outer frame, restrict the scan to this compilation chunk. */
232 22859 : uint32_t nextOffset = 0;
233 22859 : uint32_t lastOffset = script->length;
234 :
235 22859 : if (index == CrossScriptSSA::OUTER_FRAME) {
236 19918 : nextOffset = outerChunk.begin;
237 19918 : lastOffset = outerChunk.end;
238 : }
239 :
240 1054590 : while (nextOffset < lastOffset) {
241 1008889 : uint32_t offset = nextOffset;
242 1008889 : jsbytecode *pc = script->code + offset;
243 1008889 : nextOffset = offset + GetBytecodeLength(pc);
244 :
/* Skip unreachable bytecode (no analysis info). */
245 1008889 : Bytecode *code = analysis->maybeCode(pc);
246 1008889 : if (!code)
247 7531 : continue;
248 :
249 : /* :XXX: Not yet inlining 'new' calls. */
250 1001358 : if (JSOp(*pc) != JSOP_CALL)
251 950689 : continue;
252 :
253 : /* Not inlining at monitored call sites or those with type barriers. */
254 50669 : if (code->monitoredTypes || code->monitoredTypesReturn || analysis->typeBarriers(cx, pc) != NULL)
255 21476 : continue;
256 :
/* Callee is the value popped below the argc arguments. */
257 29193 : uint32_t argc = GET_ARGC(pc);
258 29193 : types::TypeSet *calleeTypes = analysis->poppedTypes(pc, argc + 1);
259 :
260 29193 : if (calleeTypes->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT)
261 23328 : continue;
262 :
263 5865 : if (calleeTypes->getObjectCount() >= INLINE_SITE_LIMIT)
264 18 : continue;
265 :
266 : /*
267 : * Compute the maximum height we can grow the stack for inlined frames.
268 : * We always reserve space for loop temporaries, for an extra stack
269 : * frame pushed when making a call from the deepest inlined frame, and
270 : * for the temporary slot used by type barriers.
271 : */
272 : uint32_t stackLimit = outerScript->nslots + StackSpace::STACK_JIT_EXTRA
273 5847 : - VALUES_PER_STACK_FRAME - FrameState::TEMPORARY_LIMIT - 1;
274 :
275 : /* Compute the depth of any frames inlined at this site. */
276 5847 : uint32_t nextDepth = depth + VALUES_PER_STACK_FRAME + script->nfixed + code->stackDepth;
277 :
278 : /*
279 : * Scan each of the possible callees for other conditions precluding
280 : * inlining. We only inline at a call site if all callees are inlineable.
281 : */
282 5847 : unsigned count = calleeTypes->getObjectCount();
283 5847 : bool okay = true;
284 8812 : for (unsigned i = 0; i < count; i++) {
/* Only singleton objects (known functions) are acceptable callees. */
285 6038 : if (calleeTypes->getTypeObject(i) != NULL) {
286 649 : okay = false;
287 649 : break;
288 : }
289 :
290 5389 : JSObject *obj = calleeTypes->getSingleObject(i);
291 5389 : if (!obj)
292 0 : continue;
293 :
294 5389 : if (!obj->isFunction()) {
295 3 : okay = false;
296 3 : break;
297 : }
298 :
299 5386 : JSFunction *fun = obj->toFunction();
300 5386 : if (!fun->isInterpreted()) {
301 0 : okay = false;
302 0 : break;
303 : }
304 5386 : JSScript *script = fun->script();
305 :
306 : /*
307 : * Don't inline calls to scripts which haven't been analyzed.
308 : * We need to analyze the inlined scripts to compile them, and
309 : * doing so can change type information we have queried already
310 : * in making inlining decisions.
311 : */
312 5386 : if (!script->hasAnalysis() || !script->analysis()->ranInference()) {
313 249 : okay = false;
314 249 : break;
315 : }
316 :
317 : /*
318 : * The outer and inner scripts must have the same scope. This only
319 : * allows us to inline calls between non-inner functions. Also
320 : * check for consistent strictness between the functions.
321 : */
322 10274 : if (!globalObj ||
323 5137 : fun->getParent() != globalObj ||
324 : outerScript->strictModeCode != script->strictModeCode) {
325 79 : okay = false;
326 79 : break;
327 : }
328 :
329 : /* We can't cope with inlining recursive functions yet. */
330 5058 : uint32_t nindex = index;
331 17899 : while (nindex != CrossScriptSSA::INVALID_FRAME) {
332 7783 : if (ssa.getFrame(nindex).script == script)
333 468 : okay = false;
334 7783 : nindex = ssa.getFrame(nindex).parent;
335 : }
336 5058 : if (!okay)
337 468 : break;
338 :
339 : /* Watch for excessively deep nesting of inlined frames. */
340 4590 : if (nextDepth + script->nslots >= stackLimit) {
341 0 : okay = false;
342 0 : break;
343 : }
344 :
345 4590 : if (!script->types || !script->types->hasScope()) {
346 0 : okay = false;
347 0 : break;
348 : }
349 :
/* checkAnalysis failure is the only error that escapes this scan. */
350 4590 : CompileStatus status = checkAnalysis(script);
351 4590 : if (status != Compile_Okay)
352 17 : return status;
353 :
354 4573 : if (!script->analysis()->inlineable(argc)) {
355 1451 : okay = false;
356 1451 : break;
357 : }
358 :
359 3122 : if (types::TypeSet::HasObjectFlags(cx, fun->getType(cx),
360 3122 : types::OBJECT_FLAG_UNINLINEABLE)) {
361 151 : okay = false;
362 151 : break;
363 : }
364 :
365 : /*
366 : * Don't inline scripts which use 'this' if it is possible they
367 : * could be called with a 'this' value requiring wrapping. During
368 : * inlining we do not want to modify frame entries belonging to the
369 : * caller.
370 : */
371 4071 : if (script->analysis()->usesThisValue() &&
372 1100 : types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT) {
373 6 : okay = false;
374 6 : break;
375 : }
376 : }
377 5830 : if (!okay)
378 3056 : continue;
379 :
/* Freeze the callee set so recompilation occurs if it later changes. */
380 2774 : calleeTypes->addFreeze(cx);
381 :
382 : /*
383 : * Add the inline frames to the cross script SSA. We will pick these
384 : * back up when compiling the call site.
385 : */
386 5716 : for (unsigned i = 0; i < count; i++) {
387 2942 : JSObject *obj = calleeTypes->getSingleObject(i);
388 2942 : if (!obj)
389 0 : continue;
390 :
391 2942 : JSFunction *fun = obj->toFunction();
392 2942 : JSScript *script = fun->script();
393 :
394 2942 : CompileStatus status = addInlineFrame(script, nextDepth, index, pc);
395 2942 : if (status != Compile_Okay)
396 0 : return status;
397 : }
398 : }
399 :
400 22842 : return Compile_Okay;
401 : }
402 :
/*
 * Begin compiling 'script' (the outer script, or an inlined callee with
 * 'argc' arguments). Allocates an ActiveFrame describing the compilation
 * state, links it to the current frame chain, allocates the bytecode->label
 * jump map, and (with inference enabled) seeds per-variable type info.
 * On success, compiler cursors (script/analysis/PC/a) point at the new frame.
 */
403 : CompileStatus
404 96663 : mjit::Compiler::pushActiveFrame(JSScript *script, uint32_t argc)
405 : {
406 96663 : if (cx->runtime->profilingScripts && !script->scriptCounts)
407 0 : script->initScriptCounts(cx);
408 :
409 96663 : ActiveFrame *newa = OffTheBooks::new_<ActiveFrame>(cx);
410 96663 : if (!newa) {
411 0 : js_ReportOutOfMemory(cx);
412 0 : return Compile_Error;
413 : }
414 :
415 96663 : newa->parent = a;
416 96663 : if (a)
417 2932 : newa->parentPC = PC;
418 96663 : newa->script = script;
419 96663 : newa->mainCodeStart = masm.size();
420 96663 : newa->stubCodeStart = stubcc.size();
421 :
/* The first pushed frame becomes 'outer'; later ones are inline frames. */
422 96663 : if (outer) {
423 2932 : newa->inlineIndex = uint32_t(inlineFrames.length());
424 2932 : inlineFrames.append(newa);
425 : } else {
426 93731 : newa->inlineIndex = CrossScriptSSA::OUTER_FRAME;
427 93731 : outer = newa;
428 : }
429 96663 : JS_ASSERT(ssa.getFrame(newa->inlineIndex).script == script);
430 :
431 96663 : newa->inlinePCOffset = ssa.frameLength(newa->inlineIndex);
432 :
433 96663 : ScriptAnalysis *newAnalysis = script->analysis();
434 :
435 : #ifdef JS_METHODJIT_SPEW
/* Debug-only: dump liveness of non-escaping args and locals. */
436 96663 : if (cx->typeInferenceEnabled() && IsJaegerSpewChannelActive(JSpew_Regalloc)) {
437 0 : unsigned nargs = script->function() ? script->function()->nargs : 0;
438 0 : for (unsigned i = 0; i < nargs; i++) {
439 0 : uint32_t slot = ArgSlot(i);
440 0 : if (!newAnalysis->slotEscapes(slot)) {
441 0 : JaegerSpew(JSpew_Regalloc, "Argument %u:", i);
442 0 : newAnalysis->liveness(slot).print();
443 : }
444 : }
445 0 : for (unsigned i = 0; i < script->nfixed; i++) {
446 0 : uint32_t slot = LocalSlot(script, i);
447 0 : if (!newAnalysis->slotEscapes(slot)) {
448 0 : JaegerSpew(JSpew_Regalloc, "Local %u:", i);
449 0 : newAnalysis->liveness(slot).print();
450 : }
451 : }
452 : }
453 : #endif
454 :
455 96663 : if (!frame.pushActiveFrame(script, argc)) {
456 0 : js_ReportOutOfMemory(cx);
457 0 : return Compile_Error;
458 : }
459 :
/* One Label per bytecode offset; debug builds clear them for sanity. */
460 96663 : newa->jumpMap = (Label *)OffTheBooks::malloc_(sizeof(Label) * script->length);
461 96663 : if (!newa->jumpMap) {
462 0 : js_ReportOutOfMemory(cx);
463 0 : return Compile_Error;
464 : }
465 : #ifdef DEBUG
466 23342968 : for (uint32_t i = 0; i < script->length; i++)
467 23246305 : newa->jumpMap[i] = Label();
468 : #endif
469 :
470 96663 : if (cx->typeInferenceEnabled()) {
471 60313 : CompileStatus status = prepareInferenceTypes(script, newa);
472 60313 : if (status != Compile_Okay)
473 0 : return status;
474 : }
475 :
476 96663 : this->script = script;
477 96663 : this->analysis = newAnalysis;
478 96663 : this->PC = script->code;
479 96663 : this->a = newa;
480 :
481 96663 : return Compile_Okay;
482 : }
483 :
/*
 * Finish compiling an inlined frame: record where its generated code ends
 * and restore the compiler cursors to the parent frame. Note the order:
 * PC is taken from the child's parentPC *before* 'a' is reassigned, and
 * script/analysis are then read from the (new) parent frame.
 */
484 : void
485 2932 : mjit::Compiler::popActiveFrame()
486 : {
487 2932 : JS_ASSERT(a->parent);
488 2932 : a->mainCodeEnd = masm.size();
489 2932 : a->stubCodeEnd = stubcc.size();
490 2932 : this->PC = a->parentPC;
491 2932 : this->a = (ActiveFrame *) a->parent;
492 2932 : this->script = a->script;
493 2932 : this->analysis = this->script->analysis();
494 :
495 2932 : frame.popActiveFrame();
496 : }
497 :
/*
 * Propagate a non-Okay CompileStatus out of performCompilation. If the
 * failure coincides with a recorded OOM (vector or assembler), report
 * out-of-memory on the context before returning the status.
 */
498 : #define CHECK_STATUS(expr) \
499 : JS_BEGIN_MACRO \
500 : CompileStatus status_ = (expr); \
501 : if (status_ != Compile_Okay) { \
502 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
503 : js_ReportOutOfMemory(cx); \
504 : return status_; \
505 : } \
506 : JS_END_MACRO
507 :
/*
 * Drive the whole compilation pipeline for this chunk: analysis checks,
 * inline-call scanning (if inlining), frame setup, then prologue (chunk 0
 * only), method body, epilogue (last chunk only), and final assembly via
 * finishThisUp(). All steps funnel errors through CHECK_STATUS.
 */
508 : CompileStatus
509 95862 : mjit::Compiler::performCompilation()
510 : {
511 : JaegerSpew(JSpew_Scripts,
512 : "compiling script (file \"%s\") (line \"%d\") (length \"%d\") (chunk \"%d\")\n",
513 95862 : outerScript->filename, outerScript->lineno, outerScript->length, chunkIndex);
514 :
515 95862 : if (inlining()) {
516 : JaegerSpew(JSpew_Inlining,
517 : "inlining calls in script (file \"%s\") (line \"%d\")\n",
518 25048 : outerScript->filename, outerScript->lineno);
519 : }
520 :
521 : #ifdef JS_METHODJIT_SPEW
522 : Profiler prof;
523 95862 : prof.start();
524 : #endif
525 :
526 : #ifdef JS_METHODJIT
527 95862 : outerScript->debugMode = debugMode();
528 : #endif
529 :
530 95862 : JS_ASSERT(cx->compartment->activeInference);
531 :
/* Scope for AutoEnterCompilation: type-inference bookkeeping for this chunk. */
532 : {
533 191724 : types::AutoEnterCompilation enter(cx, outerScript, isConstructing, chunkIndex);
534 :
535 95862 : CHECK_STATUS(checkAnalysis(outerScript));
536 93748 : if (inlining())
537 24735 : CHECK_STATUS(scanInlineCalls(CrossScriptSSA::OUTER_FRAME, 0));
538 93731 : CHECK_STATUS(pushActiveFrame(outerScript, 0));
539 :
/* Per-pc code-length table, only needed for profiling/native address info. */
540 93731 : if (outerScript->scriptCounts || Probes::wantNativeAddressInfo(cx)) {
541 0 : size_t length = ssa.frameLength(ssa.numFrames() - 1);
542 0 : pcLengths = (PCLengthEntry *) OffTheBooks::calloc_(sizeof(pcLengths[0]) * length);
543 0 : if (!pcLengths)
544 0 : return Compile_Error;
545 : }
546 :
547 93731 : if (chunkIndex == 0)
548 92564 : CHECK_STATUS(generatePrologue());
549 93731 : CHECK_STATUS(generateMethod());
550 93652 : if (outerJIT() && chunkIndex == outerJIT()->nchunks - 1)
551 92388 : CHECK_STATUS(generateEpilogue());
552 93652 : CHECK_STATUS(finishThisUp());
553 : }
554 :
555 : #ifdef JS_METHODJIT_SPEW
556 93652 : prof.stop();
557 93652 : JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
558 : #endif
559 :
560 : JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%u\")\n",
561 93652 : outerChunkRef().chunk->code.m_code.executableAddress(),
562 187304 : unsigned(outerChunkRef().chunk->code.m_size));
563 :
564 93652 : return Compile_Okay;
565 : }
566 :
567 : #undef CHECK_STATUS
568 :
/* Base frame record: starts unlinked, with an invalid inline index. */
569 96663 : mjit::JSActiveFrame::JSActiveFrame()
570 96663 : : parent(NULL), parentPC(NULL), script(NULL), inlineIndex(UINT32_MAX)
571 : {
572 96663 : }
573 :
/* Per-compilation frame state; jumpMap/varTypes are allocated lazily. */
574 96663 : mjit::Compiler::ActiveFrame::ActiveFrame(JSContext *cx)
575 : : jumpMap(NULL),
576 : varTypes(NULL), needReturnValue(false),
577 : syncReturnValue(false), returnValueDouble(false), returnSet(false),
578 96663 : returnEntry(NULL), returnJumps(NULL), exitState(NULL)
579 96663 : {}
580 :
/* Release the malloc'ed jump map and (if inference ran) the var-type array. */
581 96663 : mjit::Compiler::ActiveFrame::~ActiveFrame()
582 : {
583 96663 : js::Foreground::free_(jumpMap);
584 96663 : if (varTypes)
585 60313 : js::Foreground::free_(varTypes);
586 96663 : }
587 :
/*
 * Tear down the outer frame, all inline frames, and any LoopState objects
 * still on the loop stack (walking outward via loop->outer).
 */
588 191724 : mjit::Compiler::~Compiler()
589 : {
590 95862 : if (outer)
591 93731 : cx->delete_(outer);
592 98794 : for (unsigned i = 0; i < inlineFrames.length(); i++)
593 2932 : cx->delete_(inlineFrames[i]);
594 191737 : while (loop) {
595 13 : LoopState *nloop = loop->outer;
596 13 : cx->delete_(loop);
597 13 : loop = nloop;
598 : }
599 95862 : }
600 :
/*
 * Allocate and seed the per-slot VarType array for 'a' from the script's
 * inferred slot types. See the long comment below for the invariant this
 * array maintains during compilation.
 */
601 : CompileStatus
602 60313 : mjit::Compiler::prepareInferenceTypes(JSScript *script, ActiveFrame *a)
603 : {
604 : /*
605 : * During our walk of the script, we need to preserve the invariant that at
606 : * join points the in memory type tag is always in sync with the known type
607 : * tag of the variable's SSA value at that join point. In particular, SSA
608 : * values inferred as (int|double) must in fact be doubles, stored either
609 : * in floating point registers or in memory. There is an exception for
610 : * locals whose value is currently dead, whose type might not be synced.
611 : *
612 : * To ensure this, we need to know the SSA values for each variable at each
613 : * join point, which the SSA analysis does not store explicitly. These can
614 : * be recovered, though. During the forward walk, the SSA value of a var
615 : * (and its associated type set) change only when we see an explicit assign
616 : * to the var or get to a join point with a phi node for that var. So we
617 : * can duplicate the effects of that walk here by watching for writes to
618 : * vars (updateVarTypes) and new phi nodes at join points.
619 : *
620 : * When we get to a branch and need to know a variable's value at the
621 : * branch target, we know it will either be a phi node at the target or
622 : * the variable's current value, as no phi node is created at the target
623 : * only if a variable has the same value on all incoming edges.
624 : */
625 :
626 : a->varTypes = (VarType *)
627 60313 : OffTheBooks::calloc_(TotalSlots(script) * sizeof(VarType));
628 60313 : if (!a->varTypes) {
629 0 : js_ReportOutOfMemory(cx);
630 0 : return Compile_Error;
631 : }
632 :
/* Initialize every argument/local slot from the inferred slot type sets. */
633 230955 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
634 170642 : VarType &vt = a->varTypes[slot];
635 170642 : vt.setTypes(types::TypeScript::SlotTypes(script, slot));
636 : }
637 :
638 60313 : return Compile_Okay;
639 : }
640 :
641 : /*
642 : * Number of times a script must be called or have back edges taken before we
643 : * run it in the methodjit. We wait longer if type inference is enabled, to
644 : * allow more gathering of type information and less recompilation.
645 : */
646 : static const size_t USES_BEFORE_COMPILE = 16;
647 : static const size_t INFER_USES_BEFORE_COMPILE = 40;
648 :
649 : /* Target maximum size, in bytecode length, for a compiled chunk of a script. */
650 : static uint32_t CHUNK_LIMIT = 1500;
651 :
652 : void
653 27 : mjit::SetChunkLimit(uint32_t limit)
654 : {
655 27 : if (limit)
656 27 : CHUNK_LIMIT = limit;
657 27 : }
658 :
/*
 * Build the JITScript skeleton for 'script': partition its bytecode into
 * compilation chunks (one chunk for short scripts or without inference;
 * otherwise split near CHUNK_LIMIT at safe boundaries), record every
 * control-flow edge that crosses a chunk boundary, and generate one shim
 * stub per cross-chunk edge that calls stubs::CrossChunkShim when the
 * target chunk is not yet compiled. Returns NULL on OOM.
 */
659 : JITScript *
660 75536 : MakeJITScript(JSContext *cx, JSScript *script)
661 : {
662 75536 : if (!script->ensureRanAnalysis(cx, NULL))
663 0 : return NULL;
664 :
665 75536 : ScriptAnalysis *analysis = script->analysis();
666 :
667 151072 : Vector<ChunkDescriptor> chunks(cx);
668 151072 : Vector<CrossChunkEdge> edges(cx);
669 :
/* Short scripts (or no inference) compile as a single whole-script chunk. */
670 75536 : if (script->length < CHUNK_LIMIT || !cx->typeInferenceEnabled()) {
671 75298 : ChunkDescriptor desc;
672 75298 : desc.begin = 0;
673 75298 : desc.end = script->length;
674 75298 : if (!chunks.append(desc))
675 0 : return NULL;
676 : } else {
677 238 : if (!script->ensureRanInference(cx))
678 0 : return NULL;
679 :
680 : /* Outgoing edges within the current chunk. */
681 476 : Vector<CrossChunkEdge> currentEdges(cx);
682 238 : uint32_t chunkStart = 0;
683 :
684 238 : unsigned offset, nextOffset = 0;
685 1244028 : while (nextOffset < script->length) {
686 1243552 : offset = nextOffset;
687 :
688 1243552 : jsbytecode *pc = script->code + offset;
689 1243552 : JSOp op = JSOp(*pc);
690 :
691 1243552 : nextOffset = offset + GetBytecodeLength(pc);
692 :
693 1243552 : Bytecode *code = analysis->maybeCode(offset);
694 1243552 : if (!code)
695 14352 : continue;
696 :
697 : /* Whether this should be the last opcode in the chunk. */
698 1229200 : bool finishChunk = false;
699 :
700 : /* Keep going, override finishChunk. */
701 1229200 : bool preserveChunk = false;
702 :
703 : /*
704 : * Add an edge for opcodes which perform a branch. Skip LABEL ops,
705 : * which do not actually branch. XXX LABEL should not be JOF_JUMP.
706 : */
707 1229200 : uint32_t type = JOF_TYPE(js_CodeSpec[op].format);
708 1229200 : if (type == JOF_JUMP && op != JSOP_LABEL) {
709 30006 : CrossChunkEdge edge;
710 30006 : edge.source = offset;
711 30006 : edge.target = FollowBranch(cx, script, pc - script->code);
712 30006 : if (edge.target < offset) {
713 : /* Always end chunks after loop back edges. */
714 545 : finishChunk = true;
715 545 : if (edge.target < chunkStart) {
716 69 : analysis->getCode(edge.target).safePoint = true;
717 69 : if (!edges.append(edge))
718 0 : return NULL;
719 : }
720 29461 : } else if (edge.target == nextOffset) {
721 : /*
722 : * Override finishChunk for bytecodes which directly
723 : * jump to their fallthrough opcode ('if (x) {}'). This
724 : * creates two CFG edges with the same source/target, which
725 : * will confuse the compiler's edge patching code.
726 : */
727 12 : preserveChunk = true;
728 : } else {
/* Forward branch within this chunk (so far); may become a cross-chunk edge later. */
729 29449 : if (!currentEdges.append(edge))
730 0 : return NULL;
731 : }
732 : }
733 :
/* TABLESWITCH: add an edge for the default target and each case target. */
734 1229200 : if (op == JSOP_TABLESWITCH) {
735 8 : jsbytecode *pc2 = pc;
736 8 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
737 8 : pc2 += JUMP_OFFSET_LEN;
738 8 : int32_t low = GET_JUMP_OFFSET(pc2);
739 8 : pc2 += JUMP_OFFSET_LEN;
740 8 : int32_t high = GET_JUMP_OFFSET(pc2);
741 8 : pc2 += JUMP_OFFSET_LEN;
742 :
743 8 : CrossChunkEdge edge;
744 8 : edge.source = offset;
745 8 : edge.target = defaultOffset;
746 8 : if (!currentEdges.append(edge))
747 0 : return NULL;
748 :
749 28 : for (int32_t i = low; i <= high; i++) {
750 20 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
751 20 : if (targetOffset != offset) {
752 : /*
753 : * This can end up inserting duplicate edges, all but
754 : * the first of which will be ignored.
755 : */
756 20 : CrossChunkEdge edge;
757 20 : edge.source = offset;
758 20 : edge.target = targetOffset;
759 20 : if (!currentEdges.append(edge))
760 0 : return NULL;
761 : }
762 20 : pc2 += JUMP_OFFSET_LEN;
763 : }
764 : }
765 :
/* LOOKUPSWITCH: same treatment, iterating the (value, target) pairs. */
766 1229200 : if (op == JSOP_LOOKUPSWITCH) {
767 4 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
768 4 : jsbytecode *pc2 = pc + JUMP_OFFSET_LEN;
769 4 : unsigned npairs = GET_UINT16(pc2);
770 4 : pc2 += UINT16_LEN;
771 :
772 4 : CrossChunkEdge edge;
773 4 : edge.source = offset;
774 4 : edge.target = defaultOffset;
775 4 : if (!currentEdges.append(edge))
776 0 : return NULL;
777 :
778 18 : while (npairs) {
779 10 : pc2 += UINT32_INDEX_LEN;
780 10 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
781 10 : CrossChunkEdge edge;
782 10 : edge.source = offset;
783 10 : edge.target = targetOffset;
784 10 : if (!currentEdges.append(edge))
785 0 : return NULL;
786 10 : pc2 += JUMP_OFFSET_LEN;
787 10 : npairs--;
788 : }
789 : }
790 :
/* Cut the chunk once it has grown past the target size. */
791 1229200 : if (unsigned(offset - chunkStart) > CHUNK_LIMIT)
792 2548 : finishChunk = true;
793 :
794 1229200 : if (nextOffset >= script->length || !analysis->maybeCode(nextOffset)) {
795 : /* Ensure that chunks do not start on unreachable opcodes. */
796 14558 : preserveChunk = true;
797 : } else {
798 : /*
799 : * Start new chunks at the opcode before each loop head.
800 : * This ensures that the initial goto for loops is included in
801 : * the same chunk as the loop itself.
802 : */
803 1214642 : jsbytecode *nextpc = script->code + nextOffset;
804 :
805 : /*
806 : * Don't insert a chunk boundary in the middle of two opcodes
807 : * which may be fused together.
808 : */
809 1214642 : switch (JSOp(*nextpc)) {
810 : case JSOP_POP:
811 : case JSOP_IFNE:
812 : case JSOP_IFEQ:
813 130683 : preserveChunk = true;
814 130683 : break;
815 : default:
816 1083959 : break;
817 : }
818 :
819 1214642 : uint32_t afterOffset = nextOffset + GetBytecodeLength(nextpc);
820 1214642 : if (afterOffset < script->length) {
821 2415059 : if (analysis->maybeCode(afterOffset) &&
822 1200084 : JSOp(script->code[afterOffset]) == JSOP_LOOPHEAD &&
823 543 : analysis->getLoop(afterOffset))
824 : {
825 541 : finishChunk = true;
826 : }
827 : }
828 : }
829 :
830 1229200 : if (finishChunk && !preserveChunk) {
831 3218 : ChunkDescriptor desc;
832 3218 : desc.begin = chunkStart;
833 3218 : desc.end = nextOffset;
834 3218 : if (!chunks.append(desc))
835 0 : return NULL;
836 :
837 : /* Add an edge for fallthrough from this chunk to the next one. */
838 3218 : if (!BytecodeNoFallThrough(op)) {
839 3216 : CrossChunkEdge edge;
840 3216 : edge.source = offset;
841 3216 : edge.target = nextOffset;
842 3216 : analysis->getCode(edge.target).safePoint = true;
843 3216 : if (!edges.append(edge))
844 0 : return NULL;
845 : }
846 :
/* Promote pending forward edges that now leave the finished chunk. */
847 3218 : chunkStart = nextOffset;
848 32575 : for (unsigned i = 0; i < currentEdges.length(); i++) {
849 29357 : const CrossChunkEdge &edge = currentEdges[i];
850 29357 : if (edge.target >= nextOffset) {
851 285 : analysis->getCode(edge.target).safePoint = true;
852 285 : if (!edges.append(edge))
853 0 : return NULL;
854 : }
855 : }
856 3218 : currentEdges.clear();
857 : }
858 : }
859 :
/* Whatever remains after the last cut becomes the final chunk. */
860 238 : if (chunkStart != script->length) {
861 238 : ChunkDescriptor desc;
862 238 : desc.begin = chunkStart;
863 238 : desc.end = script->length;
864 238 : if (!chunks.append(desc))
865 0 : return NULL;
866 : }
867 : }
868 :
/* JITScript, chunk descriptors and edges live in one calloc'ed slab. */
869 : size_t dataSize = sizeof(JITScript)
870 75536 : + (chunks.length() * sizeof(ChunkDescriptor))
871 75536 : + (edges.length() * sizeof(CrossChunkEdge));
872 75536 : uint8_t *cursor = (uint8_t *) OffTheBooks::calloc_(dataSize);
873 75536 : if (!cursor)
874 0 : return NULL;
875 :
876 75536 : JITScript *jit = (JITScript *) cursor;
877 75536 : cursor += sizeof(JITScript);
878 :
879 75536 : jit->script = script;
880 75536 : JS_INIT_CLIST(&jit->callers);
881 :
882 75536 : jit->nchunks = chunks.length();
883 154290 : for (unsigned i = 0; i < chunks.length(); i++) {
884 78754 : const ChunkDescriptor &a = chunks[i];
885 78754 : ChunkDescriptor &b = jit->chunkDescriptor(i);
886 78754 : b.begin = a.begin;
887 78754 : b.end = a.end;
888 :
889 78754 : if (chunks.length() == 1) {
890 : /* Seed the chunk's count so it is immediately compiled. */
891 75298 : b.counter = INFER_USES_BEFORE_COMPILE;
892 : }
893 : }
894 :
/* Single-chunk scripts have no cross-chunk edges and need no shims. */
895 75536 : if (edges.empty())
896 75298 : return jit;
897 :
898 238 : jit->nedges = edges.length();
899 238 : CrossChunkEdge *jitEdges = jit->edges();
900 3808 : for (unsigned i = 0; i < edges.length(); i++) {
901 3570 : const CrossChunkEdge &a = edges[i];
902 3570 : CrossChunkEdge &b = jitEdges[i];
903 3570 : b.source = a.source;
904 3570 : b.target = a.target;
905 : }
906 :
907 : /* Generate a pool with all cross chunk shims, and set shimLabel for each edge. */
/* shimLabel temporarily holds an offset into the pool; patched to a real
 * address once the pool's base address is known (below). */
908 476 : Assembler masm;
909 3808 : for (unsigned i = 0; i < jit->nedges; i++) {
910 3570 : jsbytecode *pc = script->code + jitEdges[i].target;
911 3570 : jitEdges[i].shimLabel = (void *) masm.distanceOf(masm.label());
912 3570 : masm.move(JSC::MacroAssembler::ImmPtr(&jitEdges[i]), Registers::ArgReg1);
913 : masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::CrossChunkShim),
914 3570 : pc, NULL, script->nfixed + analysis->getCode(pc).stackDepth);
915 : }
916 476 : LinkerHelper linker(masm, JSC::METHOD_CODE);
917 238 : JSC::ExecutablePool *ep = linker.init(cx);
918 238 : if (!ep)
919 0 : return NULL;
920 238 : jit->shimPool = ep;
921 :
922 238 : masm.finalize(linker);
923 238 : uint8_t *shimCode = (uint8_t *) linker.finalizeCodeAddendum().executableAddress();
924 :
925 238 : JS_ALWAYS_TRUE(linker.verifyRange(JSC::JITCode(shimCode, masm.size())));
926 :
927 : JaegerSpew(JSpew_PICs, "generated SHIM POOL stub %p (%lu bytes)\n",
928 238 : shimCode, (unsigned long)masm.size());
929 :
/* Convert the stored offsets into absolute shim entry addresses. */
930 3808 : for (unsigned i = 0; i < jit->nedges; i++) {
931 3570 : CrossChunkEdge &edge = jitEdges[i];
932 3570 : edge.shimLabel = shimCode + (size_t) edge.shimLabel;
933 : }
934 :
935 238 : return jit;
936 : }
937 :
/*
 * Decide whether the method JIT can/should be used for the chunk containing
 * |pc|, compiling it on demand. Returns Compile_Okay when a compiled chunk
 * exists, Compile_Skipped while the script is still warming up,
 * Compile_Abort when JIT use is impossible, or Compile_Error on failure.
 */
CompileStatus
mjit::CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
                   bool construct, CompileRequest request)
{
  restart:
    /* The method JIT may be disabled per-context (e.g. by debug mode). */
    if (!cx->methodJitEnabled)
        return Compile_Abort;

    /*
     * Normal and constructing calls have separate JIT handles. A handle
     * marked unjittable records a permanent earlier compilation failure.
     */
    JSScript::JITScriptHandle *jith = script->jitHandle(construct);
    if (jith->isUnjittable())
        return Compile_Abort;

    /*
     * When called from the interpreter, wait until the script is warm before
     * compiling, unless eager compilation was requested via the METHODJIT_ALWAYS
     * option. NOTE: incUseCount() mutates the use count, so the short-circuit
     * evaluation order of this condition matters.
     */
    if (request == CompileRequest_Interpreter &&
        !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
        (cx->typeInferenceEnabled()
         ? script->incUseCount() <= INFER_USES_BEFORE_COMPILE
         : script->incUseCount() <= USES_BEFORE_COMPILE))
    {
        return Compile_Skipped;
    }

    if (!cx->compartment->ensureJaegerCompartmentExists(cx))
        return Compile_Error;

    // Ensure that constructors have at least one slot.
    if (construct && !script->nslots)
        script->nslots++;

    /*
     * Lazily build the JITScript (chunk descriptors plus cross-chunk edges)
     * the first time any chunk of this script is considered for compilation.
     */
    JITScript *jit;
    if (jith->isEmpty()) {
        jit = MakeJITScript(cx, script);
        if (!jit)
            return Compile_Error;
        jith->setValid(jit);
    } else {
        jit = jith->getValid();
    }
    unsigned chunkIndex = jit->chunkIndex(pc);
    ChunkDescriptor &desc = jit->chunkDescriptor(chunkIndex);

    /* Chunk already compiled: nothing more to do. */
    if (desc.chunk)
        return Compile_Okay;

    /* Per-chunk warm-up counter, analogous to the script use count above. */
    if (request == CompileRequest_Interpreter &&
        !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
        ++desc.counter <= INFER_USES_BEFORE_COMPILE)
    {
        return Compile_Skipped;
    }

    CompileStatus status;
    {
        /* Compilation requires type inference to be active for the script. */
        types::AutoEnterTypeInference enter(cx, true);

        Compiler cc(cx, script, chunkIndex, construct);
        status = cc.compile();
    }

    if (status == Compile_Okay) {
        /*
         * Compiling a script can occasionally trigger its own recompilation,
         * so go back through the compilation logic.
         */
        goto restart;
    }

    /* Non-OOM errors should have an associated exception. */
    JS_ASSERT_IF(status == Compile_Error,
                 cx->isExceptionPending() || cx->runtime->hadOutOfMemory);

    return status;
}
1010 :
/*
 * Emit the prologue for the current chunk. For function scripts this
 * generates three entry points (invoke, fast, and arity-check), the stack
 * quota guard, local initialization and the function-frame prologue; for
 * global/eval scripts only the shared tail (type checks, constructor |this|,
 * debug hooks, recompile check) is emitted. The exact order of the emitted
 * assembler operations defines the generated machine code — do not reorder.
 */
CompileStatus
mjit::Compiler::generatePrologue()
{
    /* Entry point #1: a fully constructed frame (JaegerShot / non-function). */
    invokeLabel = masm.label();

    /*
     * If there is no function, then this can only be called via JaegerShot(),
     * which expects an existing frame to be initialized like the interpreter.
     */
    if (script->function()) {
        /* Jump over the function entry points, linked to the tail below. */
        Jump j = masm.jump();

        /*
         * Entry point #2: The caller has partially constructed a frame, and
         * either argc >= nargs or the arity check has corrected the frame.
         */
        invokeLabel = masm.label();

        Label fastPath = masm.label();

        /* Store this early on so slow paths can access it. */
        masm.storePtr(ImmPtr(script->function()),
                      Address(JSFrameReg, StackFrame::offsetOfExec()));

        {
            /*
             * Entry point #3: The caller has partially constructed a frame,
             * but argc might be != nargs, so an arity check might be called.
             *
             * This loops back to entry point #2.
             */
            arityLabel = stubcc.masm.label();

            Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
                                                 Imm32(script->function()->nargs));

            if (JSParamReg_Argc != Registers::ArgReg1)
                stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);

            /* Slow path - call the arity check function. Returns new fp. */
            stubcc.masm.storePtr(ImmPtr(script->function()),
                                 Address(JSFrameReg, StackFrame::offsetOfExec()));
            OOL_STUBCALL(stubs::FixupArity, REJOIN_NONE);
            stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
            argMatch.linkTo(stubcc.masm.label(), &stubcc.masm);

            argsCheckLabel = stubcc.masm.label();

            /* Type check the arguments as well. */
            if (cx->typeInferenceEnabled()) {
#ifdef JS_MONOIC
                /* Record jump/label so the args-check IC can be repatched later. */
                this->argsCheckJump = stubcc.masm.jump();
                this->argsCheckStub = stubcc.masm.label();
                this->argsCheckJump.linkTo(this->argsCheckStub, &stubcc.masm);
#endif
                /* FixupArity may have moved the frame; restore fp->exec. */
                stubcc.masm.storePtr(ImmPtr(script->function()),
                                     Address(JSFrameReg, StackFrame::offsetOfExec()));
                OOL_STUBCALL(stubs::CheckArgumentTypes, REJOIN_CHECK_ARGUMENTS);
#ifdef JS_MONOIC
                this->argsCheckFallthrough = stubcc.masm.label();
#endif
            }

            stubcc.crossJump(stubcc.masm.jump(), fastPath);
        }

        /*
         * Guard that there is enough stack space. Note we reserve space for
         * any inline frames we end up generating, or a callee's stack frame
         * we write to before the callee checks the stack.
         */
        uint32_t nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
        masm.addPtr(Imm32(nvals * sizeof(Value)), JSFrameReg, Registers::ReturnReg);
        Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
                                         FrameAddress(offsetof(VMFrame, stackLimit)));

        /*
         * If the stack check fails then we need to either commit more of the
         * reserved stack space or throw an error. Specify that the number of
         * local slots is 0 (instead of the default script->nfixed) since the
         * range [fp->slots(), fp->base()) may not be commited. (The calling
         * contract requires only that the caller has reserved space for fp.)
         */
        {
            stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
            OOL_STUBCALL(stubs::HitStackQuota, REJOIN_NONE);
            stubcc.crossJump(stubcc.masm.jump(), masm.label());
        }

        /* Store undefined into (live) fixed locals, as the interpreter would. */
        markUndefinedLocals();

        types::TypeScriptNesting *nesting = script->nesting();

        /*
         * Run the function prologue if necessary. This is always done in a
         * stub for heavyweight functions (including nesting outer functions).
         */
        JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
        if (script->function()->isHeavyweight() || script->needsArgsObj()) {
            prepareStubCall(Uses(0));
            INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
        } else {
            /*
             * Load the scope chain into the frame if it will be needed by NAME
             * opcodes or by the nesting prologue below. The scope chain is
             * always set for global and eval frames, and will have been set by
             * CreateFunCallObject for heavyweight function frames.
             */
            if (analysis->usesScopeChain() || nesting) {
                RegisterID t0 = Registers::ReturnReg;
                Jump hasScope = masm.branchTest32(Assembler::NonZero,
                                                  FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
                masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
                masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
                masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
                hasScope.linkTo(masm.label(), &masm);
            }

            if (nesting) {
                /*
                 * Inline the common case for the nesting prologue: the
                 * function is a non-heavyweight inner function with no
                 * children of its own. We ensure during inference that the
                 * outer function does not add scope objects for 'let' or
                 * 'with', so that the frame's scope chain will be
                 * the parent's call object, and if it differs from the
                 * parent's current activation then the parent is reentrant.
                 */
                JSScript *parent = nesting->parent;
                JS_ASSERT(parent);
                JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
                             !parent->analysis()->addsScopeObjects());

                RegisterID t0 = Registers::ReturnReg;
                masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
                masm.loadPtr(Address(t0), t0);

                Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
                Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
                masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));

                /* Scope chain mismatch: fall back to the full frame prologue stub. */
                stubcc.linkExitDirect(mismatch, stubcc.masm.label());
                OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
                stubcc.crossJump(stubcc.masm.jump(), masm.label());
            }
        }

        if (script->mayNeedArgsObj()) {
            /*
             * Make sure that fp->u.nactual is always coherent. This may be
             * inspected directly by JIT code, and is not guaranteed to be
             * correct if the UNDERFLOW and OVERFLOW flags are not set.
             */
            Jump hasArgs = masm.branchTest32(Assembler::NonZero, FrameFlagsAddress(),
                                             Imm32(StackFrame::UNDERFLOW_ARGS |
                                                   StackFrame::OVERFLOW_ARGS));
            masm.storePtr(ImmPtr((void *)(size_t) script->function()->nargs),
                          Address(JSFrameReg, StackFrame::offsetOfNumActual()));
            hasArgs.linkTo(masm.label(), &masm);
        }

        /* Land the entry-point-#1 jump here, past the function-only setup. */
        j.linkTo(masm.label(), &masm);
    }

    if (cx->typeInferenceEnabled()) {
#ifdef DEBUG
        /* In debug builds, verify the argument types match inference results. */
        if (script->function()) {
            prepareStubCall(Uses(0));
            INLINE_STUBCALL(stubs::AssertArgumentTypes, REJOIN_NONE);
        }
#endif
        ensureDoubleArguments();
    }

    if (isConstructing) {
        if (!constructThis())
            return Compile_Error;
    }

    /* Debug hooks take precedence over plain probe tracking. */
    if (debugMode()) {
        prepareStubCall(Uses(0));
        INLINE_STUBCALL(stubs::ScriptDebugPrologue, REJOIN_RESUME);
    } else if (Probes::callTrackingActive(cx)) {
        prepareStubCall(Uses(0));
        INLINE_STUBCALL(stubs::ScriptProbeOnlyPrologue, REJOIN_RESUME);
    }

    recompileCheckHelper();

    return Compile_Okay;
}
1202 :
1203 : void
1204 59146 : mjit::Compiler::ensureDoubleArguments()
1205 : {
1206 : /* Convert integer arguments which were inferred as (int|double) to doubles. */
1207 127621 : for (uint32_t i = 0; script->function() && i < script->function()->nargs; i++) {
1208 68475 : uint32_t slot = ArgSlot(i);
1209 68475 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE && analysis->trackSlot(slot))
1210 472 : frame.ensureDouble(frame.getArg(i));
1211 : }
1212 59146 : }
1213 :
1214 : void
1215 90213 : mjit::Compiler::markUndefinedLocal(uint32_t offset, uint32_t i)
1216 : {
1217 90213 : uint32_t depth = ssa.getFrame(a->inlineIndex).depth;
1218 90213 : uint32_t slot = LocalSlot(script, i);
1219 90213 : Address local(JSFrameReg, sizeof(StackFrame) + (depth + i) * sizeof(Value));
1220 90213 : if (!cx->typeInferenceEnabled() || !analysis->trackSlot(slot)) {
1221 69973 : masm.storeValue(UndefinedValue(), local);
1222 : } else {
1223 20240 : Lifetime *lifetime = analysis->liveness(slot).live(offset);
1224 20240 : if (lifetime)
1225 3512 : masm.storeValue(UndefinedValue(), local);
1226 : }
1227 90213 : }
1228 :
1229 : void
1230 48846 : mjit::Compiler::markUndefinedLocals()
1231 : {
1232 : /*
1233 : * Set locals to undefined, as in initCallFrameLatePrologue.
1234 : * Skip locals which aren't closed and are known to be defined before used,
1235 : */
1236 139059 : for (uint32_t i = 0; i < script->nfixed; i++)
1237 90213 : markUndefinedLocal(0, i);
1238 48846 : }
1239 :
1240 : CompileStatus
1241 92388 : mjit::Compiler::generateEpilogue()
1242 : {
1243 92388 : return Compile_Okay;
1244 : }
1245 :
1246 : CompileStatus
1247 93652 : mjit::Compiler::finishThisUp()
1248 : {
1249 : #ifdef JS_CPU_X64
1250 : /* Generate trampolines to ensure that cross chunk edges are patchable. */
1251 : for (unsigned i = 0; i < chunkEdges.length(); i++) {
1252 : chunkEdges[i].sourceTrampoline = stubcc.masm.label();
1253 : stubcc.masm.move(ImmPtr(NULL), Registers::ScratchReg);
1254 : stubcc.masm.jump(Registers::ScratchReg);
1255 : }
1256 : #endif
1257 :
1258 93652 : RETURN_IF_OOM(Compile_Error);
1259 :
1260 : /*
1261 : * Watch for reallocation of the global slots while we were in the middle
1262 : * of compiling due to, e.g. standard class initialization.
1263 : */
1264 93652 : if (globalSlots && globalObj->getRawSlots() != globalSlots)
1265 0 : return Compile_Retry;
1266 :
1267 : /*
1268 : * Watch for GCs which occurred during compilation. These may have
1269 : * renumbered shapes baked into the jitcode.
1270 : */
1271 93652 : if (cx->runtime->gcNumber != gcNumber)
1272 0 : return Compile_Retry;
1273 :
1274 : /* The JIT will not have been cleared if no GC has occurred. */
1275 93652 : JITScript *jit = outerJIT();
1276 93652 : JS_ASSERT(jit != NULL);
1277 :
1278 93652 : if (overflowICSpace) {
1279 0 : JaegerSpew(JSpew_Scripts, "dumped a constant pool while generating an IC\n");
1280 0 : return Compile_Abort;
1281 : }
1282 :
1283 93652 : a->mainCodeEnd = masm.size();
1284 93652 : a->stubCodeEnd = stubcc.size();
1285 :
1286 211134 : for (size_t i = 0; i < branchPatches.length(); i++) {
1287 117482 : Label label = labelOf(branchPatches[i].pc, branchPatches[i].inlineIndex);
1288 117482 : branchPatches[i].jump.linkTo(label, &masm);
1289 : }
1290 :
1291 : #ifdef JS_CPU_ARM
1292 : masm.forceFlushConstantPool();
1293 : stubcc.masm.forceFlushConstantPool();
1294 : #endif
1295 : JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %lu, Slow code (stubcc) size = %lu.\n",
1296 93652 : (unsigned long) masm.size(), (unsigned long) stubcc.size());
1297 :
1298 : /* To make inlineDoubles and oolDoubles aligned to sizeof(double) bytes,
1299 : MIPS adds extra sizeof(double) bytes to codeSize. */
1300 93652 : size_t codeSize = masm.size() +
1301 : #if defined(JS_CPU_MIPS)
1302 : stubcc.size() + sizeof(double) +
1303 : #else
1304 93652 : stubcc.size() +
1305 : #endif
1306 93652 : (masm.numDoubles() * sizeof(double)) +
1307 93652 : (stubcc.masm.numDoubles() * sizeof(double)) +
1308 187304 : jumpTableEdges.length() * sizeof(void *);
1309 :
1310 187304 : Vector<ChunkJumpTableEdge> chunkJumps(cx);
1311 93652 : if (!chunkJumps.reserve(jumpTableEdges.length()))
1312 0 : return Compile_Error;
1313 :
1314 : JSC::ExecutablePool *execPool;
1315 : uint8_t *result = (uint8_t *)script->compartment()->jaegerCompartment()->execAlloc()->
1316 93652 : alloc(codeSize, &execPool, JSC::METHOD_CODE);
1317 93652 : if (!result) {
1318 0 : js_ReportOutOfMemory(cx);
1319 0 : return Compile_Error;
1320 : }
1321 93652 : JS_ASSERT(execPool);
1322 93652 : JSC::ExecutableAllocator::makeWritable(result, codeSize);
1323 93652 : masm.executableCopy(result);
1324 93652 : stubcc.masm.executableCopy(result + masm.size());
1325 :
1326 187304 : JSC::LinkBuffer fullCode(result, codeSize, JSC::METHOD_CODE);
1327 187304 : JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::METHOD_CODE);
1328 :
1329 93652 : JS_ASSERT(!loop);
1330 :
1331 93652 : size_t nNmapLive = loopEntries.length();
1332 11819769 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1333 11726117 : Bytecode *opinfo = analysis->maybeCode(i);
1334 11726117 : if (opinfo && opinfo->safePoint)
1335 112247 : nNmapLive++;
1336 : }
1337 :
1338 : /* Please keep in sync with JITChunk::sizeOfIncludingThis! */
1339 : size_t dataSize = sizeof(JITChunk) +
1340 : sizeof(NativeMapEntry) * nNmapLive +
1341 93652 : sizeof(InlineFrame) * inlineFrames.length() +
1342 93652 : sizeof(CallSite) * callSites.length() +
1343 : #if defined JS_MONOIC
1344 93652 : sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
1345 93652 : sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
1346 93652 : sizeof(ic::CallICInfo) * callICs.length() +
1347 93652 : sizeof(ic::EqualityICInfo) * equalityICs.length() +
1348 : #endif
1349 : #if defined JS_POLYIC
1350 93652 : sizeof(ic::PICInfo) * pics.length() +
1351 93652 : sizeof(ic::GetElementIC) * getElemICs.length() +
1352 93652 : sizeof(ic::SetElementIC) * setElemICs.length() +
1353 : #endif
1354 842868 : 0;
1355 :
1356 93652 : uint8_t *cursor = (uint8_t *)OffTheBooks::calloc_(dataSize);
1357 93652 : if (!cursor) {
1358 0 : execPool->release();
1359 0 : js_ReportOutOfMemory(cx);
1360 0 : return Compile_Error;
1361 : }
1362 :
1363 93652 : JITChunk *chunk = new(cursor) JITChunk;
1364 93652 : cursor += sizeof(JITChunk);
1365 :
1366 93652 : JS_ASSERT(outerScript == script);
1367 :
1368 93652 : chunk->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
1369 93652 : chunk->pcLengths = pcLengths;
1370 :
1371 93652 : if (chunkIndex == 0) {
1372 92485 : jit->invokeEntry = result;
1373 92485 : if (script->function()) {
1374 45845 : jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
1375 45845 : jit->argsCheckEntry = stubCode.locationOf(argsCheckLabel).executableAddress();
1376 45845 : jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
1377 : }
1378 : }
1379 :
1380 : /*
1381 : * WARNING: mics(), callICs() et al depend on the ordering of these
1382 : * variable-length sections. See JITChunk's declaration for details.
1383 : */
1384 :
1385 : /* ICs can only refer to bytecodes in the outermost script, not inlined calls. */
1386 93652 : Label *jumpMap = a->jumpMap;
1387 :
1388 : /* Build the pc -> ncode mapping. */
1389 93652 : NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
1390 93652 : chunk->nNmapPairs = nNmapLive;
1391 93652 : cursor += sizeof(NativeMapEntry) * chunk->nNmapPairs;
1392 93652 : size_t ix = 0;
1393 93652 : if (chunk->nNmapPairs > 0) {
1394 8710805 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1395 8674382 : Bytecode *opinfo = analysis->maybeCode(i);
1396 8674382 : if (opinfo && opinfo->safePoint) {
1397 112247 : Label L = jumpMap[i];
1398 112247 : JS_ASSERT(L.isSet());
1399 112247 : jitNmap[ix].bcOff = i;
1400 112247 : jitNmap[ix].ncode = (uint8_t *)(result + masm.distanceOf(L));
1401 112247 : ix++;
1402 : }
1403 : }
1404 69975 : for (size_t i = 0; i < loopEntries.length(); i++) {
1405 : /* Insert the entry at the right position. */
1406 33552 : const LoopEntry &entry = loopEntries[i];
1407 : size_t j;
1408 83603 : for (j = 0; j < ix; j++) {
1409 54093 : if (jitNmap[j].bcOff > entry.pcOffset) {
1410 4042 : memmove(jitNmap + j + 1, jitNmap + j, (ix - j) * sizeof(NativeMapEntry));
1411 4042 : break;
1412 : }
1413 : }
1414 33552 : jitNmap[j].bcOff = entry.pcOffset;
1415 33552 : jitNmap[j].ncode = (uint8_t *) stubCode.locationOf(entry.label).executableAddress();
1416 33552 : ix++;
1417 : }
1418 : }
1419 93652 : JS_ASSERT(ix == chunk->nNmapPairs);
1420 :
1421 : /* Build the table of inlined frames. */
1422 93652 : InlineFrame *jitInlineFrames = (InlineFrame *)cursor;
1423 93652 : chunk->nInlineFrames = inlineFrames.length();
1424 93652 : cursor += sizeof(InlineFrame) * chunk->nInlineFrames;
1425 96584 : for (size_t i = 0; i < chunk->nInlineFrames; i++) {
1426 2932 : InlineFrame &to = jitInlineFrames[i];
1427 2932 : ActiveFrame *from = inlineFrames[i];
1428 2932 : if (from->parent != outer)
1429 1243 : to.parent = &jitInlineFrames[from->parent->inlineIndex];
1430 : else
1431 1689 : to.parent = NULL;
1432 2932 : to.parentpc = from->parentPC;
1433 2932 : to.fun = from->script->function();
1434 2932 : to.depth = ssa.getFrame(from->inlineIndex).depth;
1435 : }
1436 :
1437 : /* Build the table of call sites. */
1438 93652 : CallSite *jitCallSites = (CallSite *)cursor;
1439 93652 : chunk->nCallSites = callSites.length();
1440 93652 : cursor += sizeof(CallSite) * chunk->nCallSites;
1441 2873553 : for (size_t i = 0; i < chunk->nCallSites; i++) {
1442 2779901 : CallSite &to = jitCallSites[i];
1443 2779901 : InternalCallSite &from = callSites[i];
1444 :
1445 : /* Patch stores of f.regs.inlined for stubs called from within inline frames. */
1446 2779901 : if (cx->typeInferenceEnabled() &&
1447 : from.rejoin != REJOIN_TRAP &&
1448 : from.rejoin != REJOIN_SCRIPTED &&
1449 : from.inlineIndex != UINT32_MAX) {
1450 27038 : if (from.ool)
1451 26701 : stubCode.patch(from.inlinePatch, &to);
1452 : else
1453 337 : fullCode.patch(from.inlinePatch, &to);
1454 : }
1455 :
1456 : JSScript *script =
1457 2779901 : (from.inlineIndex == UINT32_MAX) ? outerScript : inlineFrames[from.inlineIndex]->script;
1458 : uint32_t codeOffset = from.ool
1459 2095214 : ? masm.size() + from.returnOffset
1460 4875115 : : from.returnOffset;
1461 2779901 : to.initialize(codeOffset, from.inlineIndex, from.inlinepc - script->code, from.rejoin);
1462 :
1463 : /*
1464 : * Patch stores of the base call's return address for InvariantFailure
1465 : * calls. InvariantFailure will patch its own return address to this
1466 : * pointer before triggering recompilation.
1467 : */
1468 2779901 : if (from.loopPatch.hasPatch)
1469 8351 : stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
1470 : }
1471 :
1472 : #if defined JS_MONOIC
1473 93652 : if (chunkIndex == 0 && script->function()) {
1474 45845 : JS_ASSERT(jit->argsCheckPool == NULL);
1475 45845 : if (cx->typeInferenceEnabled()) {
1476 31342 : jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
1477 31342 : jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
1478 31342 : jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
1479 : }
1480 : }
1481 :
1482 93652 : ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
1483 93652 : chunk->nGetGlobalNames = getGlobalNames.length();
1484 93652 : cursor += sizeof(ic::GetGlobalNameIC) * chunk->nGetGlobalNames;
1485 474456 : for (size_t i = 0; i < chunk->nGetGlobalNames; i++) {
1486 380804 : ic::GetGlobalNameIC &to = getGlobalNames_[i];
1487 380804 : GetGlobalNameICInfo &from = getGlobalNames[i];
1488 380804 : from.copyTo(to, fullCode, stubCode);
1489 :
1490 380804 : int offset = fullCode.locationOf(from.load) - to.fastPathStart;
1491 380804 : to.loadStoreOffset = offset;
1492 380804 : JS_ASSERT(to.loadStoreOffset == offset);
1493 :
1494 380804 : stubCode.patch(from.addrLabel, &to);
1495 : }
1496 :
1497 93652 : ic::SetGlobalNameIC *setGlobalNames_ = (ic::SetGlobalNameIC *)cursor;
1498 93652 : chunk->nSetGlobalNames = setGlobalNames.length();
1499 93652 : cursor += sizeof(ic::SetGlobalNameIC) * chunk->nSetGlobalNames;
1500 133150 : for (size_t i = 0; i < chunk->nSetGlobalNames; i++) {
1501 39498 : ic::SetGlobalNameIC &to = setGlobalNames_[i];
1502 39498 : SetGlobalNameICInfo &from = setGlobalNames[i];
1503 39498 : from.copyTo(to, fullCode, stubCode);
1504 39498 : to.slowPathStart = stubCode.locationOf(from.slowPathStart);
1505 :
1506 39498 : int offset = fullCode.locationOf(from.store).labelAtOffset(0) -
1507 78996 : to.fastPathStart;
1508 39498 : to.loadStoreOffset = offset;
1509 39498 : JS_ASSERT(to.loadStoreOffset == offset);
1510 :
1511 39498 : to.hasExtraStub = 0;
1512 39498 : to.objConst = from.objConst;
1513 39498 : to.shapeReg = from.shapeReg;
1514 39498 : to.objReg = from.objReg;
1515 39498 : to.vr = from.vr;
1516 :
1517 : offset = fullCode.locationOf(from.shapeGuardJump) -
1518 39498 : to.fastPathStart;
1519 39498 : to.inlineShapeJump = offset;
1520 39498 : JS_ASSERT(to.inlineShapeJump == offset);
1521 :
1522 : offset = fullCode.locationOf(from.fastPathRejoin) -
1523 39498 : to.fastPathStart;
1524 39498 : to.fastRejoinOffset = offset;
1525 39498 : JS_ASSERT(to.fastRejoinOffset == offset);
1526 :
1527 39498 : stubCode.patch(from.addrLabel, &to);
1528 : }
1529 :
1530 93652 : ic::CallICInfo *jitCallICs = (ic::CallICInfo *)cursor;
1531 93652 : chunk->nCallICs = callICs.length();
1532 93652 : cursor += sizeof(ic::CallICInfo) * chunk->nCallICs;
1533 184210 : for (size_t i = 0; i < chunk->nCallICs; i++) {
1534 90558 : jitCallICs[i].reset();
1535 90558 : jitCallICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
1536 90558 : jitCallICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
1537 90558 : jitCallICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
1538 90558 : jitCallICs[i].typeMonitored = callICs[i].typeMonitored;
1539 :
1540 : /* Compute the hot call offset. */
1541 90558 : uint32_t offset = fullCode.locationOf(callICs[i].hotJump) -
1542 181116 : fullCode.locationOf(callICs[i].funGuard);
1543 90558 : jitCallICs[i].hotJumpOffset = offset;
1544 90558 : JS_ASSERT(jitCallICs[i].hotJumpOffset == offset);
1545 :
1546 : /* Compute the join point offset. */
1547 90558 : offset = fullCode.locationOf(callICs[i].joinPoint) -
1548 181116 : fullCode.locationOf(callICs[i].funGuard);
1549 90558 : jitCallICs[i].joinPointOffset = offset;
1550 90558 : JS_ASSERT(jitCallICs[i].joinPointOffset == offset);
1551 :
1552 : /* Compute the OOL call offset. */
1553 90558 : offset = stubCode.locationOf(callICs[i].oolCall) -
1554 181116 : stubCode.locationOf(callICs[i].slowPathStart);
1555 90558 : jitCallICs[i].oolCallOffset = offset;
1556 90558 : JS_ASSERT(jitCallICs[i].oolCallOffset == offset);
1557 :
1558 : /* Compute the OOL jump offset. */
1559 90558 : offset = stubCode.locationOf(callICs[i].oolJump) -
1560 181116 : stubCode.locationOf(callICs[i].slowPathStart);
1561 90558 : jitCallICs[i].oolJumpOffset = offset;
1562 90558 : JS_ASSERT(jitCallICs[i].oolJumpOffset == offset);
1563 :
1564 : /* Compute the start of the OOL IC call. */
1565 90558 : offset = stubCode.locationOf(callICs[i].icCall) -
1566 181116 : stubCode.locationOf(callICs[i].slowPathStart);
1567 90558 : jitCallICs[i].icCallOffset = offset;
1568 90558 : JS_ASSERT(jitCallICs[i].icCallOffset == offset);
1569 :
1570 : /* Compute the slow join point offset. */
1571 90558 : offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
1572 181116 : stubCode.locationOf(callICs[i].slowPathStart);
1573 90558 : jitCallICs[i].slowJoinOffset = offset;
1574 90558 : JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);
1575 :
1576 : /* Compute the join point offset for continuing on the hot path. */
1577 90558 : offset = stubCode.locationOf(callICs[i].hotPathLabel) -
1578 181116 : stubCode.locationOf(callICs[i].funGuard);
1579 90558 : jitCallICs[i].hotPathOffset = offset;
1580 90558 : JS_ASSERT(jitCallICs[i].hotPathOffset == offset);
1581 :
1582 90558 : jitCallICs[i].call = &jitCallSites[callICs[i].callIndex];
1583 90558 : jitCallICs[i].frameSize = callICs[i].frameSize;
1584 90558 : jitCallICs[i].funObjReg = callICs[i].funObjReg;
1585 90558 : stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
1586 90558 : stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
1587 : }
1588 :
1589 93652 : ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
1590 93652 : chunk->nEqualityICs = equalityICs.length();
1591 93652 : cursor += sizeof(ic::EqualityICInfo) * chunk->nEqualityICs;
1592 98779 : for (size_t i = 0; i < chunk->nEqualityICs; i++) {
1593 5127 : if (equalityICs[i].trampoline) {
1594 311 : jitEqualityICs[i].target = stubCode.locationOf(equalityICs[i].trampolineStart);
1595 : } else {
1596 4816 : uint32_t offs = uint32_t(equalityICs[i].jumpTarget - script->code);
1597 4816 : JS_ASSERT(jumpMap[offs].isSet());
1598 4816 : jitEqualityICs[i].target = fullCode.locationOf(jumpMap[offs]);
1599 : }
1600 5127 : jitEqualityICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
1601 5127 : jitEqualityICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
1602 5127 : jitEqualityICs[i].stub = equalityICs[i].stub;
1603 5127 : jitEqualityICs[i].lvr = equalityICs[i].lvr;
1604 5127 : jitEqualityICs[i].rvr = equalityICs[i].rvr;
1605 5127 : jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
1606 5127 : jitEqualityICs[i].cond = equalityICs[i].cond;
1607 5127 : if (equalityICs[i].jumpToStub.isSet())
1608 3366 : jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
1609 5127 : jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
1610 :
1611 5127 : stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
1612 : }
1613 : #endif /* JS_MONOIC */
1614 :
1615 270331 : for (size_t i = 0; i < callPatches.length(); i++) {
1616 176679 : CallPatchInfo &patch = callPatches[i];
1617 :
1618 : CodeLocationLabel joinPoint = patch.joinSlow
1619 : ? stubCode.locationOf(patch.joinPoint)
1620 176679 : : fullCode.locationOf(patch.joinPoint);
1621 :
1622 176679 : if (patch.hasFastNcode)
1623 174210 : fullCode.patch(patch.fastNcodePatch, joinPoint);
1624 176679 : if (patch.hasSlowNcode)
1625 90558 : stubCode.patch(patch.slowNcodePatch, joinPoint);
1626 : }
1627 :
1628 : #ifdef JS_POLYIC
1629 93652 : ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
1630 93652 : chunk->nGetElems = getElemICs.length();
1631 93652 : cursor += sizeof(ic::GetElementIC) * chunk->nGetElems;
1632 115300 : for (size_t i = 0; i < chunk->nGetElems; i++) {
1633 21648 : ic::GetElementIC &to = jitGetElems[i];
1634 21648 : GetElementICInfo &from = getElemICs[i];
1635 :
1636 21648 : new (&to) ic::GetElementIC();
1637 21648 : from.copyTo(to, fullCode, stubCode);
1638 :
1639 21648 : to.typeReg = from.typeReg;
1640 21648 : to.objReg = from.objReg;
1641 21648 : to.idRemat = from.id;
1642 :
1643 21648 : if (from.typeGuard.isSet()) {
1644 8213 : int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
1645 16426 : fullCode.locationOf(from.fastPathStart);
1646 8213 : to.inlineTypeGuard = inlineTypeGuard;
1647 8213 : JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
1648 : }
1649 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1650 21648 : fullCode.locationOf(from.fastPathStart);
1651 21648 : to.inlineShapeGuard = inlineShapeGuard;
1652 21648 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1653 :
1654 21648 : stubCode.patch(from.paramAddr, &to);
1655 : }
1656 :
1657 93652 : ic::SetElementIC *jitSetElems = (ic::SetElementIC *)cursor;
1658 93652 : chunk->nSetElems = setElemICs.length();
1659 93652 : cursor += sizeof(ic::SetElementIC) * chunk->nSetElems;
1660 99355 : for (size_t i = 0; i < chunk->nSetElems; i++) {
1661 5703 : ic::SetElementIC &to = jitSetElems[i];
1662 5703 : SetElementICInfo &from = setElemICs[i];
1663 :
1664 5703 : new (&to) ic::SetElementIC();
1665 5703 : from.copyTo(to, fullCode, stubCode);
1666 :
1667 5703 : to.strictMode = script->strictModeCode;
1668 5703 : to.vr = from.vr;
1669 5703 : to.objReg = from.objReg;
1670 5703 : to.objRemat = from.objRemat.toInt32();
1671 5703 : JS_ASSERT(to.objRemat == from.objRemat.toInt32());
1672 :
1673 5703 : to.hasConstantKey = from.key.isConstant();
1674 5703 : if (from.key.isConstant())
1675 1784 : to.keyValue = from.key.index();
1676 : else
1677 3919 : to.keyReg = from.key.reg();
1678 :
1679 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1680 5703 : fullCode.locationOf(from.fastPathStart);
1681 5703 : to.inlineShapeGuard = inlineShapeGuard;
1682 5703 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1683 :
1684 : int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
1685 5703 : fullCode.locationOf(from.fastPathStart);
1686 5703 : to.inlineHoleGuard = inlineHoleGuard;
1687 5703 : JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);
1688 :
1689 5703 : CheckIsStubCall(to.slowPathCall.labelAtOffset(0));
1690 :
1691 5703 : to.volatileMask = from.volatileMask;
1692 5703 : JS_ASSERT(to.volatileMask == from.volatileMask);
1693 :
1694 5703 : stubCode.patch(from.paramAddr, &to);
1695 : }
1696 :
1697 93652 : ic::PICInfo *jitPics = (ic::PICInfo *)cursor;
1698 93652 : chunk->nPICs = pics.length();
1699 93652 : cursor += sizeof(ic::PICInfo) * chunk->nPICs;
1700 222955 : for (size_t i = 0; i < chunk->nPICs; i++) {
1701 129303 : new (&jitPics[i]) ic::PICInfo();
1702 129303 : pics[i].copyTo(jitPics[i], fullCode, stubCode);
1703 129303 : pics[i].copySimpleMembersTo(jitPics[i]);
1704 :
1705 129303 : jitPics[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
1706 129303 : masm.distanceOf(pics[i].fastPathStart);
1707 258606 : JS_ASSERT(jitPics[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
1708 258606 : masm.distanceOf(pics[i].fastPathStart));
1709 129303 : jitPics[i].shapeRegHasBaseShape = true;
1710 129303 : jitPics[i].pc = pics[i].pc;
1711 :
1712 129303 : if (pics[i].kind == ic::PICInfo::SET) {
1713 15850 : jitPics[i].u.vr = pics[i].vr;
1714 113453 : } else if (pics[i].kind != ic::PICInfo::NAME) {
1715 81205 : if (pics[i].hasTypeCheck) {
1716 51092 : int32_t distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
1717 51092 : stubcc.masm.distanceOf(pics[i].slowPathStart);
1718 51092 : JS_ASSERT(distance <= 0);
1719 51092 : jitPics[i].u.get.typeCheckOffset = distance;
1720 : }
1721 : }
1722 129303 : stubCode.patch(pics[i].paramAddr, &jitPics[i]);
1723 : }
1724 : #endif
1725 :
1726 93652 : JS_ASSERT(size_t(cursor - (uint8_t*)chunk) == dataSize);
1727 : /* Use the computed size here -- we don't want slop bytes to be counted. */
1728 93652 : JS_ASSERT(chunk->computedSizeOfIncludingThis() == dataSize);
1729 :
1730 : /* Link fast and slow paths together. */
1731 93652 : stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
1732 :
1733 : #if defined(JS_CPU_MIPS)
1734 : /* Make sure doubleOffset is aligned to sizeof(double) bytes. */
1735 : size_t doubleOffset = (((size_t)result + masm.size() + stubcc.size() +
1736 : sizeof(double) - 1) & (~(sizeof(double) - 1))) -
1737 : (size_t)result;
1738 : JS_ASSERT((((size_t)result + doubleOffset) & 7) == 0);
1739 : #else
1740 93652 : size_t doubleOffset = masm.size() + stubcc.size();
1741 : #endif
1742 :
1743 93652 : double *inlineDoubles = (double *) (result + doubleOffset);
1744 : double *oolDoubles = (double*) (result + doubleOffset +
1745 93652 : masm.numDoubles() * sizeof(double));
1746 :
1747 : /* Generate jump tables. */
1748 93652 : void **jumpVec = (void **)(oolDoubles + stubcc.masm.numDoubles());
1749 :
1750 95002 : for (size_t i = 0; i < jumpTableEdges.length(); i++) {
1751 1350 : JumpTableEdge edge = jumpTableEdges[i];
1752 1350 : if (bytecodeInChunk(script->code + edge.target)) {
1753 1340 : JS_ASSERT(jumpMap[edge.target].isSet());
1754 1340 : jumpVec[i] = (void *)(result + masm.distanceOf(jumpMap[edge.target]));
1755 : } else {
1756 : ChunkJumpTableEdge nedge;
1757 10 : nedge.edge = edge;
1758 10 : nedge.jumpTableEntry = &jumpVec[i];
1759 10 : chunkJumps.infallibleAppend(nedge);
1760 10 : jumpVec[i] = NULL;
1761 : }
1762 : }
1763 :
1764 : /* Patch jump table references. */
1765 93914 : for (size_t i = 0; i < jumpTables.length(); i++) {
1766 262 : JumpTable &jumpTable = jumpTables[i];
1767 262 : fullCode.patch(jumpTable.label, &jumpVec[jumpTable.offsetIndex]);
1768 : }
1769 :
1770 : /* Patch all outgoing calls. */
1771 93652 : masm.finalize(fullCode, inlineDoubles);
1772 93652 : stubcc.masm.finalize(stubCode, oolDoubles);
1773 :
1774 93652 : JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
1775 93652 : JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
1776 :
1777 : Probes::registerMJITCode(cx, jit,
1778 : a,
1779 93652 : (JSActiveFrame**) inlineFrames.begin(),
1780 : result, masm.size(),
1781 187304 : result + masm.size(), stubcc.size());
1782 :
1783 93652 : outerChunkRef().chunk = chunk;
1784 :
1785 : /* Patch all incoming and outgoing cross-chunk jumps. */
1786 93652 : CrossChunkEdge *crossEdges = jit->edges();
1787 117687 : for (unsigned i = 0; i < jit->nedges; i++) {
1788 24035 : CrossChunkEdge &edge = crossEdges[i];
1789 24035 : if (bytecodeInChunk(outerScript->code + edge.source)) {
1790 1814 : JS_ASSERT(!edge.sourceJump1 && !edge.sourceJump2);
1791 1814 : void *label = edge.targetLabel ? edge.targetLabel : edge.shimLabel;
1792 1814 : CodeLocationLabel targetLabel(label);
1793 1814 : JSOp op = JSOp(script->code[edge.source]);
1794 1814 : if (op == JSOP_TABLESWITCH) {
1795 16 : if (edge.jumpTableEntries)
1796 0 : cx->free_(edge.jumpTableEntries);
1797 16 : CrossChunkEdge::JumpTableEntryVector *jumpTableEntries = NULL;
1798 16 : bool failed = false;
1799 50 : for (unsigned j = 0; j < chunkJumps.length(); j++) {
1800 34 : ChunkJumpTableEdge nedge = chunkJumps[j];
1801 34 : if (nedge.edge.source == edge.source && nedge.edge.target == edge.target) {
1802 10 : if (!jumpTableEntries) {
1803 10 : jumpTableEntries = OffTheBooks::new_<CrossChunkEdge::JumpTableEntryVector>();
1804 10 : if (!jumpTableEntries)
1805 0 : failed = true;
1806 : }
1807 10 : if (!jumpTableEntries->append(nedge.jumpTableEntry))
1808 0 : failed = true;
1809 10 : *nedge.jumpTableEntry = label;
1810 : }
1811 : }
1812 16 : if (failed) {
1813 0 : execPool->release();
1814 0 : cx->free_(chunk);
1815 0 : js_ReportOutOfMemory(cx);
1816 0 : return Compile_Error;
1817 : }
1818 16 : edge.jumpTableEntries = jumpTableEntries;
1819 : }
1820 2382 : for (unsigned j = 0; j < chunkEdges.length(); j++) {
1821 2364 : const OutgoingChunkEdge &oedge = chunkEdges[j];
1822 2364 : if (oedge.source == edge.source && oedge.target == edge.target) {
1823 : /*
1824 : * Only a single edge needs to be patched; we ensured while
1825 : * generating chunks that no two cross chunk edges can have
1826 : * the same source and target. Note that there may not be
1827 : * an edge to patch, if constant folding determined the
1828 : * jump is never taken.
1829 : */
1830 1796 : edge.sourceJump1 = fullCode.locationOf(oedge.fastJump).executableAddress();
1831 1796 : if (oedge.slowJump.isSet()) {
1832 : edge.sourceJump2 =
1833 93 : stubCode.locationOf(oedge.slowJump.get()).executableAddress();
1834 : }
1835 : #ifdef JS_CPU_X64
1836 : edge.sourceTrampoline =
1837 : stubCode.locationOf(oedge.sourceTrampoline).executableAddress();
1838 : #endif
1839 1796 : jit->patchEdge(edge, label);
1840 1796 : break;
1841 : }
1842 : }
1843 22221 : } else if (bytecodeInChunk(outerScript->code + edge.target)) {
1844 1681 : JS_ASSERT(!edge.targetLabel);
1845 1681 : JS_ASSERT(jumpMap[edge.target].isSet());
1846 1681 : edge.targetLabel = fullCode.locationOf(jumpMap[edge.target]).executableAddress();
1847 1681 : jit->patchEdge(edge, edge.targetLabel);
1848 : }
1849 : }
1850 :
1851 93652 : return Compile_Okay;
1852 : }
1853 :
/*
 * Debug-only tracing: when the JSpew_JSOps channel is active, disassemble the
 * bytecode at the current PC and print it together with the compiler's model
 * of the operand-stack depth. Compiles to nothing in release builds. The
 * do/while(0) (JS_BEGIN_MACRO/JS_END_MACRO) wrapper makes the macro safe to
 * use as a single statement, e.g. in an unbraced if/else.
 */
#ifdef DEBUG
#define SPEW_OPCODE()                                                         \
    JS_BEGIN_MACRO                                                            \
        if (IsJaegerSpewChannelActive(JSpew_JSOps)) {                         \
            Sprinter sprinter(cx);                                            \
            sprinter.init();                                                  \
            js_Disassemble1(cx, script, PC, PC - script->code,                \
                            JS_TRUE, &sprinter);                              \
            JaegerSpew(JSpew_JSOps, "    %2d %s",                             \
                       frame.stackDepth(), sprinter.string());                \
        }                                                                     \
    JS_END_MACRO;
#else
#define SPEW_OPCODE()
#endif /* DEBUG */
1869 :
/*
 * Helpers for the big opcode switch in generateMethod(). BEGIN_CASE opens a
 * switch case for an opcode; END_CASE advances PC past the (fixed-length)
 * opcode using the autogenerated name##_LENGTH constant and breaks out of the
 * switch. Opcodes with variable length or fused successors advance PC
 * manually and use `break` directly instead of END_CASE.
 */
#define BEGIN_CASE(name)        case name:
#define END_CASE(name)                                                        \
    JS_BEGIN_MACRO                                                            \
        PC += name##_LENGTH;                                                  \
    JS_END_MACRO;                                                             \
    break;
1876 :
1877 : static inline void
1878 : FixDouble(Value &val)
1879 : {
1880 : if (val.isInt32())
1881 : val.setDouble((double)val.toInt32());
1882 : }
1883 :
1884 : inline bool
1885 216995 : mjit::Compiler::shouldStartLoop(jsbytecode *head)
1886 : {
1887 : /*
1888 : * Don't do loop based optimizations or register allocation for loops which
1889 : * span multiple chunks.
1890 : */
1891 216995 : if (*head == JSOP_LOOPHEAD && analysis->getLoop(head)) {
1892 67575 : uint32_t backedge = analysis->getLoop(head)->backedge;
1893 67575 : if (!bytecodeInChunk(script->code + backedge))
1894 429 : return false;
1895 67146 : return true;
1896 : }
1897 149420 : return false;
1898 : }
1899 :
1900 : CompileStatus
1901 96663 : mjit::Compiler::generateMethod()
1902 : {
1903 96663 : SrcNoteLineScanner scanner(script->notes(), script->lineno);
1904 :
1905 : /* For join points, whether there was fallthrough from the previous opcode. */
1906 96663 : bool fallthrough = true;
1907 :
1908 : /* Last bytecode processed. */
1909 96663 : jsbytecode *lastPC = NULL;
1910 :
1911 96663 : if (!outerJIT())
1912 0 : return Compile_Retry;
1913 :
1914 96663 : uint32_t chunkBegin = 0, chunkEnd = script->length;
1915 96663 : if (!a->parent) {
1916 : const ChunkDescriptor &desc =
1917 93731 : outerJIT()->chunkDescriptor(chunkIndex);
1918 93731 : chunkBegin = desc.begin;
1919 93731 : chunkEnd = desc.end;
1920 :
1921 2132593 : while (PC != script->code + chunkBegin) {
1922 1945131 : Bytecode *opinfo = analysis->maybeCode(PC);
1923 1945131 : if (opinfo) {
1924 1937875 : if (opinfo->jumpTarget) {
1925 : /* Update variable types for all new values at this bytecode. */
1926 20852 : const SlotValue *newv = analysis->newValues(PC);
1927 20852 : if (newv) {
1928 24573 : while (newv->slot) {
1929 17159 : if (newv->slot < TotalSlots(script)) {
1930 10892 : VarType &vt = a->varTypes[newv->slot];
1931 10892 : vt.setTypes(analysis->getValueTypes(newv->value));
1932 : }
1933 17159 : newv++;
1934 : }
1935 : }
1936 : }
1937 1937875 : if (analyze::BytecodeUpdatesSlot(JSOp(*PC))) {
1938 209404 : uint32_t slot = GetBytecodeSlot(script, PC);
1939 209404 : if (analysis->trackSlot(slot)) {
1940 100382 : VarType &vt = a->varTypes[slot];
1941 100382 : vt.setTypes(analysis->pushedTypes(PC, 0));
1942 : }
1943 : }
1944 : }
1945 :
1946 1945131 : PC += GetBytecodeLength(PC);
1947 : }
1948 :
1949 93731 : if (chunkIndex != 0) {
1950 1167 : uint32_t depth = analysis->getCode(PC).stackDepth;
1951 42323 : for (uint32_t i = 0; i < depth; i++)
1952 41156 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
1953 : }
1954 : }
1955 :
1956 4060810 : for (;;) {
1957 4157473 : JSOp op = JSOp(*PC);
1958 4157473 : int trap = stubs::JSTRAP_NONE;
1959 :
1960 4157473 : if (script->hasBreakpointsAt(PC))
1961 339 : trap |= stubs::JSTRAP_TRAP;
1962 :
1963 4157473 : Bytecode *opinfo = analysis->maybeCode(PC);
1964 :
1965 4157473 : if (!opinfo) {
1966 39301 : if (op == JSOP_STOP)
1967 19246 : break;
1968 20055 : if (js_CodeSpec[op].length != -1)
1969 20055 : PC += js_CodeSpec[op].length;
1970 : else
1971 0 : PC += js_GetVariableBytecodeLength(PC);
1972 20055 : continue;
1973 : }
1974 :
1975 4118172 : if (PC >= script->code + script->length)
1976 0 : break;
1977 :
1978 4118172 : scanner.advanceTo(PC - script->code);
1979 4118313 : if (script->stepModeEnabled() &&
1980 141 : (scanner.isLineHeader() || opinfo->jumpTarget))
1981 : {
1982 54 : trap |= stubs::JSTRAP_SINGLESTEP;
1983 : }
1984 :
1985 4118172 : frame.setPC(PC);
1986 4118172 : frame.setInTryBlock(opinfo->inTryBlock);
1987 :
1988 4118172 : if (fallthrough) {
1989 : /*
1990 : * If there is fallthrough from the previous opcode and we changed
1991 : * any entries into doubles for a branch at that previous op,
1992 : * revert those entries into integers. Similarly, if we forgot that
1993 : * an entry is a double then make it a double again, as the frame
1994 : * may have assigned it a normal register.
1995 : */
1996 4020967 : for (unsigned i = 0; i < fixedIntToDoubleEntries.length(); i++) {
1997 4 : FrameEntry *fe = frame.getSlotEntry(fixedIntToDoubleEntries[i]);
1998 4 : frame.ensureInteger(fe);
1999 : }
2000 4020998 : for (unsigned i = 0; i < fixedDoubleToAnyEntries.length(); i++) {
2001 35 : FrameEntry *fe = frame.getSlotEntry(fixedDoubleToAnyEntries[i]);
2002 35 : frame.syncAndForgetFe(fe);
2003 : }
2004 : }
2005 4118172 : fixedIntToDoubleEntries.clear();
2006 4118172 : fixedDoubleToAnyEntries.clear();
2007 :
2008 4118172 : if (PC >= script->code + chunkEnd) {
2009 1264 : if (fallthrough) {
2010 1264 : if (opinfo->jumpTarget)
2011 506 : fixDoubleTypes(PC);
2012 1264 : frame.syncAndForgetEverything();
2013 1264 : jsbytecode *curPC = PC;
2014 3686 : do {
2015 3686 : PC--;
2016 3686 : } while (!analysis->maybeCode(PC));
2017 1264 : if (!jumpAndRun(masm.jump(), curPC, NULL, NULL, /* fallthrough = */ true))
2018 0 : return Compile_Error;
2019 1264 : PC = curPC;
2020 : }
2021 1264 : break;
2022 : }
2023 :
2024 4116908 : if (opinfo->jumpTarget || trap) {
2025 297308 : if (fallthrough) {
2026 200099 : fixDoubleTypes(PC);
2027 200099 : fixedIntToDoubleEntries.clear();
2028 200099 : fixedDoubleToAnyEntries.clear();
2029 :
2030 : /*
2031 : * Watch for fallthrough to the head of a 'do while' loop.
2032 : * We don't know what register state we will be using at the head
2033 : * of the loop so sync, branch, and fix it up after the loop
2034 : * has been processed.
2035 : */
2036 200099 : if (cx->typeInferenceEnabled() && shouldStartLoop(PC)) {
2037 69 : frame.syncAndForgetEverything();
2038 69 : Jump j = masm.jump();
2039 69 : if (!startLoop(PC, j, PC))
2040 0 : return Compile_Error;
2041 : } else {
2042 200030 : Label start = masm.label();
2043 200030 : if (!frame.syncForBranch(PC, Uses(0)))
2044 0 : return Compile_Error;
2045 200030 : if (pcLengths && lastPC) {
2046 : /* Track this sync code for the previous op. */
2047 0 : size_t length = masm.size() - masm.distanceOf(start);
2048 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
2049 0 : pcLengths[offset].codeLength += length;
2050 : }
2051 200030 : JS_ASSERT(frame.consistentRegisters(PC));
2052 : }
2053 : }
2054 :
2055 297308 : if (!frame.discardForJoin(analysis->getAllocation(PC), opinfo->stackDepth))
2056 0 : return Compile_Error;
2057 297308 : updateJoinVarTypes();
2058 297308 : fallthrough = true;
2059 :
2060 297308 : if (!cx->typeInferenceEnabled()) {
2061 : /* All join points have synced state if we aren't doing cross-branch regalloc. */
2062 106611 : opinfo->safePoint = true;
2063 297308 : }
2064 3819600 : } else if (opinfo->safePoint) {
2065 1027 : frame.syncAndForgetEverything();
2066 : }
2067 4116908 : frame.assertValidRegisterState();
2068 4116908 : a->jumpMap[uint32_t(PC - script->code)] = masm.label();
2069 :
2070 : // Now that we have the PC's register allocation, make sure it gets
2071 : // explicitly updated if this is the loop entry and new loop registers
2072 : // are allocated later on.
2073 4116908 : if (loop && !a->parent)
2074 926923 : loop->setOuterPC(PC);
2075 :
2076 4116908 : SPEW_OPCODE();
2077 4116908 : JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
2078 :
2079 4116908 : if (op == JSOP_LOOPHEAD && analysis->getLoop(PC)) {
2080 34156 : jsbytecode *backedge = script->code + analysis->getLoop(PC)->backedge;
2081 34156 : if (!bytecodeInChunk(backedge)){
2082 15438 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
2083 15009 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
2084 4 : FrameEntry *fe = frame.getSlotEntry(slot);
2085 4 : masm.ensureInMemoryDouble(frame.addressOf(fe));
2086 : }
2087 : }
2088 : }
2089 : }
2090 :
2091 : // If this is an exception entry point, then jsl_InternalThrow has set
2092 : // VMFrame::fp to the correct fp for the entry point. We need to copy
2093 : // that value here to FpReg so that FpReg also has the correct sp.
2094 : // Otherwise, we would simply be using a stale FpReg value.
2095 4116908 : if (op == JSOP_ENTERBLOCK && analysis->getCode(PC).exceptionEntry)
2096 17395 : masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
2097 :
2098 4116908 : if (trap) {
2099 393 : prepareStubCall(Uses(0));
2100 393 : masm.move(Imm32(trap), Registers::ArgReg1);
2101 393 : Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap), NULL);
2102 : InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
2103 393 : REJOIN_TRAP, false);
2104 393 : addCallSite(site);
2105 : }
2106 :
2107 : /* Don't compile fat opcodes, run the decomposed version instead. */
2108 4116908 : if (js_CodeSpec[op].format & JOF_DECOMPOSE) {
2109 11765 : PC += js_CodeSpec[op].length;
2110 11765 : continue;
2111 : }
2112 :
2113 4105143 : Label codeStart = masm.label();
2114 4105143 : bool countsUpdated = false;
2115 4105143 : bool arithUpdated = false;
2116 :
2117 4105143 : JSValueType arithFirstUseType = JSVAL_TYPE_UNKNOWN;
2118 4105143 : JSValueType arithSecondUseType = JSVAL_TYPE_UNKNOWN;
2119 4105143 : if (script->scriptCounts && !!(js_CodeSpec[op].format & JOF_ARITH)) {
2120 0 : if (GetUseCount(script, PC - script->code) == 1) {
2121 0 : FrameEntry *use = frame.peek(-1);
2122 : /*
2123 : * Pretend it's a binary operation and the second operand has
2124 : * the same type as the first one.
2125 : */
2126 0 : if (use->isTypeKnown())
2127 0 : arithFirstUseType = arithSecondUseType = use->getKnownType();
2128 : } else {
2129 0 : FrameEntry *use = frame.peek(-1);
2130 0 : if (use->isTypeKnown())
2131 0 : arithFirstUseType = use->getKnownType();
2132 0 : use = frame.peek(-2);
2133 0 : if (use->isTypeKnown())
2134 0 : arithSecondUseType = use->getKnownType();
2135 : }
2136 : }
2137 :
2138 : /*
2139 : * Update PC counts for jump opcodes at their start, so that we don't
2140 : * miss them when taking the jump. This is delayed for other opcodes,
2141 : * as we want to skip updating for ops we didn't generate any code for.
2142 : */
2143 4105143 : if (script->scriptCounts && JOF_OPTYPE(op) == JOF_JUMP)
2144 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2145 :
2146 : /**********************
2147 : * BEGIN COMPILER OPS *
2148 : **********************/
2149 :
2150 4105143 : lastPC = PC;
2151 :
2152 4105143 : switch (op) {
2153 : BEGIN_CASE(JSOP_NOP)
2154 22219 : END_CASE(JSOP_NOP)
2155 :
2156 : BEGIN_CASE(JSOP_UNDEFINED)
2157 136272 : frame.push(UndefinedValue());
2158 136272 : END_CASE(JSOP_UNDEFINED)
2159 :
2160 : BEGIN_CASE(JSOP_POPV)
2161 : BEGIN_CASE(JSOP_SETRVAL)
2162 : {
2163 17513 : RegisterID reg = frame.allocReg();
2164 17513 : masm.load32(FrameFlagsAddress(), reg);
2165 17513 : masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
2166 17513 : masm.store32(reg, FrameFlagsAddress());
2167 17513 : frame.freeReg(reg);
2168 :
2169 : /* Scripts which write to the frame's return slot aren't inlined. */
2170 17513 : JS_ASSERT(a == outer);
2171 :
2172 17513 : FrameEntry *fe = frame.peek(-1);
2173 17513 : frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
2174 17513 : frame.pop();
2175 : }
2176 17513 : END_CASE(JSOP_POPV)
2177 :
2178 : BEGIN_CASE(JSOP_RETURN)
2179 22921 : if (script->scriptCounts)
2180 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2181 22921 : emitReturn(frame.peek(-1));
2182 22921 : fallthrough = false;
2183 22921 : END_CASE(JSOP_RETURN)
2184 :
2185 : BEGIN_CASE(JSOP_GOTO)
2186 : BEGIN_CASE(JSOP_DEFAULT)
2187 : {
2188 90500 : unsigned targetOffset = FollowBranch(cx, script, PC - script->code);
2189 90500 : jsbytecode *target = script->code + targetOffset;
2190 :
2191 90500 : fixDoubleTypes(target);
2192 :
2193 : /*
2194 : * Watch for gotos which are entering a 'for' or 'while' loop.
2195 : * These jump to the loop condition test and are immediately
2196 : * followed by the head of the loop.
2197 : */
2198 90500 : jsbytecode *next = PC + js_CodeSpec[op].length;
2199 185752 : if (cx->typeInferenceEnabled() &&
2200 48656 : analysis->maybeCode(next) &&
2201 46596 : shouldStartLoop(next))
2202 : {
2203 33504 : frame.syncAndForgetEverything();
2204 33504 : Jump j = masm.jump();
2205 33504 : if (!startLoop(next, j, target))
2206 0 : return Compile_Error;
2207 : } else {
2208 56996 : if (!frame.syncForBranch(target, Uses(0)))
2209 0 : return Compile_Error;
2210 56996 : Jump j = masm.jump();
2211 56996 : if (!jumpAndRun(j, target))
2212 0 : return Compile_Error;
2213 : }
2214 90500 : fallthrough = false;
2215 90500 : PC += js_CodeSpec[op].length;
2216 90500 : break;
2217 : }
2218 : END_CASE(JSOP_GOTO)
2219 :
2220 : BEGIN_CASE(JSOP_IFEQ)
2221 : BEGIN_CASE(JSOP_IFNE)
2222 : {
2223 47224 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2224 47224 : fixDoubleTypes(target);
2225 47224 : if (!jsop_ifneq(op, target))
2226 0 : return Compile_Error;
2227 47224 : PC += js_CodeSpec[op].length;
2228 47224 : break;
2229 : }
2230 : END_CASE(JSOP_IFNE)
2231 :
2232 : BEGIN_CASE(JSOP_ARGUMENTS)
2233 : {
2234 : /*
2235 : * For calls of the form 'f.apply(x, arguments)' we can avoid
2236 : * creating an args object by having ic::SplatApplyArgs pull
2237 : * directly from the stack. To do this, we speculate here that
2238 : * 'apply' actually refers to js_fun_apply. If this is not true,
2239 : * the slow path in JSOP_FUNAPPLY will create the args object.
2240 : */
2241 2658 : if (!script->needsArgsObj()) {
2242 1199 : if (canUseApplyTricks()) {
2243 : /*
2244 : * Check for interrupts at the JSOP_ARGUMENTS when using
2245 : * apply tricks, see inlineCallHelper().
2246 : */
2247 217 : interruptCheckHelper();
2248 :
2249 217 : applyTricks = LazyArgsObj;
2250 217 : pushSyncedEntry(0);
2251 : } else {
2252 : /*
2253 : * When analyzing whether a script needsArgsObject, the analysis in
2254 : * analyzeSSA uses the simple predicate SpeculateApplyOptimization.
2255 : * The actual mjit predicate for using the optimization is
2256 : * canUseApplyTricks which depends on temporal compiler state.
2257 : * Thus, script->needsArgsObj can be over-optimistic and needs to
2258 : * be checked here and corrected.
2259 : */
2260 982 : if (SpeculateApplyOptimization(PC)) {
2261 45 : if (!script->applySpeculationFailed(cx))
2262 0 : return Compile_Error;
2263 :
2264 : /* All our assumptions are wrong, try again. */
2265 45 : return Compile_Retry;
2266 : }
2267 :
2268 937 : frame.push(MagicValue(JS_OPTIMIZED_ARGUMENTS));
2269 : }
2270 : } else {
2271 1459 : jsop_arguments(REJOIN_FALLTHROUGH);
2272 1459 : pushSyncedEntry(0);
2273 : }
2274 : }
2275 2613 : END_CASE(JSOP_ARGUMENTS)
2276 :
2277 : BEGIN_CASE(JSOP_ITERNEXT)
2278 4416 : iterNext(GET_INT8(PC));
2279 4416 : END_CASE(JSOP_ITERNEXT)
2280 :
2281 : BEGIN_CASE(JSOP_DUP)
2282 87244 : frame.dup();
2283 87244 : END_CASE(JSOP_DUP)
2284 :
2285 : BEGIN_CASE(JSOP_DUP2)
2286 1302 : frame.dup2();
2287 1302 : END_CASE(JSOP_DUP2)
2288 :
2289 : BEGIN_CASE(JSOP_SWAP)
2290 82739 : frame.dup2();
2291 82739 : frame.shift(-3);
2292 82739 : frame.shift(-1);
2293 82739 : END_CASE(JSOP_SWAP)
2294 :
2295 : BEGIN_CASE(JSOP_PICK)
2296 : {
2297 9208 : uint32_t amt = GET_UINT8(PC);
2298 :
2299 : // Push -(amt + 1), say amt == 2
2300 : // Stack before: X3 X2 X1
2301 : // Stack after: X3 X2 X1 X3
2302 9208 : frame.dupAt(-int32_t(amt + 1));
2303 :
2304 : // For each item X[i...1] push it then move it down.
2305 : // The above would transition like so:
2306 : // X3 X2 X1 X3 X2 (dupAt)
2307 : // X2 X2 X1 X3 (shift)
2308 : // X2 X2 X1 X3 X1 (dupAt)
2309 : // X2 X1 X1 X3 (shift)
2310 28274 : for (int32_t i = -int32_t(amt); i < 0; i++) {
2311 19066 : frame.dupAt(i - 1);
2312 19066 : frame.shift(i - 2);
2313 : }
2314 :
2315 : // The stack looks like:
2316 : // Xn ... X1 X1 X{n+1}
2317 : // So shimmy the last value down.
2318 9208 : frame.shimmy(1);
2319 : }
2320 9208 : END_CASE(JSOP_PICK)
2321 :
2322 : BEGIN_CASE(JSOP_BITOR)
2323 : BEGIN_CASE(JSOP_BITXOR)
2324 : BEGIN_CASE(JSOP_BITAND)
2325 6647 : jsop_bitop(op);
2326 6647 : END_CASE(JSOP_BITAND)
2327 :
2328 : BEGIN_CASE(JSOP_LT)
2329 : BEGIN_CASE(JSOP_LE)
2330 : BEGIN_CASE(JSOP_GT)
2331 : BEGIN_CASE(JSOP_GE)
2332 : BEGIN_CASE(JSOP_EQ)
2333 : BEGIN_CASE(JSOP_NE)
2334 : {
2335 53852 : if (script->scriptCounts) {
2336 0 : updateArithCounts(PC, NULL, arithFirstUseType, arithSecondUseType);
2337 0 : arithUpdated = true;
2338 : }
2339 :
2340 : /* Detect fusions. */
2341 53852 : jsbytecode *next = &PC[JSOP_GE_LENGTH];
2342 53852 : JSOp fused = JSOp(*next);
2343 53852 : if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
2344 5420 : fused = JSOP_NOP;
2345 :
2346 : /* Get jump target, if any. */
2347 53852 : jsbytecode *target = NULL;
2348 53852 : if (fused != JSOP_NOP) {
2349 48432 : if (script->scriptCounts)
2350 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2351 48432 : target = next + GET_JUMP_OFFSET(next);
2352 48432 : fixDoubleTypes(target);
2353 : }
2354 :
2355 53852 : BoolStub stub = NULL;
2356 53852 : switch (op) {
2357 : case JSOP_LT:
2358 37198 : stub = stubs::LessThan;
2359 37198 : break;
2360 : case JSOP_LE:
2361 1549 : stub = stubs::LessEqual;
2362 1549 : break;
2363 : case JSOP_GT:
2364 3396 : stub = stubs::GreaterThan;
2365 3396 : break;
2366 : case JSOP_GE:
2367 2219 : stub = stubs::GreaterEqual;
2368 2219 : break;
2369 : case JSOP_EQ:
2370 4797 : stub = stubs::Equal;
2371 4797 : break;
2372 : case JSOP_NE:
2373 4693 : stub = stubs::NotEqual;
2374 4693 : break;
2375 : default:
2376 0 : JS_NOT_REACHED("WAT");
2377 : break;
2378 : }
2379 :
2380 : /*
2381 : * We need to ensure in the target case that we always rejoin
2382 : * before the rval test. In the non-target case we will rejoin
2383 : * correctly after the op finishes.
2384 : */
2385 :
2386 53852 : FrameEntry *rhs = frame.peek(-1);
2387 53852 : FrameEntry *lhs = frame.peek(-2);
2388 :
2389 : /* Check for easy cases that the parser does not constant fold. */
2390 53852 : if (lhs->isConstant() && rhs->isConstant()) {
2391 : /* Primitives can be trivially constant folded. */
2392 257 : const Value &lv = lhs->getValue();
2393 257 : const Value &rv = rhs->getValue();
2394 :
2395 257 : if (lv.isPrimitive() && rv.isPrimitive()) {
2396 247 : bool result = compareTwoValues(cx, op, lv, rv);
2397 :
2398 247 : frame.pop();
2399 247 : frame.pop();
2400 :
2401 247 : if (!target) {
2402 130 : frame.push(Value(BooleanValue(result)));
2403 : } else {
2404 117 : if (fused == JSOP_IFEQ)
2405 117 : result = !result;
2406 117 : if (!constantFoldBranch(target, result))
2407 0 : return Compile_Error;
2408 : }
2409 : } else {
2410 10 : if (!emitStubCmpOp(stub, target, fused))
2411 0 : return Compile_Error;
2412 : }
2413 : } else {
2414 : /* Anything else should go through the fast path generator. */
2415 53595 : if (!jsop_relational(op, stub, target, fused))
2416 0 : return Compile_Error;
2417 : }
2418 :
2419 : /* Advance PC manually. */
2420 : JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
2421 : JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
2422 : JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
2423 : JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
2424 : JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
2425 :
2426 53852 : PC += JSOP_GE_LENGTH;
2427 53852 : if (fused != JSOP_NOP) {
2428 48432 : SPEW_OPCODE();
2429 48432 : PC += JSOP_IFNE_LENGTH;
2430 : }
2431 53852 : break;
2432 : }
2433 : END_CASE(JSOP_GE)
2434 :
2435 : BEGIN_CASE(JSOP_LSH)
2436 2011 : jsop_bitop(op);
2437 2011 : END_CASE(JSOP_LSH)
2438 :
2439 : BEGIN_CASE(JSOP_RSH)
2440 2812 : jsop_bitop(op);
2441 2812 : END_CASE(JSOP_RSH)
2442 :
2443 : BEGIN_CASE(JSOP_URSH)
2444 832 : jsop_bitop(op);
2445 832 : END_CASE(JSOP_URSH)
2446 :
2447 : BEGIN_CASE(JSOP_ADD)
2448 343961 : if (!jsop_binary(op, stubs::Add, knownPushedType(0), pushedTypeSet(0)))
2449 3 : return Compile_Retry;
2450 343958 : END_CASE(JSOP_ADD)
2451 :
2452 : BEGIN_CASE(JSOP_SUB)
2453 7673 : if (!jsop_binary(op, stubs::Sub, knownPushedType(0), pushedTypeSet(0)))
2454 0 : return Compile_Retry;
2455 7673 : END_CASE(JSOP_SUB)
2456 :
2457 : BEGIN_CASE(JSOP_MUL)
2458 6257 : if (!jsop_binary(op, stubs::Mul, knownPushedType(0), pushedTypeSet(0)))
2459 0 : return Compile_Retry;
2460 6257 : END_CASE(JSOP_MUL)
2461 :
2462 : BEGIN_CASE(JSOP_DIV)
2463 2844 : if (!jsop_binary(op, stubs::Div, knownPushedType(0), pushedTypeSet(0)))
2464 4 : return Compile_Retry;
2465 2840 : END_CASE(JSOP_DIV)
2466 :
2467 : BEGIN_CASE(JSOP_MOD)
2468 1090 : if (!jsop_mod())
2469 2 : return Compile_Retry;
2470 1088 : END_CASE(JSOP_MOD)
2471 :
2472 : BEGIN_CASE(JSOP_NOT)
2473 35607 : jsop_not();
2474 35607 : END_CASE(JSOP_NOT)
2475 :
2476 : BEGIN_CASE(JSOP_BITNOT)
2477 : {
2478 166 : FrameEntry *top = frame.peek(-1);
2479 166 : if (top->isConstant() && top->getValue().isPrimitive()) {
2480 : int32_t i;
2481 0 : JS_ALWAYS_TRUE(ToInt32(cx, top->getValue(), &i));
2482 0 : i = ~i;
2483 0 : frame.pop();
2484 0 : frame.push(Int32Value(i));
2485 : } else {
2486 166 : jsop_bitnot();
2487 : }
2488 : }
2489 166 : END_CASE(JSOP_BITNOT)
2490 :
2491 : BEGIN_CASE(JSOP_NEG)
2492 : {
2493 4196 : FrameEntry *top = frame.peek(-1);
2494 4196 : if (top->isConstant() && top->getValue().isPrimitive()) {
2495 : double d;
2496 429 : JS_ALWAYS_TRUE(ToNumber(cx, top->getValue(), &d));
2497 429 : d = -d;
2498 429 : Value v = NumberValue(d);
2499 :
2500 : /* Watch for overflow in constant propagation. */
2501 429 : types::TypeSet *pushed = pushedTypeSet(0);
2502 429 : if (!v.isInt32() && pushed && !pushed->hasType(types::Type::DoubleType())) {
2503 24 : types::TypeScript::MonitorOverflow(cx, script, PC);
2504 24 : return Compile_Retry;
2505 : }
2506 :
2507 405 : frame.pop();
2508 405 : frame.push(v);
2509 : } else {
2510 3767 : jsop_neg();
2511 : }
2512 : }
2513 4172 : END_CASE(JSOP_NEG)
2514 :
2515 : BEGIN_CASE(JSOP_POS)
2516 11961 : jsop_pos();
2517 11961 : END_CASE(JSOP_POS)
2518 :
2519 : BEGIN_CASE(JSOP_DELNAME)
2520 : {
2521 410 : uint32_t index = GET_UINT32_INDEX(PC);
2522 410 : PropertyName *name = script->getName(index);
2523 :
2524 410 : prepareStubCall(Uses(0));
2525 410 : masm.move(ImmPtr(name), Registers::ArgReg1);
2526 410 : INLINE_STUBCALL(stubs::DelName, REJOIN_FALLTHROUGH);
2527 410 : pushSyncedEntry(0);
2528 : }
2529 410 : END_CASE(JSOP_DELNAME)
2530 :
2531 : BEGIN_CASE(JSOP_DELPROP)
2532 : {
2533 260 : uint32_t index = GET_UINT32_INDEX(PC);
2534 260 : PropertyName *name = script->getName(index);
2535 :
2536 260 : prepareStubCall(Uses(1));
2537 260 : masm.move(ImmPtr(name), Registers::ArgReg1);
2538 260 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp), REJOIN_FALLTHROUGH);
2539 260 : frame.pop();
2540 260 : pushSyncedEntry(0);
2541 : }
2542 260 : END_CASE(JSOP_DELPROP)
2543 :
2544 : BEGIN_CASE(JSOP_DELELEM)
2545 : {
2546 374 : prepareStubCall(Uses(2));
2547 374 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem), REJOIN_FALLTHROUGH);
2548 374 : frame.popn(2);
2549 374 : pushSyncedEntry(0);
2550 : }
2551 374 : END_CASE(JSOP_DELELEM)
2552 :
2553 : BEGIN_CASE(JSOP_TYPEOF)
2554 : BEGIN_CASE(JSOP_TYPEOFEXPR)
2555 1792 : jsop_typeof();
2556 1792 : END_CASE(JSOP_TYPEOF)
2557 :
2558 : BEGIN_CASE(JSOP_VOID)
2559 524 : frame.pop();
2560 524 : frame.push(UndefinedValue());
2561 524 : END_CASE(JSOP_VOID)
2562 :
2563 : BEGIN_CASE(JSOP_GETPROP)
2564 : BEGIN_CASE(JSOP_CALLPROP)
2565 : BEGIN_CASE(JSOP_LENGTH)
2566 139645 : if (!jsop_getprop(script->getName(GET_UINT32_INDEX(PC)), knownPushedType(0)))
2567 0 : return Compile_Error;
2568 139645 : END_CASE(JSOP_GETPROP)
2569 :
2570 : BEGIN_CASE(JSOP_GETELEM)
2571 : BEGIN_CASE(JSOP_CALLELEM)
2572 48555 : if (script->scriptCounts)
2573 0 : updateElemCounts(PC, frame.peek(-2), frame.peek(-1));
2574 48555 : if (!jsop_getelem())
2575 0 : return Compile_Error;
2576 48555 : END_CASE(JSOP_GETELEM)
2577 :
2578 : BEGIN_CASE(JSOP_TOID)
2579 626 : jsop_toid();
2580 626 : END_CASE(JSOP_TOID)
2581 :
2582 : BEGIN_CASE(JSOP_SETELEM)
2583 : {
2584 16178 : if (script->scriptCounts)
2585 0 : updateElemCounts(PC, frame.peek(-3), frame.peek(-2));
2586 16178 : jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
2587 16178 : bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
2588 16178 : if (!jsop_setelem(pop))
2589 0 : return Compile_Error;
2590 : }
2591 16178 : END_CASE(JSOP_SETELEM);
2592 :
2593 : BEGIN_CASE(JSOP_EVAL)
2594 : {
2595 1978 : JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
2596 1978 : emitEval(GET_ARGC(PC));
2597 1978 : JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
2598 : }
2599 1978 : END_CASE(JSOP_EVAL)
2600 :
2601 : BEGIN_CASE(JSOP_CALL)
2602 : BEGIN_CASE(JSOP_NEW)
2603 : BEGIN_CASE(JSOP_FUNAPPLY)
2604 : BEGIN_CASE(JSOP_FUNCALL)
2605 : {
2606 180460 : bool callingNew = (op == JSOP_NEW);
2607 :
2608 180460 : bool done = false;
2609 180460 : if ((op == JSOP_CALL || op == JSOP_NEW) && !monitored(PC)) {
2610 172875 : CompileStatus status = inlineNativeFunction(GET_ARGC(PC), callingNew);
2611 172875 : if (status == Compile_Okay)
2612 3177 : done = true;
2613 169698 : else if (status != Compile_InlineAbort)
2614 0 : return status;
2615 : }
2616 180460 : if (!done && inlining()) {
2617 60261 : CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), callingNew);
2618 60261 : if (status == Compile_Okay)
2619 2764 : done = true;
2620 57497 : else if (status != Compile_InlineAbort)
2621 0 : return status;
2622 60261 : if (script->scriptCounts) {
2623 : /* Code generated while inlining has been accounted for. */
2624 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2625 : }
2626 : }
2627 :
2628 : FrameSize frameSize;
2629 180460 : frameSize.initStatic(frame.totalDepth(), GET_ARGC(PC));
2630 :
2631 180460 : if (!done) {
2632 174519 : JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
2633 174519 : if (!inlineCallHelper(GET_ARGC(PC), callingNew, frameSize))
2634 0 : return Compile_Error;
2635 174519 : JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
2636 : }
2637 : }
2638 180460 : END_CASE(JSOP_CALL)
2639 :
2640 : BEGIN_CASE(JSOP_NAME)
2641 : BEGIN_CASE(JSOP_CALLNAME)
2642 : {
2643 60978 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
2644 60978 : jsop_name(name, knownPushedType(0));
2645 60978 : frame.extra(frame.peek(-1)).name = name;
2646 : }
2647 60978 : END_CASE(JSOP_NAME)
2648 :
2649 : BEGIN_CASE(JSOP_IMPLICITTHIS)
2650 : {
2651 1859 : prepareStubCall(Uses(0));
2652 1859 : masm.move(ImmPtr(script->getName(GET_UINT32_INDEX(PC))), Registers::ArgReg1);
2653 1859 : INLINE_STUBCALL(stubs::ImplicitThis, REJOIN_FALLTHROUGH);
2654 1859 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
2655 : }
2656 1859 : END_CASE(JSOP_IMPLICITTHIS)
2657 :
2658 : BEGIN_CASE(JSOP_DOUBLE)
2659 : {
2660 7435 : double d = script->getConst(GET_UINT32_INDEX(PC)).toDouble();
2661 7435 : frame.push(Value(DoubleValue(d)));
2662 : }
2663 7435 : END_CASE(JSOP_DOUBLE)
2664 :
2665 : BEGIN_CASE(JSOP_STRING)
2666 163986 : frame.push(StringValue(script->getAtom(GET_UINT32_INDEX(PC))));
2667 163986 : END_CASE(JSOP_STRING)
2668 :
2669 : BEGIN_CASE(JSOP_ZERO)
2670 83908 : frame.push(JSVAL_ZERO);
2671 83908 : END_CASE(JSOP_ZERO)
2672 :
2673 : BEGIN_CASE(JSOP_ONE)
2674 62396 : frame.push(JSVAL_ONE);
2675 62396 : END_CASE(JSOP_ONE)
2676 :
2677 : BEGIN_CASE(JSOP_NULL)
2678 8010 : frame.push(NullValue());
2679 8010 : END_CASE(JSOP_NULL)
2680 :
2681 : BEGIN_CASE(JSOP_THIS)
2682 51147 : jsop_this();
2683 51147 : END_CASE(JSOP_THIS)
2684 :
2685 : BEGIN_CASE(JSOP_FALSE)
2686 7616 : frame.push(Value(BooleanValue(false)));
2687 7616 : END_CASE(JSOP_FALSE)
2688 :
2689 : BEGIN_CASE(JSOP_TRUE)
2690 9490 : frame.push(Value(BooleanValue(true)));
2691 9490 : END_CASE(JSOP_TRUE)
2692 :
2693 : BEGIN_CASE(JSOP_OR)
2694 : BEGIN_CASE(JSOP_AND)
2695 : {
2696 4712 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2697 4712 : fixDoubleTypes(target);
2698 4712 : if (!jsop_andor(op, target))
2699 0 : return Compile_Error;
2700 : }
2701 4712 : END_CASE(JSOP_AND)
2702 :
2703 : BEGIN_CASE(JSOP_TABLESWITCH)
2704 : /*
2705 : * Note: there is no need to syncForBranch for the various targets of
2706 : * switch statement. The liveness analysis has already marked these as
2707 : * allocated with no registers in use. There is also no need to fix
2708 : * double types, as we don't track types of slots in scripts with
2709 : * switch statements (could be fixed).
2710 : */
2711 298 : if (script->scriptCounts)
2712 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2713 : #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
2714 : frame.syncAndKillEverything();
2715 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2716 :
2717 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2718 : INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
2719 : frame.pop();
2720 :
2721 : masm.jump(Registers::ReturnReg);
2722 : #else
2723 298 : if (!jsop_tableswitch(PC))
2724 0 : return Compile_Error;
2725 : #endif
2726 298 : PC += js_GetVariableBytecodeLength(PC);
2727 298 : break;
2728 : END_CASE(JSOP_TABLESWITCH)
2729 :
2730 : BEGIN_CASE(JSOP_LOOKUPSWITCH)
2731 71 : if (script->scriptCounts)
2732 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2733 71 : frame.syncAndForgetEverything();
2734 71 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2735 :
2736 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2737 71 : INLINE_STUBCALL(stubs::LookupSwitch, REJOIN_NONE);
2738 71 : frame.pop();
2739 :
2740 71 : masm.jump(Registers::ReturnReg);
2741 71 : PC += js_GetVariableBytecodeLength(PC);
2742 71 : break;
2743 : END_CASE(JSOP_LOOKUPSWITCH)
2744 :
2745 : BEGIN_CASE(JSOP_CASE)
2746 : // X Y
2747 :
2748 317 : frame.dupAt(-2);
2749 : // X Y X
2750 :
2751 317 : jsop_stricteq(JSOP_STRICTEQ);
2752 : // X cond
2753 :
2754 317 : if (!jsop_ifneq(JSOP_IFNE, PC + GET_JUMP_OFFSET(PC)))
2755 0 : return Compile_Error;
2756 317 : END_CASE(JSOP_CASE)
2757 :
2758 : BEGIN_CASE(JSOP_STRICTEQ)
2759 : BEGIN_CASE(JSOP_STRICTNE)
2760 8757 : if (script->scriptCounts) {
2761 0 : updateArithCounts(PC, NULL, arithFirstUseType, arithSecondUseType);
2762 0 : arithUpdated = true;
2763 : }
2764 8757 : jsop_stricteq(op);
2765 8757 : END_CASE(JSOP_STRICTEQ)
2766 :
2767 : BEGIN_CASE(JSOP_ITER)
2768 4176 : if (!iter(GET_UINT8(PC)))
2769 0 : return Compile_Error;
2770 4176 : END_CASE(JSOP_ITER)
2771 :
2772 : BEGIN_CASE(JSOP_MOREITER)
2773 : {
2774 : /* At the byte level, this is always fused with IFNE or IFNEX. */
2775 4069 : if (script->scriptCounts)
2776 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
2777 4069 : jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
2778 4069 : JSOp next = JSOp(*target);
2779 4069 : JS_ASSERT(next == JSOP_IFNE);
2780 :
2781 4069 : target += GET_JUMP_OFFSET(target);
2782 :
2783 4069 : fixDoubleTypes(target);
2784 4069 : if (!iterMore(target))
2785 0 : return Compile_Error;
2786 4069 : PC += JSOP_MOREITER_LENGTH;
2787 4069 : PC += js_CodeSpec[next].length;
2788 4069 : break;
2789 : }
2790 : END_CASE(JSOP_MOREITER)
2791 :
2792 : BEGIN_CASE(JSOP_ENDITER)
2793 4175 : iterEnd();
2794 4175 : END_CASE(JSOP_ENDITER)
2795 :
2796 : BEGIN_CASE(JSOP_POP)
2797 350383 : frame.pop();
2798 350383 : END_CASE(JSOP_POP)
2799 :
2800 : BEGIN_CASE(JSOP_GETARG)
2801 : BEGIN_CASE(JSOP_CALLARG)
2802 : {
2803 97352 : restoreVarType();
2804 97352 : uint32_t arg = GET_SLOTNO(PC);
2805 97352 : if (JSObject *singleton = pushedSingleton(0))
2806 861 : frame.push(ObjectValue(*singleton));
2807 : else
2808 96491 : frame.pushArg(arg);
2809 : }
2810 97352 : END_CASE(JSOP_GETARG)
2811 :
2812 : BEGIN_CASE(JSOP_BINDGNAME)
2813 90377 : jsop_bindgname();
2814 90377 : END_CASE(JSOP_BINDGNAME)
2815 :
2816 : BEGIN_CASE(JSOP_SETARG)
2817 : {
2818 1866 : jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
2819 1866 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2820 1866 : frame.storeArg(GET_SLOTNO(PC), pop);
2821 1866 : updateVarType();
2822 :
2823 1866 : if (pop) {
2824 1781 : frame.pop();
2825 1781 : PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
2826 1781 : break;
2827 : }
2828 : }
2829 85 : END_CASE(JSOP_SETARG)
2830 :
2831 : BEGIN_CASE(JSOP_GETLOCAL)
2832 : BEGIN_CASE(JSOP_CALLLOCAL)
2833 : {
2834 : /*
2835 : * Update the var type unless we are about to pop the variable.
2836 : * Sync is not guaranteed for types of dead locals, and GETLOCAL
2837 : * followed by POP is not regarded as a use of the variable.
2838 : */
2839 244269 : jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
2840 244269 : if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
2841 196494 : restoreVarType();
2842 244269 : uint32_t slot = GET_SLOTNO(PC);
2843 244269 : if (JSObject *singleton = pushedSingleton(0))
2844 38 : frame.push(ObjectValue(*singleton));
2845 : else
2846 244231 : frame.pushLocal(slot);
2847 : }
2848 244269 : END_CASE(JSOP_GETLOCAL)
2849 :
2850 : BEGIN_CASE(JSOP_SETLOCAL)
2851 : {
2852 169210 : jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
2853 169210 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2854 169210 : frame.storeLocal(GET_SLOTNO(PC), pop);
2855 169210 : updateVarType();
2856 :
2857 169210 : if (pop) {
2858 168784 : frame.pop();
2859 168784 : PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
2860 168784 : break;
2861 : }
2862 : }
2863 426 : END_CASE(JSOP_SETLOCAL)
2864 :
2865 : BEGIN_CASE(JSOP_UINT16)
2866 10206 : frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
2867 10206 : END_CASE(JSOP_UINT16)
2868 :
2869 : BEGIN_CASE(JSOP_NEWINIT)
2870 459 : if (!jsop_newinit())
2871 0 : return Compile_Error;
2872 459 : END_CASE(JSOP_NEWINIT)
2873 :
2874 : BEGIN_CASE(JSOP_NEWARRAY)
2875 12098 : if (!jsop_newinit())
2876 0 : return Compile_Error;
2877 12098 : END_CASE(JSOP_NEWARRAY)
2878 :
2879 : BEGIN_CASE(JSOP_NEWOBJECT)
2880 7280 : if (!jsop_newinit())
2881 0 : return Compile_Error;
2882 7280 : END_CASE(JSOP_NEWOBJECT)
2883 :
2884 : BEGIN_CASE(JSOP_ENDINIT)
2885 19816 : END_CASE(JSOP_ENDINIT)
2886 :
2887 : BEGIN_CASE(JSOP_INITPROP)
2888 7381 : jsop_initprop();
2889 7381 : frame.pop();
2890 7381 : END_CASE(JSOP_INITPROP)
2891 :
2892 : BEGIN_CASE(JSOP_INITELEM)
2893 40156 : jsop_initelem();
2894 40156 : frame.popn(2);
2895 40156 : END_CASE(JSOP_INITELEM)
2896 :
2897 : BEGIN_CASE(JSOP_INCARG)
2898 : BEGIN_CASE(JSOP_DECARG)
2899 : BEGIN_CASE(JSOP_ARGINC)
2900 : BEGIN_CASE(JSOP_ARGDEC)
2901 433 : if (script->scriptCounts) {
2902 0 : restoreVarType();
2903 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2904 0 : if (fe->isTypeKnown())
2905 0 : arithFirstUseType = fe->getKnownType();
2906 : }
2907 :
2908 433 : if (!jsop_arginc(op, GET_SLOTNO(PC)))
2909 0 : return Compile_Retry;
2910 :
2911 433 : if (script->scriptCounts) {
2912 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2913 0 : updateArithCounts(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2914 0 : arithUpdated = true;
2915 : }
2916 433 : END_CASE(JSOP_ARGDEC)
2917 :
2918 : BEGIN_CASE(JSOP_INCLOCAL)
2919 : BEGIN_CASE(JSOP_DECLOCAL)
2920 : BEGIN_CASE(JSOP_LOCALINC)
2921 : BEGIN_CASE(JSOP_LOCALDEC)
2922 29396 : if (script->scriptCounts) {
2923 0 : restoreVarType();
2924 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2925 0 : if (fe->isTypeKnown())
2926 0 : arithFirstUseType = fe->getKnownType();
2927 : }
2928 :
2929 29396 : if (!jsop_localinc(op, GET_SLOTNO(PC)))
2930 1 : return Compile_Retry;
2931 :
2932 29395 : if (script->scriptCounts) {
2933 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2934 0 : updateArithCounts(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2935 0 : arithUpdated = true;
2936 : }
2937 29395 : END_CASE(JSOP_LOCALDEC)
2938 :
2939 : BEGIN_CASE(JSOP_BINDNAME)
2940 3587 : jsop_bindname(script->getName(GET_UINT32_INDEX(PC)));
2941 3587 : END_CASE(JSOP_BINDNAME)
2942 :
2943 : BEGIN_CASE(JSOP_SETPROP)
2944 : {
2945 14798 : jsbytecode *next = &PC[JSOP_SETPROP_LENGTH];
2946 14798 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2947 14798 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2948 0 : return Compile_Error;
2949 : }
2950 14798 : END_CASE(JSOP_SETPROP)
2951 :
2952 : BEGIN_CASE(JSOP_SETNAME)
2953 : {
2954 3587 : jsbytecode *next = &PC[JSOP_SETNAME_LENGTH];
2955 3587 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2956 3587 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2957 0 : return Compile_Error;
2958 : }
2959 3587 : END_CASE(JSOP_SETNAME)
2960 :
2961 : BEGIN_CASE(JSOP_THROW)
2962 2018 : prepareStubCall(Uses(1));
2963 2018 : INLINE_STUBCALL(stubs::Throw, REJOIN_NONE);
2964 2018 : frame.pop();
2965 2018 : fallthrough = false;
2966 2018 : END_CASE(JSOP_THROW)
2967 :
2968 : BEGIN_CASE(JSOP_IN)
2969 : {
2970 33970 : jsop_in();
2971 : }
2972 33970 : END_CASE(JSOP_IN)
2973 :
2974 : BEGIN_CASE(JSOP_INSTANCEOF)
2975 1615 : if (!jsop_instanceof())
2976 0 : return Compile_Error;
2977 1615 : END_CASE(JSOP_INSTANCEOF)
2978 :
2979 : BEGIN_CASE(JSOP_EXCEPTION)
2980 : {
2981 17395 : prepareStubCall(Uses(0));
2982 17395 : INLINE_STUBCALL(stubs::Exception, REJOIN_FALLTHROUGH);
2983 17395 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
2984 : }
2985 17395 : END_CASE(JSOP_EXCEPTION)
2986 :
2987 : BEGIN_CASE(JSOP_LINENO)
2988 1978 : END_CASE(JSOP_LINENO)
2989 :
2990 : BEGIN_CASE(JSOP_ENUMELEM)
2991 : // Normally, SETELEM transforms the stack
2992 : // from: OBJ ID VALUE
2993 : // to: VALUE
2994 : //
2995 : // Here, the stack transition is
2996 : // from: VALUE OBJ ID
2997 : // to:
2998 : // So we make the stack look like a SETELEM, and re-use it.
2999 :
3000 : // Before: VALUE OBJ ID
3001 : // After: VALUE OBJ ID VALUE
3002 0 : frame.dupAt(-3);
3003 :
3004 : // Before: VALUE OBJ ID VALUE
3005 : // After: VALUE VALUE
3006 0 : if (!jsop_setelem(true))
3007 0 : return Compile_Error;
3008 :
3009 : // Before: VALUE VALUE
3010 : // After:
3011 0 : frame.popn(2);
3012 0 : END_CASE(JSOP_ENUMELEM)
3013 :
3014 : BEGIN_CASE(JSOP_CONDSWITCH)
3015 : /* No-op for the decompiler. */
3016 91 : END_CASE(JSOP_CONDSWITCH)
3017 :
3018 : BEGIN_CASE(JSOP_LABEL)
3019 108 : END_CASE(JSOP_LABEL)
3020 :
3021 : BEGIN_CASE(JSOP_DEFFUN)
3022 : {
3023 1513 : JSFunction *innerFun = script->getFunction(GET_UINT32_INDEX(PC));
3024 :
3025 1513 : prepareStubCall(Uses(0));
3026 1513 : masm.move(ImmPtr(innerFun), Registers::ArgReg1);
3027 1513 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun), REJOIN_FALLTHROUGH);
3028 : }
3029 1513 : END_CASE(JSOP_DEFFUN)
3030 :
3031 : BEGIN_CASE(JSOP_DEFVAR)
3032 : BEGIN_CASE(JSOP_DEFCONST)
3033 : {
3034 27779 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3035 :
3036 27779 : prepareStubCall(Uses(0));
3037 27779 : masm.move(ImmPtr(name), Registers::ArgReg1);
3038 27779 : INLINE_STUBCALL(stubs::DefVarOrConst, REJOIN_FALLTHROUGH);
3039 : }
3040 27779 : END_CASE(JSOP_DEFVAR)
3041 :
3042 : BEGIN_CASE(JSOP_SETCONST)
3043 : {
3044 25163 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3045 :
3046 25163 : prepareStubCall(Uses(1));
3047 25163 : masm.move(ImmPtr(name), Registers::ArgReg1);
3048 25163 : INLINE_STUBCALL(stubs::SetConst, REJOIN_FALLTHROUGH);
3049 : }
3050 25163 : END_CASE(JSOP_SETCONST)
3051 :
3052 : BEGIN_CASE(JSOP_LAMBDA)
3053 : {
3054 61625 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC));
3055 :
3056 61625 : JSObjStubFun stub = stubs::Lambda;
3057 61625 : uint32_t uses = 0;
3058 :
3059 61625 : prepareStubCall(Uses(uses));
3060 61625 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3061 :
3062 61625 : INLINE_STUBCALL(stub, REJOIN_PUSH_OBJECT);
3063 :
3064 61625 : frame.takeReg(Registers::ReturnReg);
3065 61625 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3066 : }
3067 61625 : END_CASE(JSOP_LAMBDA)
3068 :
3069 : BEGIN_CASE(JSOP_TRY)
3070 17401 : frame.syncAndForgetEverything();
3071 17401 : END_CASE(JSOP_TRY)
3072 :
3073 : BEGIN_CASE(JSOP_RETRVAL)
3074 1016 : emitReturn(NULL);
3075 1016 : fallthrough = false;
3076 1016 : END_CASE(JSOP_RETRVAL)
3077 :
3078 : BEGIN_CASE(JSOP_GETGNAME)
3079 : BEGIN_CASE(JSOP_CALLGNAME)
3080 : {
3081 495599 : uint32_t index = GET_UINT32_INDEX(PC);
3082 495599 : jsop_getgname(index);
3083 495599 : frame.extra(frame.peek(-1)).name = script->getName(index);
3084 : }
3085 495599 : END_CASE(JSOP_GETGNAME)
3086 :
3087 : BEGIN_CASE(JSOP_SETGNAME)
3088 : {
3089 90363 : jsbytecode *next = &PC[JSOP_SETGNAME_LENGTH];
3090 90363 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
3091 90363 : jsop_setgname(script->getName(GET_UINT32_INDEX(PC)), pop);
3092 : }
3093 90363 : END_CASE(JSOP_SETGNAME)
3094 :
3095 : BEGIN_CASE(JSOP_REGEXP)
3096 6317 : if (!jsop_regexp())
3097 0 : return Compile_Error;
3098 6317 : END_CASE(JSOP_REGEXP)
3099 :
3100 : BEGIN_CASE(JSOP_OBJECT)
3101 : {
3102 2780 : JSObject *object = script->getObject(GET_UINT32_INDEX(PC));
3103 2780 : RegisterID reg = frame.allocReg();
3104 2780 : masm.move(ImmPtr(object), reg);
3105 2780 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3106 : }
3107 2780 : END_CASE(JSOP_OBJECT)
3108 :
3109 : BEGIN_CASE(JSOP_UINT24)
3110 479 : frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
3111 479 : END_CASE(JSOP_UINT24)
3112 :
3113 : BEGIN_CASE(JSOP_STOP)
3114 76074 : if (script->scriptCounts)
3115 0 : updatePCCounts(PC, &codeStart, &countsUpdated);
3116 76074 : emitReturn(NULL);
3117 76074 : goto done;
3118 : END_CASE(JSOP_STOP)
3119 :
3120 : BEGIN_CASE(JSOP_GETXPROP)
3121 373 : if (!jsop_xname(script->getName(GET_UINT32_INDEX(PC))))
3122 0 : return Compile_Error;
3123 373 : END_CASE(JSOP_GETXPROP)
3124 :
3125 : BEGIN_CASE(JSOP_ENTERBLOCK)
3126 : BEGIN_CASE(JSOP_ENTERLET0)
3127 : BEGIN_CASE(JSOP_ENTERLET1)
3128 23105 : enterBlock(&script->getObject(GET_UINT32_INDEX(PC))->asStaticBlock());
3129 23105 : END_CASE(JSOP_ENTERBLOCK);
3130 :
3131 : BEGIN_CASE(JSOP_LEAVEBLOCK)
3132 23335 : leaveBlock();
3133 23335 : END_CASE(JSOP_LEAVEBLOCK)
3134 :
3135 : BEGIN_CASE(JSOP_INT8)
3136 120572 : frame.push(Value(Int32Value(GET_INT8(PC))));
3137 120572 : END_CASE(JSOP_INT8)
3138 :
3139 : BEGIN_CASE(JSOP_INT32)
3140 2482 : frame.push(Value(Int32Value(GET_INT32(PC))));
3141 2482 : END_CASE(JSOP_INT32)
3142 :
3143 : BEGIN_CASE(JSOP_HOLE)
3144 2188 : frame.push(MagicValue(JS_ARRAY_HOLE));
3145 2188 : END_CASE(JSOP_HOLE)
3146 :
3147 : BEGIN_CASE(JSOP_LOOPHEAD)
3148 : {
3149 43555 : if (analysis->jumpTarget(PC)) {
3150 43487 : interruptCheckHelper();
3151 43487 : recompileCheckHelper();
3152 : }
3153 : }
3154 43555 : END_CASE(JSOP_LOOPHEAD)
3155 :
3156 : BEGIN_CASE(JSOP_LOOPENTRY)
3157 43179 : END_CASE(JSOP_LOOPENTRY)
3158 :
3159 : BEGIN_CASE(JSOP_DEBUGGER)
3160 : {
3161 2089 : prepareStubCall(Uses(0));
3162 2089 : masm.move(ImmPtr(PC), Registers::ArgReg1);
3163 2089 : INLINE_STUBCALL(stubs::DebuggerStatement, REJOIN_FALLTHROUGH);
3164 : }
3165 2089 : END_CASE(JSOP_DEBUGGER)
3166 :
3167 : default:
3168 0 : JS_NOT_REACHED("Opcode not implemented");
3169 : }
3170 :
3171 : /**********************
3172 : * END COMPILER OPS *
3173 : **********************/
3174 :
3175 4028990 : if (cx->typeInferenceEnabled() && PC == lastPC + GetBytecodeLength(lastPC)) {
3176 : /*
3177 : * Inform the frame of the type sets for values just pushed. Skip
3178 : * this if we did any opcode fusions, we don't keep track of the
3179 : * associated type sets in such cases.
3180 : */
3181 2100406 : unsigned nuses = GetUseCount(script, lastPC - script->code);
3182 2100406 : unsigned ndefs = GetDefCount(script, lastPC - script->code);
3183 3923428 : for (unsigned i = 0; i < ndefs; i++) {
3184 1823022 : FrameEntry *fe = frame.getStack(opinfo->stackDepth - nuses + i);
3185 1823022 : if (fe) {
3186 : /* fe may be NULL for conditionally pushed entries, e.g. JSOP_AND */
3187 1819567 : frame.extra(fe).types = analysis->pushedTypes(lastPC - script->code, i);
3188 : }
3189 : }
3190 : }
3191 :
3192 4028990 : if (script->scriptCounts) {
3193 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3194 0 : bool typesUpdated = false;
3195 :
3196 : /* Update information about the type of value pushed by arithmetic ops. */
3197 0 : if ((js_CodeSpec[op].format & JOF_ARITH) && !arithUpdated) {
3198 0 : FrameEntry *pushed = NULL;
3199 0 : if (PC == lastPC + GetBytecodeLength(lastPC))
3200 0 : pushed = frame.peek(-1);
3201 0 : updateArithCounts(lastPC, pushed, arithFirstUseType, arithSecondUseType);
3202 0 : typesUpdated = true;
3203 : }
3204 :
3205 : /* Update information about the result type of access operations. */
3206 0 : if (PCCounts::accessOp(op) &&
3207 : op != JSOP_SETPROP && op != JSOP_SETELEM) {
3208 0 : FrameEntry *fe = (GetDefCount(script, lastPC - script->code) == 1)
3209 0 : ? frame.peek(-1)
3210 0 : : frame.peek(-2);
3211 0 : updatePCTypes(lastPC, fe);
3212 0 : typesUpdated = true;
3213 : }
3214 :
3215 0 : if (countsUpdated || typesUpdated || length != 0) {
3216 0 : if (!countsUpdated)
3217 0 : updatePCCounts(lastPC, &codeStart, &countsUpdated);
3218 :
3219 0 : if (pcLengths) {
3220 : /* Fill in the amount of inline code generated for the op. */
3221 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3222 0 : pcLengths[offset].codeLength += length;
3223 : }
3224 : }
3225 4028990 : } else if (pcLengths) {
3226 : /* Fill in the amount of inline code generated for the op. */
3227 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3228 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3229 0 : pcLengths[offset].codeLength += length;
3230 : }
3231 :
3232 4028990 : frame.assertValidRegisterState();
3233 : }
3234 :
3235 : done:
3236 96584 : return Compile_Okay;
3237 : }
3238 :
3239 : #undef END_CASE
3240 : #undef BEGIN_CASE
3241 :
/*
 * Emit instrumentation that accumulates method-JIT code-size and hit counts
 * for the op at |pc|. On return, *start is reset to the post-instrumentation
 * label and *updated is set so callers know the counts were bumped.
 */
void
mjit::Compiler::updatePCCounts(jsbytecode *pc, Label *start, bool *updated)
{
    JS_ASSERT(script->scriptCounts);

    /*
     * Bump the METHODJIT count for the opcode, read the METHODJIT_CODE_LENGTH
     * and METHODJIT_PICS_LENGTH counts, indicating the amounts of inline path
     * code and generated code, respectively, and add them to the accumulated
     * total for the op.
     */
    uint32_t offset = ssa.frameLength(a->inlineIndex) + pc - script->code;

    /*
     * Base register for addresses, we can't use AbsoluteAddress in all places.
     * This may hold a live value, so write it out to the top of the stack
     * first. This cannot overflow the stack, as space is always reserved for
     * an extra callee frame.
     */
    RegisterID reg = Registers::ReturnReg;
    masm.storePtr(reg, frame.addressOfTop());

    PCCounts counts = script->getPCCounts(pc);

    /* Accumulate inline code length generated so far for this op. */
    double *code = &counts.get(PCCounts::BASE_METHODJIT_CODE);
    double *codeLength = &pcLengths[offset].codeLength;
    masm.addCount(codeLength, code, reg);

    /* Accumulate out-of-line (PIC) code length for this op. */
    double *pics = &counts.get(PCCounts::BASE_METHODJIT_PICS);
    double *picsLength = &pcLengths[offset].picsLength;
    masm.addCount(picsLength, pics, reg);

    /* Count one execution of the op itself. */
    double *count = &counts.get(PCCounts::BASE_METHODJIT);
    masm.bumpCount(count, reg);

    /* Reload the base register's original value. */
    masm.loadPtr(frame.addressOfTop(), reg);

    /* The start label should reflect the code for the op, not instrumentation. */
    *start = masm.label();
    *updated = true;
}
3284 :
3285 : static inline bool
3286 0 : HasPayloadType(types::TypeSet *types)
3287 : {
3288 0 : if (types->unknown())
3289 0 : return false;
3290 :
3291 0 : types::TypeFlags flags = types->baseFlags();
3292 0 : bool objects = !!(flags & types::TYPE_FLAG_ANYOBJECT) || !!types->getObjectCount();
3293 :
3294 0 : if (objects && !!(flags & types::TYPE_FLAG_STRING))
3295 0 : return false;
3296 :
3297 0 : flags = flags & ~(types::TYPE_FLAG_ANYOBJECT | types::TYPE_FLAG_STRING);
3298 :
3299 : return (flags == types::TYPE_FLAG_UNDEFINED)
3300 : || (flags == types::TYPE_FLAG_NULL)
3301 0 : || (flags == types::TYPE_FLAG_BOOLEAN);
3302 : }
3303 :
/*
 * Emit instrumentation that classifies the value pushed by the access op at
 * |pc|: bumps monomorphic/dimorphic/polymorphic counters, per-type-tag
 * counters, and the barrier/no-barrier counter for type-set ops.
 */
void
mjit::Compiler::updatePCTypes(jsbytecode *pc, FrameEntry *fe)
{
    JS_ASSERT(script->scriptCounts);

    /*
     * Get a temporary register, as for updatePCCounts. Don't overlap with
     * the backing store for the entry's type tag, if there is one.
     */
    RegisterID reg = Registers::ReturnReg;
    if (frame.peekTypeInRegister(fe) && reg == frame.tempRegForType(fe)) {
        JS_STATIC_ASSERT(Registers::ReturnReg != Registers::ArgReg1);
        reg = Registers::ArgReg1;
    }
    masm.push(reg);

    PCCounts counts = script->getPCCounts(pc);

    /* Update the counts for pushed type tags and possible access types. */
    if (fe->isTypeKnown()) {
        /* Monomorphic value: bump the counter matching its static type. */
        masm.bumpCount(&counts.get(PCCounts::ACCESS_MONOMORPHIC), reg);
        PCCounts::AccessCounts count = PCCounts::ACCESS_OBJECT;
        switch (fe->getKnownType()) {
          case JSVAL_TYPE_UNDEFINED:  count = PCCounts::ACCESS_UNDEFINED;  break;
          case JSVAL_TYPE_NULL:       count = PCCounts::ACCESS_NULL;       break;
          case JSVAL_TYPE_BOOLEAN:    count = PCCounts::ACCESS_BOOLEAN;    break;
          case JSVAL_TYPE_INT32:      count = PCCounts::ACCESS_INT32;      break;
          case JSVAL_TYPE_DOUBLE:     count = PCCounts::ACCESS_DOUBLE;     break;
          case JSVAL_TYPE_STRING:     count = PCCounts::ACCESS_STRING;     break;
          case JSVAL_TYPE_OBJECT:     count = PCCounts::ACCESS_OBJECT;     break;
          default:;
        }
        /*
         * NOTE(review): |count| is always assigned above (it defaults to
         * ACCESS_OBJECT), so this test only filters a zero-valued enum
         * entry — confirm that is the intent.
         */
        if (count)
            masm.bumpCount(&counts.get(count), reg);
    } else {
        /*
         * Polymorphic value: classify it as dimorphic vs. polymorphic from
         * its inferred type set, then test the runtime type tag against each
         * possible tag and bump the matching counter. The tag is reloaded
         * into reg after every bump, as the bump may clobber reg.
         */
        types::TypeSet *types = frame.extra(fe).types;
        if (types && HasPayloadType(types))
            masm.bumpCount(&counts.get(PCCounts::ACCESS_DIMORPHIC), reg);
        else
            masm.bumpCount(&counts.get(PCCounts::ACCESS_POLYMORPHIC), reg);

        frame.loadTypeIntoReg(fe, reg);

        Jump j = masm.testUndefined(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_UNDEFINED), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testNull(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_NULL), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testBoolean(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_BOOLEAN), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testInt32(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_INT32), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testDouble(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_DOUBLE), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testString(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_STRING), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testObject(Assembler::NotEqual, reg);
        masm.bumpCount(&counts.get(PCCounts::ACCESS_OBJECT), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);
    }

    /* Update the count for accesses with type barriers. */
    if (js_CodeSpec[*pc].format & JOF_TYPESET) {
        double *count = &counts.get(hasTypeBarriers(pc)
                                    ? PCCounts::ACCESS_BARRIER
                                    : PCCounts::ACCESS_NOBARRIER);
        masm.bumpCount(count, reg);
    }

    /* Reload the base register's original value. */
    masm.pop(reg);
}
3394 :
3395 : void
3396 0 : mjit::Compiler::updateArithCounts(jsbytecode *pc, FrameEntry *fe,
3397 : JSValueType firstUseType, JSValueType secondUseType)
3398 : {
3399 0 : JS_ASSERT(script->scriptCounts);
3400 :
3401 0 : RegisterID reg = Registers::ReturnReg;
3402 0 : masm.push(reg);
3403 :
3404 : /*
3405 : * What count we bump for arithmetic expressions depend on the
3406 : * known types of its operands.
3407 : *
3408 : * ARITH_INT: operands are known ints, result is int
3409 : * ARITH_OVERFLOW: operands are known ints, result is double
3410 : * ARITH_DOUBLE: either operand is a known double, result is double
3411 : * ARITH_OTHER: operands are monomorphic but not int or double
3412 : * ARITH_UNKNOWN: operands are polymorphic
3413 : */
3414 :
3415 : PCCounts::ArithCounts count;
3416 0 : if (firstUseType == JSVAL_TYPE_INT32 && secondUseType == JSVAL_TYPE_INT32 &&
3417 0 : (!fe || fe->isNotType(JSVAL_TYPE_DOUBLE))) {
3418 0 : count = PCCounts::ARITH_INT;
3419 0 : } else if (firstUseType == JSVAL_TYPE_INT32 || firstUseType == JSVAL_TYPE_DOUBLE ||
3420 : secondUseType == JSVAL_TYPE_INT32 || secondUseType == JSVAL_TYPE_DOUBLE) {
3421 0 : count = PCCounts::ARITH_DOUBLE;
3422 0 : } else if (firstUseType != JSVAL_TYPE_UNKNOWN && secondUseType != JSVAL_TYPE_UNKNOWN &&
3423 0 : (!fe || fe->isTypeKnown())) {
3424 0 : count = PCCounts::ARITH_OTHER;
3425 : } else {
3426 0 : count = PCCounts::ARITH_UNKNOWN;
3427 : }
3428 :
3429 0 : masm.bumpCount(&script->getPCCounts(pc).get(count), reg);
3430 0 : masm.pop(reg);
3431 0 : }
3432 :
/*
 * Emit instrumentation for an element access at |pc|: bump a counter for the
 * index operand's type and a counter for the object operand's kind
 * (typed array / packed array / dense array / other).
 */
void
mjit::Compiler::updateElemCounts(jsbytecode *pc, FrameEntry *obj, FrameEntry *id)
{
    JS_ASSERT(script->scriptCounts);

    /* Scratch register for count bumping; saved and restored around use. */
    RegisterID reg = Registers::ReturnReg;
    masm.push(reg);

    PCCounts counts = script->getPCCounts(pc);

    /* Classify the index operand by its statically known type, if any. */
    PCCounts::ElementCounts count;
    if (id->isTypeKnown()) {
        switch (id->getKnownType()) {
          case JSVAL_TYPE_INT32: count = PCCounts::ELEM_ID_INT; break;
          case JSVAL_TYPE_DOUBLE: count = PCCounts::ELEM_ID_DOUBLE; break;
          default: count = PCCounts::ELEM_ID_OTHER; break;
        }
    } else {
        count = PCCounts::ELEM_ID_UNKNOWN;
    }
    masm.bumpCount(&counts.get(count), reg);

    /* Classify the object operand using its inferred type set. */
    if (obj->mightBeType(JSVAL_TYPE_OBJECT)) {
        types::TypeSet *types = frame.extra(obj).types;
        if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY) &&
            types->getTypedArrayType(cx) != TypedArray::TYPE_MAX) {
            count = PCCounts::ELEM_OBJECT_TYPED;
        } else if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
            if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY))
                count = PCCounts::ELEM_OBJECT_PACKED;
            else
                count = PCCounts::ELEM_OBJECT_DENSE;
        } else {
            count = PCCounts::ELEM_OBJECT_OTHER;
        }
        masm.bumpCount(&counts.get(count), reg);
    } else {
        /* The base is known not to be an object; bucket it with 'other'. */
        masm.bumpCount(&counts.get(PCCounts::ELEM_OBJECT_OTHER), reg);
    }

    masm.pop(reg);
}
3475 :
3476 : void
3477 0 : mjit::Compiler::bumpPropCount(jsbytecode *pc, int count)
3478 : {
3479 : /* Don't accumulate counts for property ops fused with other ops. */
3480 0 : if (!(js_CodeSpec[*pc].format & JOF_PROP))
3481 0 : return;
3482 0 : RegisterID reg = Registers::ReturnReg;
3483 0 : masm.push(reg);
3484 0 : masm.bumpCount(&script->getPCCounts(pc).get(count), reg);
3485 0 : masm.pop(reg);
3486 : }
3487 :
3488 : JSC::MacroAssembler::Label
3489 256094 : mjit::Compiler::labelOf(jsbytecode *pc, uint32_t inlineIndex)
3490 : {
3491 256094 : ActiveFrame *a = (inlineIndex == UINT32_MAX) ? outer : inlineFrames[inlineIndex];
3492 256094 : JS_ASSERT(uint32_t(pc - a->script->code) < a->script->length);
3493 :
3494 256094 : uint32_t offs = uint32_t(pc - a->script->code);
3495 256094 : JS_ASSERT(a->jumpMap[offs].isSet());
3496 256094 : return a->jumpMap[offs];
3497 : }
3498 :
3499 : bool
3500 138626 : mjit::Compiler::knownJump(jsbytecode *pc)
3501 : {
3502 138626 : return pc < PC;
3503 : }
3504 :
3505 : bool
3506 190715 : mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
3507 : {
3508 190715 : JS_ASSERT(pc >= script->code && uint32_t(pc - script->code) < script->length);
3509 :
3510 190715 : if (pc < PC) {
3511 73208 : j.linkTo(a->jumpMap[uint32_t(pc - script->code)], &masm);
3512 73208 : return true;
3513 : }
3514 117507 : return branchPatches.append(BranchPatch(j, pc, a->inlineIndex));
3515 : }
3516 :
3517 : void
3518 140204 : mjit::Compiler::emitFinalReturn(Assembler &masm)
3519 : {
3520 140204 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg);
3521 140204 : masm.jump(Registers::ReturnReg);
3522 140204 : }
3523 :
3524 : // Emits code to load a return value of the frame into the scripted-ABI
3525 : // type & data register pair. If the return value is in fp->rval, then |fe|
3526 : // is NULL. Otherwise, |fe| contains the return value.
3527 : //
3528 : // If reading from fp->rval, |undefined| is loaded optimistically, before
3529 : // checking if fp->rval is set in the frame flags and loading that instead.
3530 : //
3531 : // Otherwise, if |masm| is the inline path, it is loaded as efficiently as
3532 : // the FrameState can manage. If |masm| is the OOL path, the value is simply
3533 : // loaded from its slot in the frame, since the caller has guaranteed it's
3534 : // been synced.
3535 : //
3536 : void
3537 139274 : mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
3538 : {
3539 139274 : RegisterID typeReg = JSReturnReg_Type;
3540 139274 : RegisterID dataReg = JSReturnReg_Data;
3541 :
3542 139274 : if (fe) {
3543 : // If using the OOL assembler, the caller signifies that the |fe| is
3544 : // synced, but not to rely on its register state.
3545 19306 : if (masm != &this->masm) {
3546 9225 : if (fe->isConstant()) {
3547 2151 : stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
3548 : } else {
3549 7074 : Address rval(frame.addressOf(fe));
3550 7074 : if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE)) {
3551 2752 : stubcc.masm.loadPayload(rval, dataReg);
3552 2752 : stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
3553 : } else {
3554 4322 : stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
3555 : }
3556 : }
3557 : } else {
3558 10081 : frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
3559 : }
3560 : } else {
3561 : // Load a return value from POPV or SETRVAL into the return registers,
3562 : // otherwise return undefined.
3563 119968 : masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
3564 119968 : if (analysis->usesReturnValue()) {
3565 : Jump rvalClear = masm->branchTest32(Assembler::Zero,
3566 : FrameFlagsAddress(),
3567 80111 : Imm32(StackFrame::HAS_RVAL));
3568 80111 : Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
3569 80111 : masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
3570 80111 : rvalClear.linkTo(masm->label(), masm);
3571 : }
3572 : }
3573 139274 : }
3574 :
3575 : // This ensures that constructor return values are an object. If a non-object
3576 : // is returned, either explicitly or implicitly, the newly created object is
3577 : // loaded out of the frame. Otherwise, the explicitly returned object is kept.
3578 : //
3579 : void
3580 2013 : mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
3581 : {
3582 2013 : JS_ASSERT(isConstructing);
3583 :
3584 2013 : bool ool = (masm != &this->masm);
3585 2013 : Address thisv(JSFrameReg, StackFrame::offsetOfThis(script->function()));
3586 :
3587 : // We can just load |thisv| if either of the following is true:
3588 : // (1) There is no explicit return value, AND fp->rval is not used.
3589 : // (2) There is an explicit return value, and it's known to be primitive.
3590 2119 : if ((!fe && !analysis->usesReturnValue()) ||
3591 106 : (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
3592 : {
3593 930 : if (ool)
3594 438 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3595 : else
3596 492 : frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
3597 930 : return;
3598 : }
3599 :
3600 : // If the type is known to be an object, just load the return value as normal.
3601 1083 : if (fe && fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
3602 6 : loadReturnValue(masm, fe);
3603 6 : return;
3604 : }
3605 :
3606 : // There's a return value, and its type is unknown. Test the type and load
3607 : // |thisv| if necessary. Sync the 'this' entry before doing so, as it may
3608 : // be stored in registers if we constructed it inline.
3609 1077 : frame.syncThis();
3610 1077 : loadReturnValue(masm, fe);
3611 1077 : Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
3612 1077 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3613 1077 : j.linkTo(masm->label(), masm);
3614 : }
3615 :
3616 : // Loads the return value into the scripted ABI register pair, such that JS
3617 : // semantics in constructors are preserved.
3618 : //
3619 : void
3620 140204 : mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
3621 : {
3622 140204 : if (isConstructing)
3623 2013 : fixPrimitiveReturn(masm, fe);
3624 : else
3625 138191 : loadReturnValue(masm, fe);
3626 140204 : }
3627 :
/*
 * Hand an inlined frame's return value to the parent frame. Depending on the
 * parent's needs, the value is stored to the caller's synced slot
 * (a->syncReturnValue), propagated as a copy of an outer-script entry, or
 * pinned in a->returnRegister (FP register for known doubles, otherwise a
 * GPR holding the payload). |fe| is the returned entry, or NULL to return
 * undefined.
 */
3628         : void
3629    2607 : mjit::Compiler::emitInlineReturnValue(FrameEntry *fe)
3630         : {
3631    2607 :     JS_ASSERT(!isConstructing && a->needReturnValue);
3632         :
3633    2607 :     if (a->syncReturnValue) {
3634         :         /* Needed return value with unknown type, the caller's entry is synced. */
3635     209 :         Address address = frame.addressForInlineReturn();
3636     209 :         if (fe)
3637     209 :             frame.storeTo(fe, address);
3638         :         else
3639       0 :             masm.storeValue(UndefinedValue(), address);
3640     209 :         return;
3641         :     }
3642         :
3643         :     /*
3644         :      * For inlined functions that simply return an entry present in the outer
3645         :      * script (e.g. a loop invariant term), mark the copy and propagate it
3646         :      * after popping the frame.
3647         :      */
3648    2398 :     if (!a->exitState && fe && fe->isCopy() && frame.isOuterSlot(fe->backing())) {
3649      20 :         a->returnEntry = fe->backing();
3650      20 :         return;
3651         :     }
3652         :
    /*
     * Pin the value in a register. If a->returnSet, an earlier return already
     * chose the register, so the mask restricts allocation to that one; the
     * syncAndForgetFe/takeReg sequence transfers ownership of a register the
     * entry already occupies to the compiler.
     */
3653    2378 :     if (a->returnValueDouble) {
3654      87 :         JS_ASSERT(fe);
3655      87 :         frame.ensureDouble(fe);
3656         :         Registers mask(a->returnSet
3657       1 :                        ? Registers::maskReg(a->returnRegister)
3658      88 :                        : Registers::AvailFPRegs);
3659         :         FPRegisterID fpreg;
3660      87 :         if (!fe->isConstant()) {
3661      81 :             fpreg = frame.tempRegInMaskForData(fe, mask.freeMask).fpreg();
3662      81 :             frame.syncAndForgetFe(fe, true);
3663      81 :             frame.takeReg(fpreg);
3664         :         } else {
3665       6 :             fpreg = frame.allocReg(mask.freeMask).fpreg();
3666       6 :             masm.slowLoadConstantDouble(fe->getValue().toDouble(), fpreg);
3667         :         }
3668      87 :         JS_ASSERT_IF(a->returnSet, fpreg == a->returnRegister.fpreg());
3669      87 :         a->returnRegister = fpreg;
3670         :     } else {
3671         :         Registers mask(a->returnSet
3672     314 :                        ? Registers::maskReg(a->returnRegister)
3673    2605 :                        : Registers::AvailRegs);
3674         :         RegisterID reg;
3675    2291 :         if (fe && !fe->isConstant()) {
3676    2175 :             reg = frame.tempRegInMaskForData(fe, mask.freeMask).reg();
3677    2175 :             frame.syncAndForgetFe(fe, true);
3678    2175 :             frame.takeReg(reg);
3679         :         } else {
    /* Constant or missing entry: load the payload into a fresh register. */
3680     116 :             reg = frame.allocReg(mask.freeMask).reg();
3681     116 :             Value val = fe ? fe->getValue() : UndefinedValue();
3682     116 :             masm.loadValuePayload(val, reg);
3683         :         }
3684    2291 :         JS_ASSERT_IF(a->returnSet, reg == a->returnRegister.reg());
3685    2291 :         a->returnRegister = reg;
3686         :     }
3687         :
3688    2378 :     a->returnSet = true;
    /* Keep the register-allocation snapshot from clobbering the result. */
3689    2378 :     if (a->exitState)
3690     547 :         a->exitState->setUnassigned(a->returnRegister);
3691         : }
3692 :
/*
 * Emit the epilogue for JSOP_RETURN/JSOP_STOP. |fe| is the returned value
 * (the top of stack) or NULL to return fp->rval/undefined. Handles the
 * debug-mode epilogue stub, returns from inlined frames, and activation
 * object teardown for functions that need it.
 */
3693         : void
3694  100011 : mjit::Compiler::emitReturn(FrameEntry *fe)
3695         : {
3696  100011 :     JS_ASSERT_IF(!script->function(), JSOp(*PC) == JSOP_STOP);
3697         :
3698         :     /* Only the top of the stack can be returned. */
3699  100011 :     JS_ASSERT_IF(fe, fe == frame.peek(-1));
3700         :
3701  100011 :     if (debugMode() || Probes::callTrackingActive(cx)) {
3702         :         /* If the return value isn't in the frame's rval slot, move it there. */
3703   51760 :         if (fe) {
3704   10179 :             frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
3705         :
3706         :             /* Set the frame flag indicating it's there. */
3707   10179 :             RegisterID reg = frame.allocReg();
3708   10179 :             masm.load32(FrameFlagsAddress(), reg);
3709   10179 :             masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
3710   10179 :             masm.store32(reg, FrameFlagsAddress());
3711   10179 :             frame.freeReg(reg);
3712         :
3713         :             /* Use the frame's return value when generating further code. */
3714   10179 :             fe = NULL;
3715         :         }
3716         :
3717   51760 :         prepareStubCall(Uses(0));
3718   51760 :         INLINE_STUBCALL(stubs::ScriptDebugEpilogue, REJOIN_RESUME);
3719         :     }
3720         :
3721  100011 :     if (a != outer) {
3722         :         /*
3723         :          * Returning from an inlined script. The checks we do for inlineability
3724         :          * and recompilation triggered by args object construction ensure that
3725         :          * there can't be an arguments or call object.
3726         :          */
3727         :
3728    3213 :         if (a->needReturnValue)
3729    2607 :             emitInlineReturnValue(fe);
3730         :
3731    3213 :         if (a->exitState) {
3732         :             /*
3733         :              * Restore the register state to reflect that at the original call,
3734         :              * modulo entries which will be popped once the call finishes and any
3735         :              * entry which will be clobbered by the return value register.
3736         :              */
3737     793 :             frame.syncForAllocation(a->exitState, true, Uses(0));
3738         :         }
3739         :
3740         :         /*
3741         :          * Simple tests to see if we are at the end of the script and will
3742         :          * fallthrough after the script body finishes, thus won't need to jump.
3743         :          */
3744         :         bool endOfScript =
3745         :             (JSOp(*PC) == JSOP_STOP) ||
3746         :             (JSOp(*PC) == JSOP_RETURN &&
3747    2651 :              (JSOp(PC[JSOP_RETURN_LENGTH]) == JSOP_STOP &&
3748    5864 :               !analysis->maybeCode(PC + JSOP_RETURN_LENGTH)));
3749    3213 :         if (!endOfScript)
3750     281 :             a->returnJumps->append(masm.jump());
3751         :
    /* The return value now owns the register; release the frame's claim. */
3752    3213 :         if (a->returnSet)
3753    2378 :             frame.freeReg(a->returnRegister);
3754    3213 :         return;
3755         :     }
3756         :
3757         :     /*
3758         :      * Outside the mjit, activation objects (call objects and arguments objects) are put
3759         :      * by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
3760         :      * popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
3761         :      * responsible for pushing/popping the initial frame. However, an mjit function
3762         :      * epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
3763         :      * puts activation objects. And furthermore, if the last mjit frame throws, the mjit
3764         :      * does *not* put the activation objects. So we can't assume any particular state of
3765         :      * puttedness upon exit from the mjit.
3766         :      *
3767         :      * To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
3768         :      * entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
3769         :      * been put.
3770         :      */
3771   96798 :     if (script->function()) {
3772   50360 :         types::TypeScriptNesting *nesting = script->nesting();
3773   50360 :         if (script->function()->isHeavyweight() || script->needsArgsObj() ||
3774         :             (nesting && nesting->children))
3775         :         {
    /* Always run the frame epilogue stub for heavyweight/nested cases. */
3776    6954 :             prepareStubCall(Uses(fe ? 1 : 0));
3777    6954 :             INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
3778         :         } else {
3779         :             /* if hasCallObj() */
3780         :             Jump putObjs = masm.branchTest32(Assembler::NonZero,
3781   43406 :                                              Address(JSFrameReg, StackFrame::offsetOfFlags()),
3782   86812 :                                              Imm32(StackFrame::HAS_CALL_OBJ));
3783   43406 :             stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
3784         :
3785   43406 :             stubcc.leave();
3786   43406 :             OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
3787         :
    /* The OOL path returns directly; it never rejoins the inline path. */
3788   43406 :             emitReturnValue(&stubcc.masm, fe);
3789   43406 :             emitFinalReturn(stubcc.masm);
3790         :
3791         :             /*
3792         :              * Do frame count balancing inline for inner functions in a nesting
3793         :              * with no children of their own.
3794         :              */
3795   43406 :             if (nesting)
3796    3064 :                 masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
3797         :         }
3798         :     }
3799         :
3800   96798 :     emitReturnValue(&masm, fe);
3801   96798 :     emitFinalReturn(masm);
3802         :
3803         :     /*
3804         :      * After we've placed the call object, all tracked state can be
3805         :      * thrown away. This will happen anyway because the next live opcode (if
3806         :      * any) must have an incoming edge. It's an optimization to throw it away
3807         :      * early - the tracker won't be spilled on further exits or join points.
3808         :      */
3809   96798 :     frame.discardFrame();
3810         : }
3811 :
3812 : void
3813 481771 : mjit::Compiler::prepareStubCall(Uses uses)
3814 : {
3815 481771 : JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
3816 481771 : frame.syncAndKill(Registers(Registers::TempAnyRegs), uses);
3817 481771 : JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
3818 481771 : }
3819 :
3820 : JSC::MacroAssembler::Call
3821 510604 : mjit::Compiler::emitStubCall(void *ptr, DataLabelPtr *pinline)
3822 : {
3823 510604 : JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
3824 :
3825 510604 : masm.bumpStubCount(script, PC, Registers::tempCallReg());
3826 :
3827 510604 : Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
3828 1021208 : ptr, outerPC(), pinline, frame.totalDepth());
3829 510604 : JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
3830 : return cl;
3831 : }
3832 :
/*
 * Emit an inline poll of cx->runtime->interrupt. When the flag is nonzero
 * (or unconditionally, under GC-zeal barrier verification), an OOL path
 * syncs the frame and calls stubs::Interrupt, then rejoins the inline path.
 */
3833         : void
3834  219984 : mjit::Compiler::interruptCheckHelper()
3835         : {
3836  219984 :     Jump jump;
3837  219984 :     if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
3838         :         /* For barrier verification, always take the interrupt so we can verify. */
3839     372 :         jump = masm.jump();
3840         :     } else {
3841  219612 :         void *interrupt = (void*) &cx->runtime->interrupt;
3842         : #if defined(JS_CPU_X86) || defined(JS_CPU_ARM) || defined(JS_CPU_MIPS)
3843  219612 :         jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
3844         : #else
3845         :         /* Handle processors that can't load from absolute addresses. */
3846         :         RegisterID reg = frame.allocReg();
3847         :         masm.move(ImmPtr(interrupt), reg);
3848         :         jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
3849         :         frame.freeReg(reg);
3850         : #endif
3851         :     }
3852         :
3853  219984 :     stubcc.linkExitDirect(jump, stubcc.masm.label());
3854         :
    /* OOL: sync in the stub assembler, pass the pc, call the interrupt stub. */
3855  219984 :     frame.sync(stubcc.masm, Uses(0));
3856  219984 :     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
3857  219984 :     OOL_STUBCALL(stubs::Interrupt, REJOIN_RESUME);
3858  219984 :     stubcc.rejoin(Changes(0));
3859  219984 : }
3860 :
/*
 * Emit an inline bump of the script's use count; once it reaches
 * USES_BEFORE_INLINING an OOL path calls stubs::RecompileForInline so the
 * script can be recompiled with call inlining. Skipped when already
 * inlining, in debug mode, without a global object, when the script makes
 * no function calls, or when type inference is off.
 */
3861         : void
3862  136051 : mjit::Compiler::recompileCheckHelper()
3863         : {
3864  175225 :     if (inlining() || debugMode() || !globalObj ||
3865   39174 :         !analysis->hasFunctionCalls() || !cx->typeInferenceEnabled()) {
3866  133303 :         return;
3867         :     }
3868         :
3869    2748 :     size_t *addr = script->addressOfUseCount();
3870    2748 :     masm.add32(Imm32(1), AbsoluteAddress(addr));
3871         : #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
3872         :     Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, AbsoluteAddress(addr),
3873    2748 :                               Imm32(USES_BEFORE_INLINING));
3874         : #else
3875         :     /* Handle processors that can't load from absolute addresses. */
3876         :     RegisterID reg = frame.allocReg();
3877         :     masm.move(ImmPtr(addr), reg);
3878         :     Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, Address(reg, 0),
3879         :                               Imm32(USES_BEFORE_INLINING));
3880         :     frame.freeReg(reg);
3881         : #endif
3882    2748 :     stubcc.linkExit(jump, Uses(0));
3883    2748 :     stubcc.leave();
3884         :
3885    2748 :     OOL_STUBCALL(stubs::RecompileForInline, REJOIN_RESUME);
3886    2748 :     stubcc.rejoin(Changes(0));
3887         : }
3888 :
3889 : void
3890 174216 : mjit::Compiler::addReturnSite()
3891 : {
3892 174216 : InternalCallSite site(masm.distanceOf(masm.label()), a->inlineIndex, PC,
3893 348432 : REJOIN_SCRIPTED, false);
3894 174216 : addCallSite(site);
3895 174216 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg);
3896 174216 : }
3897 :
/*
 * Emit a call that always goes through the UncachedCall/UncachedNew stub
 * (no call IC). The stub returns either NULL (the call completed in the VM)
 * or a code pointer to jump into; the fast path patches the frame's ncode
 * return address and jumps there. Used in debug mode and for constructors
 * needing a new type object.
 */
3898         : void
3899   83655 : mjit::Compiler::emitUncachedCall(uint32_t argc, bool callingNew)
3900         : {
3901   83655 :     CallPatchInfo callPatch;
3902         :
3903   83655 :     RegisterID r0 = Registers::ReturnReg;
3904   83655 :     VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
3905         :
3906   83655 :     frame.syncAndKill(Uses(argc + 2));
3907   83655 :     prepareStubCall(Uses(argc + 2));
3908   83655 :     masm.move(Imm32(argc), Registers::ArgReg1);
3909   83655 :     INLINE_STUBCALL(stub, REJOIN_CALL_PROLOGUE);
3910         :
    /* A NULL code pointer means the callee was not compiled; go OOL below. */
3911   83655 :     Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
3912         :
3913   83655 :     masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
3914   83655 :     callPatch.hasFastNcode = true;
    /* The real return address is filled in later when code is finalized. */
3915         :     callPatch.fastNcodePatch =
3916         :         masm.storePtrWithPatch(ImmPtr(NULL),
3917   83655 :                                Address(JSFrameReg, StackFrame::offsetOfNcode()));
3918         :
3919   83655 :     masm.jump(r0);
3920   83655 :     callPatch.joinPoint = masm.label();
3921   83655 :     addReturnSite();
3922         :
3923   83655 :     frame.popn(argc + 2);
3924         :
3925   83655 :     frame.takeReg(JSReturnReg_Type);
3926   83655 :     frame.takeReg(JSReturnReg_Data);
3927   83655 :     frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, knownPushedType(0));
3928         :
3929         :     BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
3930         :                                        /* testUndefined = */ false,
3931   83655 :                                        /* testReturn = */ true);
3932         :
3933   83655 :     stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
3934   83655 :     stubcc.rejoin(Changes(1));
3935   83655 :     callPatches.append(callPatch);
3936         :
3937   83655 :     finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
3938   83655 : }
3939 :
3940 : static bool
3941 176947 : IsLowerableFunCallOrApply(jsbytecode *pc)
3942 : {
3943 : #ifdef JS_MONOIC
3944 9066 : return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
3945 186013 : (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
3946 : #else
3947 : return false;
3948 : #endif
3949 : }
3950 :
/*
 * Guard the speculation that a lowered JSOP_FUNCALL/JSOP_FUNAPPLY really
 * invokes js_fun_call/js_fun_apply. On guard failure the call IC cannot be
 * used (it assumes the speculation holds), so an uncached stubs::SlowCall
 * is made OOL; *uncachedCallSlowRejoin is set to a jump that rejoins the
 * IC's join point with the return value reloaded into registers.
 */
3951         : void
3952    2469 : mjit::Compiler::checkCallApplySpeculation(uint32_t callImmArgc, uint32_t speculatedArgc,
3953         :                                           FrameEntry *origCallee, FrameEntry *origThis,
3954         :                                           MaybeRegisterID origCalleeType, RegisterID origCalleeData,
3955         :                                           MaybeRegisterID origThisType, RegisterID origThisData,
3956         :                                           Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
3957         : {
3958    2469 :     JS_ASSERT(IsLowerableFunCallOrApply(PC));
3959         :
    /* Pick a scratch register distinct from the pinned callee/this registers. */
3960         :     RegisterID temp;
3961    2469 :     Registers tempRegs(Registers::AvailRegs);
3962    2469 :     if (origCalleeType.isSet())
3963     727 :         tempRegs.takeReg(origCalleeType.reg());
3964    2469 :     tempRegs.takeReg(origCalleeData);
3965    2469 :     if (origThisType.isSet())
3966     724 :         tempRegs.takeReg(origThisType.reg());
3967    2469 :     tempRegs.takeReg(origThisData);
3968    2469 :     temp = tempRegs.takeAnyReg().reg();
3969         :
3970         :     /*
3971         :      * if (origCallee.isObject() &&
3972         :      *     origCallee.toObject().isFunction &&
3973         :      *     origCallee.toObject().toFunction() == js_fun_{call,apply})
3974         :      */
3975    2469 :     MaybeJump isObj;
3976    2469 :     if (origCalleeType.isSet())
3977     727 :         isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
3978    2469 :     Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData, temp);
3979    2469 :     Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
3980         :     Jump isNative = masm.branchPtr(Assembler::NotEqual,
3981    2469 :                                    Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
3982    4938 :                                    ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
3983         :
3984         :     /*
3985         :      * If speculation fails, we can't use the ic, since it is compiled on the
3986         :      * assumption that speculation succeeds. Instead, just do an uncached call.
3987         :      */
3988         :     {
3989    2469 :         if (isObj.isSet())
3990     727 :             stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
3991    2469 :         stubcc.linkExitDirect(isFun, stubcc.masm.label());
3992    2469 :         stubcc.linkExitDirect(isNative, stubcc.masm.label());
3993         :
    /* Lazy-args tricks pushed one extra slot; account for it in the depth. */
3994         :         int32_t frameDepthAdjust;
3995    2469 :         if (applyTricks == LazyArgsObj) {
3996     217 :             OOL_STUBCALL(stubs::Arguments, REJOIN_RESUME);
3997     217 :             frameDepthAdjust = +1;
3998         :         } else {
3999    2252 :             frameDepthAdjust = 0;
4000         :         }
4001         :
4002    2469 :         stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
4003    2469 :         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
4004    2469 :         OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::SlowCall),
4005    2469 :                                  REJOIN_FALLTHROUGH, frame.totalDepth() + frameDepthAdjust);
4006    2469 :         JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
4007         :
4008         :         /*
4009         :          * inlineCallHelper will link uncachedCallSlowRejoin to the join point
4010         :          * at the end of the ic. At that join point, the return value of the
4011         :          * call is assumed to be in registers, so load them before jumping.
4012         :          */
4013    2469 :         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
4014    2469 :         Address rval = frame.addressOf(origCallee);  /* vp[0] == rval */
4015    2469 :         if (knownPushedType(0) == JSVAL_TYPE_DOUBLE)
4016      36 :             stubcc.masm.ensureInMemoryDouble(rval);
4017    2469 :         stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
4018    2469 :         *uncachedCallSlowRejoin = stubcc.masm.jump();
4019    2469 :         JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
4020         :     }
4021         :
4022         :     /*
4023         :      * For simplicity, we don't statically specialize calls to
4024         :      * ic::SplatApplyArgs based on applyTricks. Rather, this state is
4025         :      * communicated dynamically through the VMFrame.
4026         :      */
4027    2469 :     if (*PC == JSOP_FUNAPPLY) {
4028         :         masm.store32(Imm32(applyTricks == LazyArgsObj),
4029    1037 :                      FrameAddress(VMFrame::offsetOfLazyArgsObj()));
4030         :     }
4031    2469 : }
4032 :
4033 : /* This predicate must be called before the current op mutates the FrameState. */
4034 : bool
4035 1199 : mjit::Compiler::canUseApplyTricks()
4036 : {
4037 1199 : JS_ASSERT(*PC == JSOP_ARGUMENTS);
4038 1199 : JS_ASSERT(!script->needsArgsObj());
4039 1199 : jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
4040 : return *nextpc == JSOP_FUNAPPLY &&
4041 262 : IsLowerableFunCallOrApply(nextpc) &&
4042 262 : !analysis->jumpTarget(nextpc) &&
4043 250 : !debugMode() &&
4044 217 : !a->parent &&
4045 2190 : bytecodeInChunk(nextpc);
4046 : }
4047 :
4048 : /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
4049 : bool
4050 174519 : mjit::Compiler::inlineCallHelper(uint32_t callImmArgc, bool callingNew, FrameSize &callFrameSize)
4051 : {
4052 : int32_t speculatedArgc;
4053 174519 : if (applyTricks == LazyArgsObj) {
4054 217 : frame.pop();
4055 217 : speculatedArgc = 1;
4056 : } else {
4057 : /*
4058 : * Check for interrupts on function call. We don't do this for lazy
4059 : * arguments objects as the interrupt may kick this frame into the
4060 : * interpreter, which doesn't know about the apply tricks. Instead, we
4061 : * do the interrupt check at the start of the JSOP_ARGUMENTS.
4062 : */
4063 174302 : interruptCheckHelper();
4064 :
4065 174302 : speculatedArgc = callImmArgc;
4066 : }
4067 :
4068 174519 : FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
4069 174519 : FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));
4070 :
4071 : /*
4072 : * 'this' does not need to be synced for constructing. :FIXME: is it
4073 : * possible that one of the arguments is directly copying the 'this'
4074 : * entry (something like 'new x.f(x)')?
4075 : */
4076 174519 : if (callingNew) {
4077 16652 : frame.discardFe(origThis);
4078 :
4079 : /*
4080 : * We store NULL here to ensure that the slot doesn't contain
4081 : * garbage. Additionally, we need to store a non-object value here for
4082 : * TI. If a GC gets triggered before the callee can fill in the slot
4083 : * (i.e. the GC happens on constructing the 'new' object or the call
4084 : * object for a heavyweight callee), it needs to be able to read the
4085 : * 'this' value to tell whether newScript constraints will need to be
4086 : * regenerated afterwards.
4087 : */
4088 16652 : masm.storeValue(NullValue(), frame.addressOf(origThis));
4089 : }
4090 :
4091 174519 : if (!cx->typeInferenceEnabled()) {
4092 53159 : CompileStatus status = callArrayBuiltin(callImmArgc, callingNew);
4093 53159 : if (status != Compile_InlineAbort)
4094 303 : return (status == Compile_Okay);
4095 : }
4096 :
4097 : /*
4098 : * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
4099 : * going to call js_fun_{call,apply}. Normally, this call would go through
4100 : * js::Invoke to ultimately call 'this'. We can do much better by having
4101 : * the callIC cache and call 'this' directly. However, if it turns out that
4102 : * we are not actually calling js_fun_call, the callIC must act as normal.
4103 : *
4104 : * Note: do *NOT* use type information or inline state in any way when
4105 : * deciding whether to lower a CALL or APPLY. The stub calls here store
4106 : * their return values in a different slot, so when recompiling we need
4107 : * to go down the exact same path.
4108 : */
4109 174216 : bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
4110 :
4111 174216 : bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
4112 :
4113 : #ifdef JS_MONOIC
4114 174216 : if (debugMode() || newType) {
4115 : #endif
4116 83655 : if (applyTricks == LazyArgsObj) {
4117 : /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
4118 0 : jsop_arguments(REJOIN_RESUME);
4119 0 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4120 : }
4121 83655 : emitUncachedCall(callImmArgc, callingNew);
4122 83655 : applyTricks = NoApplyTricks;
4123 83655 : return true;
4124 : #ifdef JS_MONOIC
4125 : }
4126 :
4127 90561 : frame.forgetMismatchedObject(origCallee);
4128 90561 : if (lowerFunCallOrApply)
4129 2469 : frame.forgetMismatchedObject(origThis);
4130 :
4131 : /* Initialized by both branches below. */
4132 90561 : CallGenInfo callIC;
4133 90561 : CallPatchInfo callPatch;
4134 90561 : MaybeRegisterID icCalleeType; /* type to test for function-ness */
4135 : RegisterID icCalleeData; /* data to call */
4136 90561 : Address icRvalAddr; /* return slot on slow-path rejoin */
4137 :
4138 : /*
4139 : * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
4140 : * following labels (as used in finishThisUp):
4141 : * - funGuard -> hotJump
4142 : * - funGuard -> joinPoint
4143 : * - funGuard -> hotPathLabel
4144 : * - slowPathStart -> oolCall
4145 : * - slowPathStart -> oolJump
4146 : * - slowPathStart -> icCall
4147 : * - slowPathStart -> slowJoinPoint
4148 : * Because the call ICs are fairly long (compared to PICs), we don't reserve the space in each
4149 : * path until the first usage of funGuard (for the in-line path) or slowPathStart (for the
4150 : * out-of-line path).
4151 : */
4152 :
4153 : /* Initialized only on lowerFunCallOrApply branch. */
4154 90561 : Jump uncachedCallSlowRejoin;
4155 90561 : CallPatchInfo uncachedCallPatch;
4156 :
4157 : {
4158 90561 : MaybeRegisterID origCalleeType, maybeOrigCalleeData;
4159 : RegisterID origCalleeData;
4160 :
4161 : /* Get the callee in registers. */
4162 90561 : frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
4163 90561 : origCalleeData = maybeOrigCalleeData.reg();
4164 181122 : PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);
4165 :
4166 90561 : if (lowerFunCallOrApply) {
4167 2469 : MaybeRegisterID origThisType, maybeOrigThisData;
4168 : RegisterID origThisData;
4169 : {
4170 : /* Get thisv in registers. */
4171 2469 : frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
4172 2469 : origThisData = maybeOrigThisData.reg();
4173 4938 : PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);
4174 :
4175 : /* Leaves pinned regs untouched. */
4176 2469 : frame.syncAndKill(Uses(speculatedArgc + 2));
4177 : }
4178 :
4179 : checkCallApplySpeculation(callImmArgc, speculatedArgc,
4180 : origCallee, origThis,
4181 : origCalleeType, origCalleeData,
4182 : origThisType, origThisData,
4183 2469 : &uncachedCallSlowRejoin, &uncachedCallPatch);
4184 :
4185 2469 : icCalleeType = origThisType;
4186 2469 : icCalleeData = origThisData;
4187 2469 : icRvalAddr = frame.addressOf(origThis);
4188 :
4189 : /*
4190 : * For f.call(), since we compile the ic under the (checked)
4191 : * assumption that call == js_fun_call, we still have a static
4192 : * frame size. For f.apply(), the frame size depends on the dynamic
4193 : * length of the array passed to apply.
4194 : */
4195 2469 : if (*PC == JSOP_FUNCALL)
4196 1432 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc - 1);
4197 : else
4198 1037 : callIC.frameSize.initDynamic();
4199 : } else {
4200 : /* Leaves pinned regs untouched. */
4201 88092 : frame.syncAndKill(Uses(speculatedArgc + 2));
4202 :
4203 88092 : icCalleeType = origCalleeType;
4204 88092 : icCalleeData = origCalleeData;
4205 88092 : icRvalAddr = frame.addressOf(origCallee);
4206 88092 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc);
4207 : }
4208 : }
4209 :
4210 90561 : callFrameSize = callIC.frameSize;
4211 :
4212 90561 : callIC.typeMonitored = monitored(PC) || hasTypeBarriers(PC);
4213 :
4214 : /* Test the type if necessary. Failing this always takes a really slow path. */
4215 90561 : MaybeJump notObjectJump;
4216 90561 : if (icCalleeType.isSet())
4217 54348 : notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
4218 :
4219 : /*
4220 : * For an optimized apply, keep icCalleeData in a callee-saved register for
4221 : * the subsequent ic::SplatApplyArgs call.
4222 : */
4223 90561 : Registers tempRegs(Registers::AvailRegs);
4224 90561 : if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
4225 816 : RegisterID x = tempRegs.takeAnyReg(Registers::SavedRegs).reg();
4226 816 : masm.move(icCalleeData, x);
4227 816 : icCalleeData = x;
4228 : } else {
4229 89745 : tempRegs.takeReg(icCalleeData);
4230 : }
4231 :
4232 : /* Reserve space just before initialization of funGuard. */
4233 : RESERVE_IC_SPACE(masm);
4234 :
4235 : /*
4236 : * Guard on the callee identity. This misses on the first run. If the
4237 : * callee is scripted, compiled/compilable, and argc == nargs, then this
4238 : * guard is patched, and the compiled code address is baked in.
4239 : */
4240 90561 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
4241 90561 : callIC.funJump = j;
4242 :
4243 : /* Reserve space just before initialization of slowPathStart. */
4244 : RESERVE_OOL_SPACE(stubcc.masm);
4245 :
4246 90561 : Jump rejoin1, rejoin2;
4247 : {
4248 : RESERVE_OOL_SPACE(stubcc.masm);
4249 90561 : stubcc.linkExitDirect(j, stubcc.masm.label());
4250 90561 : callIC.slowPathStart = stubcc.masm.label();
4251 :
4252 90561 : RegisterID tmp = tempRegs.takeAnyReg().reg();
4253 :
4254 : /*
4255 : * Test if the callee is even a function. If this doesn't match, we
4256 : * take a _really_ slow path later.
4257 : */
4258 90561 : Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData, tmp);
4259 :
4260 : /* Test if the function is scripted. */
4261 90561 : stubcc.masm.load16(Address(icCalleeData, offsetof(JSFunction, flags)), tmp);
4262 90561 : stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
4263 90561 : Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
4264 90561 : tempRegs.putReg(tmp);
4265 :
4266 : /*
4267 : * N.B. After this call, the frame will have a dynamic frame size.
4268 : * Check after the function is known not to be a native so that the
4269 : * catch-all/native path has a static depth.
4270 : */
4271 90561 : if (callIC.frameSize.isDynamic())
4272 1037 : OOL_STUBCALL(ic::SplatApplyArgs, REJOIN_CALL_SPLAT);
4273 :
4274 : /*
4275 : * No-op jump that gets patched by ic::New/Call to the stub generated
4276 : * by generateFullCallStub.
4277 : */
4278 90561 : Jump toPatch = stubcc.masm.jump();
4279 90561 : toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
4280 90561 : callIC.oolJump = toPatch;
4281 90561 : callIC.icCall = stubcc.masm.label();
4282 :
4283 90561 : RejoinState rejoinState = callIC.frameSize.rejoinState(PC, false);
4284 :
4285 : /*
4286 : * At this point the function is definitely scripted, so we try to
4287 : * compile it and patch either funGuard/funJump or oolJump. This code
4288 : * is only executed once.
4289 : */
4290 90561 : callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4291 90561 : void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
4292 90561 : if (callIC.frameSize.isStatic()) {
4293 89524 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, frame.totalDepth());
4294 : } else {
4295 1037 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, -1);
4296 : }
4297 :
4298 90561 : callIC.funObjReg = icCalleeData;
4299 :
4300 : /*
4301 : * The IC call either returns NULL, meaning call completed, or a
4302 : * function pointer to jump to.
4303 : */
4304 : rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4305 90561 : Registers::ReturnReg);
4306 90561 : if (callIC.frameSize.isStatic())
4307 89524 : stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
4308 : else
4309 1037 : stubcc.masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
4310 90561 : stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
4311 90561 : callPatch.hasSlowNcode = true;
4312 : callPatch.slowNcodePatch =
4313 : stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
4314 90561 : Address(JSFrameReg, StackFrame::offsetOfNcode()));
4315 90561 : stubcc.masm.jump(Registers::ReturnReg);
4316 :
4317 :
4318 :
4319 : /*
4320 : * This ool path is the catch-all for everything but scripted function
4321 : * callees. For native functions, ic::NativeNew/NativeCall will repatch
4322 : * funGuard/funJump with a fast call stub. All other cases
4323 : * (non-function callable objects and invalid callees) take the slow
4324 : * path through js::Invoke.
4325 : */
4326 90561 : if (notObjectJump.isSet())
4327 54348 : stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
4328 90561 : notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
4329 90561 : isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
4330 :
4331 90561 : callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4332 90561 : OOL_STUBCALL(callingNew ? ic::NativeNew : ic::NativeCall, rejoinState);
4333 :
4334 90561 : rejoin2 = stubcc.masm.jump();
4335 : }
4336 :
4337 : /*
4338 : * If the call site goes to a closure over the same function, it will
4339 : * generate an out-of-line stub that joins back here.
4340 : */
4341 90561 : callIC.hotPathLabel = masm.label();
4342 :
4343 90561 : uint32_t flags = 0;
4344 90561 : if (callingNew)
4345 8667 : flags |= StackFrame::CONSTRUCTING;
4346 :
4347 90561 : InlineFrameAssembler inlFrame(masm, callIC, flags);
4348 90561 : callPatch.hasFastNcode = true;
4349 90561 : callPatch.fastNcodePatch = inlFrame.assemble(NULL, PC);
4350 :
4351 90561 : callIC.hotJump = masm.jump();
4352 90561 : callIC.joinPoint = callPatch.joinPoint = masm.label();
4353 90561 : callIC.callIndex = callSites.length();
4354 90561 : addReturnSite();
4355 90561 : if (lowerFunCallOrApply)
4356 2469 : uncachedCallPatch.joinPoint = callIC.joinPoint;
4357 :
4358 : /*
4359 : * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
4360 : * in the in-line path so we can check the IC space now.
4361 : */
4362 : CHECK_IC_SPACE();
4363 :
4364 90561 : JSValueType type = knownPushedType(0);
4365 :
4366 90561 : frame.popn(speculatedArgc + 2);
4367 90561 : frame.takeReg(JSReturnReg_Type);
4368 90561 : frame.takeReg(JSReturnReg_Data);
4369 90561 : frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, type);
4370 :
4371 : BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
4372 : /* testUndefined = */ false,
4373 90561 : /* testReturn = */ true);
4374 :
4375 : /*
4376 : * Now that the frame state is set, generate the rejoin path. Note that, if
4377 : * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
4378 : * value has been placed at vp[1] which is not the stack address associated
4379 : * with frame.peek(-1).
4380 : */
4381 90561 : callIC.slowJoinPoint = stubcc.masm.label();
4382 90561 : rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4383 90561 : rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4384 90561 : JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
4385 90561 : frame.reloadEntry(stubcc.masm, icRvalAddr, frame.peek(-1));
4386 90561 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
4387 90561 : JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
4388 :
4389 : CHECK_OOL_SPACE();
4390 :
4391 90561 : if (lowerFunCallOrApply)
4392 2469 : stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
4393 :
4394 90561 : callICs.append(callIC);
4395 90561 : callPatches.append(callPatch);
4396 90561 : if (lowerFunCallOrApply)
4397 2469 : callPatches.append(uncachedCallPatch);
4398 :
4399 90561 : finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
4400 :
4401 90561 : applyTricks = NoApplyTricks;
4402 90561 : return true;
4403 : #endif
4404 : }
4405 :
     : /*
     :  * Try to compile 'Array(...)' / 'new Array(...)' inline: guard that the
     :  * callee is the original Array constructor for this global, then allocate
     :  * the result directly via stubs::NewDenseUnallocatedArray instead of
     :  * performing a full call. Returns Compile_InlineAbort whenever the site
     :  * does not match the patterns handled here, in which case the generic
     :  * call path is used instead.
     :  */
4406 : CompileStatus
4407 53159 : mjit::Compiler::callArrayBuiltin(uint32_t argc, bool callingNew)
4408 : {
4409 53159 : if (!globalObj)
4410 910 : return Compile_InlineAbort;
4411 :
4412 52249 : if (applyTricks == LazyArgsObj)
4413 21 : return Compile_InlineAbort;
4414 :
4415 52228 : FrameEntry *origCallee = frame.peek(-((int)argc + 2));
4416 52228 : if (origCallee->isNotType(JSVAL_TYPE_OBJECT))
4417 12 : return Compile_InlineAbort;
4418 :
     : /* Only specialize when the callee was named 'Array' at this use. */
4419 52216 : if (frame.extra(origCallee).name != cx->runtime->atomState.classAtoms[JSProto_Array])
4420 51873 : return Compile_InlineAbort;
4421 :
4422 : JSObject *arrayObj;
4423 343 : if (!js_GetClassObject(cx, globalObj, JSProto_Array, &arrayObj))
4424 0 : return Compile_Error;
4425 :
4426 343 : JSObject *arrayProto = globalObj->global().getOrCreateArrayPrototype(cx);
4427 343 : if (!arrayProto)
4428 0 : return Compile_Error;
4429 :
     : /* Only zero- or one-argument (int32 length, non-negative if constant) calls. */
4430 343 : if (argc > 1)
4431 38 : return Compile_InlineAbort;
4432 305 : FrameEntry *origArg = (argc == 1) ? frame.peek(-1) : NULL;
4433 305 : if (origArg) {
4434 174 : if (origArg->isNotType(JSVAL_TYPE_INT32))
4435 2 : return Compile_InlineAbort;
4436 172 : if (origArg->isConstant() && origArg->getValue().toInt32() < 0)
4437 0 : return Compile_InlineAbort;
4438 : }
4439 :
4440 303 : if (!origCallee->isTypeKnown()) {
4441 303 : Jump notObject = frame.testObject(Assembler::NotEqual, origCallee);
4442 303 : stubcc.linkExit(notObject, Uses(argc + 2));
4443 : }
4444 :
     : /* Guard that the callee really is the Array constructor object. */
4445 303 : RegisterID reg = frame.tempRegForData(origCallee);
4446 303 : Jump notArray = masm.branchPtr(Assembler::NotEqual, reg, ImmPtr(arrayObj));
4447 303 : stubcc.linkExit(notArray, Uses(argc + 2));
4448 :
4449 303 : int32_t knownSize = 0;
4450 303 : MaybeRegisterID sizeReg;
4451 303 : if (origArg) {
4452 172 : if (origArg->isConstant()) {
4453 103 : knownSize = origArg->getValue().toInt32();
4454 : } else {
4455 69 : if (!origArg->isTypeKnown()) {
4456 67 : Jump notInt = frame.testInt32(Assembler::NotEqual, origArg);
4457 67 : stubcc.linkExit(notInt, Uses(argc + 2));
4458 : }
     : /* Runtime length: reject negative values out of line. */
4459 69 : sizeReg = frame.tempRegForData(origArg);
4460 69 : Jump belowZero = masm.branch32(Assembler::LessThan, sizeReg.reg(), Imm32(0));
4461 69 : stubcc.linkExit(belowZero, Uses(argc + 2));
4462 : }
4463 : } else {
4464 131 : knownSize = 0;
4465 : }
4466 :
4467 303 : stubcc.leave();
4468 303 : stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
4469 303 : OOL_STUBCALL(callingNew ? stubs::SlowNew : stubs::SlowCall, REJOIN_FALLTHROUGH);
4470 :
4471 : {
4472 606 : PinRegAcrossSyncAndKill p1(frame, sizeReg);
4473 303 : frame.popn(argc + 2);
4474 303 : frame.syncAndKill(Uses(0));
4475 : }
4476 :
     : /*
     :  * The prototype travels to the stub in VMFrame::scratch (presumably read
     :  * back by NewDenseUnallocatedArray — its implementation is elsewhere);
     :  * the (possibly dynamic) length goes in ArgReg1.
     :  */
4477 303 : prepareStubCall(Uses(0));
4478 303 : masm.storePtr(ImmPtr(arrayProto), FrameAddress(offsetof(VMFrame, scratch)));
4479 303 : if (sizeReg.isSet())
4480 69 : masm.move(sizeReg.reg(), Registers::ArgReg1);
4481 : else
4482 234 : masm.move(Imm32(knownSize), Registers::ArgReg1);
4483 303 : INLINE_STUBCALL(stubs::NewDenseUnallocatedArray, REJOIN_PUSH_OBJECT);
4484 :
4485 303 : frame.takeReg(Registers::ReturnReg);
4486 303 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
4487 303 : frame.forgetType(frame.peek(-1));
4488 :
4489 303 : stubcc.rejoin(Changes(1));
4490 :
4491 303 : return Compile_Okay;
4492 : }
4493 :
4494 : /* Maximum number of calls we will inline at the same (possibly polymorphic) call site. */
4495 : static const uint32_t INLINE_SITE_LIMIT = 5;
4496 :
     : /*
     :  * Inline every scripted callee which the SSA analysis recorded for this
     :  * call site. Polymorphic sites are handled by chaining identity guards on
     :  * the callee object; sites with multiple callees or multiple-return
     :  * callees share a register allocation computed for the join point after
     :  * the call. Returns Compile_InlineAbort if no inline frames were recorded
     :  * for this PC, Compile_Retry if a callee turned out to be uncompileable.
     :  */
4497 : CompileStatus
4498 60261 : mjit::Compiler::inlineScriptedFunction(uint32_t argc, bool callingNew)
4499 : {
4500 60261 : JS_ASSERT(inlining());
4501 :
4502 : /* We already know which frames we are inlining at each PC, so scan the list of inline frames. */
4503 60261 : bool calleeMultipleReturns = false;
4504 120522 : Vector<JSScript *> inlineCallees(CompilerAllocPolicy(cx, *this));
4505 279383 : for (unsigned i = 0; i < ssa.numFrames(); i++) {
4506 219122 : if (ssa.iterFrame(i).parent == a->inlineIndex && ssa.iterFrame(i).parentpc == PC) {
4507 2932 : JSScript *script = ssa.iterFrame(i).script;
4508 2932 : inlineCallees.append(script);
4509 2932 : if (script->analysis()->numReturnSites() > 1)
4510 193 : calleeMultipleReturns = true;
4511 : }
4512 : }
4513 :
4514 60261 : if (inlineCallees.empty())
4515 57497 : return Compile_InlineAbort;
4516 :
4517 2764 : JS_ASSERT(!monitored(PC));
4518 :
4519 : /*
4520 : * Remove all dead entries from the frame's tracker. We will not recognize
4521 : * them as dead after pushing the new frame.
4522 : */
4523 2764 : frame.pruneDeadEntries();
4524 :
4525 2764 : RegisterAllocation *exitState = NULL;
4526 2764 : if (inlineCallees.length() > 1 || calleeMultipleReturns) {
4527 : /*
4528 : * Multiple paths through the callees, get a register allocation for
4529 : * the various incoming edges.
4530 : */
4531 344 : exitState = frame.computeAllocation(PC + JSOP_CALL_LENGTH);
4532 : }
4533 :
4534 : /*
4535 : * If this is a polymorphic callsite, get a register for the callee too.
4536 : * After this, do not touch the register state in the current frame until
4537 : * stubs for all callees have been generated.
4538 : */
4539 2764 : FrameEntry *origCallee = frame.peek(-((int)argc + 2));
4540 2764 : FrameEntry *entrySnapshot = NULL;
4541 2764 : MaybeRegisterID calleeReg;
4542 2764 : if (inlineCallees.length() > 1) {
4543 155 : frame.forgetMismatchedObject(origCallee);
4544 155 : calleeReg = frame.tempRegForData(origCallee);
4545 :
4546 155 : entrySnapshot = frame.snapshotState();
4547 155 : if (!entrySnapshot)
4548 0 : return Compile_Error;
4549 : }
4550 2764 : MaybeJump calleePrevious;
4551 :
4552 2764 : JSValueType returnType = knownPushedType(0);
4553 :
     : /* Whether the caller consumes the return value, and whether its type is known. */
4554 2764 : bool needReturnValue = JSOP_POP != (JSOp)*(PC + JSOP_CALL_LENGTH);
4555 2764 : bool syncReturnValue = needReturnValue && returnType == JSVAL_TYPE_UNKNOWN;
4556 :
4557 : /* Track register state after the call. */
4558 2764 : bool returnSet = false;
4559 2764 : AnyRegisterID returnRegister;
4560 2764 : const FrameEntry *returnEntry = NULL;
4561 :
4562 5528 : Vector<Jump, 4, CompilerAllocPolicy> returnJumps(CompilerAllocPolicy(cx, *this));
4563 :
     : /* Compile each potential callee, chained together by identity guards. */
4564 5696 : for (unsigned i = 0; i < inlineCallees.length(); i++) {
4565 2932 : if (entrySnapshot)
4566 323 : frame.restoreFromSnapshot(entrySnapshot);
4567 :
4568 2932 : JSScript *script = inlineCallees[i];
4569 : CompileStatus status;
4570 :
4571 2932 : status = pushActiveFrame(script, argc);
4572 2932 : if (status != Compile_Okay)
4573 0 : return status;
4574 :
4575 2932 : a->exitState = exitState;
4576 :
4577 : JaegerSpew(JSpew_Inlining, "inlining call to script (file \"%s\") (line \"%d\")\n",
4578 2932 : script->filename, script->lineno);
4579 :
4580 2932 : if (calleePrevious.isSet()) {
4581 168 : calleePrevious.get().linkTo(masm.label(), &masm);
4582 168 : calleePrevious = MaybeJump();
4583 : }
4584 :
4585 2932 : if (i + 1 != inlineCallees.length()) {
4586 : /* Guard on the callee, except when this object must be the callee. */
4587 168 : JS_ASSERT(calleeReg.isSet());
4588 168 : calleePrevious = masm.branchPtr(Assembler::NotEqual, calleeReg.reg(), ImmPtr(script->function()));
4589 : }
4590 :
4591 2932 : a->returnJumps = &returnJumps;
4592 2932 : a->needReturnValue = needReturnValue;
4593 2932 : a->syncReturnValue = syncReturnValue;
4594 2932 : a->returnValueDouble = returnType == JSVAL_TYPE_DOUBLE;
4595 2932 : if (returnSet) {
4596 91 : a->returnSet = true;
4597 91 : a->returnRegister = returnRegister;
4598 : }
4599 :
4600 : /*
4601 : * Update the argument frame entries in place if the callee has had an
4602 : * argument inferred as double but we are passing an int.
4603 : */
4604 2932 : ensureDoubleArguments();
4605 :
4606 2932 : markUndefinedLocals();
4607 :
4608 2932 : status = generateMethod();
4609 2932 : if (status != Compile_Okay) {
4610 0 : popActiveFrame();
4611 0 : if (status == Compile_Abort) {
4612 : /* The callee is uncompileable, mark it as uninlineable and retry. */
4613 0 : script->uninlineable = true;
4614 0 : types::MarkTypeObjectFlags(cx, script->function(),
4615 0 : types::OBJECT_FLAG_UNINLINEABLE);
4616 0 : return Compile_Retry;
4617 : }
4618 0 : return status;
4619 : }
4620 :
     : /* Remember where the first callee left the return value (register or entry). */
4621 2932 : if (needReturnValue && !returnSet) {
4622 2256 : if (a->returnSet) {
4623 2063 : returnSet = true;
4624 2063 : returnRegister = a->returnRegister;
4625 : } else {
4626 193 : returnEntry = a->returnEntry;
4627 : }
4628 : }
4629 :
4630 2932 : popActiveFrame();
4631 :
4632 2932 : if (i + 1 != inlineCallees.length())
4633 168 : returnJumps.append(masm.jump());
4634 : }
4635 :
4636 3213 : for (unsigned i = 0; i < returnJumps.length(); i++)
4637 449 : returnJumps[i].linkTo(masm.label(), &masm);
4638 :
4639 2764 : frame.popn(argc + 2);
4640 :
4641 2764 : if (entrySnapshot)
4642 155 : cx->array_delete(entrySnapshot);
4643 :
4644 2764 : if (exitState)
4645 344 : frame.discardForJoin(exitState, analysis->getCode(PC).stackDepth - (argc + 2));
4646 :
     : /* Push the inlined return value in whatever form the callees produced. */
4647 2764 : if (returnSet) {
4648 2063 : frame.takeReg(returnRegister);
4649 2063 : if (returnRegister.isReg())
4650 1977 : frame.pushTypedPayload(returnType, returnRegister.reg());
4651 : else
4652 86 : frame.pushDouble(returnRegister.fpreg());
4653 701 : } else if (returnEntry) {
4654 20 : frame.pushCopyOf((FrameEntry *) returnEntry);
4655 : } else {
4656 681 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4657 : }
4658 :
4659 : JaegerSpew(JSpew_Inlining, "finished inlining call to script (file \"%s\") (line \"%d\")\n",
4660 2764 : script->filename, script->lineno);
4661 :
4662 2764 : return Compile_Okay;
4663 : }
4664 :
4665 : /*
4666 : * This function must be called immediately after any instruction which could
4667 : * cause a new StackFrame to be pushed and could lead to a new debug trap
4668 : * being set. This includes any API callbacks and any scripted or native call.
4669 : */
     : /* Record a call site for this compilation; see the comment above on when
     :    this must be called. */
4670 : void
4671 2780483 : mjit::Compiler::addCallSite(const InternalCallSite &site)
4672 : {
     : /*
     : * Order matters: some callers (e.g. inlineStubCall) capture
     : * callSites.length() before appending, as the index of the site
     : * being added here.
     : */
4673 2780483 : callSites.append(site);
4674 2780483 : }
4675 :
     : /*
     :  * Emit an inline (fast-path) stub call and record its call site. The
     :  * patchable data pointer emitted by emitStubCall is stored on the site
     :  * (site.inlinePatch). If loop invariant code is currently being
     :  * generated, the call is also registered with the loop so invariant
     :  * code can be linked in at this site.
     :  */
4676 : void
4677 510211 : mjit::Compiler::inlineStubCall(void *stub, RejoinState rejoin, Uses uses)
4678 : {
4679 510211 : DataLabelPtr inlinePatch;
4680 510211 : Call cl = emitStubCall(stub, &inlinePatch);
4681 : InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
4682 510211 : rejoin, false);
4683 510211 : site.inlinePatch = inlinePatch;
     : /* callSites.length() is the index this site will get in addCallSite below. */
4684 510211 : if (loop && loop->generatingInvariants()) {
4685 1273 : Jump j = masm.jump();
4686 1273 : Label l = masm.label();
4687 1273 : loop->addInvariantCall(j, l, false, false, callSites.length(), uses);
4688 : }
4689 510211 : addCallSite(site);
4690 510211 : }
4691 :
4692 : bool
4693 247 : mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
4694 : {
4695 247 : JS_ASSERT(lhs.isPrimitive());
4696 247 : JS_ASSERT(rhs.isPrimitive());
4697 :
4698 247 : if (lhs.isString() && rhs.isString()) {
4699 : int32_t cmp;
4700 88 : CompareStrings(cx, lhs.toString(), rhs.toString(), &cmp);
4701 88 : switch (op) {
4702 : case JSOP_LT:
4703 0 : return cmp < 0;
4704 : case JSOP_LE:
4705 0 : return cmp <= 0;
4706 : case JSOP_GT:
4707 0 : return cmp > 0;
4708 : case JSOP_GE:
4709 0 : return cmp >= 0;
4710 : case JSOP_EQ:
4711 57 : return cmp == 0;
4712 : case JSOP_NE:
4713 31 : return cmp != 0;
4714 : default:
4715 0 : JS_NOT_REACHED("NYI");
4716 : }
4717 : } else {
4718 : double ld, rd;
4719 :
4720 : /* These should be infallible w/ primitives. */
4721 159 : JS_ALWAYS_TRUE(ToNumber(cx, lhs, &ld));
4722 159 : JS_ALWAYS_TRUE(ToNumber(cx, rhs, &rd));
4723 159 : switch(op) {
4724 : case JSOP_LT:
4725 36 : return ld < rd;
4726 : case JSOP_LE:
4727 15 : return ld <= rd;
4728 : case JSOP_GT:
4729 44 : return ld > rd;
4730 : case JSOP_GE:
4731 12 : return ld >= rd;
4732 : case JSOP_EQ: /* fall through */
4733 : case JSOP_NE:
4734 : /* Special case null/undefined/void comparisons. */
4735 52 : if (lhs.isNullOrUndefined()) {
4736 4 : if (rhs.isNullOrUndefined())
4737 0 : return op == JSOP_EQ;
4738 4 : return op == JSOP_NE;
4739 : }
4740 48 : if (rhs.isNullOrUndefined())
4741 16 : return op == JSOP_NE;
4742 :
4743 : /* Normal return. */
4744 32 : return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
4745 : default:
4746 0 : JS_NOT_REACHED("NYI");
4747 : }
4748 : }
4749 :
4750 : JS_NOT_REACHED("NYI");
4751 : return false;
4752 : }
4753 :
4754 : bool
4755 264 : mjit::Compiler::constantFoldBranch(jsbytecode *target, bool taken)
4756 : {
4757 264 : if (taken) {
4758 169 : if (!frame.syncForBranch(target, Uses(0)))
4759 0 : return false;
4760 169 : Jump j = masm.jump();
4761 169 : if (!jumpAndRun(j, target))
4762 0 : return false;
4763 : } else {
4764 : /*
4765 : * Branch is never taken, but clean up any loop
4766 : * if this is a backedge.
4767 : */
4768 95 : if (target < PC && !finishLoop(target))
4769 0 : return false;
4770 : }
4771 264 : return true;
4772 : }
4773 :
     : /*
     :  * Emit a fully stubbed comparison. With no fused branch (target == NULL)
     :  * the stub's boolean result is pushed; when fused with JSOP_IFEQ or
     :  * JSOP_IFNE the return value is branched on directly.
     :  */
4774 : bool
4775 1462 : mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
4776 : {
     : /* A fused branch may leave the opcode, so everything must be synced. */
4777 1462 : if (target)
4778 809 : frame.syncAndKillEverything();
4779 : else
4780 653 : frame.syncAndKill(Uses(2));
4781 :
4782 1462 : prepareStubCall(Uses(2));
4783 1462 : INLINE_STUBCALL(stub, target ? REJOIN_BRANCH : REJOIN_PUSH_BOOLEAN);
4784 1462 : frame.popn(2);
4785 :
4786 1462 : if (!target) {
4787 653 : frame.takeReg(Registers::ReturnReg);
4788 653 : frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
4789 653 : return true;
4790 : }
4791 :
4792 809 : JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
4793 : Jump j = masm.branchTest32(GetStubCompareCondition(fused), Registers::ReturnReg,
4794 809 : Registers::ReturnReg);
4795 809 : return jumpAndRun(j, target);
4796 : }
4797 :
     : /*
     :  * Fully stubbed property set: call the (strict-mode aware) SetName stub
     :  * with the property name in ArgReg1.
     :  */
4798 : void
4799 222 : mjit::Compiler::jsop_setprop_slow(PropertyName *name)
4800 : {
4801 222 : prepareStubCall(Uses(2));
4802 222 : masm.move(ImmPtr(name), Registers::ArgReg1);
4803 222 : INLINE_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
4804 : JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
     : /* shimmy(1) presumably drops the object beneath the assigned value,
     :    leaving the rvalue on top -- confirm against FrameState::shimmy. */
4805 222 : frame.shimmy(1);
4806 222 : if (script->scriptCounts)
4807 0 : bumpPropCount(PC, PCCounts::PROP_OTHER);
4808 222 : }
4809 :
     : /*
     :  * Fully stubbed property get. For 'prototype' fetches made while computing
     :  * 'this' (forPrototype) the no-cache stub and REJOIN_THIS_PROTOTYPE are
     :  * used instead; see ::jsop_getprop.
     :  */
4810 : void
4811 7092 : mjit::Compiler::jsop_getprop_slow(PropertyName *name, bool forPrototype)
4812 : {
4813 : /* See ::jsop_getprop */
4814 7092 : RejoinState rejoin = forPrototype ? REJOIN_THIS_PROTOTYPE : REJOIN_GETTER;
4815 :
4816 7092 : prepareStubCall(Uses(1));
4817 7092 : masm.move(ImmPtr(name), Registers::ArgReg1);
4818 7092 : INLINE_STUBCALL(forPrototype ? stubs::GetPropNoCache : stubs::GetProp, rejoin);
4819 :
4820 7092 : if (!forPrototype)
4821 7092 : testPushedType(rejoin, -1, /* ool = */ false);
4822 :
     : /* The stub's result type is unknown; push a synced unknown value. */
4823 7092 : frame.pop();
4824 7092 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4825 :
4826 7092 : if (script->scriptCounts)
4827 0 : bumpPropCount(PC, PCCounts::PROP_OTHER);
4828 7092 : }
4829 :
4830 : #ifdef JS_MONOIC
     : /* Emit a patchable move of the global-name IC's address into ArgReg1,
     :    recording the label so the real pointer can be patched in later. */
4831 : void
4832 420321 : mjit::Compiler::passMICAddress(GlobalNameICInfo &ic)
4833 : {
4834 420321 : ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4835 420321 : }
4836 : #endif
4837 :
4838 : #if defined JS_POLYIC
     : /* Emit a patchable move of the polymorphic IC's address into ArgReg1,
     :    recording the label so the real pointer can be patched in later. */
4839 : void
4840 156730 : mjit::Compiler::passICAddress(BaseICInfo *ic)
4841 : {
4842 156730 : ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4843 156730 : }
4844 :
     : /*
     :  * Compile JSOP_GETPROP/JSOP_CALLPROP/JSOP_LENGTH for 'name' on the value
     :  * at the top of the frame. Fast paths, tried in order: known-string
     :  * 'length', dense-array / typed-array / lazy-arguments 'length',
     :  * singleton function results, loop-invariant properties, known definite
     :  * slots from type inference, and dynamic dispatch; everything else goes
     :  * through a GETPROP PIC. 'knownType' is the inferred type of the pushed
     :  * value (JSVAL_TYPE_UNKNOWN if none), 'doTypeCheck' controls whether an
     :  * object-type guard is emitted, and 'forPrototype' marks fetches of
     :  * 'prototype' made while computing 'this', which use the no-cache stub
     :  * and a dedicated rejoin state.
     :  */
4845 : bool
4846 142217 : mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType,
4847 : bool doTypeCheck, bool forPrototype)
4848 : {
4849 142217 : FrameEntry *top = frame.peek(-1);
4850 :
4851 : /*
4852 : * Use a different rejoin for GETPROP computing the 'this' object, as we
4853 : * can't use the current bytecode within InternalInterpret to tell this is
4854 : * fetching the 'this' value.
4855 : */
4856 142217 : RejoinState rejoin = REJOIN_GETTER;
4857 142217 : if (forPrototype) {
4858 957 : JS_ASSERT(top->isType(JSVAL_TYPE_OBJECT) &&
4859 957 : name == cx->runtime->atomState.classPrototypeAtom);
4860 957 : rejoin = REJOIN_THIS_PROTOTYPE;
4861 : }
4862 :
4863 : /* Handle length accesses on known strings without using a PIC. */
4864 160075 : if (name == cx->runtime->atomState.lengthAtom &&
4865 17308 : top->isType(JSVAL_TYPE_STRING) &&
4866 550 : (!cx->typeInferenceEnabled() || knownPushedType(0) == JSVAL_TYPE_INT32)) {
4867 194 : if (top->isConstant()) {
4868 0 : JSString *str = top->getValue().toString();
4869 : Value v;
4870 0 : v.setNumber(uint32_t(str->length()));
4871 0 : frame.pop();
4872 0 : frame.push(v);
4873 : } else {
     : /* Extract the length from the string's length-and-flags word. */
4874 194 : RegisterID str = frame.ownRegForData(top);
4875 194 : masm.loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), str);
4876 194 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), str);
4877 194 : frame.pop();
4878 194 : frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
4879 : }
4880 194 : return true;
4881 : }
4882 :
4883 183462 : if (top->mightBeType(JSVAL_TYPE_OBJECT) &&
4884 16579 : JSOp(*PC) == JSOP_LENGTH && cx->typeInferenceEnabled() &&
4885 24860 : !hasTypeBarriers(PC) && knownPushedType(0) == JSVAL_TYPE_INT32) {
4886 : /* Check if this is an array we can make a loop invariant entry for. */
4887 7868 : if (loop && loop->generatingInvariants()) {
4888 245 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
4889 245 : FrameEntry *fe = loop->invariantLength(topv);
4890 245 : if (fe) {
4891 180 : frame.learnType(fe, JSVAL_TYPE_INT32, false);
4892 180 : frame.pop();
4893 180 : frame.pushCopyOf(fe);
4894 180 : if (script->scriptCounts)
4895 0 : bumpPropCount(PC, PCCounts::PROP_STATIC);
4896 180 : return true;
4897 : }
4898 : }
4899 :
4900 7688 : types::TypeSet *types = analysis->poppedTypes(PC, 0);
4901 :
4902 : /*
4903 : * Check if we are accessing the 'length' property of a known dense array.
4904 : * Note that if the types are known to indicate dense arrays, their lengths
4905 : * must fit in an int32.
4906 : */
4907 7688 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
4908 7162 : bool isObject = top->isTypeKnown();
4909 7162 : if (!isObject) {
4910 374 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
4911 374 : stubcc.linkExit(notObject, Uses(1));
4912 374 : stubcc.leave();
4913 374 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
4914 374 : OOL_STUBCALL(stubs::GetProp, rejoin);
4915 374 : if (rejoin == REJOIN_GETTER)
4916 374 : testPushedType(rejoin, -1);
4917 : }
4918 7162 : RegisterID result = frame.allocReg();
4919 7162 : RegisterID reg = frame.tempRegForData(top);
4920 7162 : frame.pop();
4921 7162 : masm.loadPtr(Address(reg, JSObject::offsetOfElements()), result);
4922 7162 : masm.load32(Address(result, ObjectElements::offsetOfLength()), result);
4923 7162 : frame.pushTypedPayload(JSVAL_TYPE_INT32, result);
4924 7162 : if (script->scriptCounts)
4925 0 : bumpPropCount(PC, PCCounts::PROP_DEFINITE);
4926 7162 : if (!isObject)
4927 374 : stubcc.rejoin(Changes(1));
4928 7162 : return true;
4929 : }
4930 :
4931 : /*
4932 : * Check if we're accessing the 'length' property of a typed array.
4933 : * The typed array length always fits in an int32.
4934 : */
4935 526 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY)) {
4936 320 : bool isObject = top->isTypeKnown();
4937 320 : if (!isObject) {
4938 176 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
4939 176 : stubcc.linkExit(notObject, Uses(1));
4940 176 : stubcc.leave();
4941 176 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
4942 176 : OOL_STUBCALL(stubs::GetProp, rejoin);
4943 176 : if (rejoin == REJOIN_GETTER)
4944 176 : testPushedType(rejoin, -1);
4945 : }
4946 320 : RegisterID reg = frame.copyDataIntoReg(top);
4947 320 : frame.pop();
4948 320 : masm.loadPayload(Address(reg, TypedArray::lengthOffset()), reg);
4949 320 : frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
4950 320 : if (script->scriptCounts)
4951 0 : bumpPropCount(PC, PCCounts::PROP_DEFINITE);
4952 320 : if (!isObject)
4953 176 : stubcc.rejoin(Changes(1));
4954 320 : return true;
4955 : }
4956 :
4957 : /*
4958 : * Check if we are accessing the 'length' of the lazy arguments for the
4959 : * current frame.
4960 : */
4961 206 : if (types->isLazyArguments(cx)) {
4962 0 : frame.pop();
4963 0 : frame.pushWord(Address(JSFrameReg, StackFrame::offsetOfNumActual()), JSVAL_TYPE_INT32);
4964 0 : if (script->scriptCounts)
4965 0 : bumpPropCount(PC, PCCounts::PROP_DEFINITE);
4966 0 : return true;
4967 : }
4968 : }
4969 :
4970 : /* If the access will definitely be fetching a particular value, nop it. */
4971 : bool testObject;
4972 : JSObject *singleton =
4973 134361 : (*PC == JSOP_GETPROP || *PC == JSOP_CALLPROP) ? pushedSingleton(0) : NULL;
4974 169134 : if (singleton && singleton->isFunction() && !hasTypeBarriers(PC) &&
4975 34773 : testSingletonPropertyTypes(top, ATOM_TO_JSID(name), &testObject)) {
4976 33154 : if (testObject) {
4977 1486 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
4978 1486 : stubcc.linkExit(notObject, Uses(1));
4979 1486 : stubcc.leave();
4980 1486 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
4981 1486 : OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
4982 1486 : testPushedType(REJOIN_FALLTHROUGH, -1);
4983 : }
4984 :
4985 33154 : frame.pop();
4986 33154 : frame.push(ObjectValue(*singleton));
4987 :
4988 33154 : if (script->scriptCounts && cx->typeInferenceEnabled())
4989 0 : bumpPropCount(PC, PCCounts::PROP_STATIC);
4990 :
4991 33154 : if (testObject)
4992 1486 : stubcc.rejoin(Changes(1));
4993 :
4994 33154 : return true;
4995 : }
4996 :
4997 : /* Check if this is a property access we can make a loop invariant entry for. */
4998 101207 : if (loop && loop->generatingInvariants() && !hasTypeBarriers(PC)) {
4999 941 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
5000 941 : if (FrameEntry *fe = loop->invariantProperty(topv, ATOM_TO_JSID(name))) {
5001 82 : if (knownType != JSVAL_TYPE_UNKNOWN && knownType != JSVAL_TYPE_DOUBLE)
5002 82 : frame.learnType(fe, knownType, false);
5003 82 : frame.pop();
5004 82 : frame.pushCopyOf(fe);
5005 82 : if (script->scriptCounts)
5006 0 : bumpPropCount(PC, PCCounts::PROP_STATIC);
5007 82 : return true;
5008 : }
5009 : }
5010 :
5011 : /* If the incoming type will never PIC, take slow path. */
5012 101125 : if (top->isNotType(JSVAL_TYPE_OBJECT)) {
5013 7092 : jsop_getprop_slow(name, forPrototype);
5014 7092 : return true;
5015 : }
5016 :
5017 94033 : frame.forgetMismatchedObject(top);
5018 :
5019 : /*
5020 : * Check if we are accessing a known type which always has the property
5021 : * in a particular inline slot. Get the property directly in this case,
5022 : * without using an IC.
5023 : */
5024 94033 : jsid id = ATOM_TO_JSID(name);
5025 94033 : types::TypeSet *types = frame.extra(top).types;
5026 241547 : if (types && !types->unknownObject() &&
5027 58371 : types->getObjectCount() == 1 &&
5028 32640 : types->getTypeObject(0) != NULL &&
5029 28274 : !types->getTypeObject(0)->unknownProperties() &&
5030 28229 : id == types::MakeTypeId(cx, id)) {
5031 28209 : JS_ASSERT(!forPrototype);
5032 28209 : types::TypeObject *object = types->getTypeObject(0);
5033 28209 : types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
5034 28209 : if (!propertyTypes)
5035 0 : return false;
5036 37258 : if (propertyTypes->isDefiniteProperty() &&
5037 9049 : !propertyTypes->isOwnProperty(cx, object, true)) {
5038 9039 : types->addFreeze(cx);
5039 9039 : uint32_t slot = propertyTypes->definiteSlot();
5040 9039 : bool isObject = top->isTypeKnown();
5041 9039 : if (!isObject) {
5042 2916 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5043 2916 : stubcc.linkExit(notObject, Uses(1));
5044 2916 : stubcc.leave();
5045 2916 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5046 2916 : OOL_STUBCALL(stubs::GetProp, rejoin);
5047 2916 : if (rejoin == REJOIN_GETTER)
5048 2916 : testPushedType(rejoin, -1);
5049 : }
5050 9039 : RegisterID reg = frame.tempRegForData(top);
5051 9039 : frame.pop();
5052 :
5053 9039 : if (script->scriptCounts)
5054 0 : bumpPropCount(PC, PCCounts::PROP_DEFINITE);
5055 :
5056 9039 : Address address(reg, JSObject::getFixedSlotOffset(slot));
5057 9039 : BarrierState barrier = pushAddressMaybeBarrier(address, knownType, false);
5058 9039 : if (!isObject)
5059 2916 : stubcc.rejoin(Changes(1));
5060 9039 : finishBarrier(barrier, rejoin, 0);
5061 :
5062 9039 : return true;
5063 : }
5064 : }
5065 :
5066 : /* Check for a dynamic dispatch. */
5067 84994 : if (cx->typeInferenceEnabled()) {
5068 56380 : if (*PC == JSOP_CALLPROP && jsop_getprop_dispatch(name))
5069 7068 : return true;
5070 : }
5071 :
5072 77926 : if (script->scriptCounts)
5073 0 : bumpPropCount(PC, PCCounts::PROP_OTHER);
5074 :
5075 : /*
5076 : * These two must be loaded first. The objReg because the string path
5077 : * wants to read it, and the shapeReg because it could cause a spill that
5078 : * the string path wouldn't sink back.
5079 : */
5080 77926 : RegisterID objReg = frame.copyDataIntoReg(top);
5081 77926 : RegisterID shapeReg = frame.allocReg();
5082 :
5083 : RESERVE_IC_SPACE(masm);
5084 :
5085 77926 : PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC));
5086 :
5087 : /*
5088 : * If this access has been on a shape with a getter hook, make preparations
5089 : * so that we can generate a stub to call the hook directly (rather than be
5090 : * forced to make a stub call). Sync the stack up front and kill all
5091 : * registers so that PIC stubs can contain calls, and always generate a
5092 : * type barrier if inference is enabled (known property types do not
5093 : * reflect properties with getter hooks).
5094 : */
5095 : pic.canCallHook = pic.forcedTypeBarrier =
5096 77926 : !forPrototype &&
5097 : JSOp(*PC) == JSOP_GETPROP &&
5098 77926 : analysis->getCode(PC).accessGetter;
5099 :
5100 : /* Guard that the type is an object. */
5101 77926 : Label typeCheck;
5102 77926 : if (doTypeCheck && !top->isTypeKnown()) {
5103 51133 : RegisterID reg = frame.tempRegForType(top);
5104 51133 : pic.typeReg = reg;
5105 :
5106 51133 : if (pic.canCallHook) {
5107 390 : PinRegAcrossSyncAndKill p1(frame, reg);
5108 195 : frame.syncAndKillEverything();
5109 : }
5110 :
5111 : /* Start the hot path where it's easy to patch it. */
5112 51133 : pic.fastPathStart = masm.label();
5113 51133 : Jump j = masm.testObject(Assembler::NotEqual, reg);
5114 51133 : typeCheck = masm.label();
5115 51133 : RETURN_IF_OOM(false);
5116 :
5117 51133 : pic.typeCheck = stubcc.linkExit(j, Uses(1));
5118 51133 : pic.hasTypeCheck = true;
5119 : } else {
5120 26793 : if (pic.canCallHook)
5121 872 : frame.syncAndKillEverything();
5122 :
5123 26793 : pic.fastPathStart = masm.label();
5124 26793 : pic.hasTypeCheck = false;
5125 26793 : pic.typeReg = Registers::ReturnReg;
5126 : }
5127 :
5128 77926 : pic.shapeReg = shapeReg;
5129 77926 : pic.name = name;
5130 :
5131 : /* Guard on shape. */
5132 77926 : masm.loadShape(objReg, shapeReg);
5133 77926 : pic.shapeGuard = masm.label();
5134 :
5135 77926 : DataLabelPtr inlineShapeLabel;
5136 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
5137 77926 : inlineShapeLabel, ImmPtr(NULL));
5138 77926 : Label inlineShapeJump = masm.label();
5139 :
5140 : RESERVE_OOL_SPACE(stubcc.masm);
5141 77926 : pic.slowPathStart = stubcc.linkExit(j, Uses(1));
5142 :
5143 77926 : stubcc.leave();
5144 77926 : passICAddress(&pic);
5145 77926 : pic.slowPathCall = OOL_STUBCALL(forPrototype ? ic::GetPropNoCache : ic::GetProp, rejoin);
5146 : CHECK_OOL_SPACE();
5147 77926 : if (rejoin == REJOIN_GETTER)
5148 76969 : testPushedType(rejoin, -1);
5149 :
5150 : /* Load the base slot address. */
5151 77926 : Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
5152 77926 : objReg);
5153 :
5154 : /* Copy the slot value to the expression stack. */
     : /* Dummy offset; the real slot offset is patched in through the
     :    value-load label recorded below. */
5155 77926 : Address slot(objReg, 1 << 24);
5156 77926 : frame.pop();
5157 :
5158 77926 : Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
5159 77926 : pic.fastPathRejoin = masm.label();
5160 :
5161 77926 : RETURN_IF_OOM(false);
5162 :
5163 : /* Initialize op labels. */
5164 77926 : GetPropLabels &labels = pic.getPropLabels();
5165 77926 : labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
5166 77926 : labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeLabel);
5167 :
5168 77926 : labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
5169 77926 : if (pic.hasTypeCheck)
5170 51133 : labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
5171 77926 : labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
5172 :
5173 : CHECK_IC_SPACE();
5174 :
5175 77926 : pic.objReg = objReg;
5176 77926 : frame.pushRegs(shapeReg, objReg, knownType);
5177 : BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, false, false,
5178 77926 : /* force = */ pic.canCallHook);
5179 :
5180 77926 : stubcc.rejoin(Changes(1));
5181 77926 : pics.append(pic);
5182 :
5183 77926 : finishBarrier(barrier, rejoin, 0);
5184 77926 : return true;
5185 : }
5186 :
5187 : bool
5188 73343 : mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id)
5189 : {
5190 : /*
5191 : * We would like to completely no-op property/global accesses which can
5192 : * produce only a particular JSObject or undefined, provided we can
5193 : * determine the pushed value must not be undefined (or, if it could be
5194 : * undefined, a recompilation will be triggered).
5195 : *
5196 : * If the access definitely goes through obj, either directly or on the
5197 : * prototype chain, then if obj has a defined property now, and the
5198 : * property has a default or method shape, the only way it can produce
5199 : * undefined in the future is if it is deleted. Deletion causes type
5200 : * properties to be explicitly marked with undefined.
5201 : */
5202 :
5203 73343 : JSObject *nobj = obj;
5204 298685 : while (nobj) {
5205 152001 : if (!nobj->isNative())
5206 2 : return false;
5207 151999 : if (nobj->getClass()->ops.lookupGeneric)
5208 0 : return false;
5209 151999 : nobj = nobj->getProto();
5210 : }
5211 :
5212 : JSObject *holder;
5213 73341 : JSProperty *prop = NULL;
5214 73341 : if (!obj->lookupGeneric(cx, id, &holder, &prop))
5215 0 : return false;
5216 73341 : if (!prop)
5217 99 : return false;
5218 :
5219 73242 : Shape *shape = (Shape *) prop;
5220 73242 : if (shape->hasDefaultGetter()) {
5221 73238 : if (!shape->hasSlot())
5222 0 : return false;
5223 73238 : if (holder->getSlot(shape->slot()).isUndefined())
5224 0 : return false;
5225 : } else {
5226 4 : return false;
5227 : }
5228 :
5229 73238 : return true;
5230 : }
5231 :
5232 : bool
5233 34773 : mjit::Compiler::testSingletonPropertyTypes(FrameEntry *top, jsid id, bool *testObject)
5234 : {
5235 34773 : *testObject = false;
5236 :
5237 34773 : types::TypeSet *types = frame.extra(top).types;
5238 34773 : if (!types || types->unknownObject())
5239 32 : return false;
5240 :
5241 34741 : JSObject *singleton = types->getSingleton(cx);
5242 34741 : if (singleton)
5243 7296 : return testSingletonProperty(singleton, id);
5244 :
5245 27445 : if (!globalObj)
5246 0 : return false;
5247 :
5248 : JSProtoKey key;
5249 27445 : JSValueType type = types->getKnownTypeTag(cx);
5250 27445 : switch (type) {
5251 : case JSVAL_TYPE_STRING:
5252 8673 : key = JSProto_String;
5253 8673 : break;
5254 :
5255 : case JSVAL_TYPE_INT32:
5256 : case JSVAL_TYPE_DOUBLE:
5257 6 : key = JSProto_Number;
5258 6 : break;
5259 :
5260 : case JSVAL_TYPE_BOOLEAN:
5261 0 : key = JSProto_Boolean;
5262 0 : break;
5263 :
5264 : case JSVAL_TYPE_OBJECT:
5265 : case JSVAL_TYPE_UNKNOWN:
5266 18766 : if (types->getObjectCount() == 1 && !top->isNotType(JSVAL_TYPE_OBJECT)) {
5267 17192 : JS_ASSERT_IF(top->isTypeKnown(), top->isType(JSVAL_TYPE_OBJECT));
5268 17192 : types::TypeObject *object = types->getTypeObject(0);
5269 17192 : if (object && object->proto) {
5270 17192 : if (!testSingletonProperty(object->proto, id))
5271 9 : return false;
5272 17183 : types->addFreeze(cx);
5273 :
5274 : /* If we don't know this is an object, we will need a test. */
5275 17183 : *testObject = (type != JSVAL_TYPE_OBJECT) && !top->isTypeKnown();
5276 17183 : return true;
5277 : }
5278 : }
5279 1574 : return false;
5280 :
5281 : default:
5282 0 : return false;
5283 : }
5284 :
5285 : JSObject *proto;
5286 8679 : if (!js_GetClassPrototype(cx, globalObj, key, &proto, NULL))
5287 0 : return NULL;
5288 :
5289 8679 : return testSingletonProperty(proto, id);
5290 : }
5291 :
bool
mjit::Compiler::jsop_getprop_dispatch(PropertyName *name)
{
    /*
     * Check for a CALLPROP which is a dynamic dispatch: every value it can
     * push is a singleton, and the pushed value is determined by the type of
     * the object being accessed. Return true if the CALLPROP has been fully
     * processed, false if no code was generated.
     */
    FrameEntry *top = frame.peek(-1);
    if (top->isNotType(JSVAL_TYPE_OBJECT))
        return false;

    /* Only ids which are their own type id can be tracked by inference. */
    jsid id = ATOM_TO_JSID(name);
    if (id != types::MakeTypeId(cx, id))
        return false;

    /* The pushed set must contain only objects (no primitive flags). */
    types::TypeSet *pushedTypes = pushedTypeSet(0);
    if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
        return false;

    /* Check every pushed value is a singleton. */
    for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
        if (pushedTypes->getTypeObject(i) != NULL)
            return false;
    }

    types::TypeSet *objTypes = analysis->poppedTypes(PC, 0);
    if (objTypes->unknownObject() || objTypes->getObjectCount() == 0)
        return false;

    /* Recompile if the pushed set changes; the dispatch table is baked in. */
    pushedTypes->addFreeze(cx);

    /* Map each type in the object to the resulting pushed value. */
    Vector<JSObject *> results(CompilerAllocPolicy(cx, *this));

    /*
     * For each type of the base object, check it has no 'own' property for the
     * accessed id and that its prototype does have such a property.
     */
    uint32_t last = 0;
    for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
        /* Singleton base objects are not handled by this dispatch. */
        if (objTypes->getSingleObject(i) != NULL)
            return false;
        types::TypeObject *object = objTypes->getTypeObject(i);
        if (!object) {
            /* Empty entry; keep results[] index-aligned with objTypes. */
            results.append((JSObject *) NULL);
            continue;
        }
        if (object->unknownProperties() || !object->proto)
            return false;
        types::TypeSet *ownTypes = object->getProperty(cx, id, false);
        if (ownTypes->isOwnProperty(cx, object, false))
            return false;

        if (!testSingletonProperty(object->proto, id))
            return false;

        if (object->proto->getType(cx)->unknownProperties())
            return false;
        types::TypeSet *protoTypes = object->proto->type()->getProperty(cx, id, false);
        if (!protoTypes)
            return false;
        JSObject *singleton = protoTypes->getSingleton(cx);
        if (!singleton)
            return false;

        results.append(singleton);
        /* Remember the last live entry so its guard can be elided below. */
        last = i;
    }

    if (oomInVector)
        return false;

    /* Recompile if the base object's type set changes. */
    objTypes->addFreeze(cx);

    /* Done filtering, now generate code which dispatches on the type. */

    frame.forgetMismatchedObject(top);

    /* Runtime object check if inference could not prove it. */
    if (!top->isType(JSVAL_TYPE_OBJECT)) {
        Jump notObject = frame.testObject(Assembler::NotEqual, top);
        stubcc.linkExit(notObject, Uses(1));
    }

    RegisterID reg = frame.tempRegForData(top);
    frame.pinReg(reg);
    RegisterID pushreg = frame.allocReg();
    frame.unpinReg(reg);

    Address typeAddress(reg, JSObject::offsetOfType());

    Vector<Jump> rejoins(CompilerAllocPolicy(cx, *this));
    MaybeJump lastMiss;

    /*
     * Emit a compare-and-branch chain over the base object's type. Each
     * entry's miss jump is linked to the next entry's test; the final entry
     * falls through without a guard (the frozen type sets guarantee it).
     */
    for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
        types::TypeObject *object = objTypes->getTypeObject(i);
        if (!object) {
            JS_ASSERT(results[i] == NULL);
            continue;
        }
        if (lastMiss.isSet())
            lastMiss.get().linkTo(masm.label(), &masm);

        /*
         * Check that the pushed result is actually in the known pushed types
         * for the bytecode; this bytecode may have type barriers. Redirect to
         * the stub to update said pushed types.
         */
        if (!pushedTypes->hasType(types::Type::ObjectType(results[i]))) {
            JS_ASSERT(hasTypeBarriers(PC));
            if (i == last) {
                stubcc.linkExit(masm.jump(), Uses(1));
                break;
            } else {
                lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
                stubcc.linkExit(masm.jump(), Uses(1));
                continue;
            }
        }

        if (i == last) {
            /* Last entry: no type guard needed, just load the result. */
            masm.move(ImmPtr(results[i]), pushreg);
            break;
        } else {
            lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
            masm.move(ImmPtr(results[i]), pushreg);
            rejoins.append(masm.jump());
        }
    }

    /* All successful dispatch arms rejoin here. */
    for (unsigned i = 0; i < rejoins.length(); i++)
        rejoins[i].linkTo(masm.label(), &masm);

    /* OOL fallback: do the full property lookup via a stub call. */
    stubcc.leave();
    stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
    OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, -1);

    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pushreg);

    if (script->scriptCounts)
        bumpPropCount(PC, PCCounts::PROP_DEFINITE);

    stubcc.rejoin(Changes(2));
    return true;
}
5440 :
/*
 * Compile a property set (SETPROP/SETNAME and friends). Tries, in order:
 * a direct slot store for non-reentrant outer-function name accesses, a
 * direct inline-slot store for objects with a definite property, and
 * finally a SET PIC with an OOL stub fallback. 'popGuaranteed' indicates
 * the pushed RHS will immediately be popped, allowing a cheaper store.
 */
bool
mjit::Compiler::jsop_setprop(PropertyName *name, bool popGuaranteed)
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    /* If the incoming type will never PIC, take slow path. */
    if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
        jsop_setprop_slow(name);
        return true;
    }

    /*
     * If this is a SETNAME to a variable of a non-reentrant outer function,
     * set the variable's slot directly for the active call object.
     */
    if (cx->typeInferenceEnabled() && js_CodeSpec[*PC].format & JOF_NAME) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            /* Use a SavedReg so it isn't clobbered by the stub call. */
            RegisterID nameReg = frame.allocReg(Registers::SavedRegs).reg();
            Address address = frame.loadNameAddress(access, nameReg);

#ifdef JSGC_INCREMENTAL_MJ
            /* Write barrier. */
            if (cx->compartment->needsBarrier()) {
                stubcc.linkExit(masm.jump(), Uses(0));
                stubcc.leave();

                /* sync() may have overwritten nameReg, so we reload its data. */
                JS_ASSERT(address.base == nameReg);
                stubcc.masm.move(ImmPtr(access.basePointer()), nameReg);
                stubcc.masm.loadPtr(Address(nameReg), nameReg);
                stubcc.masm.addPtr(Imm32(address.offset), nameReg, Registers::ArgReg1);

                OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif

            frame.storeTo(rhs, address, popGuaranteed);
            frame.shimmy(1);
            frame.freeReg(address.base);
            return true;
        }
    }

    /*
     * Set the property directly if we are accessing a known object which
     * always has the property in a particular inline slot.
     */
    jsid id = ATOM_TO_JSID(name);
    types::TypeSet *types = frame.extra(lhs).types;
    if (JSOp(*PC) == JSOP_SETPROP && id == types::MakeTypeId(cx, id) &&
        types && !types->unknownObject() &&
        types->getObjectCount() == 1 &&
        types->getTypeObject(0) != NULL &&
        !types->getTypeObject(0)->unknownProperties()) {
        types::TypeObject *object = types->getTypeObject(0);
        types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
        if (!propertyTypes)
            return false;
        if (propertyTypes->isDefiniteProperty() &&
            !propertyTypes->isOwnProperty(cx, object, true)) {
            /* Recompile if the LHS type set gains new members. */
            types->addFreeze(cx);
            uint32_t slot = propertyTypes->definiteSlot();
            RegisterID reg = frame.tempRegForData(lhs);
            bool isObject = lhs->isTypeKnown();
            MaybeJump notObject;
            if (!isObject)
                notObject = frame.testObject(Assembler::NotEqual, lhs);
#ifdef JSGC_INCREMENTAL_MJ
            /* Pin reg: the OOL barrier path below reuses it. */
            frame.pinReg(reg);
            if (cx->compartment->needsBarrier() && propertyTypes->needsBarrier(cx)) {
                /* Write barrier. */
                Jump j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
                stubcc.linkExit(j, Uses(0));
                stubcc.leave();
                stubcc.masm.addPtr(Imm32(JSObject::getFixedSlotOffset(slot)),
                                   reg, Registers::ArgReg1);
                OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
            frame.unpinReg(reg);
#endif
            /* OOL path for a non-object LHS: fall back to the SetName stub. */
            if (!isObject) {
                stubcc.linkExit(notObject.get(), Uses(2));
                stubcc.leave();
                stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
                OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
            }
            frame.storeTo(rhs, Address(reg, JSObject::getFixedSlotOffset(slot)), popGuaranteed);
            frame.shimmy(1);
            if (!isObject)
                stubcc.rejoin(Changes(1));
            if (script->scriptCounts)
                bumpPropCount(PC, PCCounts::PROP_DEFINITE);
            return true;
        }
    }

    if (script->scriptCounts)
        bumpPropCount(PC, PCCounts::PROP_OTHER);

    JSOp op = JSOp(*PC);

#ifdef JSGC_INCREMENTAL_MJ
    /* Write barrier. We don't have type information for JSOP_SETNAME. */
    if (cx->compartment->needsBarrier() &&
        (!types || op == JSOP_SETNAME || types->propertyNeedsBarrier(cx, id)))
    {
        jsop_setprop_slow(name);
        return true;
    }
#endif

    PICGenInfo pic(ic::PICInfo::SET, op);
    pic.name = name;

    /* Monitored writes must record the RHS types for the IC to propagate. */
    if (monitored(PC)) {
        pic.typeMonitored = true;
        types::TypeSet *types = frame.extra(rhs).types;
        if (!types) {
            /* Handle FORNAME and other compound opcodes. Yuck. */
            types = types::TypeSet::make(cx, "unknownRHS");
            if (!types)
                return false;
            types->addType(cx, types::Type::UnknownType());
        }
        pic.rhsTypes = types;
    } else {
        pic.typeMonitored = false;
        pic.rhsTypes = NULL;
    }

    RESERVE_IC_SPACE(masm);
    RESERVE_OOL_SPACE(stubcc.masm);

    /* Guard that the type is an object. */
    Jump typeCheck;
    if (!lhs->isTypeKnown()) {
        RegisterID reg = frame.tempRegForType(lhs);
        pic.typeReg = reg;

        /* Start the hot path where it's easy to patch it. */
        pic.fastPathStart = masm.label();
        Jump j = masm.testObject(Assembler::NotEqual, reg);

        pic.typeCheck = stubcc.linkExit(j, Uses(2));
        stubcc.leave();

        stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
        OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);

        typeCheck = stubcc.masm.jump();
        pic.hasTypeCheck = true;
    } else {
        pic.fastPathStart = masm.label();
        pic.hasTypeCheck = false;
        pic.typeReg = Registers::ReturnReg;
    }

    frame.forgetMismatchedObject(lhs);

    /* Get the object into a mutable register. */
    RegisterID objReg = frame.copyDataIntoReg(lhs);
    pic.objReg = objReg;

    /* Get info about the RHS and pin it. */
    ValueRemat vr;
    frame.pinEntry(rhs, vr);
    pic.vr = vr;

    RegisterID shapeReg = frame.allocReg();
    pic.shapeReg = shapeReg;

    frame.unpinEntry(vr);

    /* Guard on shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();
    DataLabelPtr inlineShapeData;
    Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
                                     inlineShapeData, ImmPtr(NULL));
    Label afterInlineShapeJump = masm.label();

    /* Slow path. */
    {
        pic.slowPathStart = stubcc.linkExit(j, Uses(2));

        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::SetProp, REJOIN_FALLTHROUGH);
        CHECK_OOL_SPACE();
    }

    /* Load dslots. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
                                                       objReg);

    /* Store RHS into object slot. The 1 << 24 offset is a patched placeholder. */
    Address slot(objReg, 1 << 24);
    DataLabel32 inlineValueStore = masm.storeValueWithAddressOffsetPatch(vr, slot);
    pic.fastPathRejoin = masm.label();

    frame.freeReg(objReg);
    frame.freeReg(shapeReg);

    /* "Pop under", taking out object (LHS) and leaving RHS. */
    frame.shimmy(1);

    /* Finish slow path. */
    {
        if (pic.hasTypeCheck)
            typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
        stubcc.rejoin(Changes(1));
    }

    RETURN_IF_OOM(false);

    /* Record the patchable offsets so the IC can rewrite the fast path. */
    SetPropLabels &labels = pic.setPropLabels();
    labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData);
    labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
    labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore);
    labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump);

    pics.append(pic);
    return true;
}
5671 :
/*
 * Compile a NAME access. Either loads the slot directly for non-reentrant
 * outer-function variables, or emits a NAME PIC (no inline path; the IC
 * patches in a stub) pushing a value of the given expected 'type'.
 */
void
mjit::Compiler::jsop_name(PropertyName *name, JSValueType type)
{
    /*
     * If this is a NAME for a variable of a non-reentrant outer function, get
     * the variable's slot directly for the active call object. We always need
     * to check for undefined, however.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            Address address = frame.loadNameAddress(access);
            /* NOTE(review): this local shadows the 'type' parameter — presumably
             * intentional (use the inferred pushed type here); confirm. */
            JSValueType type = knownPushedType(0);
            BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
                                                           /* testUndefined = */ true);
            finishBarrier(barrier, REJOIN_GETTER, 0);
            return;
        }
    }

    PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC));

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::Name, REJOIN_GETTER);
        CHECK_OOL_SPACE();
        testPushedType(REJOIN_GETTER, 0);
    }
    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJump(masm, pic.fastPathStart, inlineJump);

    CHECK_IC_SPACE();

    /*
     * We can't optimize away the PIC for the NAME access itself, but if we've
     * only seen a single value pushed by this access, mark it as such and
     * recompile if a different value becomes possible.
     */
    JSObject *singleton = pushedSingleton(0);
    if (singleton) {
        frame.push(ObjectValue(*singleton));
        frame.freeReg(pic.shapeReg);
        frame.freeReg(pic.objReg);
    } else {
        frame.pushRegs(pic.shapeReg, pic.objReg, type);
    }
    BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);

    stubcc.rejoin(Changes(1));

    pics.append(pic);

    finishBarrier(barrier, REJOIN_GETTER, 0);
}
5745 :
/*
 * Compile a GETXPROP access (property read on the object pushed by a name
 * lookup). Uses an XNAME PIC; like jsop_name, there is no inline fast path.
 */
bool
mjit::Compiler::jsop_xname(PropertyName *name)
{
    /*
     * If this is a GETXPROP for a variable of a non-reentrant outer function,
     * treat in the same way as a NAME.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            frame.pop();
            Address address = frame.loadNameAddress(access);
            JSValueType type = knownPushedType(0);
            BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
                                                           /* testUndefined = */ true);
            finishBarrier(barrier, REJOIN_GETTER, 0);
            return true;
        }
    }

    PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC));

    /* A known-non-object base cannot use the XNAME PIC; compile as GETPROP. */
    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
        return jsop_getprop(name, knownPushedType(0));
    }

    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    frame.forgetMismatchedObject(fe);

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.copyDataIntoReg(fe);
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(1));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::XName, REJOIN_GETTER);
        CHECK_OOL_SPACE();
        testPushedType(REJOIN_GETTER, -1);
    }

    pic.fastPathRejoin = masm.label();

    RETURN_IF_OOM(false);

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJumpOffset(masm.differenceBetween(pic.fastPathStart, inlineJump));

    CHECK_IC_SPACE();

    frame.pop();
    frame.pushRegs(pic.shapeReg, pic.objReg, knownPushedType(0));

    BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);

    stubcc.rejoin(Changes(1));

    pics.append(pic);

    /* NOTE(review): barrier finishes with REJOIN_FALLTHROUGH while the slow
     * path above uses REJOIN_GETTER — looks intentional but worth confirming. */
    finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
    return true;
}
5825 :
/*
 * Compile a BINDNAME: push the object on the scope chain that a subsequent
 * SETNAME will store into. Uses a BIND PIC whose fast path succeeds when the
 * scope chain head has no parent (i.e. it is the global object).
 */
void
mjit::Compiler::jsop_bindname(PropertyName *name)
{
    /*
     * If this is a BINDNAME for a variable of a non-reentrant outer function,
     * the object is definitely the outer function's active call object.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            /* Load *(&nesting->activeCall) at runtime; the slot is mutable. */
            RegisterID reg = frame.allocReg();
            JSObject **pobj = &access.nesting->activeCall;
            masm.move(ImmPtr(pobj), reg);
            masm.loadPtr(Address(reg), reg);
            frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
            return;
        }
    }

    PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC));

    // This code does not check the frame flags to see if scopeChain has been
    // set. Rather, it relies on the up-front analysis statically determining
    // whether BINDNAME can be used, which reifies the scope chain at the
    // prologue.
    JS_ASSERT(analysis->usesScopeChain());

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;

    RESERVE_IC_SPACE(masm);
    pic.fastPathStart = masm.label();

    /* Load scopeChain->shape->base->parent for the guard below. */
    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
    masm.loadPtr(Address(pic.objReg, JSObject::offsetOfShape()), pic.shapeReg);
    masm.loadPtr(Address(pic.shapeReg, Shape::offsetOfBase()), pic.shapeReg);
    Address parent(pic.shapeReg, BaseShape::offsetOfParent());

    /* Guard: a non-NULL parent means we must search the chain via the IC. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(NULL));
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::BindName, REJOIN_FALLTHROUGH);
        CHECK_OOL_SPACE();
    }

    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    BindNameLabels &labels = pic.bindNameLabels();
    labels.setInlineJump(masm, pic.shapeGuard, inlineJump);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
    frame.freeReg(pic.shapeReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
}
5892 :
5893 : #else /* !JS_POLYIC */
5894 :
/*
 * Non-JS_POLYIC fallback: compile NAME/CALLNAME as a plain stub call.
 * NOTE(review): this signature takes an extra 'isCall' parameter that the
 * JS_POLYIC variant lacks — possible bitrot in this rarely-built path; verify
 * against the declaration in the header.
 */
void
mjit::Compiler::jsop_name(PropertyName *name, JSValueType type, bool isCall)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(isCall ? stubs::CallName : stubs::Name, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, 0, /* ool = */ false);
    frame.pushSynced(type);
    /* CALLNAME also pushes the implicit |this| value. */
    if (isCall)
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
5905 :
/* Non-JS_POLYIC fallback: GETXPROP compiles as an ordinary GETPROP. */
bool
mjit::Compiler::jsop_xname(PropertyName *name)
{
    return jsop_getprop(name, knownPushedType(0), pushedTypeSet(0));
}
5911 :
/* Non-JS_POLYIC fallback: always take the slow (stub call) property read. */
bool
mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType, types::TypeSet *typeSet,
                             bool typecheck, bool forPrototype)
{
    jsop_getprop_slow(name, forPrototype);
    return true;
}
5919 :
/*
 * Non-JS_POLYIC fallback: always take the slow (stub call) property write.
 * NOTE(review): signature lacks the 'popGuaranteed' parameter of the
 * JS_POLYIC variant — possible bitrot; verify against the declaration.
 */
bool
mjit::Compiler::jsop_setprop(PropertyName *name)
{
    jsop_setprop_slow(name);
    return true;
}
5926 :
/*
 * Non-JS_POLYIC fallback for BINDNAME: fast path succeeds when the scope
 * chain head has no parent; otherwise call the BindName stub out of line.
 */
void
mjit::Compiler::jsop_bindname(PropertyName *name)
{
    RegisterID reg = frame.allocReg();
    Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
    masm.loadPtr(scopeChain, reg);

    Address address(reg, offsetof(JSObject, parent));

    /* Non-NULL parent: not the global, must search the chain in the stub. */
    Jump j = masm.branchPtr(Assembler::NotEqual, address, ImmPtr(0));

    stubcc.linkExit(j, Uses(0));
    stubcc.leave();
    stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
    OOL_STUBCALL(stubs::BindName, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);

    stubcc.rejoin(Changes(1));
}
5947 : #endif
5948 :
/*
 * Compile JSOP_THIS: push the frame's |this| value, wrapping/boxing it to an
 * object first where the language requires it (non-strict function code).
 */
void
mjit::Compiler::jsop_this()
{
    frame.pushThis();

    /*
     * In strict mode code, we don't wrap 'this'.
     * In direct-call eval code, we wrapped 'this' before entering the eval.
     * In global code, 'this' is always an object.
     */
    if (script->function() && !script->strictModeCode) {
        FrameEntry *thisFe = frame.peek(-1);

        if (!thisFe->isType(JSVAL_TYPE_OBJECT)) {
            /*
             * Watch out for an obscure case where we don't know we are pushing
             * an object: the script has not yet had a 'this' value assigned,
             * so no pushed 'this' type has been inferred. Don't mark the type
             * as known in this case, preserving the invariant that compiler
             * types reflect inferred types.
             */
            if (cx->typeInferenceEnabled() && knownPushedType(0) != JSVAL_TYPE_OBJECT) {
                prepareStubCall(Uses(1));
                INLINE_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                return;
            }

            /* Without inference, assume nothing about the |this| type. */
            JSValueType type = cx->typeInferenceEnabled()
                               ? types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx)
                               : JSVAL_TYPE_UNKNOWN;
            if (type != JSVAL_TYPE_OBJECT) {
                /* OOL path: wrap a primitive |this| via the This stub. */
                Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
                stubcc.linkExit(notObj, Uses(1));
                stubcc.leave();
                OOL_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                stubcc.rejoin(Changes(1));
            }

            // Now we know that |this| is an object.
            frame.pop();
            frame.learnThisIsObject(type != JSVAL_TYPE_OBJECT);
            frame.pushThis();
        }

        JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
    }
}
5996 :
/*
 * Compile JSOP_ITER. For a plain 'for in' (JSITER_ENUMERATE) over a value
 * that is, or is guarded to be, an object, emit an inline fast path that
 * tries to reuse the compartment's most recently used native iterator.
 * Every guard failure, and all other iteration kinds, fall back to
 * stubs::Iter out of line.
 */
bool
mjit::Compiler::iter(unsigned flags)
{
    FrameEntry *fe = frame.peek(-1);

    /*
     * Stub the call if this is not a simple 'for in' loop or if the iterated
     * value is known to not be an object.
     */
    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(1));
        masm.move(Imm32(flags), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);
        frame.pop();
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
        return true;
    }

    /* Guard that the value is an object when its type is not statically known. */
    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    frame.forgetMismatchedObject(fe);

    RegisterID reg = frame.tempRegForData(fe);

    frame.pinReg(reg);
    RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
    RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
    RegisterID T1 = frame.allocReg();
    RegisterID T2 = frame.allocReg();
    frame.unpinReg(reg);

    /* Fetch the most recent iterator. */
    masm.loadPtr(&script->compartment()->nativeIterCache.last, ioreg);

    /* Test for NULL. */
    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
    stubcc.linkExit(nullIterator, Uses(1));

    /* Get NativeIterator from iter obj. */
    masm.loadObjPrivate(ioreg, nireg, JSObject::ITER_CLASS_NFIXED_SLOTS);

    /* Test for active iterator: an in-use or unreusable iterator cannot be recycled. */
    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
    masm.load32(flagsAddr, T1);
    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1,
                                            Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE));
    stubcc.linkExit(activeIterator, Uses(1));

    /* Compare shape of object with iterator (shapes_array[0] holds the object's shape). */
    masm.loadShape(reg, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, 0), T2);
    Jump mismatchedObject = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedObject, Uses(1));

    /* Compare shape of object's prototype with iterator (shapes_array[1]). */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadShape(T1, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, sizeof(Shape *)), T2);
    Jump mismatchedProto = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedProto, Uses(1));

    /*
     * Compare object's prototype's prototype with NULL. The last native
     * iterator will always have a prototype chain length of one
     * (i.e. it must be a plain object), so we do not need to generate
     * a loop here.
     */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadPtr(Address(T1, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    /*
     * Branch if the grand-prototype is non-NULL. NOTE(review): the usual
     * self-test idiom elsewhere in this file is branchTestPtr (cf.
     * jsop_instanceof); confirm branchPtr accepts a result condition here.
     */
    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
    stubcc.linkExit(overlongChain, Uses(1));

#ifdef JSGC_INCREMENTAL_MJ
    /*
     * Write barrier for stores to the iterator. We only need to take a write
     * barrier if NativeIterator::obj is actually going to change.
     */
    if (cx->compartment->needsBarrier()) {
        Jump j = masm.branchPtr(Assembler::NotEqual,
                                Address(nireg, offsetof(NativeIterator, obj)), reg);
        stubcc.linkExit(j, Uses(1));
    }
#endif

    /* Found a match with the most recent iterator. Hooray! */

    /* Mark iterator as active. */
    masm.storePtr(reg, Address(nireg, offsetof(NativeIterator, obj)));
    masm.load32(flagsAddr, T1);
    masm.or32(Imm32(JSITER_ACTIVE), T1);
    masm.store32(T1, flagsAddr);

    /* Chain onto the active iterator stack (cx->enumerators). */
    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));

    frame.freeReg(nireg);
    frame.freeReg(T1);
    frame.freeReg(T2);

    /* Out-of-line path: full stubs::Iter call. */
    stubcc.leave();
    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);

    /* Push the iterator object. */
    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);

    stubcc.rejoin(Changes(1));

    return true;
}
6119 :
6120 : /*
6121 : * This big nasty function implements JSOP_ITERNEXT, which is used in the head
6122 : * of a for-in loop to put the next value on the stack.
6123 : */
6124 : void
6125 4416 : mjit::Compiler::iterNext(ptrdiff_t offset)
6126 : {
6127 4416 : FrameEntry *fe = frame.peek(-offset);
6128 4416 : RegisterID reg = frame.tempRegForData(fe);
6129 :
6130 : /* Is it worth trying to pin this longer? Prolly not. */
6131 4416 : frame.pinReg(reg);
6132 4416 : RegisterID T1 = frame.allocReg();
6133 4416 : frame.unpinReg(reg);
6134 :
6135 : /* Test clasp */
6136 4416 : Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
6137 4416 : stubcc.linkExit(notFast, Uses(1));
6138 :
6139 : /* Get private from iter obj. */
6140 4416 : masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);
6141 :
6142 4416 : RegisterID T3 = frame.allocReg();
6143 4416 : RegisterID T4 = frame.allocReg();
6144 :
6145 : /* Test for a value iterator, which could come through an Iterator object. */
6146 4416 : masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
6147 4416 : notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
6148 4416 : stubcc.linkExit(notFast, Uses(1));
6149 :
6150 4416 : RegisterID T2 = frame.allocReg();
6151 :
6152 : /* Get cursor. */
6153 4416 : masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
6154 :
6155 : /* Get the next string in the iterator. */
6156 4416 : masm.loadPtr(T2, T3);
6157 :
6158 : /* It's safe to increase the cursor now. */
6159 4416 : masm.addPtr(Imm32(sizeof(JSString*)), T2, T4);
6160 4416 : masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));
6161 :
6162 4416 : frame.freeReg(T4);
6163 4416 : frame.freeReg(T1);
6164 4416 : frame.freeReg(T2);
6165 :
6166 4416 : stubcc.leave();
6167 4416 : stubcc.masm.move(Imm32(offset), Registers::ArgReg1);
6168 4416 : OOL_STUBCALL(stubs::IterNext, REJOIN_FALLTHROUGH);
6169 :
6170 4416 : frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);
6171 :
6172 : /* Join with the stub call. */
6173 4416 : stubcc.rejoin(Changes(1));
6174 4416 : }
6175 :
6176 : bool
6177 4069 : mjit::Compiler::iterMore(jsbytecode *target)
6178 : {
6179 4069 : if (!frame.syncForBranch(target, Uses(1)))
6180 0 : return false;
6181 :
6182 4069 : FrameEntry *fe = frame.peek(-1);
6183 4069 : RegisterID reg = frame.tempRegForData(fe);
6184 4069 : RegisterID tempreg = frame.allocReg();
6185 :
6186 : /* Test clasp */
6187 4069 : Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, tempreg, &IteratorClass);
6188 4069 : stubcc.linkExitForBranch(notFast);
6189 :
6190 : /* Get private from iter obj. */
6191 4069 : masm.loadObjPrivate(reg, reg, JSObject::ITER_CLASS_NFIXED_SLOTS);
6192 :
6193 : /* Test that the iterator supports fast iteration. */
6194 : notFast = masm.branchTest32(Assembler::NonZero, Address(reg, offsetof(NativeIterator, flags)),
6195 4069 : Imm32(JSITER_FOREACH));
6196 4069 : stubcc.linkExitForBranch(notFast);
6197 :
6198 : /* Get props_cursor, test */
6199 4069 : masm.loadPtr(Address(reg, offsetof(NativeIterator, props_cursor)), tempreg);
6200 4069 : masm.loadPtr(Address(reg, offsetof(NativeIterator, props_end)), reg);
6201 :
6202 4069 : Jump jFast = masm.branchPtr(Assembler::LessThan, tempreg, reg);
6203 :
6204 4069 : stubcc.leave();
6205 4069 : OOL_STUBCALL(stubs::IterMore, REJOIN_BRANCH);
6206 : Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
6207 4069 : Registers::ReturnReg);
6208 :
6209 4069 : stubcc.rejoin(Changes(1));
6210 4069 : frame.freeReg(tempreg);
6211 :
6212 4069 : return jumpAndRun(jFast, target, &j);
6213 : }
6214 :
6215 : void
6216 4175 : mjit::Compiler::iterEnd()
6217 : {
6218 4175 : FrameEntry *fe= frame.peek(-1);
6219 4175 : RegisterID reg = frame.tempRegForData(fe);
6220 :
6221 4175 : frame.pinReg(reg);
6222 4175 : RegisterID T1 = frame.allocReg();
6223 4175 : frame.unpinReg(reg);
6224 :
6225 : /* Test clasp */
6226 4175 : Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
6227 4175 : stubcc.linkExit(notIterator, Uses(1));
6228 :
6229 : /* Get private from iter obj. */
6230 4175 : masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);
6231 :
6232 4175 : RegisterID T2 = frame.allocReg();
6233 :
6234 : /* Load flags. */
6235 4175 : Address flagAddr(T1, offsetof(NativeIterator, flags));
6236 4175 : masm.loadPtr(flagAddr, T2);
6237 :
6238 : /* Test for a normal enumerate iterator. */
6239 4175 : Jump notEnumerate = masm.branchTest32(Assembler::Zero, T2, Imm32(JSITER_ENUMERATE));
6240 4175 : stubcc.linkExit(notEnumerate, Uses(1));
6241 :
6242 : /* Clear active bit. */
6243 4175 : masm.and32(Imm32(~JSITER_ACTIVE), T2);
6244 4175 : masm.storePtr(T2, flagAddr);
6245 :
6246 : /* Reset property cursor. */
6247 4175 : masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
6248 4175 : masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
6249 :
6250 : /* Advance enumerators list. */
6251 4175 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
6252 4175 : masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
6253 4175 : masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
6254 :
6255 4175 : frame.freeReg(T1);
6256 4175 : frame.freeReg(T2);
6257 :
6258 4175 : stubcc.leave();
6259 4175 : OOL_STUBCALL(stubs::EndIter, REJOIN_FALLTHROUGH);
6260 :
6261 4175 : frame.pop();
6262 :
6263 4175 : stubcc.rejoin(Changes(1));
6264 4175 : }
6265 :
/*
 * Fully stubbed path for a global-name read: call stubs::Name and push an
 * unknown-typed, synced result after recording a type check for it.
 */
void
mjit::Compiler::jsop_getgname_slow(uint32_t index)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Name, REJOIN_GETTER);
    testPushedType(REJOIN_GETTER, 0, /* ool = */ false);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6274 :
/*
 * Compile JSOP_BINDGNAME. With a known global object the binding is a
 * compile-time constant push; otherwise fall back to the BindGlobalName
 * stub, which returns the global object in ReturnReg.
 */
void
mjit::Compiler::jsop_bindgname()
{
    if (globalObj) {
        frame.push(ObjectValue(*globalObj));
        return;
    }

    /* :TODO: this is slower than it needs to be. */
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::BindGlobalName, REJOIN_NONE);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
}
6289 :
/*
 * Compile a global-name read (JSOP_GETGNAME and friends). Tries, in order:
 * constant-fold undefined/NaN/Infinity; push a known singleton object;
 * with type inference, read a defined global data property through a
 * baked-in slot address; finally fall back to a GetGlobalName IC keyed on
 * the global object's shape.
 */
void
mjit::Compiler::jsop_getgname(uint32_t index)
{
    /* Optimize undefined, NaN and Infinity. */
    PropertyName *name = script->getName(index);
    if (name == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
        frame.push(UndefinedValue());
        return;
    }
    if (name == cx->runtime->atomState.NaNAtom) {
        frame.push(cx->runtime->NaNValue);
        return;
    }
    if (name == cx->runtime->atomState.InfinityAtom) {
        frame.push(cx->runtime->positiveInfinityValue);
        return;
    }

    /* Optimize singletons like Math for JSOP_CALLPROP. */
    JSObject *obj = pushedSingleton(0);
    if (obj && !hasTypeBarriers(PC) && testSingletonProperty(globalObj, ATOM_TO_JSID(name))) {
        frame.push(ObjectValue(*obj));
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    JSValueType type = knownPushedType(0);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        types::TypeSet *propertyTypes = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!propertyTypes)
            return;

        /*
         * If we are accessing a defined global which is a normal data property
         * then bake its address into the jitcode and guard against future
         * reallocation of the global object's slots.
         */
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && shape->hasDefaultGetter() && shape->hasSlot()) {
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            if (!value->isUndefined() &&
                !propertyTypes->isOwnProperty(cx, globalObj->getType(cx), true)) {
                watchGlobalReallocation();
                RegisterID reg = frame.allocReg();
                masm.move(ImmPtr(value), reg);

                BarrierState barrier = pushAddressMaybeBarrier(Address(reg), type, true);
                finishBarrier(barrier, REJOIN_GETTER, 0);
                return;
            }
        }
    }

#if defined JS_MONOIC
    jsop_bindgname();

    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);

    GetGlobalNameICInfo ic;
    RESERVE_IC_SPACE(masm);
    RegisterID objReg;
    Jump shapeGuard;

    /* Guard on the global's shape; the IC patches ic.shape when it resolves. */
    ic.fastPathStart = masm.label();
    if (fe->isConstant()) {
        JSObject *obj = &fe->getValue().toObject();
        frame.pop();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        masm.loadPtrFromImm(obj->addressOfShape(), objReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, objReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), objReg);
    } else {
        objReg = frame.ownRegForData(fe);
        frame.pop();
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, reg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(0));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::GetGlobalName, REJOIN_GETTER);

    CHECK_IC_SPACE();

    testPushedType(REJOIN_GETTER, 0);

    /* Garbage value; the real slot offset is patched in when the IC resolves. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg);
    Address address(objReg, slot);

    /* Allocate any register other than objReg. */
    RegisterID treg = frame.allocReg();
    /* After dreg is loaded, it's safe to clobber objReg. */
    RegisterID dreg = objReg;

    ic.load = masm.loadValueWithAddressOffsetPatch(address, treg, dreg);

    frame.pushRegs(treg, dreg, type);

    /*
     * Note: no undefined check is needed for GNAME opcodes. These were not
     * declared with 'var', so cannot be undefined without triggering an error
     * or having been a pre-existing global whose value is undefined (which
     * type inference will know about).
     */
    BarrierState barrier = testBarrier(treg, dreg);

    stubcc.rejoin(Changes(1));

    getGlobalNames.append(ic);
    finishBarrier(barrier, REJOIN_GETTER, 0);
#else
    jsop_getgname_slow(index);
#endif

}
6419 :
/*
 * Fully stubbed global-name write: pops the object and value and pushes
 * the synced result of STRICT_VARIANT(stubs::SetGlobalName).
 */
void
mjit::Compiler::jsop_setgname_slow(PropertyName *name)
{
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(name), Registers::ArgReg1);
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalName), REJOIN_FALLTHROUGH);
    frame.popn(2);
    pushSyncedEntry(0);
}
6429 :
/*
 * Compile a global-name write (JSOP_SETGNAME). Fast paths, in order:
 * (1) with type inference, store directly through a baked-in slot address
 * for a writable default-setter data property; (2) a SetGlobalName IC
 * keyed on the global object's shape. Monitored accesses and compartments
 * needing incremental write barriers go through the slow stub.
 */
void
mjit::Compiler::jsop_setgname(PropertyName *name, bool popGuaranteed)
{
    if (monitored(PC)) {
        /* Global accesses are monitored only for a few names like __proto__. */
        jsop_setgname_slow(name);
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        /*
         * Note: object branding is disabled when inference is enabled. With
         * branding there is no way to ensure that a non-function property
         * can't get a function later and cause the global object to become
         * branded, requiring a shape change if it changes again.
         */
        types::TypeSet *types = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!types)
            return;
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && shape->hasDefaultSetter() &&
            shape->writable() && shape->hasSlot() &&
            !types->isOwnProperty(cx, globalObj->getType(cx), true)) {
            watchGlobalReallocation();
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            RegisterID reg = frame.allocReg();
#ifdef JSGC_INCREMENTAL_MJ
            /* Write barrier, taken out of line on the whole store. */
            if (cx->compartment->needsBarrier() && types->needsBarrier(cx)) {
                stubcc.linkExit(masm.jump(), Uses(0));
                stubcc.leave();
                stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
                OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif
            masm.move(ImmPtr(value), reg);
            frame.storeTo(frame.peek(-1), Address(reg), popGuaranteed);
            frame.shimmy(1);
            frame.freeReg(reg);
            return;
        }
    }

#ifdef JSGC_INCREMENTAL_MJ
    /* Write barrier: the IC path cannot barrier, so take the slow stub. */
    if (cx->compartment->needsBarrier()) {
        jsop_setgname_slow(name);
        return;
    }
#endif

#if defined JS_MONOIC
    FrameEntry *objFe = frame.peek(-2);
    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);

    /*
     * NOTE(review): a known double is forgotten before pinning the entry —
     * presumably the IC's value remat cannot represent it; confirm.
     */
    if (!fe->isConstant() && fe->isType(JSVAL_TYPE_DOUBLE))
        frame.forgetKnownDouble(fe);

    SetGlobalNameICInfo ic;

    frame.pinEntry(fe, ic.vr);
    Jump shapeGuard;

    RESERVE_IC_SPACE(masm);

    /* Guard on the global's shape; the IC patches ic.shape when it resolves. */
    ic.fastPathStart = masm.label();
    if (objFe->isConstant()) {
        JSObject *obj = &objFe->getValue().toObject();
        JS_ASSERT(obj->isNative());

        ic.objReg = frame.allocReg();
        ic.shapeReg = ic.objReg;
        ic.objConst = true;

        masm.loadPtrFromImm(obj->addressOfShape(), ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), ic.objReg);
    } else {
        ic.objReg = frame.copyDataIntoReg(objFe);
        ic.shapeReg = frame.allocReg();
        ic.objConst = false;

        masm.loadShape(ic.objReg, ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(ic.shapeReg);
    }
    ic.shapeGuardJump = shapeGuard;
    ic.slowPathStart = stubcc.linkExit(shapeGuard, Uses(2));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::SetGlobalName, REJOIN_FALLTHROUGH);

    /* Garbage value; the real slot offset is patched in when the IC resolves. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(ic.objReg, JSObject::offsetOfSlots()), ic.objReg);
    Address address(ic.objReg, slot);

    /* Emit the patchable store in whichever form matches the pinned value. */
    if (ic.vr.isConstant()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.value(), address);
    } else if (ic.vr.isTypeKnown()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ImmType(ic.vr.knownType()),
                                                         ic.vr.dataReg(), address);
    } else {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.typeReg(), ic.vr.dataReg(), address);
    }

    frame.freeReg(ic.objReg);
    frame.unpinEntry(ic.vr);
    frame.shimmy(1);

    stubcc.rejoin(Changes(1));

    ic.fastPathRejoin = masm.label();
    setGlobalNames.append(ic);
#else
    jsop_setgname_slow(name);
#endif
}
6556 :
/*
 * Fully stubbed JSOP_SETELEM: pops object, index and value and pushes the
 * synced, unknown-typed result of STRICT_VARIANT(stubs::SetElem).
 */
void
mjit::Compiler::jsop_setelem_slow()
{
    prepareStubCall(Uses(3));
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem), REJOIN_FALLTHROUGH);
    frame.popn(3);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6565 :
/*
 * Fully stubbed JSOP_GETELEM: pops object and index and pushes the synced
 * result of stubs::GetElem after recording a type check for it.
 */
void
mjit::Compiler::jsop_getelem_slow()
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::GetElem, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, -2, /* ool = */ false);
    frame.popn(2);
    pushSyncedEntry(0);
}
6575 :
/*
 * Compile JSOP_INSTANCEOF. Inline path: verify the RHS is a plain
 * (non-bound) function, fetch its .prototype, then walk the LHS's
 * prototype chain comparing against it. Bound functions and other guard
 * failures are handled by stubs::InstanceOf / stubs::FastInstanceOf.
 */
bool
mjit::Compiler::jsop_instanceof()
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    // The fast path applies only when both operands are objects.
    if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
        stubcc.linkExit(masm.jump(), Uses(2));
        frame.discardFe(lhs);
        frame.discardFe(rhs);
    }

    MaybeJump firstSlow;
    if (!rhs->isTypeKnown()) {
        Jump j = frame.testObject(Assembler::NotEqual, rhs);
        stubcc.linkExit(j, Uses(2));
    }

    frame.forgetMismatchedObject(lhs);
    frame.forgetMismatchedObject(rhs);

    RegisterID tmp = frame.allocReg();
    RegisterID obj = frame.tempRegForData(rhs);

    /* The RHS must be a function object for the inline walk to be valid. */
    masm.loadBaseShape(obj, tmp);
    Jump notFunction = masm.branchPtr(Assembler::NotEqual,
                                      Address(tmp, BaseShape::offsetOfClass()),
                                      ImmPtr(&FunctionClass));

    stubcc.linkExit(notFunction, Uses(2));

    /* Test for bound functions. */
    Jump isBound = masm.branchTest32(Assembler::NonZero,
                                     Address(tmp, BaseShape::offsetOfFlags()),
                                     Imm32(BaseShape::BOUND_FUNCTION));
    {
        stubcc.linkExit(isBound, Uses(2));
        stubcc.leave();
        OOL_STUBCALL(stubs::InstanceOf, REJOIN_FALLTHROUGH);
        firstSlow = stubcc.masm.jump();
    }

    frame.freeReg(tmp);

    /* This is sadly necessary because the error case needs the object. */
    frame.dup();

    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN))
        return false;

    /* Primitive prototypes are invalid. */
    rhs = frame.peek(-1);
    Jump j = frame.testPrimitive(Assembler::Equal, rhs);
    stubcc.linkExit(j, Uses(3));

    /* Allocate registers up front, because of branchiness. */
    obj = frame.copyDataIntoReg(lhs);
    RegisterID proto = frame.copyDataIntoReg(rhs);
    RegisterID temp = frame.allocReg();

    MaybeJump isFalse;
    if (!lhs->isTypeKnown())
        isFalse = frame.testPrimitive(Assembler::Equal, lhs);

    Label loop = masm.label();

    /* Walk prototype chain, break out on NULL or hit. */
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
    masm.loadPtr(Address(obj, offsetof(types::TypeObject, proto)), obj);
    Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
    /* NotEqual loops back; falling through means obj == proto, i.e. a hit. */
    Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
    isTrue.linkTo(loop, &masm);
    masm.move(Imm32(1), temp);
    isTrue = masm.jump();

    /* False exits (primitive LHS, end of chain) land here with temp = 0. */
    if (isFalse.isSet())
        isFalse.getJump().linkTo(masm.label(), &masm);
    isFalse2.linkTo(masm.label(), &masm);
    masm.move(Imm32(0), temp);
    isTrue.linkTo(masm.label(), &masm);

    frame.freeReg(proto);
    frame.freeReg(obj);

    stubcc.leave();
    OOL_STUBCALL(stubs::FastInstanceOf, REJOIN_FALLTHROUGH);

    frame.popn(3);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);

    if (firstSlow.isSet())
        firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
    stubcc.rejoin(Changes(1));
    return true;
}
6672 :
/*
 * Compile a call to eval (JSOP_EVAL). All registers are synced and killed
 * first, since eval can read and write anywhere in the frame; the stub's
 * result is pushed synced.
 */
void
mjit::Compiler::emitEval(uint32_t argc)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    frame.syncAndKill(Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::Eval, REJOIN_FALLTHROUGH);
    frame.popn(argc + 2);
    pushSyncedEntry(0);
}
6686 :
/*
 * Call the Arguments stub for JSOP_ARGUMENTS. Nothing is pushed here;
 * presumably the caller pushes the stub's result — confirm at call sites.
 * The rejoin state varies by caller, hence the parameter.
 */
void
mjit::Compiler::jsop_arguments(RejoinState rejoin)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Arguments, rejoin);
}
6693 :
/*
 * Compile JSOP_NEWINIT / JSOP_NEWARRAY / JSOP_NEWOBJECT. When type
 * inference supplies a type and the object fits in fixed slots, the
 * allocation is inlined via masm.getNewObject from a template object, with
 * an out-of-line stub fallback; otherwise the NewInitArray/NewInitObject
 * stub is called directly. The type is passed through VMFrame::scratch in
 * both stub paths.
 */
bool
mjit::Compiler::jsop_newinit()
{
    bool isArray;
    unsigned count = 0;
    JSObject *baseobj = NULL;
    switch (*PC) {
      case JSOP_NEWINIT:
        isArray = (GET_UINT8(PC) == JSProto_Array);
        break;
      case JSOP_NEWARRAY:
        isArray = true;
        count = GET_UINT24(PC);
        break;
      case JSOP_NEWOBJECT:
        /*
         * Scripts with NEWOBJECT must be compileAndGo, but treat these like
         * NEWINIT if the script's associated global is not known (or is not
         * actually a global object). This should only happen in chrome code.
         */
        isArray = false;
        baseobj = globalObj ? script->getObject(GET_UINT32_INDEX(PC)) : NULL;
        break;
      default:
        JS_NOT_REACHED("Bad op");
        return false;
    }

    void *stub, *stubArg;
    if (isArray) {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitArray);
        stubArg = (void *) uintptr_t(count);
    } else {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitObject);
        stubArg = (void *) baseobj;
    }

    /*
     * Don't bake in types for non-compileAndGo scripts, or at initializers
     * producing objects with singleton types.
     */
    types::TypeObject *type = NULL;
    if (globalObj && !types::UseNewTypeForInitializer(cx, script, PC)) {
        type = types::TypeScript::InitObject(cx, script, PC,
                                             isArray ? JSProto_Array : JSProto_Object);
        if (!type)
            return false;
    }

    size_t maxArraySlots =
        gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST) - ObjectElements::VALUES_PER_HEADER;

    /* Take the direct stub path whenever inline allocation is not possible. */
    if (!cx->typeInferenceEnabled() ||
        !type ||
        (isArray && count > maxArraySlots) ||
        (!isArray && !baseobj) ||
        (!isArray && baseobj->hasDynamicSlots())) {
        prepareStubCall(Uses(0));
        /* The stub reads the new object's type out of VMFrame::scratch. */
        masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
        masm.move(ImmPtr(stubArg), Registers::ArgReg1);
        INLINE_STUBCALL(stub, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);

        frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
        frame.extra(frame.peek(-1)).initObject = baseobj;

        return true;
    }

    JSObject *templateObject;
    if (isArray)
        templateObject = NewDenseUnallocatedArray(cx, count);
    else
        templateObject = CopyInitializerObject(cx, baseobj);
    if (!templateObject)
        return false;
    templateObject->setType(type);

    /* Inline allocation from the GC free list, with an OOL stub fallback. */
    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
    stubcc.masm.move(ImmPtr(stubArg), Registers::ArgReg1);
    OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));

    frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
    frame.extra(frame.peek(-1)).initObject = baseobj;

    return true;
}
6791 :
/*
 * Compile JSOP_REGEXP. Regexp literals normally evaluate to a fresh clone
 * on each execution. When analysis proves the clone cannot be observed
 * (the value is consumed directly by specific RegExp/String natives) the
 * original object is pushed instead; otherwise, when it is safe to bake
 * pointers into jitcode, the clone is inline-allocated from the literal as
 * a template, with stubs::RegExp as the fallback.
 */
bool
mjit::Compiler::jsop_regexp()
{
    JSObject *obj = script->getRegExp(GET_UINT32_INDEX(PC));
    RegExpStatics *res = globalObj ? globalObj->getRegExpStatics() : NULL;

    /* Stub the call when cloning could be observed or baked-in pointers could go stale. */
    if (!globalObj ||
        &obj->global() != globalObj ||
        !cx->typeInferenceEnabled() ||
        analysis->localsAliasStack() ||
        types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
        cx->runtime->gcIncrementalState == gc::MARK)
    {
        prepareStubCall(Uses(0));
        masm.move(ImmPtr(obj), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);
        return true;
    }

    RegExpObject *reobj = &obj->asRegExp();

    DebugOnly<uint32_t> origFlags = reobj->getFlags();
    DebugOnly<uint32_t> staticsFlags = res->getFlags();
    JS_ASSERT((origFlags & staticsFlags) == staticsFlags);

    /*
     * JS semantics require regular expression literals to create different
     * objects every time they execute. We only need to do this cloning if the
     * script could actually observe the effect of such cloning, by getting
     * or setting properties on it. Particular RegExp and String natives take
     * regular expressions as 'this' or an argument, and do not let that
     * expression escape and be accessed by the script, so avoid cloning in
     * these cases.
     */
    analyze::SSAUseChain *uses =
        analysis->useChain(analyze::SSAValue::PushedValue(PC - script->code, 0));
    if (uses && uses->popped && !uses->next && !reobj->global() && !reobj->sticky()) {
        jsbytecode *use = script->code + uses->offset;
        uint32_t which = uses->u.which;
        if (JSOp(*use) == JSOP_CALLPROP) {
            JSObject *callee = analysis->pushedTypes(use, 0)->getSingleton(cx);
            if (callee && callee->isFunction()) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::regexp_exec || native == js::regexp_test) {
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        } else if (JSOp(*use) == JSOP_CALL && which == 0) {
            uint32_t argc = GET_ARGC(use);
            JSObject *callee = analysis->poppedTypes(use, argc + 1)->getSingleton(cx);
            if (callee && callee->isFunction() && argc >= 1 && which == argc - 1) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::str_match ||
                    native == js::str_search ||
                    native == js::str_replace ||
                    native == js::str_split) {
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        }
    }

    /*
     * Force creation of the RegExpShared in the script's RegExpObject so that
     * we grab it in the getNewObject template copy. Note that JIT code is
     * discarded on every GC, which permits us to burn in the pointer to the
     * RegExpShared. We don't do this during an incremental
     * GC, since we don't discard JIT code after every marking slice.
     */
    RegExpGuard g;
    if (!reobj->getShared(cx, &g))
        return false;

    /* Inline clone from the literal as template, with OOL stub fallback. */
    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, obj);

    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.move(ImmPtr(obj), Registers::ArgReg1);
    OOL_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));
    return true;
}
6883 :
6884 : bool
6885 33573 : mjit::Compiler::startLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget)
6886 : {
6887 33573 : JS_ASSERT(cx->typeInferenceEnabled() && script == outerScript);
6888 33573 : JS_ASSERT(shouldStartLoop(head));
6889 :
6890 33573 : if (loop) {
6891 : /*
6892 : * Convert all loop registers in the outer loop into unassigned registers.
6893 : * We don't keep track of which registers the inner loop uses, so the only
6894 : * registers that can be carried in the outer loop must be mentioned before
6895 : * the inner loop starts.
6896 : */
6897 3756 : loop->clearLoopRegisters();
6898 : }
6899 :
6900 33573 : LoopState *nloop = OffTheBooks::new_<LoopState>(cx, &ssa, this, &frame);
6901 33573 : if (!nloop || !nloop->init(head, entry, entryTarget)) {
6902 0 : js_ReportOutOfMemory(cx);
6903 0 : return false;
6904 : }
6905 :
6906 33573 : nloop->outer = loop;
6907 33573 : loop = nloop;
6908 33573 : frame.setLoop(loop);
6909 :
6910 33573 : return true;
6911 : }
6912 :
/*
 * Finish compilation of the loop headed at |head|, called after the backedge
 * has been compiled. Emits the loop entry code (invariant loads, interpreter
 * rejoin stubs), pops the innermost LoopState and restores the enclosing one.
 * Returns false on OOM.
 */
bool
mjit::Compiler::finishLoop(jsbytecode *head)
{
    if (!cx->typeInferenceEnabled() || !bytecodeInChunk(head))
        return true;

    /*
     * We're done processing the current loop. Every loop has exactly one backedge
     * at the end ('continue' statements are forward jumps to the loop test),
     * and after jumpAndRun'ing on that edge we can pop it from the frame.
     */
    JS_ASSERT(loop && loop->headOffset() == uint32_t(head - script->code));

    jsbytecode *entryTarget = script->code + loop->entryOffset();

    /*
     * Fix up the jump entering the loop. We are doing this after all code has
     * been emitted for the backedge, so that we are now in the loop's fallthrough
     * (where we will emit the entry code).
     */
    Jump fallthrough = masm.jump();

#ifdef DEBUG
    /* Dump the register allocation at the loop head when regalloc spew is on. */
    if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
        RegisterAllocation *alloc = analysis->getAllocation(head);
        JaegerSpew(JSpew_Regalloc, "loop allocation at %u:", unsigned(head - script->code));
        frame.dumpAllocation(alloc);
    }
#endif

    loop->entryJump().linkTo(masm.label(), &masm);

    jsbytecode *oldPC = PC;

    /* Temporarily retarget PC so the stub call below is attributed to the entry. */
    PC = entryTarget;
    {
        OOL_STUBCALL(stubs::MissedBoundsCheckEntry, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            /*
             * To do the initial load of the invariants, jump to the invariant
             * restore point after the call just emitted. :XXX: fix hackiness.
             */
            if (oomInVector)
                return false;
            Label label = callSites[callSites.length() - 1].loopJumpLabel;
            stubcc.linkExitDirect(masm.jump(), label);
        }
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }
    PC = oldPC;

    frame.prepareForJump(entryTarget, masm, true);

    if (!jumpInScript(masm.jump(), entryTarget))
        return false;

    PC = head;
    if (!analysis->getCode(head).safePoint) {
        /*
         * Emit a stub into the OOL path which loads registers from a synced state
         * and jumps to the loop head, for rejoining from the interpreter.
         */
        LoopEntry entry;
        entry.pcOffset = head - script->code;

        OOL_STUBCALL(stubs::MissedBoundsCheckHead, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            if (oomInVector)
                return false;
            entry.label = callSites[callSites.length() - 1].loopJumpLabel;
        } else {
            entry.label = stubcc.masm.label();
        }

        /*
         * The interpreter may store integers in slots we assume are doubles,
         * make sure state is consistent before joining. Note that we don't
         * need any handling for other safe points the interpreter can enter
         * from, i.e. from switch and try blocks, as we don't assume double
         * variables are coherent in such cases.
         */
        for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
            if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                FrameEntry *fe = frame.getSlotEntry(slot);
                stubcc.masm.ensureInMemoryDouble(frame.addressOf(fe));
            }
        }

        frame.prepareForJump(head, stubcc.masm, true);
        if (!stubcc.jumpInScript(stubcc.masm.jump(), head))
            return false;

        loopEntries.append(entry);
    }
    PC = oldPC;

    /* Write out loads and tests of loop invariants at all calls in the loop body. */
    loop->flushLoop(stubcc);

    /* Pop this loop and restore the enclosing loop (if any) as current. */
    LoopState *nloop = loop->outer;
    cx->delete_(loop);
    loop = nloop;
    frame.setLoop(loop);

    fallthrough.linkTo(masm.label(), &masm);

    /*
     * Clear all registers used for loop temporaries. In the case of loop
     * nesting, we do not allocate temporaries for the outer loop.
     */
    frame.clearTemporaries();

    return true;
}
7029 :
7030 : /*
7031 : * The state at the fast jump must reflect the frame's current state. If specified
7032 : * the state at the slow jump must be fully synced.
7033 : *
7034 : * The 'trampoline' argument indicates whether a trampoline was emitted into
7035 : * the OOL path loading some registers for the target. If this is the case,
7036 : * the fast path jump was redirected to the stub code's initial label, and the
7037 : * same must happen for any other fast paths for the target (i.e. paths from
7038 : * inline caches).
7039 : *
7040 : * The 'fallthrough' argument indicates this is a jump emitted for a fallthrough
7041 : * at the end of the compiled chunk. In this case the opcode may not be a
7042 : * JOF_JUMP opcode, and the compiler should not watch for fusions.
7043 : */
bool
mjit::Compiler::jumpAndRun(Jump j, jsbytecode *target, Jump *slow, bool *trampoline,
                           bool fallthrough)
{
    if (trampoline)
        *trampoline = false;

    /* Jumps leaving the current chunk are recorded as edges, not emitted here. */
    if (!a->parent && !bytecodeInChunk(target)) {
        /*
         * syncForBranch() must have ensured the stack is synced. Figure out
         * the source of the jump, which may be the opcode after PC if two ops
         * were fused for a branch.
         */
        OutgoingChunkEdge edge;
        edge.source = PC - outerScript->code;
        JSOp op = JSOp(*PC);
        if (!fallthrough && !(js_CodeSpec[op].format & JOF_JUMP) && op != JSOP_TABLESWITCH)
            edge.source += GetBytecodeLength(PC);
        edge.target = target - outerScript->code;
        edge.fastJump = j;
        if (slow)
            edge.slowJump = *slow;
        chunkEdges.append(edge);
        return true;
    }

    /*
     * Unless we are coming from a branch which synced everything, syncForBranch
     * must have been called and ensured an allocation at the target.
     */
    RegisterAllocation *lvtarget = NULL;
    bool consistent = true;
    if (cx->typeInferenceEnabled()) {
        /* Create a (fully synced) allocation for the target if there is none yet. */
        RegisterAllocation *&alloc = analysis->getAllocation(target);
        if (!alloc) {
            alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
            if (!alloc) {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        lvtarget = alloc;
        consistent = frame.consistentRegisters(target);
    }

    if (!lvtarget || lvtarget->synced()) {
        /* Target expects a fully synced state: plain jumps suffice. */
        JS_ASSERT(consistent);
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
    } else {
        if (consistent) {
            /* Registers already match the target allocation. */
            if (!jumpInScript(j, target))
                return false;
        } else {
            /*
             * Make a trampoline to issue remaining loads for the register
             * state at target.
             */
            Label start = stubcc.masm.label();
            stubcc.linkExitDirect(j, start);
            frame.prepareForJump(target, stubcc.masm, false);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
            if (trampoline)
                *trampoline = true;
            if (pcLengths) {
                /*
                 * This is OOL code but will usually be executed, so track
                 * it in the CODE_LENGTH for the opcode.
                 */
                uint32_t offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
                size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
                pcLengths[offset].codeLength += length;
            }
        }

        if (slow) {
            /* The slow jump comes from a synced state; load target registers first. */
            slow->linkTo(stubcc.masm.label(), &stubcc.masm);
            frame.prepareForJump(target, stubcc.masm, true);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
        }
    }

    /* A backwards jump is a loop backedge; finish the loop's compilation. */
    if (target < PC)
        return finishLoop(target);
    return true;
}
7134 :
/*
 * Emit code for entering a static block scope: sync the frame, call the
 * EnterBlock stub with the block object, and (for JSOP_ENTERBLOCK) reserve
 * the block's stack slots in the frame.
 */
void
mjit::Compiler::enterBlock(StaticBlockObject *block)
{
    /* For now, don't bother doing anything for this opcode. */
    frame.syncAndForgetEverything();
    masm.move(ImmPtr(block), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::EnterBlock, REJOIN_NONE);
    if (*PC == JSOP_ENTERBLOCK)
        frame.enterBlock(StackDefs(script, PC));
}
7145 :
/*
 * Emit code for leaving a block scope: call the LeaveBlock stub and pop the
 * block's slots (as determined by StackUses at this opcode) from the frame.
 */
void
mjit::Compiler::leaveBlock()
{
    /*
     * Note: After bug 535912, we can pass the block obj directly, inline
     * PutBlockObject, and do away with the muckiness in PutBlockObject.
     */
    uint32_t n = StackUses(script, PC);
    prepareStubCall(Uses(n));
    INLINE_STUBCALL(stubs::LeaveBlock, REJOIN_NONE);
    frame.leaveBlock(n);
}
7158 :
7159 : // Creates the new object expected for constructors, and places it in |thisv|.
7160 : // It is broken down into the following operations:
7161 : // CALLEE
7162 : // GETPROP "prototype"
7163 : // IFPRIMTOP:
7164 : // NULL
7165 : // call js_CreateThisFromFunctionWithProto(...)
7166 : //
bool
mjit::Compiler::constructThis()
{
    JS_ASSERT(isConstructing);

    JSFunction *fun = script->function();

    /*
     * Fast path: if type inference knows the callee's singleton prototype,
     * create 'this' inline from a template object; otherwise break out to
     * the generic path below.
     */
    do {
        if (!cx->typeInferenceEnabled() ||
            !fun->hasSingletonType() ||
            fun->getType(cx)->unknownProperties())
        {
            break;
        }

        jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
        types::TypeSet *protoTypes = fun->getType(cx)->getProperty(cx, id, false);

        JSObject *proto = protoTypes->getSingleton(cx, true);
        if (!proto)
            break;

        /*
         * Generate an inline path to create a 'this' object with the given
         * prototype. Only do this if the type is actually known as a possible
         * 'this' type of the script.
         */
        types::TypeObject *type = proto->getNewType(cx, fun);
        if (!type)
            return false;
        if (!types::TypeScript::ThisTypes(script)->hasType(types::Type::ObjectType(type)))
            break;

        JSObject *templateObject = js_CreateThisForFunctionWithProto(cx, fun, proto);
        if (!templateObject)
            return false;

        /*
         * The template incorporates a shape and/or fixed slots from any
         * newScript on its type, so make sure recompilation is triggered
         * should this information change later.
         */
        if (templateObject->type()->newScript)
            types::TypeSet::WatchObjectStateChange(cx, templateObject->type());

        /* Inline allocation; fall to an OOL CreateThis stub if the free list is empty. */
        RegisterID result = frame.allocReg();
        Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

        stubcc.linkExit(emptyFreeList, Uses(0));
        stubcc.leave();

        stubcc.masm.move(ImmPtr(proto), Registers::ArgReg1);
        OOL_STUBCALL(stubs::CreateThis, REJOIN_RESUME);

        frame.setThis(result);

        stubcc.rejoin(Changes(1));
        return true;
    } while (false);

    /* Generic path: fetch callee.prototype at runtime. */

    // Load the callee.
    frame.pushCallee();

    // Get callee.prototype.
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN, false, /* forPrototype = */ true))
        return false;

    // Reach into the proto Value and grab a register for its data.
    FrameEntry *protoFe = frame.peek(-1);
    RegisterID protoReg = frame.ownRegForData(protoFe);

    // Now, get the type. If it's not an object, set protoReg to NULL.
    JS_ASSERT_IF(protoFe->isTypeKnown(), protoFe->isType(JSVAL_TYPE_OBJECT));
    if (!protoFe->isType(JSVAL_TYPE_OBJECT)) {
        Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
        stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
        stubcc.masm.move(ImmPtr(NULL), protoReg);
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }

    // Done with the protoFe.
    frame.pop();

    prepareStubCall(Uses(0));
    if (protoReg != Registers::ArgReg1)
        masm.move(protoReg, Registers::ArgReg1);
    INLINE_STUBCALL(stubs::CreateThis, REJOIN_RESUME);
    frame.freeReg(protoReg);
    return true;
}
7257 :
/*
 * Compile JSOP_TABLESWITCH: either an inline indirect jump through a patched
 * jump table (int32 operand, <= 256 cases), or a call to the TableSwitch stub.
 */
bool
mjit::Compiler::jsop_tableswitch(jsbytecode *pc)
{
#if defined JS_CPU_ARM
    JS_NOT_REACHED("Implement jump(BaseIndex) for ARM");
    return true;
#else
    jsbytecode *originalPC = pc;
    DebugOnly<JSOp> op = JSOp(*originalPC);
    JS_ASSERT(op == JSOP_TABLESWITCH);

    /* Decode the default target and the [low, high] case range from the bytecode. */
    uint32_t defaultTarget = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;

    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int numJumps = high + 1 - low;
    JS_ASSERT(numJumps >= 0);

    /* Known non-int operand or an oversized table: go through the stub. */
    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
        frame.syncAndForgetEverything();
        masm.move(ImmPtr(originalPC), Registers::ArgReg1);

        /* prepareStubCall() is not needed due to forgetEverything() */
        INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        frame.pop();
        masm.jump(Registers::ReturnReg);
        return true;
    }

    /* Load the switch operand into dataReg. */
    RegisterID dataReg;
    if (fe->isConstant()) {
        JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
        dataReg = frame.allocReg();
        masm.move(Imm32(fe->getValue().toInt32()), dataReg);
    } else {
        dataReg = frame.copyDataIntoReg(fe);
    }

    RegisterID reg = frame.allocReg();
    frame.syncAndForgetEverything();

    /* If the operand's type is unknown, test for int32 and stub otherwise. */
    MaybeJump notInt;
    if (!fe->isType(JSVAL_TYPE_INT32))
        notInt = masm.testInt32(Assembler::NotEqual, frame.addressOf(fe));

    /* Record the jump table; its address is patched into reg later. */
    JumpTable jt;
    jt.offsetIndex = jumpTableEdges.length();
    jt.label = masm.moveWithPatch(ImmPtr(NULL), reg);
    jumpTables.append(jt);

    /* Record one edge per case; a zero offset means the default target. */
    for (int i = 0; i < numJumps; i++) {
        uint32_t target = GET_JUMP_OFFSET(pc);
        if (!target)
            target = defaultTarget;
        JumpTableEdge edge;
        edge.source = originalPC - script->code;
        edge.target = (originalPC + target) - script->code;
        jumpTableEdges.append(edge);
        pc += JUMP_OFFSET_LEN;
    }

    /* Rebase the operand to index 0, bounds check, then jump through the table. */
    if (low != 0)
        masm.sub32(Imm32(low), dataReg);
    Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
    BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
    masm.jump(jumpTarget);

    /* OOL path for a non-int operand: call the generic TableSwitch stub. */
    if (notInt.isSet()) {
        stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
        stubcc.leave();
        stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
        OOL_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        stubcc.masm.jump(Registers::ReturnReg);
    }
    frame.pop();
    return jumpAndRun(defaultCase, originalPC + defaultTarget);
#endif
}
7339 :
/*
 * Compile JSOP_TOID: convert the top of stack to a jsid. Int32 values are
 * already valid ids and are left untouched; everything else goes through the
 * ToId stub (inline when known non-int, OOL behind an inline int32 test when
 * the type is unknown).
 */
void
mjit::Compiler::jsop_toid()
{
    /* Leave integers alone, stub everything else. */
    FrameEntry *top = frame.peek(-1);

    if (top->isType(JSVAL_TYPE_INT32))
        return;

    if (top->isNotType(JSVAL_TYPE_INT32)) {
        /* Known non-int: always call the stub. */
        prepareStubCall(Uses(2));
        INLINE_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);
        frame.pop();
        pushSyncedEntry(0);
        return;
    }

    /* Unknown type: test inline for int32, take the OOL stub otherwise. */
    frame.syncAt(-1);

    Jump j = frame.testInt32(Assembler::NotEqual, top);
    stubcc.linkExit(j, Uses(2));

    stubcc.leave();
    OOL_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);

    frame.pop();
    pushSyncedEntry(0);

    stubcc.rejoin(Changes(1));
}
7370 :
/*
 * Compile JSOP_IN. With type inference, an int32 id tested against a dense
 * array gets an inline initialized-length (and, for non-packed arrays, hole)
 * check; all other cases call the In stub.
 */
void
mjit::Compiler::jsop_in()
{
    FrameEntry *obj = frame.peek(-1);
    FrameEntry *id = frame.peek(-2);

    if (cx->typeInferenceEnabled() && id->isType(JSVAL_TYPE_INT32)) {
        types::TypeSet *types = analysis->poppedTypes(PC, 0);

        if (obj->mightBeType(JSVAL_TYPE_OBJECT) &&
            !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY) &&
            !types::ArrayPrototypeHasIndexedProperty(cx, outerScript))
        {
            /* Packed arrays have no holes, so the hole check can be skipped. */
            bool isPacked = !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY);

            if (!obj->isTypeKnown()) {
                Jump guard = frame.testObject(Assembler::NotEqual, obj);
                stubcc.linkExit(guard, Uses(2));
            }

            RegisterID dataReg = frame.copyDataIntoReg(obj);

            Int32Key key = id->isConstant()
                         ? Int32Key::FromConstant(id->getValue().toInt32())
                         : Int32Key::FromRegister(frame.tempRegForData(id));

            masm.loadPtr(Address(dataReg, JSObject::offsetOfElements()), dataReg);

            // Guard on the array's initialized length.
            Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
                                                      dataReg, key, Assembler::BelowOrEqual);

            // Guard to make sure we don't have a hole. Skip it if the array is packed.
            MaybeJump holeCheck;
            if (!isPacked)
                holeCheck = masm.guardElementNotHole(dataReg, key);

            /* In bounds and not a hole: result is true. */
            masm.move(Imm32(1), dataReg);
            Jump done = masm.jump();

            /* Out of bounds or a hole: result is false. */
            Label falseBranch = masm.label();
            initlenGuard.linkTo(falseBranch, &masm);
            if (!isPacked)
                holeCheck.getJump().linkTo(falseBranch, &masm);
            masm.move(Imm32(0), dataReg);

            done.linkTo(masm.label(), &masm);

            stubcc.leave();
            OOL_STUBCALL_USES(stubs::In, REJOIN_PUSH_BOOLEAN, Uses(2));

            frame.popn(2);
            if (dataReg != Registers::ReturnReg)
                stubcc.masm.move(Registers::ReturnReg, dataReg);

            frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, dataReg);

            stubcc.rejoin(Changes(2));

            return;
        }
    }

    /* Generic path: call the In stub and push its boolean result. */
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::In, REJOIN_PUSH_BOOLEAN);
    frame.popn(2);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
}
7440 :
7441 : /*
7442 : * For any locals or args which we know to be integers but are treated as
7443 : * doubles by the type inference, convert to double. These will be assumed to be
7444 : * doubles at control flow join points. This function must be called before
7445 : * branching to another opcode.
7446 : *
7447 : * We can only carry entries as doubles when we can track all incoming edges to
7448 : * a join point (no try blocks etc.) and when we can track all writes to the
7449 : * local/arg (the slot does not escape) and ensure the Compiler representation
7450 : * matches the inferred type for the variable's SSA value. These properties are
7451 : * both ensured by analysis->trackSlot.
7452 : */
void
mjit::Compiler::fixDoubleTypes(jsbytecode *target)
{
    if (!cx->typeInferenceEnabled())
        return;

    /*
     * Fill fixedIntToDoubleEntries with all variables that are known to be an
     * int here and a double at the branch target, and fixedDoubleToAnyEntries
     * with all variables that are known to be a double here but not at the
     * branch target.
     *
     * Per prepareInferenceTypes, the target state consists of the current
     * state plus any phi nodes or other new values introduced at the target.
     */
    JS_ASSERT(fixedIntToDoubleEntries.empty());
    JS_ASSERT(fixedDoubleToAnyEntries.empty());
    const SlotValue *newv = analysis->newValues(target);
    if (newv) {
        while (newv->slot) {
            /* Only tracked slots defined by a phi node at the target itself matter. */
            if (newv->value.kind() != SSAValue::PHI ||
                newv->value.phiOffset() != uint32_t(target - script->code) ||
                !analysis->trackSlot(newv->slot)) {
                newv++;
                continue;
            }
            JS_ASSERT(newv->slot < TotalSlots(script));
            types::TypeSet *targetTypes = analysis->getValueTypes(newv->value);
            FrameEntry *fe = frame.getSlotEntry(newv->slot);
            VarType &vt = a->varTypes[newv->slot];
            JSValueType type = vt.getTypeTag(cx);
            if (targetTypes->getKnownTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                if (type == JSVAL_TYPE_INT32) {
                    /* Int here, double at the target: convert before branching. */
                    fixedIntToDoubleEntries.append(newv->slot);
                    frame.ensureDouble(fe);
                    frame.forgetLoopReg(fe);
                } else if (type == JSVAL_TYPE_UNKNOWN) {
                    /*
                     * Unknown here but a double at the target. The type
                     * set for the existing value must be empty, so this
                     * code is doomed and we can just mark the value as
                     * a double.
                     */
                    frame.ensureDouble(fe);
                } else {
                    JS_ASSERT(type == JSVAL_TYPE_DOUBLE);
                }
            } else if (type == JSVAL_TYPE_DOUBLE) {
                /* Double here, not a double at the target: sync and forget. */
                fixedDoubleToAnyEntries.append(newv->slot);
                frame.syncAndForgetFe(fe);
                frame.forgetLoopReg(fe);
            }
            newv++;
        }
    }
}
7509 :
7510 : void
7511 130916 : mjit::Compiler::watchGlobalReallocation()
7512 : {
7513 130916 : JS_ASSERT(cx->typeInferenceEnabled());
7514 130916 : if (hasGlobalReallocation)
7515 107716 : return;
7516 23200 : types::TypeSet::WatchObjectStateChange(cx, globalObj->getType(cx));
7517 23200 : hasGlobalReallocation = true;
7518 : }
7519 :
void
mjit::Compiler::updateVarType()
{
    if (!cx->typeInferenceEnabled())
        return;

    /*
     * For any non-escaping variable written at the current opcode, update the
     * associated type sets according to the written type, keeping the type set
     * for each variable in sync with what the SSA analysis has determined
     * (see prepareInferenceTypes).
     */

    types::TypeSet *types = pushedTypeSet(0);
    uint32_t slot = GetBytecodeSlot(script, PC);

    if (analysis->trackSlot(slot)) {
        VarType &vt = a->varTypes[slot];
        vt.setTypes(types);

        /*
         * Variables whose type has been inferred as a double need to be
         * maintained by the frame as a double. We might forget the exact
         * representation used by the next call to fixDoubleTypes, fix it now.
         */
        if (vt.getTypeTag(cx) == JSVAL_TYPE_DOUBLE)
            frame.ensureDouble(frame.getSlotEntry(slot));
    }
}
7549 :
7550 : void
7551 297308 : mjit::Compiler::updateJoinVarTypes()
7552 : {
7553 297308 : if (!cx->typeInferenceEnabled())
7554 106611 : return;
7555 :
7556 : /* Update variable types for all new values at this bytecode. */
7557 190697 : const SlotValue *newv = analysis->newValues(PC);
7558 190697 : if (newv) {
7559 243111 : while (newv->slot) {
7560 135507 : if (newv->slot < TotalSlots(script)) {
7561 44748 : VarType &vt = a->varTypes[newv->slot];
7562 44748 : JSValueType type = vt.getTypeTag(cx);
7563 44748 : vt.setTypes(analysis->getValueTypes(newv->value));
7564 44748 : if (vt.getTypeTag(cx) != type) {
7565 : /*
7566 : * If the known type of a variable changes (even if the
7567 : * variable itself has not been reassigned) then we can't
7568 : * carry a loop register for the var.
7569 : */
7570 10171 : FrameEntry *fe = frame.getSlotEntry(newv->slot);
7571 10171 : frame.forgetLoopReg(fe);
7572 : }
7573 : }
7574 135507 : newv++;
7575 : }
7576 : }
7577 : }
7578 :
7579 : void
7580 323675 : mjit::Compiler::restoreVarType()
7581 : {
7582 323675 : if (!cx->typeInferenceEnabled())
7583 76919 : return;
7584 :
7585 246756 : uint32_t slot = GetBytecodeSlot(script, PC);
7586 :
7587 246756 : if (slot >= analyze::TotalSlots(script))
7588 12959 : return;
7589 :
7590 : /*
7591 : * Restore the known type of a live local or argument. We ensure that types
7592 : * of tracked variables match their inferred type (as tracked in varTypes),
7593 : * but may have forgotten it due to a branch or syncAndForgetEverything.
7594 : */
7595 233797 : JSValueType type = a->varTypes[slot].getTypeTag(cx);
7596 236902 : if (type != JSVAL_TYPE_UNKNOWN &&
7597 3105 : (type != JSVAL_TYPE_DOUBLE || analysis->trackSlot(slot))) {
7598 144870 : FrameEntry *fe = frame.getSlotEntry(slot);
7599 144870 : JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
7600 144870 : if (!fe->isTypeKnown())
7601 93447 : frame.learnType(fe, type, false);
7602 : }
7603 : }
7604 :
7605 : JSValueType
7606 1357958 : mjit::Compiler::knownPushedType(uint32_t pushed)
7607 : {
7608 1357958 : if (!cx->typeInferenceEnabled())
7609 781200 : return JSVAL_TYPE_UNKNOWN;
7610 576758 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7611 576758 : return types->getKnownTypeTag(cx);
7612 : }
7613 :
7614 : bool
7615 24598 : mjit::Compiler::mayPushUndefined(uint32_t pushed)
7616 : {
7617 24598 : JS_ASSERT(cx->typeInferenceEnabled());
7618 :
7619 : /*
7620 : * This should only be used when the compiler is checking if it is OK to push
7621 : * undefined without going to a stub that can trigger recompilation.
7622 : * If this returns false and undefined subsequently becomes a feasible
7623 : * value pushed by the bytecode, recompilation will *NOT* be triggered.
7624 : */
7625 24598 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7626 24598 : return types->hasType(types::Type::UndefinedType());
7627 : }
7628 :
7629 : types::TypeSet *
7630 528885 : mjit::Compiler::pushedTypeSet(uint32_t pushed)
7631 : {
7632 528885 : if (!cx->typeInferenceEnabled())
7633 316762 : return NULL;
7634 212123 : return analysis->pushedTypes(PC, pushed);
7635 : }
7636 :
7637 : bool
7638 394508 : mjit::Compiler::monitored(jsbytecode *pc)
7639 : {
7640 394508 : if (!cx->typeInferenceEnabled())
7641 127454 : return false;
7642 267054 : return analysis->getCode(pc).monitoredTypes;
7643 : }
7644 :
7645 : bool
7646 514561 : mjit::Compiler::hasTypeBarriers(jsbytecode *pc)
7647 : {
7648 514561 : if (!cx->typeInferenceEnabled())
7649 26465 : return false;
7650 :
7651 488096 : return analysis->typeBarriers(cx, pc) != NULL;
7652 : }
7653 :
7654 : void
7655 5956 : mjit::Compiler::pushSyncedEntry(uint32_t pushed)
7656 : {
7657 5956 : frame.pushSynced(knownPushedType(pushed));
7658 5956 : }
7659 :
7660 : JSObject *
7661 986688 : mjit::Compiler::pushedSingleton(unsigned pushed)
7662 : {
7663 986688 : if (!cx->typeInferenceEnabled())
7664 505806 : return NULL;
7665 :
7666 480882 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7667 480882 : return types->getSingleton(cx);
7668 : }
7669 :
7670 : /*
7671 : * Barriers overview.
7672 : *
7673 : * After a property fetch finishes, we may need to do type checks on it to make
7674 : * sure it matches the pushed type set for this bytecode. This can be either
7675 : * because there is a type barrier at the bytecode, or because we cannot rule
7676 : * out an undefined result. For such accesses, we push a register pair, and
7677 : * then use those registers to check the fetched type matches the inferred
7678 : * types for the pushed set. The flow here is tricky:
7679 : *
7680 : * frame.pushRegs(type, data, knownType);
7681 : * --- Depending on knownType, the frame's representation for the pushed entry
7682 : * may not be a register pair anymore. knownType is based on the observed
7683 : * types that have been pushed here and may not actually match type/data.
7684 : * pushRegs must not clobber either register, for the test below.
7685 : *
7686 : * testBarrier(type, data)
7687 : * --- Use the type/data regs and generate a single jump taken if the barrier
7688 : * has been violated.
7689 : *
7690 : * --- Rearrange stack, rejoin from stub paths. No code must be emitted into
7691 : * the inline path between testBarrier and finishBarrier. Since a stub path
7692 : * may be in progress we can't call finishBarrier before stubcc.rejoin,
7693 : * and since typeReg/dataReg may not be intact after the stub call rejoin
7694 : * (if knownType != JSVAL_TYPE_UNKNOWN) we can't testBarrier after calling
7695 : * stubcc.rejoin.
7696 : *
7697 : * finishBarrier()
7698 : * --- Link the barrier jump to a new stub code path which updates the pushed
7699 : * types (possibly triggering recompilation). The frame has changed since
7700 : * pushRegs to reflect the final state of the op, which is OK as no inline
7701 : * code has been emitted since the barrier jump.
7702 : */
7703 :
7704 : mjit::Compiler::BarrierState
7705 117960 : mjit::Compiler::pushAddressMaybeBarrier(Address address, JSValueType type, bool reuseBase,
7706 : bool testUndefined)
7707 : {
7708 117960 : if (!hasTypeBarriers(PC) && !testUndefined) {
7709 26207 : frame.push(address, type, reuseBase);
7710 26207 : return BarrierState();
7711 : }
7712 :
7713 : RegisterID typeReg, dataReg;
7714 91753 : frame.loadIntoRegisters(address, reuseBase, &typeReg, &dataReg);
7715 :
7716 91753 : frame.pushRegs(typeReg, dataReg, type);
7717 91753 : return testBarrier(typeReg, dataReg, testUndefined);
7718 : }
7719 :
7720 : MaybeJump
7721 205116 : mjit::Compiler::trySingleTypeTest(types::TypeSet *types, RegisterID typeReg)
7722 : {
7723 : /*
7724 : * If a type set we have a barrier on is monomorphic, generate a single
7725 : * jump taken if a type register has a match. This doesn't handle type sets
7726 : * containing objects, as these require two jumps regardless (test for
7727 : * object, then test the type of the object).
7728 : */
7729 205116 : MaybeJump res;
7730 :
7731 205116 : switch (types->getKnownTypeTag(cx)) {
7732 : case JSVAL_TYPE_INT32:
7733 10624 : res.setJump(masm.testInt32(Assembler::NotEqual, typeReg));
7734 10624 : return res;
7735 :
7736 : case JSVAL_TYPE_DOUBLE:
7737 5038 : res.setJump(masm.testNumber(Assembler::NotEqual, typeReg));
7738 5038 : return res;
7739 :
7740 : case JSVAL_TYPE_BOOLEAN:
7741 1481 : res.setJump(masm.testBoolean(Assembler::NotEqual, typeReg));
7742 1481 : return res;
7743 :
7744 : case JSVAL_TYPE_STRING:
7745 16457 : res.setJump(masm.testString(Assembler::NotEqual, typeReg));
7746 16457 : return res;
7747 :
7748 : default:
7749 171516 : return res;
7750 : }
7751 : }
7752 :
/*
 * Emit an inline test of the value in typeReg/dataReg against every type in
 * |types|. A jump is collected for each possible match; all matches are
 * linked to the code following the returned jump, which is taken on
 * mismatch (i.e. the barrier is violated).
 */
JSC::MacroAssembler::Jump
mjit::Compiler::addTypeTest(types::TypeSet *types, RegisterID typeReg, RegisterID dataReg)
{
    /*
     * :TODO: It would be good to merge this with GenerateTypeCheck, but the
     * two methods have a different format for the tested value (in registers
     * vs. in memory).
     */

    Vector<Jump> matches(CompilerAllocPolicy(cx, *this));

    /* One equality test per primitive type present in the set. */
    if (types->hasType(types::Type::Int32Type()))
        matches.append(masm.testInt32(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::DoubleType()))
        matches.append(masm.testDouble(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::UndefinedType()))
        matches.append(masm.testUndefined(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::BooleanType()))
        matches.append(masm.testBoolean(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::StringType()))
        matches.append(masm.testString(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::NullType()))
        matches.append(masm.testNull(Assembler::Equal, typeReg));

    /*
     * If the set admits any object, a single is-object test matches;
     * otherwise each specific object/type object in the set must be compared
     * individually below.
     */
    unsigned count = 0;
    if (types->hasType(types::Type::AnyObjectType()))
        matches.append(masm.testObject(Assembler::Equal, typeReg));
    else
        count = types->getObjectCount();

    if (count != 0) {
        /* Non-object values skip the pointer comparisons entirely. */
        Jump notObject = masm.testObject(Assembler::NotEqual, typeReg);
        /* dataReg holds the object pointer once typeReg tested as object. */
        Address typeAddress(dataReg, JSObject::offsetOfType());

        /* Singleton objects: compare the object pointer itself. */
        for (unsigned i = 0; i < count; i++) {
            if (JSObject *object = types->getSingleObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, dataReg, ImmPtr(object)));
        }

        /* Type objects: compare the object's type field. */
        for (unsigned i = 0; i < count; i++) {
            if (types::TypeObject *object = types->getTypeObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, typeAddress, ImmPtr(object)));
        }

        notObject.linkTo(masm.label(), &masm);
    }

    /* Fallthrough: no test matched, the barrier has been violated. */
    Jump mismatch = masm.jump();

    /* All match jumps land here, immediately after the mismatch jump. */
    for (unsigned i = 0; i < matches.length(); i++)
        matches[i].linkTo(masm.label(), &masm);

    return mismatch;
}
7812 :
/*
 * Emit a type barrier test on the value in typeReg/dataReg, per the
 * pushRegs/testBarrier/finishBarrier protocol described above. The returned
 * BarrierState records the registers and, when a barrier is needed, a jump
 * taken if the barrier is violated; finishBarrier links that jump to a stub
 * path. The jump is left unset when no barrier can fail here.
 */
mjit::Compiler::BarrierState
mjit::Compiler::testBarrier(RegisterID typeReg, RegisterID dataReg,
                            bool testUndefined, bool testReturn, bool force)
{
    BarrierState state;
    state.typeReg = typeReg;
    state.dataReg = dataReg;

    /* Barriers only exist for type-inferred code at opcodes with a type set. */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return state;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    if (types->unknown()) {
        /*
         * If the result of this opcode is already unknown, there is no way for
         * a type barrier to fail.
         */
        return state;
    }

    if (testReturn) {
        JS_ASSERT(!testUndefined);
        /* Return values only need a barrier when their types are monitored. */
        if (!analysis->getCode(PC).monitoredTypesReturn)
            return state;
    } else if (!hasTypeBarriers(PC) && !force) {
        /*
         * No barrier needed; optionally still emit a jump taken when the
         * value is undefined, for callers that asked to test for it.
         */
        if (testUndefined && !types->hasType(types::Type::UndefinedType()))
            state.jump.setJump(masm.testUndefined(Assembler::Equal, typeReg));
        return state;
    }

    /* Freeze the set so recompilation is triggered if it later grows. */
    types->addFreeze(cx);

    /* Cannot have type barriers when the result of the operation is already unknown. */
    JS_ASSERT(!types->unknown());

    /* Prefer a single inline test; fall back to the full per-type test. */
    state.jump = trySingleTypeTest(types, typeReg);
    if (!state.jump.isSet())
        state.jump.setJump(addTypeTest(types, typeReg, dataReg));

    return state;
}
7854 :
/*
 * Link a pending barrier jump (from testBarrier) to a new out-of-line stub
 * path that stores the value, syncs, and calls TypeBarrierHelper to update
 * the pushed type set (possibly triggering recompilation). |which| is
 * forwarded to the stub in ArgReg1. No-op when no barrier jump was emitted.
 */
void
mjit::Compiler::finishBarrier(const BarrierState &barrier, RejoinState rejoin, uint32_t which)
{
    if (!barrier.jump.isSet())
        return;

    stubcc.linkExitDirect(barrier.jump.get(), stubcc.masm.label());

    /*
     * Before syncing, store the entry to sp[0]. (scanInlineCalls accounted for
     * this when making sure there is enough room for all frames). The known
     * type in the frame may be wrong leading to an incorrect sync, and this
     * sync may also clobber typeReg and/or dataReg.
     */
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
    stubcc.masm.storeValueFromComponents(barrier.typeReg, barrier.dataReg,
                                         frame.addressOf(frame.peek(-1)));
    frame.pop();

    stubcc.syncExit(Uses(0));
    stubcc.leave();

    stubcc.masm.move(ImmIntPtr(intptr_t(which)), Registers::ArgReg1);
    OOL_STUBCALL(stubs::TypeBarrierHelper, rejoin);
    stubcc.rejoin(Changes(0));
}
7881 :
/*
 * Emit a check that the value pushed at stack depth |which| (0 for the next
 * slot, negative for existing entries) matches this opcode's observed type
 * set; on mismatch, call StubTypeHelper with |which| in ArgReg1. Emitted
 * into the out-of-line assembler when |ool| is set, inline otherwise.
 */
void
mjit::Compiler::testPushedType(RejoinState rejoin, int which, bool ool)
{
    /* Only meaningful for type-inferred code at opcodes with a type set. */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    if (types->unknown())
        return;

    /* Shadow this->masm so the emission below targets the chosen path. */
    Assembler &masm = ool ? stubcc.masm : this->masm;

    JS_ASSERT(which <= 0);
    Address address = (which == 0) ? frame.addressOfTop() : frame.addressOf(frame.peek(which));

    Vector<Jump> mismatches(cx);
    if (!masm.generateTypeCheck(cx, address, types, &mismatches)) {
        /* Record OOM from the fallible vector; caller checks oomInVector. */
        oomInVector = true;
        return;
    }

    /* Matching values jump past the stub call. */
    Jump j = masm.jump();

    for (unsigned i = 0; i < mismatches.length(); i++)
        mismatches[i].linkTo(masm.label(), &masm);

    masm.move(Imm32(which), Registers::ArgReg1);
    if (ool)
        OOL_STUBCALL(stubs::StubTypeHelper, rejoin);
    else
        INLINE_STUBCALL(stubs::StubTypeHelper, rejoin);

    j.linkTo(masm.label(), &masm);
}
|