1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Mandelin <dmandelin@mozilla.com>
25 : *
26 : * Alternatively, the contents of this file may be used under the terms of
27 : * either of the GNU General Public License Version 2 or later (the "GPL"),
28 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 : * in which case the provisions of the GPL or the LGPL are applicable instead
30 : * of those above. If you wish to allow use of your version of this file only
31 : * under the terms of either the GPL or the LGPL, and not to allow others to
32 : * use your version of this file under the terms of the MPL, indicate your
33 : * decision by deleting the provisions above and replace them with the notice
34 : * and other provisions required by the GPL or the LGPL. If you do not delete
35 : * the provisions above, a recipient may use your version of this file under
36 : * the terms of any one of the MPL, the GPL or the LGPL.
37 : *
38 : * ***** END LICENSE BLOCK ***** */
39 : #include "PolyIC.h"
40 : #include "StubCalls.h"
41 : #include "CodeGenIncludes.h"
42 : #include "StubCalls-inl.h"
43 : #include "BaseCompiler.h"
44 : #include "assembler/assembler/LinkBuffer.h"
45 : #include "TypedArrayIC.h"
46 : #include "jsscope.h"
47 : #include "jsnum.h"
48 : #include "jstypedarray.h"
49 : #include "jsatominlines.h"
50 : #include "jsobjinlines.h"
51 : #include "jsscopeinlines.h"
52 : #include "jsinterpinlines.h"
53 : #include "jsautooplen.h"
54 :
55 : #include "vm/ScopeObject-inl.h"
56 : #include "vm/StringObject-inl.h"
57 :
58 : #if defined JS_POLYIC
59 :
60 : using namespace js;
61 : using namespace js::mjit;
62 : using namespace js::mjit::ic;
63 :
64 : typedef JSC::FunctionPtr FunctionPtr;
65 : typedef JSC::MacroAssembler::RegisterID RegisterID;
66 : typedef JSC::MacroAssembler::Jump Jump;
67 : typedef JSC::MacroAssembler::Imm32 Imm32;
68 :
69 : /* Rough over-estimate of how much memory we need to unprotect. */
70 : static const uint32_t INLINE_PATH_LENGTH = 64;
71 :
72 : // Helper class to simplify LinkBuffer usage in PIC stub generators.
73 : // This guarantees correct OOM and refcount handling for buffers while they
74 : // are instantiated and rooted.
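//
// Typical usage (a sketch mirroring the stub generators below):
//
//   PICLinker buffer(masm, pic);
//   if (!buffer.init(cx))
//       return error();
//   if (!buffer.verifyRange(f.chunk()))
//       return disable("code memory is out of range");
//   buffer.link(someJump, pic.slowPathStart);
//   CodeLocationLabel cs = buffer.finalize(f);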
75 : class PICLinker : public LinkerHelper
76 262450 : {
77 : ic::BasePolyIC &ic;
78 :
79 : public:
80 262450 : PICLinker(Assembler &masm, ic::BasePolyIC &ic)
81 262450 : : LinkerHelper(masm, JSC::METHOD_CODE), ic(ic)
82 262450 : { }
83 :
84 262450 : bool init(JSContext *cx) {
85 262450 : JSC::ExecutablePool *pool = LinkerHelper::init(cx);
86 262450 : if (!pool)
87 0 : return false;
88 262450 : if (!ic.addPool(cx, pool)) {
89 0 : pool->release();
90 0 : js_ReportOutOfMemory(cx);
91 0 : return false;
92 : }
93 262450 : return true;
94 : }
95 : };
96 :
97 : class PICStubCompiler : public BaseCompiler
98 : {
99 : protected:
100 : const char *type;
101 : VMFrame &f;
102 : JSScript *script;
103 : ic::PICInfo &pic;
104 : void *stub;
105 : uint64_t gcNumber;
106 :
107 : public:
108 : bool canCallHook;
109 :
110 436336 : PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
111 : : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub),
112 436336 : gcNumber(f.cx->runtime->gcNumber), canCallHook(pic.canCallHook)
113 436336 : { }
114 :
115 0 : LookupStatus error() {
116 : /*
117 : * N.B. Do not try to disable the IC, we do not want to guard on
118 : * whether the IC has been recompiled when propagating errors.
119 : */
120 0 : return Lookup_Error;
121 : }
122 :
123 0 : LookupStatus error(JSContext *cx) {
124 0 : return error();
125 : }
126 :
127 45916 : LookupStatus disable(const char *reason) {
128 45916 : return disable(f.cx, reason);
129 : }
130 :
131 46443 : LookupStatus disable(JSContext *cx, const char *reason) {
132 46443 : return pic.disable(f, reason, stub);
133 : }
134 :
135 10867 : LookupStatus disable(VMFrame &f, const char *reason) {
136 10867 : return pic.disable(f, reason, stub);
137 : }
138 :
139 199274 : bool hadGC() {
140 199274 : return gcNumber != f.cx->runtime->gcNumber;
141 : }
142 :
143 : protected:
144 310414 : void spew(const char *event, const char *op) {
145 : #ifdef JS_METHODJIT_SPEW
146 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
147 310414 : type, event, op, script->filename, CurrentLine(cx));
148 : #endif
149 310414 : }
150 : };
151 :
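/*
 * Summary of the helper below: for |obj|, and for each object between |obj|
 * and |holder| on the prototype chain whose proto link is not implied by its
 * shape (hasUncacheableProto), emit a guard that jumps via |mismatches| if
 * that object's prototype has changed since the stub was generated.
 */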
152 : static bool
153 37895 : GeneratePrototypeGuards(JSContext *cx, Vector<JSC::MacroAssembler::Jump,8> &mismatches, Assembler &masm,
154 : JSObject *obj, JSObject *holder,
155 : JSC::MacroAssembler::RegisterID objReg,
156 : JSC::MacroAssembler::RegisterID scratchReg)
157 : {
158 : typedef JSC::MacroAssembler::Address Address;
159 : typedef JSC::MacroAssembler::AbsoluteAddress AbsoluteAddress;
160 : typedef JSC::MacroAssembler::ImmPtr ImmPtr;
161 : typedef JSC::MacroAssembler::Jump Jump;
162 :
163 37895 : if (obj->hasUncacheableProto()) {
164 12 : masm.loadPtr(Address(objReg, JSObject::offsetOfType()), scratchReg);
165 : Jump j = masm.branchPtr(Assembler::NotEqual,
166 : Address(scratchReg, offsetof(types::TypeObject, proto)),
167 12 : ImmPtr(obj->getProto()));
168 12 : if (!mismatches.append(j))
169 0 : return false;
170 : }
171 :
172 37895 : JSObject *pobj = obj->getProto();
173 89526 : while (pobj != holder) {
174 13736 : if (pobj->hasUncacheableProto()) {
175 6263 : Jump j;
176 6263 : if (pobj->hasSingletonType()) {
177 0 : types::TypeObject *type = pobj->getType(cx);
178 : j = masm.branchPtr(Assembler::NotEqual,
179 : AbsoluteAddress(&type->proto),
180 0 : ImmPtr(pobj->getProto()),
181 0 : scratchReg);
182 : } else {
183 : j = masm.branchPtr(Assembler::NotEqual,
184 6263 : AbsoluteAddress(pobj->addressOfType()),
185 6263 : ImmPtr(pobj->type()),
186 12526 : scratchReg);
187 : }
188 6263 : if (!mismatches.append(j))
189 0 : return false;
190 : }
191 13736 : pobj = pobj->getProto();
192 : }
193 :
194 37895 : return true;
195 : }
196 :
197 : class SetPropCompiler : public PICStubCompiler
198 : {
199 : JSObject *obj;
200 : PropertyName *name;
201 : int lastStubSecondShapeGuard;
202 :
203 : public:
204 13026 : SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
205 : VoidStubPIC stub)
206 : : PICStubCompiler("setprop", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
207 13026 : obj(obj), name(name), lastStubSecondShapeGuard(pic.secondShapeGuard)
208 13026 : { }
209 :
210 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
211 : {
212 : SetPropLabels &labels = pic.setPropLabels();
213 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr));
214 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
215 : NULL);
216 : repatcher.relink(labels.getInlineShapeJump(pic.fastPathStart.labelAtOffset(pic.shapeGuard)),
217 : pic.slowPathStart);
218 :
219 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
220 : repatcher.relink(pic.slowPathCall, target);
221 : }
222 :
223 6105 : LookupStatus patchInline(const Shape *shape)
224 : {
225 6105 : JS_ASSERT(!pic.inlinePathPatched);
226 6105 : JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
227 :
228 12210 : Repatcher repatcher(f.chunk());
229 6105 : SetPropLabels &labels = pic.setPropLabels();
230 :
231 : int32_t offset;
232 6105 : if (obj->isFixedSlot(shape->slot())) {
233 5007 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr);
234 5007 : repatcher.repatchLoadPtrToLEA(istr);
235 :
236 : //
237 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
238 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
239 : //
240 : // Because the offset is wrong, it's necessary to correct it
241 : // below.
242 : //
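            // As a sketch of the arithmetic: after the patch the register
            // holds obj + offsetOfSlots(), and the inline value store is
            // repatched to use displacement slot * sizeof(Value) + diff.
            // Since diff = getFixedSlotOffset(0) - offsetOfSlots(), the
            // offsetOfSlots() terms cancel and the store addresses the
            // fixed slot directly.
            //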
243 5007 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
244 5007 : int32_t(JSObject::offsetOfSlots());
245 5007 : JS_ASSERT(diff != 0);
246 5007 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
247 : } else {
248 1098 : offset = obj->dynamicSlotIndex(shape->slot()) * sizeof(Value);
249 : }
250 :
251 6105 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
252 12210 : obj->lastProperty());
253 6105 : repatcher.patchAddressOffsetForValueStore(labels.getInlineValueStore(pic.fastPathRejoin),
254 12210 : offset, pic.u.vr.isTypeKnown());
255 :
256 6105 : pic.inlinePathPatched = true;
257 :
258 6105 : return Lookup_Cacheable;
259 : }
260 :
261 5345 : int getLastStubSecondShapeGuard() const {
262 5345 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
263 : }
264 :
265 5345 : void patchPreviousToHere(CodeLocationLabel cs)
266 : {
267 10690 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
268 5345 : CodeLocationLabel label = pic.lastPathStart();
269 :
270 : // Patch either the inline fast path or a generated stub. The stub
271 : // omits the prefix of the inline fast path that loads the shape, so
272 : // the offsets are different.
273 5345 : if (pic.stubsGenerated) {
274 733 : repatcher.relink(pic.setPropLabels().getStubShapeJump(label), cs);
275 : } else {
276 4612 : CodeLocationLabel shapeGuard = label.labelAtOffset(pic.shapeGuard);
277 4612 : repatcher.relink(pic.setPropLabels().getInlineShapeJump(shapeGuard), cs);
278 : }
279 5345 : if (int secondGuardOffset = getLastStubSecondShapeGuard())
280 469 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
281 5345 : }
282 :
283 5345 : LookupStatus generateStub(const Shape *initialShape, const Shape *shape, bool adding)
284 : {
285 5345 : if (hadGC())
286 0 : return Lookup_Uncacheable;
287 :
288 : /* Exits to the slow path. */
289 10690 : Vector<Jump, 8> slowExits(cx);
290 10690 : Vector<Jump, 8> otherGuards(cx);
291 :
292 10690 : Assembler masm;
293 :
294 : // Shape guard.
295 5345 : if (pic.shapeNeedsRemat()) {
296 419 : masm.loadShape(pic.objReg, pic.shapeReg);
297 419 : pic.shapeRegHasBaseShape = true;
298 : }
299 :
300 5345 : Label start = masm.label();
301 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
302 5345 : ImmPtr(initialShape));
303 :
304 5345 : Label stubShapeJumpLabel = masm.label();
305 :
306 5345 : pic.setPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
307 :
308 5345 : if (pic.typeMonitored) {
309 : /*
310 : * Inference does not know the type of the object being updated,
311 : * and we need to make sure that the updateMonitoredTypes() call
312 : * covers this stub, i.e. we will be writing to an object with the
313 : * same type. Add a type guard in addition to the shape guard.
314 : * Note: it is possible that this test gets a spurious hit if the
315 : * object has a lazy type, but in such cases no analyzed scripts
316 : * depend on the object and we will reconstruct its type from the
317 : * value being written here.
318 : */
319 : Jump typeGuard = masm.branchPtr(Assembler::NotEqual,
320 590 : Address(pic.objReg, JSObject::offsetOfType()),
321 1180 : ImmPtr(obj->getType(cx)));
322 590 : if (!otherGuards.append(typeGuard))
323 0 : return error();
324 : }
325 :
326 5345 : JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
327 :
328 5345 : MaybeJump skipOver;
329 :
330 5345 : if (adding) {
331 3412 : JS_ASSERT(shape->hasSlot());
332 3412 : pic.shapeRegHasBaseShape = false;
333 :
334 3412 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, NULL,
335 3412 : pic.objReg, pic.shapeReg)) {
336 0 : return error();
337 : }
338 :
339 : /* Emit shape guards for the object's prototype chain. */
340 3412 : JSObject *proto = obj->getProto();
341 3412 : RegisterID lastReg = pic.objReg;
342 13507 : while (proto) {
343 6683 : masm.loadPtr(Address(lastReg, JSObject::offsetOfType()), pic.shapeReg);
344 6683 : masm.loadPtr(Address(pic.shapeReg, offsetof(types::TypeObject, proto)), pic.shapeReg);
345 6683 : Jump protoGuard = masm.guardShape(pic.shapeReg, proto);
346 6683 : if (!otherGuards.append(protoGuard))
347 0 : return error();
348 :
349 6683 : proto = proto->getProto();
350 6683 : lastReg = pic.shapeReg;
351 : }
352 :
353 3412 : if (pic.kind == ic::PICInfo::SETMETHOD) {
354 : /*
355 : * Guard that the value is equal to the shape's method.
356 : * We already know it is a function, so test the payload.
357 : */
358 100 : JS_ASSERT(shape->isMethod());
359 100 : JSObject *funobj = obj->nativeGetMethod(shape);
360 100 : if (pic.u.vr.isConstant()) {
361 0 : JS_ASSERT(funobj == &pic.u.vr.value().toObject());
362 : } else {
363 : Jump mismatchedFunction =
364 100 : masm.branchPtr(Assembler::NotEqual, pic.u.vr.dataReg(), ImmPtr(funobj));
365 100 : if (!slowExits.append(mismatchedFunction))
366 0 : return error();
367 : }
368 : }
369 :
370 3412 : if (obj->isFixedSlot(shape->slot())) {
371 : Address address(pic.objReg,
372 2907 : JSObject::getFixedSlotOffset(shape->slot()));
373 2907 : masm.storeValue(pic.u.vr, address);
374 : } else {
375 : /*
376 : * Note: the guard on the initial shape determines the object's
377 : * number of fixed slots and slot span, which in turn determine
378 : * the number of dynamic slots allocated for the object.
379 : * We don't need to check capacity here.
380 : */
381 505 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfSlots()), pic.shapeReg);
382 505 : Address address(pic.shapeReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
383 505 : masm.storeValue(pic.u.vr, address);
384 : }
385 :
386 3412 : JS_ASSERT(shape == obj->lastProperty());
387 3412 : JS_ASSERT(shape != initialShape);
388 :
389 : /* Write the object's new shape. */
390 3412 : masm.storePtr(ImmPtr(shape), Address(pic.objReg, JSObject::offsetOfShape()));
391 1933 : } else if (shape->hasDefaultSetter()) {
392 985 : JS_ASSERT(!shape->isMethod());
393 985 : Address address = masm.objPropAddress(obj, pic.objReg, shape->slot());
394 985 : masm.storeValue(pic.u.vr, address);
395 : } else {
396 : // \ / In general, two function objects with different JSFunctions
397 : // # can have the same shape, thus we must not rely on the identity
398 : // >--+--< of 'fun' remaining the same. However, since:
399 : // ||| 1. the shape includes all arguments and locals and their setters
400 : // \\ V and getters, and
401 : // \===/ 2. arguments and locals have different getters
402 : // then we can rely on fun->nargs remaining invariant.
403 948 : JSFunction *fun = obj->asCall().getCalleeFunction();
404 948 : uint16_t slot = uint16_t(shape->shortid());
405 :
406 : /* Guard that the call object has a frame. */
407 948 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
408 948 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
409 :
410 : {
411 948 : Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
412 123 : ? StackFrame::offsetOfFormalArg(fun, slot)
413 1071 : : StackFrame::offsetOfFixed(slot));
414 948 : masm.storeValue(pic.u.vr, addr);
415 948 : skipOver = masm.jump();
416 : }
417 :
418 948 : escapedFrame.linkTo(masm.label(), &masm);
419 : {
420 948 : if (shape->setterOp() == CallObject::setVarOp)
421 825 : slot += fun->nargs;
422 :
423 948 : slot += CallObject::RESERVED_SLOTS;
424 948 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
425 :
426 948 : masm.storeValue(pic.u.vr, address);
427 : }
428 :
429 948 : pic.shapeRegHasBaseShape = false;
430 : }
431 :
432 5345 : Jump done = masm.jump();
433 :
434 : // Common all secondary guards into one big exit.
435 5345 : MaybeJump slowExit;
436 5345 : if (otherGuards.length()) {
437 14135 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
438 10223 : pj->linkTo(masm.label(), &masm);
439 3912 : slowExit = masm.jump();
440 3912 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
441 : } else {
442 1433 : pic.secondShapeGuard = 0;
443 : }
444 :
445 5345 : pic.updatePCCounters(f, masm);
446 :
447 10690 : PICLinker buffer(masm, pic);
448 5345 : if (!buffer.init(cx))
449 0 : return error();
450 :
451 10690 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
452 5345 : !buffer.verifyRange(f.chunk())) {
453 0 : return disable("code memory is out of range");
454 : }
455 :
456 5345 : buffer.link(shapeGuard, pic.slowPathStart);
457 5345 : if (slowExit.isSet())
458 3912 : buffer.link(slowExit.get(), pic.slowPathStart);
459 5445 : for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
460 100 : buffer.link(*pj, pic.slowPathStart);
461 5345 : buffer.link(done, pic.fastPathRejoin);
462 5345 : if (skipOver.isSet())
463 948 : buffer.link(skipOver.get(), pic.fastPathRejoin);
464 5345 : CodeLocationLabel cs = buffer.finalize(f);
465 : JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
466 : (void*)&pic,
467 : (void*)initialShape,
468 : pic.stubsGenerated,
469 5345 : cs.executableAddress());
470 :
471 : // This function can patch either the inline fast path or a generated
472 : // stub. The stub omits the prefix of the inline fast path that loads
473 : // the shape, so the offsets are different.
474 5345 : patchPreviousToHere(cs);
475 :
476 5345 : pic.stubsGenerated++;
477 5345 : pic.updateLastPath(buffer, start);
478 :
479 5345 : if (pic.stubsGenerated == MAX_PIC_STUBS)
480 12 : disable("max stubs reached");
481 :
482 5345 : return Lookup_Cacheable;
483 : }
484 :
485 443 : bool updateMonitoredTypes()
486 : {
487 443 : JS_ASSERT(pic.typeMonitored);
488 :
489 443 : RecompilationMonitor monitor(cx);
490 443 : jsid id = ATOM_TO_JSID(name);
491 :
492 443 : if (!obj->getType(cx)->unknownProperties()) {
493 798 : types::AutoEnterTypeInference enter(cx);
494 399 : types::TypeSet *types = obj->getType(cx)->getProperty(cx, types::MakeTypeId(cx, id), true);
495 399 : if (!types)
496 0 : return false;
497 798 : pic.rhsTypes->addSubset(cx, types);
498 : }
499 :
500 443 : return !monitor.recompiled();
501 : }
502 :
503 13026 : LookupStatus update()
504 : {
505 13026 : JS_ASSERT(pic.hit);
506 :
507 13026 : if (obj->isDenseArray())
508 67 : return disable("dense array");
509 12959 : if (!obj->isNative())
510 148 : return disable("non-native");
511 12811 : if (obj->watched())
512 47 : return disable("watchpoint");
513 :
514 12764 : Class *clasp = obj->getClass();
515 :
516 12764 : if (clasp->setProperty != JS_StrictPropertyStub)
517 500 : return disable("set property hook");
518 12264 : if (clasp->ops.lookupProperty)
519 4 : return disable("ops lookup property hook");
520 12260 : if (clasp->ops.setProperty)
521 0 : return disable("ops set property hook");
522 :
523 : JSObject *holder;
524 12260 : JSProperty *prop = NULL;
525 :
526 : /* lookupProperty can trigger recompilations. */
527 12260 : RecompilationMonitor monitor(cx);
528 12260 : if (!obj->lookupProperty(cx, name, &holder, &prop))
529 0 : return error();
530 12260 : if (monitor.recompiled())
531 0 : return Lookup_Uncacheable;
532 :
533 : /* If the property exists but is on a prototype, treat as addprop. */
534 12260 : if (prop && holder != obj) {
535 1042 : const Shape *shape = (const Shape *) prop;
536 :
537 1042 : if (!holder->isNative())
538 0 : return disable("non-native holder");
539 :
540 1042 : if (!shape->writable())
541 0 : return disable("readonly");
542 1042 : if (!shape->hasDefaultSetter() || !shape->hasDefaultGetter())
543 495 : return disable("getter/setter in prototype");
544 547 : if (shape->hasShortID())
545 0 : return disable("short ID in prototype");
546 547 : if (!shape->hasSlot())
547 0 : return disable("missing slot");
548 :
549 547 : prop = NULL;
550 : }
551 :
552 11765 : if (!prop) {
553 : /* Adding a property to the object. */
554 3610 : if (obj->isDelegate())
555 6 : return disable("delegate");
556 3604 : if (!obj->isExtensible())
557 0 : return disable("not extensible");
558 :
559 3604 : if (clasp->addProperty != JS_PropertyStub)
560 0 : return disable("add property hook");
561 3604 : if (clasp->ops.defineProperty)
562 0 : return disable("ops define property hook");
563 :
564 : /*
565 : * When adding a property we need to check shapes along the entire
566 : * prototype chain to watch for an added setter.
567 : */
568 3604 : JSObject *proto = obj;
569 17821 : while (proto) {
570 10617 : if (!proto->isNative())
571 4 : return disable("non-native proto");
572 10613 : proto = proto->getProto();
573 : }
574 :
575 3600 : const Shape *initialShape = obj->lastProperty();
576 3600 : uint32_t slots = obj->numDynamicSlots();
577 :
578 3600 : unsigned flags = 0;
579 3600 : PropertyOp getter = clasp->getProperty;
580 :
581 3600 : if (pic.kind == ic::PICInfo::SETMETHOD) {
582 112 : if (!obj->canHaveMethodBarrier())
583 12 : return disable("can't have method barrier");
584 :
585 100 : JSObject *funobj = &f.regs.sp[-1].toObject();
586 100 : if (funobj->toFunction()->isClonedMethod())
587 0 : return disable("mismatched function");
588 :
589 100 : flags |= Shape::METHOD;
590 : }
591 :
592 : /*
593 : * Define the property but do not set it yet. For setmethod,
594 : * populate the slot to satisfy the method invariant (in case we
595 : * hit an early return below).
596 : */
597 : const Shape *shape =
598 : obj->putProperty(cx, name, getter, clasp->setProperty,
599 3588 : SHAPE_INVALID_SLOT, JSPROP_ENUMERATE, flags, 0);
600 3588 : if (!shape)
601 0 : return error();
602 3588 : if (flags & Shape::METHOD)
603 100 : obj->nativeSetSlot(shape->slot(), f.regs.sp[-1]);
604 :
605 3588 : if (monitor.recompiled())
606 0 : return Lookup_Uncacheable;
607 :
608 : /*
609 : * Test after calling putProperty since it can switch obj into
610 : * dictionary mode, specifically if the shape tree ancestor line
611 : * exceeds PropertyTree::MAX_HEIGHT.
612 : */
613 3588 : if (obj->inDictionaryMode())
614 4 : return disable("dictionary");
615 :
616 3584 : if (!shape->hasDefaultSetter())
617 0 : return disable("adding non-default setter");
618 3584 : if (!shape->hasSlot())
619 0 : return disable("adding invalid slot");
620 :
621 : /*
622 : * Watch for cases where the object reallocated its slots when
623 : * adding the property, and disable the PIC. Otherwise we will
624 : * keep generating identical PICs as side exits are taken on the
625 : * capacity checks. Alternatively, we could avoid the disable
626 : * and just not generate a stub in case there are multiple shapes
627 : * that can flow here which don't all require reallocation.
628 : * Doing this would cause us to walk down this same update path
629 : * every time a reallocation is needed, however, which will
630 : * usually be a slowdown even if there *are* other shapes that
631 : * don't realloc.
632 : */
633 3584 : if (obj->numDynamicSlots() != slots)
634 172 : return disable("insufficient slot capacity");
635 :
636 3412 : if (pic.typeMonitored && !updateMonitoredTypes())
637 0 : return Lookup_Uncacheable;
638 :
639 3412 : return generateStub(initialShape, shape, true);
640 : }
641 :
642 8155 : const Shape *shape = (const Shape *) prop;
643 8155 : if (pic.kind == ic::PICInfo::SETMETHOD && !shape->isMethod())
644 19 : return disable("set method on non-method shape");
645 8136 : if (!shape->writable())
646 2 : return disable("readonly");
647 8134 : if (shape->isMethod())
648 24 : return disable("method");
649 :
650 8110 : if (shape->hasDefaultSetter()) {
651 7090 : if (!shape->hasSlot())
652 0 : return disable("invalid slot");
653 7090 : if (pic.typeMonitored && !updateMonitoredTypes())
654 0 : return Lookup_Uncacheable;
655 : } else {
656 1020 : if (shape->hasSetterValue())
657 41 : return disable("scripted setter");
658 1835 : if (shape->setterOp() != CallObject::setArgOp &&
659 856 : shape->setterOp() != CallObject::setVarOp) {
660 31 : return disable("setter");
661 : }
662 948 : JS_ASSERT(obj->isCall());
663 948 : if (pic.typeMonitored) {
664 : /*
665 : * Update the types of the locals/args in the script according
666 : * to the possible RHS types of the assignment. Note that the
667 : * shape guards we have performed do not by themselves
668 : * guarantee that future call objects hit will be for the same
669 : * script. We also depend on the fact that the scope chains hit
670 : * at the same bytecode are all isomorphic: the same scripts,
671 : * in the same order (though the properties on their call
672 : * objects may differ due to eval(), DEFFUN, etc.).
673 : */
674 147 : RecompilationMonitor monitor(cx);
675 147 : JSFunction *fun = obj->asCall().getCalleeFunction();
676 147 : JSScript *script = fun->script();
677 147 : uint16_t slot = uint16_t(shape->shortid());
678 147 : if (!script->ensureHasTypes(cx))
679 0 : return error();
680 : {
681 294 : types::AutoEnterTypeInference enter(cx);
682 147 : if (shape->setterOp() == CallObject::setArgOp)
683 9 : pic.rhsTypes->addSubset(cx, types::TypeScript::ArgTypes(script, slot));
684 : else
685 138 : pic.rhsTypes->addSubset(cx, types::TypeScript::LocalTypes(script, slot));
686 : }
687 147 : if (monitor.recompiled())
688 0 : return Lookup_Uncacheable;
689 : }
690 : }
691 :
692 8038 : JS_ASSERT(obj == holder);
693 28007 : if (!pic.inlinePathPatched &&
694 7406 : shape->hasDefaultSetter() &&
695 6458 : !pic.typeMonitored &&
696 6105 : !obj->isDenseArray()) {
697 6105 : return patchInline(shape);
698 : }
699 :
700 1933 : return generateStub(obj->lastProperty(), shape, false);
701 : }
702 : };
703 :
704 : static bool
705 390131 : IsCacheableProtoChain(JSObject *obj, JSObject *holder)
706 : {
707 824635 : while (obj != holder) {
708 : /*
709 : * We cannot assume that we find the holder object on the prototype
710 : * chain and must check for null proto. The prototype chain can be
711 : * altered during the lookupProperty call.
712 : */
713 44395 : JSObject *proto = obj->getProto();
714 44395 : if (!proto || !proto->isNative())
715 22 : return false;
716 44373 : obj = proto;
717 : }
718 390109 : return true;
719 : }
720 :
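/*
 * Typical use of GetPropHelper (a sketch mirroring GetPropCompiler::update):
 *
 *   GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
 *   LookupStatus status = getprop.lookupAndTest();
 *   if (status != Lookup_Cacheable)
 *       return status;
 *   // getprop.holder and getprop.shape now describe the found property.
 */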
721 : template <typename IC>
722 : struct GetPropHelper {
723 : // These fields are set in the constructor and describe a property lookup.
724 : JSContext *cx;
725 : JSObject *obj;
726 : PropertyName *name;
727 : IC &ic;
728 : VMFrame &f;
729 :
730 : // These fields are set by |bind| and |lookup|. After a call to either
731 : // function, these are set exactly as they are in JSOP_GETPROP or JSOP_NAME.
732 : JSObject *aobj;
733 : JSObject *holder;
734 : JSProperty *prop;
735 :
736 : // This field is set by |bind| and |lookup| only if they returned
737 : // Lookup_Cacheable, otherwise it is NULL.
738 : const Shape *shape;
739 :
740 392517 : GetPropHelper(JSContext *cx, JSObject *obj, PropertyName *name, IC &ic, VMFrame &f)
741 392517 : : cx(cx), obj(obj), name(name), ic(ic), f(f), holder(NULL), prop(NULL), shape(NULL)
742 392517 : { }
743 :
744 : public:
745 181743 : LookupStatus bind() {
746 181743 : RecompilationMonitor monitor(cx);
747 181743 : JSObject *scopeChain = cx->stack.currentScriptedScopeChain();
748 181743 : if (js_CodeSpec[*f.pc()].format & JOF_GNAME)
749 0 : scopeChain = &scopeChain->global();
750 181743 : if (!FindProperty(cx, name, scopeChain, &obj, &holder, &prop))
751 0 : return ic.error(cx);
752 181743 : if (monitor.recompiled())
753 2 : return Lookup_Uncacheable;
754 181741 : if (!prop)
755 523 : return ic.disable(cx, "lookup failed");
756 181218 : if (!obj->isNative())
757 0 : return ic.disable(cx, "non-native");
758 181218 : if (!IsCacheableProtoChain(obj, holder))
759 4 : return ic.disable(cx, "non-native holder");
760 181214 : shape = (const Shape *)prop;
761 181214 : return Lookup_Cacheable;
762 : }
763 :
764 210774 : LookupStatus lookup() {
765 210774 : JSObject *aobj = js_GetProtoIfDenseArray(obj);
766 210774 : if (!aobj->isNative())
767 797 : return ic.disable(f, "non-native");
768 :
769 209977 : RecompilationMonitor monitor(cx);
770 209977 : if (!aobj->lookupProperty(cx, name, &holder, &prop))
771 0 : return ic.error(cx);
772 209977 : if (monitor.recompiled())
773 0 : return Lookup_Uncacheable;
774 :
775 209977 : if (!prop)
776 1064 : return ic.disable(f, "lookup failed");
777 208913 : if (!IsCacheableProtoChain(obj, holder))
778 18 : return ic.disable(f, "non-native holder");
779 208895 : shape = (const Shape *)prop;
780 208895 : return Lookup_Cacheable;
781 : }
782 :
783 378755 : LookupStatus testForGet() {
784 378755 : if (!shape->hasDefaultGetter()) {
785 12632 : if (shape->isMethod()) {
786 2271 : if (JSOp(*f.pc()) != JSOP_CALLPROP)
787 137 : return ic.disable(f, "method valued shape");
788 : } else {
789 10361 : if (shape->hasGetterValue())
790 8420 : return ic.disable(f, "getter value shape");
791 1941 : if (shape->hasSlot() && holder != obj)
792 0 : return ic.disable(f, "slotful getter hook through prototype");
793 1941 : if (!ic.canCallHook)
794 562 : return ic.disable(f, "can't call getter hook");
795 1379 : if (f.regs.inlined()) {
796 : /*
797 : * As with native stubs, getter hook stubs can't be
798 : * generated for inline frames. Mark the inner function
799 : * as uninlineable and recompile.
800 : */
801 0 : f.script()->uninlineable = true;
802 0 : MarkTypeObjectFlags(cx, f.script()->function(),
803 : types::OBJECT_FLAG_UNINLINEABLE);
804 0 : return Lookup_Uncacheable;
805 : }
806 : }
807 366123 : } else if (!shape->hasSlot()) {
808 29 : return ic.disable(f, "no slot");
809 : }
810 :
811 369607 : return Lookup_Cacheable;
812 : }
813 :
814 210391 : LookupStatus lookupAndTest() {
815 210391 : LookupStatus status = lookup();
816 210391 : if (status != Lookup_Cacheable)
817 1879 : return status;
818 208512 : return testForGet();
819 : }
820 : };
821 :
822 : class GetPropCompiler : public PICStubCompiler
823 : {
824 : JSObject *obj;
825 : PropertyName *name;
826 : int lastStubSecondShapeGuard;
827 :
828 : public:
829 237889 : GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
830 : VoidStubPIC stub)
831 : : PICStubCompiler("getprop", f, script, pic,
832 : JS_FUNC_TO_DATA_PTR(void *, stub)),
833 : obj(obj),
834 : name(name),
835 237889 : lastStubSecondShapeGuard(pic.secondShapeGuard)
836 237889 : { }
837 :
838 62303 : int getLastStubSecondShapeGuard() const {
839 62303 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
840 : }
841 :
842 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
843 : {
844 : GetPropLabels &labels = pic.getPropLabels();
845 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin));
846 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), NULL);
847 : repatcher.relink(labels.getInlineShapeJump(pic.getFastShapeGuard()), pic.slowPathStart);
848 :
849 : if (pic.hasTypeCheck()) {
850 : /* TODO: combine pic.u.get into ICLabels? */
851 : repatcher.relink(labels.getInlineTypeJump(pic.fastPathStart), pic.getSlowTypeCheck());
852 : }
853 :
854 : JS_ASSERT(pic.kind == ic::PICInfo::GET);
855 :
856 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::GetProp));
857 : repatcher.relink(pic.slowPathCall, target);
858 : }
859 :
860 483 : LookupStatus generateArgsLengthStub()
861 : {
862 966 : Assembler masm;
863 :
864 483 : Jump notArgs = masm.guardShape(pic.objReg, obj);
865 :
866 483 : masm.load32(Address(pic.objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)), pic.objReg);
867 483 : masm.move(pic.objReg, pic.shapeReg);
868 : Jump overridden = masm.branchTest32(Assembler::NonZero, pic.shapeReg,
869 483 : Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT));
870 483 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), pic.objReg);
871 :
872 483 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
873 483 : Jump done = masm.jump();
874 :
875 483 : pic.updatePCCounters(f, masm);
876 :
877 966 : PICLinker buffer(masm, pic);
878 483 : if (!buffer.init(cx))
879 0 : return error();
880 :
881 966 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
882 483 : !buffer.verifyRange(f.chunk())) {
883 0 : return disable("code memory is out of range");
884 : }
885 :
886 483 : buffer.link(notArgs, pic.slowPathStart);
887 483 : buffer.link(overridden, pic.slowPathStart);
888 483 : buffer.link(done, pic.fastPathRejoin);
889 :
890 483 : CodeLocationLabel start = buffer.finalize(f);
891 : JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
892 483 : start.executableAddress());
893 :
894 483 : patchPreviousToHere(start);
895 :
896 483 : disable("args length done");
897 :
898 483 : return Lookup_Cacheable;
899 : }
900 :
901 7517 : LookupStatus generateArrayLengthStub()
902 : {
903 15034 : Assembler masm;
904 :
905 7517 : masm.loadObjClass(pic.objReg, pic.shapeReg);
906 7517 : Jump isDense = masm.testClass(Assembler::Equal, pic.shapeReg, &ArrayClass);
907 7517 : Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &SlowArrayClass);
908 :
909 7517 : isDense.linkTo(masm.label(), &masm);
910 7517 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfElements()), pic.objReg);
911 7517 : masm.load32(Address(pic.objReg, ObjectElements::offsetOfLength()), pic.objReg);
912 7517 : Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
913 7517 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
914 7517 : Jump done = masm.jump();
915 :
916 7517 : pic.updatePCCounters(f, masm);
917 :
918 15034 : PICLinker buffer(masm, pic);
919 7517 : if (!buffer.init(cx))
920 0 : return error();
921 :
922 15034 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
923 7517 : !buffer.verifyRange(f.chunk())) {
924 0 : return disable("code memory is out of range");
925 : }
926 :
927 7517 : buffer.link(notArray, pic.slowPathStart);
928 7517 : buffer.link(oob, pic.slowPathStart);
929 7517 : buffer.link(done, pic.fastPathRejoin);
930 :
931 7517 : CodeLocationLabel start = buffer.finalize(f);
932 : JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
933 7517 : start.executableAddress());
934 :
935 7517 : patchPreviousToHere(start);
936 :
937 7517 : disable("array length done");
938 :
939 7517 : return Lookup_Cacheable;
940 : }
941 :
942 69 : LookupStatus generateStringObjLengthStub()
943 : {
944 138 : Assembler masm;
945 :
946 69 : Jump notStringObj = masm.guardShape(pic.objReg, obj);
947 :
948 69 : masm.loadPayload(Address(pic.objReg, StringObject::getPrimitiveValueOffset()), pic.objReg);
949 69 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
950 69 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
951 69 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
952 69 : Jump done = masm.jump();
953 :
954 69 : pic.updatePCCounters(f, masm);
955 :
956 138 : PICLinker buffer(masm, pic);
957 69 : if (!buffer.init(cx))
958 0 : return error();
959 :
960 138 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
961 69 : !buffer.verifyRange(f.chunk())) {
962 0 : return disable("code memory is out of range");
963 : }
964 :
965 69 : buffer.link(notStringObj, pic.slowPathStart);
966 69 : buffer.link(done, pic.fastPathRejoin);
967 :
968 69 : CodeLocationLabel start = buffer.finalize(f);
969 : JaegerSpew(JSpew_PICs, "generate string object length stub at %p\n",
970 69 : start.executableAddress());
971 :
972 69 : patchPreviousToHere(start);
973 :
974 69 : disable("string object length done");
975 :
976 69 : return Lookup_Cacheable;
977 : }
978 :
979 19531 : LookupStatus generateStringPropertyStub()
980 : {
981 19531 : if (!f.fp()->script()->hasGlobal())
982 18693 : return disable("String.prototype without compile-and-go global");
983 :
984 838 : RecompilationMonitor monitor(f.cx);
985 :
986 838 : JSObject *obj = f.fp()->scopeChain().global().getOrCreateStringPrototype(f.cx);
987 838 : if (!obj)
988 0 : return error();
989 :
990 838 : if (monitor.recompiled())
991 7 : return Lookup_Uncacheable;
992 :
993 831 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
994 831 : LookupStatus status = getprop.lookupAndTest();
995 831 : if (status != Lookup_Cacheable)
996 8 : return status;
997 823 : if (getprop.obj != getprop.holder)
998 1 : return disable("proto walk on String.prototype");
999 822 : if (!getprop.shape->hasDefaultGetterOrIsMethod())
1000 0 : return disable("getter hook on String.prototype");
1001 822 : if (hadGC())
1002 0 : return Lookup_Uncacheable;
1003 :
1004 1644 : Assembler masm;
1005 :
1006 : /* Only strings are allowed. */
1007 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
1008 822 : ImmType(JSVAL_TYPE_STRING));
1009 :
1010 : /*
1011 : * Clobber objReg with String.prototype and do some PIC stuff. Well,
1012 : * really this is now a MIC, except it won't ever be patched, so we
1013 : * just disable the PIC at the end. :FIXME:? String.prototype probably
1014 : * does not get random shape changes.
1015 : */
1016 822 : masm.move(ImmPtr(obj), pic.objReg);
1017 822 : masm.loadShape(pic.objReg, pic.shapeReg);
1018 : Jump shapeMismatch = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1019 822 : ImmPtr(obj->lastProperty()));
1020 822 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
1021 :
1022 822 : Jump done = masm.jump();
1023 :
1024 822 : pic.updatePCCounters(f, masm);
1025 :
1026 1644 : PICLinker buffer(masm, pic);
1027 822 : if (!buffer.init(cx))
1028 0 : return error();
1029 :
1030 1644 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1031 822 : !buffer.verifyRange(f.chunk())) {
1032 0 : return disable("code memory is out of range");
1033 : }
1034 :
1035 822 : buffer.link(notString, pic.getSlowTypeCheck());
1036 822 : buffer.link(shapeMismatch, pic.slowPathStart);
1037 822 : buffer.link(done, pic.fastPathRejoin);
1038 :
1039 822 : CodeLocationLabel cs = buffer.finalize(f);
1040 : JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
1041 822 : cs.executableAddress());
1042 :
1043 : /* Patch the type check to jump here. */
1044 822 : if (pic.hasTypeCheck()) {
1045 1644 : Repatcher repatcher(f.chunk());
1046 822 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), cs);
1047 : }
1048 :
1049 : /* Disable the PIC so we don't keep generating stubs on the above shape mismatch. */
1050 822 : disable("generated string call stub");
1051 822 : return Lookup_Cacheable;
1052 : }
1053 :
1054 6866 : LookupStatus generateStringLengthStub()
1055 : {
1056 6866 : JS_ASSERT(pic.hasTypeCheck());
1057 :
1058 13732 : Assembler masm;
1059 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
1060 6866 : ImmType(JSVAL_TYPE_STRING));
1061 6866 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
1062 : // String length is guaranteed to be no more than 2**28, so the 32-bit operation is OK.
1063 6866 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
1064 6866 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
1065 6866 : Jump done = masm.jump();
1066 :
1067 6866 : pic.updatePCCounters(f, masm);
1068 :
1069 13732 : PICLinker buffer(masm, pic);
1070 6866 : if (!buffer.init(cx))
1071 0 : return error();
1072 :
1073 13732 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1074 6866 : !buffer.verifyRange(f.chunk())) {
1075 0 : return disable("code memory is out of range");
1076 : }
1077 :
1078 6866 : buffer.link(notString, pic.getSlowTypeCheck());
1079 6866 : buffer.link(done, pic.fastPathRejoin);
1080 :
1081 6866 : CodeLocationLabel start = buffer.finalize(f);
1082 : JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
1083 6866 : start.executableAddress());
1084 :
1085 6866 : if (pic.hasTypeCheck()) {
1086 13732 : Repatcher repatcher(f.chunk());
1087 6866 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), start);
1088 : }
1089 :
1090 6866 : disable("generated string length stub");
1091 :
1092 6866 : return Lookup_Cacheable;
1093 : }
1094 :
1095 138873 : LookupStatus patchInline(JSObject *holder, const Shape *shape)
1096 : {
1097 138873 : spew("patch", "inline");
1098 277746 : Repatcher repatcher(f.chunk());
1099 138873 : GetPropLabels &labels = pic.getPropLabels();
1100 :
1101 : int32_t offset;
1102 138873 : if (holder->isFixedSlot(shape->slot())) {
1103 96847 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin);
1104 96847 : repatcher.repatchLoadPtrToLEA(istr);
1105 :
1106 : //
1107 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
1108 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
1109 : //
1110 : // Because the offset is wrong, it's necessary to correct it
1111 : // below.
1112 : //
1113 96847 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
1114 96847 : int32_t(JSObject::offsetOfSlots());
1115 96847 : JS_ASSERT(diff != 0);
1116 96847 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
1117 : } else {
1118 42026 : offset = holder->dynamicSlotIndex(shape->slot()) * sizeof(Value);
1119 : }
1120 :
1121 138873 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), obj->lastProperty());
1122 138873 : repatcher.patchAddressOffsetForValueLoad(labels.getValueLoad(pic.fastPathRejoin), offset);
1123 :
1124 138873 : pic.inlinePathPatched = true;
1125 :
1126 138873 : return Lookup_Cacheable;
1127 : }
1128 :
1129 1379 : void generateGetterStub(Assembler &masm, const Shape *shape,
1130 : Label start, Vector<Jump, 8> &shapeMismatches)
1131 : {
1132 : /*
1133 : * Getter hook needs to be called from the stub. The state is fully
1134 : * synced and no registers are live except the result registers.
1135 : */
1136 1379 : JS_ASSERT(pic.canCallHook);
1137 1379 : PropertyOp getter = shape->getterOp();
1138 :
1139 : masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER),
1140 1379 : FrameAddress(offsetof(VMFrame, stubRejoin)));
1141 :
1142 1379 : Registers tempRegs = Registers::tempCallRegMask();
1143 1379 : if (tempRegs.hasReg(Registers::ClobberInCall))
1144 1379 : tempRegs.takeReg(Registers::ClobberInCall);
1145 :
1146 : /* Get a register to hold obj while we set up the rest of the frame. */
1147 1379 : RegisterID holdObjReg = pic.objReg;
1148 1379 : if (tempRegs.hasReg(pic.objReg)) {
1149 1219 : tempRegs.takeReg(pic.objReg);
1150 : } else {
1151 160 : holdObjReg = tempRegs.takeAnyReg().reg();
1152 160 : masm.move(pic.objReg, holdObjReg);
1153 : }
1154 :
1155 1379 : RegisterID t0 = tempRegs.takeAnyReg().reg();
1156 1379 : masm.bumpStubCounter(f.script(), f.pc(), t0);
1157 :
1158 : /*
1159 : * Initialize vp, which is either a slot in the object (the holder,
1160 : * actually, which must equal the object here) or undefined.
1161 : * Use vp == sp (which for CALLPROP will actually be the original
1162 : * sp + 1), to avoid clobbering stack values.
1163 : */
1164 1379 : int32_t vpOffset = (char *) f.regs.sp - (char *) f.fp();
1165 1379 : if (shape->hasSlot()) {
1166 : masm.loadObjProp(obj, holdObjReg, shape,
1167 46 : Registers::ClobberInCall, t0);
1168 46 : masm.storeValueFromComponents(Registers::ClobberInCall, t0, Address(JSFrameReg, vpOffset));
1169 : } else {
1170 1333 : masm.storeValue(UndefinedValue(), Address(JSFrameReg, vpOffset));
1171 : }
1172 :
1173 1379 : int32_t initialFrameDepth = f.regs.sp - f.fp()->slots();
1174 1379 : masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, initialFrameDepth);
1175 :
1176 : /* Grab cx. */
1177 : #ifdef JS_CPU_X86
1178 1379 : RegisterID cxReg = tempRegs.takeAnyReg().reg();
1179 : #else
1180 : RegisterID cxReg = Registers::ArgReg0;
1181 : #endif
1182 1379 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
1183 :
1184 : /* Grab vp. */
1185 1379 : RegisterID vpReg = t0;
1186 1379 : masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
1187 :
1188 1379 : masm.restoreStackBase();
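        /*
         * The argument order below follows the PropertyOp getter calling
         * convention as used here: (cx, obj, id, vp).
         */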
1189 1379 : masm.setupABICall(Registers::NormalCall, 4);
1190 1379 : masm.storeArg(3, vpReg);
1191 1379 : masm.storeArg(2, ImmPtr((void *) JSID_BITS(shape->getUserId())));
1192 1379 : masm.storeArg(1, holdObjReg);
1193 1379 : masm.storeArg(0, cxReg);
1194 :
1195 1379 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, getter), false);
1196 :
1197 1379 : NativeStubLinker::FinalJump done;
1198 1379 : if (!NativeStubEpilogue(f, masm, &done, 0, vpOffset, pic.shapeReg, pic.objReg))
1199 0 : return;
1200 2758 : NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
1201 1379 : if (!linker.init(f.cx))
1202 0 : THROW();
1203 :
1204 2758 : if (!linker.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1205 1379 : !linker.verifyRange(f.chunk())) {
1206 0 : disable("code memory is out of range");
1207 : return;
1208 : }
1209 :
1210 1379 : linker.patchJump(pic.fastPathRejoin);
1211 :
1212 1379 : linkerEpilogue(linker, start, shapeMismatches);
1213 : }
1214 :
1215 54234 : LookupStatus generateStub(JSObject *holder, const Shape *shape)
1216 : {
1217 108468 : Vector<Jump, 8> shapeMismatches(cx);
1218 :
1219 108468 : Assembler masm;
1220 :
1221 54234 : Label start;
1222 54234 : Jump shapeGuardJump;
1223 54234 : Jump argsLenGuard;
1224 :
1225 54234 : bool setStubShapeOffset = true;
1226 54234 : if (obj->isDenseArray()) {
1227 8139 : start = masm.label();
1228 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual,
1229 8139 : Address(pic.objReg, JSObject::offsetOfShape()),
1230 16278 : ImmPtr(obj->lastProperty()));
1231 :
1232 : /*
1233 : * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
1234 : * the IC is disabled after a dense array hit, so no patching can occur.
1235 : */
1236 : #ifndef JS_HAS_IC_LABELS
1237 8139 : setStubShapeOffset = false;
1238 : #endif
1239 : } else {
1240 46095 : if (pic.shapeNeedsRemat()) {
1241 11282 : masm.loadShape(pic.objReg, pic.shapeReg);
1242 11282 : pic.shapeRegHasBaseShape = true;
1243 : }
1244 :
1245 46095 : start = masm.label();
1246 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1247 46095 : ImmPtr(obj->lastProperty()));
1248 : }
1249 54234 : Label stubShapeJumpLabel = masm.label();
1250 :
1251 54234 : if (!shapeMismatches.append(shapeGuardJump))
1252 0 : return error();
1253 :
1254 54234 : RegisterID holderReg = pic.objReg;
1255 54234 : if (obj != holder) {
1256 33863 : if (!GeneratePrototypeGuards(cx, shapeMismatches, masm, obj, holder,
1257 33863 : pic.objReg, pic.shapeReg)) {
1258 0 : return error();
1259 : }
1260 :
1261 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
1262 33863 : holderReg = pic.shapeReg;
1263 33863 : masm.move(ImmPtr(holder), holderReg);
1264 33863 : pic.shapeRegHasBaseShape = false;
1265 :
1266 : // Guard on the holder's shape.
1267 33863 : Jump j = masm.guardShape(holderReg, holder);
1268 33863 : if (!shapeMismatches.append(j))
1269 0 : return error();
1270 :
1271 33863 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
1272 : } else {
1273 20371 : pic.secondShapeGuard = 0;
1274 : }
1275 :
1276 54234 : if (!shape->hasDefaultGetterOrIsMethod()) {
1277 1379 : generateGetterStub(masm, shape, start, shapeMismatches);
1278 1379 : if (setStubShapeOffset)
1279 1379 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1280 1379 : return Lookup_Cacheable;
1281 : }
1282 :
1283 : /* Load the value out of the object. */
1284 52855 : masm.loadObjProp(holder, holderReg, shape, pic.shapeReg, pic.objReg);
1285 52855 : Jump done = masm.jump();
1286 :
1287 52855 : pic.updatePCCounters(f, masm);
1288 :
1289 105710 : PICLinker buffer(masm, pic);
1290 52855 : if (!buffer.init(cx))
1291 0 : return error();
1292 :
1293 105710 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1294 52855 : !buffer.verifyRange(f.chunk())) {
1295 0 : return disable("code memory is out of range");
1296 : }
1297 :
1298 : // The final exit jumps to the store-back in the inline stub.
1299 52855 : buffer.link(done, pic.fastPathRejoin);
1300 :
1301 52855 : linkerEpilogue(buffer, start, shapeMismatches);
1302 :
1303 52855 : if (setStubShapeOffset)
1304 44716 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1305 52855 : return Lookup_Cacheable;
1306 : }
1307 :
1308 54234 : void linkerEpilogue(LinkerHelper &buffer, Label start, Vector<Jump, 8> &shapeMismatches)
1309 : {
1310 : // The guard exit jumps to the original slow case.
1311 145131 : for (Jump *pj = shapeMismatches.begin(); pj != shapeMismatches.end(); ++pj)
1312 90897 : buffer.link(*pj, pic.slowPathStart);
1313 :
1314 54234 : CodeLocationLabel cs = buffer.finalize(f);
1315 54234 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1316 :
1317 54234 : patchPreviousToHere(cs);
1318 :
1319 54234 : pic.stubsGenerated++;
1320 54234 : pic.updateLastPath(buffer, start);
1321 :
1322 54234 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1323 759 : disable("max stubs reached");
1324 54234 : if (obj->isDenseArray())
1325 8139 : disable("dense array");
1326 54234 : }
1327 :
1328 62303 : void patchPreviousToHere(CodeLocationLabel cs)
1329 : {
1330 124606 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1331 62303 : CodeLocationLabel label = pic.lastPathStart();
1332 :
1333 : // Patch either the inline fast path or a generated stub. The stub
1334 : // omits the prefix of the inline fast path that loads the shape, so
1335 : // the offsets are different.
1336 : int shapeGuardJumpOffset;
1337 62303 : if (pic.stubsGenerated)
1338 23345 : shapeGuardJumpOffset = pic.getPropLabels().getStubShapeJumpOffset();
1339 : else
1340 38958 : shapeGuardJumpOffset = pic.shapeGuard + pic.getPropLabels().getInlineShapeJumpOffset();
1341 62303 : int secondGuardOffset = getLastStubSecondShapeGuard();
1342 :
1343 : JaegerSpew(JSpew_PICs, "Patching previous (%d stubs) (start %p) (offset %d) (second %d)\n",
1344 : (int) pic.stubsGenerated, label.executableAddress(),
1345 62303 : shapeGuardJumpOffset, secondGuardOffset);
1346 :
1347 62303 : repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
1348 62303 : if (secondGuardOffset)
1349 11282 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
1350 62303 : }
1351 :
1352 203423 : LookupStatus update()
1353 : {
1354 203423 : JS_ASSERT(pic.hit);
1355 :
1356 203423 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
1357 203423 : LookupStatus status = getprop.lookupAndTest();
1358 203423 : if (status != Lookup_Cacheable)
1359 10316 : return status;
1360 193107 : if (hadGC())
1361 0 : return Lookup_Uncacheable;
1362 :
1363 511416 : if (obj == getprop.holder &&
1364 159244 : getprop.shape->hasDefaultGetterOrIsMethod() &&
1365 159065 : !pic.inlinePathPatched) {
1366 138873 : return patchInline(getprop.holder, getprop.shape);
1367 : }
1368 :
1369 54234 : return generateStub(getprop.holder, getprop.shape);
1370 : }
1371 : };
1372 :
1373 : class ScopeNameCompiler : public PICStubCompiler
1374 : {
1375 : private:
1376 : typedef Vector<Jump, 8> JumpList;
1377 :
1378 : JSObject *scopeChain;
1379 : PropertyName *name;
1380 : GetPropHelper<ScopeNameCompiler> getprop;
1381 182126 : ScopeNameCompiler *thisFromCtor() { return this; }
1382 :
1383 180283 : void patchPreviousToHere(CodeLocationLabel cs)
1384 : {
1385 180283 : ScopeNameLabels & labels = pic.scopeNameLabels();
1386 360566 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1387 180283 : CodeLocationLabel start = pic.lastPathStart();
1388 180283 : JSC::CodeLocationJump jump;
1389 :
1390 : // Patch either the inline fast path or a generated stub.
1391 180283 : if (pic.stubsGenerated)
1392 8336 : jump = labels.getStubJump(start);
1393 : else
1394 171947 : jump = labels.getInlineJump(start);
1395 180283 : repatcher.relink(jump, cs);
1396 180283 : }
1397 :
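    /*
     * Sketch of the walk below: emit a shape guard for each object between
     * |scopeChain| and the property's holder, loading each enclosing scope
     * into objReg as we go and appending a mismatch jump to |fails| for
     * every intervening object.
     */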
1398 180383 : LookupStatus walkScopeChain(Assembler &masm, JumpList &fails)
1399 : {
1400 : /* Walk the scope chain. */
1401 180383 : JSObject *tobj = scopeChain;
1402 :
1403 : /* For GETXPROP, we'll never enter this loop. */
1404 180383 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, tobj && tobj == getprop.holder);
1405 180383 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, getprop.obj == tobj);
1406 :
1407 392605 : while (tobj && tobj != getprop.holder) {
1408 31939 : if (!IsCacheableNonGlobalScope(tobj))
1409 100 : return disable("non-cacheable scope chain object");
1410 31839 : JS_ASSERT(tobj->isNative());
1411 :
1412 : /* Guard on intervening shapes. */
1413 31839 : masm.loadShape(pic.objReg, pic.shapeReg);
1414 : Jump j = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1415 31839 : ImmPtr(tobj->lastProperty()));
1416 31839 : if (!fails.append(j))
1417 0 : return error();
1418 :
1419 : /* Load the next link in the scope chain. */
1420 31839 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1421 31839 : masm.loadPayload(parent, pic.objReg);
1422 :
1423 31839 : tobj = &tobj->asScope().enclosingScope();
1424 : }
1425 :
1426 180283 : if (tobj != getprop.holder)
1427 0 : return disable("scope chain walk terminated early");
1428 :
1429 180283 : return Lookup_Cacheable;
1430 : }
1431 :
1432 : public:
1433 182126 : ScopeNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1434 : PropertyName *name, VoidStubPIC stub)
1435 : : PICStubCompiler("name", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1436 : scopeChain(scopeChain), name(name),
1437 182126 : getprop(f.cx, NULL, name, *thisFromCtor(), f)
1438 182126 : { }
1439 :
1440 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1441 : {
1442 : ScopeNameLabels &labels = pic.scopeNameLabels();
1443 :
1444 : /* Link the inline path back to the slow path. */
1445 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.fastPathStart);
1446 : repatcher.relink(inlineJump, pic.slowPathStart);
1447 :
1448 : VoidStubPIC stub;
1449 : switch (pic.kind) {
1450 : case ic::PICInfo::NAME:
1451 : stub = ic::Name;
1452 : break;
1453 : case ic::PICInfo::XNAME:
1454 : stub = ic::XName;
1455 : break;
1456 : default:
1457 : JS_NOT_REACHED("Invalid pic kind in ScopeNameCompiler::reset");
1458 : return;
1459 : }
1460 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
1461 : repatcher.relink(pic.slowPathCall, target);
1462 : }
1463 :
1464 169493 : LookupStatus generateGlobalStub(JSObject *obj)
1465 : {
1466 338986 : Assembler masm;
1467 338986 : JumpList fails(cx);
1468 169493 : ScopeNameLabels &labels = pic.scopeNameLabels();
1469 :
1470 : /* For GETXPROP, the object is already in objReg. */
1471 169493 : if (pic.kind == ic::PICInfo::NAME)
1472 169483 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1473 :
1474 169493 : JS_ASSERT(obj == getprop.holder);
1475 169493 : JS_ASSERT(getprop.holder == &scopeChain->global());
1476 :
1477 169493 : LookupStatus status = walkScopeChain(masm, fails);
1478 169493 : if (status != Lookup_Cacheable)
1479 60 : return status;
1480 :
1481 : /* If a scope chain walk was required, the final object needs a NULL test. */
1482 169433 : MaybeJump finalNull;
1483 169433 : if (pic.kind == ic::PICInfo::NAME)
1484 169423 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1485 169433 : masm.loadShape(pic.objReg, pic.shapeReg);
1486 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1487 169433 : ImmPtr(getprop.holder->lastProperty()));
1488 :
1489 169433 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
1490 :
1491 169433 : Jump done = masm.jump();
1492 :
1493 : /* All failures flow to here, so there is a common point to patch. */
1494 190349 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1495 20916 : pj->linkTo(masm.label(), &masm);
1496 169433 : if (finalNull.isSet())
1497 169423 : finalNull.get().linkTo(masm.label(), &masm);
1498 169433 : finalShape.linkTo(masm.label(), &masm);
1499 169433 : Label failLabel = masm.label();
1500 169433 : Jump failJump = masm.jump();
1501 :
1502 169433 : pic.updatePCCounters(f, masm);
1503 :
1504 338866 : PICLinker buffer(masm, pic);
1505 169433 : if (!buffer.init(cx))
1506 0 : return error();
1507 :
1508 338866 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1509 169433 : !buffer.verifyRange(f.chunk())) {
1510 0 : return disable("code memory is out of range");
1511 : }
1512 :
1513 169433 : buffer.link(failJump, pic.slowPathStart);
1514 169433 : buffer.link(done, pic.fastPathRejoin);
1515 169433 : CodeLocationLabel cs = buffer.finalize(f);
1516 169433 : JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
1517 169433 : spew("NAME stub", "global");
1518 :
1519 169433 : patchPreviousToHere(cs);
1520 :
1521 169433 : pic.stubsGenerated++;
1522 169433 : pic.updateLastPath(buffer, failLabel);
1523 169433 : labels.setStubJump(masm, failLabel, failJump);
1524 :
1525 169433 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1526 24 : disable("max stubs reached");
1527 :
1528 169433 : return Lookup_Cacheable;
1529 : }
1530 :
1531 : enum CallObjPropKind {
1532 : ARG,
1533 : VAR
1534 : };
1535 :
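     : // Generate a stub for a name that resolves to an argument or variable of a
     : // Call object. If the frame that created the Call object is still live (its
     : // private is non-NULL), the value is read from the StackFrame; otherwise it
     : // is read from the Call object's reserved slots.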
1536 11310 : LookupStatus generateCallStub(JSObject *obj)
1537 : {
1538 22620 : Assembler masm;
1539 22620 : Vector<Jump, 8> fails(cx);
1540 11310 : ScopeNameLabels &labels = pic.scopeNameLabels();
1541 :
1542 : /* For GETXPROP, the object is already in objReg. */
1543 11310 : if (pic.kind == ic::PICInfo::NAME)
1544 10945 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1545 :
1546 11310 : JS_ASSERT(obj == getprop.holder);
1547 11310 : JS_ASSERT(getprop.holder != &scopeChain->global());
1548 :
1549 : CallObjPropKind kind;
1550 11310 : const Shape *shape = getprop.shape;
1551 11310 : if (shape->getterOp() == CallObject::getArgOp) {
1552 2807 : kind = ARG;
1553 8503 : } else if (shape->getterOp() == CallObject::getVarOp) {
1554 8083 : kind = VAR;
1555 : } else {
1556 420 : return disable("unhandled callobj sprop getter");
1557 : }
1558 :
1559 10890 : LookupStatus status = walkScopeChain(masm, fails);
1560 10890 : if (status != Lookup_Cacheable)
1561 40 : return status;
1562 :
1563 : /* If a scope chain walk was required, the final object needs a NULL test. */
1564 10850 : MaybeJump finalNull;
1565 10850 : if (pic.kind == ic::PICInfo::NAME)
1566 10485 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1567 10850 : masm.loadShape(pic.objReg, pic.shapeReg);
1568 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1569 10850 : ImmPtr(getprop.holder->lastProperty()));
1570 :
1571 : /* Get callobj's stack frame. */
1572 10850 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
1573 :
1574 10850 : JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
1575 10850 : uint16_t slot = uint16_t(shape->shortid());
1576 :
1577 10850 : Jump skipOver;
1578 10850 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
1579 :
1580 : /* Not-escaped case. */
1581 : {
1582 2793 : Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
1583 13643 : : StackFrame::offsetOfFixed(slot));
1584 10850 : masm.loadPayload(addr, pic.objReg);
1585 10850 : masm.loadTypeTag(addr, pic.shapeReg);
1586 10850 : skipOver = masm.jump();
1587 : }
1588 :
1589 10850 : escapedFrame.linkTo(masm.label(), &masm);
1590 :
1591 : {
1592 10850 : if (kind == VAR)
1593 8057 : slot += fun->nargs;
1594 :
1595 10850 : slot += CallObject::RESERVED_SLOTS;
1596 10850 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
1597 :
1598 : /* Safe: the type is loaded into shapeReg before the payload clobbers objReg, the address base. */
1599 10850 : masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
1600 : }
1601 :
1602 10850 : skipOver.linkTo(masm.label(), &masm);
1603 10850 : Jump done = masm.jump();
1604 :
1605 : // All failures flow to here, so there is a common point to patch.
1606 21765 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1607 10915 : pj->linkTo(masm.label(), &masm);
1608 10850 : if (finalNull.isSet())
1609 10485 : finalNull.get().linkTo(masm.label(), &masm);
1610 10850 : finalShape.linkTo(masm.label(), &masm);
1611 10850 : Label failLabel = masm.label();
1612 10850 : Jump failJump = masm.jump();
1613 :
1614 10850 : pic.updatePCCounters(f, masm);
1615 :
1616 21700 : PICLinker buffer(masm, pic);
1617 10850 : if (!buffer.init(cx))
1618 0 : return error();
1619 :
1620 21700 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1621 10850 : !buffer.verifyRange(f.chunk())) {
1622 0 : return disable("code memory is out of range");
1623 : }
1624 :
1625 10850 : buffer.link(failJump, pic.slowPathStart);
1626 10850 : buffer.link(done, pic.fastPathRejoin);
1627 10850 : CodeLocationLabel cs = buffer.finalize(f);
1628 10850 : JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
1629 :
1630 10850 : patchPreviousToHere(cs);
1631 :
1632 10850 : pic.stubsGenerated++;
1633 10850 : pic.updateLastPath(buffer, failLabel);
1634 10850 : labels.setStubJump(masm, failLabel, failJump);
1635 :
1636 10850 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1637 8 : disable("max stubs reached");
1638 :
1639 10850 : return Lookup_Cacheable;
1640 : }
1641 :
1642 181743 : LookupStatus updateForName()
1643 : {
1644 : // |getprop.obj| is filled by bind()
1645 181743 : LookupStatus status = getprop.bind();
1646 181743 : if (status != Lookup_Cacheable)
1647 529 : return status;
1648 :
1649 181214 : return update(getprop.obj);
1650 : }
1651 :
1652 383 : LookupStatus updateForXName()
1653 : {
1654 : // |obj| and |getprop.obj| start out NULL; for XNAME they should be the given scopeChain.

1655 383 : getprop.obj = scopeChain;
1656 383 : LookupStatus status = getprop.lookup();
1657 383 : if (status != Lookup_Cacheable)
1658 0 : return status;
1659 :
1660 383 : return update(getprop.obj);
1661 : }
1662 :
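     : // Dispatch to the appropriate stub generator for the object the name was
     : // found on: a Call object or the global. Anything else is left to the slow
     : // path by disabling the IC.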
1663 181597 : LookupStatus update(JSObject *obj)
1664 : {
1665 181597 : if (obj != getprop.holder)
1666 44 : return disable("property is on proto of a scope object");
1667 :
1668 181553 : if (obj->isCall())
1669 11310 : return generateCallStub(obj);
1670 :
1671 170243 : LookupStatus status = getprop.testForGet();
1672 170243 : if (status != Lookup_Cacheable)
1673 543 : return status;
1674 :
1675 169700 : if (obj->isGlobal())
1676 169493 : return generateGlobalStub(obj);
1677 :
1678 207 : return disable("scope object not handled yet");
1679 : }
1680 :
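     : // After stub generation has been attempted, finish the current op by
     : // fetching the value itself. A missing binding is only tolerated for
     : // 'typeof name', which evaluates to undefined instead of throwing.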
1681 182126 : bool retrieve(Value *vp, PICInfo::Kind kind)
1682 : {
1683 182126 : JSObject *obj = getprop.obj;
1684 182126 : JSObject *holder = getprop.holder;
1685 182126 : const JSProperty *prop = getprop.prop;
1686 :
1687 182126 : if (!prop) {
1688 : /* Kludge to allow (typeof foo == "undefined") tests. */
1689 525 : if (kind == ic::PICInfo::NAME) {
1690 525 : JSOp op2 = JSOp(f.pc()[JSOP_NAME_LENGTH]);
1691 525 : if (op2 == JSOP_TYPEOF) {
1692 455 : vp->setUndefined();
1693 455 : return true;
1694 : }
1695 : }
1696 70 : ReportAtomNotDefined(cx, name);
1697 70 : return false;
1698 : }
1699 :
1700 : // If the property was found, but we decided not to cache it, then
1701 : // take a slow path and do a full property fetch.
1702 181601 : if (!getprop.shape) {
1703 4 : if (!obj->getProperty(cx, name, vp))
1704 0 : return false;
1705 4 : return true;
1706 : }
1707 :
1708 181597 : const Shape *shape = getprop.shape;
1709 181597 : JSObject *normalized = obj;
1710 181597 : if (obj->isWith() && !shape->hasDefaultGetter())
1711 10 : normalized = &obj->asWith().object();
1712 181597 : NATIVE_GET(cx, normalized, holder, shape, JSGET_METHOD_BARRIER, vp, return false);
1713 181588 : return true;
1714 : }
1715 : };
1716 :
1717 : class BindNameCompiler : public PICStubCompiler
1718 : {
1719 : JSObject *scopeChain;
1720 : PropertyName *name;
1721 :
1722 : public:
1723 3295 : BindNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1724 : PropertyName *name, VoidStubPIC stub)
1725 : : PICStubCompiler("bind", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1726 3295 : scopeChain(scopeChain), name(name)
1727 3295 : { }
1728 :
1729 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1730 : {
1731 : BindNameLabels &labels = pic.bindNameLabels();
1732 :
1733 : /* Link the inline jump back to the slow path. */
1734 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.getFastShapeGuard());
1735 : repatcher.relink(inlineJump, pic.slowPathStart);
1736 :
1737 : /* Link the slow path to call the IC entry point. */
1738 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
1739 : repatcher.relink(pic.slowPathCall, target);
1740 : }
1741 :
1742 1009 : void patchPreviousToHere(CodeLocationLabel cs)
1743 : {
1744 1009 : BindNameLabels &labels = pic.bindNameLabels();
1745 2018 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1746 1009 : JSC::CodeLocationJump jump;
1747 :
1748 : /* Patch either the inline fast path or a generated stub. */
1749 1009 : if (pic.stubsGenerated)
1750 22 : jump = labels.getStubJump(pic.lastPathStart());
1751 : else
1752 987 : jump = labels.getInlineJump(pic.getFastShapeGuard());
1753 1009 : repatcher.relink(jump, cs);
1754 1009 : }
1755 :
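     : // Generate a stub for BINDNAME: guard on the shape of each scope object
     : // between the current scope chain head and the object the name binds on,
     : // leaving that object in objReg. Shape mismatches rejoin the slow path.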
1756 1185 : LookupStatus generateStub(JSObject *obj)
1757 : {
1758 2370 : Assembler masm;
1759 2370 : Vector<Jump, 8> fails(cx);
1760 :
1761 1185 : BindNameLabels &labels = pic.bindNameLabels();
1762 :
1763 1185 : if (!IsCacheableNonGlobalScope(scopeChain))
1764 9 : return disable("non-cacheable obj at start of scope chain");
1765 :
1766 : /* Guard on the shape of the scope chain. */
1767 1176 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1768 1176 : masm.loadShape(pic.objReg, pic.shapeReg);
1769 : Jump firstShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1770 1176 : ImmPtr(scopeChain->lastProperty()));
1771 :
1772 1176 : if (scopeChain != obj) {
1773 : /* Walk up the scope chain. */
1774 270 : JSObject *tobj = &scopeChain->asScope().enclosingScope();
1775 270 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1776 586 : while (tobj) {
1777 316 : if (!IsCacheableNonGlobalScope(tobj))
1778 167 : return disable("non-cacheable obj in scope chain");
1779 149 : masm.loadPayload(parent, pic.objReg);
1780 149 : masm.loadShape(pic.objReg, pic.shapeReg);
1781 : Jump shapeTest = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1782 149 : ImmPtr(tobj->lastProperty()));
1783 149 : if (!fails.append(shapeTest))
1784 0 : return error();
1785 149 : if (tobj == obj)
1786 103 : break;
1787 46 : tobj = &tobj->asScope().enclosingScope();
1788 : }
1789 103 : if (tobj != obj)
1790 0 : return disable("indirect hit");
1791 : }
1792 :
1793 1009 : Jump done = masm.jump();
1794 :
1795 : // All failures flow to here, so there is a common point to patch.
1796 1126 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1797 117 : pj->linkTo(masm.label(), &masm);
1798 1009 : firstShape.linkTo(masm.label(), &masm);
1799 1009 : Label failLabel = masm.label();
1800 1009 : Jump failJump = masm.jump();
1801 :
1802 1009 : pic.updatePCCounters(f, masm);
1803 :
1804 2018 : PICLinker buffer(masm, pic);
1805 1009 : if (!buffer.init(cx))
1806 0 : return error();
1807 :
1808 2018 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1809 1009 : !buffer.verifyRange(f.chunk())) {
1810 0 : return disable("code memory is out of range");
1811 : }
1812 :
1813 1009 : buffer.link(failJump, pic.slowPathStart);
1814 1009 : buffer.link(done, pic.fastPathRejoin);
1815 1009 : CodeLocationLabel cs = buffer.finalize(f);
1816 1009 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1817 :
1818 1009 : patchPreviousToHere(cs);
1819 :
1820 1009 : pic.stubsGenerated++;
1821 1009 : pic.updateLastPath(buffer, failLabel);
1822 1009 : labels.setStubJump(masm, failLabel, failJump);
1823 :
1824 1009 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1825 0 : disable("max stubs reached");
1826 :
1827 1009 : return Lookup_Cacheable;
1828 : }
1829 :
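     : // Find the object the name would be assigned on (FindIdentifierBase) and,
     : // once the IC has been hit at least once, attach a stub for that result.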
1830 3295 : JSObject *update()
1831 : {
1832 3295 : RecompilationMonitor monitor(cx);
1833 :
1834 3295 : JSObject *obj = FindIdentifierBase(cx, scopeChain, name);
1835 3295 : if (!obj || monitor.recompiled())
1836 2 : return obj;
1837 :
1838 3293 : if (!pic.hit) {
1839 2108 : spew("first hit", "nop");
1840 2108 : pic.hit = true;
1841 2108 : return obj;
1842 : }
1843 :
1844 1185 : LookupStatus status = generateStub(obj);
1845 1185 : if (status == Lookup_Error)
1846 0 : return NULL;
1847 :
1848 1185 : return obj;
1849 : }
1850 : };
1851 :
1852 : static void JS_FASTCALL
1853 5231668 : DisabledGetPropIC(VMFrame &f, ic::PICInfo *pic)
1854 : {
1855 5231668 : stubs::GetProp(f, pic->name);
1856 5231668 : }
1857 :
1858 : static void JS_FASTCALL
1859 0 : DisabledGetPropNoCacheIC(VMFrame &f, ic::PICInfo *pic)
1860 : {
1861 0 : stubs::GetPropNoCache(f, pic->name);
1862 0 : }
1863 :
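     : // Shared body of ic::GetProp and ic::GetPropNoCache. 'length' accesses on
     : // arrays, arguments objects and String objects, and any property access on
     : // a string primitive, get dedicated stubs and are answered here directly;
     : // everything else updates the PIC (when allowed) and performs a normal get.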
1864 : static inline void
1865 446673 : GetPropMaybeCached(VMFrame &f, ic::PICInfo *pic, bool cached)
1866 : {
1867 446673 : VoidStubPIC stub = cached ? DisabledGetPropIC : DisabledGetPropNoCacheIC;
1868 :
1869 446673 : JSScript *script = f.fp()->script();
1870 :
1871 446673 : PropertyName *name = pic->name;
1872 446673 : if (name == f.cx->runtime->atomState.lengthAtom) {
1873 15799 : if (f.regs.sp[-1].isMagic(JS_LAZY_ARGUMENTS)) {
1874 0 : f.regs.sp[-1].setInt32(f.regs.fp()->numActualArgs());
1875 0 : return;
1876 15799 : } else if (!f.regs.sp[-1].isPrimitive()) {
1877 8921 : JSObject *obj = &f.regs.sp[-1].toObject();
1878 11817 : if (obj->isArray() ||
1879 1975 : (obj->isArguments() && !obj->asArguments().hasOverriddenLength()) ||
1880 921 : obj->isString()) {
1881 8069 : GetPropCompiler cc(f, script, obj, *pic, NULL, stub);
1882 8069 : if (obj->isArray()) {
1883 7517 : LookupStatus status = cc.generateArrayLengthStub();
1884 7517 : if (status == Lookup_Error)
1885 0 : THROW();
1886 7517 : f.regs.sp[-1].setNumber(obj->getArrayLength());
1887 552 : } else if (obj->isArguments()) {
1888 483 : LookupStatus status = cc.generateArgsLengthStub();
1889 483 : if (status == Lookup_Error)
1890 0 : THROW();
1891 483 : f.regs.sp[-1].setInt32(int32_t(obj->asArguments().initialLength()));
1892 69 : } else if (obj->isString()) {
1893 69 : LookupStatus status = cc.generateStringObjLengthStub();
1894 69 : if (status == Lookup_Error)
1895 0 : THROW();
1896 69 : JSString *str = obj->asString().unbox();
1897 69 : f.regs.sp[-1].setInt32(str->length());
1898 : }
1899 8069 : return;
1900 : }
1901 : }
1902 : }
1903 :
1904 438604 : if (f.regs.sp[-1].isString()) {
1905 26397 : GetPropCompiler cc(f, script, NULL, *pic, name, stub);
1906 26397 : if (name == f.cx->runtime->atomState.lengthAtom) {
1907 6866 : LookupStatus status = cc.generateStringLengthStub();
1908 6866 : if (status == Lookup_Error)
1909 0 : THROW();
1910 6866 : JSString *str = f.regs.sp[-1].toString();
1911 6866 : f.regs.sp[-1].setInt32(str->length());
1912 : } else {
1913 19531 : LookupStatus status = cc.generateStringPropertyStub();
1914 19531 : if (status == Lookup_Error)
1915 0 : THROW();
1916 19531 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1917 19531 : if (!obj)
1918 0 : THROW();
1919 19531 : if (!obj->getProperty(f.cx, name, &f.regs.sp[-1]))
1920 0 : THROW();
1921 : }
1922 26397 : return;
1923 : }
1924 :
1925 412207 : RecompilationMonitor monitor(f.cx);
1926 :
1927 412207 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1928 412207 : if (!obj)
1929 19 : THROW();
1930 :
1931 412188 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1932 203423 : GetPropCompiler cc(f, script, obj, *pic, name, stub);
1933 203423 : if (!cc.update())
1934 0 : THROW();
1935 : }
1936 :
1937 : Value v;
1938 412188 : if (cached) {
1939 409648 : if (!GetPropertyOperation(f.cx, f.pc(), f.regs.sp[-1], &v))
1940 27 : THROW();
1941 : } else {
1942 2540 : if (!obj->getProperty(f.cx, name, &v))
1943 0 : THROW();
1944 : }
1945 :
1946 412161 : f.regs.sp[-1] = v;
1947 : }
1948 :
1949 : void JS_FASTCALL
1950 444133 : ic::GetProp(VMFrame &f, ic::PICInfo *pic)
1951 : {
1952 444133 : GetPropMaybeCached(f, pic, /* cached = */ true);
1953 444133 : }
1954 :
1955 : void JS_FASTCALL
1956 2540 : ic::GetPropNoCache(VMFrame &f, ic::PICInfo *pic)
1957 : {
1958 2540 : GetPropMaybeCached(f, pic, /* cached = */ false);
1959 2540 : }
1960 :
1961 : template <JSBool strict>
1962 : static void JS_FASTCALL
1963 57735 : DisabledSetPropIC(VMFrame &f, ic::PICInfo *pic)
1964 : {
1965 57735 : stubs::SetName<strict>(f, pic->name);
1966 57735 : }
1967 :
1968 : void JS_FASTCALL
1969 31259 : ic::SetProp(VMFrame &f, ic::PICInfo *pic)
1970 : {
1971 31259 : JSScript *script = f.fp()->script();
1972 31259 : JS_ASSERT(pic->isSet());
1973 :
1974 31259 : VoidStubPIC stub = STRICT_VARIANT(DisabledSetPropIC);
1975 :
1976 : // Save this in case the compiler triggers a recompilation of this script.
1977 31259 : PropertyName *name = pic->name;
1978 31259 : VoidStubName nstub = STRICT_VARIANT(stubs::SetName);
1979 :
1980 31259 : RecompilationMonitor monitor(f.cx);
1981 :
1982 31259 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-2]);
1983 31259 : if (!obj)
1984 0 : THROW();
1985 :
1986 : // Note, we can't use SetName for PROPINC PICs because the property
1987 : // cache can't handle a GET and SET from the same scripted PC.
1988 31259 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1989 13026 : SetPropCompiler cc(f, script, obj, *pic, name, stub);
1990 13026 : LookupStatus status = cc.update();
1991 13026 : if (status == Lookup_Error)
1992 0 : THROW();
1993 : }
1994 :
1995 31259 : nstub(f, name);
1996 : }
1997 :
1998 : static void JS_FASTCALL
1999 676405 : DisabledNameIC(VMFrame &f, ic::PICInfo *pic)
2000 : {
2001 676405 : stubs::Name(f);
2002 676405 : }
2003 :
2004 : static void JS_FASTCALL
2005 0 : DisabledXNameIC(VMFrame &f, ic::PICInfo *pic)
2006 : {
2007 0 : stubs::GetProp(f, pic->name);
2008 0 : }
2009 :
2010 : void JS_FASTCALL
2011 383 : ic::XName(VMFrame &f, ic::PICInfo *pic)
2012 : {
2013 383 : JSScript *script = f.fp()->script();
2014 :
2015 : /* GETXPROP is guaranteed to have an object. */
2016 383 : JSObject *obj = &f.regs.sp[-1].toObject();
2017 :
2018 383 : ScopeNameCompiler cc(f, script, obj, *pic, pic->name, DisabledXNameIC);
2019 :
2020 383 : LookupStatus status = cc.updateForXName();
2021 383 : if (status == Lookup_Error)
2022 0 : THROW();
2023 :
2024 : Value rval;
2025 383 : if (!cc.retrieve(&rval, PICInfo::XNAME))
2026 0 : THROW();
2027 383 : f.regs.sp[-1] = rval;
2028 : }
2029 :
2030 : void JS_FASTCALL
2031 181743 : ic::Name(VMFrame &f, ic::PICInfo *pic)
2032 : {
2033 181743 : JSScript *script = f.fp()->script();
2034 :
2035 181743 : ScopeNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, DisabledNameIC);
2036 :
2037 181743 : LookupStatus status = cc.updateForName();
2038 181743 : if (status == Lookup_Error)
2039 0 : THROW();
2040 :
2041 : Value rval;
2042 181743 : if (!cc.retrieve(&rval, PICInfo::NAME))
2043 79 : THROW();
2044 181664 : f.regs.sp[0] = rval;
2045 : }
2046 :
2047 : static void JS_FASTCALL
2048 1439 : DisabledBindNameIC(VMFrame &f, ic::PICInfo *pic)
2049 : {
2050 1439 : stubs::BindName(f, pic->name);
2051 1439 : }
2052 :
2053 : void JS_FASTCALL
2054 3295 : ic::BindName(VMFrame &f, ic::PICInfo *pic)
2055 : {
2056 3295 : JSScript *script = f.fp()->script();
2057 :
2058 3295 : VoidStubPIC stub = DisabledBindNameIC;
2059 3295 : BindNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, stub);
2060 :
2061 3295 : JSObject *obj = cc.update();
2062 3295 : if (!obj)
2063 0 : THROW();
2064 :
2065 3295 : f.regs.sp[0].setObject(*obj);
2066 : }
2067 :
2068 : void
2069 296553 : BaseIC::spew(JSContext *cx, const char *event, const char *message)
2070 : {
2071 : #ifdef JS_METHODJIT_SPEW
2072 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
2073 296553 : js_CodeName[op], event, message, cx->fp()->script()->filename, CurrentLine(cx));
2074 : #endif
2075 296553 : }
2076 :
2077 : /* Total length of scripts preceding a frame. */
2078 0 : inline uint32_t frameCountersOffset(VMFrame &f)
2079 : {
2080 0 : JSContext *cx = f.cx;
2081 :
2082 0 : uint32_t offset = 0;
2083 0 : if (cx->regs().inlined()) {
2084 0 : offset += cx->fp()->script()->length;
2085 0 : uint32_t index = cx->regs().inlined()->inlineIndex;
2086 0 : InlineFrame *frames = f.chunk()->inlineFrames();
2087 0 : for (unsigned i = 0; i < index; i++)
2088 0 : offset += frames[i].fun->script()->length;
2089 : }
2090 :
2091 : jsbytecode *pc;
2092 0 : JSScript *script = cx->stack.currentScript(&pc);
2093 0 : offset += pc - script->code;
2094 :
2095 0 : return offset;
2096 : }
2097 :
2098 : LookupStatus
2099 62259 : BaseIC::disable(VMFrame &f, const char *reason, void *stub)
2100 : {
2101 62259 : if (f.chunk()->pcLengths) {
2102 0 : uint32_t offset = frameCountersOffset(f);
2103 0 : f.chunk()->pcLengths[offset].picsLength = 0;
2104 : }
2105 :
2106 62259 : spew(f.cx, "disabled", reason);
2107 124518 : Repatcher repatcher(f.chunk());
2108 62259 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2109 62259 : return Lookup_Uncacheable;
2110 : }
2111 :
2112 : void
2113 262450 : BaseIC::updatePCCounters(VMFrame &f, Assembler &masm)
2114 : {
2115 262450 : if (f.chunk()->pcLengths) {
2116 0 : uint32_t offset = frameCountersOffset(f);
2117 0 : f.chunk()->pcLengths[offset].picsLength += masm.size();
2118 : }
2119 262450 : }
2120 :
2121 : bool
2122 443447 : BaseIC::shouldUpdate(JSContext *cx)
2123 : {
2124 443447 : if (!hit) {
2125 226998 : hit = true;
2126 226998 : spew(cx, "ignored", "first hit");
2127 226998 : return false;
2128 : }
2129 216449 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2130 216449 : return true;
2131 : }
2132 :
2133 : static void JS_FASTCALL
2134 715993 : DisabledGetElem(VMFrame &f, ic::GetElementIC *ic)
2135 : {
2136 715993 : stubs::GetElem(f);
2137 715993 : }
2138 :
2139 : bool
2140 13843 : GetElementIC::shouldUpdate(JSContext *cx)
2141 : {
2142 13843 : if (!hit) {
2143 5024 : hit = true;
2144 5024 : spew(cx, "ignored", "first hit");
2145 5024 : return false;
2146 : }
2147 8819 : JS_ASSERT(stubsGenerated < MAX_GETELEM_IC_STUBS);
2148 8819 : return true;
2149 : }
2150 :
2151 : LookupStatus
2152 3047 : GetElementIC::disable(VMFrame &f, const char *reason)
2153 : {
2154 3047 : slowCallPatched = true;
2155 3047 : void *stub = JS_FUNC_TO_DATA_PTR(void *, DisabledGetElem);
2156 3047 : BaseIC::disable(f, reason, stub);
2157 3047 : return Lookup_Uncacheable;
2158 : }
2159 :
2160 : LookupStatus
2161 0 : GetElementIC::error(JSContext *cx)
2162 : {
2163 0 : return Lookup_Error;
2164 : }
2165 :
2166 : void
2167 0 : GetElementIC::purge(Repatcher &repatcher)
2168 : {
2169 : // Repatch the inline jumps.
2170 0 : if (inlineTypeGuardPatched)
2171 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
2172 0 : if (inlineShapeGuardPatched)
2173 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2174 :
2175 0 : if (slowCallPatched) {
2176 : repatcher.relink(slowPathCall,
2177 0 : FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
2178 : }
2179 :
2180 0 : reset();
2181 0 : }
2182 :
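     : // Attach a GETELEM stub for a string (atom) key, which behaves like a
     : // GETPROP on that property: guard on the key's type and identity, on the
     : // object's shape and, if the property lives on a prototype, on the holder's
     : // shape. String-key stubs are chained so later ones can omit the type guard.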
2183 : LookupStatus
2184 6137 : GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyName *name,
2185 : Value *vp)
2186 : {
2187 6137 : JS_ASSERT(v.isString());
2188 6137 : JSContext *cx = f.cx;
2189 :
2190 6137 : GetPropHelper<GetElementIC> getprop(cx, obj, name, *this, f);
2191 6137 : LookupStatus status = getprop.lookupAndTest();
2192 6137 : if (status != Lookup_Cacheable)
2193 160 : return status;
2194 :
2195 : // With TI enabled, string property stubs can only be added to an opcode if
2196 : // the value read will go through a type barrier afterwards. TI only
2197 : // accounts for integer-valued properties accessed by GETELEM/CALLELEM.
2198 5977 : if (cx->typeInferenceEnabled() && !forcedTypeBarrier)
2199 25 : return disable(f, "string element access may not have type barrier");
2200 :
2201 11904 : Assembler masm;
2202 :
2203 : // Guard on the string's type and identity.
2204 5952 : MaybeJump atomTypeGuard;
2205 5952 : if (hasInlineTypeGuard() && !inlineTypeGuardPatched) {
2206 : // We link all string-key dependent stubs together, and store the
2207 : // first set of guards in the IC separately from int-key dependent
2208 : // stubs. As long as we guarantee that the first string-key dependent
2209 : // stub guards on the key type, then all other string-key stubs can
2210 : // omit the guard.
2211 742 : JS_ASSERT(!idRemat.isTypeKnown());
2212 742 : atomTypeGuard = masm.testString(Assembler::NotEqual, typeReg);
2213 : } else {
2214 : // If there was no inline type guard, then a string type is guaranteed.
2215 : // Otherwise, we are guaranteed the type has already been checked, via
2216 : // the comment above.
2217 5210 : JS_ASSERT_IF(!hasInlineTypeGuard(), idRemat.knownType() == JSVAL_TYPE_STRING);
2218 : }
2219 :
2220 : // Reify the shape before guards that could flow into shape guarding stubs.
2221 5952 : if (!obj->isDenseArray() && !typeRegHasBaseShape) {
2222 1384 : masm.loadShape(objReg, typeReg);
2223 1384 : typeRegHasBaseShape = true;
2224 : }
2225 :
2226 5952 : MaybeJump atomIdGuard;
2227 5952 : if (!idRemat.isConstant())
2228 5950 : atomIdGuard = masm.branchPtr(Assembler::NotEqual, idRemat.dataReg(), ImmPtr(v.toString()));
2229 :
2230 : // Guard on the base shape.
2231 5952 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, typeReg, ImmPtr(obj->lastProperty()));
2232 :
2233 11904 : Vector<Jump, 8> otherGuards(cx);
2234 :
2235 : // Guard on the prototype, if applicable.
2236 5952 : MaybeJump protoGuard;
2237 5952 : JSObject *holder = getprop.holder;
2238 5952 : RegisterID holderReg = objReg;
2239 5952 : if (obj != holder) {
2240 81 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, holder, objReg, typeReg))
2241 0 : return error(cx);
2242 :
2243 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
2244 81 : holderReg = typeReg;
2245 81 : masm.move(ImmPtr(holder), holderReg);
2246 81 : typeRegHasBaseShape = false;
2247 :
2248 : // Guard on the holder's shape.
2249 81 : protoGuard = masm.guardShape(holderReg, holder);
2250 : }
2251 :
2252 5952 : if (op == JSOP_CALLELEM) {
2253 : // Emit a write of |obj| to the top of the stack, before we lose it.
2254 35 : Value *thisVp = &cx->regs().sp[-1];
2255 35 : Address thisSlot(JSFrameReg, StackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
2256 35 : masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot);
2257 : }
2258 :
2259 : // Load the value.
2260 5952 : const Shape *shape = getprop.shape;
2261 5952 : masm.loadObjProp(holder, holderReg, shape, typeReg, objReg);
2262 :
2263 5952 : Jump done = masm.jump();
2264 :
2265 5952 : updatePCCounters(f, masm);
2266 :
2267 11904 : PICLinker buffer(masm, *this);
2268 5952 : if (!buffer.init(cx))
2269 0 : return error(cx);
2270 :
2271 5952 : if (hasLastStringStub && !buffer.verifyRange(lastStringStub))
2272 0 : return disable(f, "code memory is out of range");
2273 5952 : if (!buffer.verifyRange(f.chunk()))
2274 0 : return disable(f, "code memory is out of range");
2275 :
2276 : // Patch all guards.
2277 5952 : buffer.maybeLink(atomIdGuard, slowPathStart);
2278 5952 : buffer.maybeLink(atomTypeGuard, slowPathStart);
2279 5952 : buffer.link(shapeGuard, slowPathStart);
2280 5952 : buffer.maybeLink(protoGuard, slowPathStart);
2281 5952 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
2282 0 : buffer.link(*pj, slowPathStart);
2283 5952 : buffer.link(done, fastPathRejoin);
2284 :
2285 5952 : CodeLocationLabel cs = buffer.finalize(f);
2286 : #if DEBUG
2287 5952 : char *chars = DeflateString(cx, v.toString()->getChars(cx), v.toString()->length());
2288 : JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom %p (\"%s\") shape %p (%s: %d)\n",
2289 : js_CodeName[op], cs.executableAddress(), (void*)name, chars,
2290 5952 : (void*)holder->lastProperty(), cx->fp()->script()->filename, CurrentLine(cx));
2291 5952 : cx->free_(chars);
2292 : #endif
2293 :
2294 : // Update the inline guards, if needed.
2295 5952 : if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalShapeGuard()) {
2296 2734 : Repatcher repatcher(f.chunk());
2297 :
2298 1367 : if (shouldPatchInlineTypeGuard()) {
2299 : // A type guard is present in the inline path, and this is the
2300 : // first string stub, so patch it now.
2301 742 : JS_ASSERT(!inlineTypeGuardPatched);
2302 742 : JS_ASSERT(atomTypeGuard.isSet());
2303 :
2304 742 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), cs);
2305 742 : inlineTypeGuardPatched = true;
2306 : }
2307 :
2308 1367 : if (shouldPatchUnconditionalShapeGuard()) {
2309 : // The shape guard is unconditional, meaning there is no type
2310 : // check. This is the first stub, so it has to be patched. Note
2311 : // that it is wrong to patch the inline shape guard otherwise,
2312 : // because it follows an integer-id guard.
2313 625 : JS_ASSERT(!hasInlineTypeGuard());
2314 :
2315 625 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2316 625 : inlineShapeGuardPatched = true;
2317 : }
2318 : }
2319 :
2320 : // If there were previous stub guards, patch them now.
2321 5952 : if (hasLastStringStub) {
2322 9170 : Repatcher repatcher(lastStringStub);
2323 4585 : CodeLocationLabel stub(lastStringStub.start());
2324 4585 : if (atomGuard)
2325 4585 : repatcher.relink(stub.jumpAtOffset(atomGuard), cs);
2326 4585 : repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
2327 4585 : if (secondShapeGuard)
2328 57 : repatcher.relink(stub.jumpAtOffset(secondShapeGuard), cs);
2329 : }
2330 :
2331 : // Update state.
2332 5952 : hasLastStringStub = true;
2333 5952 : lastStringStub = JITCode(cs.executableAddress(), buffer.size());
2334 5952 : if (atomIdGuard.isSet()) {
2335 5950 : atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
2336 5950 : JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
2337 5950 : JS_ASSERT(atomGuard);
2338 : } else {
2339 2 : atomGuard = 0;
2340 : }
2341 5952 : if (protoGuard.isSet()) {
2342 81 : secondShapeGuard = buffer.locationOf(protoGuard.get()) - cs;
2343 81 : JS_ASSERT(secondShapeGuard == buffer.locationOf(protoGuard.get()) - cs);
2344 81 : JS_ASSERT(secondShapeGuard);
2345 : } else {
2346 5871 : secondShapeGuard = 0;
2347 : }
2348 5952 : firstShapeGuard = buffer.locationOf(shapeGuard) - cs;
2349 5952 : JS_ASSERT(firstShapeGuard == buffer.locationOf(shapeGuard) - cs);
2350 5952 : JS_ASSERT(firstShapeGuard);
2351 :
2352 5952 : stubsGenerated++;
2353 :
2354 5952 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2355 46 : disable(f, "max stubs reached");
2356 :
2357 : // Finally, fetch the value to avoid redoing the property lookup.
2358 5952 : *vp = holder->getSlot(shape->slot());
2359 :
2360 5952 : return Lookup_Cacheable;
2361 : }
2362 :
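     : // Attach a GETELEM stub for an arguments object indexed by an integer.
     : // The stub checks the class, the length-overridden bit, the bounds and
     : // that the element has not been deleted, then reads it either from the
     : // ArgumentsData slots (no live frame) or from the still-live StackFrame's
     : // argument area.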
2363 : LookupStatus
2364 439 : GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2365 : {
2366 439 : JSContext *cx = f.cx;
2367 :
2368 439 : if (!v.isInt32())
2369 2 : return disable(f, "arguments object with non-integer key");
2370 :
2371 437 : if (op == JSOP_CALLELEM)
2372 4 : return disable(f, "arguments object with call");
2373 :
2374 433 : JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
2375 :
2376 866 : Assembler masm;
2377 :
2378 433 : Jump shapeGuard = masm.testObjClass(Assembler::NotEqual, objReg, typeReg, obj->getClass());
2379 :
2380 433 : masm.move(objReg, typeReg);
2381 433 : masm.load32(Address(objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)),
2382 866 : objReg);
2383 : Jump overridden = masm.branchTest32(Assembler::NonZero, objReg,
2384 433 : Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT));
2385 433 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), objReg);
2386 :
2387 433 : Jump outOfBounds;
2388 433 : if (idRemat.isConstant()) {
2389 254 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, objReg, Imm32(v.toInt32()));
2390 : } else {
2391 179 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, objReg, idRemat.dataReg());
2392 : }
2393 :
2394 433 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg);
2395 433 : if (idRemat.isConstant()) {
2396 254 : Address slot(objReg, offsetof(ArgumentsData, slots) + v.toInt32() * sizeof(Value));
2397 254 : masm.loadTypeTag(slot, objReg);
2398 : } else {
2399 : BaseIndex slot(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE,
2400 179 : offsetof(ArgumentsData, slots));
2401 179 : masm.loadTypeTag(slot, objReg);
2402 : }
2403 433 : Jump holeCheck = masm.branchPtr(Assembler::Equal, objReg, ImmType(JSVAL_TYPE_MAGIC));
2404 :
2405 433 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::STACK_FRAME_SLOT)), objReg);
2406 433 : Jump liveArguments = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(0));
2407 :
2408 433 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg);
2409 :
2410 433 : if (idRemat.isConstant()) {
2411 254 : Address slot(objReg, offsetof(ArgumentsData, slots) + v.toInt32() * sizeof(Value));
2412 254 : masm.loadValueAsComponents(slot, typeReg, objReg);
2413 : } else {
2414 : BaseIndex slot(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE,
2415 179 : offsetof(ArgumentsData, slots));
2416 179 : masm.loadValueAsComponents(slot, typeReg, objReg);
2417 : }
2418 :
2419 433 : Jump done = masm.jump();
2420 :
2421 433 : liveArguments.linkTo(masm.label(), &masm);
2422 :
2423 433 : masm.move(objReg, typeReg);
2424 :
2425 433 : Address fun(typeReg, StackFrame::offsetOfExec());
2426 433 : masm.loadPtr(fun, objReg);
2427 :
2428 433 : Address nargs(objReg, offsetof(JSFunction, nargs));
2429 433 : masm.load16(nargs, objReg);
2430 :
2431 433 : Jump notFormalArg;
2432 433 : if (idRemat.isConstant())
2433 254 : notFormalArg = masm.branch32(Assembler::BelowOrEqual, objReg, Imm32(v.toInt32()));
2434 : else
2435 179 : notFormalArg = masm.branch32(Assembler::BelowOrEqual, objReg, idRemat.dataReg());
2436 :
2437 433 : masm.lshift32(Imm32(3), objReg); /* nargs << 3 == nargs * sizeof(Value) */
2438 433 : masm.subPtr(objReg, typeReg); /* fp - numFormalArgs => start of formal args */
2439 :
2440 433 : Label loadFromStack = masm.label();
2441 433 : masm.move(typeReg, objReg);
2442 :
2443 433 : if (idRemat.isConstant()) {
2444 254 : Address frameEntry(objReg, v.toInt32() * sizeof(Value));
2445 254 : masm.loadValueAsComponents(frameEntry, typeReg, objReg);
2446 : } else {
2447 179 : BaseIndex frameEntry(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE);
2448 179 : masm.loadValueAsComponents(frameEntry, typeReg, objReg);
2449 : }
2450 433 : Jump done2 = masm.jump();
2451 :
2452 433 : notFormalArg.linkTo(masm.label(), &masm);
2453 :
2454 433 : masm.push(typeReg);
2455 :
2456 433 : Address argsObject(typeReg, StackFrame::offsetOfArgsObj());
2457 433 : masm.loadPtr(argsObject, typeReg);
2458 :
2459 433 : masm.load32(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)),
2460 866 : typeReg);
2461 433 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), typeReg);
2462 :
2463 : /* This basically does fp - (numFormalArgs + numActualArgs + 2) */
2464 :
2465 433 : masm.addPtr(typeReg, objReg);
2466 433 : masm.addPtr(Imm32(2), objReg);
2467 433 : masm.lshiftPtr(Imm32(3), objReg);
2468 :
2469 433 : masm.pop(typeReg);
2470 433 : masm.subPtr(objReg, typeReg);
2471 :
2472 433 : masm.jump(loadFromStack);
2473 :
2474 433 : updatePCCounters(f, masm);
2475 :
2476 866 : PICLinker buffer(masm, *this);
2477 :
2478 433 : if (!buffer.init(cx))
2479 0 : return error(cx);
2480 :
2481 433 : if (!buffer.verifyRange(f.chunk()))
2482 0 : return disable(f, "code memory is out of range");
2483 :
2484 433 : buffer.link(shapeGuard, slowPathStart);
2485 433 : buffer.link(overridden, slowPathStart);
2486 433 : buffer.link(outOfBounds, slowPathStart);
2487 433 : buffer.link(holeCheck, slowPathStart);
2488 433 : buffer.link(done, fastPathRejoin);
2489 433 : buffer.link(done2, fastPathRejoin);
2490 :
2491 433 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2492 :
2493 433 : JaegerSpew(JSpew_PICs, "generated getelem arguments stub at %p\n", cs.executableAddress());
2494 :
2495 866 : Repatcher repatcher(f.chunk());
2496 433 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2497 :
2498 433 : JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
2499 433 : JS_ASSERT(!inlineShapeGuardPatched);
2500 :
2501 433 : inlineShapeGuardPatched = true;
2502 433 : stubsGenerated++;
2503 :
2504 433 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2505 0 : disable(f, "max stubs reached");
2506 :
2507 433 : disable(f, "generated arguments stub");
2508 :
2509 433 : if (!obj->getGeneric(cx, id, vp))
2510 0 : return Lookup_Error;
2511 :
2512 433 : return Lookup_Cacheable;
2513 : }
2514 :
2515 : #if defined JS_METHODJIT_TYPED_ARRAY
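     : // Attach a monomorphic GETELEM stub for a typed array: guard on the
     : // array's shape, bounds-check the index, then load the element straight
     : // from the data vector. Out-of-bounds indices take the slow path.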
2516 : LookupStatus
2517 816 : GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2518 : {
2519 816 : JSContext *cx = f.cx;
2520 :
2521 816 : if (!v.isInt32())
2522 0 : return disable(f, "typed array with string key");
2523 :
2524 816 : if (op == JSOP_CALLELEM)
2525 0 : return disable(f, "typed array with call");
2526 :
2527 : // The fast-path guarantees that after the dense shape guard, the type is
2528 : // known to be int32, either via type inference or the inline type check.
2529 816 : JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
2530 :
2531 1632 : Assembler masm;
2532 :
2533 : // Guard on this typed array's shape/class.
2534 816 : Jump shapeGuard = masm.guardShape(objReg, obj);
2535 :
2536 : // Bounds check.
2537 816 : Jump outOfBounds;
2538 816 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2539 816 : if (idRemat.isConstant()) {
2540 643 : JS_ASSERT(idRemat.value().toInt32() == v.toInt32());
2541 643 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(v.toInt32()));
2542 : } else {
2543 173 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, idRemat.dataReg());
2544 : }
2545 :
2546 : // Load the array's packed data vector.
2547 816 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2548 :
2549 816 : Int32Key key = idRemat.isConstant()
2550 643 : ? Int32Key::FromConstant(v.toInt32())
2551 1459 : : Int32Key::FromRegister(idRemat.dataReg());
2552 :
2553 816 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2554 816 : if (!masm.supportsFloatingPoint() &&
2555 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2556 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64 ||
2557 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_UINT32))
2558 : {
2559 0 : return disable(f, "fpu not supported");
2560 : }
2561 :
2562 816 : MaybeRegisterID tempReg;
2563 816 : masm.loadFromTypedArray(TypedArray::getType(tarray), objReg, key, typeReg, objReg, tempReg);
2564 :
2565 816 : Jump done = masm.jump();
2566 :
2567 816 : updatePCCounters(f, masm);
2568 :
2569 1632 : PICLinker buffer(masm, *this);
2570 816 : if (!buffer.init(cx))
2571 0 : return error(cx);
2572 :
2573 816 : if (!buffer.verifyRange(f.chunk()))
2574 0 : return disable(f, "code memory is out of range");
2575 :
2576 816 : buffer.link(shapeGuard, slowPathStart);
2577 816 : buffer.link(outOfBounds, slowPathStart);
2578 816 : buffer.link(done, fastPathRejoin);
2579 :
2580 816 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2581 816 : JaegerSpew(JSpew_PICs, "generated getelem typed array stub at %p\n", cs.executableAddress());
2582 :
2583 : // If we can generate a typed array stub, the shape guard is conditional.
2584 : // Also, only a single typed array stub is supported.
2585 816 : JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
2586 816 : JS_ASSERT(!inlineShapeGuardPatched);
2587 :
2588 1632 : Repatcher repatcher(f.chunk());
2589 816 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2590 816 : inlineShapeGuardPatched = true;
2591 :
2592 816 : stubsGenerated++;
2593 :
2594 : // In the future, it might make sense to attach multiple typed array stubs.
2595 : // For simplicity, they are currently monomorphic.
2596 816 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2597 0 : disable(f, "max stubs reached");
2598 :
2599 816 : disable(f, "generated typed array stub");
2600 :
2601 : // Fetch the value as expected of Lookup_Cacheable for GetElement.
2602 816 : if (!obj->getGeneric(cx, id, vp))
2603 0 : return Lookup_Error;
2604 :
2605 816 : return Lookup_Cacheable;
2606 : }
2607 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2608 :
2609 : LookupStatus
2610 8819 : GetElementIC::update(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2611 : {
2612 : /*
2613 : * Only treat this as a GETPROP for non-numeric string identifiers. The
2614 : * GETPROP IC assumes the id has already gone through filtering for string
2615 : * indexes in the emitter, i.e. js_GetProtoIfDenseArray is only valid to
2616 : * use when looking up non-integer identifiers.
2617 : */
2618 : uint32_t dummy;
2619 8819 : if (v.isString() && JSID_IS_ATOM(id) && !JSID_TO_ATOM(id)->isIndex(&dummy))
2620 6137 : return attachGetProp(f, obj, v, JSID_TO_ATOM(id)->asPropertyName(), vp);
2621 :
2622 2682 : if (obj->isArguments())
2623 439 : return attachArguments(f, obj, v, id, vp);
2624 :
2625 : #if defined JS_METHODJIT_TYPED_ARRAY
2626 : /*
2627 : * Typed array ICs can make stub calls, and need to know which registers
2628 : * are in use and need to be restored after the call. If type inference is
2629 : * enabled then we don't necessarily know the full set of such registers
2630 : * when generating the IC (loop-carried registers may be allocated later),
2631 : * and additionally the push/pop instructions used to save/restore in the
2632 : * IC are not compatible with carrying entries in floating point registers.
2633 : * Since we can use type information to generate inline paths for typed
2634 : * arrays, just don't generate these ICs with inference enabled.
2635 : */
2636 2243 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2637 816 : return attachTypedArray(f, obj, v, id, vp);
2638 : #endif
2639 :
2640 1427 : return disable(f, "unhandled object and key type");
2641 : }
2642 :
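     : // IC entry point for GETELEM/CALLELEM. Non-object bases and XML objects
     : // permanently disable the IC and take the generic stub; otherwise the id
     : // is normalized, the IC is updated when worthwhile, and anything still
     : // uncacheable falls back to a generic getGeneric.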
2643 : void JS_FASTCALL
2644 13977 : ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
2645 : {
2646 13977 : JSContext *cx = f.cx;
2647 :
2648 : // Right now, we don't optimize for strings or lazy arguments.
2649 13977 : if (!f.regs.sp[-2].isObject()) {
2650 118 : ic->disable(f, "non-object");
2651 118 : stubs::GetElem(f);
2652 118 : return;
2653 : }
2654 :
2655 13859 : Value idval = f.regs.sp[-1];
2656 :
2657 13859 : RecompilationMonitor monitor(cx);
2658 :
2659 13859 : JSObject *obj = ValueToObject(cx, f.regs.sp[-2]);
2660 13859 : if (!obj)
2661 0 : THROW();
2662 :
2663 : #if JS_HAS_XML_SUPPORT
2664 : // Some XML properties behave differently when accessed in a call vs. normal
2665 : // context, so we fall back to stubs::GetElem.
2666 13859 : if (obj->isXML()) {
2667 16 : ic->disable(f, "XML object");
2668 16 : stubs::GetElem(f);
2669 16 : return;
2670 : }
2671 : #endif
2672 :
2673 : jsid id;
2674 13843 : if (idval.isInt32() && INT_FITS_IN_JSID(idval.toInt32())) {
2675 5189 : id = INT_TO_JSID(idval.toInt32());
2676 : } else {
2677 8654 : if (!js_InternNonIntElementId(cx, obj, idval, &id))
2678 0 : THROW();
2679 : }
2680 :
2681 13843 : if (!monitor.recompiled() && ic->shouldUpdate(cx)) {
2682 : #ifdef DEBUG
2683 8819 : f.regs.sp[-2] = MagicValue(JS_GENERIC_MAGIC);
2684 : #endif
2685 8819 : LookupStatus status = ic->update(f, obj, idval, id, &f.regs.sp[-2]);
2686 8819 : if (status != Lookup_Uncacheable) {
2687 7201 : if (status == Lookup_Error)
2688 0 : THROW();
2689 :
2690 : // If the result can be cached, the value was already retrieved.
2691 7201 : JS_ASSERT(!f.regs.sp[-2].isMagic());
2692 7201 : return;
2693 : }
2694 : }
2695 :
2696 6642 : if (!obj->getGeneric(cx, id, &f.regs.sp[-2]))
2697 0 : THROW();
2698 :
2699 : #if JS_HAS_NO_SUCH_METHOD
2700 6642 : if (*f.pc() == JSOP_CALLELEM && JS_UNLIKELY(f.regs.sp[-2].isPrimitive())) {
2701 13 : if (!OnUnknownMethod(cx, obj, idval, &f.regs.sp[-2]))
2702 0 : THROW();
2703 : }
2704 : #endif
2705 : }
2706 :
2707 : #define APPLY_STRICTNESS(f, s) \
2708 : (FunctionTemplateConditional(s, f<true>, f<false>))
2709 :
2710 : LookupStatus
2711 1902 : SetElementIC::disable(VMFrame &f, const char *reason)
2712 : {
2713 1902 : slowCallPatched = true;
2714 1902 : VoidStub stub = APPLY_STRICTNESS(stubs::SetElem, strictMode);
2715 1902 : BaseIC::disable(f, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
2716 1902 : return Lookup_Uncacheable;
2717 : }
2718 :
2719 : LookupStatus
2720 0 : SetElementIC::error(JSContext *cx)
2721 : {
2722 0 : return Lookup_Error;
2723 : }
2724 :
2725 : void
2726 0 : SetElementIC::purge(Repatcher &repatcher)
2727 : {
2728 : // Repatch the inline jumps.
2729 0 : if (inlineShapeGuardPatched)
2730 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2731 0 : if (inlineHoleGuardPatched)
2732 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
2733 :
2734 0 : if (slowCallPatched) {
2735 0 : void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
2736 0 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2737 : }
2738 :
2739 0 : reset();
2740 0 : }
2741 :
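     : // Attach a SETELEM stub for writes just past the initialized length of a
     : // dense array ("hole" writes): guard that no object on the prototype chain
     : // has indexed properties (by baking in each prototype's shape), check that
     : // the key extends the initialized length by exactly one and fits in the
     : // capacity, then bump the lengths and store the value.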
2742 : LookupStatus
2743 547 : SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
2744 : {
2745 547 : JSContext *cx = f.cx;
2746 :
2747 547 : if (keyval < 0)
2748 4 : return disable(f, "negative key index");
2749 :
2750 : // We may have failed a capacity check instead of a dense array check.
2751 : // However, we should still build the IC in this case, since it could
2752 : // be in a loop that is filling in the array.
2753 :
2754 543 : if (js_PrototypeHasIndexedProperties(cx, obj))
2755 4 : return disable(f, "prototype has indexed properties");
2756 :
2757 1078 : Assembler masm;
2758 :
2759 1078 : Vector<Jump, 8> fails(cx);
2760 :
2761 539 : if (!GeneratePrototypeGuards(cx, fails, masm, obj, NULL, objReg, objReg))
2762 0 : return error(cx);
2763 :
2764 : // Test for indexed properties in Array.prototype. We test each shape
2765 : // along the proto chain. This affords us two optimizations:
2766 : // 1) Loading the prototype can be avoided because the shape would change;
2767 : // instead we can bake in their identities.
2768 : // 2) We only have to test the shape, rather than INDEXED.
2769 1613 : for (JSObject *pobj = obj->getProto(); pobj; pobj = pobj->getProto()) {
2770 1074 : if (!pobj->isNative())
2771 0 : return disable(f, "non-native array prototype");
2772 1074 : masm.move(ImmPtr(pobj), objReg);
2773 1074 : Jump j = masm.guardShape(objReg, pobj);
2774 1074 : if (!fails.append(j))
2775 0 : return error(cx);
2776 : }
2777 :
2778 : // Restore |obj|.
2779 539 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2780 :
2781 : // Load the elements.
2782 539 : masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), objReg);
2783 :
2784 539 : Int32Key key = hasConstantKey ? Int32Key::FromConstant(keyValue) : Int32Key::FromRegister(keyReg);
2785 :
2786 : // Guard that the initialized length is being updated exactly.
2787 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
2788 539 : objReg, key, Assembler::NotEqual));
2789 :
2790 : // Check the array capacity.
2791 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfCapacity(),
2792 539 : objReg, key, Assembler::BelowOrEqual));
2793 :
2794 539 : masm.bumpKey(key, 1);
2795 :
2796 : // Update the length and initialized length.
2797 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfInitializedLength()));
2798 : Jump lengthGuard = masm.guardArrayExtent(ObjectElements::offsetOfLength(),
2799 539 : objReg, key, Assembler::AboveOrEqual);
2800 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfLength()));
2801 539 : lengthGuard.linkTo(masm.label(), &masm);
2802 :
2803 539 : masm.bumpKey(key, -1);
2804 :
2805 : // Store the value back.
2806 539 : if (hasConstantKey) {
2807 55 : Address slot(objReg, keyValue * sizeof(Value));
2808 55 : masm.storeValue(vr, slot);
2809 : } else {
2810 484 : BaseIndex slot(objReg, keyReg, Assembler::JSVAL_SCALE);
2811 484 : masm.storeValue(vr, slot);
2812 : }
2813 :
2814 539 : Jump done = masm.jump();
2815 :
2816 539 : JS_ASSERT(!execPool);
2817 539 : JS_ASSERT(!inlineHoleGuardPatched);
2818 :
2819 1078 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2820 539 : execPool = buffer.init(cx);
2821 539 : if (!execPool)
2822 0 : return error(cx);
2823 :
2824 539 : if (!buffer.verifyRange(f.chunk()))
2825 0 : return disable(f, "code memory is out of range");
2826 :
2827 : // Patch all guards.
2828 3216 : for (size_t i = 0; i < fails.length(); i++)
2829 2677 : buffer.link(fails[i], slowPathStart);
2830 539 : buffer.link(done, fastPathRejoin);
2831 :
2832 539 : CodeLocationLabel cs = buffer.finalize(f);
2833 539 : JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
2834 :
2835 1078 : Repatcher repatcher(f.chunk());
2836 539 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
2837 539 : inlineHoleGuardPatched = true;
2838 :
2839 539 : disable(f, "generated dense array hole stub");
2840 :
2841 539 : return Lookup_Cacheable;
2842 : }
2843 :
2844 : #if defined JS_METHODJIT_TYPED_ARRAY
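     : // Attach a monomorphic SETELEM stub for a typed array: guard on the shape,
     : // bounds-check the index, then store the value into the data vector with
     : // the element width of the array. Out-of-bounds stores are simply dropped
     : // (they rejoin the fast path without writing).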
2845 : LookupStatus
2846 640 : SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
2847 : {
2848 : // Right now, only one shape guard extension is supported.
2849 640 : JS_ASSERT(!inlineShapeGuardPatched);
2850 :
2851 1280 : Assembler masm;
2852 640 : JSContext *cx = f.cx;
2853 :
2854 : // Restore |obj|.
2855 640 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2856 :
2857 : // Guard on this typed array's shape.
2858 640 : Jump shapeGuard = masm.guardShape(objReg, obj);
2859 :
2860 : // Bounds check.
2861 640 : Jump outOfBounds;
2862 640 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2863 640 : if (hasConstantKey)
2864 149 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(keyValue));
2865 : else
2866 491 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, keyReg);
2867 :
2868 : // Load the array's packed data vector.
2869 640 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2870 :
2871 640 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2872 640 : if (!masm.supportsFloatingPoint() &&
2873 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2874 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64))
2875 : {
2876 0 : return disable(f, "fpu not supported");
2877 : }
2878 :
2879 640 : int shift = js::TypedArray::slotWidth(obj);
2880 640 : if (hasConstantKey) {
2881 149 : Address addr(objReg, keyValue * shift);
2882 149 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2883 0 : return error(cx);
2884 : } else {
2885 491 : Assembler::Scale scale = Assembler::TimesOne;
2886 491 : switch (shift) {
2887 : case 2:
2888 104 : scale = Assembler::TimesTwo;
2889 104 : break;
2890 : case 4:
2891 148 : scale = Assembler::TimesFour;
2892 148 : break;
2893 : case 8:
2894 32 : scale = Assembler::TimesEight;
2895 32 : break;
2896 : }
2897 491 : BaseIndex addr(objReg, keyReg, scale);
2898 491 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2899 0 : return error(cx);
2900 : }
2901 :
2902 640 : Jump done = masm.jump();
2903 :
2904 : // The stub does not rely on any pointers or numbers that could be ruined
2905 : // by a GC or a shape-regenerating GC. We let this stub live for the lifetime
2906 : // of the script.
2907 640 : JS_ASSERT(!execPool);
2908 1280 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2909 640 : execPool = buffer.init(cx);
2910 640 : if (!execPool)
2911 0 : return error(cx);
2912 :
2913 640 : if (!buffer.verifyRange(f.chunk()))
2914 0 : return disable(f, "code memory is out of range");
2915 :
2916 : // Note that the out-of-bounds path simply does nothing.
2917 640 : buffer.link(shapeGuard, slowPathStart);
2918 640 : buffer.link(outOfBounds, fastPathRejoin);
2919 640 : buffer.link(done, fastPathRejoin);
2920 640 : masm.finalize(buffer);
2921 :
2922 640 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2923 640 : JaegerSpew(JSpew_PICs, "generated setelem typed array stub at %p\n", cs.executableAddress());
2924 :
2925 1280 : Repatcher repatcher(f.chunk());
2926 640 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2927 640 : inlineShapeGuardPatched = true;
2928 :
2929 640 : stubsGenerated++;
2930 :
2931 : // In the future, it might make sense to attach multiple typed array stubs.
2932 : // For simplicity, they are currently monomorphic.
2933 640 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2934 0 : disable(f, "max stubs reached");
2935 :
2936 640 : disable(f, "generated typed array stub");
2937 :
2938 640 : return Lookup_Cacheable;
2939 : }
2940 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2941 :
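     : // Pick a stub for this SETELEM: dense arrays get the hole-write stub,
     : // typed arrays (without type inference) get the typed array stub, and
     : // everything else disables the IC.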
2942 : LookupStatus
2943 1902 : SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
2944 : {
2945 1902 : if (!objval.isObject())
2946 0 : return disable(f, "primitive lval");
2947 1902 : if (!idval.isInt32())
2948 182 : return disable(f, "non-int32 key");
2949 :
2950 1720 : JSObject *obj = &objval.toObject();
2951 1720 : int32_t key = idval.toInt32();
2952 :
2953 1720 : if (obj->isDenseArray())
2954 547 : return attachHoleStub(f, obj, key);
2955 :
2956 : #if defined JS_METHODJIT_TYPED_ARRAY
2957 : /* Not attaching typed array stubs with linear scan allocator, see GetElementIC. */
2958 1173 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2959 640 : return attachTypedArray(f, obj, key);
2960 : #endif
2961 :
2962 533 : return disable(f, "unsupported object type");
2963 : }
2964 :
2965 : bool
2966 4174 : SetElementIC::shouldUpdate(JSContext *cx)
2967 : {
2968 4174 : if (!hit) {
2969 2272 : hit = true;
2970 2272 : spew(cx, "ignored", "first hit");
2971 2272 : return false;
2972 : }
2973 : #ifdef JSGC_INCREMENTAL_MJ
2974 1902 : JS_ASSERT(!cx->compartment->needsBarrier());
2975 : #endif
2976 1902 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2977 1902 : return true;
2978 : }
2979 :
2980 : template<JSBool strict>
2981 : void JS_FASTCALL
2982 : ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
2983 : {
2984 4174 : JSContext *cx = f.cx;
2985 :
2986 4174 : if (ic->shouldUpdate(cx)) {
2987 1902 : LookupStatus status = ic->update(f, f.regs.sp[-3], f.regs.sp[-2]);
2988 1902 : if (status == Lookup_Error)
2989 0 : THROW();
2990 : }
2991 :
2992 4174 : stubs::SetElem<strict>(f);
2993 : }
2994 :
2995 : template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
2996 : template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
2997 :
2998 : #endif /* JS_POLYIC */
2999 :