public MixedModeIRMethod(IRScope method, Visibility visibility, RubyModule implementationClass) { super(implementationClass, visibility, CallConfiguration.FrameNoneScopeNone, method.getName()); this.method = method; getStaticScope().determineModule(); this.signature = getStaticScope().getSignature(); // disable JIT if JIT is disabled if (!implementationClass.getRuntime().getInstanceConfig().getCompileMode().shouldJIT()) { this.box.callCount = -1; } }
protected void doDebug() { // FIXME: This is printing out IRScope CFG but JIT may be active and it might not reflect // currently executing. Move into JIT and into interp since they will be getting CFG from // different sources // FIXME: This is only printing out CFG once. If we keep applying more passes then we // will want to print out after those new passes. ensureInstrsReady(); LOG.info("Executing '" + method.getName() + "'"); if (!displayedCFG) { LOG.info(method.debugOutput()); displayedCFG = true; } }
/**
 * Core IR interpreter loop: executes the scope's instruction array one instruction
 * at a time, dispatching on each instruction's operation class.
 *
 * @param context   current thread context (frame/scope/visibility stacks)
 * @param self      the receiver
 * @param scope     the IR scope whose instructions are executed
 * @param visibility visibility installed when a PUSH_FRAME instr is executed
 * @param implClass implementation class pushed with the method frame
 * @param args      pre-processed argument array
 * @param block     block passed to the call (may be Block.NULL_BLOCK)
 * @param blockType non-null when interpreting a block body; LAMBDA changes
 *                  break/return semantics
 * @return the Ruby result of the scope, or null if control falls off the end
 */
private static IRubyObject interpret(
    ThreadContext context,
    IRubyObject self,
    IRScope scope,
    Visibility visibility,
    RubyModule implClass,
    IRubyObject[] args,
    Block block,
    Block.Type blockType) {
  Instr[] instrs = scope.getInstrsForInterpretation();
  // The base IR may not have been processed yet
  if (instrs == null) instrs = scope.prepareForInterpretation(blockType == Block.Type.LAMBDA);
  int numTempVars = scope.getTemporaryVariableSize();
  // Temp-variable slots for this activation; null when the scope needs none.
  Object[] temp = numTempVars > 0 ? new Object[numTempVars] : null;
  int n = instrs.length;
  int ipc = 0; // "instruction pointer counter" -- index of the next instr to run
  Instr instr = null;
  Object exception = null; // last caught throwable, handed to receiveArg for rescue blocks
  // 1 when the trailing argument is a keyword-argument hash, else 0.
  // NOTE(review): assumes args is non-empty whenever the scope receives kwargs;
  // args[args.length - 1] would throw on an empty array -- confirm callers guarantee this.
  int kwArgHashCount = (scope.receivesKeywordArgs() && args[args.length - 1] instanceof RubyHash) ? 1 : 0;
  DynamicScope currDynScope = context.getCurrentScope();

  // Counter tpCount = null;

  // Init profiling this scope
  boolean debug = IRRuntimeHelpers.isDebug();
  boolean profile = IRRuntimeHelpers.inProfileMode();
  Integer scopeVersion = profile ? initProfiling(scope) : 0;

  // Enter the looooop!
  while (ipc < n) {
    instr = instrs[ipc];
    ipc++; // advance eagerly; branch/exception handling below may overwrite ipc
    Operation operation = instr.getOperation();
    if (debug) {
      LOG.info("I: {}", instr);
      interpInstrsCount++;
    } else if (profile) {
      // Code-modifying operations reset the "stable profile" counter used by analyzeProfile().
      if (operation.modifiesCode()) codeModificationsCount++;
      interpInstrsCount++;
      /* Counter cnt = opStats.get(operation);
         if (cnt == null) { cnt = new Counter(); opStats.put(operation, cnt); }
         cnt.count++; */
    }

    try {
      switch (operation.opClass) {
        case ARG_OP:
          {
            // Argument-receiving instrs (required/opt/rest/kw args, exceptions).
            receiveArg(
                context, instr, operation, args, kwArgHashCount, currDynScope, temp, exception, block);
            break;
          }
        case BRANCH_OP:
          {
            if (operation == Operation.JUMP) {
              // Unconditional jump: target PC is known statically.
              ipc = ((JumpInstr) instr).getJumpTarget().getTargetPC();
            } else {
              // Conditional branches compute the next PC themselves.
              ipc = instr.interpretAndGetNewIPC(context, currDynScope, self, temp, ipc);
            }
            break;
          }
        case CALL_OP:
          {
            // Record call-site counts while profiling, then dispatch the call.
            if (profile) updateCallSite(instr, scope, scopeVersion);
            processCall(
                context, instr, operation, scope, currDynScope, temp, self, block, blockType);
            break;
          }
        case BOOK_KEEPING_OP:
          {
            // Frame/binding/line-number maintenance instructions.
            switch (operation) {
              case PUSH_FRAME:
                {
                  context.preMethodFrameAndClass(
                      implClass, scope.getName(), self, block, scope.getStaticScope());
                  context.setCurrentVisibility(visibility);
                  break;
                }
              case PUSH_BINDING:
                {
                  // SSS NOTE: Method scopes only!
                  //
                  // Blocks are a headache -- so, these instrs. are only added to IRMethods.
                  // Blocks have more complicated logic for pushing a dynamic scope (see
                  // InterpretedIRBlockBody)
                  // Changed by DPR
                  currDynScope =
                      DynamicScope.newDynamicScope(
                          scope.getStaticScope(), context.getCurrentScope().getDepth());
                  context.pushScope(currDynScope);
                  break;
                }
              case CHECK_ARITY:
                ((CheckArityInstr) instr).checkArity(context.runtime, args.length);
                break;
              case POP_FRAME:
                context.popFrame();
                context.popRubyClass();
                break;
              case POP_BINDING:
                context.popScope();
                break;
              case THREAD_POLL:
                if (profile) {
                  // SSS: Not being used currently
                  // tpCount.count++;
                  globalThreadPollCount++;
                  // 20K is arbitrary
                  // Every 20K profile counts, spit out profile stats
                  if (globalThreadPollCount % 20000 == 0) {
                    analyzeProfile();
                    // outputProfileStats();
                  }
                }
                context.callThreadPoll();
                break;
              case LINE_NUM:
                context.setLine(((LineNumberInstr) instr).lineNumber);
                break;
              case RECORD_END_BLOCK:
                ((RecordEndBlockInstr) instr).interpret();
                break;
            }
            break;
          }
        case OTHER_OP:
          {
            Object result = null;
            switch (operation) {
              // --------- Return flavored instructions --------
              case BREAK:
                {
                  BreakInstr bi = (BreakInstr) instr;
                  IRubyObject rv =
                      (IRubyObject) bi.getReturnValue().retrieve(context, self, currDynScope, temp);
                  // This also handles breaks in lambdas -- by converting them to a return
                  return IRRuntimeHelpers.initiateBreak(
                      context, scope, bi.getScopeToReturnTo().getScopeId(), rv, blockType);
                }
              case RETURN:
                {
                  // Plain method-local return.
                  return (IRubyObject)
                      retrieveOp(
                          ((ReturnBase) instr).getReturnValue(), context, self, currDynScope, temp);
                }
              case NONLOCAL_RETURN:
                {
                  NonlocalReturnInstr ri = (NonlocalReturnInstr) instr;
                  IRubyObject rv =
                      (IRubyObject)
                          retrieveOp(ri.getReturnValue(), context, self, currDynScope, temp);
                  ipc = n; // terminate the loop in case initiateNonLocalReturn doesn't throw
                  // If not in a lambda, check if this was a non-local return
                  if (!IRRuntimeHelpers.inLambda(blockType)) {
                    IRRuntimeHelpers.initiateNonLocalReturn(
                        context, scope, ri.methodToReturnFrom, rv);
                  }
                  return rv;
                }
              // ---------- Common instruction ---------
              case COPY:
                {
                  CopyInstr c = (CopyInstr) instr;
                  result = retrieveOp(c.getSource(), context, self, currDynScope, temp);
                  setResult(temp, currDynScope, c.getResult(), result);
                  break;
                }
              case GET_FIELD:
                {
                  GetFieldInstr gfi = (GetFieldInstr) instr;
                  IRubyObject object =
                      (IRubyObject) gfi.getSource().retrieve(context, self, currDynScope, temp);
                  VariableAccessor a = gfi.getAccessor(object);
                  result = a == null ? null : (IRubyObject) a.get(object);
                  // Unset ivars read as nil.
                  if (result == null) {
                    result = context.nil;
                  }
                  setResult(temp, currDynScope, gfi.getResult(), result);
                  break;
                }
              case SEARCH_CONST:
                {
                  SearchConstInstr sci = (SearchConstInstr) instr;
                  // Fast path: cached constant; re-resolve and re-cache on miss/invalidation.
                  result = sci.getCachedConst();
                  if (!sci.isCached(context, result)) result = sci.cache(context, currDynScope, self, temp);
                  setResult(temp, currDynScope, sci.getResult(), result);
                  break;
                }
              // ---------- All the rest ---------
              default:
                result = instr.interpret(context, currDynScope, self, temp, block);
                setResult(temp, currDynScope, instr, result);
                break;
            }
            break;
          }
      }
    } catch (Throwable t) {
      // Unrescuable:
      //    IRReturnJump, ThreadKill, RubyContinuation, MainExitException, etc.
      //    These cannot be rescued -- only run ensure blocks
      //
      // Others:
      //    IRBreakJump, Ruby exceptions, errors, and other java exceptions.
      //    These can be rescued -- run rescue blocks
      if (debug)
        LOG.info(
            "in scope: " + scope + ", caught Java throwable: " + t + "; excepting instr: " + instr);
      // Resume at the scope's ensure (unrescuable) or rescue (rescuable) handler PC.
      ipc = (t instanceof Unrescuable) ? scope.getEnsurerPC(instr) : scope.getRescuerPC(instr);
      if (debug) LOG.info("ipc for rescuer/ensurer: " + ipc);

      if (ipc == -1) {
        // No handler in this scope: propagate (sneaky-throw, no wrapping).
        Helpers.throwException((Throwable) t);
      } else {
        // Stash the throwable; the handler's ARG_OP (receiveArg) consumes it.
        exception = t;
      }
    }
  }

  // Control should never get here!
  // SSS FIXME: But looks like BEGIN/END blocks get here -- needs fixing
  return null;
}
/**
 * Compiler pass: rewrites a scope's CFG to use an explicit call protocol by inserting
 * push/pop frame and binding instructions directly into the instruction stream, then
 * marks the scope so the runtime skips its implicit protocol. Recurses into nested
 * closures and invalidates live-variable analysis (the pass changes the instr stream).
 *
 * @param scope the IR scope to transform
 * @param data  unused pass data
 * @return always null
 */
@Override
public Object execute(IRScope scope, Object... data) {
  // IRScriptBody do not get explicit call protocol instructions right now.
  // They dont push/pop a frame and do other special things like run begin/end blocks.
  // So, for now, they go through the runtime stub in IRScriptBody.
  //
  // Add explicit frame and binding push/pop instrs ONLY for methods -- we cannot handle this in
  // closures and evals yet
  // If the scope uses $_ or $~ family of vars, has local load/stores, or if its binding has
  // escaped, we have
  // to allocate a dynamic scope for it and add binding push/pop instructions.
  if (explicitCallProtocolSupported(scope)) {
    StoreLocalVarPlacementProblem slvpp =
        (StoreLocalVarPlacementProblem)
            scope.getDataFlowSolution(StoreLocalVarPlacementProblem.NAME);
    // NOTE(review): scopeHasLocalVarStores is computed below but never read afterwards
    // in this method -- looks vestigial; confirm before removing.
    boolean scopeHasLocalVarStores = false;
    boolean bindingHasEscaped = scope.bindingHasEscaped();

    CFG cfg = scope.cfg();

    if (slvpp != null && bindingHasEscaped) {
      scopeHasLocalVarStores = slvpp.scopeHasLocalVarStores();
    } else {
      // We dont require local-var load/stores to have been run.
      // If it is not run, we go conservative and add push/pop binding instrs. everywhere
      scopeHasLocalVarStores = bindingHasEscaped;
    }

    boolean requireFrame = doesItRequireFrame(scope, bindingHasEscaped);
    boolean requireBinding = !scope.getFlags().contains(IRFlags.DYNSCOPE_ELIMINATED);

    if (requireBinding || requireFrame) {
      BasicBlock entryBB = cfg.getEntryBB();
      // Push frame/binding at scope entry.
      if (requireFrame) entryBB.addInstr(new PushFrameInstr(scope.getName()));
      if (requireBinding) entryBB.addInstr(new PushBindingInstr());

      // SSS FIXME: We are doing this conservatively.
      // Only scopes that have unrescued exceptions need a GEB.
      //
      // Allocate GEB if necessary for popping
      BasicBlock geb = cfg.getGlobalEnsureBB();
      if (geb == null) {
        Variable exc = scope.createTemporaryVariable();
        geb = new BasicBlock(cfg, Label.getGlobalEnsureBlockLabel());
        geb.addInstr(
            new ReceiveJRubyExceptionInstr(exc)); // JRuby Implementation exception handling
        geb.addInstr(new ThrowExceptionInstr(exc)); // rethrow after pops are added below
        cfg.addGlobalEnsureBB(geb);
      }

      // Pop on all scope-exit paths
      for (BasicBlock bb : cfg.getBasicBlocks()) {
        Instr i = null;
        ListIterator<Instr> instrs = bb.getInstrs().listIterator();
        while (instrs.hasNext()) {
          i = instrs.next();
          // Right now, we only support explicit call protocol on methods.
          // So, non-local returns and breaks don't get here.
          // Non-local-returns and breaks are tricky since they almost always
          // throw an exception and we don't multiple pops (once before the
          // return/break, and once when the exception is caught).
          if (!bb.isExitBB() && i instanceof ReturnBase) {
            // Add before the break/return (step back, insert, then stop scanning this BB).
            instrs.previous();
            if (requireBinding) instrs.add(new PopBindingInstr());
            if (requireFrame) instrs.add(new PopFrameInstr());
            break;
          }
        }
        if (bb.isExitBB() && !bb.isEmpty()) {
          // Last instr could be a return -- so, move iterator one position back
          if (i != null && i instanceof ReturnBase) instrs.previous();
          if (requireBinding) instrs.add(new PopBindingInstr());
          if (requireFrame) instrs.add(new PopFrameInstr());
        }
        if (bb == geb) {
          // Add before throw-exception-instr which would be the last instr
          if (i != null) {
            // Assumption: Last instr should always be a control-transfer instruction
            assert i.getOperation().transfersControl()
                : "Last instruction of GEB in scope: "
                    + scope
                    + " is "
                    + i
                    + ", not a control-xfer instruction";
            instrs.previous();
          }
          if (requireBinding) instrs.add(new PopBindingInstr());
          if (requireFrame) instrs.add(new PopFrameInstr());
        }
      }
    }

    // This scope has an explicit call protocol flag now
    scope.setExplicitCallProtocolFlag();
  }

  // FIXME: Useless for now
  // Run on all nested closures.
  for (IRClosure c : scope.getClosures()) run(c, false, true);

  // LVA information is no longer valid after the pass
  // FIXME: Grrr ... this seems broken to have to create a new object to invalidate
  (new LiveVariableAnalysis()).invalidate(scope);

  return null;
}
private static void analyzeProfile() { versionCount++; // if (inlineCount == 2) return; if (codeModificationsCount == 0) numCyclesWithNoModifications++; else numCyclesWithNoModifications = 0; codeModificationsCount = 0; if (numCyclesWithNoModifications < 3) return; // We are now good to go -- start analyzing the profile // System.out.println("-------------------start analysis-----------------------"); final HashMap<IRScope, Long> scopeCounts = new HashMap<IRScope, Long>(); final ArrayList<IRCallSite> callSites = new ArrayList<IRCallSite>(); HashMap<IRCallSite, Long> callSiteCounts = new HashMap<IRCallSite, Long>(); // System.out.println("# call sites: " + callProfile.keySet().size()); long total = 0; for (Long id : callProfile.keySet()) { Long c; CallSiteProfile csp = callProfile.get(id); IRCallSite cs = csp.cs; if (cs.v != scopeVersionMap.get(cs.s).intValue()) { // System.out.println("Skipping callsite: <" + cs.s + "," + cs.v + "> with compiled version: // " + scopeVersionMap.get(cs.s)); continue; } Set<IRScope> calledScopes = csp.counters.keySet(); cs.count = 0; for (IRScope s : calledScopes) { c = scopeCounts.get(s); if (c == null) { c = new Long(0); scopeCounts.put(s, c); } long x = csp.counters.get(s).count; c += x; cs.count += x; } CallBase call = cs.call; if (calledScopes.size() == 1 && !call.inliningBlocked()) { CallSite runtimeCS = call.getCallSite(); if (runtimeCS != null && (runtimeCS instanceof CachingCallSite)) { CachingCallSite ccs = (CachingCallSite) runtimeCS; CacheEntry ce = ccs.getCache(); if (!(ce.method instanceof InterpretedIRMethod)) { // System.out.println("NOT IR-M!"); continue; } else { callSites.add(cs); cs.tgtM = (InterpretedIRMethod) ce.method; } } } total += cs.count; } Collections.sort( callSites, new java.util.Comparator<IRCallSite>() { @Override public int compare(IRCallSite a, IRCallSite b) { if (a.count == b.count) return 0; return (a.count < b.count) ? 
1 : -1; } }); // Find top N call sites double freq = 0.0; int i = 0; boolean noInlining = true; Set<IRScope> inlinedScopes = new HashSet<IRScope>(); for (IRCallSite ircs : callSites) { double contrib = (ircs.count * 100.0) / total; // 1% is arbitrary if (contrib < 1.0) break; i++; freq += contrib; // This check is arbitrary if (i == 100 || freq > 99.0) break; // System.out.println("Considering: " + ircs.call + " with id: " + ircs.call.callSiteId + // " in scope " + ircs.s + " with count " + ircs.count + "; contrib " + contrib + "; freq: " + // freq); // Now inline here! CallBase call = ircs.call; IRScope hs = ircs.s; boolean isHotClosure = hs instanceof IRClosure; IRScope hc = isHotClosure ? hs : null; hs = isHotClosure ? hs.getLexicalParent() : hs; IRScope tgtMethod = ircs.tgtM.getIRMethod(); Instr[] instrs = tgtMethod.getInstrsForInterpretation(); // Dont inline large methods -- 500 is arbitrary // Can be null if a previously inlined method hasn't been rebuilt if ((instrs == null) || instrs.length > 500) { // if (instrs == null) System.out.println("no instrs!"); // else System.out.println("large method with " + instrs.length + " instrs. 
skipping!"); continue; } RubyModule implClass = ircs.tgtM.getImplementationClass(); int classToken = implClass.getGeneration(); String n = tgtMethod.getName(); boolean inlineCall = true; if (isHotClosure) { Operand clArg = call.getClosureArg(null); inlineCall = (clArg instanceof WrappedIRClosure) && (((WrappedIRClosure) clArg).getClosure() == hc); } if (inlineCall) { noInlining = false; long start = new java.util.Date().getTime(); hs.inlineMethod(tgtMethod, implClass, classToken, null, call); inlinedScopes.add(hs); long end = new java.util.Date().getTime(); // System.out.println("Inlined " + tgtMethod + " in " + hs + // " @ instr " + call + " in time (ms): " // + (end-start) + " # instrs: " + instrs.length); inlineCount++; } else { // System.out.println("--no inlining--"); } } for (IRScope x : inlinedScopes) { // update version count for 'hs' scopeVersionMap.put(x, versionCount); // System.out.println("Updating version of " + x + " to " + versionCount); // System.out.println("--- pre-inline-instrs ---"); // System.out.println(x.getCFG().toStringInstrs()); // System.out.println("--- post-inline-instrs ---"); // System.out.println(x.getCFG().toStringInstrs()); } // reset codeModificationsCount = 0; callProfile = new HashMap<Long, CallSiteProfile>(); // Every 1M thread polls, discard stats by reallocating the thread-poll count map if (globalThreadPollCount % 1000000 == 0) { globalThreadPollCount = 0; } }