Example #1
0
  /**
   * Returns a tuple descriptor for the aggregation/analytic's intermediate result (isOutputTuple
   * false) or final result (isOutputTuple true). Also updates the appropriate substitution map,
   * and creates and registers auxiliary equality predicates between the grouping slots and the
   * grouping exprs.
   */
  private TupleDescriptor createTupleDesc(Analyzer analyzer, boolean isOutputTuple) {
    TupleDescriptor result =
        analyzer
            .getDescTbl()
            .createTupleDescriptor(tupleDebugName() + (isOutputTuple ? "-out" : "-intermed"));
    List<Expr> exprs =
        Lists.newArrayListWithCapacity(groupingExprs_.size() + aggregateExprs_.size());
    exprs.addAll(groupingExprs_);
    exprs.addAll(aggregateExprs_);

    int aggregateExprStartIndex = groupingExprs_.size();
    for (int i = 0; i < exprs.size(); ++i) {
      Expr expr = exprs.get(i);
      SlotDescriptor slotDesc = analyzer.addSlotDescriptor(result);
      slotDesc.setLabel(expr.toSql());
      slotDesc.setStats(ColumnStats.fromExpr(expr));
      Preconditions.checkState(expr.getType().isValid());
      slotDesc.setType(expr.getType());
      if (i < aggregateExprStartIndex) {
        // register equivalence between grouping slot and grouping expr;
        // do this only when the grouping expr isn't a constant, otherwise
        // it'll simply show up as a gratuitous HAVING predicate
        // (which would actually be incorrect if the constant happens to be NULL)
        if (!expr.isConstant()) {
          analyzer.createAuxEquivPredicate(new SlotRef(slotDesc), expr.clone());
        }
      } else {
        Preconditions.checkArgument(expr instanceof FunctionCallExpr);
        FunctionCallExpr aggExpr = (FunctionCallExpr) expr;
        if (aggExpr.isMergeAggFn()) {
          slotDesc.setLabel(aggExpr.getChild(0).toSql());
        } else {
          slotDesc.setLabel(aggExpr.toSql());
        }

        // count() is non-nullable: it returns 0 rather than NULL on empty input.
        if (aggExpr.getFnName().getFunction().equals("count")) {
          // TODO: Consider making nullability a property of types or of builtin agg fns.
          // row_number, rank, and dense_rank are non-nullable as well.
          slotDesc.setIsNullable(false);
        }
        if (!isOutputTuple) {
          Type intermediateType = ((AggregateFunction) aggExpr.fn_).getIntermediateType();
          if (intermediateType != null) {
            // If the function's intermediate type is a wildcard decimal, keep the
            // (concrete) output type as the intermediate type instead.
            if (!intermediateType.isWildcardDecimal()) {
              slotDesc.setType(intermediateType);
            } else {
              Preconditions.checkState(expr.getType().isDecimal());
            }
          }
        }
      }
    }
    String prefix = (isOutputTuple ? "result" : "intermediate");
    LOG.trace(prefix + " tuple=" + result.debugString());
    return result;
  }
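A minimal caller sketch (an assumption, not from the source; the method name
createTupleDescs is hypothetical, though both fields appear in Example #2):

  // Hypothetical driver: build the intermediate and output descriptors in turn.
  private void createTupleDescs(Analyzer analyzer) {
    intermediateTupleDesc_ = createTupleDesc(analyzer, false); // the "-intermed" tuple
    outputTupleDesc_ = createTupleDesc(analyzer, true); // the "-out" tuple
  }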
Example #2
0
 public String debugString() {
   return Objects.toStringHelper(this)
       .add("grouping_exprs", Expr.debugString(groupingExprs_))
       .add("aggregate_exprs", Expr.debugString(aggregateExprs_))
       .add(
           "intermediate_tuple",
           (intermediateTupleDesc_ == null) ? "null" : intermediateTupleDesc_.debugString())
       .add(
           "output_tuple",
           (outputTupleDesc_ == null) ? "null" : outputTupleDesc_.debugString())
       .toString();
 }
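For reference, Guava's toStringHelper renders this roughly as the following
(illustrative output; the enclosing class name is an assumption):

  AggregateInfo{grouping_exprs=..., aggregate_exprs=..., intermediate_tuple=..., output_tuple=...}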
Example #3
0
 /**
  * Expands "*" for a particular tuple descriptor by appending a SlotRef for each of the table's
  * columns to resultExprs and each column name to colLabels.
  */
 private void expandStar(Analyzer analyzer, String alias, TupleDescriptor desc)
     throws AnalysisException {
   for (Column col : desc.getTable().getColumnsInHiveOrder()) {
     resultExprs.add(new SlotRef(new TableName(null, alias), col.getName()));
     colLabels.add(col.getName().toLowerCase());
   }
 }
Example #4
0
 /**
  * Materializes the slots in sortTupleDesc_ that are referenced by the ordering exprs. Then, for
  * each materialized sort tuple slot, materializes the slots referenced by its corresponding
  * sortTupleSlotExpr after applying 'smap'.
  */
 public void materializeRequiredSlots(Analyzer analyzer, ExprSubstitutionMap smap)
     throws InternalException {
   Preconditions.checkNotNull(sortTupleDesc_);
   Preconditions.checkNotNull(sortTupleSlotExprs_);
   Preconditions.checkState(sortTupleDesc_.getIsMaterialized());
   analyzer.materializeSlots(orderingExprs_);
   List<SlotDescriptor> sortTupleSlotDescs = sortTupleDesc_.getSlots();
   List<Expr> materializedExprs = Lists.newArrayList();
   for (int i = 0; i < sortTupleSlotDescs.size(); ++i) {
     if (sortTupleSlotDescs.get(i).isMaterialized()) {
       materializedExprs.add(sortTupleSlotExprs_.get(i));
     }
   }
   List<Expr> substMaterializedExprs = Expr.substituteList(materializedExprs, smap, analyzer);
   analyzer.materializeSlots(substMaterializedExprs);
 }
Example #5
0
  /**
  * {@inheritDoc}
  *
  * <p>Show the types of local variables and outer variables.
  */
 @Override
 AvailObjectFieldHelper[] o_DescribeForDebugger(final AvailObject object) {
   final List<AvailObjectFieldHelper> fields = new ArrayList<>();
   fields.addAll(Arrays.asList(super.o_DescribeForDebugger(object)));
   for (int i = 1, end = object.numOuters(); i <= end; i++) {
     fields.add(
         new AvailObjectFieldHelper(object, FakeSlots.OUTER_TYPE_, i, object.outerTypeAt(i)));
   }
   for (int i = 1, end = object.numLocals(); i <= end; i++) {
     fields.add(
         new AvailObjectFieldHelper(object, FakeSlots.LOCAL_TYPE_, i, object.localTypeAt(i)));
   }
   final StringBuilder disassembled = new StringBuilder();
   object.printOnAvoidingIndent(disassembled, new IdentityHashMap<A_BasicObject, Void>(), 0);
   final String[] content = disassembled.toString().split("\n");
   fields.add(new AvailObjectFieldHelper(object, FakeSlots.L1_DISASSEMBLY, -1, content));
   final List<AvailObject> allLiterals = new ArrayList<>();
   for (int i = 1; i <= object.numLiterals(); i++) {
     allLiterals.add(object.literalAt(i));
   }
   fields.add(
       new AvailObjectFieldHelper(
           object, FakeSlots.ALL_LITERALS, -1, TupleDescriptor.fromList(allLiterals)));
   return fields.toArray(new AvailObjectFieldHelper[fields.size()]);
 }
Example #6
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(
           SetTypeDescriptor.mostGeneralType(), SetTypeDescriptor.mostGeneralType()),
       SetTypeDescriptor.mostGeneralType());
 }
Example #7
0
    @Override
    protected Type visitDereferenceExpression(DereferenceExpression node, AnalysisContext context) {
      QualifiedName qualifiedName = DereferenceExpression.getQualifiedName(node);

      // If this Dereference looks like a column reference, try to match it to a column first.
      if (qualifiedName != null) {
        List<Field> matches = tupleDescriptor.resolveFields(qualifiedName);
        if (matches.size() > 1) {
          throw new SemanticException(AMBIGUOUS_ATTRIBUTE, node, "Column '%s' is ambiguous", node);
        }

        if (matches.size() == 1) {
          Field field = Iterables.getOnlyElement(matches);
          int fieldIndex = tupleDescriptor.indexOf(field);
          resolvedNames.put(node, fieldIndex);
          expressionTypes.put(node, field.getType());
          columnReferences.add(node);
          return field.getType();
        }
      }

      Type baseType = process(node.getBase(), context);
      if (!(baseType instanceof RowType)) {
        throw new SemanticException(
            SemanticErrorCode.TYPE_MISMATCH,
            node.getBase(),
            "Expression %s is not of type ROW",
            node.getBase());
      }

      RowType rowType = (RowType) baseType;

      Type rowFieldType = null;
      for (RowField rowField : rowType.getFields()) {
        if (rowField.getName().equals(Optional.of(node.getFieldName()))) {
          rowFieldType = rowField.getType();
          break;
        }
      }
      if (rowFieldType == null) {
        throw createMissingAttributeException(node);
      }

      expressionTypes.put(node, rowFieldType);
      return rowFieldType;
    }
Example #8
0
 /**
  * Expands "*" for a particular tuple descriptor by appending an analyzed SlotRef for each
  * column to resultExprs_ and each column name to colLabels_.
  */
 private void expandStar(Analyzer analyzer, TableName tblName, TupleDescriptor desc)
     throws AnalysisException, AuthorizationException {
   for (Column col : desc.getTable().getColumnsInHiveOrder()) {
     SlotRef slotRef = new SlotRef(tblName, col.getName());
     slotRef.analyze(analyzer);
     resultExprs_.add(slotRef);
     colLabels_.add(col.getName().toLowerCase());
   }
 }
Example #9
0
    @Override
    protected Type visitSubqueryExpression(SubqueryExpression node, AnalysisContext context) {
      StatementAnalyzer analyzer = statementAnalyzerFactory.apply(node);
      TupleDescriptor descriptor = analyzer.process(node.getQuery(), context);

      // Scalar subqueries should only produce one column
      if (descriptor.getVisibleFieldCount() != 1) {
        throw new SemanticException(
            MULTIPLE_FIELDS_FROM_SCALAR_SUBQUERY,
            node,
            "Subquery expression must produce only one field. Found %s",
            descriptor.getVisibleFieldCount());
      }

      Type type = Iterables.getOnlyElement(descriptor.getVisibleFields()).getType();

      expressionTypes.put(node, type);
      return type;
    }
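Illustratively (assumed SQL, not from the source): SELECT (SELECT 1) satisfies this
check, while SELECT (SELECT 1, 2) fails with MULTIPLE_FIELDS_FROM_SCALAR_SUBQUERY,
since that subquery's descriptor exposes two visible fields.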
Example #10
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(
           MapTypeDescriptor.mapTypeForSizesKeyTypeValueType(
               IntegerRangeTypeDescriptor.wholeNumbers(),
               ATOM.o(),
               InstanceMetaDescriptor.anyMeta())),
       ObjectTypeDescriptor.meta());
 }
Example #11
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(
           RAW_POJO.o(),
           TupleTypeDescriptor.mostGeneralType(),
           TupleTypeDescriptor.zeroOrMoreOf(RAW_POJO.o()),
           InstanceMetaDescriptor.topMeta()),
       TOP.o());
 }
Example #12
0
    @Override
    protected Type visitQualifiedNameReference(
        QualifiedNameReference node, AnalysisContext context) {
      List<Field> matches = tupleDescriptor.resolveFields(node.getName());
      if (matches.isEmpty()) {
        throw createMissingAttributeException(node);
      }

      if (matches.size() > 1) {
        throw new SemanticException(
            AMBIGUOUS_ATTRIBUTE, node, "Column '%s' is ambiguous", node.getName());
      }

      Field field = Iterables.getOnlyElement(matches);
      int fieldIndex = tupleDescriptor.indexOf(field);
      resolvedNames.put(node, fieldIndex);
      expressionTypes.put(node, field.getType());
      columnReferences.add(node);
      return field.getType();
    }
Example #13
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(
           ATOM.o(),
           MapTypeDescriptor.mapTypeForSizesKeyTypeValueType(
               IntegerRangeTypeDescriptor.inclusive(
                   IntegerDescriptor.zero(), IntegerDescriptor.fromInt(socketOptions.length - 1)),
               IntegerRangeTypeDescriptor.inclusive(
                   IntegerDescriptor.one(), IntegerDescriptor.fromInt(socketOptions.length - 1)),
               ANY.o())),
       TOP.o());
 }
Example #14
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(LiteralTokenTypeDescriptor.mostGeneralType()),
       LITERAL_NODE.mostGeneralType());
 }
Example #15
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(InstanceMetaDescriptor.topMeta(), InstanceMetaDescriptor.topMeta()),
       InstanceMetaDescriptor.topMeta());
 }
Example #16
0
 /** Asserts that all ordering exprs are bound by the sort tuple. */
 public void checkConsistency() {
   for (Expr orderingExpr : orderingExprs_) {
     Preconditions.checkState(orderingExpr.isBound(sortTupleDesc_.getId()));
   }
 }
Example #17
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(ATOM.o(), ATOM.o()), EnumerationTypeDescriptor.booleanObject());
 }
Example #18
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(MESSAGE_BUNDLE.o()), IntegerRangeTypeDescriptor.wholeNumbers());
 }
Example #19
0
 @Override
 public Type visitInputReference(InputReference node, AnalysisContext context) {
   Type type = tupleDescriptor.getFieldByIndex(node.getChannel()).getType();
   expressionTypes.put(node, type);
   return type;
 }
Example #20
0
 @Override
 protected A_Type privateBlockTypeRestriction() {
   return FunctionTypeDescriptor.create(
       TupleDescriptor.from(ANY.o()),
       TupleTypeDescriptor.oneOrMoreOf(IntegerRangeTypeDescriptor.bytes()));
 }
Example #21
0
 public TupleId getOutputTupleId() {
   return outputTupleDesc_.getId();
 }
Example #22
0
 public TupleId getIntermediateTupleId() {
   return intermediateTupleDesc_.getId();
 }
Example #23
0
  /**
   * Makes a ConstraintDescriptor out of a SYSCONSTRAINTS row.
   *
   * @param row a SYSCONSTRAINTS row
   * @param parentTupleDescriptor the SubConstraintDescriptor with auxiliary info
   * @param dd the data dictionary
   * @return the ConstraintDescriptor built from the row
   * @throws StandardException thrown on failure
   */
  public TupleDescriptor buildDescriptor(
      ExecRow row, TupleDescriptor parentTupleDescriptor, DataDictionary dd)
      throws StandardException {
    ConstraintDescriptor constraintDesc = null;

    if (SanityManager.DEBUG) {
      SanityManager.ASSERT(
          row.nColumns() == SYSCONSTRAINTS_COLUMN_COUNT,
          "Wrong number of columns for a SYSCONSTRAINTS row");
    }

    DataValueDescriptor col;
    ConglomerateDescriptor conglomDesc;
    DataDescriptorGenerator ddg;
    TableDescriptor td = null;
    int constraintIType = -1;
    int[] keyColumns = null;
    UUID constraintUUID;
    UUID schemaUUID;
    UUID tableUUID;
    UUID referencedConstraintId = null;
    SchemaDescriptor schema;
    String tableUUIDString;
    String constraintName;
    String constraintSType;
    String constraintStateStr;
    boolean constraintEnabled;
    int referenceCount;
    String constraintUUIDString;
    String schemaUUIDString;
    SubConstraintDescriptor scd;

    if (SanityManager.DEBUG) {
      if (!(parentTupleDescriptor instanceof SubConstraintDescriptor)) {
        SanityManager.THROWASSERT(
            "parentTupleDescriptor expected to be instanceof "
                + "SubConstraintDescriptor, not "
                + parentTupleDescriptor.getClass().getName());
      }
    }

    scd = (SubConstraintDescriptor) parentTupleDescriptor;

    ddg = dd.getDataDescriptorGenerator();

    /* 1st column is CONSTRAINTID (UUID - char(36)) */
    col = row.getColumn(SYSCONSTRAINTS_CONSTRAINTID);
    constraintUUIDString = col.getString();
    constraintUUID = getUUIDFactory().recreateUUID(constraintUUIDString);

    /* 2nd column is TABLEID (UUID - char(36)) */
    col = row.getColumn(SYSCONSTRAINTS_TABLEID);
    tableUUIDString = col.getString();
    tableUUID = getUUIDFactory().recreateUUID(tableUUIDString);

    /* Get the TableDescriptor.
     * It may be cached in the SCD,
     * otherwise we need to go to the
     * DD.
     */
    if (scd != null) {
      td = scd.getTableDescriptor();
    }
    if (td == null) {
      td = dd.getTableDescriptor(tableUUID);
    }

    /* 3rd column is NAME (varchar(128)) */
    col = row.getColumn(SYSCONSTRAINTS_CONSTRAINTNAME);
    constraintName = col.getString();

    /* 4th column is TYPE (char(1)) */
    col = row.getColumn(SYSCONSTRAINTS_TYPE);
    constraintSType = col.getString();
    if (SanityManager.DEBUG) {
      SanityManager.ASSERT(constraintSType.length() == 1, "Fourth column type incorrect");
    }

    boolean typeSet = false;
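    // The switch below uses deliberate fall-through: 'P' and 'U' fall into
    // 'F' so that all three key-constraint types share the backing-index
    // lookup, while typeSet keeps a later case from overwriting
    // constraintIType.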
    switch (constraintSType.charAt(0)) {
      case 'P':
        constraintIType = DataDictionary.PRIMARYKEY_CONSTRAINT;
        typeSet = true;
        // fall through

      case 'U':
        if (!typeSet) {
          constraintIType = DataDictionary.UNIQUE_CONSTRAINT;
          typeSet = true;
        }
        // fall through

      case 'F':
        if (!typeSet) constraintIType = DataDictionary.FOREIGNKEY_CONSTRAINT;
        if (SanityManager.DEBUG) {
          if (!(parentTupleDescriptor instanceof SubKeyConstraintDescriptor)) {
            SanityManager.THROWASSERT(
                "parentTupleDescriptor expected to be instanceof "
                    + "SubKeyConstraintDescriptor, not "
                    + parentTupleDescriptor.getClass().getName());
          }
        }
        conglomDesc =
            td.getConglomerateDescriptor(
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getIndexId());
        /* Take care of the rare case of conglomDesc being null.  The
         * reason is that our "td" is out of date.  Another thread
         * that was adding a constraint committed between the moment
         * we got the table descriptor (conglomerate list) and the
         * moment we scanned and got the constraint desc list.  That
         * thread just added new rows to SYSCONGLOMERATES,
         * SYSCONSTRAINTS, etc., and we wouldn't have wanted to lock the
         * system tables just to prevent other threads from adding new
         * rows.
         */
        if (conglomDesc == null) {
          // We can't be getting td from the cache: to reach this point we
          // must be in dd's DDL mode (that's why the DDL thread went
          // through). We are not done yet and the dd ref count is not 0,
          // so it couldn't have switched to COMPILE_ONLY mode.
          td = dd.getTableDescriptor(tableUUID);
          if (scd != null) scd.setTableDescriptor(td);
          // try again now
          conglomDesc =
              td.getConglomerateDescriptor(
                  ((SubKeyConstraintDescriptor) parentTupleDescriptor).getIndexId());
        }

        if (SanityManager.DEBUG) {
          SanityManager.ASSERT(
              conglomDesc != null, "conglomDesc is expected to be non-null for backing index");
        }
        referencedConstraintId =
            ((SubKeyConstraintDescriptor) parentTupleDescriptor).getKeyConstraintId();
        keyColumns = conglomDesc.getIndexDescriptor().baseColumnPositions();
        break;

      case 'C':
        constraintIType = DataDictionary.CHECK_CONSTRAINT;
        if (SanityManager.DEBUG) {
          if (!(parentTupleDescriptor instanceof SubCheckConstraintDescriptor)) {
            SanityManager.THROWASSERT(
                "parentTupleDescriptor expected to be instanceof "
                    + "SubCheckConstraintDescriptor, not "
                    + parentTupleDescriptor.getClass().getName());
          }
        }
        break;

      default:
        if (SanityManager.DEBUG) {
          SanityManager.THROWASSERT("Fourth column value invalid");
        }
    }

    /* 5th column is SCHEMAID (UUID - char(36)) */
    col = row.getColumn(SYSCONSTRAINTS_SCHEMAID);
    schemaUUIDString = col.getString();
    schemaUUID = getUUIDFactory().recreateUUID(schemaUUIDString);

    schema = dd.getSchemaDescriptor(schemaUUID, null);

    /* 6th column is STATE (char(1)) */
    col = row.getColumn(SYSCONSTRAINTS_STATE);
    constraintStateStr = col.getString();
    if (SanityManager.DEBUG) {
      SanityManager.ASSERT(constraintStateStr.length() == 1, "Sixth column (state) type incorrect");
    }

    switch (constraintStateStr.charAt(0)) {
      case 'E':
        constraintEnabled = true;
        break;
      case 'D':
        constraintEnabled = false;
        break;
      default:
        constraintEnabled = true;
        if (SanityManager.DEBUG) {
          SanityManager.THROWASSERT(
              "Invalid state value '" + constraintStateStr + "' for constraint");
        }
    }

    /* 7th column is REFERENCECOUNT (int) */
    col = row.getColumn(SYSCONSTRAINTS_REFERENCECOUNT);
    referenceCount = col.getInt();

    /* now build and return the descriptor */

    switch (constraintIType) {
      case DataDictionary.PRIMARYKEY_CONSTRAINT:
        constraintDesc =
            ddg.newPrimaryKeyConstraintDescriptor(
                td,
                constraintName,
                false, // deferrable,
                false, // initiallyDeferred,
                keyColumns, // genReferencedColumns(dd, td), //int referencedColumns[],
                constraintUUID,
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getIndexId(),
                schema,
                constraintEnabled,
                referenceCount);
        break;

      case DataDictionary.UNIQUE_CONSTRAINT:
        constraintDesc =
            ddg.newUniqueConstraintDescriptor(
                td,
                constraintName,
                false, // deferrable,
                false, // initiallyDeferred,
                keyColumns, // genReferencedColumns(dd, td), //int referencedColumns[],
                constraintUUID,
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getIndexId(),
                schema,
                constraintEnabled,
                referenceCount);
        break;

      case DataDictionary.FOREIGNKEY_CONSTRAINT:
        if (SanityManager.DEBUG) {
          SanityManager.ASSERT(
              referenceCount == 0, "REFERENCECOUNT column is nonzero for fk constraint");
        }

        constraintDesc =
            ddg.newForeignKeyConstraintDescriptor(
                td,
                constraintName,
                false, // deferrable,
                false, // initiallyDeferred,
                keyColumns, // genReferencedColumns(dd, td), //int referencedColumns[],
                constraintUUID,
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getIndexId(),
                schema,
                referencedConstraintId,
                constraintEnabled,
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getRaDeleteRule(),
                ((SubKeyConstraintDescriptor) parentTupleDescriptor).getRaUpdateRule());
        break;

      case DataDictionary.CHECK_CONSTRAINT:
        if (SanityManager.DEBUG) {
          SanityManager.ASSERT(
              referenceCount == 0, "REFERENCECOUNT column is nonzero for check constraint");
        }

        constraintDesc =
            ddg.newCheckConstraintDescriptor(
                td,
                constraintName,
                false, // deferrable,
                false, // initiallyDeferred,
                constraintUUID,
                ((SubCheckConstraintDescriptor) parentTupleDescriptor).getConstraintText(),
                ((SubCheckConstraintDescriptor) parentTupleDescriptor)
                    .getReferencedColumnsDescriptor(),
                schema,
                constraintEnabled);
        break;
    }
    return constraintDesc;
  }
Example #24
0
  /**
   * Create a new compiled code object with the given properties.
   *
   * @param nybbles The nybblecodes.
   * @param locals The number of local variables.
   * @param stack The maximum stack depth.
   * @param functionType The type that the code's functions will have.
   * @param primitive Which primitive to invoke, or null if none.
   * @param literals A tuple of literals.
   * @param localTypes A tuple of types of local variables.
   * @param outerTypes A tuple of types of outer (captured) variables.
   * @param module The module in which the code occurs, or nil.
   * @param lineNumber The module line number on which this code starts.
   * @return The new compiled code object.
   */
  public static AvailObject create(
      final A_Tuple nybbles,
      final int locals,
      final int stack,
      final A_Type functionType,
      final @Nullable Primitive primitive,
      final A_Tuple literals,
      final A_Tuple localTypes,
      final A_Tuple outerTypes,
      final A_Module module,
      final int lineNumber) {
    if (primitive != null) {
      // Sanity check for primitive blocks.  Use this to hunt down
      // incorrectly specified primitive signatures.
      final boolean canHaveCode = primitive.canHaveNybblecodes();
      assert canHaveCode == (nybbles.tupleSize() > 0);
      final A_Type restrictionSignature = primitive.blockTypeRestriction();
      assert restrictionSignature.isSubtypeOf(functionType);
    } else {
      assert nybbles.tupleSize() > 0;
    }

    assert localTypes.tupleSize() == locals;
    final A_Type argCounts = functionType.argsTupleType().sizeRange();
    final int numArgs = argCounts.lowerBound().extractInt();
    assert argCounts.upperBound().extractInt() == numArgs;
    final int literalsSize = literals.tupleSize();
    final int outersSize = outerTypes.tupleSize();

    assert 0 <= numArgs && numArgs <= 0xFFFF;
    assert 0 <= locals && locals <= 0xFFFF;
    final int slotCount = numArgs + locals + stack;
    assert 0 <= slotCount && slotCount <= 0xFFFF;
    assert 0 <= outersSize && outersSize <= 0xFFFF;
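    // numArgs, locals, frame slots, and outers must each fit in 16 bits,
    // hence the 0xFFFF upper bounds above.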

    assert module.equalsNil() || module.isInstanceOf(MODULE.o());
    assert lineNumber >= 0;

    final AvailObject code = mutable.create(literalsSize + outersSize + locals);

    final InvocationStatistic statistic = new InvocationStatistic();
    statistic.countdownToReoptimize.set(L2Chunk.countdownForNewCode());
    final AvailObject statisticPojo = RawPojoDescriptor.identityWrap(statistic);

    code.setSlot(NUM_LOCALS, locals);
    code.setSlot(NUM_ARGS, numArgs);
    code.setSlot(FRAME_SLOTS, slotCount);
    code.setSlot(NUM_OUTERS, outersSize);
    code.setSlot(PRIMITIVE, primitive == null ? 0 : primitive.primitiveNumber);
    code.setSlot(NYBBLES, nybbles.makeShared());
    code.setSlot(FUNCTION_TYPE, functionType.makeShared());
    code.setSlot(PROPERTY_ATOM, NilDescriptor.nil());
    code.setSlot(STARTING_CHUNK, L2Chunk.unoptimizedChunk().chunkPojo);
    code.setSlot(INVOCATION_STATISTIC, statisticPojo);

    // Fill in the literals.
    int dest;
    for (dest = 1; dest <= literalsSize; dest++) {
      code.setSlot(LITERAL_AT_, dest, literals.tupleAt(dest).makeShared());
    }
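    // Outer variable types, then local variable types, follow the literals
    // in the same indexed slot region (LITERAL_AT_), continuing at dest.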
    for (int i = 1; i <= outersSize; i++) {
      code.setSlot(LITERAL_AT_, dest++, outerTypes.tupleAt(i).makeShared());
    }
    for (int i = 1; i <= locals; i++) {
      code.setSlot(LITERAL_AT_, dest++, localTypes.tupleAt(i).makeShared());
    }
    assert dest == literalsSize + outersSize + locals + 1;

    final A_Atom propertyAtom =
        AtomWithPropertiesDescriptor.create(TupleDescriptor.empty(), module);
    propertyAtom.setAtomProperty(lineNumberKeyAtom(), IntegerDescriptor.fromInt(lineNumber));
    code.setSlot(PROPERTY_ATOM, propertyAtom.makeShared());
    final int hash = propertyAtom.hash() ^ -0x3087B215;
    code.setSlot(HASH, hash);
    code.makeShared();

    // Add the newborn raw function to the weak set being used for code
    // coverage tracking.
    activeRawFunctions.add(code);

    return code;
  }