/**
 * Builds a map associating each owner value node with its unconstrained type, covering this
 * editor together with every child editor it contains.
 *
 * @return Map from the owner value node of each editor used by this editor to the corresponding
 *     unconstrained type.
 */
private Map<ValueNode, TypeExpr> getValueNodeToUnconstrainedTypeMap() {
  TypeExpr unconstrainedType = getContext().getLeastConstrainedTypeExpr();

  // A bare type variable carries no constraint information, so fall back to the declared
  // result type of this editor's data constructor.
  if (unconstrainedType.rootTypeVar() != null) {
    unconstrainedType = getDataConstructor().getTypeExpr().getResultType();
  }

  Map<ValueNode, TypeExpr> nodeToTypeMap = new HashMap<ValueNode, TypeExpr>();
  nodeToTypeMap.put(getValueNode(), unconstrainedType);

  // Merge in the mappings contributed by each child editor panel.
  for (final DataConstructorEditorPanel childEditor : editorPanelList) {
    nodeToTypeMap.putAll(childEditor.getValueNodeToUnconstrainedTypeMap(unconstrainedType));
  }

  return nodeToTypeMap;
}
/**
 * Computes autoburn information: which combinations of input burns (if any) would allow the
 * source gem's type to unify with the given destination type.
 *
 * <p>Note that the types of combinations considered is subject to MAX_BURN_DEPTH.
 *
 * <p>TODOEL: it seems like it would be better if all of the input types in source type pieces
 * were burnable, and it were up to the caller to figure out how the resulting indices mapped onto
 * its arguments.
 *
 * @param destType for example, if connecting to the first argument of the map gem, this would be
 *     a -> b
 * @param sourceTypePieces a list of the relevant source type pieces (ie inputs and output). This
 *     includes the burnable and already burnt inputs in proper order, as well as the output piece
 *     (which should appear at the end). It does NOT include any inputs that are already bound.
 *     For example, in the following case:
 *     <pre>
 *     Int -------------\
 *                       \
 *     bound ------------ \
 *                         |- a
 *     Boolean (burned) >- /
 *                        /
 *     Double ----------/
 *     </pre>
 *     This would be [Int, Boolean, Double, a]
 * @param unburnedArguments This is an array of indices into sourceTypePieces that should be
 *     considered for burning sites. This can be null, in which case *every* argument is
 *     interpreted as a possible burn site i.e. equivalent to a value of [0, 1, 2, ...,
 *     sourceTypePieces.length - 1]. For example, if the take gem were the source, and it had no
 *     arguments burnt and none bound, this would be [0, 1]. If its 0th argument were burnt this
 *     would be [1]. If both arguments were burnt, this would be []. Note that since these are
 *     indices into sourceTypePieces, if one or more of the arguments are bound, they are not
 *     counted. So, for example, if the first argument of take was bound to another gem and the
 *     second one was unburned, this would be [0]. In the situation illustrated above, this would
 *     be [0, 2].
 * @param info the typeCheck info to use
 * @param sourceGem the source gem, if one exists. Should be null if otherwise.
 * @return the AutoburnInfo describing the unification status, the valid burn combinations (null
 *     if unification is not possible), and the best/no-burn type-closeness values.
 */
private static AutoburnInfo getAutoburnInfoWorker(
    TypeExpr destType,
    TypeExpr[] sourceTypePieces,
    int[] unburnedArguments,
    TypeCheckInfo info,
    Gem sourceGem) {

  // Possible improvements:
  //   Check matchability of the nth argument burn to the nth dest type piece.
  //   Use the fact that some clients do not want to know all burn combos in the ambiguous case.
  //   Use type piece identity (eg. source type pieces which are the same type var..). Hash for
  //     gem types already tried.
  //   Group types when calculating (eg. if try burning one Double, trying to burn the other
  //     double will give the same result).
  //   Specialize as burns are applied (eg. makeTransform is Typeable a => a -> a. If one a is
  //     specialized, so is the other).

  // initialize our return type
  AutoburnUnifyStatus unificationStatus = AutoburnUnifyStatus.NOT_POSSIBLE;

  // Number of arguments that are candidates for burning.
  int numUnburned;
  if (unburnedArguments == null) {
    // all positions are burnable: synthesize the identity index array [0 .. n-1].
    numUnburned = sourceTypePieces.length - 1;
    unburnedArguments = new int[numUnburned];
    for (int i = 0; i < numUnburned; i++) {
      unburnedArguments[i] = i;
    }

  } else {
    // only some positions are burnable. This corresponds to a gem graph where some arguments
    // have been already burnt by the user, or where some of the arguments do not correspond to
    // arguments in the root gem.
    numUnburned = unburnedArguments.length;
  }

  // The output piece is, by contract, the last element of sourceTypePieces.
  TypeExpr outputType = sourceTypePieces[sourceTypePieces.length - 1];

  boolean doNotTryBurning = false;
  if (!destType.isFunctionType()) {
    // Don't try burning if the destination type isn't a function.
    // This is not a logically required restriction. For example, any burnt gem can be connected
    // to the Prelude.id gem.
    // A less trivial example is that Prelude.sin with argument burnt can be connected to the
    // Prelude.typeOf gem. This is because Double -> Double can unify with Typeable a => a.
    // However, it is not a situation that the end user would "likely" want to see in the list
    // and we found that it results in many rare situations.
    doNotTryBurning = true;

  } else {
    // If the destination result type is a type constructor, perform a basic check against the
    // source result type.
    // The fact used here:
    //   a. for 2 types to unify, their result types must unify.
    //   b. burning doesn't change the result type (in the technical sense of CAL, and not in the
    //      sense of the output type displayed as the result of a gem in the GemCutter. For
    //      example, burning the first argument of the take gem changes the gem in the GemCutter
    //      to display as a 1 argument gem with output type Int -> [a], but the overall type of
    //      the burnt take gem is [a] -> (Int -> [a]) which is just [a] -> Int -> [a]
    //      (-> is right associative) and has result type [a], as with the unburnt take gem.
    TypeExpr destResultTypeExpr = destType.getResultType();
    if (destResultTypeExpr.rootTypeVar() == null
        && !TypeExpr.canUnifyType(
            destResultTypeExpr, outputType.getResultType(), info.getModuleTypeInfo())) {
      // only do the check if the destination result type is a type constructor or record type.
      // the reason for this is that if it is a parametric type, we are likely to succeed here...
      doNotTryBurning = true;
    }
  }

  // Best (largest) type closeness seen over any burn combination; -1 until one unifies.
  int maxTypeCloseness = -1;

  // the type closeness with no burning
  int noBurnTypeCloseness = Integer.MIN_VALUE;

  // The lower bound on the number of burns in the upper range.
  int upperRangeMinBurns = Math.max(numUnburned - MAX_BURN_DEPTH, MAX_BURN_DEPTH + 1);

  // List of all burn combinations for which unification can take place.
  List<BurnCombination> burnCombos = new ArrayList<BurnCombination>();

  // Our combination generator will create all combinations of a fixed size.
  // So, we iterate over all possible sizes.
  // However, we skip over sizes between MAX_BURN_DEPTH and upperRangeMinBurns
  // so that our algorithm runs in polynomial ( specifically O(n^MAX_BURN_DEPTH) ) time
  // rather than exponential ( O(2^n) ).
  for (int numBurns = 0; numBurns <= numUnburned; numBurns++) {

    // Burning was ruled out above; only the numBurns == 0 iteration is relevant.
    if (doNotTryBurning && numBurns > 0) {
      break;
    }

    // If necessary, skip from MAX_BURN_DEPTH to upperRangeMinBurns.
    if (numBurns == MAX_BURN_DEPTH + 1) {
      numBurns = upperRangeMinBurns;
    }

    // Iterate through all the combinations of burning inputs of size numBurns.
    boolean isValidCombo = true;
    for (int[] burnComboArray = getFirstCombination(numBurns);
        isValidCombo;
        isValidCombo = getNextCombination(burnComboArray, numUnburned)) {

      // calculate what the corresponding output type would be.
      // Note that we are assuming that the elements of burnComboArray are in ascending order.

      // Three cursors walked in lockstep, highest index to lowest:
      //   currentSourceTypePiece — index into sourceTypePieces (all args, burnt or not);
      //   nextUnburnedArg        — index into unburnedArguments;
      //   nextArgToBurn          — index into burnComboArray (the combo being tried).
      int currentSourceTypePiece = sourceTypePieces.length - 2;
      int nextUnburnedArg = numUnburned - 1;
      int nextArgToBurn = numBurns - 1;

      // Loop over currentSourceTypePieces.
      // These represent all the burned and unburned arguments.
      // Each burnt argument's type is prepended onto the output type as a function arg.
      TypeExpr newOutputType = outputType;
      while (currentSourceTypePiece >= 0) {

        boolean shouldBurn = false;

        if (nextUnburnedArg >= 0
            && currentSourceTypePiece == unburnedArguments[nextUnburnedArg]) {
          // This is an unburned argument.

          if (nextArgToBurn >= 0 && nextUnburnedArg == burnComboArray[nextArgToBurn]) {
            // This is a burnable argument that we want to try burning
            shouldBurn = true;
            nextArgToBurn--;
          }
          nextUnburnedArg--;

        } else {
          // This is a burned argument.
          shouldBurn = true;
        }

        if (shouldBurn) {
          // We need to add the corresponding type to the output type
          TypeExpr argType = sourceTypePieces[currentSourceTypePiece];
          newOutputType = TypeExpr.makeFunType(argType, newOutputType);
        }

        currentSourceTypePiece--;
      }

      // Would the resulting output type unify with the type we want?
      int typeCloseness =
          TypeExpr.getTypeCloseness(destType, newOutputType, info.getModuleTypeInfo());

      if (numBurns == 0) {
        // The empty combination occurs exactly once, so this assignment happens at most once.
        assert (noBurnTypeCloseness == Integer.MIN_VALUE);
        noBurnTypeCloseness = typeCloseness;
      }

      // typeCloseness >= 0 means this combination unifies.
      if (typeCloseness >= 0) {

        if (typeCloseness > maxTypeCloseness) {
          maxTypeCloseness = typeCloseness;
        }

        // Whether this (non-empty) combination should be recorded in burnCombos.
        boolean autoburnable = false;
        if (numBurns == 0) {
          // We didn't have to autoburn.
          unificationStatus = AutoburnUnifyStatus.NOT_NECESSARY;

        } else if (unificationStatus == AutoburnUnifyStatus.NOT_POSSIBLE) {
          // We have our first valid combo and it is not possible to unify without burning.
          autoburnable = true;
          unificationStatus = AutoburnUnifyStatus.UNAMBIGUOUS;

        } else if (unificationStatus == AutoburnUnifyStatus.NOT_NECESSARY) {
          // We have our first valid combo and it is possible to unify without burning.
          autoburnable = true;
          unificationStatus = AutoburnUnifyStatus.UNAMBIGUOUS_NOT_NECESSARY;

        } else {
          // We got > 1 valid combos.
          autoburnable = true;
          if (unificationStatus == AutoburnUnifyStatus.UNAMBIGUOUS) {
            unificationStatus = AutoburnUnifyStatus.AMBIGUOUS;

          } else if (unificationStatus == AutoburnUnifyStatus.UNAMBIGUOUS_NOT_NECESSARY) {
            unificationStatus = AutoburnUnifyStatus.AMBIGUOUS_NOT_NECESSARY;
          }
        }

        // If unify can take place, copy the array and store it in burn combos.
        if (autoburnable) {
          int[] autoburnableCombo;
          if (sourceGem != null) {
            // Map combo indices back onto the source gem's actual argument positions.
            autoburnableCombo = translateBurnCombo(burnComboArray, sourceGem);
          } else {
            // Defensive copy: burnComboArray is mutated in place by getNextCombination.
            autoburnableCombo = new int[numBurns];
            System.arraycopy(burnComboArray, 0, autoburnableCombo, 0, numBurns);
          }
          burnCombos.add(new BurnCombination(autoburnableCombo, typeCloseness));
        }
      }
    }
  }

  // By contract, no combination list is returned when unification is impossible.
  if (unificationStatus == AutoburnUnifyStatus.NOT_POSSIBLE) {
    burnCombos = null;
  }

  return new AutoburnInfo(unificationStatus, burnCombos, maxTypeCloseness, noBurnTypeCloseness);
}