/* =============================================================================
 * Vector_alloc
 * -- Allocates a fresh vector whose backing array holds at least one slot.
 * -- Unlike the C original this Java port never returns null; an allocation
 *    failure surfaces as an OutOfMemoryError instead.
 * =============================================================================
 */
public static IntVector vector_alloc(int initCapacity) {
    // Clamp non-positive requests so the backing array is never empty.
    final int actualCapacity = (initCapacity > 1) ? initCapacity : 1;
    IntVector result = new IntVector();
    result.elements = new int[actualCapacity];
    result.capacity = actualCapacity;
    result.size = 0;
    return result;
}
/* =============================================================================
 * Vector_copy
 * -- Copies srcVectorPtr's contents into dstVectorPtr, growing the
 *    destination's backing array (to the source's full capacity) when it is
 *    too small to hold the source's elements.
 * -- Always returns true; the boolean return is kept for API compatibility
 *    with the C original, where allocation could fail.
 * =============================================================================
 */
public static boolean vector_copy(IntVector dstVectorPtr, IntVector srcVectorPtr) {
    int srcSize = srcVectorPtr.size;
    if (dstVectorPtr.capacity < srcSize) {
        // Grow to the source's capacity (not just srcSize) to mirror the
        // original allocation policy. The old array is simply dropped for GC;
        // the explicit null-ing in the C-transliterated version was a no-op.
        int srcCapacity = srcVectorPtr.capacity;
        dstVectorPtr.elements = new int[srcCapacity];
        dstVectorPtr.capacity = srcCapacity;
    }
    // Bulk copy replaces the hand-written element-by-element loop.
    System.arraycopy(srcVectorPtr.elements, 0, dstVectorPtr.elements, 0, srcSize);
    dstVectorPtr.size = srcSize;
    return true;
}
/* =============================================================================
 * data_generate
 * -- Binary variables of random PDFs
 * -- If seed is <0, do not reseed
 * -- Returns random network
 * =============================================================================
 */
// Builds a random Bayesian network over numVar binary variables and fills
// `records` (numRecord consecutive rows of numVar bytes each) with samples
// drawn from it.
// NOTE(review): reads instance fields randomPtr, numVar, numRecord, records;
// assumes they were initialized by the enclosing class — confirm with callers.
public Net data_generate(int seed, int maxNumParent, int percentParent) {
    // Reseed only for non-negative seeds, per the contract above.
    if (seed >= 0) {
        randomPtr.random_seed(seed);
    }

    /*
     * Generate random Bayesian network
     */
    Net netPtr = new Net(numVar);
    netPtr.net_generateRandomEdges(maxNumParent, percentParent, randomPtr);

    /*
     * Create a threshold for each of the possible permutation of variable
     * value instances
     */
    // thresholdsTable[v] has one entry per combination of v's parents' values
    // (2^numParents entries), each a random value in [0, DATA_PRECISION].
    int[][] thresholdsTable = new int[numVar][];
    int v;
    for (v = 0; v < numVar; v++) {
        IntList parentIdListPtr = netPtr.net_getParentIdListPtr(v);
        int numThreshold = 1 << parentIdListPtr.list_getSize();
        int[] thresholds = new int[numThreshold];
        for (int t = 0; t < numThreshold; t++) {
            int threshold = (int) (randomPtr.random_generate() % (DATA_PRECISION + 1));
            thresholds[t] = threshold;
        }
        thresholdsTable[v] = thresholds;
    }

    /*
     * Create variable dependency ordering for record generation
     */
    // `order` lists variable ids such that every variable appears after all of
    // its ancestors, so records can be generated in one left-to-right pass.
    int[] order = new int[numVar];
    int numOrder = 0;
    Queue workQueuePtr = Queue.queue_alloc(-1);
    IntVector dependencyVectorPtr = IntVector.vector_alloc(1);
    BitMap orderedBitmapPtr = BitMap.bitmap_alloc(numVar);
    orderedBitmapPtr.bitmap_clearAll();
    BitMap doneBitmapPtr = BitMap.bitmap_alloc(numVar);
    doneBitmapPtr.bitmap_clearAll();
    v = -1;
    // Visit every not-yet-done variable; each leaf (no children) seeds a BFS
    // over its ancestors.
    while ((v = doneBitmapPtr.bitmap_findClear(v + 1)) >= 0) {
        IntList childIdListPtr = netPtr.net_getChildIdListPtr(v);
        int numChild = childIdListPtr.list_getSize();
        if (numChild == 0) {
            boolean status;
            /*
             * Use breadth-first search to find net connected to this leaf
             */
            workQueuePtr.queue_clear();
            status = workQueuePtr.queue_push(v);
            while (!(workQueuePtr.queue_isEmpty())) {
                int id = workQueuePtr.queue_pop();
                status = doneBitmapPtr.bitmap_set(id);
                status = dependencyVectorPtr.vector_pushBack(id);
                // Queue every parent of `id` for the ancestor walk.
                IntList parentIdListPtr = netPtr.net_getParentIdListPtr(id);
                IntListNode it = parentIdListPtr.head;
                while (it.nextPtr.get() != null) {
                    it = it.nextPtr.get();
                    int parentId = it.dataPtr;
                    status = workQueuePtr.queue_push(parentId);
                }
            }
            /*
             * Create ordering
             */
            // Popping the dependency vector reverses the BFS visit order, so
            // ancestors are emitted before descendants; the ordered bitmap
            // skips ids already placed by an earlier leaf's traversal.
            int n = dependencyVectorPtr.vector_getSize();
            for (int i = 0; i < n; i++) {
                int id = dependencyVectorPtr.vector_popBack();
                if (!(orderedBitmapPtr.bitmap_isSet(id))) {
                    orderedBitmapPtr.bitmap_set(id);
                    order[numOrder++] = id;
                }
            }
        }
    }

    /*
     * Create records
     */
    int startindex = 0;
    for (int r = 0; r < numRecord; r++) {
        for (int o = 0; o < numOrder; o++) {
            v = order[o];
            IntList parentIdListPtr = netPtr.net_getParentIdListPtr(v);
            // Pack the parents' already-generated bits into an index into
            // v's threshold table (parents precede v in `order`).
            int index = 0;
            IntListNode it = parentIdListPtr.head;
            while (it.nextPtr.get() != null) {
                it = it.nextPtr.get();
                int parentId = it.dataPtr;
                int value = records[startindex + parentId];
                index = (index << 1) + value;
            }
            // Bernoulli draw: emit 1 with probability threshold/DATA_PRECISION.
            int rnd = (int) (randomPtr.random_generate() % DATA_PRECISION);
            int threshold = thresholdsTable[v][index];
            records[startindex + v] = (byte) ((rnd < threshold) ? 1 : 0);
        }
        startindex += numVar;
    }

    return netPtr;
}
/**
 * Discovers all changesets of a branch by repeated remote "between" queries.
 * A between(head, root) reply holds elements at power-of-two distances from
 * head (evidenced by the {@code 1 << initial.size()} sizing and the
 * {@code i = i << 1} index stride below); follow-up queries against the
 * returned intermediate nodes fill in the remaining gaps.
 *
 * @param branchRoot oldest changeset of the branch
 * @param branchHead tip changeset of the branch
 * @return list of nodeids from branchRoot to branchHead, inclusive. IOW, first element of the
 *         list is always root of the branch
 * @throws HgRemoteConnectionException if a remote "between" call fails
 */
public List<Nodeid> completeBranch(final Nodeid branchRoot, final Nodeid branchHead) throws HgRemoteConnectionException {
    // Local record of one pending/answered query: which node was queried,
    // where its answers land in `result`, and (once known) the answers.
    class DataEntry {
        public final Nodeid queryHead;
        public final int headIndex;
        public List<Nodeid> entries;
        public DataEntry(Nodeid head, int index, List<Nodeid> data) {
            queryHead = head;
            headIndex = index;
            entries = data;
        }
    };

    List<Nodeid> initial = remoteRepo.between(branchHead, branchRoot);
    // Capacity covers the farthest power-of-two slot the first reply can imply.
    Nodeid[] result = new Nodeid[1 + (1 << initial.size())];
    result[0] = branchHead;
    int rootIndex = -1; // index in the result, where to place branch's root.
    if (initial.isEmpty()) {
        rootIndex = 1;
    } else if (initial.size() == 1) {
        rootIndex = 2;
    }
    LinkedList<DataEntry> datas = new LinkedList<DataEntry>();
    // DataEntry in datas has entries list filled with 'between' data, whereas
    // DataEntry in toQuery keeps only nodeid and its index, with entries to be initialized before
    // moving to datas.
    LinkedList<DataEntry> toQuery = new LinkedList<DataEntry>();
    //
    datas.add(new DataEntry(branchHead, 0, initial));
    int totalQueries = 1;
    HashSet<Nodeid> queried = new HashSet<Nodeid>();
    while (!datas.isEmpty()) {
        // keep record of those planned to be queried next time we call between()
        // although may keep these in queried, if really don't want separate collection
        HashSet<Nodeid> scheduled = new HashSet<Nodeid>();
        do {
            DataEntry de = datas.removeFirst();
            // populate result with discovered elements between de.queryHead and branch's head;
            // element j of the reply sits 2^(j+1)-ish steps away, hence the doubling stride.
            for (int i = 1, j = 0; j < de.entries.size(); i = i << 1, j++) {
                int idx = de.headIndex + i;
                result[idx] = de.entries.get(j);
            }
            // form next query entries from new unknown elements
            if (de.entries.size() > 1) {
                /* when entries has only one element, it means de.queryHead was at head-2 position, and thus
                 * no new information can be obtained. E.g. when it's 2, it might be case of [0..4] query with
                 * [1,2] result, and we need one more query to get element 3.
                 */
                for (int i = 1, j = 0; j < de.entries.size(); i = i << 1, j++) {
                    int idx = de.headIndex + i;
                    Nodeid x = de.entries.get(j);
                    if (!queried.contains(x) && !scheduled.contains(x) && (rootIndex == -1 || rootIndex - de.headIndex > 1)) {
                        /* queries for elements right before head are senseless, but unless we know head's index, do it anyway */
                        toQuery.add(new DataEntry(x, idx, null));
                        scheduled.add(x);
                    }
                }
            }
        } while (!datas.isEmpty());
        if (!toQuery.isEmpty()) {
            // All queued queries go out in one batched round-trip below.
            totalQueries++;
        }
        // for each query, create a between request range, keep record Range->DataEntry to know
        // range's start index
        LinkedList<HgRemoteRepository.Range> betweenBatch = new LinkedList<HgRemoteRepository.Range>();
        HashMap<HgRemoteRepository.Range, DataEntry> rangeToEntry = new HashMap<HgRemoteRepository.Range, DataEntry>();
        for (DataEntry de : toQuery) {
            queried.add(de.queryHead);
            HgRemoteRepository.Range r = new HgRemoteRepository.Range(branchRoot, de.queryHead);
            betweenBatch.add(r);
            rangeToEntry.put(r, de);
        }
        if (!betweenBatch.isEmpty()) {
            Map<Range, List<Nodeid>> between = remoteRepo.between(betweenBatch);
            for (Entry<Range, List<Nodeid>> e : between.entrySet()) {
                DataEntry de = rangeToEntry.get(e.getKey());
                assert de != null;
                de.entries = e.getValue();
                if (rootIndex == -1 && de.entries.size() == 1) {
                    // returned sequence of length 1 means we used element from [head-2] as root
                    int numberOfElementsExcludingRootAndHead = de.headIndex + 1;
                    rootIndex = numberOfElementsExcludingRootAndHead + 1;
                    if (debug) {
                        System.out.printf("On query %d found out exact number of missing elements: %d\n", totalQueries, numberOfElementsExcludingRootAndHead);
                    }
                }
                datas.add(de); // queue up to record result and construct further requests
            }
            betweenBatch.clear();
            rangeToEntry.clear();
        }
        toQuery.clear();
    }
    if (rootIndex == -1) {
        throw new HgInvalidStateException("Shall not happen, provided between output is correct");
    }
    result[rootIndex] = branchRoot;
    // Verify the [0..rootIndex] slice is fully populated while reversing it
    // into root-first order for the caller.
    boolean resultOk = true;
    LinkedList<Nodeid> fromRootToHead = new LinkedList<Nodeid>();
    IntVector missing = new IntVector();
    for (int i = 0; i <= rootIndex; i++) {
        Nodeid n = result[i];
        if (n == null) {
            missing.add(i);
            resultOk = false;
        }
        fromRootToHead.addFirst(n); // reverse order
    }
    if (debug) {
        System.out.println("Total queries:" + totalQueries);
    }
    if (!resultOk) {
        assert missing.size() > 0;
        // TODO post-1.0 perhaps, there's better alternative than HgInvalidStateException, e.g.
        // HgDataFormatException?
        throw new HgInvalidStateException(String.format("Missing elements with indexes: %s", Arrays.toString(missing.toArray())));
    }
    return fromRootToHead;
}
// Triangulate glyph with 'unicode' if not already done.
// Tessellates the outline of character `c` into a TriangleArray: the front
// face always, plus a back face and side walls when fontExtrusion is set
// (flat walls when fontExtrusion.shape == null, profiled walls otherwise).
// Returns null for glyphs with no outline data (e.g. space, control chars).
// Results are cached per Character in geomHash.
GeometryArrayRetained triangulateGlyphs(GlyphVector gv, char c) {
    Character ch = new Character(c);
    GeometryArrayRetained geo = geomHash.get(ch);
    if (geo == null) {
        // Font Y-axis is downwards, so send affine transform to flip it.
        Rectangle2D bnd = gv.getVisualBounds();
        AffineTransform aTran = new AffineTransform();
        double tx = bnd.getX() + 0.5 * bnd.getWidth();
        double ty = bnd.getY() + 0.5 * bnd.getHeight();
        aTran.setToTranslation(-tx, -ty);
        aTran.scale(1.0, -1.0);
        aTran.translate(tx, -ty);
        Shape shape = gv.getOutline();
        PathIterator pIt = shape.getPathIterator(aTran, tessellationTolerance);
        int flag = -1, numPoints = 0, i, j, k, num = 0, vertCnt;
        UnorderList coords = new UnorderList(100, Point3f.class);
        float tmpCoords[] = new float[6];
        float lastX = .0f, lastY = .0f;
        float firstPntx = Float.MAX_VALUE, firstPnty = Float.MAX_VALUE;
        IntVector contours = new IntVector();
        float maxY = -Float.MAX_VALUE;
        int maxYIndex = 0, beginIdx = 0, endIdx = 0, start = 0;
        boolean setMaxY = false;

        // Pass 1: flatten the outline into `coords` (all contour vertices,
        // duplicates removed) and `contours` (vertex count per contour),
        // while tracking the highest point seen (maxYIndex) and the contour
        // [beginIdx, endIdx] that contains it — used below to decide winding.
        while (!pIt.isDone()) {
            Point3f vertex = new Point3f();
            flag = pIt.currentSegment(tmpCoords);
            if (flag == PathIterator.SEG_CLOSE) {
                if (num > 0) {
                    if (setMaxY) {
                        // Get Previous point
                        beginIdx = start;
                        endIdx = numPoints - 1;
                    }
                    contours.add(num);
                    num = 0;
                }
            } else if (flag == PathIterator.SEG_MOVETO) {
                vertex.x = tmpCoords[0];
                vertex.y = tmpCoords[1];
                lastX = vertex.x;
                lastY = vertex.y;
                if ((lastX == firstPntx) && (lastY == firstPnty)) {
                    pIt.next();
                    continue;
                }
                setMaxY = false;
                coords.add(vertex);
                firstPntx = lastX;
                firstPnty = lastY;
                if (num > 0) {
                    contours.add(num);
                    num = 0;
                }
                num++;
                numPoints++;
                // skip checking of first point,
                // since the last point will repeat this.
                start = numPoints;
            } else if (flag == PathIterator.SEG_LINETO) {
                vertex.x = tmpCoords[0];
                vertex.y = tmpCoords[1];
                // Check here for duplicate points. Code
                // later in this function can not handle
                // duplicate points.
                if ((vertex.x == lastX) && (vertex.y == lastY)) {
                    pIt.next();
                    continue;
                }
                if (vertex.y > maxY) {
                    maxY = vertex.y;
                    maxYIndex = numPoints;
                    setMaxY = true;
                }
                lastX = vertex.x;
                lastY = vertex.y;
                coords.add(vertex);
                num++;
                numPoints++;
            }
            pIt.next();
        }

        // No data(e.g space, control characters)
        // Two point can't form a valid contour
        if (numPoints == 0) {
            return null;
        }

        // Determine font winding order use for side triangles
        // (examines the neighbors of the topmost vertex).
        Point3f p1 = new Point3f(), p2 = new Point3f(), p3 = new Point3f();
        boolean flip_side_orient = true;
        Point3f vertices[] = (Point3f[]) coords.toArray(false);
        if (endIdx - beginIdx > 0) {
            // must be true unless it is a single line
            // define as "MoveTo p1 LineTo p2 Close" which is
            // not a valid font definition.
            if (maxYIndex == beginIdx) {
                p1.set(vertices[endIdx]);
            } else {
                p1.set(vertices[maxYIndex - 1]);
            }
            p2.set(vertices[maxYIndex]);
            if (maxYIndex == endIdx) {
                p3.set(vertices[beginIdx]);
            } else {
                p3.set(vertices[maxYIndex + 1]);
            }
            if (p3.x != p2.x) {
                if (p1.x != p2.x) {
                    // Use the one with smallest slope
                    if (Math.abs((p2.y - p1.y) / (p2.x - p1.x)) > Math.abs((p3.y - p2.y) / (p3.x - p2.x))) {
                        flip_side_orient = (p3.x > p2.x);
                    } else {
                        flip_side_orient = (p2.x > p1.x);
                    }
                } else {
                    flip_side_orient = (p3.x > p2.x);
                }
            } else {
                // p1.x != p2.x, otherwise all three
                // point form a straight vertical line with
                // the middle point the highest. This is not a
                // valid font definition.
                flip_side_orient = (p2.x > p1.x);
            }
        }

        // Build a Tree of Islands
        // (outer contours with their holes nested beneath them).
        int startIdx = 0;
        IslandsNode islandsTree = new IslandsNode(-1, -1);
        for (int cIdx = 0; cIdx < contours.size; cIdx++) {
            endIdx = startIdx + contours.data[cIdx];
            islandsTree.insert(new IslandsNode(startIdx, endIdx), vertices);
            startIdx = endIdx;
        }
        coords = null; // Free memory
        contours = null;

        // Compute islandCounts[][] and outVerts[][]
        // islandCounts[i][0] = vertex count of island i's outer contour;
        // islandCounts[i][j+1] = vertex count of its j-th hole.
        // outVerts[i] = the island's vertices, outer contour first, then holes.
        UnorderList islandsList = new UnorderList(10, IslandsNode.class);
        islandsTree.collectOddLevelNode(islandsList, 0);
        IslandsNode nodes[] = (IslandsNode[]) islandsList.toArray(false);
        int islandCounts[][] = new int[islandsList.arraySize()][];
        Point3f outVerts[][] = new Point3f[islandCounts.length][];
        int nchild, sum;
        IslandsNode node;
        for (i = 0; i < islandCounts.length; i++) {
            node = nodes[i];
            nchild = node.numChild();
            islandCounts[i] = new int[nchild + 1];
            islandCounts[i][0] = node.numVertices();
            sum = 0;
            sum += islandCounts[i][0];
            for (j = 0; j < nchild; j++) {
                islandCounts[i][j + 1] = node.getChild(j).numVertices();
                sum += islandCounts[i][j + 1];
            }
            outVerts[i] = new Point3f[sum];
            startIdx = 0;
            for (k = node.startIdx; k < node.endIdx; k++) {
                outVerts[i][startIdx++] = vertices[k];
            }
            for (j = 0; j < nchild; j++) {
                endIdx = node.getChild(j).endIdx;
                for (k = node.getChild(j).startIdx; k < endIdx; k++) {
                    outVerts[i][startIdx++] = vertices[k];
                }
            }
        }
        islandsTree = null; // Free memory
        islandsList = null;
        vertices = null;

        int[] contourCounts = new int[1];
        ArrayList<GeometryArray> triangData = new ArrayList<GeometryArray>();
        Point3f q1 = new Point3f(), q2 = new Point3f(), q3 = new Point3f();
        Vector3f n1 = new Vector3f(), n2 = new Vector3f();
        numPoints = 0;
        for (i = 0; i < islandCounts.length; i++) {
            numPoints += outVerts[i].length;
        }

        final GeometryService gs = newGeometryService();
        int vertOffset = gs.triangulateIslands(islandCounts, outVerts, contourCounts, triangData);

        // Multiply by 2 since we create 2 faces of the font
        // Second term is for side-faces along depth of the font
        if (fontExtrusion == null)
            vertCnt = vertOffset;
        else {
            if (fontExtrusion.shape == null)
                vertCnt = vertOffset * 2 + numPoints * 6;
            else {
                vertCnt = vertOffset * 2 + numPoints * 6 * (fontExtrusion.pnts.length - 1);
            }
        }

        // XXXX: Should use IndexedTriangleArray to avoid
        // duplication of vertices. To create triangles for
        // side faces, every vertex is duplicated currently.
        TriangleArray triAry = new TriangleArray(vertCnt, GeometryArray.COORDINATES | GeometryArray.NORMALS);

        boolean flip_orient[] = new boolean[islandCounts.length];
        boolean findOrient;
        // last known non-degenerate normal
        Vector3f goodNormal = new Vector3f();

        // Emit the front face (and, when extruded, a mirrored back face at
        // z + fontExtrusion.length) from the triangulator's output.
        int currCoordIndex = 0;
        for (j = 0; j < islandCounts.length; j++) {
            GeometryArray ga = triangData.get(j);
            vertOffset = ga.getVertexCount();

            findOrient = false;

            // Create the triangle array
            for (i = 0; i < vertOffset; i += 3, currCoordIndex += 3) {
                // Get 3 points. Since triangle is known to be flat, normal
                // must be same for all 3 points.
                ga.getCoordinate(i, p1);
                ga.getNormal(i, n1);
                ga.getCoordinate(i + 1, p2);
                ga.getCoordinate(i + 2, p3);
                if (!findOrient) {
                    // Check here if triangles are wound incorrectly and need
                    // to be flipped. Degenerate triangles (no usable normal,
                    // or near-zero z) are skipped until one decides it.
                    if (!getNormal(p1, p2, p3, n2)) {
                        continue;
                    }
                    if (n2.z >= EPS) {
                        flip_orient[j] = false;
                    } else if (n2.z <= -EPS) {
                        flip_orient[j] = true;
                    } else {
                        continue;
                    }
                    findOrient = true;
                }
                if (flip_orient[j]) {
                    // New Triangulator preserves contour orientation. If contour
                    // input is wound incorrectly, swap 2nd and 3rd points to
                    // sure all triangles are wound correctly for j3d.
                    q1.x = p2.x; q1.y = p2.y; q1.z = p2.z;
                    p2.x = p3.x; p2.y = p3.y; p2.z = p3.z;
                    p3.x = q1.x; p3.y = q1.y; p3.z = q1.z;
                    n1.x = -n1.x; n1.y = -n1.y; n1.z = -n1.z;
                }

                if (fontExtrusion != null) {
                    // Front face (reversed winding, negated normal) plus the
                    // matching back-face triangle offset by vertOffset slots.
                    n2.x = -n1.x; n2.y = -n1.y; n2.z = -n1.z;
                    triAry.setCoordinate(currCoordIndex, p1);
                    triAry.setNormal(currCoordIndex, n2);
                    triAry.setCoordinate(currCoordIndex + 1, p3);
                    triAry.setNormal(currCoordIndex + 1, n2);
                    triAry.setCoordinate(currCoordIndex + 2, p2);
                    triAry.setNormal(currCoordIndex + 2, n2);
                    q1.x = p1.x; q1.y = p1.y; q1.z = p1.z + fontExtrusion.length;
                    q2.x = p2.x; q2.y = p2.y; q2.z = p2.z + fontExtrusion.length;
                    q3.x = p3.x; q3.y = p3.y; q3.z = p3.z + fontExtrusion.length;
                    triAry.setCoordinate(currCoordIndex + vertOffset, q1);
                    triAry.setNormal(currCoordIndex + vertOffset, n1);
                    triAry.setCoordinate(currCoordIndex + 1 + vertOffset, q2);
                    triAry.setNormal(currCoordIndex + 1 + vertOffset, n1);
                    triAry.setCoordinate(currCoordIndex + 2 + vertOffset, q3);
                    triAry.setNormal(currCoordIndex + 2 + vertOffset, n1);
                } else {
                    triAry.setCoordinate(currCoordIndex, p1);
                    triAry.setNormal(currCoordIndex, n1);
                    triAry.setCoordinate(currCoordIndex + 1, p2);
                    triAry.setNormal(currCoordIndex + 1, n1);
                    triAry.setCoordinate(currCoordIndex + 2, p3);
                    triAry.setNormal(currCoordIndex + 2, n1);
                }
            }
            if (fontExtrusion != null) {
                currCoordIndex += vertOffset;
            }
        }

        // Now add side triangles in both cases.
        // Since we duplicated triangles with different Z, make sure
        // currCoordIndex points to correct location.
        if (fontExtrusion != null) {
            if (fontExtrusion.shape == null) {
                // Straight (flat) side walls: one quad (two triangles) per
                // contour edge, with normals smoothed across edges whose
                // faces meet at less than ~44 degrees.
                boolean smooth;
                // we'll put a crease if the angle between the normals is
                // greater than 44 degrees
                float threshold = (float) Math.cos(44.0 * Math.PI / 180.0);
                float cosine;
                // need the previous normals to check for smoothing
                Vector3f pn1 = null, pn2 = null;
                // need the next normals to check for smoothing
                Vector3f n3 = new Vector3f(), n4 = new Vector3f();
                // store the normals for each point because they are
                // the same for both triangles
                Vector3f p1Normal = new Vector3f();
                Vector3f p2Normal = new Vector3f();
                Vector3f p3Normal = new Vector3f();
                Vector3f q1Normal = new Vector3f();
                Vector3f q2Normal = new Vector3f();
                Vector3f q3Normal = new Vector3f();
                for (i = 0; i < islandCounts.length; i++) {
                    for (j = 0, k = 0, num = 0; j < islandCounts[i].length; j++) {
                        num += islandCounts[i][j];
                        p1.x = outVerts[i][num - 1].x;
                        p1.y = outVerts[i][num - 1].y;
                        p1.z = 0.0f;
                        q1.x = p1.x;
                        q1.y = p1.y;
                        q1.z = p1.z + fontExtrusion.length;
                        p2.z = 0.0f;
                        q2.z = p2.z + fontExtrusion.length;
                        // Seed goodNormal with the first non-degenerate side
                        // normal found on this contour.
                        for (int m = 0; m < num; m++) {
                            p2.x = outVerts[i][m].x;
                            p2.y = outVerts[i][m].y;
                            q2.x = p2.x;
                            q2.y = p2.y;
                            if (getNormal(p1, q1, p2, n1)) {
                                if (!flip_side_orient) {
                                    n1.negate();
                                }
                                goodNormal.set(n1);
                                break;
                            }
                        }
                        for (; k < num; k++) {
                            p2.x = outVerts[i][k].x;
                            p2.y = outVerts[i][k].y;
                            p2.z = 0.0f;
                            q2.x = p2.x;
                            q2.y = p2.y;
                            q2.z = p2.z + fontExtrusion.length;
                            // Fall back to goodNormal whenever the edge is
                            // degenerate and yields no usable normal.
                            if (!getNormal(p1, q1, p2, n1)) {
                                n1.set(goodNormal);
                            } else {
                                if (!flip_side_orient) {
                                    n1.negate();
                                }
                                goodNormal.set(n1);
                            }
                            if (!getNormal(p2, q1, q2, n2)) {
                                n2.set(goodNormal);
                            } else {
                                if (!flip_side_orient) {
                                    n2.negate();
                                }
                                goodNormal.set(n2);
                            }
                            // if there is a previous normal, see if we need to smooth
                            // this normal or make a crease
                            if (pn1 != null) {
                                cosine = n1.dot(pn2);
                                smooth = cosine > threshold;
                                if (smooth) {
                                    p1Normal.x = (pn1.x + pn2.x + n1.x);
                                    p1Normal.y = (pn1.y + pn2.y + n1.y);
                                    p1Normal.z = (pn1.z + pn2.z + n1.z);
                                    normalize(p1Normal);
                                    q1Normal.x = (pn2.x + n1.x + n2.x);
                                    q1Normal.y = (pn2.y + n1.y + n2.y);
                                    q1Normal.z = (pn2.z + n1.z + n2.z);
                                    normalize(q1Normal);
                                } // if smooth
                                else {
                                    p1Normal.x = n1.x;
                                    p1Normal.y = n1.y;
                                    p1Normal.z = n1.z;
                                    q1Normal.x = n1.x + n2.x;
                                    q1Normal.y = n1.y + n2.y;
                                    q1Normal.z = n1.z + n2.z;
                                    normalize(q1Normal);
                                } // else
                            } // if pn1 != null
                            else {
                                pn1 = new Vector3f();
                                pn2 = new Vector3f();
                                p1Normal.x = n1.x;
                                p1Normal.y = n1.y;
                                p1Normal.z = n1.z;
                                q1Normal.x = (n1.x + n2.x);
                                q1Normal.y = (n1.y + n2.y);
                                q1Normal.z = (n1.z + n2.z);
                                normalize(q1Normal);
                            } // else

                            // if there is a next, check if we should smooth normal
                            if (k + 1 < num) {
                                p3.x = outVerts[i][k + 1].x;
                                p3.y = outVerts[i][k + 1].y;
                                p3.z = 0.0f;
                                q3.x = p3.x;
                                q3.y = p3.y;
                                q3.z = p3.z + fontExtrusion.length;
                                if (!getNormal(p2, q2, p3, n3)) {
                                    n3.set(goodNormal);
                                } else {
                                    if (!flip_side_orient) {
                                        n3.negate();
                                    }
                                    goodNormal.set(n3);
                                }
                                if (!getNormal(p3, q2, q3, n4)) {
                                    n4.set(goodNormal);
                                } else {
                                    if (!flip_side_orient) {
                                        n4.negate();
                                    }
                                    goodNormal.set(n4);
                                }
                                cosine = n2.dot(n3);
                                smooth = cosine > threshold;
                                if (smooth) {
                                    p2Normal.x = (n1.x + n2.x + n3.x);
                                    p2Normal.y = (n1.y + n2.y + n3.y);
                                    p2Normal.z = (n1.z + n2.z + n3.z);
                                    normalize(p2Normal);
                                    q2Normal.x = (n2.x + n3.x + n4.x);
                                    q2Normal.y = (n2.y + n3.y + n4.y);
                                    q2Normal.z = (n2.z + n3.z + n4.z);
                                    normalize(q2Normal);
                                } else { // if smooth
                                    p2Normal.x = n1.x + n2.x;
                                    p2Normal.y = n1.y + n2.y;
                                    p2Normal.z = n1.z + n2.z;
                                    normalize(p2Normal);
                                    q2Normal.x = n2.x;
                                    q2Normal.y = n2.y;
                                    q2Normal.z = n2.z;
                                } // else
                            } else { // if k+1 < num
                                p2Normal.x = (n1.x + n2.x);
                                p2Normal.y = (n1.y + n2.y);
                                p2Normal.z = (n1.z + n2.z);
                                normalize(p2Normal);
                                q2Normal.x = n2.x;
                                q2Normal.y = n2.y;
                                q2Normal.z = n2.z;
                            } // else

                            // add pts for the 2 tris
                            // p1, q1, p2 and p2, q1, q2
                            if (flip_side_orient) {
                                triAry.setCoordinate(currCoordIndex, p1);
                                triAry.setNormal(currCoordIndex, p1Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, q1);
                                triAry.setNormal(currCoordIndex, q1Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, p2);
                                triAry.setNormal(currCoordIndex, p2Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, p2);
                                triAry.setNormal(currCoordIndex, p2Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, q1);
                                triAry.setNormal(currCoordIndex, q1Normal);
                                currCoordIndex++;
                            } else {
                                triAry.setCoordinate(currCoordIndex, q1);
                                triAry.setNormal(currCoordIndex, q1Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, p1);
                                triAry.setNormal(currCoordIndex, p1Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, p2);
                                triAry.setNormal(currCoordIndex, p2Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, q1);
                                triAry.setNormal(currCoordIndex, q1Normal);
                                currCoordIndex++;
                                triAry.setCoordinate(currCoordIndex, p2);
                                triAry.setNormal(currCoordIndex, p2Normal);
                                currCoordIndex++;
                            }
                            triAry.setCoordinate(currCoordIndex, q2);
                            triAry.setNormal(currCoordIndex, q2Normal);
                            currCoordIndex++;
                            // Carry this edge's state into the next iteration.
                            pn1.x = n1.x; pn1.y = n1.y; pn1.z = n1.z;
                            pn2.x = n2.x; pn2.y = n2.y; pn2.z = n2.z;
                            p1.x = p2.x; p1.y = p2.y; p1.z = p2.z;
                            q1.x = q2.x; q1.y = q2.y; q1.z = q2.z;
                        } // for k
                        // set the previous normals to null when we are done
                        pn1 = null;
                        pn2 = null;
                    } // for j
                } // for i
            } else { // if shape
                // Profiled side walls: extrude each contour edge through the
                // fontExtrusion.pnts profile (x = depth, y = offset along the
                // averaged vertex normal), emitting a quad per profile segment.
                int m, offset = 0;
                Point3f P2 = new Point3f(), Q2 = new Point3f(), P1 = new Point3f();
                Vector3f nn = new Vector3f(), nn1 = new Vector3f(), nn2 = new Vector3f(), nn3 = new Vector3f();
                Vector3f nna = new Vector3f(), nnb = new Vector3f();
                float length;
                boolean validNormal = false;

                // fontExtrusion.shape is specified, and is NOT straight line
                for (i = 0; i < islandCounts.length; i++) {
                    for (j = 0, k = 0, offset = num = 0; j < islandCounts[i].length; j++) {
                        num += islandCounts[i][j];
                        p1.x = outVerts[i][num - 1].x;
                        p1.y = outVerts[i][num - 1].y;
                        p1.z = 0.0f;
                        q1.x = p1.x;
                        q1.y = p1.y;
                        q1.z = p1.z + fontExtrusion.length;
                        p3.z = 0.0f;
                        // Seed goodNormal/nn1 from the nearest preceding
                        // non-degenerate edge of this contour.
                        for (m = num - 2; m >= 0; m--) {
                            p3.x = outVerts[i][m].x;
                            p3.y = outVerts[i][m].y;
                            if (getNormal(p3, q1, p1, nn1)) {
                                if (!flip_side_orient) {
                                    nn1.negate();
                                }
                                goodNormal.set(nn1);
                                break;
                            }
                        }
                        for (; k < num; k++) {
                            p2.x = outVerts[i][k].x;
                            p2.y = outVerts[i][k].y;
                            p2.z = 0.0f;
                            q2.x = p2.x;
                            q2.y = p2.y;
                            q2.z = p2.z + fontExtrusion.length;
                            getNormal(p1, q1, p2, nn2);
                            // Next vertex wraps to the start of this contour.
                            p3.x = outVerts[i][(k + 1) == num ? offset : (k + 1)].x;
                            p3.y = outVerts[i][(k + 1) == num ? offset : (k + 1)].y;
                            p3.z = 0.0f;
                            if (!getNormal(p3, p2, q2, nn3)) {
                                nn3.set(goodNormal);
                            } else {
                                if (!flip_side_orient) {
                                    nn3.negate();
                                }
                                goodNormal.set(nn3);
                            }
                            // Calculate normals at the point by averaging normals
                            // of two faces on each side of the point.
                            nna.x = (nn1.x + nn2.x);
                            nna.y = (nn1.y + nn2.y);
                            nna.z = (nn1.z + nn2.z);
                            normalize(nna);
                            nnb.x = (nn3.x + nn2.x);
                            nnb.y = (nn3.y + nn2.y);
                            nnb.z = (nn3.z + nn2.z);
                            normalize(nnb);
                            P1.x = p1.x; P1.y = p1.y; P1.z = p1.z;
                            P2.x = p2.x; P2.y = p2.y; P2.z = p2.z;
                            Q2.x = q2.x; Q2.y = q2.y; Q2.z = q2.z;
                            for (m = 1; m < fontExtrusion.pnts.length; m++) {
                                q1.z = q2.z = fontExtrusion.pnts[m].x;
                                q1.x = P1.x + nna.x * fontExtrusion.pnts[m].y;
                                q1.y = P1.y + nna.y * fontExtrusion.pnts[m].y;
                                q2.x = P2.x + nnb.x * fontExtrusion.pnts[m].y;
                                q2.y = P2.y + nnb.y * fontExtrusion.pnts[m].y;
                                if (!getNormal(p1, q1, p2, n1)) {
                                    n1.set(goodNormal);
                                } else {
                                    if (!flip_side_orient) {
                                        n1.negate();
                                    }
                                    goodNormal.set(n1);
                                }
                                if (flip_side_orient) {
                                    triAry.setCoordinate(currCoordIndex, p1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                    triAry.setCoordinate(currCoordIndex, q1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                } else {
                                    triAry.setCoordinate(currCoordIndex, q1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                    triAry.setCoordinate(currCoordIndex, p1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                }
                                triAry.setCoordinate(currCoordIndex, p2);
                                triAry.setNormal(currCoordIndex, n1);
                                currCoordIndex++;
                                if (!getNormal(p2, q1, q2, n1)) {
                                    n1.set(goodNormal);
                                } else {
                                    if (!flip_side_orient) {
                                        n1.negate();
                                    }
                                    goodNormal.set(n1);
                                }
                                if (flip_side_orient) {
                                    triAry.setCoordinate(currCoordIndex, p2);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                    triAry.setCoordinate(currCoordIndex, q1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                } else {
                                    triAry.setCoordinate(currCoordIndex, q1);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                    triAry.setCoordinate(currCoordIndex, p2);
                                    triAry.setNormal(currCoordIndex, n1);
                                    currCoordIndex++;
                                }
                                triAry.setCoordinate(currCoordIndex, q2);
                                triAry.setNormal(currCoordIndex, n1);
                                currCoordIndex++;
                                p1.x = q1.x; p1.y = q1.y; p1.z = q1.z;
                                p2.x = q2.x; p2.y = q2.y; p2.z = q2.z;
                            } // for m
                            p1.x = P2.x; p1.y = P2.y; p1.z = P2.z;
                            q1.x = Q2.x; q1.y = Q2.y; q1.z = Q2.z;
                            nn1.x = nn2.x; nn1.y = nn2.y; nn1.z = nn2.z;
                        } // for k
                        offset = num;
                    } // for j
                } // for i
            } // if shape
        } // if fontExtrusion
        geo = (GeometryArrayRetained) triAry.retained;
        geomHash.put(ch, geo);
    }
    return geo;
}