Example #1
  /**
   * Update the camera position along the path based on the specified distance from the beginning.
   *
   * @param fNormalizedDist normalized distance, in the [0,1] range, from the beginning of the
   *     path at which to place the camera
   */
  private void setPathDist(float fNormalizedDist) {

    // Make sure the distance is in the [0,1] range.
    fNormalizedDist = clampNormalizedPathDistance(fNormalizedDist);

    // Compute the actual distance along the curve.
    float fDist = fNormalizedDist * getPathLength();

    // Save the current distance.
    m_kBranchState.m_fNormalizedPathDist = fNormalizedDist;

    // Get the path point (position and tangent) based on the distance along the curve.
    Curve3f kCurve = m_kBranchState.m_kBranchCurve;
    float fTime = kCurve.GetTime(fDist, 100, 1e-02f);
    Vector3f kViewPoint = kCurve.GetPosition(fTime);

    // If the gaze distance is zero, then use the tangent vector
    // to the curve.
    // If the path is being followed in the reverse direction,
    // then the direction of looking down the path needs to
    // be reversed (negated).
    Vector3f kLookatVector = new Vector3f();
    boolean bLookatVectorUseTangent = true;

    if (m_fGazeDist > 0.0f) {
      float fTimeGazeDist = m_kBranchState.getForwardNormalizedTime(m_fGazeDist);

      if (fTime != fTimeGazeDist) {
        Vector3f kVec = kCurve.GetPosition(fTimeGazeDist);
        kLookatVector.Sub(kVec, kViewPoint);
        kLookatVector.Normalize();
        bLookatVectorUseTangent = false;
      }
    }

    if (bLookatVectorUseTangent) {
      kLookatVector = kCurve.GetTangent(fTime);

      if (!m_kBranchState.m_bMoveForward) {
        kLookatVector.Neg();
      }
    }

    // Update the view given the view position, view direction,
    // and a hint for the view up vector.
    setView(kViewPoint, kLookatVector);

    // Notify listener that we are updated.
    notifyCallback(EVENT_CHANGE_POSITION);
  }
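The helper clampNormalizedPathDistance is not shown in this example. A minimal sketch of what it is assumed to do (restrict the input to the [0,1] range used by setPathDist above) could look like the following; the implementation is illustrative, not the actual one.

  // Hypothetical sketch of the clamp helper used by setPathDist above.
  // Assumes it simply restricts the normalized distance to the [0,1] range.
  private static float clampNormalizedPathDistance(float fNormalizedDist) {
    if (fNormalizedDist < 0.0f) {
      return 0.0f;
    }
    if (fNormalizedDist > 1.0f) {
      return 1.0f;
    }
    return fNormalizedDist;
  }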
Example #2
  /**
   * Loop through all of the possible branch choices and select the one that is the closest to the
   * current view direction vector. Take a vector from the current view point to a point along each
   * choice branch. Compute the dot-product between the current view direction vector and each of
   * these branch direction vectors and take the one with the largest positive value, i.e., most in
   * alignment.
   */
  private void setClosestChoiceBranch() {
    // Retrieve the current combined viewing direction vector.
    // Note that the sign of the view direction vector is negated
    // for the reasons described in the setView method.
    // Matrix4f kMatrixView = parentScene.getWVMatrix();
    // Vector3f kViewDirection = new Vector3f(-kMatrixView.M02, -kMatrixView.M12, -kMatrixView.M22);
    Vector3f kViewDirection = new Vector3f(parentScene.getCameraDirection());

    // Record the current view position and combined view orientation.
    // Vector3f kP0 = new Vector3f(kMatrixView.M03, kMatrixView.M13, kMatrixView.M23);
    Vector3f kP0 = new Vector3f(getViewPoint());

    // Look at a point down each branch at a distance equal to the maximum
    // of the step distance and the gaze distance.
    float fPointDist = Math.max(m_fGazeDist, m_fPathStep);
    float fBestAlign = -1.0f;
    int iBestAlignBranchChoiceIndex = -1;

    for (int iBranch = 0; iBranch < m_akBranchChoice.length; iBranch++) {

      // Get vector from current view point to point down branch path.
      BranchState kBranch = m_akBranchChoice[iBranch];
      Vector3f kV = new Vector3f();

      kV.Sub(kBranch.getForwardNormalizedPosition(fPointDist), kP0);
      kV.Normalize();

      // Only accept branches in front of the camera (positive alignment) and keep the best one.
      float fAlign = kV.Dot(kViewDirection);

      if ((fAlign > 0.0f) && (fAlign > fBestAlign)) {
        fBestAlign = fAlign;
        iBestAlignBranchChoiceIndex = iBranch;
      }
    }

    // Select the "nearest" branch.
    if (iBestAlignBranchChoiceIndex >= 0) {

      // Select the new branch.
      m_iBranchChoiceIndex = iBestAlignBranchChoiceIndex;
      m_bChooseBranch = false;
      setBranch(m_akBranchChoice[m_iBranchChoiceIndex]);
    } else {
      beep();
    }
  }
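The selection above reduces to picking the candidate direction whose dot product with the view direction is largest and positive. A small self-contained sketch of that core test, using plain arrays instead of the WildMagic Vector3f type, might look like this; the method name is illustrative only.

  // Illustrative sketch: pick the candidate direction most aligned with a
  // reference direction. All vectors are assumed to be normalized.
  // Returns -1 when no candidate points into the same half-space.
  private static int mostAlignedIndex(float[] viewDir, float[][] candidates) {
    float fBestAlign = -1.0f;
    int iBest = -1;
    for (int i = 0; i < candidates.length; i++) {
      float fAlign = viewDir[0] * candidates[i][0]
          + viewDir[1] * candidates[i][1]
          + viewDir[2] * candidates[i][2];
      if ((fAlign > 0.0f) && (fAlign > fBestAlign)) {
        fBestAlign = fAlign;
        iBest = i;
      }
    }
    return iBest;
  }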
Example #3
  /**
   * Set the annotation point along the path.
   *
   * @param iItem index of the annotation item in the annotation list
   */
  private void setCurvePathAnnotateItem(int iItem) {

    // Select the curve and the position along the curve.
    // First set the sign of the path step to reflect
    // whether the movement down the path was forward or backward
    // when the annotation point was captured.
    FlyPathAnnotateList_WM.Item kItem = m_kAnnotateList.getItem(iItem);

    m_kBranchState.m_bMoveForward = kItem.isPathMoveForward();
    setBranch(kItem.getBranchIndex());

    m_kViewPoint.Copy(kItem.getCameraLocation());
    m_kViewDirection.Copy(kItem.getCameraDirection());
    m_kViewUp.Copy(kItem.getCameraUp());

    notifyCallback(EVENT_CHANGE_POSITION);
  }
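The FlyPathAnnotateList_WM.Item accessors used above (isPathMoveForward, getBranchIndex, getCameraLocation, getCameraDirection, getCameraUp) suggest that each annotation item captures the branch, travel direction, and full camera pose at the moment it was recorded. A hypothetical, simplified holder for that state could look like the following; the class and field names are assumptions, not the actual Item implementation.

  // Hypothetical sketch of the per-annotation state implied by the getters
  // used in setCurvePathAnnotateItem above.
  private static final class AnnotationSnapshot {
    final int iBranchIndex;          // branch the camera was on
    final boolean bPathMoveForward;  // travel direction along the path
    final float[] akCameraLocation;  // camera position (x, y, z)
    final float[] akCameraDirection; // normalized view direction
    final float[] akCameraUp;        // normalized view up vector

    AnnotationSnapshot(int iBranchIndex, boolean bPathMoveForward,
        float[] akCameraLocation, float[] akCameraDirection, float[] akCameraUp) {
      this.iBranchIndex = iBranchIndex;
      this.bPathMoveForward = bPathMoveForward;
      this.akCameraLocation = akCameraLocation;
      this.akCameraDirection = akCameraDirection;
      this.akCameraUp = akCameraUp;
    }
  }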
Example #4
  /**
   * Work in progress; this function is currently unused. It accumulates the intensity-weighted
   * central second-moment (principal axis) matrix of the image, but it does not modify any
   * parameters or data members and returns void.
   *
   * @param image the image whose voxel intensities are used as weights
   * @param doColor true if the image is a color image, in which case the red, green, and blue
   *     channels are summed to form the voxel weight
   */
  public void calculatePrincipleAxis(ModelImage image, boolean doColor) {
    int x, y, z;
    int n = 0;
    Matrix3f mat2 = new Matrix3f(); // Row,Col
    Matrix3f meanProduct = new Matrix3f();
    Vector3f mean = new Vector3f(); // Column vector
    double voxVal = 0;
    double total = 0;
    double tot = 0;

    // Moments first and second order
    double mX = 0, mY = 0, mZ = 0, mXX = 0, mXY = 0, mXZ = 0, mYY = 0, mYZ = 0, mZZ = 0;

    float min = (float) image.getMin();

    int xEnd = image.getExtents()[0];
    int yEnd = image.getExtents()[1];
    int zEnd = image.getExtents()[2];

    int nLim = (int) Math.sqrt((double) xEnd * yEnd * zEnd);

    if (nLim < 1000) {
      nLim = 1000;
    }

    for (z = 0; z < zEnd; z++) {

      for (y = 0; y < yEnd; y++) {

        for (x = 0; x < xEnd; x++) {

          if (doColor) {
            voxVal =
                (double)
                    (image.getFloatC(x, y, z, 1)
                        + image.getFloatC(x, y, z, 2)
                        + image.getFloatC(x, y, z, 3));
          } else {
            voxVal = (double) (image.getFloat(x, y, z) - min);
          }

          mX += voxVal * x;
          mY += voxVal * y;
          mZ += voxVal * z;
          mXX += voxVal * x * x;
          mXY += voxVal * x * y;
          mXZ += voxVal * x * z;
          mYY += voxVal * y * y;
          mYZ += voxVal * y * z;
          mZZ += voxVal * z * z;
          tot += voxVal;
          n++;

          if (n > nLim) { // Let's not overrun the accumulators during summation
            n = 0;
            total += tot;
            mat2.M00 = (float) (mat2.M00 + mXX);
            mat2.M01 = (float) (mat2.M01 + mXY);
            mat2.M02 = (float) (mat2.M02 + mXZ);
            mat2.M11 = (float) (mat2.M11 + mYY);
            mat2.M12 = (float) (mat2.M12 + mYZ);
            mat2.M22 = (float) (mat2.M22 + mZZ);
            mean.X = (float) (mean.X + mX);
            mean.Y = (float) (mean.Y + mY);
            mean.Z = (float) (mean.Z + mZ);
            tot = 0;
            mX = 0;
            mY = 0;
            mZ = 0;
            mXX = 0;
            mXY = 0;
            mXZ = 0;
            mYY = 0;
            mYZ = 0;
            mZZ = 0;
          }
        }
      }
    }

    total += tot;

    if (Math.abs(total) < 1e-5) {
      total = 1.0f;
    }

    mat2.M00 = (float) ((mat2.M00 + mXX) / total);
    mat2.M01 = (float) ((mat2.M01 + mXY) / total);
    mat2.M02 = (float) ((mat2.M02 + mXZ) / total);
    mat2.M11 = (float) ((mat2.M11 + mYY) / total);
    mat2.M12 = (float) ((mat2.M12 + mYZ) / total);
    mat2.M22 = (float) ((mat2.M22 + mZZ) / total);
    mean.X = (float) ((mean.X + mX) / total);
    mean.Y = (float) ((mean.Y + mY) / total);
    mean.Z = (float) ((mean.Z + mZ) / total);

    // The accumulation above only filled the upper triangle; the moment matrix
    // is symmetric, so mirror it into the lower triangle before making it central.
    mat2.M10 = mat2.M01;
    mat2.M20 = mat2.M02;
    mat2.M21 = mat2.M12;

    // Now make it central (taking off the center of mass).
    meanProduct.MakeTensorProduct(mean, mean);
    mat2.M00 -= meanProduct.M00;
    mat2.M01 -= meanProduct.M01;
    mat2.M02 -= meanProduct.M02;
    mat2.M10 -= meanProduct.M10;
    mat2.M11 -= meanProduct.M11;
    mat2.M12 -= meanProduct.M12;
    mat2.M20 -= meanProduct.M20;
    mat2.M21 -= meanProduct.M21;
    mat2.M22 -= meanProduct.M22;
  }
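The loop above accumulates intensity-weighted first and second moments and then subtracts the tensor product of the mean, i.e. each entry has the form E[xy] - E[x]E[y]. A tiny self-contained sketch of that identity for a single matrix entry, using plain doubles, is shown below; the helper name is illustrative only.

  // Illustrative sketch of the weighted central second moment used by
  // calculatePrincipleAxis: cov(x, y) = E[x*y] - E[x]*E[y], with each sample
  // weighted by its voxel intensity.
  private static double weightedCovarianceXY(double[] w, double[] x, double[] y) {
    double total = 0.0, mX = 0.0, mY = 0.0, mXY = 0.0;
    for (int i = 0; i < w.length; i++) {
      total += w[i];
      mX += w[i] * x[i];
      mY += w[i] * y[i];
      mXY += w[i] * x[i] * y[i];
    }
    if (Math.abs(total) < 1e-5) {
      total = 1.0; // same guard against division by zero as above
    }
    return (mXY / total) - (mX / total) * (mY / total);
  }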
Example #5
  /**
   * Called from JPanelFlythruMove to execute a single camera move or path navigation command.
   *
   * @param command the move command to execute (e.g., "lookup", "forward", "reverse")
   */
  public void move(String command) {

    if (command.equals("lookup")) {
      // pitch - look up
      Vector3f kRight = new Vector3f();
      kRight.UnitCross(m_kViewDirection, m_kViewUp);
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(kRight, (float) Math.toRadians(1));
      kRotate.Mult(m_kViewDirection, m_kViewDirection);
      kRotate.Mult(m_kViewUp, m_kViewUp);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("lookdown")) {
      // pitch - look down
      Vector3f kRight = new Vector3f();
      kRight.UnitCross(m_kViewDirection, m_kViewUp);
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(kRight, (float) Math.toRadians(-1));
      kRotate.Mult(m_kViewDirection, m_kViewDirection);
      kRotate.Mult(m_kViewUp, m_kViewUp);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("lookleft")) {
      // yaw - look left
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(m_kViewUp, (float) Math.toRadians(1));
      kRotate.Mult(m_kViewDirection, m_kViewDirection);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("lookright")) {
      // case KeyEvent.VK_RIGHT:
      // yaw - look right
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(m_kViewUp, (float) Math.toRadians(-1));
      kRotate.Mult(m_kViewDirection, m_kViewDirection);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("counterclockwise")) {
      // case KeyEvent.VK_F3:
      // roll - counterclockwise
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(m_kViewDirection, (float) Math.toRadians(-1));
      kRotate.Mult(m_kViewUp, m_kViewUp);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("clockwise")) {
      // roll - clockwise
      Matrix3f kRotate = new Matrix3f();
      kRotate.FromAxisAngle(m_kViewDirection, (float) Math.toRadians(1));
      kRotate.Mult(m_kViewUp, m_kViewUp);
      // Notify listener that we are updated.
      notifyCallback(EVENT_CHANGE_POSITION);
    } else if (command.equals("escape")) {

      // VK_ESCAPE
      setIdentityViewOrientation();
    } else if (command.equals("home")) {

      // case KeyEvent.VK_HOME:
      // reset position to the beginning of the path
      if (!m_bChooseBranch && (null == m_akBranchChoice)) {
        setPathDist(0.0f);
      } else {
        beep();
      }
    } else if (command.equals("end")) {

      // case KeyEvent.VK_END:
      // reset position to the end of the path
      if (!m_bChooseBranch && (null == m_akBranchChoice)) {
        setPathDist(1.0f);
      } else {
        beep();
      }
    } else if (command.equals("forward")) {

      // case KeyEvent.VK_UP:
      // move forward along the path
      if (!m_bChooseBranch) {
        doPathStep(1);
      } else {
        beep();
      }
    } else if (command.equals("backward")) {

      // case KeyEvent.VK_DOWN:
      // move backward along the path
      if (!m_bChooseBranch) {
        doPathStep(-1);
      } else {
        beep();
      }
    } else if (command.equals("reverse")) {

      // case KeyEvent.VK_R:
      // follow path in reverse heading
      m_kBranchState.m_bMoveForward = !m_kBranchState.m_bMoveForward;
      setPathDist(m_kBranchState.m_fNormalizedPathDist);

    } else if (command.equals("prevAnnotatePt")) {

      // case KeyEvent.VK_F5:
      // go to previous annotate point
      if (!m_bChooseBranch && (m_kAnnotateList.getNumItems() > 0)) {

        if (--m_iAnnotateListItemSelected < 0) {
          m_iAnnotateListItemSelected = m_kAnnotateList.getNumItems() - 1;
        }

        setCurvePathAnnotateItem(m_iAnnotateListItemSelected);
      } else {
        beep();
      }
    } else if (command.equals("nextAnnotatePt")) {

      // case KeyEvent.VK_F6:
      // go to next annotate point
      if (!m_bChooseBranch && (m_kAnnotateList.getNumItems() > 0)) {

        if (++m_iAnnotateListItemSelected >= m_kAnnotateList.getNumItems()) {
          m_iAnnotateListItemSelected = 0;
        }

        setCurvePathAnnotateItem(m_iAnnotateListItemSelected);
      } else {
        beep();
      }
    } else if (command.equals("nextBranch")) {

      // case KeyEvent.VK_SPACE:
      // select next branch choice
      if (null != m_akBranchChoice) {
        setClosestChoiceBranch();
      } else {
        beep();
      }
    } else if (command.equals("stepDistanceIncrease")) {
      m_fPathStep += 0.1f;
      setPathDist(m_kBranchState.m_fNormalizedPathDist);
    } else if (command.equals("stepDistanceDecrease")) {
      m_fPathStep -= 0.1f;

      if (m_fPathStep < 0.1f) {
        m_fPathStep = 0.1f;
        beep();
      }

      setPathDist(m_kBranchState.m_fNormalizedPathDist);
    } else if (command.equals("gazeDistanceDecrease")) {
      m_fGazeDist -= 1.0f;

      if (m_fGazeDist < 0.0f) {
        m_fGazeDist = 0.0f;
        beep();
      }

      setPathDist(m_kBranchState.m_fNormalizedPathDist);
    } else if (command.equals("gazeDistanceIncrease")) {
      m_fGazeDist += 1.0f;
      setPathDist(m_kBranchState.m_fNormalizedPathDist);
    }
  }
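Each look/roll command above builds a one-degree axis-angle rotation and applies it to the view direction and/or the view up vector. A self-contained sketch of that rotation using Rodrigues' formula on plain arrays (avoiding the WildMagic Matrix3f API) is shown below; the method name is illustrative only.

  // Illustrative sketch of the per-command rotation used in move():
  // rotate vector v about a unit axis by the given angle (Rodrigues' formula).
  private static float[] rotateAboutAxis(float[] v, float[] axis, double angleRad) {
    double c = Math.cos(angleRad);
    double s = Math.sin(angleRad);
    // axis x v
    double cx = axis[1] * v[2] - axis[2] * v[1];
    double cy = axis[2] * v[0] - axis[0] * v[2];
    double cz = axis[0] * v[1] - axis[1] * v[0];
    // axis . v
    double d = axis[0] * v[0] + axis[1] * v[1] + axis[2] * v[2];
    return new float[] {
      (float) (v[0] * c + cx * s + axis[0] * d * (1.0 - c)),
      (float) (v[1] * c + cy * s + axis[1] * d * (1.0 - c)),
      (float) (v[2] * c + cz * s + axis[2] * d * (1.0 - c))
    };
  }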
Example #6
  /**
   * Set the camera to be located at the specified view point and looking in the specified
   * direction.
   *
   * @param kViewPoint coordinates of the camera view point
   * @param kViewdirVector coordinates of the camera view direction vector. This vector must be
   *     normalized.
   */
  private void setView(Vector3f kViewPoint, Vector3f kViewdirVector) {
    // Use the view direction vector to create positive weights where more
    // weight is given to an axis that has less of a component in the
    // direction vector.  Use the weights to create an average of
    // two desired (orthogonal axis) up vectors.  Normalize this average
    // vector to create a combined view up vector to use.
    Vector3f kV = new Vector3f(kViewdirVector);

    kV.Set(Math.abs(kV.X), Math.abs(kV.Y), Math.abs(kV.Z));
    kV.Sub(Vector3f.ONE, kV);

    Vector3f kViewupVector = new Vector3f(0.0f, 0.0f, 0.0f);

    kViewupVector.ScaleAdd(m_kViewup1.Dot(kV), m_kViewup1, kViewupVector);
    kViewupVector.ScaleAdd(m_kViewup2.Dot(kV), m_kViewup2, kViewupVector);
    kViewupVector.Normalize();

    // Project the view-up vector onto the plane which is
    // perpendicular to the view direction vector.  By getting to
    // this point, we know that the view-up vector and the view
    // direction vectors are not aligned.  This projected vector is
    // normalized and becomes the new view-up vector.
    Vector3f kViewdirProjection = new Vector3f();

    kViewdirProjection.Scale(kViewdirVector.Dot(kViewupVector), kViewdirVector);
    kViewupVector.Sub(kViewdirProjection);
    kViewupVector.Normalize();

    Vector3f kViewleftVector = new Vector3f();

    kViewleftVector.Cross(kViewupVector, kViewdirVector);

    m_kViewPoint.Copy(kViewPoint);
    m_kViewDirection.Copy(kViewdirVector);
    m_kViewUp.Copy(kViewupVector);
  }
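The core of the up-vector handling in setView is a Gram-Schmidt style step: remove the component of the candidate up vector that lies along the (normalized) view direction, then renormalize. A minimal sketch of just that step with plain arrays follows; the method name is illustrative only.

  // Illustrative sketch of the projection step in setView: make the up vector
  // perpendicular to the (already normalized) view direction and renormalize.
  private static float[] orthogonalizeUp(float[] viewDir, float[] up) {
    float dot = viewDir[0] * up[0] + viewDir[1] * up[1] + viewDir[2] * up[2];
    float x = up[0] - dot * viewDir[0];
    float y = up[1] - dot * viewDir[1];
    float z = up[2] - dot * viewDir[2];
    float len = (float) Math.sqrt(x * x + y * y + z * z);
    return new float[] {x / len, y / len, z / len};
  }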