Example #1
  private void updateLoader() {
    if (NavigineApp.Navigation == null) return;

    // Log.d(TAG, String.format(Locale.ENGLISH, "Update loader: %d", mLoader));

    long timeNow = DateTimeUtils.currentTimeMillis();
    if (mLoader < 0) return;

    int status = LocationLoader.checkLocationLoader(mLoader);
    if (status < 100) {
      if ((Math.abs(timeNow - mLoaderTime) > LOADER_TIMEOUT / 3 && status == 0)
          || (Math.abs(timeNow - mLoaderTime) > LOADER_TIMEOUT)) {
        mListView.setVisibility(View.GONE);
        mStatusLabel.setVisibility(View.VISIBLE);
        mStatusLabel.setText("Loading timeout!\nPlease, check your internet connection!");
        Log.d(TAG, "Load stopped on timeout!");
        LocationLoader.stopLocationLoader(mLoader);
        mLoader = -1;
      } else {
        mListView.setVisibility(View.GONE);
        mStatusLabel.setVisibility(View.VISIBLE);
        mStatusLabel.setText(String.format(Locale.ENGLISH, "Loading content (%d%%)", status));
      }
    } else {
      Log.d(TAG, String.format(Locale.ENGLISH, "Load finished with result: %d", status));
      LocationLoader.stopLocationLoader(mLoader);
      mLoader = -1;

      if (status == 100) {
        parseMapsXml();
        if (mInfoList.isEmpty()) {
          mListView.setVisibility(View.GONE);
          mStatusLabel.setVisibility(View.VISIBLE);
          mStatusLabel.setText("No locations available");
        } else {
          mListView.setVisibility(View.VISIBLE);
          mStatusLabel.setVisibility(View.GONE);
        }
      } else {
        mListView.setVisibility(View.GONE);
        mStatusLabel.setVisibility(View.VISIBLE);
        mStatusLabel.setText("Error loading!\nPlease, check your ID!");
      }
    }
  }
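
updateLoader() only does something useful if it is re-invoked periodically while a download is pending (compare the run() callback in Example #3). Below is a minimal sketch of such a driver, assuming an Android Handler on the main thread; the class and member names are hypothetical and not part of the original source.

import android.os.Handler;
import android.os.Looper;

// Hypothetical polling driver: re-runs updateLoader() roughly once per second
// while a location download is in flight.
class LoaderPoller {
  private static final long POLL_INTERVAL_MS = 1000;
  private final Handler handler = new Handler(Looper.getMainLooper());
  private final Runnable tick =
      new Runnable() {
        @Override
        public void run() {
          updateLoader(); // the method shown above
          handler.postDelayed(this, POLL_INTERVAL_MS);
        }
      };

  void start() {
    handler.post(tick);
  }

  void stop() {
    handler.removeCallbacks(tick);
  }

  private void updateLoader() {
    /* see Example #1 */
  }
}
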
Example #2
  // Audio
  public static void audioInit(
      int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig =
        isStereo
            ? AudioFormat.CHANNEL_CONFIGURATION_STEREO
            : AudioFormat.CHANNEL_CONFIGURATION_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v(
        "SDL",
        "SDL audio: wanted "
            + (isStereo ? "stereo" : "mono")
            + " "
            + (is16Bit ? "16-bit" : "8-bit")
            + " "
            + (sampleRate / 1000f)
            + "kHz, "
            + desiredFrames
            + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames =
        Math.max(
            desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1)
                / frameSize);

    mAudioTrack =
        new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            channelConfig,
            audioFormat,
            desiredFrames * frameSize,
            AudioTrack.MODE_STREAM);

    audioStartThread();

    Log.v(
        "SDL",
        "SDL audio: got "
            + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono")
            + " "
            + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT)
                ? "16-bit"
                : "8-bit")
            + " "
            + (mAudioTrack.getSampleRate() / 1000f)
            + "kHz, "
            + desiredFrames
            + " frames buffer");
  }
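
The desiredFrames clamp above converts the byte count returned by AudioTrack.getMinBufferSize() into frames, rounding up so the allocated buffer never falls below the platform minimum. The same arithmetic in isolation (the helper name is an assumption, not part of SDL):

// Hypothetical helper restating the byte-to-frame conversion used above.
// frameSize = bytesPerSample * channelCount, e.g. 4 for 16-bit stereo.
static int minBufferFrames(int minBufferSizeBytes, int frameSize) {
  // Ceiling division: (a + b - 1) / b rounds up, so a partial frame still
  // counts as a whole frame and the buffer stays >= the reported minimum.
  return (minBufferSizeBytes + frameSize - 1) / frameSize;
}
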
Example #3
        public void run() {
          if (NavigineApp.Navigation == null) return;

          long timeNow = DateTimeUtils.currentTimeMillis();

          String userHash = NavigineApp.Settings.getString("user_hash", "");
          if (userHash.length() == 0) return;

          if (mLoader >= 0) updateLoader();

          if (Math.abs(timeNow - mUpdateLocationLoadersTime) > 1000) updateLocationLoaders();
        }
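
The 1000 ms guard at the end of run() is a simple wall-clock throttle: updateLocationLoaders() records its last run in mUpdateLocationLoadersTime, and the callback skips it until a second has elapsed. The same idea isolated into a small helper (a sketch only; this class does not exist in the original code):

// Hypothetical throttle mirroring the 1-second guard above: the guarded
// action runs at most once per intervalMs, keyed off a caller-supplied clock.
final class Throttle {
  private final long intervalMs;
  private long lastRunMs;

  Throttle(long intervalMs) {
    this.intervalMs = intervalMs;
  }

  boolean shouldRun(long nowMs) {
    if (Math.abs(nowMs - lastRunMs) > intervalMs) {
      lastRunMs = nowMs;
      return true;
    }
    return false;
  }
}
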
Example #4
 @Override
 public boolean handleMotionEvent(MotionEvent event) {
   if ((event.getSource() & InputDevice.SOURCE_JOYSTICK) != 0) {
     int actionPointerIndex = event.getActionIndex();
     int action = event.getActionMasked();
     switch (action) {
       case MotionEvent.ACTION_MOVE:
         SDLJoystick joystick = getJoystick(event.getDeviceId());
         if (joystick != null) {
           for (int i = 0; i < joystick.axes.size(); i++) {
             InputDevice.MotionRange range = joystick.axes.get(i);
             /* Normalize the value to -1...1 */
             float value =
                 (event.getAxisValue(range.getAxis(), actionPointerIndex) - range.getMin())
                         / range.getRange()
                         * 2.0f
                     - 1.0f;
             SDLActivity.onNativeJoy(joystick.device_id, i, value);
           }
           for (int i = 0; i < joystick.hats.size(); i += 2) {
             int hatX =
                 Math.round(
                     event.getAxisValue(joystick.hats.get(i).getAxis(), actionPointerIndex));
             int hatY =
                 Math.round(
                     event.getAxisValue(joystick.hats.get(i + 1).getAxis(), actionPointerIndex));
             SDLActivity.onNativeHat(joystick.device_id, i / 2, hatX, hatY);
           }
         }
         break;
       default:
         break;
     }
   }
   return true;
 }
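
The ACTION_MOVE branch normalizes each raw axis value into the -1…1 range expected by SDLActivity.onNativeJoy() using the device-reported minimum and range. The same mapping as a standalone helper, plus an optional dead-zone check based on MotionRange.getFlat() that the snippet above does not perform; the helper name and the dead-zone handling are additions, not SDL code:

import android.view.InputDevice;
import android.view.MotionEvent;

// Hypothetical restatement of the normalization above:
// map [min, min + range] linearly onto [-1, 1].
static float normalizeAxis(MotionEvent event, InputDevice.MotionRange range, int pointerIndex) {
  float raw = event.getAxisValue(range.getAxis(), pointerIndex);
  // Optional dead zone: treat values close to the axis center as centered,
  // using the flat extent reported by the device (in raw axis units).
  float center = range.getMin() + range.getRange() / 2.0f;
  if (Math.abs(raw - center) < range.getFlat()) {
    return 0.0f;
  }
  return (raw - range.getMin()) / range.getRange() * 2.0f - 1.0f;
}
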
Example #5
  private void updateLocationLoaders() {
    if (NavigineApp.Navigation == null) return;

    long timeNow = DateTimeUtils.currentTimeMillis();
    mUpdateLocationLoadersTime = timeNow;

    synchronized (mLoaderMap) {
      Iterator<Map.Entry<String, LoaderState>> iter = mLoaderMap.entrySet().iterator();
      while (iter.hasNext()) {
        Map.Entry<String, LoaderState> entry = iter.next();

        LoaderState loader = entry.getValue();
        if (loader.state < 100) {
          loader.timeLabel = timeNow;
          if (loader.type == DOWNLOAD) loader.state = LocationLoader.checkLocationLoader(loader.id);
          if (loader.type == UPLOAD) loader.state = LocationLoader.checkLocationUploader(loader.id);
        } else if (loader.state == 100) {
          String archivePath = NavigineApp.Navigation.getArchivePath();
          String locationFile =
              LocationLoader.getLocationFile(NavigineApp.AppContext, loader.location);
          if (archivePath != null && archivePath.equals(locationFile)) {
            Log.d(TAG, "Reloading archive " + archivePath);
            if (NavigineApp.Navigation.loadArchive(archivePath)) {
              SharedPreferences.Editor editor = NavigineApp.Settings.edit();
              editor.putString("map_file", archivePath);
              editor.commit();
            }
          }
          if (loader.type == DOWNLOAD) LocationLoader.stopLocationLoader(loader.id);
          if (loader.type == UPLOAD) LocationLoader.stopLocationUploader(loader.id);
          iter.remove();
        } else {
          // Load failed
          if (Math.abs(timeNow - loader.timeLabel) > 5000) {
            if (loader.type == DOWNLOAD) LocationLoader.stopLocationLoader(loader.id);
            if (loader.type == UPLOAD) LocationLoader.stopLocationUploader(loader.id);
            iter.remove();
          }
        }
      }
    }
    updateLocalVersions();
    mAdapter.updateList();
  }
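
The map values above are LoaderState objects; from the way they are used here, the class must at least carry a loader handle, a progress state, a transfer type, a timestamp, and a location reference. A hedged reconstruction of such a holder, with the field set inferred solely from this snippet rather than taken from the real source:

// Inferred shape of LoaderState based on the fields touched above.
// DOWNLOAD/UPLOAD mirror the constants compared against loader.type.
class LoaderState {
  static final int DOWNLOAD = 0; // assumed values; only the distinction matters here
  static final int UPLOAD = 1;

  int id;          // handle passed to the LocationLoader check/stop calls
  int type;        // DOWNLOAD or UPLOAD
  int state;       // 0..99 in progress, 100 success, any other value = failure
  long timeLabel;  // last time progress was observed, used for the 5 s failure purge
  String location; // location name resolved via LocationLoader.getLocationFile()
}
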
Example #6
  // Audio
  public static int audioInit(
      int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig =
        isStereo
            ? AudioFormat.CHANNEL_CONFIGURATION_STEREO
            : AudioFormat.CHANNEL_CONFIGURATION_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v(
        "SDL",
        "SDL audio: wanted "
            + (isStereo ? "stereo" : "mono")
            + " "
            + (is16Bit ? "16-bit" : "8-bit")
            + " "
            + (sampleRate / 1000f)
            + "kHz, "
            + desiredFrames
            + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames =
        Math.max(
            desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1)
                / frameSize);

    if (mAudioTrack == null) {
      mAudioTrack =
          new AudioTrack(
              AudioManager.STREAM_MUSIC,
              sampleRate,
              channelConfig,
              audioFormat,
              desiredFrames * frameSize,
              AudioTrack.MODE_STREAM);

      // Instantiating AudioTrack can "succeed" without an exception and the track may still be
      // invalid.
      // Ref:
      // https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
      // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()

      if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.e("SDL", "Failed during initialization of Audio Track");
        mAudioTrack = null;
        return -1;
      }

      mAudioTrack.play();
    }

    Log.v(
        "SDL",
        "SDL audio: got "
            + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono")
            + " "
            + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT)
                ? "16-bit"
                : "8-bit")
            + " "
            + (mAudioTrack.getSampleRate() / 1000f)
            + "kHz, "
            + desiredFrames
            + " frames buffer");

    return 0;
  }
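
Because this version reports failure through its return value (and nulls out mAudioTrack when getState() is not STATE_INITIALIZED), callers can check the result before streaming audio. A hedged caller sketch; the parameter values are placeholders:

// Hypothetical caller: bail out of audio setup if the track never initialized.
if (audioInit(44100, true, true, 1024) != 0) {
  Log.e("SDL", "audioInit failed; continuing without audio output");
}
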
    /**
     * Opens a connection to the media source of the associated <tt>DataSource</tt>.
     *
     * @throws IOException if anything goes wrong while opening a connection to the media source of
     *     the associated <tt>DataSource</tt>
     */
    public synchronized void connect() throws IOException {
      javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat();
      int channels = af.getChannels();
      int channelConfig;

      switch (channels) {
        case Format.NOT_SPECIFIED:
        case 1:
          channelConfig = AudioFormat.CHANNEL_IN_MONO;
          break;
        case 2:
          channelConfig = AudioFormat.CHANNEL_IN_STEREO;
          break;
        default:
          throw new IOException("channels");
      }

      int sampleSizeInBits = af.getSampleSizeInBits();
      int audioFormat;

      switch (sampleSizeInBits) {
        case 8:
          audioFormat = AudioFormat.ENCODING_PCM_8BIT;
          break;
        case 16:
          audioFormat = AudioFormat.ENCODING_PCM_16BIT;
          break;
        default:
          throw new IOException("sampleSizeInBits");
      }

      double sampleRate = af.getSampleRate();

      length =
          (int)
              Math.round(
                  20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));

      /*
       * Apart from the thread in which #read(Buffer) is executed, use the
       * thread priority for the thread which will create the AudioRecord.
       */
      setThreadPriority();
      try {
        int minBufferSize =
            AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat);

        audioRecord =
            new AudioRecord(
                MediaRecorder.AudioSource.DEFAULT,
                (int) sampleRate,
                channelConfig,
                audioFormat,
                Math.max(length, minBufferSize));

        // tries to configure audio effects if available
        configureEffects();
      } catch (IllegalArgumentException iae) {
        IOException ioe = new IOException();

        ioe.initCause(iae);
        throw ioe;
      }

      setThreadPriority = true;
    }
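
The buffer length computed in connect() corresponds to 20 milliseconds of audio: frames per millisecond (sampleRate / 1000) times 20, times the channel count, times bytes per sample. Restated as a standalone helper with a worked value (the helper is illustrative, not part of the original class):

// Hypothetical restatement of the 20 ms length computation above.
// Example: 44100 Hz, stereo, 16-bit -> 20 * 44.1 * 2 * 2 = 3528 bytes.
static int twentyMsBufferBytes(double sampleRate, int channels, int sampleSizeInBits) {
  return (int) Math.round(20 /* ms */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));
}
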