@Override
 public void handleMessage(Message msg) {
   // Dispatches subtitle updates (text or bitmap) posted by the decoder.
   data = msg.getData();
   switch (msg.what) {
     case SUBTITLE_TEXT:
       // Swap the visible subtitle view only when the type changes.
       if (oldType != SUBTITLE_TEXT) {
         mSubtitleImage.setVisibility(View.GONE);
         mSubtitleText.setVisibility(View.VISIBLE);
         oldType = SUBTITLE_TEXT;
       }
       text = data.getString(VP.SUB_TEXT_KEY);
       mSubtitleText.setText(text == null ? "" : text.trim());
       break;
     case SUBTITLE_BITMAP:
       if (oldType != SUBTITLE_BITMAP) {
         mSubtitleText.setVisibility(View.GONE);
         mSubtitleImage.setVisibility(View.VISIBLE);
         oldType = SUBTITLE_BITMAP;
       }
       pixels = data.getByteArray(VP.SUB_PIXELS_KEY);
       // Read the dimensions once instead of up to four Bundle lookups
       // (the original re-queried getInt in both the condition and the body).
       final int newWidth = data.getInt(VP.SUB_WIDTH_KEY);
       final int newHeight = data.getInt(VP.SUB_HEIGHT_KEY);
       // Recreate the backing bitmap only when the frame size changed.
       if (bm == null || width != newWidth || height != newHeight) {
         width = newWidth;
         height = newHeight;
         bm = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
       }
       if (pixels != null) bm.copyPixelsFromBuffer(ByteBuffer.wrap(pixels));
       mSubtitleImage.setImageBitmap(bm);
       break;
   }
 }
  /**
   * Copies this layer's pixel data into {@code target}, one tile at a time.
   *
   * <p>The backing buffer stores tiles consecutively in row-major tile order;
   * tiles on the right/bottom edges may be smaller than {@code TILE_SIZE}.
   *
   * @param target destination bitmap, drawn into via a Canvas
   */
  public void copyPixelsFromMultiTileLayer(Bitmap target) {
    Canvas c = new Canvas(target);
    // Slice so the shared buffer's own position/limit are left untouched.
    ByteBuffer tileBuffer = mBuffer.slice();
    int bpp = CairoUtils.bitsPerPixelForCairoFormat(mFormat) / 8;

    for (int y = 0; y < mBufferSize.height; y += TILE_SIZE.height) {
      for (int x = 0; x < mBufferSize.width; x += TILE_SIZE.width) {
        // Calculate tile size (edge tiles are clamped to the layer bounds)
        IntSize tileSize =
            new IntSize(
                Math.min(mBufferSize.width - x, TILE_SIZE.width),
                Math.min(mBufferSize.height - y, TILE_SIZE.height));

        // Create a Bitmap from this tile
        Bitmap tile =
            Bitmap.createBitmap(
                tileSize.width, tileSize.height, CairoUtils.cairoFormatTobitmapConfig(mFormat));
        // NOTE(review): asIntBuffer() implies 4 bytes per pixel, while the
        // advance below uses the computed bpp — verify they agree for every
        // Cairo format this layer can carry.
        tile.copyPixelsFromBuffer(tileBuffer.asIntBuffer());

        // Copy the tile to the master Bitmap and recycle it
        c.drawBitmap(tile, x, y, null);
        tile.recycle();

        // Progress the buffer to the next tile
        tileBuffer.position(tileSize.getArea() * bpp);
        tileBuffer = tileBuffer.slice();
      }
    }
  }
  /**
   * Captures the current output at its displayed size and returns it as a Bitmap.
   *
   * <p>Blocks until the GL thread has read back and flipped the pixels.
   *
   * @return the captured frame as a Bitmap
   * @throws InterruptedException if interrupted while waiting on the GL thread
   */
  public Bitmap capture() throws InterruptedException {
    final Semaphore done = new Semaphore(0);

    final int w = mGLSurfaceView.getMeasuredWidth();
    final int h = mGLSurfaceView.getMeasuredHeight();

    // The read-back must run on the GL thread; the flipped pixels are written
    // into this array so the calling thread can assemble the Bitmap afterwards.
    final int[] upright = new int[w * h];
    mGPUImage.runOnGLThread(
        new Runnable() {
          @Override
          public void run() {
            final IntBuffer raw = IntBuffer.allocate(w * h);
            GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, raw);
            final int[] rows = raw.array();

            // glReadPixels delivers the image bottom-up; flip it vertically.
            for (int row = 0; row < h; row++) {
              for (int col = 0; col < w; col++) {
                upright[(h - row - 1) * w + col] = rows[row * w + col];
              }
            }
            done.release();
          }
        });
    requestRender();
    done.acquire();

    Bitmap result = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(IntBuffer.wrap(upright));
    return result;
  }
// Example #4
// 0
  /**
   * Blits one emulator frame to the configured output.
   *
   * <p>Blocks while the emulator is paused, then renders through GL, the video
   * thread, or directly onto the SurfaceHolder canvas depending on
   * {@code videoRenderMode}.
   *
   * @param sScreenBuff pixel buffer holding the frame to draw
   * @param inMAME whether the core is currently inside MAME
   */
  static void bitblt(ByteBuffer sScreenBuff, boolean inMAME) {

    if (paused) // locks are expensive
    {
      synchronized (lock2) {
        try {
          // Re-check under the lock, then wait until unpaused.
          if (paused) lock2.wait();
        } catch (InterruptedException e) {
          e.printStackTrace();
        }
      }
    }
    synchronized (lock1) {
      screenBuff = sScreenBuff;
      Emulator.inMAME = inMAME;

      if (videoRenderMode == PrefsHelper.PREF_RENDER_GL) {
        ((EmulatorViewGL) mm.getEmuView()).requestRender();
      } else if (videoRenderMode == PrefsHelper.PREF_RENDER_THREADED) {
        videoT.update();
      } else if (videoRenderMode == PrefsHelper.PREF_RENDER_HW) {
        videoT.update();
      } else {
        if (holder == null) return;

        // lockCanvas() returns null when the surface is not available
        // (e.g. destroyed mid-frame); drawing on it would NPE.
        Canvas canvas = holder.lockCanvas();
        if (canvas == null) return;

        sScreenBuff.rewind();
        emuBitmap.copyPixelsFromBuffer(sScreenBuff);
        i++;
        canvas.concat(mtx);
        canvas.drawBitmap(emuBitmap, 0, 0, emuPaint);
        if (isDebug) {
          canvas.drawText("Normal fps:" + fps + " " + inMAME, 5, 40, debugPaint);
          // Recompute the FPS counter once per second.
          if (System.currentTimeMillis() - millis >= 1000) {
            fps = i;
            i = 0;
            millis = System.currentTimeMillis();
          }
        }
        holder.unlockCanvasAndPost(canvas);
      }
    }
  }
// Example #5
// 0
  @Override
  protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);

    // Pull the latest frame out of the shared pixel buffer, then leave the
    // buffer positioned at the start so the next frame reads from the top.
    VideoBit.copyPixelsFromBuffer(buffer);
    buffer.position(0);

    canvas.drawBitmap(VideoBit, 0, 0, null);
  }
 /** Regenerates the value-gradient bitmap for the V (brightness) slider bar. */
 private void updateGV() {
   final int height = barV.getHeight();
   bmpV = updateBitmap(bmpV, height);
   bmpBuf = updateBuffer(bmpBuf, height);
   // Sweep V from 0 to 1 row by row while keeping the current hue/saturation.
   hsvTmp.h = hsv.h;
   hsvTmp.s = hsv.s;
   final double span = (double) (height - 1);
   for (int row = 0; row < height; row++) {
     hsvTmp.v = (double) row / span;
     bmpBuf.put(row, hsvTmp.toRGB(true));
   }
   bmpV.copyPixelsFromBuffer(bmpBuf);
 }
  /**
   * Converts an immutable bitmap to a mutable one by round-tripping the pixel
   * data through a memory-mapped temporary file, so no second full pixel copy
   * has to live on the Java heap.
   *
   * <p>Source: http://stackoverflow.com/questions/4349075/bitmapfactory-decoderesource
   * -returns-a-mutable-bitmap-in-android-2-2-and-an-immu
   *
   * @param imgIn source image; it is recycled and must not be used afterwards
   * @return a mutable copy of imgIn (or the unmodified input if the copy failed)
   */
  public static Bitmap convertBitmapToMutable(Bitmap imgIn) {
    // Scratch file holding raw pixel bytes (not an image format). It lives in
    // the app's private files dir, so no storage permission is required.
    File file = new File(MyApp.context.getFilesDir() + File.separator + "temp.tmp");
    // try-with-resources: the original leaked the RandomAccessFile/channel
    // (and left the temp file behind) whenever an exception was thrown.
    try (RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
        FileChannel channel = randomAccessFile.getChannel()) {

      // get the width and height of the source bitmap.
      int width = imgIn.getWidth();
      int height = imgIn.getHeight();
      Config type = imgIn.getConfig();

      // Widen before multiplying: rowBytes * height can overflow int for
      // large bitmaps, and map() takes a long anyway.
      MappedByteBuffer map =
          channel.map(MapMode.READ_WRITE, 0, (long) imgIn.getRowBytes() * height);
      imgIn.copyPixelsToBuffer(map);

      // recycle the source bitmap, this will be no longer used.
      imgIn.recycle();
      System.gc(); // try to force the bytes from the imgIn to be released

      // createBitmap() yields a mutable bitmap; reload the saved pixels.
      imgIn = Bitmap.createBitmap(width, height, type);
      map.position(0);
      imgIn.copyPixelsFromBuffer(map);
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      // The scratch data is disposable whether or not the copy succeeded.
      file.delete();
    }

    return imgIn;
  }
 /**
  * Animates an "inverting band" sweeping horizontally across {@code bitmap}:
  * each frame XOR-inverts a band of pixels, pushes the frame to the bitmap,
  * then restores the original pixel values so the next frame starts clean.
  */
 @Override
 protected void applyTransformation(float interpolatedTime, Transformation t) {
   Log.d(TAG, "apply transformation");
   if (bitmap != null) {
     int leftBound = 0;
     // Band's left edge sweeps from -bandWidth up to width-1 as time goes 0->1.
     leftBound = (int) ((bitmap.getWidth() + bandWidth - 1) * interpolatedTime) - bandWidth;
     /** leftBound ranges from 0 to bitmap.getWidth()-1 */
     int pixelsPerRow = 1;
     if (leftBound < 0) {
       // Band still entering from the left: only its trailing part is visible.
       pixelsPerRow = leftBound + bandWidth;
       leftBound = 0;
     } else if (leftBound + bandWidth < bitmap.getWidth()) {
       // Band fully inside the bitmap.
       pixelsPerRow = bandWidth;
     } else {
       // Band leaving on the right: clamp to the remaining columns.
       pixelsPerRow = bitmap.getWidth() - leftBound;
     }
     // buffer2 shares storage with pixelBuffer but has an independent position,
     // so we can read originals and write inverted values in lockstep.
     ShortBuffer buffer2 = pixelBuffer.duplicate();
     pixelBuffer.rewind();
     buffer2.rewind();
     int start = leftBound;
     int count = 0;
     for (int i = 0; i < bitmap.getHeight(); i++) {
       pixelBuffer.position(start);
       buffer2.position(start);
       for (int j = 0; j < pixelsPerRow; j++) {
         short pixel = pixelBuffer.get();
         // Back up the original value before writing the inverted pixel.
         pixelBackup[count] = pixel;
         count++;
         pixel = (short) (pixel ^ 0xDDDD);
         buffer2.put(pixel);
       }
       start += bitmap.getWidth();
     }
     bitmap.copyPixelsFromBuffer(pixelBuffer);
     /** restore the pixel value */
     pixelBuffer.rewind();
     start = leftBound;
     count = 0;
     for (int i = 0; i < bitmap.getHeight(); i++) {
       pixelBuffer.position(start);
       for (int j = 0; j < pixelsPerRow; j++) {
         pixelBuffer.put(pixelBackup[count++]);
       }
       start += bitmap.getWidth();
     }
   } else Log.e(TAG, "bitmap==null");
 }
// Example #9
// 0
  /**
   * Renders a new map frame into {@code bmp} through the native OsmAnd library.
   *
   * <p>Listeners in {@code notifyList} are notified when rendering finishes
   * (and, for direct rendering, on a delayed schedule while in progress).
   * Timing/debug information is written back into {@code rc}.
   *
   * @param rc rendering context; updated in place
   * @param library native library wrapper performing the rendering
   * @param searchResultHandler native handle to the objects to draw; no-op if null
   * @param bmp target bitmap receiving the rendered frame
   * @param render rendering-rule search request (style configuration)
   * @param notifyList callbacks to notify when the map can be redrawn
   */
  public void generateNewBitmapNative(
      RenderingContext rc,
      NativeOsmandLibrary library,
      NativeSearchResult searchResultHandler,
      Bitmap bmp,
      RenderingRuleSearchRequest render,
      final List<IMapDownloaderCallback> notifyList) {
    long now = System.currentTimeMillis();
    if (rc.width > 0 && rc.height > 0 && searchResultHandler != null) {
      // init rendering context
      rc.tileDivisor = (int) (1 << (31 - rc.zoom));
      // FloatMath is deprecated; (float) Math.cos/sin is the recommended
      // replacement and at least as precise.
      rc.cosRotateTileSize = (float) Math.cos(Math.toRadians(rc.rotate)) * TILE_SIZE;
      rc.sinRotateTileSize = (float) Math.sin(Math.toRadians(rc.rotate)) * TILE_SIZE;
      try {
        if (Looper.getMainLooper() != null && library.useDirectRendering()) {
          final Handler h = new Handler(Looper.getMainLooper());
          notifyListenersWithDelay(rc, notifyList, h);
        }

        // Native library will decide on it's own best way of rendering
        // If res.bitmapBuffer is null, it indicates that rendering was done directly to
        // memory of passed bitmap, but this is supported only on Android >= 2.2
        final NativeLibrary.RenderingGenerationResult res =
            library.generateRendering(rc, searchResultHandler, bmp, bmp.hasAlpha(), render);
        rc.ended = true;
        notifyListeners(notifyList);
        long time = System.currentTimeMillis() - now;
        rc.renderingDebugInfo =
            String.format(
                "Rendering: %s ms  (%s text)\n"
                    + "(%s points, %s points inside, %s of %s objects visible)\n", //$NON-NLS-1$
                time,
                rc.textRenderingTime,
                rc.pointCount,
                rc.pointInsideCount,
                rc.visible,
                rc.allObjects);

        // See upper note
        if (res.bitmapBuffer != null) {
          bmp.copyPixelsFromBuffer(res.bitmapBuffer);
        }
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
// Example #10
// 0
  /**
   * Captures the current GL framebuffer and writes it to {@code mStrSnapshotName}
   * as a PNG, then clears the pending snapshot name.
   *
   * @param gl GL context to read pixels from (must also implement GL11)
   */
  public void TakeGLScreenshot(GL10 gl) {
    // BUG FIX: the original compared with `mStrSnapshotName != ""`, which is a
    // reference comparison (and is true for null, leading to an NPE later).
    if (mStrSnapshotName != null && !mStrSnapshotName.isEmpty()) {
      getManagers().getUsageStatisticsManager().TrackEvent("Screenshot", "Count", 0);

      int[] mViewPort = new int[4];
      GL11 gl2 = (GL11) gl;
      gl2.glGetIntegerv(GL11.GL_VIEWPORT, mViewPort, 0);

      int width = mViewPort[2];
      int height = mViewPort[3];

      int size = width * height;
      ByteBuffer buf = ByteBuffer.allocateDirect(size * 4);
      buf.order(ByteOrder.nativeOrder());
      gl.glReadPixels(0, 0, width, height, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, buf);
      int data[] = new int[size];
      buf.asIntBuffer().get(data);
      buf = null;

      // setPixels with a negative stride writes rows bottom-up, undoing the
      // vertical flip produced by glReadPixels.
      Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
      bitmap.setPixels(data, size - width, -width, 0, 0, width, height);
      data = null;

      // Swap red/blue channels in the 565 pixels (GL delivered BGR order).
      short sdata[] = new short[size];
      ShortBuffer sbuf = ShortBuffer.wrap(sdata);
      bitmap.copyPixelsToBuffer(sbuf);
      for (int i = 0; i < size; ++i) {
        // BGR-565 to RGB-565
        short v = sdata[i];
        sdata[i] = (short) ((v & 0x1f) << 11 | v & 0x7e0 | (v & 0xf800) >> 11);
      }
      sbuf.rewind();
      bitmap.copyPixelsFromBuffer(sbuf);

      // try-with-resources: the original leaked the stream if compress() threw.
      try (FileOutputStream fos = new FileOutputStream(mStrSnapshotName)) {
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
        fos.flush();
      } catch (Exception e) {
        e.printStackTrace();
        return;
      }

      mStrSnapshotName = ""; // reset
    }
  }
  /** Draws the current frame buffer onto the local surface, if it is usable. */
  private void surfaceRender() {
    synchronized (this) {
      boolean ready =
          mLocalSurface != null
              && mLocalSurface.isValid()
              && mBitmap != null
              && mByteBuffer != null;
      if (!ready) return;

      try {
        Canvas canvas = mLocalSurface.lockCanvas(null);
        mBitmap.copyPixelsFromBuffer(mByteBuffer);
        canvas.drawBitmap(mBitmap, 0, 0, null);
        mLocalSurface.unlockCanvasAndPost(canvas);
      } catch (Exception e) {
        Log.e("surfaceRender", e);
      }
    }
  }
  /**
   * Caches a copy of {@code bitmap} under {@code mapGeneratorJob}, reusing a
   * bitmap taken from the pool as the backing storage.
   *
   * <p>No-op when the cache capacity is zero or the pool is exhausted.
   *
   * @param mapGeneratorJob cache key
   * @param bitmap tile to copy into the cache
   */
  @Override
  public void put(MapGeneratorJob mapGeneratorJob, Bitmap bitmap) {
    if (this.capacity == 0) {
      return;
    }

    synchronized (this.map) {
      if (this.bitmapPool.isEmpty()) {
        return;
      }

      Bitmap pooledBitmap = this.bitmapPool.remove(this.bitmapPool.size() - 1);

      // BUG FIX: rewind before writing too. copyPixelsFromBuffer advances the
      // position to the limit, so without this a second call would find no
      // room left in the shared buffer and throw.
      this.byteBuffer.rewind();
      bitmap.copyPixelsToBuffer(this.byteBuffer);
      this.byteBuffer.rewind();
      pooledBitmap.copyPixelsFromBuffer(this.byteBuffer);

      this.map.put(mapGeneratorJob, pooledBitmap);
    }
  }
// Example #13
// 0
 @Override
 public void updateView(ByteBuffer data) {
   final Bitmap snapshot = Bitmap.createBitmap(viewWidth, viewHeight, getBitmapConfig());
   if (snapshot != null) {
     data.rewind();
     try {
       snapshot.copyPixelsFromBuffer(data);
     } catch (Exception iae) {
       // Size mismatch between buffer and bitmap: keep the old image, log it.
       Log.w(LOGTAG, iae.toString());
     }
     if (zoomedImageView != null) {
       RoundedBitmapDrawable drawable =
           new RoundedBitmapDrawable(getResources(), snapshot, toolbarOnTop, cornerRadius);
       zoomedImageView.setImageDrawable(drawable);
     }
   }
   if (shouldSetVisibleOnUpdate) {
     this.showZoomedView();
   }
   lastStartTimeReRender = startTimeReRender;
   startTimeReRender = 0;
 }
// Example #14
// 0
  /**
   * Saves the EGL surface to a file.
   *
   * <p>Expects that this object's EGL surface is current.
   */
  public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
      throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels hands back big-endian RGBA bytes in a direct buffer, which
    // is exactly the layout Bitmap's "copy pixels" methods expect — so the
    // data can be passed straight through without per-pixel conversion.
    // Re-using the buffer across calls would be nicer if this runs often.
    // Note GL's bottom-left origin: the written file looks vertically flipped
    // relative to the screen under typical GL conventions.
    String filename = file.toString();

    int width = getWidth();
    int height = getHeight();
    ByteBuffer pixelBuf = ByteBuffer.allocateDirect(width * height * 4);
    pixelBuf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
    GlUtil.checkGlError("glReadPixels");
    pixelBuf.rewind();

    BufferedOutputStream out = null;
    try {
      out = new BufferedOutputStream(new FileOutputStream(filename));
      Bitmap frame = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
      frame.copyPixelsFromBuffer(pixelBuf);
      frame.compress(Bitmap.CompressFormat.PNG, 90, out);
      frame.recycle();
    } finally {
      if (out != null) out.close();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
  }
// Example #15
// 0
 /**
  * Lazily decodes the bitmap from the parcel file descriptor and caches it.
  *
  * <p>Decompiled code: the original only assigned the input stream when the
  * cache flag was clear, leaving the variable uninitialized (and losing the
  * cached-return short-circuit) when the bitmap was already decoded. The
  * cache check is restored here.
  *
  * @return the decoded (and subsequently cached) bitmap
  */
 public Bitmap get() {
   // Already decoded: return the cached instance.
   if (zzaiU) {
     return zzaiT;
   }
   DataInputStream in = new DataInputStream(new ParcelFileDescriptor.AutoCloseInputStream(zzDy));
   try {
     // Wire format: pixel byte count, width, height, Bitmap.Config name, pixels.
     byte[] pixels = new byte[in.readInt()];
     int width = in.readInt();
     int height = in.readInt();
     Bitmap.Config config = Bitmap.Config.valueOf(in.readUTF());
     // readFully: plain read() may return fewer bytes than requested,
     // which would silently truncate the pixel data.
     in.readFully(pixels);
     Bitmap bitmap = Bitmap.createBitmap(width, height, config);
     bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(pixels));
     zzaiT = bitmap;
     zzaiU = true;
     return zzaiT;
   } catch (IOException localIOException) {
     throw new IllegalStateException(
         "Could not read from parcel file descriptor", localIOException);
   } finally {
     zza((Closeable) in);
   }
 }
  /**
   * Converts the given pixel buffer to a JPEG and uploads it to the VNC web
   * server as a multipart/form-data POST, keyed by this device's IP address.
   *
   * @param buffer ARGB_8888 pixel data for the screen
   * @param width screen width in pixels
   * @param height screen height in pixels
   * @throws Exception on any network or I/O failure
   */
  public void saveScreen2(ByteBuffer buffer, int width, int height) throws Exception {
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    String urlString = "";
    String serverId = getIPAddress(true);
    boolean isRunningOnEmulator = Build.BRAND.toLowerCase().startsWith("generic");
    if (isRunningOnEmulator) {
      // 10.0.2.2 is the emulator's alias for the host machine's loopback.
      urlString = "http://10.0.2.2:80/AndroidVNCWebServer/UploadScreen.php?serverID=" + serverId;
    } else {
      urlString = "http://192.168.5.1:80/AndroidVNCWebServer/UploadScreen.php?serverID=" + serverId;
    }

    String attachmentName = "bitmap";
    String attachmentFileName = "bitmap.bmp";
    String crlf = "\r\n";
    String twoHyphens = "--";
    String boundary = "*****";

    // Setup the request
    HttpURLConnection httpUrlConnection = null;
    URL url = new URL(urlString);
    httpUrlConnection = (HttpURLConnection) url.openConnection();
    httpUrlConnection.setUseCaches(false);
    httpUrlConnection.setDoOutput(true);

    httpUrlConnection.setRequestMethod("POST");
    httpUrlConnection.setRequestProperty("Connection", "Keep-Alive");
    httpUrlConnection.setRequestProperty("Cache-Control", "no-cache");
    httpUrlConnection.setRequestProperty(
        "Content-Type", "multipart/form-data;boundary=" + boundary);

    // Send server id. Start content wrapper
    DataOutputStream request = new DataOutputStream(httpUrlConnection.getOutputStream());

    request.writeBytes(twoHyphens + boundary + crlf);
    request.writeBytes("Content-Disposition: form-data; name=\"serverID\"" + crlf);
    request.writeBytes(crlf);

    //  Write serverID
    request.write(serverId.getBytes());

    // End the serverID part.
    // BUG FIX: the original wrote the terminal delimiter ("--boundary--")
    // here, which per RFC 2046 ends the whole multipart body — the server
    // would never see the image part. The image part's own "--boundary" line
    // below is the correct separator between parts.
    request.writeBytes(crlf);

    // Start content wrapper for image
    request.writeBytes(twoHyphens + boundary + crlf);
    request.writeBytes(
        "Content-Disposition: form-data; name=\""
            + attachmentName
            + "\";filename=\""
            + attachmentFileName
            + "\""
            + crlf);
    request.writeBytes(crlf);

    // Convert bitmap to byte buffer
    ByteArrayOutputStream output = new ByteArrayOutputStream(bitmap.getByteCount());
    // NOTE(review): quality 0 is maximum compression — presumably intentional
    // to keep the upload small; confirm the server side can cope.
    bitmap.compress(Bitmap.CompressFormat.JPEG, 0, output);
    byte[] pixels = output.toByteArray();
    request.write(pixels);

    // End content wrapper (terminal boundary: this really is the last part)
    request.writeBytes(crlf);
    request.writeBytes(twoHyphens + boundary + twoHyphens + crlf);

    // Flush output buffer
    request.flush();
    request.close();

    // Get Response
    InputStream responseStream = new BufferedInputStream(httpUrlConnection.getInputStream());

    BufferedReader responseStreamReader = new BufferedReader(new InputStreamReader(responseStream));
    String line = "";
    StringBuilder stringBuilder = new StringBuilder();
    while ((line = responseStreamReader.readLine()) != null) {
      stringBuilder.append(line).append("\n");
    }
    responseStreamReader.close();

    String response = stringBuilder.toString();
    Log.e(TAG, "HTTPRESPONSE: " + response);

    // Close response stream
    responseStream.close();

    // Close the connection
    httpUrlConnection.disconnect();
  }
// Example #17
// 0
 /**
  * Copies a raw ARGB_8888 frame buffer into a newly allocated Bitmap.
  *
  * @param frame pixel data, positioned at the start of the frame
  * @param width frame width in pixels
  * @param height frame height in pixels
  * @return a bitmap containing the frame's pixels
  */
 public Bitmap getFrameAsBitmap(ByteBuffer frame, int width, int height) {
   final Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
   result.copyPixelsFromBuffer(frame);
   return result;
 }
    /**
     * Grabs the newest captured frame, converts it to a low-quality JPEG and
     * streams it to the connected remote peer over the data channel.
     *
     * <p>The unused FileOutputStream from the original (declared, never
     * assigned, then null-checked in finally) has been removed as dead code.
     */
    @Override
    public void onImageAvailable(ImageReader reader) {
      Image image = null;
      Bitmap bitmap = null;

      ByteArrayOutputStream stream = null;

      try {
        // NOTE(review): reads from the imageReader field rather than the
        // callback's reader parameter — presumably the same instance; confirm.
        image = imageReader.acquireLatestImage();
        if (image != null) {
          Image.Plane[] planes = image.getPlanes();
          ByteBuffer buffer = planes[0].getBuffer();
          int pixelStride = planes[0].getPixelStride();
          int rowStride = planes[0].getRowStride();
          // Rows may be padded beyond the visible width; widen the bitmap so
          // copyPixelsFromBuffer() consumes the padding bytes too.
          int rowPadding = rowStride - pixelStride * displayWidth;

          // create bitmap
          bitmap =
              Bitmap.createBitmap(
                  displayWidth + rowPadding / pixelStride, displayHeight, Bitmap.Config.ARGB_8888);
          bitmap.copyPixelsFromBuffer(buffer);

          if (skylinkConnection != null && !TextUtils.isEmpty(currentRemotePeerId)) {
            stream = new ByteArrayOutputStream();
            // Quality 5: heavily compressed to keep the payload small.
            bitmap.compress(Bitmap.CompressFormat.JPEG, 5, stream);
            skylinkConnection.sendData(currentRemotePeerId, stream.toByteArray());
            Log.d(TAG, "sending data to peer :" + currentRemotePeerId);
          }

          imagesProduced++;
          Log.e(TAG, "captured image: " + imagesProduced);
        }

      } catch (Exception e) {
        e.printStackTrace();
      } finally {
        if (stream != null) {
          try {
            stream.close();
          } catch (IOException ioe) {
            ioe.printStackTrace();
          }
        }

        if (bitmap != null) {
          bitmap.recycle();
        }

        if (image != null) {
          image.close();
        }
      }
    }