@Override public void onActivityResult(int requestCode, int resultCode, Intent data) { Log.i(TAG, " requestcode: " + requestCode); Log.i(TAG, " resultCode: " + resultCode); switch (requestCode) { case 100: if (resultCode == Activity.RESULT_OK) { Uri selectedImage = imageUri; getActivity().getContentResolver().notifyChange(selectedImage, null); ContentResolver cr = getActivity().getContentResolver(); Bitmap bitmap; try { bitmap = android.provider.MediaStore.Images.Media.getBitmap(cr, selectedImage); imgFavorite.setImageBitmap(bitmap); Log.i(TAG, " butmap " + bitmap.toString()); Log.i(TAG, " img view " + imgFavorite.toString()); Log.i(TAG, " img uri " + imageUri.toString()); imageAdapter.addmThumbIds(imageUri.toString()); } catch (Exception e) { Log.e("Camera", e.toString()); } finally { imageAdapter.notifyDataSetChanged(); } } } }
@Override
public void onPictureTaken(byte[] bytes, Camera camera) {
    android.util.Log.d("CameraActivity", "In onPictureTaken().");
    Bitmap image = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
    // use String.valueOf() so a failed decode (null bitmap) does not crash the log call
    android.util.Log.d("Image:", String.valueOf(image));
    if (image != null) {
        android.util.Log.d("", "image != null");
        // check internet connection
        if (ItraffApi.isOnline(getApplicationContext())) {
            // showWaitDialog();
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getBaseContext());
            // send photo
            ItraffApi api = new ItraffApi(CLIENT_API_ID, CLIENT_API_KEY, TAG, true);
            Log.d("KEY", CLIENT_API_ID.toString());
            api.setMode(ItraffApi.MODE_SINGLE);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compress(Bitmap.CompressFormat.JPEG, 100, stream);
            byte[] pictureData = stream.toByteArray();
            Log.d("about to send photo", "Yay");
            api.sendPhoto(pictureData, itraffApiHandler, prefs.getBoolean("allResults", true));
        }
    }
}
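// For context, a minimal sketch (not part of the original) of how this PictureCallback
// might be invoked with the deprecated android.hardware.Camera API. `mCamera` is assumed
// to be an opened camera with a live preview, and `this` the class implementing the
// Camera.PictureCallback above.
private void capture() {
    // pass this callback as the JPEG callback; shutter and raw callbacks are omitted
    mCamera.takePicture(null, null, this);
}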
/**
 * Initializes a sprite with a CGImageRef (a Bitmap in this port).
 *
 * @deprecated Use spriteWithCGImage:key: instead. Will be removed in v1.0 final
 */
public CCSprite(Bitmap image) {
    assert image != null : "Invalid CGImageRef for sprite";

    // XXX: possible bug. See issue #349. New API should be added
    String key = image.toString();
    CCTexture2D texture = CCTextureCache.sharedTextureCache().addImage(image, key);
    CGSize size = texture.getContentSize();
    CGRect rect = CGRect.make(0, 0, size.width, size.height);
    init(texture, rect);
}
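// For context, a minimal usage sketch (not part of the original) of this deprecated
// constructor. The resource id R.drawable.hero is a hypothetical example; it assumes a
// cocos2d-android scene is already running.
private CCSprite makeHeroSprite(Context context) {
    Bitmap heroBitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.hero);
    return new CCSprite(heroBitmap);
}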
private void setImage(Uri theUri) {
    Log.d("ProfileFragment", "Here in setImage with uri: " + theUri);
    try {
        Bitmap bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), theUri);
        Log.d("ProfileFragment", "bitmap is: " + bitmap.toString());
        myPhoto.setImageBitmap(bitmap);

        // send to parse
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
        byte[] array = stream.toByteArray();
        ParseFile file = new ParseFile("profilePic.jpeg", array);
        file.saveInBackground();

        TheGroupUtil.getCurrentGroup().put(TheGroupUtil.GROUP_PHOTO, file);
        TheGroupUtil.getCurrentGroup().saveInBackground();
    } catch (FileNotFoundException e) {
        Log.e("ProfileFragment", "Error: " + e);
    } catch (IOException e) {
        Log.e("ProfileFragment", "Error: " + e);
    }
}
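// For context, a minimal sketch (not part of the original) of how a gallery Uri might
// reach setImage(). The request code name PICK_PHOTO_REQUEST and the picker intent are
// assumptions, not part of the source.
private static final int PICK_PHOTO_REQUEST = 1;

private void pickProfilePhoto() {
    Intent pickIntent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
    startActivityForResult(pickIntent, PICK_PHOTO_REQUEST);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == PICK_PHOTO_REQUEST && resultCode == RESULT_OK && data != null) {
        setImage(data.getData());
    }
}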
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    // retrieve selected image Uri from image picker
    Uri newImageUri = null;
    if (null != data) {
        newImageUri = data.getData();
    }

    if (requestCode == TransistorKeys.REQUEST_LOAD_IMAGE && resultCode == Activity.RESULT_OK && newImageUri != null) {
        ImageHelper imageHelper = new ImageHelper(newImageUri, mActivity);
        Bitmap newImage = imageHelper.getInputImage();

        if (newImage != null && mTempStationID != -1) {
            // write image to storage
            File stationImageFile = mTempStation.getStationImageFile();
            try (FileOutputStream out = new FileOutputStream(stationImageFile)) {
                newImage.compress(Bitmap.CompressFormat.PNG, 100, out);
            } catch (IOException e) {
                LogHelper.e(LOG_TAG, "Unable to save: " + newImage.toString());
            }
            // update adapter
            mCollectionAdapter.notifyItemChanged(mTempStationID);
        } else {
            LogHelper.e(LOG_TAG, "Unable to get image from media picker. Uri was: " + newImageUri.toString());
        }
    } else {
        LogHelper.e(LOG_TAG, "Unable to get image from media picker. Did not receive an Uri");
    }
}
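// For context, a minimal sketch (not part of the original) of how the image picker that
// produces this result might be launched. Intent.ACTION_GET_CONTENT with an "image/*"
// type is one common choice; the actual Transistor code may differ.
private void selectImageFromPicker() {
    Intent pickImageIntent = new Intent(Intent.ACTION_GET_CONTENT);
    pickImageIntent.setType("image/*");
    startActivityForResult(pickImageIntent, TransistorKeys.REQUEST_LOAD_IMAGE);
}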
@Override
public void runOpMode() throws InterruptedException {
    // map servos, motors, and sensors from the hardware configuration
    climber = hardwareMap.servo.get(Keys.climber);
    swivel = hardwareMap.servo.get(Keys.swivel);
    hang = hardwareMap.servo.get(Keys.hang);
    clampLeft = hardwareMap.servo.get(Keys.clampLeft);
    clampRight = hardwareMap.servo.get(Keys.clampRight);
    dump = hardwareMap.servo.get(Keys.dump);
    fr = hardwareMap.dcMotor.get(Keys.frontRight);
    fl = hardwareMap.dcMotor.get(Keys.frontLeft);
    bl = hardwareMap.dcMotor.get(Keys.backLeft);
    br = hardwareMap.dcMotor.get(Keys.backRight);
    collector = hardwareMap.dcMotor.get(Keys.collector);
    fl.setDirection(DcMotor.Direction.REVERSE);
    bl.setDirection(DcMotor.Direction.REVERSE);
    dump.setPosition(Keys.DUMP_INIT);
    swivel.setPosition(Keys.SWIVEL_CENTER);
    hang.setPosition(Keys.HANG_INIT);
    clampLeft.setPosition(Keys.CLAMP_LEFT_INIT);
    clampRight.setPosition(Keys.CLAMP_RIGHT_INIT);
    climber.setPosition(Keys.CLIMBER_INITIAL_STATE);
    collector.setDirection(DcMotor.Direction.REVERSE);
    sonarAbovePhone = hardwareMap.analogInput.get(Keys.SONAR_ABOVE_PHONE);
    sonarFoot = hardwareMap.analogInput.get(Keys.SONAR_FOOT);
    navx_device = AHRS.getInstance(
            hardwareMap.deviceInterfaceModule.get(Keys.advancedSensorModule),
            Keys.NAVX_DIM_I2C_PORT,
            AHRS.DeviceDataType.kProcessedData,
            Keys.NAVX_DEVICE_UPDATE_RATE_HZ);

    // wait for the navX gyro to finish calibrating
    while (!calibration_complete) {
        calibration_complete = !navx_device.isCalibrating();
        if (!calibration_complete) {
            telemetry.addData("Calibration Complete?", "No");
        }
    }
    telemetry.addData("Calibration Complete?", "Yes");
    // telemetry.addData("Start Autonomous?", "Yes");

    waitForStart();

    smoothMoveVol2(48, false);
    telemetry.addData("starting", "smoothDump starting");
    timer = new ElapsedTime();
    timer.reset();
    smoothDump(timer);

    // get the camera instance from the robot controller activity and register this
    // op mode for the on-picture-taken protocol
    mCamera = ((FtcRobotControllerActivity) hardwareMap.appContext).mCamera;
    telemetry.addData("camera", "initing camera preview");
    ((FtcRobotControllerActivity) hardwareMap.appContext).initCameraPreview(mCamera, this);

    // wait, because the handler in initCameraPreview delays three seconds before it takes a picture
    sleep(Vision.RETRIEVE_FILE_TIME);

    // retrieve the saved image path and decode it to a Bitmap
    SharedPreferences prefs = hardwareMap
            .appContext
            .getApplicationContext()
            .getSharedPreferences("com.quan.companion", Context.MODE_PRIVATE);
    String path = prefs.getString(Keys.pictureImagePathSharedPrefsKeys, "No path found");
    Log.e("path", path);
    telemetry.addData("image", path);
    // debug stuff - telemetry.addData("camera", "path: " + path);
    File imgFile = new File(path);
    image = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
    Log.e("image", image.toString());

    // run vision processing on the picture that was just taken to find the beacon
    VisionProcess mVP = new VisionProcess(image);
    Log.e("starting output", "start");
    telemetry.addData("starting output", "doing smart computer stuff now");
    Beacon beacon = mVP.output(hardwareMap.appContext);
    Log.e("beacon", beacon.toString());
    telemetry.addData("beacon", beacon);
}
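// For context, a minimal sketch (not part of the original) of the other half of the
// SharedPreferences handoff above: a picture callback that saves the JPEG to a file and
// stores its path under Keys.pictureImagePathSharedPrefsKeys. The real code lives in
// FtcRobotControllerActivity and may differ; the file name "beacon.jpg" is hypothetical.
Camera.PictureCallback jpegCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        File pictureFile = new File(getFilesDir(), "beacon.jpg"); // hypothetical location
        try (FileOutputStream out = new FileOutputStream(pictureFile)) {
            out.write(data);
        } catch (IOException e) {
            Log.e("Camera", "Could not write picture file", e);
            return;
        }
        // publish the path so the op mode can pick it up after sleep(Vision.RETRIEVE_FILE_TIME)
        getSharedPreferences("com.quan.companion", Context.MODE_PRIVATE)
                .edit()
                .putString(Keys.pictureImagePathSharedPrefsKeys, pictureFile.getAbsolutePath())
                .apply();
    }
};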