uploading images to firebase via blob fails

I have gone through all of the posts and I’m a bit confused on how this works. I’m new to all this and I’m sorry about my ignorance, but I’ve been working on this for days and I truly need some help.

I have my code pasted below, but basically my camera can take a picture and render a preview, yet I can’t get it to upload to Firebase. If I change `data` to `uri` it won’t take a picture or save it, and when I use `data.uri` I can’t pass it into `uploadImageAsync`.

The entire camera page code is pasted at the very end

reason why I have data.uri vs. uri

// this doesn't work unless I change `uri` to `data.uri`
      const uri = await cameraRef.current.takePictureAsync(options);
      //camera roll (saving picture)
      const asset = await MediaLibrary.createAssetAsync(uri);

part of the code that uploads to firebase

  const takePicture = async () => {
    if (cameraRef.current) {
      const options = { quality: 0.5, base64: true, skipProcessing: true };
      const data = await cameraRef.current.takePictureAsync(options);
      //camera roll (saving picture)
      const asset = await MediaLibrary.createAssetAsync(data.uri);

      // Upload the capture to Firebase Storage (the declaration below is
      // hoisted, so calling it here is fine).
      const imageurl = await uploadImageAsync(data.uri);

      /**
       * Reads the local file at `localUri` as a Blob over XHR and uploads it
       * to Firebase Storage, returning the public download URL.
       */
      async function uploadImageAsync(localUri) {
        const blob = await new Promise((resolve, reject) => {
          const xhr = new XMLHttpRequest();
          xhr.onload = function() {
            resolve(xhr.response);
          };
          xhr.onerror = function(e) {
            console.log(e);
            reject(new TypeError('Network request failed'));
          };
          xhr.responseType = 'blob';
          // BUG FIX: the original passed an undefined `uri` here instead of
          // the function's parameter.
          xhr.open('GET', localUri, true);
          xhr.send(null);
        });

        // BUG FIX: child() requires a string path; a timestamped name keeps
        // each upload from overwriting the previous one.
        const ref = firebase
          .storage()
          .ref()
          .child(`images/${Date.now()}`);
        const snapshot = await ref.put(blob);
        // React Native's Blob implements close() to free native memory; the
        // standard web Blob API does not, so guard the call.
        if (typeof blob.close === 'function') {
          blob.close();
        }
        return await snapshot.ref.getDownloadURL();
      }

      const source = data.uri;
      console.log(data);
      if (source) {
        await cameraRef.current.pausePreview();
        setIsPreview(true);
        console.log("picture source", source);
      }
    }
  };

My entire camera page code


import React, { useState, useRef, useEffect } from "react";
import { StyleSheet, Dimensions, View, Text, TouchableOpacity, SafeAreaView, Alert } from "react-native";
import { Camera } from "expo-camera";
import { Video } from "expo-av";
import { MaterialCommunityIcons } from '@expo/vector-icons';
import * as MediaLibrary from 'expo-media-library';
import * as Permissions from 'expo-permissions';
import * as firebase from 'firebase';
import * as FaceDetector from 'expo-face-detector';
import uuid from '../utils/uuid';

// Scale the UI control sizes off the window height so the buttons stay
// proportional across different device screens.
const WINDOW_HEIGHT = Dimensions.get("window").height;
const closeButtonSize = Math.floor(WINDOW_HEIGHT * 0.032);
const captureSize = Math.floor(WINDOW_HEIGHT * 0.09);


export default function CameraPage() {
  const [hasPermission, setHasPermission] = useState(null);
  const [cameraType, setCameraType] = useState(Camera.Constants.Type.back);
  const [isPreview, setIsPreview] = useState(false);
  const [isCameraReady, setIsCameraReady] = useState(false);
  const [isVideoRecording, setIsVideoRecording] = useState(false);
  const [videoSource, setVideoSource] = useState(null);
  const [focusType, setFocusType] = useState(Camera.Constants.AutoFocus.on);
  const [faceDetect, setFaceDetect] = useState()
  const [rollPermision, setRollPermission] = useState(null);
  const cameraRef = useRef();



  useEffect(() => {
    (async () => {
      const { status } = await Camera.requestPermissionsAsync();
      setHasPermission(status === "granted");
      // camera roll
      const { cam_roll } = await Permissions.askAsync(Permissions.CAMERA_ROLL);
      setRollPermission(cam_roll === "granted");
      setRollPermission(true);
    }
    )();
  }, []);

  const onCameraReady = () => {
    setIsCameraReady(true);
  };
  const takePicture = async () => {
    if (cameraRef.current) {
      const options = { quality: 0.5, base64: true, skipProcessing: true };
      const data = await cameraRef.current.takePictureAsync(options);
      //camera roll (saving picture)
      const asset = await MediaLibrary.createAssetAsync(data.uri);

      // uploding image
      const uploadUrl = data.uri

      async function uploadImageAsync(uploadUrl) {
        const blob = await new Promise((resolve, reject) => {
          const xhr = new XMLHttpRequest();
          xhr.onload = function () {
            resolve(xhr.response);
          };
          xhr.onerror = function (e) {
            console.log(e);
            reject(new TypeError('Network request failed'));
          };
          xhr.responseType = 'blob';
          xhr.open('GET', uri, true);
          xhr.send(null);
        });

        const ref = firebase
          .storage()
          .ref()
          .child(uuid.v4());
        const snapshot = await ref.put(blob);

        blob.close();

      }
    // uploading image

      const source = data.uri;
      console.log(data);
      if (source) {
        await cameraRef.current.pausePreview();
        setIsPreview(true);
        console.log("picture source", source);
      }
    }
  };
  const recordVideo = async () => {
    if (cameraRef.current) {
      try {
        const videoRecordPromise = cameraRef.current.recordAsync();
        if (videoRecordPromise) {
          setIsVideoRecording(true);
          const data = await videoRecordPromise;
          const source = data.uri;
          if (source) {
            setIsPreview(true);
            console.log("video source", source);
            setVideoSource(source);
          }
        }
      } catch (error) {
        console.warn(error);
      }
    }
  };
  
  const stopVideoRecording = () => {
    if (cameraRef.current) {
      setIsPreview(false);
      setIsVideoRecording(false);
      cameraRef.current.stopRecording();
    }
  };
  const switchCamera = () => {
    if (isPreview) {
      return;
    }
    setCameraType((prevCameraType) =>
      prevCameraType === Camera.Constants.Type.back
        ? Camera.Constants.Type.front
        : Camera.Constants.Type.back
    );
  };
  const cancelPreview = async () => {
    await cameraRef.current.resumePreview();
    setIsPreview(false);
    setVideoSource(null);
  };



  const renderCancelPreviewButton = () => (
    <TouchableOpacity onPress={cancelPreview} style={styles.closeButton}>
      <View style={[styles.closeCross, { transform: [{ rotate: "45deg" }] }]} />
      <View
        style={[styles.closeCross, { transform: [{ rotate: "-45deg" }] }]}
      />
    </TouchableOpacity>
  );

  const renderVideoPlayer = () => (
    <Video
      source={{ uri: videoSource }}
      shouldPlay={true}
      style={styles.media}
    />
  );
  const renderVideoRecordIndicator = () => (
    <View style={styles.recordIndicatorContainer}>
      <View style={styles.recordDot} />
      <Text style={styles.recordTitle}>{"Recording..."}</Text>
    </View>
  );
  const renderCaptureControl = () => (
    <View style={styles.control}>
      <TouchableOpacity disabled={!isCameraReady} onPress={switchCamera}>
        <Text style={styles.text}>{"Flip"}</Text>
        <MaterialCommunityIcons
          name="camera-switch"
          style={{ color: "#fff", fontSize: 40 }}
        />
      </TouchableOpacity>
      <TouchableOpacity
        activeOpacity={0.7}
        disabled={!isCameraReady}
        onLongPress={recordVideo}
        onPressOut={stopVideoRecording}
        onPress={takePicture}
        style={styles.capture}
      />
    </View>
  );

  if (hasPermission === null || rollPermision === null) {
    return <View />;
  }
  if (hasPermission === false || rollPermision === false) {
    return <Text style={styles.text}>No access to camera</Text>;
  }


  return (
    <SafeAreaView style={styles.container}>
      <Camera
        ref={cameraRef}
        style={styles.container}
        type={cameraType}
        // flashMode={Camera.Constants.FlashMode.on}
        useCamera2Api={true}
        ratio={"16:9"}
        onCameraReady={onCameraReady}
        autoFocus={focusType}
        // onFacesDetected={this.handleFacesDetected}
        faceDetectorSettings={{
          mode: FaceDetector.Constants.Mode.accurate,
          detectLandmarks: FaceDetector.Constants.Landmarks.all,
          runClassifications: FaceDetector.Constants.Classifications.all,
          minDetectionInterval: 100,
          tracking: true,
        }}


        onMountError={(error) => {
          console.log("cammera error", error);
        }}
      />
      <View style={styles.container}>
        {isVideoRecording && renderVideoRecordIndicator()}
        {videoSource && renderVideoPlayer()}
        {isPreview && renderCancelPreviewButton()}
        {isPreview && uploadRenderPreviewButton()}
        {!videoSource && !isPreview && renderCaptureControl()}
      </View>
    </SafeAreaView>
  );
}
// Stylesheet for the camera page UI overlays.
const styles = StyleSheet.create({
  // Fills the whole screen; shared by the Camera view and the overlay layer.
  container: {
    ...StyleSheet.absoluteFillObject,
  },
  // Circular "X" button shown while previewing a capture.
  closeButton: {
    position: "absolute",
    top: 35,
    left: 15,
    height: closeButtonSize,
    width: closeButtonSize,
    borderRadius: Math.floor(closeButtonSize / 2),
    justifyContent: "center",
    alignItems: "center",
    backgroundColor: "#c4c5c4",
    opacity: 0.7,
    zIndex: 2,
  },
  media: {
    ...StyleSheet.absoluteFillObject,
  },
  // One stroke of the "X"; two of these are rotated +/-45deg.
  closeCross: {
    width: "68%",
    height: 1,
    backgroundColor: "black",
  },
  // Bottom row holding the flip button and shutter.
  control: {
    position: "absolute",
    flexDirection: "row",
    bottom: 38,
    width: "100%",
    alignItems: "center",
    justifyContent: "center",
  },
  // Round shutter button.
  capture: {
    backgroundColor: "#f5f6f5",
    height: captureSize,
    width: captureSize,
    // BUG FIX: borderRadius was specified twice in this object; the earlier
    // `borderRadius: 5` was dead code because the later duplicate key won.
    borderRadius: Math.floor(captureSize / 2),
    marginHorizontal: 31,
  },
  // "Recording..." pill shown at the top while capturing video.
  recordIndicatorContainer: {
    flexDirection: "row",
    position: "absolute",
    top: 25,
    alignSelf: "center",
    justifyContent: "center",
    alignItems: "center",
    backgroundColor: "transparent",
    opacity: 0.7,
  },
  recordTitle: {
    fontSize: 14,
    color: "#ffffff",
    textAlign: "center",
  },
  recordDot: {
    borderRadius: 3,
    height: 6,
    width: 6,
    backgroundColor: "#ff0000",
    marginHorizontal: 5,
  },
  text: {
    color: "#fff",
  },
});

Please provide the following:

  1. SDK Version: 39
  2. Platforms(Android/iOS/web/all): android and iOS

When you say “I can’t get it to upload it to firebase” what exactly does that mean? Does the app crash, do you get an XHR error, etc.?

I have also tried this

await new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onload = function() {
      resolve(xhr.response);
    };
    xhr.onerror = function(e) {
      console.log(e);
      reject(new TypeError('Network request failed'));
    };
    xhr.responseType = 'blob';
    xhr.open('GET', uri, true);
    xhr.send(null);
  });

as well as

let picture = await fetch(imagePath);
picture = await picture.blob();

It generally works the first time but then on subsequent calls I get this

Unable to resolve data for blob: 6a275923-2372-4525-b46d-a4f8cf13886b

Is that similar to what you’re experiencing? Btw. I’m on iOS.

@a_creative_username I made another post that might be related to your issue Issues uploading image to S3 via Amplify (iOS)

Mine is not even giving me an error. I think one of the issues might be that I’m saving it to the gallery and then trying to refer to it.

I’m trying to now set the image captured set to a value and then refer to it and see how it goes.

would you be able to share more of your code form capturing the image to sending it to firebase? That way I can read it and understand it more. Sorry I’m new to all of these and it’s a bit harder for me to piece things together.

P.S.I’m testing all of these on android and ios simultaneously.

That sounds frustrating, I’m sorry to hear it’s not even providing an error message. I am curious, if it’s not giving you an error how do you know it’s not working?

As a point of clarification, I’m actually working with AWS S3’s Amplify Storage.put() API, not Firebase. That being said I think the use case is likely quite similar.

You can take a look at this github issue response Expo Blob S3 upload fails on iOS (Unable to resolve data for blob) · Issue #7257 · aws-amplify/amplify-js · GitHub to see a snippet that’s more-or-less identical to what I’m doing. Perhaps you can adapt it to work with Firebase.

1 Like

It’s showing the uri of the image but it’s still not showing it in firebase storage. I’m calling takePicture when the shutter button is clicked. Is there something wrong in my code? I really think something in my code might be wrong.

// NOTE(review): in this version, `SavedPic` is a fetch Response object, not
// a file URI, and `uploadImageAsync` is declared but never invoked — and even
// if it were, it builds the blob and then does nothing with it (there is no
// firebase ref().put() here). That is why nothing appears in Storage.
const takePicture = async () => {
    if (cameraRef.current) {
      const options = { quality: 0.5, base64: true, skipProcessing: true };
      const result = await cameraRef.current.takePictureAsync(options);
      // NOTE(review): fetch() returns a Response, not a URI string.
      const SavedPic = await fetch(result.uri);
   

      // NOTE(review): never called from anywhere.
      async function uploadImageAsync(SavedPic) {
        const blob = await new Promise((resolve, reject) => {
          const xhr = new XMLHttpRequest();
          xhr.onload = function () {
            resolve(xhr.response);
          };
          xhr.onerror = function (e) {
            console.log(e);
            reject(new TypeError('Network request failed'));
          };
          xhr.responseType = 'blob';
          // NOTE(review): xhr.open expects a URL string; SavedPic here is a
          // Response object, so this request cannot succeed.
          xhr.open('GET', SavedPic, true);
          xhr.send(null);
        });
      }

      const source = result.uri;
      console.log(result);
      if (source) {
        await cameraRef.current.pausePreview();
        setIsPreview(true);
        console.log("picture source", source);
      }
    }
  };

When I tried the other method that you had also tried, I would simply get an error saying the response is read-only.

It’s showing the uri of the image but it’s still not showing it in firebase storage. I’m calling takePicture when the shutter button is clicked. Is there something wrong in my code? I really think something in my code might be wrong.

  // NOTE(review): `SavedPic` is declared with `const` on one line and then
  // reassigned on the next, which throws "Assignment to constant variable"
  // at runtime. Also, the blob is never uploaded — there is no
  // firebase ref().put() anywhere in this version.
  const takePicture = async () => {
    if (cameraRef.current) {
      const options = { quality: 0.5, base64: true, skipProcessing: true };
//my other method of trying to save the picture to firebase
      const result = await cameraRef.current.takePictureAsync(options);
      const SavedPic = await fetch(result.uri);
      // NOTE(review): reassigning a const throws a TypeError here.
      SavedPic = await SavedPic.blob();
// end of the variation 

      const source = result.uri;
      console.log(result);
      if (source) {
        await cameraRef.current.pausePreview();
        setIsPreview(true);
        console.log("picture source", source);
      }
    }
  };

Oh, I might be able to help here!

const SavedPic = await fetch(result.uri);
SavedPic = await SavedPic.blob();

try making another variable or changing it to let SavedPic

it takes a picture and shows a preview but still doesn’t upload it to the firebase storage.

    const result = await cameraRef.current.takePictureAsync(options);
      let SavedPic = await fetch(result.uri);
      SavedPic = await SavedPic.blob();

I tried changing SavedPic to something else and it still didn’t work. Is SavedPic.blob() what sends the pic to firebase storage? Sorry I’m new to this and reading up as much as I can.

My guess is it’s still spitting out some sort of error, you may need to look in the debugger/logs to find it though.

To answer your question, no, SavedPick.blob() does not send the picture to firebase. It just converts the image data to a Blob (a type of data structure used to represent a chunk of byte data). This Blob is still only local to your phone/app. The image data is only uploaded when you call

const ref = firebase
          .storage()
          .ref()
          .child(uuid.v4());
        const snapshot = await ref.put(blob);

So here it’s the await ref.put(blob) that actually does the uploading. The rest of the code is simply setting up the camera access and getting access to the image data from the selected photo AFAIK.

Also, in one of your examples you use blob.close(). I believe that the Blob API doesn’t actually support that method and it may actually break your code, so I’d suggest removing it.

Just to clarify, let allows variables to be re-assigned whereas const only allows the variable to be assigned once, so

const someVar = 6;
someVar = 7;

will throw an error but

let someVar = 6;
someVar = 7;

is ok. Personally I find reassignment hard to read so I’d prefer something like this:

const cameraResult = await cameraRef.current.takePictureAsync(options);
const fetchPicResult = await fetch(cameraResult.uri);
const blobPic = await fetchPicResult.blob();

Docs on const: const - JavaScript | MDN
Docs on blob: Blob - Web APIs | MDN

1 Like

Thank you so much for the detailed explanation! I got it to work. What I had to do was move the whole image-upload routine outside of the photo-taking function.

Here’s a code for anyone else that I might need it for future. I will also post the firebase rule below it so you won’t have to look for that just in case:
read the edit in the code…firebase.child() needs a string passed into it or it won’t work. You might ask how would you save pictures with different titles and all that and my answer is I don’t know…for real. I don’t know. But the uuid.v4() that was passed into .child() in this code didn’t work for me because the child only can have a string passed into it. And yes I had installed and imported the uuid into the component.

Code to take picture and upload to firebase. Remember I made this in a functional component…just in case for a noob like me that is learning and something for some reason might not work.

 const takePicture = async () => {
    // Nothing to do until the camera ref has mounted.
    if (!cameraRef.current) {
      return;
    }
    const captureOptions = { quality: 0.5, base64: true, skipProcessing: true };
    const photo = await cameraRef.current.takePictureAsync(captureOptions);
    const localUri = photo.uri;
    // Hand the local file URI off to the Firebase upload helper.
    const uploadedUrl = await uploadImageAsync(photo.uri);
    console.log(localUri);
  };

  /**
   * Uploads a local image file to Firebase Storage and returns its public
   * download URL.
   *
   * XHR is used (rather than fetch) because it reliably yields a Blob for
   * file:// URIs in React Native / Expo.
   *
   * @param {string} projectSavePic - local file URI of the captured image.
   * @param {string} [storagePath] - destination object path in the bucket;
   *   defaults to a timestamped name under "images/" so each upload gets a
   *   unique name. (The original hard-coded 'images', so every new upload
   *   overwrote the previous one.)
   * @returns {Promise<string>} download URL of the uploaded file.
   * @throws {TypeError} if the local file cannot be read over XHR.
   */
  async function uploadImageAsync(projectSavePic, storagePath = `images/${Date.now()}`) {
    const blob = await new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      xhr.onload = function () {
        resolve(xhr.response);
      };
      xhr.onerror = function (e) {
        console.log(e);
        reject(new TypeError('Network request failed'));
      };
      xhr.responseType = 'blob';
      xhr.open('GET', projectSavePic, true);
      xhr.send(null);
    });

    // child() must be given a string path — it names the object in the
    // bucket; ref.put(blob) performs the actual upload.
    const ref = firebase
      .storage()
      .ref()
      .child(storagePath);
    const snapshot = await ref.put(blob);
    // React Native's Blob implements close() to free native memory; the
    // standard web Blob API does not, so guard the call.
    if (typeof blob.close === 'function') {
      blob.close();
    }
    return await snapshot.ref.getDownloadURL();
  }

Firebase rule (warning: this rule grants unauthenticated public read/write to the entire bucket — fine for quick testing, but lock it down with auth conditions before going to production)

rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write;
    }
  }
}

edit: apparently Expo’s forum was having an issue and didn’t save or post my edit — great… so I’ve retyped everything.

now I have to find a way to display all the images to the users lol.

quick question, if I wanted to save all images to a specific folder would I do that in the child() or that is just for the name?

I’m assuming that’s why I’d have to add uuid into the name so each one would create a new name and not save over the previous image

sorry about coming back you been trying to figure it out

This topic was automatically closed 20 days after the last reply. New replies are no longer allowed.