I'm experimenting with the streaming function of the LLaVa-13b model, and it works fine with the following vanilla JS code:
```js
import Replicate from "replicate";
import EventSource from "eventsource";

const replicate = new Replicate({
  auth: `$$$$$$$$$$$$$$`,
});

async function getStreamOutput() {
  // Request a prediction with streaming output
  const prediction = await replicate.predictions.create({
    version: "2facb4a474a0462c15041b78b1ad70952ea46b5ec6ad29583c0b29dbd4249591",
    input: {
      image: "https://www.accesstheanimus.com/Ent/38.jpg",
      prompt: "Describe the image"
    },
    stream: true
  });

  // Check if the prediction has a stream URL
  if (prediction && prediction.urls && prediction.urls.stream) {
    // Create an EventSource to listen for updates
    const source = new EventSource(prediction.urls.stream, { withCredentials: true });

    source.addEventListener("output", (e) => {
      process.stdout.write(e.data);
    });

    source.addEventListener("error", (e) => {
      console.error("error", JSON.parse(e.data));
    });

    source.addEventListener("done", (e) => {
      source.close();
      console.log("done", JSON.parse(e.data));
    });
  }
}

// Invoke the function
getStreamOutput();
```
I'm trying to achieve the same result in React Native, but the stream doesn't work. With the code I have now, the tokens only show up (one per line) once the connection is finished, and only well after the dashboard shows the job is done:
```jsx
import React, { useState, useEffect } from 'react';
import { View, Text } from 'react-native';
import Replicate from 'replicate';
import EventSource from 'react-native-sse';

function ReplicateOutputScreen({ navigation }) {
  let imageUri = "https://i.imgur.com/IVUKHbj.jpeg";
  let promptText = "Describe the image";
  const [loading, setLoading] = useState(true);
  const [predictionStream, setPredictionStream] = useState([]);

  useEffect(() => {
    async function getStreamOutput() {
      const replicate = new Replicate({
        auth: '$$$$$$$$$$$$$$$$$$$$$$$$$$',
      });
      try {
        const prediction = await replicate.predictions.create({
          version: "2facb4a474a0462c15041b78b1ad70952ea46b5ec6ad29583c0b29dbd4249591",
          input: {
            image: imageUri,
            prompt: promptText
          },
          stream: true
        });

        // Check if the model version supports streaming
        if (prediction && prediction.urls && prediction.urls.stream) {
          const source = new EventSource(prediction.urls.stream, { withCredentials: true });

          // try open and see
          source.addEventListener("message", (e) => {
            setPredictionStream(prev => [...prev, e.data]);
            console.log(e.data);
          });

          source.addEventListener("error", (e) => {
            console.error("Error:", JSON.parse(e.data));
          });

          source.addEventListener("done", (e) => {
            source.close();
            setLoading(false);
          });
        }
      } catch (error) {
        console.error("Error getting prediction:", error);
      }
    }

    getStreamOutput();
  }, [imageUri, promptText]);

  return (
    <View style={{ flex: 1, padding: 20 }}>
      {loading ? (
        <Text>Loading prediction...</Text>
      ) : (
        <View>
          {predictionStream.map((text, index) => <Text key={index}>{text}</Text>)}
        </View>
      )}
    </View>
  );
}

export default ReplicateOutputScreen;
```
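One difference I notice between the two snippets is the event name: the Node version subscribes to Replicate's named `output`/`done` events, while the React Native version only subscribes to the default `message` event. Below is a minimal sketch of what I plan to try next; it assumes react-native-sse dispatches custom SSE event names registered via `addEventListener`, and the `listenToReplicateStream` helper is just illustrative, not part of my app:

```js
// Sketch only: assumes react-native-sse fires listeners registered for the
// named SSE events that Replicate's stream emits ("output", "error", "done").
import EventSource from "react-native-sse";

function listenToReplicateStream(streamUrl, onToken, onDone) {
  const source = new EventSource(streamUrl, { withCredentials: true });

  // Each "output" event should carry one chunk of model output in e.data
  source.addEventListener("output", (e) => {
    onToken(e.data);
  });

  // Connection-level errors from react-native-sse don't carry e.data,
  // so log the whole event object instead of JSON.parse(e.data)
  source.addEventListener("error", (e) => {
    console.error("Stream error:", e);
  });

  // "done" marks the end of the stream; close the connection
  source.addEventListener("done", () => {
    source.close();
    onDone();
  });

  return source;
}
```

I would call this from the `useEffect` above with `prediction.urls.stream`, appending each token to `predictionStream` and setting `loading` to false on `done`, but I haven't yet been able to confirm on a device whether the events actually arrive incrementally.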
Can you confirm whether the streaming public beta is supported on React Native?