javascript - 如何使用 JSX 在 React Native 中实现自动滚动视频
问题描述
import React, {useState, useEffect, useRef} from 'react';
import {
View,
Text,
StyleSheet,
FlatList,
ScrollView,
Image,
TouchableHighlight,
TextInput,
TouchableOpacity,
Modal,
Dimensions,
SafeAreaView,
} from 'react-native';
import Voice from 'react-native-voice';
import Video from 'react-native-video';
import SimpleLineIcons from 'react-native-vector-icons/SimpleLineIcons';
import MaterialCommunityIcons from 'react-native-vector-icons/MaterialCommunityIcons';
import {videos} from './data';
// 1. Create the state that each recognizer event handler and the video list below will use.
enter center export default function App() {
const [pitch, setPitch] = useState('');
const [error, setError] = useState('');
const [end, setEnd] = useState('');
const [started, setStarted] = useState('');
const [results, setResults] = useState([]);
const [videoData, setVideos] = useState([...videos]);
const [showModal, setShowModal] = useState(false);
const [paused, setPaused] = useState(true);
const [position, setPosition] = useState({start: null, end: null});
const [muted, setMuted] = useState(true);
//Setting callbacks for the process status
useEffect(() => {
Voice.onSpeechStart = onSpeechStart;
Voice.onSpeechEnd = onSpeechEnd;
Voice.onSpeechError = onSpeechError;
Voice.onSpeechResults = onSpeechResults;
return () => {
//destroy the process after switching the screen
Voice.destroy().then(Voice.removeAllListeners);
};
}, []);
const onSpeechStart = (e) => {
//Invoked when .start() is called without error
console.log('onSpeechStart: ', e);
setStarted('√');
};
const onSpeechEnd = (e) => {
//Invoked when SpeechRecognizer stops recognition
console.log('onSpeechEnd: ', e);
setEnd('√');
};
const onSpeechError = (e) => {
//Invoked when an error occurs.
console.log('onSpeechError: ', e);
setError(JSON.stringify(e.error));
};
2. in this part we are matching the word sopken by the user and checking is if the same video is available in our data if it si then the video will come on the top
const onSpeechResults = (e) => {
//Invoked when SpeechRecognizer is finished recognizing
console.log('onSpeechResults: ', e.value[0]);
let vResult = e.value[0];
let small = vResult.toLowerCase();
setResults([small]);
setShowModal(false);
for (let i = 0; i < videoData.length - 1; i++) {
let newArray = [...videos];
if (small == newArray[i].name) {
newArray.splice(0, 0, newArray[i]);
const mainArray = newArray.filter((item, index) => {
if (item.name == small && index != 0) {
} else {
return item;
}
});
setVideos(mainArray);
}
}
};
const startRecognizing = async () => {
console.log('start listenind');
//Starts listening for speech for a specific locale
try {
await Voice.start('en-US');
setPitch('');
setError('');
setStarted('');
setResults([]);
setEnd('');
} catch (e) {
//eslint-disable-next-line
console.error(e);
}
};
const stopRecognizing = async () => {
//Stops listening for speech
try {
await Voice.stop();
} catch (e) {
//eslint-disable-next-line
console.error(e);
}
};
3. making video play n pause is out of axis
onScroll = (event) => {
const scrollPosition = event.nativeEvent.contentOffset.y;
const paused = paused;
const {start, end} = position;
if (scrollPosition < start && scrollPosition > end && paused) {
setPaused(false);
} else if (scrollPosition > start && scrollPosition < end && !paused) {
setPaused(true);
} else if (scrollPosition < end && !paused) {
setPaused(true);
}
};
4. getting the scroll y axis
const threshold = 180;
onVideoLayOut = (event) => {
const {height} = Dimensions.get('window');
const start = -(event.nativeEvent.layout.y - height + threshold);
const end =
event.nativeEvent.layout.y + event.nativeEvent.layout.height - threshold;
// console.log(start, end, 'position');
setPosition((state) => {
return {
...state,
start: start,
end: end,
};
});
console.log(position, 'position1');
};
5.this the render par using faltlist to render data
return (
<View
style={{
flex: 1,
backgroundColor: 'black',
}}>
<View
style={{
height: 70,
alignItems: 'center',
justifyContent: 'space-between',
marginHorizontal: 15,
flexDirection: 'row',
marginVertical: 15,
marginTop: 25,
}}>
<View
style={{
height: 40,
borderRadius: 20,
width: 300,
borderWidth: 1,
borderColor: '#F23C29',
}}>
<TextInput
placeholder="Search ..."
style={{flex: 1, paddingHorizontal: 30, color: 'white'}}
placeholderTextColor="grey"
/>
</View>
<View>
<TouchableOpacity
onPress={() => {
startRecognizing();
setShowModal(true);
}}>
<SimpleLineIcons color={'white'} name="microphone" size={30} />
</TouchableOpacity>
</View>
</View>
<SafeAreaView
style={{
flex: 1,
justifyContent: 'center',
alignItems: 'center',
borderColor: 'orange',
borderWidth: 1,
}}>
<FlatList
data={videoData}
// scrollEventThrottle={16}
onScroll={onScroll}
showsVerticalScrollIndicator={true}
renderItem={({item, index}) => {
return (
<View
key={index}
style={{
height: 250,
marginVertical: 20,
}}>
<Video
source={{uri: item.source}}
onLayout={onVideoLayOut}
muted={true}
resizeMode="cover"
paused={!paused}
style={{
width: '100%',
height: undefined,
aspectRatio: 2,
marginBottom: 200,
marginTop: 300,
}}
/>
<Text
style={{
fontSize: 20,
alignItems: 'center',
marginLeft: 20,
marginTop: 15,
color: 'white',
}}>
{item.title}
</Text>
</View>
);
}}
/>
</SafeAreaView>
{/* <ScrollView style={{flex: 2, width: '100%'}}>
{videoData.map((item, index) => {
return (
<View
key={index}
style={{
height: 250,
marginVertical: 20,
}}>
<Video
source={{uri: item.source}}
paused={false}
rate={1.0}
volume={1.0}
isMuted={false}
resizeMode="contain"
shouldPlay
isLooping
style={{width: '100%', height: undefined, aspectRatio: 2}}
onError={(e) => {
console.log(e, 'video data');
}}
/>
<Text
style={{
fontSize: 20,
alignItems: 'center',
marginLeft: 20,
marginTop: 15,
color: 'white',
}}>
{item.title}
</Text>
</View>
);
})}
</ScrollView> */}
<Modal
visible={showModal}
transparent={true}
onRequestClose={() => {
setShowModal(false);
}}>
<View
style={{
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'rgba(0,0,0,0.9)',
}}>
<MaterialCommunityIcons
name="microphone-settings"
size={45}
color="white"
/>
<Text
style={{
color: '#fff',
textAlign: 'center',
marginTop: 20,
fontSize: 17,
}}>
We are listening
</Text>
</View>
</Modal>
{/* <View style={{justifyContent: 'center', alignItems: 'center'}}>
<Text>Video</Text>
<View>
<TouchableHighlight
onPress={stopRecognizing}
style={styles.buttonStyle}>
<Text style={styles.buttonTextStyle}>Stop</Text>
</TouchableHighlight>
</View>
<View>
<TouchableHighlight onPress={startRecognizing}>
<SimpleLineIcons name="microphone" size={25} />
</TouchableHighlight>
</View>
{results.map((item, index) => {
return (
<Text key={`result-${index}`} style={styles.textStyle}>
{item}
</Text>
);`enter code here`
})}
<Text>Results</Text>
</View> */}
</View>
);
}
// Full-screen absolute-fill style for a background video layer.
// (Not referenced by the active render above; kept for compatibility.)
const styles = StyleSheet.create({
  backgroundVideo: {
    // Equivalent to position:'absolute' with all four edges pinned to 0.
    ...StyleSheet.absoluteFillObject,
  },
});
问题：在我的 FlatList 中渲染了三个视频。我想让第一个视频播放、其余保持暂停状态；间隔 5 秒后，应该播放下一个视频。
我想实现类似 Instagram 的效果：获得焦点的视频自动播放，其余全部暂停。
我尝试使用滚动道具和视频自动滚动库,但无法解决
视频正在渲染，但自动播放/暂停逻辑没有按预期工作。
解决方案
推荐阅读
- javascript - React 中的选项卡式内容
- angular - Angular 11 三元表达式 *ngIF
- wordpress - 获取 CPT 页面的顶级父类别名称和 URL
- flutter - 如何通过http包上传多张图片到mysql flutter
- swift - Swift 5 中的 QR 扫描仪
- c++ - OpenCV4 BOW + SIFT - 步骤和语法
- python - ImportError:无法从部分初始化的模块“sqlalchemy”导入名称“util”(很可能是由于循环导入)
- docker - 找不到包 buildah
- java - 同步与原子参考
- java - 项目中缺少 Gradle