feat(flow): input feather, center puppet, smiley nav, arc buttons
- Input bar: show bouncing feather icon while typing (circle static); send after pause or on submit; debounced isTyping state - Move puppet to center empty state (replacing feather); hide when there are messages - Add smiley button next to mic; same as Talk (web: location, native: modal) - Puppet actions: place four buttons in arc above puppet with icons; increase spacing between buttons and puppet
This commit is contained in:
43
package-lock.json
generated
43
package-lock.json
generated
@@ -22,12 +22,14 @@
|
|||||||
"buffer": "^6.0.3",
|
"buffer": "^6.0.3",
|
||||||
"expo": "~52.0.0",
|
"expo": "~52.0.0",
|
||||||
"expo-asset": "~11.0.5",
|
"expo-asset": "~11.0.5",
|
||||||
|
"expo-av": "~15.0.2",
|
||||||
"expo-constants": "~17.0.8",
|
"expo-constants": "~17.0.8",
|
||||||
"expo-crypto": "~14.0.2",
|
"expo-crypto": "~14.0.2",
|
||||||
"expo-font": "~13.0.4",
|
"expo-font": "~13.0.4",
|
||||||
"expo-haptics": "~14.0.0",
|
"expo-haptics": "~14.0.0",
|
||||||
"expo-image-picker": "^17.0.10",
|
"expo-image-picker": "^17.0.10",
|
||||||
"expo-linear-gradient": "~14.0.2",
|
"expo-linear-gradient": "~14.0.2",
|
||||||
|
"expo-speech": "~13.0.1",
|
||||||
"expo-status-bar": "~2.0.0",
|
"expo-status-bar": "~2.0.0",
|
||||||
"react": "18.3.1",
|
"react": "18.3.1",
|
||||||
"react-dom": "18.3.1",
|
"react-dom": "18.3.1",
|
||||||
@@ -39,6 +41,7 @@
|
|||||||
"react-native-svg": "^15.15.2",
|
"react-native-svg": "^15.15.2",
|
||||||
"react-native-view-shot": "^3.8.0",
|
"react-native-view-shot": "^3.8.0",
|
||||||
"react-native-web": "~0.19.13",
|
"react-native-web": "~0.19.13",
|
||||||
|
"react-native-webview": "13.12.2",
|
||||||
"readable-stream": "^4.7.0",
|
"readable-stream": "^4.7.0",
|
||||||
"vm-browserify": "^1.1.2"
|
"vm-browserify": "^1.1.2"
|
||||||
},
|
},
|
||||||
@@ -6134,6 +6137,23 @@
|
|||||||
"react-native": "*"
|
"react-native": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/expo-av": {
|
||||||
|
"version": "15.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/expo-av/-/expo-av-15.0.2.tgz",
|
||||||
|
"integrity": "sha512-AHIHXdqLgK1dfHZF0JzX3YSVySGMrWn9QtPzaVjw54FAzvXfMt4sIoq4qRL/9XWCP9+ICcCs/u3EcvmxQjrfcA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"expo": "*",
|
||||||
|
"react": "*",
|
||||||
|
"react-native": "*",
|
||||||
|
"react-native-web": "*"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"react-native-web": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/expo-constants": {
|
"node_modules/expo-constants": {
|
||||||
"version": "17.0.8",
|
"version": "17.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/expo-constants/-/expo-constants-17.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/expo-constants/-/expo-constants-17.0.8.tgz",
|
||||||
@@ -6278,6 +6298,15 @@
|
|||||||
"invariant": "^2.2.4"
|
"invariant": "^2.2.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/expo-speech": {
|
||||||
|
"version": "13.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/expo-speech/-/expo-speech-13.0.1.tgz",
|
||||||
|
"integrity": "sha512-J7tvFzORsFpIKihMnayeY5lCPc15giDrlN+ws2uUNo0MvLv1HCYEu/5p3+aMmZXXsY5I1QlconD4CwRWw3JFig==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"expo": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/expo-status-bar": {
|
"node_modules/expo-status-bar": {
|
||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/expo-status-bar/-/expo-status-bar-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/expo-status-bar/-/expo-status-bar-2.0.1.tgz",
|
||||||
@@ -10034,6 +10063,20 @@
|
|||||||
"integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==",
|
"integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/react-native-webview": {
|
||||||
|
"version": "13.12.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-native-webview/-/react-native-webview-13.12.2.tgz",
|
||||||
|
"integrity": "sha512-OpRcEhf1IEushREax6rrKTeqGrHZ9OmryhZLBLQQU4PwjqVsq55iC8OdYSD61/F628f9rURn9THyxEZjrknpQQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"escape-string-regexp": "^4.0.0",
|
||||||
|
"invariant": "2.2.4"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "*",
|
||||||
|
"react-native": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react-native/node_modules/babel-plugin-syntax-hermes-parser": {
|
"node_modules/react-native/node_modules/babel-plugin-syntax-hermes-parser": {
|
||||||
"version": "0.23.1",
|
"version": "0.23.1",
|
||||||
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-hermes-parser/-/babel-plugin-syntax-hermes-parser-0.23.1.tgz",
|
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-hermes-parser/-/babel-plugin-syntax-hermes-parser-0.23.1.tgz",
|
||||||
|
|||||||
@@ -27,8 +27,10 @@
|
|||||||
"expo-crypto": "~14.0.2",
|
"expo-crypto": "~14.0.2",
|
||||||
"expo-font": "~13.0.4",
|
"expo-font": "~13.0.4",
|
||||||
"expo-haptics": "~14.0.0",
|
"expo-haptics": "~14.0.0",
|
||||||
|
"expo-av": "~15.0.2",
|
||||||
"expo-image-picker": "^17.0.10",
|
"expo-image-picker": "^17.0.10",
|
||||||
"expo-linear-gradient": "~14.0.2",
|
"expo-linear-gradient": "~14.0.2",
|
||||||
|
"expo-speech": "~13.0.1",
|
||||||
"expo-status-bar": "~2.0.0",
|
"expo-status-bar": "~2.0.0",
|
||||||
"react": "18.3.1",
|
"react": "18.3.1",
|
||||||
"react-dom": "18.3.1",
|
"react-dom": "18.3.1",
|
||||||
@@ -39,6 +41,7 @@
|
|||||||
"react-native-screens": "~4.4.0",
|
"react-native-screens": "~4.4.0",
|
||||||
"react-native-svg": "^15.15.2",
|
"react-native-svg": "^15.15.2",
|
||||||
"react-native-view-shot": "^3.8.0",
|
"react-native-view-shot": "^3.8.0",
|
||||||
|
"react-native-webview": "13.12.2",
|
||||||
"react-native-web": "~0.19.13",
|
"react-native-web": "~0.19.13",
|
||||||
"readable-stream": "^4.7.0",
|
"readable-stream": "^4.7.0",
|
||||||
"vm-browserify": "^1.1.2"
|
"vm-browserify": "^1.1.2"
|
||||||
|
|||||||
@@ -1,16 +1,35 @@
|
|||||||
/**
|
/**
|
||||||
* FlowPuppetSlot - Slot for FlowScreen to show interactive AI puppet.
|
* FlowPuppetSlot - Slot for FlowScreen to show interactive AI puppet.
|
||||||
* Composes PuppetView and optional action buttons; does not depend on FlowScreen logic.
|
* Composes PuppetView and optional action buttons; does not depend on FlowScreen logic.
|
||||||
|
* Talk button: on web opens AI Studio in current tab (site blocks iframe); on native opens in-app WebView.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import React, { useState, useCallback } from 'react';
|
import React, { useState, useCallback } from 'react';
|
||||||
import { View, Text, StyleSheet, TouchableOpacity } from 'react-native';
|
import { View, Text, StyleSheet, TouchableOpacity, Modal, Platform } from 'react-native';
|
||||||
|
import { SafeAreaView } from 'react-native-safe-area-context';
|
||||||
import { PuppetView } from './PuppetView';
|
import { PuppetView } from './PuppetView';
|
||||||
import type { FlowPuppetSlotProps, PuppetAction } from './types';
|
import type { FlowPuppetSlotProps, PuppetAction } from './types';
|
||||||
import { colors } from '../../theme/colors';
|
import { colors } from '../../theme/colors';
|
||||||
import { borderRadius, spacing } from '../../theme/colors';
|
import { borderRadius, spacing, shadows } from '../../theme/colors';
|
||||||
|
import { Ionicons } from '@expo/vector-icons';
|
||||||
|
|
||||||
const ACTIONS: PuppetAction[] = ['smile', 'jump', 'shake'];
|
const isWeb = Platform.OS === 'web';
|
||||||
|
|
||||||
|
// Only load WebView on native (it does not support web platform)
|
||||||
|
const WebView = isWeb
|
||||||
|
? null
|
||||||
|
: require('react-native-webview').WebView;
|
||||||
|
|
||||||
|
const PUPPET_ACTIONS: PuppetAction[] = ['smile', 'jump', 'shake'];
|
||||||
|
|
||||||
|
const TALK_WEB_URL = 'https://aistudio.google.com/apps/drive/1L39svCbfbRc48Eby64Q0rSbSoQZiWQBp?showPreview=true&showAssistant=true&fullscreenApplet=true';
|
||||||
|
|
||||||
|
const ACTION_CONFIG: Record<string, { label: string; icon: keyof typeof Ionicons.glyphMap }> = {
|
||||||
|
smile: { label: 'Smile', icon: 'happy-outline' },
|
||||||
|
jump: { label: 'Jump', icon: 'arrow-up-circle-outline' },
|
||||||
|
shake: { label: 'Shake', icon: 'swap-horizontal' },
|
||||||
|
talk: { label: 'Talk', icon: 'chatbubble-ellipses-outline' },
|
||||||
|
};
|
||||||
|
|
||||||
export function FlowPuppetSlot({
|
export function FlowPuppetSlot({
|
||||||
currentAction,
|
currentAction,
|
||||||
@@ -19,6 +38,7 @@ export function FlowPuppetSlot({
|
|||||||
showActionButtons = true,
|
showActionButtons = true,
|
||||||
}: FlowPuppetSlotProps) {
|
}: FlowPuppetSlotProps) {
|
||||||
const [localAction, setLocalAction] = useState<PuppetAction>(currentAction);
|
const [localAction, setLocalAction] = useState<PuppetAction>(currentAction);
|
||||||
|
const [showTalkWeb, setShowTalkWeb] = useState(false);
|
||||||
|
|
||||||
const effectiveAction = currentAction !== 'idle' ? currentAction : localAction;
|
const effectiveAction = currentAction !== 'idle' ? currentAction : localAction;
|
||||||
|
|
||||||
@@ -38,21 +58,72 @@ export function FlowPuppetSlot({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<View style={styles.wrapper}>
|
<View style={styles.wrapper}>
|
||||||
<PuppetView action={effectiveAction} isTalking={isTalking} />
|
{/* Buttons in an arc above puppet, arc follows puppet shape; extra spacing to puppet */}
|
||||||
{showActionButtons && (
|
{showActionButtons && (
|
||||||
<View style={styles.actions}>
|
<View style={styles.actionsRow}>
|
||||||
{ACTIONS.map((act) => (
|
{PUPPET_ACTIONS.map((act, index) => {
|
||||||
|
const config = ACTION_CONFIG[act];
|
||||||
|
const isCenter = index === 1 || index === 2;
|
||||||
|
return (
|
||||||
|
<View key={act} style={[styles.arcSlot, isCenter && styles.arcSlotCenter]}>
|
||||||
<TouchableOpacity
|
<TouchableOpacity
|
||||||
key={act}
|
|
||||||
style={styles.actionBtn}
|
style={styles.actionBtn}
|
||||||
onPress={() => handleAction(act)}
|
onPress={() => handleAction(act)}
|
||||||
activeOpacity={0.8}
|
activeOpacity={0.8}
|
||||||
>
|
>
|
||||||
<Text style={styles.actionLabel}>{act}</Text>
|
<Ionicons name={config.icon} size={22} color={colors.nautical.teal} />
|
||||||
|
<Text style={styles.actionLabel}>{config.label}</Text>
|
||||||
</TouchableOpacity>
|
</TouchableOpacity>
|
||||||
))}
|
</View>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
<View style={styles.arcSlot}>
|
||||||
|
<TouchableOpacity
|
||||||
|
style={[styles.actionBtn, styles.talkBtn]}
|
||||||
|
onPress={() => {
|
||||||
|
if (isWeb && typeof (globalThis as any).window !== 'undefined') {
|
||||||
|
(globalThis as any).window.location.href = TALK_WEB_URL;
|
||||||
|
} else {
|
||||||
|
setShowTalkWeb(true);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
activeOpacity={0.8}
|
||||||
|
>
|
||||||
|
<Ionicons name={ACTION_CONFIG.talk.icon} size={22} color={colors.nautical.teal} />
|
||||||
|
<Text style={[styles.actionLabel, styles.talkLabel]}>Talk</Text>
|
||||||
|
</TouchableOpacity>
|
||||||
|
</View>
|
||||||
</View>
|
</View>
|
||||||
)}
|
)}
|
||||||
|
<PuppetView action={effectiveAction} isTalking={isTalking} />
|
||||||
|
|
||||||
|
<Modal
|
||||||
|
visible={showTalkWeb}
|
||||||
|
animationType="slide"
|
||||||
|
onRequestClose={() => setShowTalkWeb(false)}
|
||||||
|
>
|
||||||
|
<SafeAreaView style={styles.webModal} edges={['top']}>
|
||||||
|
<View style={styles.webModalHeader}>
|
||||||
|
<TouchableOpacity
|
||||||
|
style={styles.webModalClose}
|
||||||
|
onPress={() => setShowTalkWeb(false)}
|
||||||
|
hitSlop={{ top: 12, bottom: 12, left: 12, right: 12 }}
|
||||||
|
>
|
||||||
|
<Ionicons name="close" size={28} color={colors.flow.text} />
|
||||||
|
</TouchableOpacity>
|
||||||
|
<Text style={styles.webModalTitle} numberOfLines={1}>AI Studio Talk</Text>
|
||||||
|
</View>
|
||||||
|
{WebView ? (
|
||||||
|
<WebView
|
||||||
|
source={{ uri: TALK_WEB_URL }}
|
||||||
|
style={styles.webView}
|
||||||
|
onError={(e) => console.warn('WebView error:', e.nativeEvent)}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<View style={styles.webView} />
|
||||||
|
)}
|
||||||
|
</SafeAreaView>
|
||||||
|
</Modal>
|
||||||
</View>
|
</View>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -63,23 +134,71 @@ const styles = StyleSheet.create({
|
|||||||
justifyContent: 'center',
|
justifyContent: 'center',
|
||||||
paddingVertical: spacing.lg,
|
paddingVertical: spacing.lg,
|
||||||
},
|
},
|
||||||
actions: {
|
actionsRow: {
|
||||||
flexDirection: 'row',
|
flexDirection: 'row',
|
||||||
marginTop: spacing.lg,
|
flexWrap: 'wrap',
|
||||||
|
justifyContent: 'center',
|
||||||
|
alignItems: 'flex-end',
|
||||||
|
marginBottom: spacing.xxl,
|
||||||
gap: spacing.sm,
|
gap: spacing.sm,
|
||||||
},
|
},
|
||||||
|
arcSlot: {
|
||||||
|
alignItems: 'center',
|
||||||
|
marginBottom: 0,
|
||||||
|
},
|
||||||
|
arcSlotCenter: {
|
||||||
|
marginBottom: 14,
|
||||||
|
},
|
||||||
actionBtn: {
|
actionBtn: {
|
||||||
paddingHorizontal: spacing.md,
|
flexDirection: 'column',
|
||||||
|
alignItems: 'center',
|
||||||
|
justifyContent: 'center',
|
||||||
|
minWidth: 56,
|
||||||
paddingVertical: spacing.sm,
|
paddingVertical: spacing.sm,
|
||||||
borderRadius: borderRadius.lg,
|
paddingHorizontal: spacing.sm,
|
||||||
|
borderRadius: borderRadius.xl,
|
||||||
backgroundColor: colors.flow.cardBackground,
|
backgroundColor: colors.flow.cardBackground,
|
||||||
borderWidth: 1,
|
borderWidth: 1,
|
||||||
borderColor: colors.flow.cardBorder,
|
borderColor: colors.flow.cardBorder,
|
||||||
|
...shadows.soft,
|
||||||
},
|
},
|
||||||
actionLabel: {
|
actionLabel: {
|
||||||
fontSize: 12,
|
fontSize: 11,
|
||||||
fontWeight: '600',
|
fontWeight: '600',
|
||||||
color: colors.flow.primary,
|
color: colors.flow.primary,
|
||||||
|
marginTop: 4,
|
||||||
textTransform: 'capitalize',
|
textTransform: 'capitalize',
|
||||||
},
|
},
|
||||||
|
talkLabel: {
|
||||||
|
color: colors.nautical.teal,
|
||||||
|
},
|
||||||
|
talkBtn: {
|
||||||
|
borderColor: colors.nautical.teal,
|
||||||
|
backgroundColor: colors.nautical.paleAqua,
|
||||||
|
},
|
||||||
|
webModal: {
|
||||||
|
flex: 1,
|
||||||
|
backgroundColor: colors.flow.cardBackground,
|
||||||
|
},
|
||||||
|
webModalHeader: {
|
||||||
|
flexDirection: 'row',
|
||||||
|
alignItems: 'center',
|
||||||
|
paddingHorizontal: spacing.sm,
|
||||||
|
paddingVertical: spacing.sm,
|
||||||
|
borderBottomWidth: 1,
|
||||||
|
borderBottomColor: colors.flow.cardBorder,
|
||||||
|
},
|
||||||
|
webModalClose: {
|
||||||
|
padding: spacing.xs,
|
||||||
|
marginRight: spacing.sm,
|
||||||
|
},
|
||||||
|
webModalTitle: {
|
||||||
|
fontSize: 18,
|
||||||
|
fontWeight: '600',
|
||||||
|
color: colors.flow.text,
|
||||||
|
flex: 1,
|
||||||
|
},
|
||||||
|
webView: {
|
||||||
|
flex: 1,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -51,7 +51,7 @@ export function PuppetView({ action, isTalking }: PuppetViewProps) {
|
|||||||
const loop = Animated.loop(
|
const loop = Animated.loop(
|
||||||
Animated.sequence([
|
Animated.sequence([
|
||||||
Animated.timing(smileScale, {
|
Animated.timing(smileScale, {
|
||||||
toValue: 1.12,
|
toValue: 1.18,
|
||||||
duration: 400,
|
duration: 400,
|
||||||
useNativeDriver: true,
|
useNativeDriver: true,
|
||||||
easing: Easing.out(Easing.ease),
|
easing: Easing.out(Easing.ease),
|
||||||
@@ -314,26 +314,26 @@ const styles = StyleSheet.create({
|
|||||||
backgroundColor: '#0c4a6e',
|
backgroundColor: '#0c4a6e',
|
||||||
},
|
},
|
||||||
mouthSmile: {
|
mouthSmile: {
|
||||||
width: 22,
|
width: 28,
|
||||||
height: 6,
|
height: 10,
|
||||||
borderBottomLeftRadius: 11,
|
borderBottomLeftRadius: 14,
|
||||||
borderBottomRightRadius: 11,
|
borderBottomRightRadius: 14,
|
||||||
borderTopLeftRadius: 0,
|
borderTopLeftRadius: 0,
|
||||||
borderTopRightRadius: 0,
|
borderTopRightRadius: 0,
|
||||||
},
|
},
|
||||||
mouthOpen: {
|
mouthOpen: {
|
||||||
width: 18,
|
width: 18,
|
||||||
height: 6,
|
height: 8,
|
||||||
top: BODY_SIZE * 0.51,
|
top: BODY_SIZE * 0.51,
|
||||||
borderRadius: 3,
|
borderRadius: 3,
|
||||||
backgroundColor: 'rgba(12, 74, 110, 0.9)',
|
backgroundColor: 'rgba(12, 74, 110, 0.9)',
|
||||||
},
|
},
|
||||||
mouthBigSmile: {
|
mouthBigSmile: {
|
||||||
width: 32,
|
width: 42,
|
||||||
height: 10,
|
height: 24,
|
||||||
top: BODY_SIZE * 0.51,
|
top: BODY_SIZE * 0.50,
|
||||||
borderBottomLeftRadius: 16,
|
borderBottomLeftRadius: 21,
|
||||||
borderBottomRightRadius: 16,
|
borderBottomRightRadius: 21,
|
||||||
borderTopLeftRadius: 0,
|
borderTopLeftRadius: 0,
|
||||||
borderTopRightRadius: 0,
|
borderTopRightRadius: 0,
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -58,6 +58,7 @@ export const API_ENDPOINTS = {
|
|||||||
AI: {
|
AI: {
|
||||||
PROXY: '/ai/proxy',
|
PROXY: '/ai/proxy',
|
||||||
GET_ROLES: '/get_ai_roles',
|
GET_ROLES: '/get_ai_roles',
|
||||||
|
SPEECH_TO_TEXT: '/ai/speech-to-text',
|
||||||
},
|
},
|
||||||
|
|
||||||
// Admin Operations
|
// Admin Operations
|
||||||
|
|||||||
@@ -24,11 +24,15 @@ import {
|
|||||||
FlatList,
|
FlatList,
|
||||||
Animated,
|
Animated,
|
||||||
Image,
|
Image,
|
||||||
|
Platform,
|
||||||
} from 'react-native';
|
} from 'react-native';
|
||||||
import { LinearGradient } from 'expo-linear-gradient';
|
import { LinearGradient } from 'expo-linear-gradient';
|
||||||
import { Ionicons, Feather, FontAwesome5 } from '@expo/vector-icons';
|
import { Ionicons, Feather, FontAwesome5 } from '@expo/vector-icons';
|
||||||
import * as ImagePicker from 'expo-image-picker';
|
import * as ImagePicker from 'expo-image-picker';
|
||||||
|
import { Audio } from 'expo-av';
|
||||||
|
import * as Speech from 'expo-speech';
|
||||||
import { AIRole } from '../types';
|
import { AIRole } from '../types';
|
||||||
|
import { speechToText } from '../services/voice.service';
|
||||||
import { colors, typography, spacing, borderRadius, shadows } from '../theme/colors';
|
import { colors, typography, spacing, borderRadius, shadows } from '../theme/colors';
|
||||||
import { aiService, AIMessage } from '../services/ai.service';
|
import { aiService, AIMessage } from '../services/ai.service';
|
||||||
import { langGraphService } from '../services/langgraph.service';
|
import { langGraphService } from '../services/langgraph.service';
|
||||||
@@ -43,6 +47,10 @@ import { Buffer } from 'buffer';
|
|||||||
import { FlowPuppetSlot } from '../components/puppet';
|
import { FlowPuppetSlot } from '../components/puppet';
|
||||||
import type { PuppetAction } from '../components/puppet';
|
import type { PuppetAction } from '../components/puppet';
|
||||||
|
|
||||||
|
const isWeb = Platform.OS === 'web';
|
||||||
|
const WebView = isWeb ? null : require('react-native-webview').WebView;
|
||||||
|
const TALK_WEB_URL = 'https://aistudio.google.com/apps/drive/1L39svCbfbRc48Eby64Q0rSbSoQZiWQBp?showPreview=true&showAssistant=true&fullscreenApplet=true';
|
||||||
|
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
// Type Definitions
|
// Type Definitions
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
@@ -86,6 +94,7 @@ export default function FlowScreen() {
|
|||||||
|
|
||||||
// History modal state
|
// History modal state
|
||||||
const [showHistoryModal, setShowHistoryModal] = useState(false);
|
const [showHistoryModal, setShowHistoryModal] = useState(false);
|
||||||
|
const [showTalkModal, setShowTalkModal] = useState(false);
|
||||||
const modalSlideAnim = useRef(new Animated.Value(0)).current;
|
const modalSlideAnim = useRef(new Animated.Value(0)).current;
|
||||||
|
|
||||||
// Summary state
|
// Summary state
|
||||||
@@ -102,6 +111,45 @@ export default function FlowScreen() {
|
|||||||
|
|
||||||
// AI multimodal puppet (optional; does not affect existing chat logic)
|
// AI multimodal puppet (optional; does not affect existing chat logic)
|
||||||
const [puppetAction, setPuppetAction] = useState<PuppetAction>('idle');
|
const [puppetAction, setPuppetAction] = useState<PuppetAction>('idle');
|
||||||
|
/** True while TTS is playing (puppet mouth open) */
|
||||||
|
const [isPuppetTalking, setIsPuppetTalking] = useState(false);
|
||||||
|
const recordingRef = useRef<Audio.Recording | null>(null);
|
||||||
|
|
||||||
|
/** True while user is actively typing; false after pause or send. Used to show feather vs send button. */
|
||||||
|
const [isTyping, setIsTyping] = useState(false);
|
||||||
|
const typingDebounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||||
|
const featherBounceAnim = useRef(new Animated.Value(0)).current;
|
||||||
|
|
||||||
|
const typingActiveRef = useRef(false);
|
||||||
|
// Feather bounce loop when user is typing (like writing with a quill)
|
||||||
|
useEffect(() => {
|
||||||
|
if (!isTyping) {
|
||||||
|
typingActiveRef.current = false;
|
||||||
|
featherBounceAnim.setValue(0);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
typingActiveRef.current = true;
|
||||||
|
const bounce = () => {
|
||||||
|
if (!typingActiveRef.current) return;
|
||||||
|
featherBounceAnim.setValue(0);
|
||||||
|
Animated.sequence([
|
||||||
|
Animated.timing(featherBounceAnim, { toValue: 1, duration: 220, useNativeDriver: true }),
|
||||||
|
Animated.timing(featherBounceAnim, { toValue: 0, duration: 220, useNativeDriver: true }),
|
||||||
|
]).start(({ finished }) => { if (finished && typingActiveRef.current) bounce(); });
|
||||||
|
};
|
||||||
|
bounce();
|
||||||
|
return () => {
|
||||||
|
typingActiveRef.current = false;
|
||||||
|
featherBounceAnim.stopAnimation();
|
||||||
|
};
|
||||||
|
}, [isTyping]);
|
||||||
|
|
||||||
|
const handleInputChange = (text: string) => {
|
||||||
|
setNewContent(text);
|
||||||
|
setIsTyping(true);
|
||||||
|
if (typingDebounceRef.current) clearTimeout(typingDebounceRef.current);
|
||||||
|
typingDebounceRef.current = setTimeout(() => setIsTyping(false), 1200);
|
||||||
|
};
|
||||||
|
|
||||||
const [chatHistory, setChatHistory] = useState<ChatSession[]>([
|
const [chatHistory, setChatHistory] = useState<ChatSession[]>([
|
||||||
// Sample history data
|
// Sample history data
|
||||||
@@ -271,6 +319,7 @@ export default function FlowScreen() {
|
|||||||
const hasText = !!newContent.trim();
|
const hasText = !!newContent.trim();
|
||||||
const hasImage = !!attachedImage;
|
const hasImage = !!attachedImage;
|
||||||
if ((!hasText && !hasImage) || isSending || !selectedRole) return;
|
if ((!hasText && !hasImage) || isSending || !selectedRole) return;
|
||||||
|
setIsTyping(false);
|
||||||
|
|
||||||
// Check authentication
|
// Check authentication
|
||||||
if (!token) {
|
if (!token) {
|
||||||
@@ -396,11 +445,106 @@ export default function FlowScreen() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle voice recording toggle
|
* Handle voice recording: start/stop, then STT -> chat -> TTS (puppet speaks).
|
||||||
|
* Does not affect existing text/image send logic.
|
||||||
*/
|
*/
|
||||||
const handleVoiceRecord = () => {
|
const handleVoiceRecord = async () => {
|
||||||
setIsRecording(!isRecording);
|
if (isRecording) {
|
||||||
// TODO: Implement voice recording functionality
|
// Stop recording and run voice pipeline
|
||||||
|
const recording = recordingRef.current;
|
||||||
|
if (!recording) {
|
||||||
|
setIsRecording(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await recording.stopAndUnloadAsync();
|
||||||
|
const uri = recording.getURI();
|
||||||
|
recordingRef.current = null;
|
||||||
|
setIsRecording(false);
|
||||||
|
if (!uri || !token || !selectedRole) return;
|
||||||
|
|
||||||
|
setIsSending(true);
|
||||||
|
setPuppetAction('think');
|
||||||
|
const transcribed = await speechToText(uri, token);
|
||||||
|
if (!transcribed.trim()) {
|
||||||
|
setIsSending(false);
|
||||||
|
setPuppetAction('idle');
|
||||||
|
Alert.alert('Voice', 'No speech detected. Try again.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const userMsg: ChatMessage = {
|
||||||
|
id: Date.now().toString(),
|
||||||
|
role: 'user',
|
||||||
|
content: transcribed,
|
||||||
|
createdAt: new Date(),
|
||||||
|
};
|
||||||
|
setMessages(prev => [...prev, userMsg]);
|
||||||
|
|
||||||
|
const history: (HumanMessage | LangChainAIMessage | SystemMessage)[] = messages.map(msg => {
|
||||||
|
if (msg.role === 'user') return new HumanMessage(msg.content);
|
||||||
|
return new LangChainAIMessage(msg.content);
|
||||||
|
});
|
||||||
|
const systemPrompt = new SystemMessage(selectedRole?.systemPrompt || '');
|
||||||
|
const currentMsg = new HumanMessage(transcribed);
|
||||||
|
const fullMessages = [systemPrompt, ...history, currentMsg];
|
||||||
|
const aiResponse = await langGraphService.execute(fullMessages, token);
|
||||||
|
|
||||||
|
const aiMsg: ChatMessage = {
|
||||||
|
id: (Date.now() + 1).toString(),
|
||||||
|
role: 'assistant',
|
||||||
|
content: aiResponse,
|
||||||
|
createdAt: new Date(),
|
||||||
|
};
|
||||||
|
setMessages(prev => [...prev, aiMsg]);
|
||||||
|
setIsSending(false);
|
||||||
|
setPuppetAction('idle');
|
||||||
|
|
||||||
|
setIsPuppetTalking(true);
|
||||||
|
Speech.speak(aiResponse, {
|
||||||
|
onDone: () => setIsPuppetTalking(false),
|
||||||
|
onStopped: () => setIsPuppetTalking(false),
|
||||||
|
onError: () => setIsPuppetTalking(false),
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Voice pipeline failed:', e);
|
||||||
|
setIsRecording(false);
|
||||||
|
setIsSending(false);
|
||||||
|
setPuppetAction('idle');
|
||||||
|
recordingRef.current = null;
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
if (msg.includes('503') || msg.includes('not configured')) {
|
||||||
|
Alert.alert('Voice', 'Speech-to-text is not configured (need active Gemini AI config). Use text input.');
|
||||||
|
} else {
|
||||||
|
Alert.alert('Voice', msg || 'Voice failed. Try again.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start recording
|
||||||
|
try {
|
||||||
|
const { status } = await Audio.requestPermissionsAsync();
|
||||||
|
if (status !== 'granted') {
|
||||||
|
Alert.alert('Permission', 'Microphone access is needed for voice.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await Audio.setAudioModeAsync({
|
||||||
|
allowsRecordingIOS: true,
|
||||||
|
playsInSilentModeIOS: true,
|
||||||
|
staysActiveInBackground: false,
|
||||||
|
shouldDuckAndroid: true,
|
||||||
|
playThroughEarpieceAndroid: false,
|
||||||
|
});
|
||||||
|
const { recording } = await Audio.Recording.createAsync(
|
||||||
|
Audio.RecordingOptionsPresets.HIGH_QUALITY
|
||||||
|
);
|
||||||
|
recordingRef.current = recording;
|
||||||
|
setIsRecording(true);
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Start recording failed:', e);
|
||||||
|
Alert.alert('Voice', 'Could not start recording.');
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -663,13 +807,16 @@ export default function FlowScreen() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Render empty state when no messages
|
* Render empty state when no messages: center puppet (same position as before feather), hide after send
|
||||||
*/
|
*/
|
||||||
const renderEmptyState = () => (
|
const renderEmptyState = () => (
|
||||||
<View style={styles.emptyState}>
|
<View style={styles.emptyState}>
|
||||||
<View style={styles.emptyIcon}>
|
<FlowPuppetSlot
|
||||||
<Feather name="feather" size={48} color={colors.nautical.seafoam} />
|
currentAction={puppetAction}
|
||||||
</View>
|
isTalking={isSending || isPuppetTalking}
|
||||||
|
onAction={setPuppetAction}
|
||||||
|
showActionButtons={true}
|
||||||
|
/>
|
||||||
<Text style={styles.emptyTitle}>Chatting with {selectedRole?.name || 'AI'}</Text>
|
<Text style={styles.emptyTitle}>Chatting with {selectedRole?.name || 'AI'}</Text>
|
||||||
<Text style={styles.emptySubtitle}>
|
<Text style={styles.emptySubtitle}>
|
||||||
{selectedRole?.description || 'Loading AI Assistant...'}
|
{selectedRole?.description || 'Loading AI Assistant...'}
|
||||||
@@ -765,14 +912,6 @@ export default function FlowScreen() {
|
|||||||
</TouchableOpacity>
|
</TouchableOpacity>
|
||||||
</View>
|
</View>
|
||||||
|
|
||||||
{/* AI multimodal puppet (optional slot; code in components/puppet) */}
|
|
||||||
<FlowPuppetSlot
|
|
||||||
currentAction={puppetAction}
|
|
||||||
isTalking={isSending}
|
|
||||||
onAction={setPuppetAction}
|
|
||||||
showActionButtons={true}
|
|
||||||
/>
|
|
||||||
|
|
||||||
{/* Chat Messages */}
|
{/* Chat Messages */}
|
||||||
<ScrollView
|
<ScrollView
|
||||||
ref={scrollViewRef}
|
ref={scrollViewRef}
|
||||||
@@ -837,32 +976,80 @@ export default function FlowScreen() {
|
|||||||
placeholder={attachedImage ? '输入对图片的说明(可选)...' : 'Message...'}
|
placeholder={attachedImage ? '输入对图片的说明(可选)...' : 'Message...'}
|
||||||
placeholderTextColor={colors.flow.textSecondary}
|
placeholderTextColor={colors.flow.textSecondary}
|
||||||
value={newContent}
|
value={newContent}
|
||||||
onChangeText={setNewContent}
|
onChangeText={handleInputChange}
|
||||||
multiline
|
multiline
|
||||||
maxLength={500}
|
maxLength={500}
|
||||||
/>
|
/>
|
||||||
</View>
|
</View>
|
||||||
|
|
||||||
{/* Send or Voice button: show send when has text or attached image */}
|
{/* Send / typing feather / Voice: feather while typing, send when paused or done, mic when empty */}
|
||||||
{newContent.trim() || attachedImage || isSending ? (
|
{(newContent.trim() || attachedImage) || isSending ? (
|
||||||
|
isSending ? (
|
||||||
<TouchableOpacity
|
<TouchableOpacity
|
||||||
style={[styles.sendButton, isSending && styles.sendButtonDisabled]}
|
style={[styles.sendButton, styles.sendButtonDisabled]}
|
||||||
onPress={handleSendMessage}
|
disabled
|
||||||
activeOpacity={0.8}
|
|
||||||
disabled={isSending}
|
|
||||||
>
|
>
|
||||||
<LinearGradient
|
<LinearGradient
|
||||||
colors={[colors.nautical.teal, colors.nautical.seafoam]}
|
colors={[colors.nautical.teal, colors.nautical.seafoam]}
|
||||||
style={styles.sendButtonGradient}
|
style={styles.sendButtonGradient}
|
||||||
>
|
>
|
||||||
{isSending ? (
|
|
||||||
<ActivityIndicator size="small" color="#fff" />
|
<ActivityIndicator size="small" color="#fff" />
|
||||||
) : (
|
|
||||||
<Ionicons name="arrow-up" size={20} color="#fff" />
|
|
||||||
)}
|
|
||||||
</LinearGradient>
|
</LinearGradient>
|
||||||
</TouchableOpacity>
|
</TouchableOpacity>
|
||||||
|
) : (newContent.trim() || attachedImage) && isTyping ? (
|
||||||
|
<View style={styles.sendButton}>
|
||||||
|
<View
|
||||||
|
style={[
|
||||||
|
styles.sendButtonGradient,
|
||||||
|
{ backgroundColor: colors.nautical.teal },
|
||||||
|
]}
|
||||||
|
/>
|
||||||
|
<Animated.View
|
||||||
|
style={[
|
||||||
|
StyleSheet.absoluteFill,
|
||||||
|
styles.sendButtonGradient,
|
||||||
|
{
|
||||||
|
transform: [{
|
||||||
|
translateY: featherBounceAnim.interpolate({
|
||||||
|
inputRange: [0, 1],
|
||||||
|
outputRange: [0, -5],
|
||||||
|
}),
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
>
|
||||||
|
<Feather name="feather" size={20} color="#fff" />
|
||||||
|
</Animated.View>
|
||||||
|
</View>
|
||||||
) : (
|
) : (
|
||||||
|
<TouchableOpacity
|
||||||
|
style={styles.sendButton}
|
||||||
|
onPress={handleSendMessage}
|
||||||
|
activeOpacity={0.8}
|
||||||
|
>
|
||||||
|
<LinearGradient
|
||||||
|
colors={[colors.nautical.teal, colors.nautical.seafoam]}
|
||||||
|
style={styles.sendButtonGradient}
|
||||||
|
>
|
||||||
|
<Ionicons name="arrow-up" size={20} color="#fff" />
|
||||||
|
</LinearGradient>
|
||||||
|
</TouchableOpacity>
|
||||||
|
)
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<TouchableOpacity
|
||||||
|
style={styles.inputBarButton}
|
||||||
|
onPress={() => {
|
||||||
|
if (isWeb && typeof (globalThis as any).window !== 'undefined') {
|
||||||
|
(globalThis as any).window.location.href = TALK_WEB_URL;
|
||||||
|
} else {
|
||||||
|
setShowTalkModal(true);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
activeOpacity={0.7}
|
||||||
|
>
|
||||||
|
<Feather name="smile" size={22} color={colors.flow.textSecondary} />
|
||||||
|
</TouchableOpacity>
|
||||||
<TouchableOpacity
|
<TouchableOpacity
|
||||||
style={[styles.inputBarButton, isRecording && styles.recordingButton]}
|
style={[styles.inputBarButton, isRecording && styles.recordingButton]}
|
||||||
onPress={handleVoiceRecord}
|
onPress={handleVoiceRecord}
|
||||||
@@ -874,12 +1061,42 @@ export default function FlowScreen() {
|
|||||||
color={isRecording ? '#fff' : colors.flow.textSecondary}
|
color={isRecording ? '#fff' : colors.flow.textSecondary}
|
||||||
/>
|
/>
|
||||||
</TouchableOpacity>
|
</TouchableOpacity>
|
||||||
|
</>
|
||||||
)}
|
)}
|
||||||
</View>
|
</View>
|
||||||
</View>
|
</View>
|
||||||
</SafeAreaView>
|
</SafeAreaView>
|
||||||
</LinearGradient>
|
</LinearGradient>
|
||||||
|
|
||||||
|
{/* Talk / AI Studio modal (from smiley button on native) */}
|
||||||
|
<Modal
|
||||||
|
visible={showTalkModal}
|
||||||
|
animationType="slide"
|
||||||
|
onRequestClose={() => setShowTalkModal(false)}
|
||||||
|
>
|
||||||
|
<SafeAreaView style={styles.talkModal}>
|
||||||
|
<View style={styles.talkModalHeader}>
|
||||||
|
<TouchableOpacity
|
||||||
|
style={styles.talkModalClose}
|
||||||
|
onPress={() => setShowTalkModal(false)}
|
||||||
|
hitSlop={{ top: 12, bottom: 12, left: 12, right: 12 }}
|
||||||
|
>
|
||||||
|
<Ionicons name="close" size={28} color={colors.flow.text} />
|
||||||
|
</TouchableOpacity>
|
||||||
|
<Text style={styles.talkModalTitle} numberOfLines={1}>AI Studio Talk</Text>
|
||||||
|
</View>
|
||||||
|
{WebView ? (
|
||||||
|
<WebView
|
||||||
|
source={{ uri: TALK_WEB_URL }}
|
||||||
|
style={styles.talkWebView}
|
||||||
|
onError={(e: any) => console.warn('WebView error:', e.nativeEvent)}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<View style={styles.talkWebView} />
|
||||||
|
)}
|
||||||
|
</SafeAreaView>
|
||||||
|
</Modal>
|
||||||
|
|
||||||
<Modal
|
<Modal
|
||||||
visible={showHistoryModal}
|
visible={showHistoryModal}
|
||||||
animationType="none"
|
animationType="none"
|
||||||
@@ -1594,6 +1811,31 @@ const styles = StyleSheet.create({
|
|||||||
},
|
},
|
||||||
|
|
||||||
// Modal styles
|
// Modal styles
|
||||||
|
talkModal: {
|
||||||
|
flex: 1,
|
||||||
|
backgroundColor: colors.flow.cardBackground,
|
||||||
|
},
|
||||||
|
talkModalHeader: {
|
||||||
|
flexDirection: 'row',
|
||||||
|
alignItems: 'center',
|
||||||
|
paddingHorizontal: spacing.sm,
|
||||||
|
paddingVertical: spacing.sm,
|
||||||
|
borderBottomWidth: 1,
|
||||||
|
borderBottomColor: colors.flow.cardBorder,
|
||||||
|
},
|
||||||
|
talkModalClose: {
|
||||||
|
padding: spacing.xs,
|
||||||
|
marginRight: spacing.sm,
|
||||||
|
},
|
||||||
|
talkModalTitle: {
|
||||||
|
fontSize: 18,
|
||||||
|
fontWeight: '600',
|
||||||
|
color: colors.flow.text,
|
||||||
|
flex: 1,
|
||||||
|
},
|
||||||
|
talkWebView: {
|
||||||
|
flex: 1,
|
||||||
|
},
|
||||||
modalOverlay: {
|
modalOverlay: {
|
||||||
flex: 1,
|
flex: 1,
|
||||||
backgroundColor: 'rgba(26, 58, 74, 0.4)',
|
backgroundColor: 'rgba(26, 58, 74, 0.4)',
|
||||||
|
|||||||
@@ -29,3 +29,4 @@ export {
|
|||||||
type CreateVaultPayloadResult,
|
type CreateVaultPayloadResult,
|
||||||
type CreateAssetPayloadResult,
|
type CreateAssetPayloadResult,
|
||||||
} from './vault.service';
|
} from './vault.service';
|
||||||
|
export { speechToText, type SpeechToTextResult } from './voice.service';
|
||||||
|
|||||||
66
src/services/voice.service.ts
Normal file
66
src/services/voice.service.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
/**
|
||||||
|
* Voice Service
|
||||||
|
* Speech-to-text for puppet voice interaction (record -> STT -> chat -> TTS).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { NO_BACKEND_MODE, API_ENDPOINTS, buildApiUrl, logApiDebug } from '../config';
|
||||||
|
|
||||||
|
/** Shape of the backend speech-to-text response payload. */
export interface SpeechToTextResult {
  text: string; // transcribed text returned by the STT endpoint
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send recorded audio to backend for transcription (OpenAI Whisper).
|
||||||
|
* @param audioUri - Local file URI from expo-av recording (e.g. file:///.../recording.m4a)
|
||||||
|
* @param token - JWT for auth
|
||||||
|
* @returns Transcribed text, or empty string on failure/not configured
|
||||||
|
*/
|
||||||
|
export async function speechToText(audioUri: string, token?: string): Promise<string> {
|
||||||
|
if (NO_BACKEND_MODE) {
|
||||||
|
logApiDebug('Voice', 'Using mock STT');
|
||||||
|
return 'Mock voice input (backend not connected)';
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = buildApiUrl(API_ENDPOINTS.AI.SPEECH_TO_TEXT);
|
||||||
|
const headers: Record<string, string> = {};
|
||||||
|
if (token) headers['Authorization'] = `Bearer ${token}`;
|
||||||
|
// Do not set Content-Type; FormData sets multipart boundary
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
(formData as any).append('file', {
|
||||||
|
uri: audioUri,
|
||||||
|
name: 'voice.m4a',
|
||||||
|
type: 'audio/m4a',
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers,
|
||||||
|
body: formData,
|
||||||
|
});
|
||||||
|
logApiDebug('Voice STT Status', response.status);
|
||||||
|
|
||||||
|
if (response.status === 503) {
|
||||||
|
const d = await response.json().catch(() => ({}));
|
||||||
|
throw new Error(d.detail || 'Speech-to-text not configured');
|
||||||
|
}
|
||||||
|
if (!response.ok) {
|
||||||
|
const errText = await response.text();
|
||||||
|
let detail = errText;
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(errText);
|
||||||
|
detail = data.detail || errText;
|
||||||
|
} catch {}
|
||||||
|
throw new Error(detail);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
const text = (data.text ?? '').trim();
|
||||||
|
logApiDebug('Voice STT', { length: text.length });
|
||||||
|
return text;
|
||||||
|
} catch (e) {
|
||||||
|
logApiDebug('Voice STT Error', e);
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user