refact: android audio input, voice call (#8037)
Signed-off-by: fufesou <shuanglongchen@yeah.net>
parent d70b0cdd4f
commit 0500bf070e
@@ -0,0 +1,200 @@
package com.carriez.flutter_hbb

import ffi.FFI

import android.Manifest
import android.content.Context
import android.media.*
import android.content.pm.PackageManager
import android.media.projection.MediaProjection
import androidx.annotation.RequiresApi
import androidx.core.app.ActivityCompat
import android.os.Build
import android.util.Log
import kotlin.concurrent.thread

const val AUDIO_ENCODING = AudioFormat.ENCODING_PCM_FLOAT // ENCODING_OPUS need API 30
const val AUDIO_SAMPLE_RATE = 48000
const val AUDIO_CHANNEL_MASK = AudioFormat.CHANNEL_IN_STEREO

class AudioRecordHandle(private var context: Context, private var isVideoStart: ()->Boolean, private var isAudioStart: ()->Boolean) {
    private val logTag = "LOG_AUDIO_RECORD_HANDLE"

    private var audioRecorder: AudioRecord? = null
    private var audioReader: AudioReader? = null
    private var minBufferSize = 0
    private var audioRecordStat = false
    private var audioThread: Thread? = null

    @RequiresApi(Build.VERSION_CODES.M)
    fun createAudioRecorder(inVoiceCall: Boolean, mediaProjection: MediaProjection?): Boolean {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            return false
        }
        if (ActivityCompat.checkSelfPermission(
                context,
                Manifest.permission.RECORD_AUDIO
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            Log.d(logTag, "createAudioRecorder failed, no RECORD_AUDIO permission")
            return false
        }

        var builder = AudioRecord.Builder()
            .setAudioFormat(
                AudioFormat.Builder()
                    .setEncoding(AUDIO_ENCODING)
                    .setSampleRate(AUDIO_SAMPLE_RATE)
                    .setChannelMask(AUDIO_CHANNEL_MASK).build()
            );
        if (inVoiceCall) {
            builder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
        } else {
            mediaProjection?.let {
                var apcc = AudioPlaybackCaptureConfiguration.Builder(it)
                    .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                    .addMatchingUsage(AudioAttributes.USAGE_ALARM)
                    .addMatchingUsage(AudioAttributes.USAGE_GAME)
                    .addMatchingUsage(AudioAttributes.USAGE_UNKNOWN).build();
                builder.setAudioPlaybackCaptureConfig(apcc);
            } ?: let {
                Log.d(logTag, "createAudioRecorder failed, mediaProjection null")
                return false
            }
        }
        audioRecorder = builder.build()
        Log.d(logTag, "createAudioRecorder done,minBufferSize:$minBufferSize")
        return true
    }

    @RequiresApi(Build.VERSION_CODES.M)
    private fun checkAudioReader() {
        if (audioReader != null && minBufferSize != 0) {
            return
        }
        // read f32 to byte , length * 4
        minBufferSize = 2 * 4 * AudioRecord.getMinBufferSize(
            AUDIO_SAMPLE_RATE,
            AUDIO_CHANNEL_MASK,
            AUDIO_ENCODING
        )
        if (minBufferSize == 0) {
            Log.d(logTag, "get min buffer size fail!")
            return
        }
        audioReader = AudioReader(minBufferSize, 4)
        Log.d(logTag, "init audioData len:$minBufferSize")
    }

    @RequiresApi(Build.VERSION_CODES.M)
    fun startAudioRecorder() {
        checkAudioReader()
        if (audioReader != null && audioRecorder != null && minBufferSize != 0) {
            try {
                FFI.setFrameRawEnable("audio", true)
                audioRecorder!!.startRecording()
                audioRecordStat = true
                audioThread = thread {
                    while (audioRecordStat) {
                        audioReader!!.readSync(audioRecorder!!)?.let {
                            FFI.onAudioFrameUpdate(it)
                        }
                    }
                    // let's release here rather than onDestroy to avoid threading issue
                    audioRecorder?.release()
                    audioRecorder = null
                    minBufferSize = 0
                    FFI.setFrameRawEnable("audio", false)
                    Log.d(logTag, "Exit audio thread")
                }
            } catch (e: Exception) {
                Log.d(logTag, "startAudioRecorder fail:$e")
            }
        } else {
            Log.d(logTag, "startAudioRecorder fail")
        }
    }

    fun onVoiceCallStarted(mediaProjection: MediaProjection?): Boolean {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
            return false
        }
        if (isVideoStart() || isAudioStart()) {
            if (!switchToVoiceCall(mediaProjection)) {
                return false
            }
        } else {
            if (!switchToVoiceCall(mediaProjection)) {
                return false
            }
        }
        return true
    }

    fun onVoiceCallClosed(mediaProjection: MediaProjection?): Boolean {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
            return false
        }
        if (isVideoStart()) {
            switchOutVoiceCall(mediaProjection)
        }
        tryReleaseAudio()
        return true
    }

    @RequiresApi(Build.VERSION_CODES.M)
    fun switchToVoiceCall(mediaProjection: MediaProjection?): Boolean {
        audioRecorder?.let {
            if (it.getAudioSource() == MediaRecorder.AudioSource.VOICE_COMMUNICATION) {
                return true
            }
        }
        audioRecordStat = false
        audioThread?.join()
        audioThread = null

        if (!createAudioRecorder(true, mediaProjection)) {
            Log.e(logTag, "createAudioRecorder fail")
            return false
        }
        startAudioRecorder()
        return true
    }

    @RequiresApi(Build.VERSION_CODES.M)
    fun switchOutVoiceCall(mediaProjection: MediaProjection?): Boolean {
        audioRecorder?.let {
            if (it.getAudioSource() != MediaRecorder.AudioSource.VOICE_COMMUNICATION) {
                return true
            }
        }
        audioRecordStat = false
        audioThread?.join()

        if (!createAudioRecorder(false, mediaProjection)) {
            Log.e(logTag, "createAudioRecorder fail")
            return false
        }
        startAudioRecorder()
        return true
    }

    fun tryReleaseAudio() {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
            return
        }
        if (isAudioStart() || isVideoStart()) {
            return
        }
        audioRecordStat = false
        audioThread?.join()
        audioThread = null
    }

    fun destroy() {
        Log.d(logTag, "destroy audio record handle")

        audioRecordStat = false
        audioThread?.join()
    }
}
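A minimal usage sketch (not part of this commit) of how a caller such as MainService is expected to drive the new handle; it uses only the methods defined above, and the variables context, isStart, isAudioStart and mediaProjection are assumed to exist in the caller:

// Sketch only, under the assumptions stated above.
val handle = AudioRecordHandle(context, { isStart }, { isAudioStart })
if (handle.createAudioRecorder(false, mediaProjection)) { // false = capture playback, true = voice call
    handle.startAudioRecorder()                           // pushes frames via FFI.onAudioFrameUpdate
}
handle.switchToVoiceCall(mediaProjection)                 // rebuild the recorder from VOICE_COMMUNICATION
handle.switchOutVoiceCall(mediaProjection)                // back to playback capture
handle.tryReleaseAudio()                                  // stop the reader thread once nothing is capturing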
@@ -42,6 +42,9 @@ class MainActivity : FlutterActivity() {
    private val logTag = "mMainActivity"
    private var mainService: MainService? = null

    private var isAudioStart = false
    private val audioRecordHandle = AudioRecordHandle(this, { false }, { isAudioStart })

    override fun configureFlutterEngine(flutterEngine: FlutterEngine) {
        super.configureFlutterEngine(flutterEngine)
        if (MainService.isReady) {
@@ -230,6 +233,12 @@ class MainActivity : FlutterActivity() {
                    result.success(false)
                }
            }
            "on_voice_call_started" -> {
                onVoiceCallStarted()
            }
            "on_voice_call_closed" -> {
                onVoiceCallClosed()
            }
            else -> {
                result.error("-1", "No such method", null)
            }
@@ -319,4 +328,44 @@ class MainActivity : FlutterActivity() {
        result.put("codecs", codecArray)
        FFI.setCodecInfo(result.toString())
    }

    private fun onVoiceCallStarted() {
        var ok = false
        mainService?.let {
            ok = it.onVoiceCallStarted()
        } ?: let {
            isAudioStart = true
            ok = audioRecordHandle.onVoiceCallStarted(null)
        }
        if (!ok) {
            // Rarely happens, So we just add log and msgbox here.
            Log.e(logTag, "onVoiceCallStarted fail")
            flutterMethodChannel?.invokeMethod("msgbox", mapOf(
                "type" to "custom-nook-nocancel-hasclose-error",
                "title" to "Voice call",
                "text" to "Failed to start voice call."))
        } else {
            Log.d(logTag, "onVoiceCallStarted success")
        }
    }

    private fun onVoiceCallClosed() {
        var ok = false
        mainService?.let {
            ok = it.onVoiceCallClosed()
        } ?: let {
            isAudioStart = false
            ok = audioRecordHandle.onVoiceCallClosed(null)
        }
        if (!ok) {
            // Rarely happens, So we just add log and msgbox here.
            Log.e(logTag, "onVoiceCallClosed fail")
            flutterMethodChannel?.invokeMethod("msgbox", mapOf(
                "type" to "custom-nook-nocancel-hasclose-error",
                "title" to "Voice call",
                "text" to "Failed to stop voice call."))
        } else {
            Log.d(logTag, "onVoiceCallClosed success")
        }
    }
}
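For context, a sketch (not part of this diff) of where the two new branches sit: MainActivity's existing MethodChannel handler dispatches on call.method, so the Flutter side only needs to invoke the matching names. Registration details are assumed, only the dispatch shape is shown:

// Sketch of the surrounding handler; the actual registration code is not in this diff.
flutterMethodChannel?.setMethodCallHandler { call, result ->
    when (call.method) {
        "on_voice_call_started" -> onVoiceCallStarted()
        "on_voice_call_closed" -> onVoiceCallClosed()
        else -> result.error("-1", "No such method", null)
    }
}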
@@ -58,11 +58,6 @@ const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_VP9
const val VIDEO_KEY_BIT_RATE = 1024_000
const val VIDEO_KEY_FRAME_RATE = 30

// audio const
const val AUDIO_ENCODING = AudioFormat.ENCODING_PCM_FLOAT // ENCODING_OPUS need API 30
const val AUDIO_SAMPLE_RATE = 48000
const val AUDIO_CHANNEL_MASK = AudioFormat.CHANNEL_IN_STEREO

class MainService : Service() {

    @Keep
@@ -138,6 +133,39 @@ class MainService : Service() {
                    e.printStackTrace()
                }
            }
            "update_voice_call_state" -> {
                try {
                    val jsonObject = JSONObject(arg1)
                    val id = jsonObject["id"] as Int
                    val username = jsonObject["name"] as String
                    val peerId = jsonObject["peer_id"] as String
                    val inVoiceCall = jsonObject["in_voice_call"] as Boolean
                    val incomingVoiceCall = jsonObject["incoming_voice_call"] as Boolean
                    if (!inVoiceCall) {
                        if (incomingVoiceCall) {
                            voiceCallRequestNotification(id, "Voice Call Request", username, peerId)
                        } else {
                            if (!audioRecordHandle.switchOutVoiceCall(mediaProjection)) {
                                Log.e(logTag, "switchOutVoiceCall fail")
                                MainActivity.flutterMethodChannel?.invokeMethod("msgbox", mapOf(
                                    "type" to "custom-nook-nocancel-hasclose-error",
                                    "title" to "Voice call",
                                    "text" to "Failed to switch out voice call."))
                            }
                        }
                    } else {
                        if (!audioRecordHandle.switchToVoiceCall(mediaProjection)) {
                            Log.e(logTag, "switchToVoiceCall fail")
                            MainActivity.flutterMethodChannel?.invokeMethod("msgbox", mapOf(
                                "type" to "custom-nook-nocancel-hasclose-error",
                                "title" to "Voice call",
                                "text" to "Failed to switch to voice call."))
                        }
                    }
                } catch (e: JSONException) {
                    e.printStackTrace()
                }
            }
            "stop_capture" -> {
                Log.d(logTag, "from rust:stop_capture")
                stopCapture()
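The "update_voice_call_state" branch above expects arg1 to be a JSON object describing the client. A sketch of such a payload; the field names come from the handler above, while the values are invented for illustration:

// Invented example values; only the field names are taken from the handler above.
val arg1 = """{"id": 1, "name": "alice", "peer_id": "123456789", "in_voice_call": true, "incoming_voice_call": false}"""
val jsonObject = JSONObject(arg1)
// true -> switchToVoiceCall; false with incoming_voice_call -> show the request notification.
val inVoiceCall = jsonObject["in_voice_call"] as Boolean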
@@ -161,10 +189,13 @@ class MainService : Service() {
    companion object {
        private var _isReady = false // media permission ready status
        private var _isStart = false // screen capture start status
        private var _isAudioStart = false // audio capture start status
        val isReady: Boolean
            get() = _isReady
        val isStart: Boolean
            get() = _isStart
        val isAudioStart: Boolean
            get() = _isAudioStart
    }

    private val logTag = "LOG_SERVICE"
@@ -182,10 +213,7 @@ class MainService : Service() {
    private var virtualDisplay: VirtualDisplay? = null

    // audio
    private var audioRecorder: AudioRecord? = null
    private var audioReader: AudioReader? = null
    private var minBufferSize = 0
    private var audioRecordStat = false
    private val audioRecordHandle = AudioRecordHandle(this, { isStart }, { isAudioStart })

    // notification
    private lateinit var notificationManager: NotificationManager
@@ -349,6 +377,14 @@ class MainService : Service() {
        }
    }

    fun onVoiceCallStarted(): Boolean {
        return audioRecordHandle.onVoiceCallStarted(mediaProjection)
    }

    fun onVoiceCallClosed(): Boolean {
        return audioRecordHandle.onVoiceCallClosed(mediaProjection)
    }

    fun startCapture(): Boolean {
        if (isStart) {
            return true
@@ -369,12 +405,16 @@ class MainService : Service() {
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
            startAudioRecorder()
            if (!audioRecordHandle.createAudioRecorder(false, mediaProjection)) {
                Log.d(logTag, "createAudioRecorder fail")
            } else {
                Log.d(logTag, "audio recorder start")
                audioRecordHandle.startAudioRecorder()
            }
        }
        checkMediaPermission()
        _isStart = true
        FFI.setFrameRawEnable("video",true)
        FFI.setFrameRawEnable("audio",true)
        return true
    }

@@ -382,7 +422,6 @@ class MainService : Service() {
    fun stopCapture() {
        Log.d(logTag, "Stop Capture")
        FFI.setFrameRawEnable("video",false)
        FFI.setFrameRawEnable("audio",false)
        _isStart = false
        // release video
        if (reuseVirtualDisplay) {
@@ -411,12 +450,14 @@ class MainService : Service() {
        surface?.release()

        // release audio
        audioRecordStat = false
        _isAudioStart = false
        audioRecordHandle.tryReleaseAudio()
    }

    fun destroy() {
        Log.d(logTag, "destroy service")
        _isReady = false
        _isAudioStart = false

        stopCapture()

@@ -514,7 +555,6 @@ class MainService : Service() {
        }
    }


    private fun createMediaCodec() {
        Log.d(logTag, "MediaFormat.MIMETYPE_VIDEO_VP9 :$MIME_TYPE")
        videoEncoder = MediaCodec.createEncoderByType(MIME_TYPE)
@@ -534,80 +574,6 @@ class MainService : Service() {
        }
    }

    @RequiresApi(Build.VERSION_CODES.M)
    private fun startAudioRecorder() {
        checkAudioRecorder()
        if (audioReader != null && audioRecorder != null && minBufferSize != 0) {
            try {
                audioRecorder!!.startRecording()
                audioRecordStat = true
                thread {
                    while (audioRecordStat) {
                        audioReader!!.readSync(audioRecorder!!)?.let {
                            FFI.onAudioFrameUpdate(it)
                        }
                    }
                    // let's release here rather than onDestroy to avoid threading issue
                    audioRecorder?.release()
                    audioRecorder = null
                    minBufferSize = 0
                    Log.d(logTag, "Exit audio thread")
                }
            } catch (e: Exception) {
                Log.d(logTag, "startAudioRecorder fail:$e")
            }
        } else {
            Log.d(logTag, "startAudioRecorder fail")
        }
    }

    @RequiresApi(Build.VERSION_CODES.M)
    private fun checkAudioRecorder() {
        if (audioRecorder != null && audioRecorder != null && minBufferSize != 0) {
            return
        }
        // read f32 to byte , length * 4
        minBufferSize = 2 * 4 * AudioRecord.getMinBufferSize(
            AUDIO_SAMPLE_RATE,
            AUDIO_CHANNEL_MASK,
            AUDIO_ENCODING
        )
        if (minBufferSize == 0) {
            Log.d(logTag, "get min buffer size fail!")
            return
        }
        audioReader = AudioReader(minBufferSize, 4)
        Log.d(logTag, "init audioData len:$minBufferSize")
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            mediaProjection?.let {
                val apcc = AudioPlaybackCaptureConfiguration.Builder(it)
                    .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                    .addMatchingUsage(AudioAttributes.USAGE_ALARM)
                    .addMatchingUsage(AudioAttributes.USAGE_GAME)
                    .addMatchingUsage(AudioAttributes.USAGE_UNKNOWN).build()
                if (ActivityCompat.checkSelfPermission(
                        this,
                        Manifest.permission.RECORD_AUDIO
                    ) != PackageManager.PERMISSION_GRANTED
                ) {
                    return
                }
                audioRecorder = AudioRecord.Builder()
                    .setAudioFormat(
                        AudioFormat.Builder()
                            .setEncoding(AUDIO_ENCODING)
                            .setSampleRate(AUDIO_SAMPLE_RATE)
                            .setChannelMask(AUDIO_CHANNEL_MASK).build()
                    )
                    .setAudioPlaybackCaptureConfig(apcc)
                    .setBufferSizeInBytes(minBufferSize).build()
                Log.d(logTag, "createAudioRecorder done,minBufferSize:$minBufferSize")
                return
            }
        }
        Log.d(logTag, "createAudioRecorder fail")
    }

    private fun initNotification() {
        notificationManager = getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager
        notificationChannel = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
@@ -692,6 +658,21 @@ class MainService : Service() {
        notificationManager.notify(getClientNotifyID(clientID), notification)
    }

    private fun voiceCallRequestNotification(
        clientID: Int,
        type: String,
        username: String,
        peerId: String
    ) {
        val notification = notificationBuilder
            .setOngoing(false)
            .setPriority(NotificationCompat.PRIORITY_MAX)
            .setContentTitle(translate("Do you accept?"))
            .setContentText("$type:$username-$peerId")
            .build()
        notificationManager.notify(getClientNotifyID(clientID), notification)
    }

    private fun getClientNotifyID(clientID: Int): Int {
        return clientID + NOTIFY_ID_OFFSET
    }
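A usage note (illustrative, not from the diff): voiceCallRequestNotification above is what the "update_voice_call_state" branch calls when a request is pending but not yet accepted; with made-up values the call looks like:

// Hypothetical values for illustration only.
voiceCallRequestNotification(1, "Voice Call Request", "alice", "123456789")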
@@ -8,6 +8,7 @@ import 'package:flutter_hbb/consts.dart';
import 'package:flutter_hbb/mobile/widgets/gesture_help.dart';
import 'package:flutter_hbb/models/chat_model.dart';
import 'package:flutter_keyboard_visibility/flutter_keyboard_visibility.dart';
import 'package:flutter_svg/svg.dart';
import 'package:get/get.dart';
import 'package:provider/provider.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
@@ -79,7 +80,7 @@ class _RemotePageState extends State<RemotePage> {
    initSharedStates(widget.id);
    gFFI.chatModel
        .changeCurrentKey(MessageKey(widget.id, ChatModel.clientModeID));

    gFFI.chatModel.voiceCallStatus.value = VoiceCallStatus.notStarted;
    _blockableOverlayState.applyFfi(gFFI);
  }

@@ -102,6 +103,11 @@ class _RemotePageState extends State<RemotePage> {
    }
    await keyboardSubscription.cancel();
    removeSharedStates(widget.id);
    if (isAndroid) {
      // Only one client is considered here for now.
      // TODO: take into account the case where there are multiple clients
      gFFI.invokeMethod("on_voice_call_closed");
    }
  }

  // to-do: It should be better to use transparent color instead of the bgColor.
@@ -369,9 +375,7 @@ class _RemotePageState extends State<RemotePage> {
              onPressed: () {
                clientClose(sessionId, gFFI.dialogManager);
              },
            )
          ] +
          <Widget>[
            ),
            IconButton(
              color: Colors.white,
              icon: Icon(Icons.tv),
@@ -416,11 +420,9 @@ class _RemotePageState extends State<RemotePage> {
            IconButton(
              color: Colors.white,
              icon: Icon(Icons.message),
              onPressed: () {
                gFFI.chatModel.changeCurrentKey(MessageKey(
                    widget.id, ChatModel.clientModeID));
                gFFI.chatModel.toggleChatOverlay();
              },
              onPressed: () => isAndroid
                  ? showChatOptions(widget.id)
                  : onPressedTextChat(widget.id),
            )
          ]) +
          [
@@ -538,6 +540,82 @@ class _RemotePageState extends State<RemotePage> {
    }();
  }

  onPressedTextChat(String id) {
    gFFI.chatModel.changeCurrentKey(MessageKey(id, ChatModel.clientModeID));
    gFFI.chatModel.toggleChatOverlay();
  }

  showChatOptions(String id) async {
    onPressVoiceCall() => bind.sessionRequestVoiceCall(sessionId: sessionId);
    onPressEndVoiceCall() => bind.sessionCloseVoiceCall(sessionId: sessionId);

    makeTextMenu(String label, String svg, VoidCallback onPressed,
            {ColorFilter? colorFilter, TextStyle? labelStyle}) =>
        TTextMenu(
          child: Text(translate(label), style: labelStyle),
          trailingIcon: Transform.scale(
            scale: (isDesktop || isWebDesktop) ? 0.8 : 1,
            child: IconButton(
              onPressed: onPressed,
              icon: SvgPicture.asset(
                svg,
                colorFilter: colorFilter ??
                    ColorFilter.mode(MyTheme.accent, BlendMode.srcIn),
              ),
            ),
          ),
          onPressed: onPressed,
        );

    final isInVoice = [
      VoiceCallStatus.waitingForResponse,
      VoiceCallStatus.connected
    ].contains(gFFI.chatModel.voiceCallStatus.value);
    final menus = [
      makeTextMenu(
          'Text chat', 'assets/chat.svg', () => onPressedTextChat(widget.id)),
      isInVoice
          ? makeTextMenu(
              'End voice call', 'assets/call_wait.svg', onPressEndVoiceCall,
              colorFilter: ColorFilter.mode(Colors.redAccent, BlendMode.srcIn),
              labelStyle: TextStyle(color: Colors.redAccent))
          : makeTextMenu(
              'Voice call', 'assets/call_wait.svg', onPressVoiceCall),
    ];
    getChild(TTextMenu menu) {
      if (menu.trailingIcon != null) {
        return Row(
            mainAxisAlignment: MainAxisAlignment.spaceBetween,
            children: [
              menu.child,
              menu.trailingIcon!,
            ]);
      } else {
        return menu.child;
      }
    }

    final menuItems = menus
        .asMap()
        .entries
        .map((e) => PopupMenuItem<int>(child: getChild(e.value), value: e.key))
        .toList();
    Future.delayed(Duration.zero, () async {
      final size = MediaQuery.of(context).size;
      final x = 120.0;
      final y = size.height;
      var index = await showMenu(
        context: context,
        position: RelativeRect.fromLTRB(x, y, x, y),
        items: menuItems,
        elevation: 8,
      );
      if (index != null && index < menus.length) {
        menus[index].onPressed.call();
      }
    });
  }

  /// aka changeTouchMode
  BottomAppBar getGestureHelp() {
    return BottomAppBar(
@@ -637,40 +637,94 @@ class ConnectionManager extends StatelessWidget {
                      style: Theme.of(context).textTheme.bodyMedium,
                    ).marginOnly(bottom: 5),
                    client.authorized
                        ? Container(
                            alignment: Alignment.centerRight,
                            child: ElevatedButton.icon(
                                style: ButtonStyle(
                                    backgroundColor:
                                        MaterialStatePropertyAll(Colors.red)),
                                icon: const Icon(Icons.close),
                                onPressed: () {
                                  bind.cmCloseConnection(connId: client.id);
                                  gFFI.invokeMethod(
                                      "cancel_notification", client.id);
                                },
                                label: Text(translate("Disconnect"))))
                        : Row(
                            mainAxisAlignment: MainAxisAlignment.end,
                            children: [
                              TextButton(
                                  child: Text(translate("Dismiss")),
                                  onPressed: () {
                                    serverModel.sendLoginResponse(
                                        client, false);
                                  }).marginOnly(right: 15),
                              if (serverModel.approveMode != 'password')
                                ElevatedButton.icon(
                                    icon: const Icon(Icons.check),
                                    label: Text(translate("Accept")),
                                    onPressed: () {
                                      serverModel.sendLoginResponse(
                                          client, true);
                                    }),
                            ]),
                        ? _buildDisconnectButton(client)
                        : _buildNewConnectionHint(serverModel, client),
                    if (client.incomingVoiceCall && !client.inVoiceCall)
                      ..._buildNewVoiceCallHint(context, serverModel, client),
                  ])))
          .toList());
  }

  Widget _buildDisconnectButton(Client client) {
    final disconnectButton = ElevatedButton.icon(
      style: ButtonStyle(backgroundColor: MaterialStatePropertyAll(Colors.red)),
      icon: const Icon(Icons.close),
      onPressed: () {
        bind.cmCloseConnection(connId: client.id);
        gFFI.invokeMethod("cancel_notification", client.id);
      },
      label: Text(translate("Disconnect")),
    );
    final buttons = [disconnectButton];
    if (client.inVoiceCall) {
      buttons.insert(
        0,
        ElevatedButton.icon(
          style: ButtonStyle(
              backgroundColor: MaterialStatePropertyAll(Colors.red)),
          icon: const Icon(Icons.phone),
          label: Text(translate("Stop")),
          onPressed: () {
            bind.cmCloseVoiceCall(id: client.id);
            gFFI.invokeMethod("cancel_notification", client.id);
          },
        ),
      );
    }

    if (buttons.length == 1) {
      return Container(
        alignment: Alignment.centerRight,
        child: disconnectButton,
      );
    } else {
      return Row(
        children: buttons,
        mainAxisAlignment: MainAxisAlignment.spaceBetween,
      );
    }
  }

  Widget _buildNewConnectionHint(ServerModel serverModel, Client client) {
    return Row(mainAxisAlignment: MainAxisAlignment.end, children: [
      TextButton(
          child: Text(translate("Dismiss")),
          onPressed: () {
            serverModel.sendLoginResponse(client, false);
          }).marginOnly(right: 15),
      if (serverModel.approveMode != 'password')
        ElevatedButton.icon(
            icon: const Icon(Icons.check),
            label: Text(translate("Accept")),
            onPressed: () {
              serverModel.sendLoginResponse(client, true);
            }),
    ]);
  }

  List<Widget> _buildNewVoiceCallHint(
      BuildContext context, ServerModel serverModel, Client client) {
    return [
      Text(
        translate("android_new_voice_call_tip"),
        style: Theme.of(context).textTheme.bodyMedium,
      ).marginOnly(bottom: 5),
      Row(mainAxisAlignment: MainAxisAlignment.end, children: [
        TextButton(
            child: Text(translate("Dismiss")),
            onPressed: () {
              serverModel.handleVoiceCall(client, false);
            }).marginOnly(right: 15),
        if (serverModel.approveMode != 'password')
          ElevatedButton.icon(
              icon: const Icon(Icons.check),
              label: Text(translate("Accept")),
              onPressed: () {
                serverModel.handleVoiceCall(client, true);
              }),
      ])
    ];
  }
}

class PaddingCard extends StatelessWidget {
@@ -787,6 +841,15 @@ void androidChannelInit() {
          gFFI.serverModel.stopService();
          break;
        }
      case "msgbox":
        {
          var type = arguments["type"] as String;
          var title = arguments["title"] as String;
          var text = arguments["text"] as String;
          var link = (arguments["link"] ?? '') as String;
          msgBox(gFFI.sessionId, type, title, text, link, gFFI.dialogManager);
          break;
        }
    }
  } catch (e) {
    debugPrintStack(label: "MethodCallHandler err:$e");
@@ -527,10 +527,16 @@ class ChatModel with ChangeNotifier {

  void onVoiceCallStarted() {
    _voiceCallStatus.value = VoiceCallStatus.connected;
    if (isAndroid) {
      parent.target?.invokeMethod("on_voice_call_started");
    }
  }

  void onVoiceCallClosed(String reason) {
    _voiceCallStatus.value = VoiceCallStatus.notStarted;
    if (isAndroid) {
      parent.target?.invokeMethod("on_voice_call_closed");
    }
  }

  void onVoiceCallIncoming() {
@@ -550,37 +550,60 @@ class ServerModel with ChangeNotifier {
  }

  void showLoginDialog(Client client) {
    showClientDialog(
      client,
      client.isFileTransfer ? "File Connection" : "Screen Connection",
      'Do you accept?',
      'android_new_connection_tip',
      () => sendLoginResponse(client, false),
      () => sendLoginResponse(client, true),
    );
  }

  handleVoiceCall(Client client, bool accept) {
    parent.target?.invokeMethod("cancel_notification", client.id);
    bind.cmHandleIncomingVoiceCall(id: client.id, accept: accept);
  }

  showVoiceCallDialog(Client client) {
    showClientDialog(
      client,
      'Voice call',
      'Do you accept?',
      'android_new_voice_call_tip',
      () => handleVoiceCall(client, false),
      () => handleVoiceCall(client, true),
    );
  }

  showClientDialog(Client client, String title, String contentTitle,
      String content, VoidCallback onCancel, VoidCallback onSubmit) {
    parent.target?.dialogManager.show((setState, close, context) {
      cancel() {
        sendLoginResponse(client, false);
        onCancel();
        close();
      }

      submit() {
        sendLoginResponse(client, true);
        onSubmit();
        close();
      }

      return CustomAlertDialog(
        title:
            Row(mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [
          Text(translate(
              client.isFileTransfer ? "File Connection" : "Screen Connection")),
          IconButton(
              onPressed: () {
                close();
              },
              icon: const Icon(Icons.close))
          Text(translate(title)),
          IconButton(onPressed: close, icon: const Icon(Icons.close))
        ]),
        content: Column(
            mainAxisSize: MainAxisSize.min,
            mainAxisAlignment: MainAxisAlignment.center,
            crossAxisAlignment: CrossAxisAlignment.start,
            children: [
              Text(translate("Do you accept?")),
              Text(translate(contentTitle)),
              ClientInfo(client),
              Text(
                translate("android_new_connection_tip"),
                translate(content),
                style: Theme.of(globalKey.currentContext!).textTheme.bodyMedium,
              ),
            ],
@@ -676,10 +699,14 @@ class ServerModel with ChangeNotifier {
    _clients[index].inVoiceCall = client.inVoiceCall;
    _clients[index].incomingVoiceCall = client.incomingVoiceCall;
    if (client.incomingVoiceCall) {
      // Has incoming phone call, let's set the window on top.
      Future.delayed(Duration.zero, () {
        windowOnTop(null);
      });
      if (isAndroid) {
        showVoiceCallDialog(client);
      } else {
        // Has incoming phone call, let's set the window on top.
        Future.delayed(Duration.zero, () {
          windowOnTop(null);
        });
      }
    }
    notifyListeners();
  }
@@ -1239,11 +1239,11 @@ pub mod connection_manager {
    fn add_connection(&self, client: &crate::ui_cm_interface::Client) {
        let client_json = serde_json::to_string(&client).unwrap_or("".into());
        // send to Android service, active notification no matter UI is shown or not.
        #[cfg(any(target_os = "android"))]
        #[cfg(target_os = "android")]
        if let Err(e) =
            call_main_service_set_by_name("add_connection", Some(&client_json), None)
        {
            log::debug!("call_service_set_by_name fail,{}", e);
            log::debug!("call_main_service_set_by_name fail,{}", e);
        }
        // send to UI, refresh widget
        self.push_event("add_connection", &[("client", &client_json)]);
@@ -1277,6 +1277,13 @@ pub mod connection_manager {

    fn update_voice_call_state(&self, client: &crate::ui_cm_interface::Client) {
        let client_json = serde_json::to_string(&client).unwrap_or("".into());
        // send to Android service, active notification no matter UI is shown or not.
        #[cfg(target_os = "android")]
        if let Err(e) =
            call_main_service_set_by_name("update_voice_call_state", Some(&client_json), None)
        {
            log::debug!("call_main_service_set_by_name fail,{}", e);
        }
        self.push_event("update_voice_call_state", &[("client", &client_json)]);
    }

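On the Kotlin side, call_main_service_set_by_name("update_voice_call_state", ...) lands in the @Keep method that the MainService hunk above extends. A rough sketch of that receiving shape; the entry-point name rustSetByName is an assumption based on the surrounding RustDesk code, it is not shown in this diff:

// Assumed entry point; only the dispatch shape is sketched here.
@Keep
fun rustSetByName(name: String, arg1: String, arg2: String) {
    when (name) {
        "update_voice_call_state" -> { /* arg1 is the serialized Client JSON handled above */ }
        else -> { }
    }
}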
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", ""),
        ("clear_Wayland_screen_selection_tip", ""),
        ("confirm_clear_Wayland_screen_selection_tip", ""),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "清除 Wayland 的屏幕选择"),
        ("clear_Wayland_screen_selection_tip", "清除 Wayland 的屏幕选择后,您可以重新选择分享的屏幕。"),
        ("confirm_clear_Wayland_screen_selection_tip", "是否确认清除 Wayland 的分享屏幕选择?"),
        ("android_new_voice_call_tip", "收到新的语音呼叫请求。如果您接受,音频将切换为语音通信。"),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Vymazat výběr obrazovky Wayland"),
        ("clear_Wayland_screen_selection_tip", "Po vymazání výběru obrazovky můžete znovu vybrat obrazovku, kterou chcete sdílet."),
        ("confirm_clear_Wayland_screen_selection_tip", "Opravdu chcete vymazat výběr obrazovky Wayland?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Wayland-Bildschirmauswahl löschen"),
        ("clear_Wayland_screen_selection_tip", "Nachdem Sie die Bildschirmauswahl gelöscht haben, können Sie den freizugebenden Bildschirm erneut auswählen."),
        ("confirm_clear_Wayland_screen_selection_tip", "Sind Sie sicher, dass Sie die Auswahl des Wayland-Bildschirms löschen möchten?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -227,5 +227,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("no_audio_input_device_tip", "No audio input device found."),
        ("clear_Wayland_screen_selection_tip", "After clearing the screen selection, you can reselect the screen to share."),
        ("confirm_clear_Wayland_screen_selection_tip", "Are you sure to clear the Wayland screen selection?"),
        ("android_new_voice_call_tip", "A new voice call request was received. If you accept, the audio will switch to voice communication."),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Annulla selezione schermata Wayland"),
        ("clear_Wayland_screen_selection_tip", "Dopo aver annullato la selezione schermo, è possibile selezionare nuovamente lo schermo da condividere."),
        ("confirm_clear_Wayland_screen_selection_tip", "Sei sicuro di voler annullare la selezione schermo Wayland?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Notīrīt Wayland ekrāna atlasi"),
        ("clear_Wayland_screen_selection_tip", "Pēc ekrāna atlases notīrīšanas varat atkārtoti atlasīt ekrānu, ko kopīgot."),
        ("confirm_clear_Wayland_screen_selection_tip", "Vai tiešām notīrīt Wayland ekrāna atlasi?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Отменить выбор экрана Wayland"),
        ("clear_Wayland_screen_selection_tip", "После отмены можно заново выбрать экран для демонстрации."),
        ("confirm_clear_Wayland_screen_selection_tip", "Отменить выбор экрана Wayland?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "Vyčistiť výber obrazovky Wayland"),
        ("clear_Wayland_screen_selection_tip", "Po vymazaní výberu obrazovky môžete znova vybrať obrazovku, ktorú chcete zdieľať."),
        ("confirm_clear_Wayland_screen_selection_tip", "Určite ste si istý, že chcete vyčistiť výber obrazovky Wayland?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -612,5 +612,6 @@ pub static ref T: std::collections::HashMap<&'static str, &'static str> =
        ("Clear Wayland screen selection", "清除 Wayland 的螢幕選擇"),
        ("clear_Wayland_screen_selection_tip", "清除 Wayland 的螢幕選擇後,您可以重新選擇分享的螢幕。"),
        ("confirm_clear_Wayland_screen_selection_tip", "是否確認清除 Wayland 的分享螢幕選擇?"),
        ("android_new_voice_call_tip", ""),
    ].iter().cloned().collect();
}
@@ -221,7 +221,7 @@ impl<T: InvokeUiCM> ConnectionManager<T> {
        self.ui_handler.show_elevation(show);
    }

    #[cfg(not(any(target_os = "android", target_os = "ios")))]
    #[cfg(not(target_os = "ios"))]
    fn voice_call_started(&self, id: i32) {
        if let Some(client) = CLIENTS.write().unwrap().get_mut(&id) {
            client.incoming_voice_call = false;
@@ -230,7 +230,7 @@ impl<T: InvokeUiCM> ConnectionManager<T> {
        }
    }

    #[cfg(not(any(target_os = "android", target_os = "ios")))]
    #[cfg(not(target_os = "ios"))]
    fn voice_call_incoming(&self, id: i32) {
        if let Some(client) = CLIENTS.write().unwrap().get_mut(&id) {
            client.incoming_voice_call = true;
@@ -239,7 +239,7 @@ impl<T: InvokeUiCM> ConnectionManager<T> {
        }
    }

    #[cfg(not(any(target_os = "android", target_os = "ios")))]
    #[cfg(not(target_os = "ios"))]
    fn voice_call_closed(&self, id: i32, _reason: &str) {
        if let Some(client) = CLIENTS.write().unwrap().get_mut(&id) {
            client.incoming_voice_call = false;
@@ -656,6 +656,15 @@ pub async fn start_listen<T: InvokeUiCM>(
            Some(Data::Close) => {
                break;
            }
            Some(Data::StartVoiceCall) => {
                cm.voice_call_started(current_id);
            }
            Some(Data::VoiceCallIncoming) => {
                cm.voice_call_incoming(current_id);
            }
            Some(Data::CloseVoiceCall(reason)) => {
                cm.voice_call_closed(current_id, reason.as_str());
            }
            None => {
                break;
            }