diff --git a/Cargo.lock b/Cargo.lock
index e15641363..52fcc76cd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1334,8 +1334,9 @@ checksum = "f578e8e2c440e7297e008bb5486a3a8a194775224bbc23729b0dbdfaeebf162e"
[[package]]
name = "default-net"
-version = "0.11.0"
-source = "git+https://github.com/Kingtous/default-net#bdaad8dd5b08efcba303e71729d3d0b1d5ccdb25"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14e349ed1e06fb344a7dd8b5a676375cf671b31e8900075dd2be816efc063a63"
dependencies = [
"libc",
"memalloc",
diff --git a/Cargo.toml b/Cargo.toml
index 936b9e349..b315024e9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -59,7 +59,7 @@ base64 = "0.13"
sysinfo = "0.24"
num_cpus = "1.13"
bytes = { version = "1.2", features = ["serde"] }
-default-net = { git = "https://github.com/Kingtous/default-net" }
+default-net = "0.12.0"
wol-rs = "0.9.1"
flutter_rust_bridge = { version = "1.61.1", optional = true }
errno = "0.2.8"
diff --git a/flutter/assets/chat.svg b/flutter/assets/chat.svg
new file mode 100644
index 000000000..03491be6e
--- /dev/null
+++ b/flutter/assets/chat.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/flutter/assets/record_screen.svg b/flutter/assets/record_screen.svg
new file mode 100644
index 000000000..e1b962124
--- /dev/null
+++ b/flutter/assets/record_screen.svg
@@ -0,0 +1,24 @@
+
+
+
+
\ No newline at end of file
diff --git a/flutter/assets/voice_call.svg b/flutter/assets/voice_call.svg
new file mode 100644
index 000000000..5654befc7
--- /dev/null
+++ b/flutter/assets/voice_call.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/flutter/assets/voice_call_waiting.svg b/flutter/assets/voice_call_waiting.svg
new file mode 100644
index 000000000..fd8334f92
--- /dev/null
+++ b/flutter/assets/voice_call_waiting.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/flutter/lib/common.dart b/flutter/lib/common.dart
index 30d38b8db..df2a75f56 100644
--- a/flutter/lib/common.dart
+++ b/flutter/lib/common.dart
@@ -1723,3 +1723,30 @@ Future updateSystemWindowTheme() async {
}
}
}
+/// macOS only
+///
+/// Note: no general solution was found for a Rust-based AVFoundation binding.
+/// The [AVFoundation] crate has compile errors.
+const kMacOSPermChannel = MethodChannel("org.rustdesk.rustdesk/macos");
+
+enum PermissionAuthorizeType {
+ undetermined,
+ authorized,
+ denied, // and restricted
+}
+
+Future<PermissionAuthorizeType> osxCanRecordAudio() async {
+ int res = await kMacOSPermChannel.invokeMethod("canRecordAudio");
+ print(res);
+ if (res > 0) {
+ return PermissionAuthorizeType.authorized;
+ } else if (res == 0) {
+ return PermissionAuthorizeType.undetermined;
+ } else {
+ return PermissionAuthorizeType.denied;
+ }
+}
+
+Future osxRequestAudio() async {
+ return await kMacOSPermChannel.invokeMethod("requestRecordAudio");
+}
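
The integer contract here pairs with the Swift handler added in MainFlutterWindow.swift further down: 1 maps to authorized, 0 to not-determined, and -1 to denied or restricted. Restated as a small Rust sketch (illustration only, not part of the patch):

    // Tri-state permission result as returned over the method channel.
    #[derive(Debug, PartialEq)]
    enum PermissionAuthorizeType {
        Undetermined,
        Authorized,
        Denied, // includes "restricted" on macOS
    }

    fn from_channel_result(res: i32) -> PermissionAuthorizeType {
        match res {
            r if r > 0 => PermissionAuthorizeType::Authorized, // Swift: .authorized -> 1
            0 => PermissionAuthorizeType::Undetermined,        // Swift: .notDetermined -> 0
            _ => PermissionAuthorizeType::Denied,              // Swift: default -> -1
        }
    }
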
diff --git a/flutter/lib/consts.dart b/flutter/lib/consts.dart
index c95c62fcc..26e25a209 100644
--- a/flutter/lib/consts.dart
+++ b/flutter/lib/consts.dart
@@ -106,6 +106,12 @@ const kRemoteImageQualityLow = 'low';
/// [kRemoteImageQualityCustom] Custom image quality.
const kRemoteImageQualityCustom = 'custom';
+/// [kRemoteAudioGuestToHost] Guest-to-host audio mode (default).
+const kRemoteAudioGuestToHost = 'guest-to-host';
+
+/// [kRemoteAudioDualWay] Dual-way audio mode.
+const kRemoteAudioDualWay = 'dual-way';
+
const kIgnoreDpi = true;
/// flutter/packages/flutter/lib/src/services/keyboard_key.dart -> _keyLabels
diff --git a/flutter/lib/desktop/pages/desktop_home_page.dart b/flutter/lib/desktop/pages/desktop_home_page.dart
index 0501c298a..2986adc7a 100644
--- a/flutter/lib/desktop/pages/desktop_home_page.dart
+++ b/flutter/lib/desktop/pages/desktop_home_page.dart
@@ -44,6 +44,7 @@ class _DesktopHomePageState extends State
var watchIsCanScreenRecording = false;
var watchIsProcessTrust = false;
var watchIsInputMonitoring = false;
+ var watchIsCanRecordAudio = false;
Timer? _updateTimer;
@override
@@ -79,7 +80,16 @@ class _DesktopHomePageState extends State
buildTip(context),
buildIDBoard(context),
buildPasswordBoard(context),
- buildHelpCards(),
+ FutureBuilder<Widget>(
+ future: buildHelpCards(),
+ builder: (_, data) {
+ if (data.hasData) {
+ return data.data!;
+ } else {
+ return const Offstage();
+ }
+ },
+ ),
],
),
),
@@ -302,7 +312,7 @@ class _DesktopHomePageState extends State
);
}
- Widget buildHelpCards() {
+ Future<Widget> buildHelpCards() async {
if (updateUrl.isNotEmpty) {
return buildInstallCard(
"Status",
@@ -349,6 +359,15 @@ class _DesktopHomePageState extends State
bind.mainIsInstalledDaemon(prompt: true);
});
}
+ // Disable microphone configuration for macOS. We will request the permission when needed.
+ // else if ((await osxCanRecordAudio() !=
+ // PermissionAuthorizeType.authorized)) {
+ // return buildInstallCard("Permissions", "config_microphone", "Configure",
+ // () async {
+ // osxRequestAudio();
+ // watchIsCanRecordAudio = true;
+ // });
+ // }
} else if (Platform.isLinux) {
if (bind.mainCurrentIsWayland()) {
return buildInstallCard(
@@ -481,6 +500,20 @@ class _DesktopHomePageState extends State
setState(() {});
}
}
+ if (watchIsCanRecordAudio) {
+ if (Platform.isMacOS) {
+ Future.microtask(() async {
+ if ((await osxCanRecordAudio() ==
+ PermissionAuthorizeType.authorized)) {
+ watchIsCanRecordAudio = false;
+ setState(() {});
+ }
+ });
+ } else {
+ watchIsCanRecordAudio = false;
+ setState(() {});
+ }
+ }
});
Get.put(svcStopped, tag: 'stop-service');
rustDeskWinManager.registerActiveWindowListener(onActiveWindowChanged);
diff --git a/flutter/lib/desktop/pages/server_page.dart b/flutter/lib/desktop/pages/server_page.dart
index 521413647..66a043fef 100644
--- a/flutter/lib/desktop/pages/server_page.dart
+++ b/flutter/lib/desktop/pages/server_page.dart
@@ -521,6 +521,39 @@ class _CmControlPanel extends StatelessWidget {
return Column(
mainAxisAlignment: MainAxisAlignment.end,
children: [
+ Offstage(
+ offstage: !client.inVoiceCall,
+ child: buildButton(context,
+ color: Colors.red,
+ onClick: () => closeVoiceCall(),
+ icon: Icon(Icons.phone_disabled_rounded, color: Colors.white),
+ text: "Stop voice call",
+ textColor: Colors.white),
+ ),
+ Offstage(
+ offstage: !client.incomingVoiceCall,
+ child: Row(
+ children: [
+ Expanded(
+ child: buildButton(context,
+ color: MyTheme.accent,
+ onClick: () => handleVoiceCall(true),
+ icon: Icon(Icons.phone_enabled, color: Colors.white),
+ text: "Accept",
+ textColor: Colors.white),
+ ),
+ Expanded(
+ child: buildButton(context,
+ color: Colors.red,
+ onClick: () => handleVoiceCall(false),
+ icon:
+ Icon(Icons.phone_disabled_rounded, color: Colors.white),
+ text: "Dismiss",
+ textColor: Colors.white),
+ )
+ ],
+ ),
+ ),
Offstage(
offstage: !client.fromSwitch,
child: buildButton(context,
@@ -626,7 +659,7 @@ class _CmControlPanel extends StatelessWidget {
.marginSymmetric(horizontal: showElevation ? 0 : bigMargin);
}
- buildButton(
+ Widget buildButton(
BuildContext context, {
required Color? color,
required Function() onClick,
@@ -692,6 +725,14 @@ class _CmControlPanel extends StatelessWidget {
void handleSwitchBack(BuildContext context) {
bind.cmSwitchBack(connId: client.id);
}
+
+ void handleVoiceCall(bool accept) {
+ bind.cmHandleIncomingVoiceCall(id: client.id, accept: accept);
+ }
+
+ void closeVoiceCall() {
+ bind.cmCloseVoiceCall(id: client.id);
+ }
}
void checkClickTime(int id, Function() callback) async {
diff --git a/flutter/lib/desktop/widgets/remote_menubar.dart b/flutter/lib/desktop/widgets/remote_menubar.dart
index 36b9504c0..dcc531408 100644
--- a/flutter/lib/desktop/widgets/remote_menubar.dart
+++ b/flutter/lib/desktop/widgets/remote_menubar.dart
@@ -9,6 +9,7 @@ import 'package:flutter_hbb/models/chat_model.dart';
import 'package:flutter_hbb/models/state_model.dart';
import 'package:flutter_hbb/consts.dart';
import 'package:flutter_hbb/utils/multi_window_manager.dart';
+import 'package:flutter_svg/flutter_svg.dart';
import 'package:get/get.dart';
import 'package:provider/provider.dart';
import 'package:debounce_throttle/debounce_throttle.dart';
@@ -425,6 +426,7 @@ class _RemoteMenubarState extends State {
menubarItems.add(_buildKeyboard(context));
if (!isWeb) {
menubarItems.add(_buildChat(context));
+ menubarItems.add(_buildVoiceCall(context));
}
menubarItems.add(_buildRecording(context));
menubarItems.add(_buildClose(context));
@@ -478,20 +480,6 @@ class _RemoteMenubarState extends State {
);
}
- Widget _buildChat(BuildContext context) {
- return IconButton(
- tooltip: translate('Chat'),
- onPressed: () {
- widget.ffi.chatModel.changeCurrentID(ChatModel.clientModeID);
- widget.ffi.chatModel.toggleChatOverlay();
- },
- icon: const Icon(
- Icons.message,
- color: _MenubarTheme.commonColor,
- ),
- );
- }
-
Widget _buildMonitor(BuildContext context) {
final pi = widget.ffi.ffiModel.pi;
return mod_menu.PopupMenuButton(
@@ -669,12 +657,17 @@ class _RemoteMenubarState extends State {
? translate('Stop session recording')
: translate('Start session recording'),
onPressed: () => value.toggle(),
- icon: Icon(
- value.start
- ? Icons.pause_circle_filled
- : Icons.videocam_outlined,
- color: _MenubarTheme.commonColor,
- ),
+ icon: value.start
+ ? Icon(
+ Icons.pause_circle_filled,
+ color: _MenubarTheme.commonColor,
+ )
+ : SvgPicture.asset(
+ "assets/record_screen.svg",
+ color: _MenubarTheme.commonColor,
+ width: Theme.of(context).iconTheme.size ?? 22.0,
+ height: Theme.of(context).iconTheme.size ?? 22.0,
+ ),
));
} else {
return Offstage();
@@ -695,6 +688,119 @@ class _RemoteMenubarState extends State {
);
}
+ Widget _buildChat(BuildContext context) {
+ FfiModel ffiModel = Provider.of<FfiModel>(context);
+ return mod_menu.PopupMenuButton(
+ padding: EdgeInsets.zero,
+ icon: SvgPicture.asset(
+ "assets/chat.svg",
+ color: _MenubarTheme.commonColor,
+ width: Theme.of(context).iconTheme.size ?? 24.0,
+ height: Theme.of(context).iconTheme.size ?? 24.0,
+ ),
+ tooltip: translate('Chat'),
+ position: mod_menu.PopupMenuPosition.under,
+ itemBuilder: (BuildContext context) => _getChatMenu(context)
+ .map((entry) => entry.build(
+ context,
+ const MenuConfig(
+ commonColor: _MenubarTheme.commonColor,
+ height: _MenubarTheme.height,
+ dividerHeight: _MenubarTheme.dividerHeight,
+ )))
+ .expand((i) => i)
+ .toList(),
+ );
+ }
+
+ Widget _getVoiceCallIcon() {
+ switch (widget.ffi.chatModel.voiceCallStatus.value) {
+ case VoiceCallStatus.waitingForResponse:
+ return IconButton(
+ onPressed: () {
+ widget.ffi.chatModel.closeVoiceCall(widget.id);
+ },
+ icon: SvgPicture.asset(
+ "assets/voice_call_waiting.svg",
+ color: Colors.red,
+ width: Theme.of(context).iconTheme.size ?? 20.0,
+ height: Theme.of(context).iconTheme.size ?? 20.0,
+ ));
+ case VoiceCallStatus.connected:
+ return IconButton(
+ onPressed: () {
+ widget.ffi.chatModel.closeVoiceCall(widget.id);
+ },
+ icon: Icon(
+ Icons.phone_disabled_rounded,
+ color: Colors.red,
+ size: Theme.of(context).iconTheme.size ?? 22.0,
+ ),
+ );
+ default:
+ return const Offstage();
+ }
+ }
+
+ String? _getVoiceCallTooltip() {
+ switch (widget.ffi.chatModel.voiceCallStatus.value) {
+ case VoiceCallStatus.waitingForResponse:
+ return "Waiting";
+ case VoiceCallStatus.connected:
+ return "Disconnect";
+ default:
+ return null;
+ }
+ }
+
+ Widget _buildVoiceCall(BuildContext context) {
+ return Obx(
+ () {
+ final tooltipText = _getVoiceCallTooltip();
+ return tooltipText == null
+ ? const Offstage()
+ : IconButton(
+ padding: EdgeInsets.zero,
+ icon: _getVoiceCallIcon(),
+ tooltip: translate(tooltipText),
+ onPressed: () => bind.sessionRequestVoiceCall(id: widget.id),
+ );
+ },
+ );
+ }
+
+ List<MenuEntryBase<String>> _getChatMenu(BuildContext context) {
+ final List<MenuEntryBase<String>> chatMenu = [];
+ const EdgeInsets padding = EdgeInsets.only(left: 14.0, right: 5.0);
+ chatMenu.addAll([
+ MenuEntryButton<String>(
+ childBuilder: (TextStyle? style) => Text(
+ translate('Text chat'),
+ style: style,
+ ),
+ proc: () {
+ widget.ffi.chatModel.changeCurrentID(ChatModel.clientModeID);
+ widget.ffi.chatModel.toggleChatOverlay();
+ },
+ padding: padding,
+ dismissOnClicked: true,
+ ),
+ MenuEntryButton<String>(
+ childBuilder: (TextStyle? style) => Text(
+ translate('Voice call'),
+ style: style,
+ ),
+ proc: () {
+ // Request a voice call.
+ bind.sessionRequestVoiceCall(id: widget.id);
+ },
+ padding: padding,
+ dismissOnClicked: true,
+ ),
+ ]);
+ return chatMenu;
+ }
+
List<MenuEntryBase<String>> _getControlMenu(BuildContext context) {
final pi = widget.ffi.ffiModel.pi;
final perms = widget.ffi.ffiModel.permissions;
@@ -884,7 +990,6 @@ class _RemoteMenubarState extends State {
// ));
// }
}
-
return displayMenu;
}
diff --git a/flutter/lib/main.dart b/flutter/lib/main.dart
index c19adf753..c61287d4f 100644
--- a/flutter/lib/main.dart
+++ b/flutter/lib/main.dart
@@ -216,6 +216,7 @@ void runMultiWindow(
void runConnectionManagerScreen(bool hide) async {
await initEnv(kAppTypeConnectionManager);
+ await bind.cmStartListenIpcThread();
_runApp(
'',
const DesktopServerPage(),
diff --git a/flutter/lib/models/chat_model.dart b/flutter/lib/models/chat_model.dart
index 18a0be279..bf7f8773d 100644
--- a/flutter/lib/models/chat_model.dart
+++ b/flutter/lib/models/chat_model.dart
@@ -2,6 +2,7 @@ import 'package:dash_chat_2/dash_chat_2.dart';
import 'package:draggable_float_widget/draggable_float_widget.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hbb/models/platform_model.dart';
+import 'package:get/get.dart';
import 'package:window_manager/window_manager.dart';
import '../consts.dart';
@@ -33,8 +34,13 @@ class ChatModel with ChangeNotifier {
OverlayState? _overlayState;
OverlayEntry? chatIconOverlayEntry;
OverlayEntry? chatWindowOverlayEntry;
+
bool isConnManager = false;
+ final Rx<VoiceCallStatus> _voiceCallStatus = Rx(VoiceCallStatus.notStarted);
+
+ Rx<VoiceCallStatus> get voiceCallStatus => _voiceCallStatus;
+
final ChatUser me = ChatUser(
id: "",
firstName: "Me",
@@ -292,4 +298,34 @@ class ChatModel with ChangeNotifier {
resetClientMode() {
_messages[clientModeID]?.clear();
}
+
+ void onVoiceCallWaiting() {
+ _voiceCallStatus.value = VoiceCallStatus.waitingForResponse;
+ }
+
+ void onVoiceCallStarted() {
+ _voiceCallStatus.value = VoiceCallStatus.connected;
+ }
+
+ void onVoiceCallClosed(String reason) {
+ _voiceCallStatus.value = VoiceCallStatus.notStarted;
+ }
+
+ void onVoiceCallIncoming() {
+ if (isConnManager) {
+ _voiceCallStatus.value = VoiceCallStatus.incoming;
+ }
+ }
+
+ void closeVoiceCall(String id) {
+ bind.sessionCloseVoiceCall(id: id);
+ }
+}
+
+enum VoiceCallStatus {
+ notStarted,
+ waitingForResponse,
+ connected,
+ // Connection manager only.
+ incoming
}
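
The VoiceCallStatus enum above is a small state machine driven by the on_voice_call_* events dispatched in model.dart below. A Rust mirror of the event-to-state mapping, for illustration only:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum VoiceCallStatus {
        NotStarted,
        WaitingForResponse,
        Connected,
        Incoming, // connection manager only
    }

    // Mirrors ChatModel.onVoiceCall*: each event overwrites the status.
    fn on_event(event: &str, current: VoiceCallStatus, is_conn_manager: bool) -> VoiceCallStatus {
        use VoiceCallStatus::*;
        match event {
            "on_voice_call_waiting" => WaitingForResponse,
            "on_voice_call_started" => Connected,
            "on_voice_call_closed" => NotStarted,
            "on_voice_call_incoming" if is_conn_manager => Incoming,
            _ => current,
        }
    }
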
diff --git a/flutter/lib/models/model.dart b/flutter/lib/models/model.dart
index daf7bfe34..a2fe205af 100644
--- a/flutter/lib/models/model.dart
+++ b/flutter/lib/models/model.dart
@@ -203,6 +203,23 @@ class FfiModel with ChangeNotifier {
} else if (name == "on_url_scheme_received") {
final url = evt['url'].toString();
parseRustdeskUri(url);
+ } else if (name == "on_voice_call_waiting") {
+ // Waiting for the response from the peer.
+ parent.target?.chatModel.onVoiceCallWaiting();
+ } else if (name == "on_voice_call_started") {
+ // Voice call is connected.
+ parent.target?.chatModel.onVoiceCallStarted();
+ } else if (name == "on_voice_call_closed") {
+ // Voice call is closed with reason.
+ final reason = evt['reason'].toString();
+ parent.target?.chatModel.onVoiceCallClosed(reason);
+ } else if (name == "on_voice_call_incoming") {
+ // Voice call is requested by the peer.
+ parent.target?.chatModel.onVoiceCallIncoming();
+ } else if (name == "update_voice_call_state") {
+ parent.target?.serverModel.updateVoiceCallState(evt);
+ } else {
+ debugPrint("Unknown event name: $name");
}
};
}
diff --git a/flutter/lib/models/native_model.dart b/flutter/lib/models/native_model.dart
index 628bf502d..34a673953 100644
--- a/flutter/lib/models/native_model.dart
+++ b/flutter/lib/models/native_model.dart
@@ -118,8 +118,12 @@ class PlatformFFI {
// Start a dbus service, no need to await
_ffiBind.mainStartDbusServer();
} else if (Platform.isMacOS && isMain) {
- // Start an ipc server for handling url schemes.
- _ffiBind.mainStartIpcUrlServer();
+ Future.wait([
+ // Start dbus service.
+ _ffiBind.mainStartDbusServer(),
+ // Start local audio pulseaudio server.
+ _ffiBind.mainStartPa()
+ ]);
}
_startListenEvent(_ffiBind); // global event
try {
diff --git a/flutter/lib/models/server_model.dart b/flutter/lib/models/server_model.dart
index 56dca4cdf..aab12ab5d 100644
--- a/flutter/lib/models/server_model.dart
+++ b/flutter/lib/models/server_model.dart
@@ -579,6 +579,26 @@ class ServerModel with ChangeNotifier {
notifyListeners();
}
}
+
+ void updateVoiceCallState(Map<String, dynamic> evt) {
+ try {
+ final client = Client.fromJson(jsonDecode(evt["client"]));
+ final index = _clients.indexWhere((element) => element.id == client.id);
+ if (index != -1) {
+ _clients[index].inVoiceCall = client.inVoiceCall;
+ _clients[index].incomingVoiceCall = client.incomingVoiceCall;
+ if (client.incomingVoiceCall) {
+ // An incoming voice call arrived; bring the window to the front.
+ Future.delayed(Duration.zero, () {
+ window_on_top(null);
+ });
+ }
+ notifyListeners();
+ }
+ } catch (e) {
+ debugPrint("updateVoiceCallState failed: $e");
+ }
+ }
}
enum ClientType {
@@ -602,6 +622,8 @@ class Client {
bool recording = false;
bool disconnected = false;
bool fromSwitch = false;
+ bool inVoiceCall = false;
+ bool incomingVoiceCall = false;
RxBool hasUnreadChatMessage = false.obs;
@@ -623,6 +645,8 @@ class Client {
recording = json['recording'];
disconnected = json['disconnected'];
fromSwitch = json['from_switch'];
+ inVoiceCall = json['in_voice_call'];
+ incomingVoiceCall = json['incoming_voice_call'];
}
Map<String, dynamic> toJson() {
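
Client.fromJson above reads the snake_case keys in_voice_call and incoming_voice_call from the update_voice_call_state payload. A minimal serde sketch of the Rust-side shape (the real Client struct lives in the connection-manager interface and carries many more fields; this is an assumption-labeled illustration):

    use serde::Serialize;

    #[derive(Serialize)]
    struct CmClient {
        id: i32,
        in_voice_call: bool,
        incoming_voice_call: bool,
        // ... the real struct also has name, peer_id, permission flags, etc.
    }

    fn voice_call_event_payload(c: &CmClient) -> String {
        // The serialized JSON becomes evt["client"] on the Dart side.
        serde_json::to_string(c).unwrap()
    }
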
diff --git a/flutter/macos/Runner/Info.plist b/flutter/macos/Runner/Info.plist
index c926019ab..96616e8c4 100644
--- a/flutter/macos/Runner/Info.plist
+++ b/flutter/macos/Runner/Info.plist
@@ -43,6 +43,8 @@
<string>$(PRODUCT_COPYRIGHT)</string>
<key>NSMainNibFile</key>
<string>MainMenu</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Record audio from the microphone for remote desktop sessions.</string>
<key>NSPrincipalClass</key>
<string>NSApplication</string>
diff --git a/flutter/macos/Runner/MainFlutterWindow.swift b/flutter/macos/Runner/MainFlutterWindow.swift
index 97b46bb84..21e870320 100644
--- a/flutter/macos/Runner/MainFlutterWindow.swift
+++ b/flutter/macos/Runner/MainFlutterWindow.swift
@@ -1,4 +1,5 @@
import Cocoa
+import AVFoundation
import FlutterMacOS
import desktop_multi_window
// import bitsdojo_window_macos
@@ -81,6 +82,23 @@ class MainFlutterWindow: NSWindow {
case "terminate":
NSApplication.shared.terminate(self)
result(nil)
+ case "canRecordAudio":
+ switch AVCaptureDevice.authorizationStatus(for: .audio) {
+ case .authorized:
+ result(1)
+ break
+ case .notDetermined:
+ result(0)
+ break
+ default:
+ result(-1)
+ break
+ }
+ case "requestRecordAudio":
+ AVCaptureDevice.requestAccess(for: .audio, completionHandler: { granted in
+ result(granted)
+ })
+ break
default:
result(FlutterMethodNotImplemented)
}
diff --git a/libs/hbb_common/protos/message.proto b/libs/hbb_common/protos/message.proto
index b7965f237..ed2706382 100644
--- a/libs/hbb_common/protos/message.proto
+++ b/libs/hbb_common/protos/message.proto
@@ -598,6 +598,18 @@ message Misc {
}
}
+message VoiceCallRequest {
+ int64 req_timestamp = 1;
+ // Indicates whether the request is a connect action or a disconnect action.
+ bool is_connect = 2;
+}
+
+message VoiceCallResponse {
+ bool accepted = 1;
+ int64 req_timestamp = 2; // Should be copied from [VoiceCallRequest::req_timestamp].
+ int64 ack_timestamp = 3;
+}
+
message Message {
oneof union {
SignedId signed_id = 3;
@@ -620,5 +632,7 @@ message Message {
Cliprdr cliprdr = 20;
MessageBox message_box = 21;
SwitchSidesResponse switch_sides_response = 22;
+ VoiceCallRequest voice_call_request = 23;
+ VoiceCallResponse voice_call_response = 24;
}
}
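
These two messages form a simple offer/answer handshake: the caller stamps req_timestamp, the callee echoes it in VoiceCallResponse.req_timestamp, and the caller only honors a response whose echo matches (see io_loop.rs below). A sketch using the helpers this patch adds in src/client/helper.rs:

    use crate::client::{new_voice_call_request, new_voice_call_response};
    use hbb_common::message_proto::Message;

    // Caller: build the request and remember its timestamp.
    fn caller_offer() -> (Message, i64) {
        let req = new_voice_call_request(true); // is_connect = true
        let ts = req.voice_call_request().req_timestamp;
        (req, ts)
    }

    // Callee: accept or refuse by echoing the request timestamp.
    fn callee_answer(req_timestamp: i64, accept: bool) -> Message {
        new_voice_call_response(req_timestamp, accept)
    }

    // Caller: validate the echo before starting to stream audio.
    fn caller_validate(expected_ts: i64, resp: &Message) -> bool {
        let r = resp.voice_call_response();
        r.req_timestamp == expected_ts && r.accepted
    }
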
diff --git a/src/client.rs b/src/client.rs
index e0ac68c5d..020bea1f0 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -1,58 +1,61 @@
-pub use async_trait::async_trait;
-use bytes::Bytes;
-#[cfg(not(any(target_os = "android", target_os = "linux")))]
-use cpal::{
- traits::{DeviceTrait, HostTrait, StreamTrait},
- Device, Host, StreamConfig,
-};
-use magnum_opus::{Channels::*, Decoder as AudioDecoder};
-use sha2::{Digest, Sha256};
use std::{
collections::HashMap,
net::SocketAddr,
ops::{Deref, Not},
str::FromStr,
- sync::{atomic::AtomicBool, mpsc, Arc, Mutex, RwLock},
+ sync::{Arc, atomic::AtomicBool, mpsc, Mutex, RwLock},
};
+
+pub use async_trait::async_trait;
+use bytes::Bytes;
+#[cfg(not(any(target_os = "android", target_os = "linux")))]
+use cpal::{
+ Device,
+ Host, StreamConfig, traits::{DeviceTrait, HostTrait, StreamTrait},
+};
+use magnum_opus::{Channels::*, Decoder as AudioDecoder};
+use sha2::{Digest, Sha256};
use uuid::Uuid;
pub use file_trait::FileManager;
use hbb_common::{
+ AddrMangle,
allow_err,
anyhow::{anyhow, Context},
bail,
config::{
- Config, PeerConfig, PeerInfoSerde, CONNECT_TIMEOUT, READ_TIMEOUT, RELAY_PORT,
+ Config, CONNECT_TIMEOUT, PeerConfig, PeerInfoSerde, READ_TIMEOUT, RELAY_PORT,
RENDEZVOUS_TIMEOUT,
- },
- get_version_number, log,
- message_proto::{option_message::BoolOption, *},
+ }, get_version_number,
+ log,
+ message_proto::{*, option_message::BoolOption},
protobuf::Message as _,
rand,
rendezvous_proto::*,
+ ResultType,
socket_client,
sodiumoxide::crypto::{box_, secretbox, sign},
- timeout,
- tokio::time::Duration,
- AddrMangle, ResultType, Stream,
+ Stream, timeout, tokio::time::Duration,
};
-pub use helper::LatencyController;
pub use helper::*;
+pub use helper::LatencyController;
use scrap::{
codec::{Decoder, DecoderCfg},
record::{Recorder, RecorderContext},
VpxDecoderConfig, VpxVideoCodecId,
};
+use crate::{
+ common::{self, is_keyboard_mode_supported},
+ server::video_service::{SCRAP_X11_REF_URL, SCRAP_X11_REQUIRED},
+};
+
pub use super::lang::*;
pub mod file_trait;
pub mod helper;
pub mod io_loop;
-use crate::{
- common::{self, is_keyboard_mode_supported},
- server::video_service::{SCRAP_X11_REF_URL, SCRAP_X11_REQUIRED},
-};
+
pub static SERVER_KEYBOARD_ENABLED: AtomicBool = AtomicBool::new(true);
pub static SERVER_FILE_TRANSFER_ENABLED: AtomicBool = AtomicBool::new(true);
pub static SERVER_CLIPBOARD_ENABLED: AtomicBool = AtomicBool::new(true);
@@ -714,6 +717,7 @@ impl AudioHandler {
.check_audio(frame.timestamp)
.not()
{
+ log::debug!("audio frame {} is ignored", frame.timestamp);
return;
}
}
@@ -724,6 +728,7 @@ impl AudioHandler {
}
#[cfg(target_os = "linux")]
if self.simple.is_none() {
+ log::debug!("PulseAudio simple binding does not exists");
return;
}
#[cfg(target_os = "android")]
@@ -1543,7 +1548,6 @@ where
F: 'static + FnMut(&[u8]) + Send,
{
let (video_sender, video_receiver) = mpsc::channel::<MediaData>();
- let (audio_sender, audio_receiver) = mpsc::channel::<MediaData>();
let mut video_callback = video_callback;
let latency_controller = LatencyController::new();
@@ -1573,8 +1577,19 @@ where
}
log::info!("Video decoder loop exits");
});
+ let audio_sender = start_audio_thread(Some(latency_controller_cl));
+ return (video_sender, audio_sender);
+}
+
+/// Start an audio handling thread.
+/// Returns an audio [`MediaSender`].
+pub fn start_audio_thread(
+ latency_controller: Option<Arc<Mutex<LatencyController>>>,
+) -> MediaSender {
+ let latency_controller = latency_controller.unwrap_or(LatencyController::new());
+ let (audio_sender, audio_receiver) = mpsc::channel::<MediaData>();
std::thread::spawn(move || {
- let mut audio_handler = AudioHandler::new(latency_controller_cl);
+ let mut audio_handler = AudioHandler::new(latency_controller);
loop {
if let Ok(data) = audio_receiver.recv() {
match data {
@@ -1582,6 +1597,7 @@ where
audio_handler.handle_frame(af);
}
MediaData::AudioFormat(f) => {
+ log::debug!("recved audio format, sample rate={}", f.sample_rate);
audio_handler.handle_format(f);
}
_ => {}
@@ -1592,7 +1608,7 @@ where
}
log::info!("Audio decoder loop exits");
});
- return (video_sender, audio_sender);
+ audio_sender
}
/// Handle latency test.
@@ -1934,6 +1950,8 @@ pub enum Data {
RecordScreen(bool, i32, i32, String),
ElevateDirect,
ElevateWithLogon(String, String),
+ NewVoiceCall,
+ CloseVoiceCall,
}
/// Keycode for key events.
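
With the audio loop factored out of start_video_audio_threads, playback can be started with no video channel at all, which is what a voice call needs. A sketch of the intended use (whether callers also flip audio-only mode here is not shown in this hunk):

    use crate::client::{start_audio_thread, LatencyController, MediaSender};

    fn start_voice_playback() -> MediaSender {
        // Audio-only: no video timestamps will ever refresh the controller.
        let latency_controller = LatencyController::new();
        latency_controller.lock().unwrap().set_audio_only(true);
        // Spawn the decode/playback thread and keep the sending half.
        let audio_sender = start_audio_thread(Some(latency_controller));
        // The caller then forwards network data into the thread, e.g.:
        //   audio_sender.send(MediaData::AudioFormat(fmt)).ok();
        //   audio_sender.send(MediaData::AudioFrame(frame)).ok();
        audio_sender
    }
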
diff --git a/src/client/helper.rs b/src/client/helper.rs
index e4736c0e8..20acd811a 100644
--- a/src/client/helper.rs
+++ b/src/client/helper.rs
@@ -5,7 +5,7 @@ use std::{
use hbb_common::{
log,
- message_proto::{video_frame, VideoFrame},
+ message_proto::{video_frame, VideoFrame, Message, VoiceCallRequest, VoiceCallResponse}, get_time,
};
const MAX_LATENCY: i64 = 500;
@@ -18,6 +18,7 @@ pub struct LatencyController {
last_video_remote_ts: i64, // generated on remote device
update_time: Instant,
allow_audio: bool,
+ audio_only: bool
}
impl Default for LatencyController {
@@ -26,6 +27,7 @@ impl Default for LatencyController {
last_video_remote_ts: Default::default(),
update_time: Instant::now(),
allow_audio: Default::default(),
+ audio_only: false
}
}
}
@@ -36,6 +38,11 @@ impl LatencyController {
Arc::new(Mutex::new(LatencyController::default()))
}
+ /// Set whether this [LatencyController] should work in audio-only mode.
+ pub fn set_audio_only(&mut self, only: bool) {
+ self.audio_only = only;
+ }
+
/// Update the latency controller with the latest video timestamp.
pub fn update_video(&mut self, timestamp: i64) {
self.last_video_remote_ts = timestamp;
@@ -46,7 +53,11 @@ impl LatencyController {
pub fn check_audio(&mut self, timestamp: i64) -> bool {
// Compute audio latency.
let expected = self.update_time.elapsed().as_millis() as i64 + self.last_video_remote_ts;
- let latency = expected - timestamp;
+ let latency = if self.audio_only {
+ expected
+ } else {
+ expected - timestamp
+ };
// Set MAX and MIN, avoid fixing too frequently.
if self.allow_audio {
if latency.abs() > MAX_LATENCY {
@@ -59,6 +70,9 @@ impl LatencyController {
self.allow_audio = true;
}
}
+ // No video frame may have arrived to refresh the update time,
+ // so update it manually on every audio check.
+ self.update_time = Instant::now();
self.allow_audio
}
}
@@ -101,3 +115,24 @@ pub struct QualityStatus {
pub target_bitrate: Option<i32>,
pub codec_format: Option<CodecFormat>,
}
+
+#[inline]
+pub fn new_voice_call_request(is_connect: bool) -> Message {
+ let mut req = VoiceCallRequest::new();
+ req.is_connect = is_connect;
+ req.req_timestamp = get_time();
+ let mut msg = Message::new();
+ msg.set_voice_call_request(req);
+ msg
+}
+
+#[inline]
+pub fn new_voice_call_response(request_timestamp: i64, accepted: bool) -> Message {
+ let mut resp = VoiceCallResponse::new();
+ resp.accepted = accepted;
+ resp.req_timestamp = request_timestamp;
+ resp.ack_timestamp = get_time();
+ let mut msg = Message::new();
+ msg.set_voice_call_response(resp);
+ msg
+}
\ No newline at end of file
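
A worked example of check_audio: if the last video frame carried remote timestamp 10_000 and arrived 700 ms ago, then expected = 10_700, so an audio frame stamped 10_100 has latency 600 ms, above MAX_LATENCY (500). In audio-only mode last_video_remote_ts never advances, so latency collapses to the time since the previous check, which the manual update_time reset keeps near zero. An illustration under those assumptions:

    use std::sync::{Arc, Mutex};
    use crate::client::LatencyController;

    fn example(controller: Arc<Mutex<LatencyController>>) {
        // Video session: frame stamped 10_000 seen 700 ms ago =>
        // expected = 10_700, latency = 10_700 - 10_100 = 600 > MAX_LATENCY,
        // so this check reports that audio should be muted for now.
        let _allowed = controller.lock().unwrap().check_audio(10_100);

        // Voice call: the timestamp argument is effectively ignored and the
        // controller only watches the gap between successive checks.
        controller.lock().unwrap().set_audio_only(true);
        let _allowed = controller.lock().unwrap().check_audio(0);
    }
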
diff --git a/src/client/io_loop.rs b/src/client/io_loop.rs
index 0178fe9e8..f5792bce3 100644
--- a/src/client/io_loop.rs
+++ b/src/client/io_loop.rs
@@ -1,17 +1,10 @@
-use crate::client::{
- Client, CodecFormat, MediaData, MediaSender, QualityStatus, MILLI1, SEC30,
- SERVER_CLIPBOARD_ENABLED, SERVER_FILE_TRANSFER_ENABLED, SERVER_KEYBOARD_ENABLED,
-};
-use crate::common;
-#[cfg(not(any(target_os = "android", target_os = "ios")))]
-use crate::common::{check_clipboard, update_clipboard, ClipboardContext, CLIPBOARD_INTERVAL};
+use std::collections::HashMap;
+use std::num::NonZeroI64;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::{Arc, Mutex};
#[cfg(windows)]
use clipboard::{cliprdr::CliprdrClientContext, ContextSend};
-
-use crate::ui_session_interface::{InvokeUiSession, Session};
-use crate::{client::Data, client::Interface};
-
use hbb_common::config::{PeerConfig, TransferSerde};
use hbb_common::fs::{
can_enable_overwrite_detection, get_job, get_string, new_send_confirm, DigestCheckResult,
@@ -20,6 +13,7 @@ use hbb_common::fs::{
use hbb_common::message_proto::permission_info::Permission;
use hbb_common::protobuf::Message as _;
use hbb_common::rendezvous_proto::ConnType;
+use hbb_common::tokio::sync::mpsc::error::TryRecvError;
#[cfg(windows)]
use hbb_common::tokio::sync::Mutex as TokioMutex;
use hbb_common::tokio::{
@@ -27,12 +21,20 @@ use hbb_common::tokio::{
sync::mpsc,
time::{self, Duration, Instant, Interval},
};
-use hbb_common::{allow_err, message_proto::*, sleep};
+use hbb_common::{allow_err, get_time, message_proto::*, sleep};
use hbb_common::{fs, log, Stream};
-use std::collections::HashMap;
-use std::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::{Arc, Mutex};
+use crate::client::{
+ new_voice_call_request, Client, CodecFormat, LoginConfigHandler, MediaData, MediaSender,
+ QualityStatus, MILLI1, SEC30, SERVER_CLIPBOARD_ENABLED, SERVER_FILE_TRANSFER_ENABLED,
+ SERVER_KEYBOARD_ENABLED,
+};
+#[cfg(not(any(target_os = "android", target_os = "ios")))]
+use crate::common::{check_clipboard, update_clipboard, ClipboardContext, CLIPBOARD_INTERVAL};
+use crate::common::{get_default_sound_input, set_sound_input};
+use crate::ui_session_interface::{InvokeUiSession, Session};
+use crate::{audio_service, common, ConnInner, CLIENT_SERVER};
+use crate::{client::Data, client::Interface};
pub struct Remote {
handler: Session,
@@ -40,6 +42,9 @@ pub struct Remote {
audio_sender: MediaSender,
receiver: mpsc::UnboundedReceiver<Data>,
sender: mpsc::UnboundedSender<Data>,
+ // Stop sending local audio to remote client.
+ stop_voice_call_sender: Option<std::sync::mpsc::Sender<()>>,
+ voice_call_request_timestamp: Option<NonZeroI64>,
old_clipboard: Arc>,
read_jobs: Vec,
write_jobs: Vec,
@@ -81,6 +86,8 @@ impl Remote {
data_count: Arc::new(AtomicUsize::new(0)),
frame_count,
video_format: CodecFormat::Unknown,
+ stop_voice_call_sender: None,
+ voice_call_request_timestamp: None,
}
}
@@ -93,6 +100,7 @@ impl Remote {
} else {
ConnType::default()
};
+
match Client::start(
&self.handler.id,
key,
@@ -212,6 +220,10 @@ impl Remote {
}
}
log::debug!("Exit io_loop of id={}", self.handler.id);
+ // Stop client audio server.
+ if let Some(s) = self.stop_voice_call_sender.take() {
+ s.send(()).ok();
+ }
}
Err(err) => {
self.handler
@@ -253,6 +265,81 @@ impl Remote {
}
}
+ fn stop_voice_call(&mut self) {
+ let voice_call_sender = self.stop_voice_call_sender.take();
+ if let Some(stopper) = voice_call_sender {
+ let _ = stopper.send(());
+ }
+ }
+
+ // Start a voice call recorder: record local audio and send it to the remote peer.
+ fn start_voice_call(&mut self) -> Option<std::sync::mpsc::Sender<()>> {
+ if self.handler.is_file_transfer() || self.handler.is_port_forward() {
+ return None;
+ }
+ // Switch to default input device
+ let default_sound_device = get_default_sound_input();
+ if let Some(device) = default_sound_device {
+ set_sound_input(device);
+ }
+ // Create a stop channel; the loop exits on a signal or when the sender is dropped.
+ let (tx, rx) = std::sync::mpsc::channel();
+ let (tx_audio_data, mut rx_audio_data) = hbb_common::tokio::sync::mpsc::unbounded_channel();
+ // Create a stand-alone ConnInner for subscribing to the audio service.
+ let conn_id = CLIENT_SERVER.write().unwrap().get_new_id();
+ let client_conn_inner = ConnInner::new(conn_id.clone(), Some(tx_audio_data), None);
+ // now we subscribe
+ CLIENT_SERVER.write().unwrap().subscribe(
+ audio_service::NAME,
+ client_conn_inner.clone(),
+ true,
+ );
+ let tx_audio = self.sender.clone();
+ std::thread::spawn(move || {
+ loop {
+ // check if client is closed
+ match rx.try_recv() {
+ Ok(_) | Err(std::sync::mpsc::TryRecvError::Disconnected) => {
+ log::debug!("Exit voice call audio service of client");
+ // unsubscribe
+ CLIENT_SERVER.write().unwrap().subscribe(
+ audio_service::NAME,
+ client_conn_inner,
+ false,
+ );
+ break;
+ }
+ _ => {}
+ }
+ match rx_audio_data.try_recv() {
+ Ok((_instant, msg)) => match &msg.union {
+ Some(message::Union::AudioFrame(frame)) => {
+ let mut msg = Message::new();
+ msg.set_audio_frame(frame.clone());
+ tx_audio.send(Data::Message(msg)).ok();
+ log::debug!("send audio frame {}", frame.timestamp);
+ }
+ Some(message::Union::Misc(misc)) => {
+ let mut msg = Message::new();
+ msg.set_misc(misc.clone());
+ tx_audio.send(Data::Message(msg)).ok();
+ log::debug!("send audio misc {:?}", misc.audio_format());
+ }
+ _ => {}
+ },
+ Err(err) => {
+ if err == TryRecvError::Empty {
+ // ignore
+ } else {
+ log::debug!("Failed to record local audio channel: {}", err);
+ }
+ }
+ }
+ }
+ });
+ Some(tx)
+ }
+
fn start_clipboard(&mut self) -> Option<std::sync::mpsc::Sender<()>> {
if self.handler.is_file_transfer() || self.handler.is_port_forward() {
return None;
@@ -654,6 +741,22 @@ impl Remote {
msg.set_misc(misc);
allow_err!(peer.send(&msg).await);
}
+ Data::NewVoiceCall => {
+ let msg = new_voice_call_request(true);
+ // Save the voice call request timestamp for later validation of the response.
+ self.voice_call_request_timestamp = Some(
+ NonZeroI64::new(msg.voice_call_request().req_timestamp)
+ .unwrap_or(NonZeroI64::new(get_time()).unwrap()),
+ );
+ allow_err!(peer.send(&msg).await);
+ self.handler.on_voice_call_waiting();
+ }
+ Data::CloseVoiceCall => {
+ self.stop_voice_call();
+ let msg = new_voice_call_request(false);
+ self.handler.on_voice_call_closed("Closed manually by the peer");
+ allow_err!(peer.send(&msg).await);
+ }
_ => {}
}
true
@@ -1146,6 +1249,34 @@ impl Remote {
self.handler
.msgbox(&msgbox.msgtype, &msgbox.title, &msgbox.text, &link);
}
+ Some(message::Union::VoiceCallRequest(request)) => {
+ if request.is_connect {
+ // TODO: Voice calls initiated by the peer may be supported in the future.
+ } else {
+ log::debug!("The remote has requested to close the voice call");
+ if let Some(sender) = self.stop_voice_call_sender.take() {
+ allow_err!(sender.send(()));
+ self.handler.on_voice_call_closed("");
+ }
+ }
+ }
+ Some(message::Union::VoiceCallResponse(response)) => {
+ let ts = self.voice_call_request_timestamp.take();
+ if let Some(ts) = ts {
+ if response.req_timestamp != ts.get() {
+ log::debug!("Possible encountering a voice call attack.");
+ } else {
+ if response.accepted {
+ // The peer accepted the voice call.
+ self.handler.on_voice_call_started();
+ self.stop_voice_call_sender = self.start_voice_call();
+ } else {
+ // The peer refused the voice call.
+ self.handler.on_voice_call_closed("");
+ }
+ }
+ }
+ }
_ => {}
}
}
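
start_voice_call couples two channels: a tokio unbounded channel fed by the audio service, and a std::sync::mpsc stop channel whose Sender is kept in stop_voice_call_sender; an explicit send(()) or simply dropping that sender ends the forwarding thread. The stop pattern in isolation (a runnable sketch, not the patch's exact code):

    use std::sync::mpsc::{channel, Sender, TryRecvError};
    use std::{thread, time::Duration};

    fn spawn_forwarder() -> Sender<()> {
        let (stop_tx, stop_rx) = channel::<()>();
        thread::spawn(move || loop {
            match stop_rx.try_recv() {
                // Explicit stop, or the stopper was dropped: exit the loop.
                Ok(_) | Err(TryRecvError::Disconnected) => break,
                Err(TryRecvError::Empty) => {
                    // ... drain the audio channel and forward frames here ...
                    thread::sleep(Duration::from_millis(10));
                }
            }
        });
        stop_tx
    }

    fn main() {
        let stopper = spawn_forwarder();
        thread::sleep(Duration::from_millis(50));
        stopper.send(()).ok(); // same effect as the Data::CloseVoiceCall path
    }
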
diff --git a/src/common.rs b/src/common.rs
index c2d5a81f0..3e6409c53 100644
--- a/src/common.rs
+++ b/src/common.rs
@@ -30,6 +30,8 @@ use hbb_common::{
// #[cfg(any(target_os = "android", target_os = "ios", feature = "cli"))]
use hbb_common::{config::RENDEZVOUS_PORT, futures::future::join_all};
+use crate::ui_interface::{set_option, get_option};
+
pub type NotifyMessageBox = fn(String, String, String, String) -> dyn Future