
Add manual audio codec selection; modify the SDP sent to ZLM to test different encodings

kindring committed 2 years ago
commit 954352fa4f

+ 39 - 0
src/main/java/com/genersoft/iot/vmp/gb28181/GBStore.java

@@ -0,0 +1,39 @@
+package com.genersoft.iot.vmp.gb28181;
+
+import com.genersoft.iot.vmp.gb28181.bean.BroadcastItem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentHashMap;
+
+@Component
+public class GBStore {
+    private final static Logger logger = LoggerFactory.getLogger(GBStore.class);
+
+    private Map<String, BroadcastItem> store = new ConcurrentHashMap<>();
+
+    public void addBroadcastStore(String key,BroadcastItem broadcastItem){
+        store.computeIfAbsent(
+                key,
+                k -> broadcastItem
+        );
+    }
+    public BroadcastItem queryBroadcastStore(String key){
+        BroadcastItem broadcastItem = store.get(key);
+        if(broadcastItem == null){
+            logger.error("无法找到对应的broadcast");
+            return null;
+        }
+        // 移除broadcast信息
+        delBroadcastStore(key);
+        return broadcastItem;
+    }
+
+    public void delBroadcastStore(String key){
+        // 直接移除对应key的broadcast信息
+        store.remove(key);
+    }
+}
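
For context, a minimal usage sketch of the store added above (hypothetical calling code, not part of this commit): PlayController keys entries as "broadcast_" + deviceId, addBroadcastStore uses computeIfAbsent so an existing entry is never overwritten, and queryBroadcastStore hands the item back exactly once, removing it from the map.

import com.genersoft.iot.vmp.gb28181.GBStore;
import com.genersoft.iot.vmp.gb28181.bean.BroadcastItem;

public class GBStoreUsageSketch {
    public static void main(String[] args) {
        // In the project GBStore is a Spring @Component and would be injected, not constructed.
        GBStore gbStore = new GBStore();
        String key = "broadcast_" + "34020000001320000001"; // key convention used in PlayController

        BroadcastItem item = new BroadcastItem();             // assumes the usual no-arg bean constructor
        gbStore.addBroadcastStore(key, item);                 // stores invite/request state for the device
        gbStore.addBroadcastStore(key, new BroadcastItem());  // ignored: computeIfAbsent keeps the first entry

        BroadcastItem first = gbStore.queryBroadcastStore(key);  // returns item and removes it
        BroadcastItem second = gbStore.queryBroadcastStore(key); // null: entries are consumed once
    }
}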

+ 11 - 0
src/main/java/com/genersoft/iot/vmp/gb28181/bean/Device.java

@@ -191,6 +191,9 @@ public class Device {
 	@Schema(description = "是否作为消息通道")
 	private boolean asMessageChannel;
 
+	@Schema(description = "设备支持的音频编码集合")
+	private String audioEncodePt;
+
 
 	public String getDeviceId() {
 		return deviceId;
@@ -439,4 +442,12 @@ public class Device {
 	public void setAsMessageChannel(boolean asMessageChannel) {
 		this.asMessageChannel = asMessageChannel;
 	}
+
+	public String getAudioEncodePt() {
+		return audioEncodePt;
+	}
+
+	public void setAudioEncodePt(String audioEncodePt) {
+		this.audioEncodePt = audioEncodePt;
+	}
 }
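
The new audioEncodePt field stores the talkback audio payload types allowed for the device as a comma-separated string (e.g. "8,0" for PCMA and PCMU); deviceEdit.vue joins the selected values before saving and microphone.vue splits them again when pushing. Note that the DeviceMapper changes below read and write this column, so the device table needs matching audioEncodePt (and mediaServerId) columns; no SQL migration is included in this commit. A small illustrative parser for the format (not part of the commit, fallback values are an assumption):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class AudioEncodePtSketch {
    // Parses the comma-separated payload-type list stored in Device.audioEncodePt.
    static List<Integer> parse(String audioEncodePt) {
        if (audioEncodePt == null || audioEncodePt.isEmpty()) {
            // assumed fallback: every codec the web client knows about (PCMU, PCMA, G722, CN)
            return Arrays.asList(0, 8, 9, 13);
        }
        return Arrays.stream(audioEncodePt.split(","))
                .map(String::trim)
                .map(Integer::parseInt)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(parse("8,0")); // [8, 0] -> PCMA, PCMU
        System.out.println(parse(null));  // [0, 8, 9, 13]
    }
}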

+ 2 - 0
src/main/java/com/genersoft/iot/vmp/gb28181/transmit/cmd/impl/SIPCommander.java

@@ -744,6 +744,8 @@ public class SIPCommander implements ISIPCommander {
         sipSender.transmitRequest(sipLayer.getLocalIp(device.getLocalIp()), byteRequest, null, okEvent);
     }
 
+
+
     /**
      * 语音广播
      *

+ 8 - 8
src/main/java/com/genersoft/iot/vmp/media/zlm/ZLMRTPServerFactory.java

@@ -391,14 +391,14 @@ public class ZLMRTPServerFactory {
      * @return
      */
     public Map<String, Object> createStartSendRtpStreamAudioData(MediaServerItem serverItem,String app, String streamId,String recv_stream_id,String ssrc,String dst_url,int dst_port){
-        String sendRtpPortRange = serverItem.getSendRtpPortRange();
-        if (ObjectUtils.isEmpty(sendRtpPortRange)) {
-            logger.error("无法随机获取rtp端口");
-//            return null;
-            sendRtpPortRange="30000,35000";
-        }
-
-        String[] portRangeStrArray = sendRtpPortRange.split(",");
+//        String sendRtpPortRange = serverItem.getSendRtpPortRange();
+//        if (ObjectUtils.isEmpty(sendRtpPortRange)) {
+//            logger.error("无法随机获取rtp端口");
+////            return null;
+//            sendRtpPortRange="30000,35000";
+//        }
+//
+//        String[] portRangeStrArray = sendRtpPortRange.split(",");
         int localPort = -1;
 //        if (portRangeStrArray.length != 2) {
 //            localPort = getFreePort(serverItem, 30000, 30500, null);

+ 12 - 2
src/main/java/com/genersoft/iot/vmp/storager/dao/DeviceMapper.java

@@ -43,6 +43,7 @@ public interface DeviceMapper {
             "geoCoordSys," +
             "treeType," +
             "mediaServerId," +
+            "audioEncodePt," +
             "online" +
             " FROM device WHERE deviceId = #{deviceId}")
     Device getDeviceByDeviceId(String deviceId);
@@ -125,6 +126,8 @@ public interface DeviceMapper {
                 "<if test=\"keepaliveTime != null\">, keepaliveTime=#{keepaliveTime}</if>" +
                 "<if test=\"keepaliveIntervalTime != null\">, keepaliveIntervalTime=#{keepaliveIntervalTime}</if>" +
                 "<if test=\"expires != null\">, expires=#{expires}</if>" +
+                "<if test=\"mediaServerId != null\">, mediaServerId=#{mediaServerId}</if>" +
+                "<if test=\"audioEncodePt != null\">, audioEncodePt=#{audioEncodePt}</if>" +
                 "WHERE deviceId=#{deviceId}"+
             " </script>"})
     int update(Device device);
@@ -160,6 +163,8 @@ public interface DeviceMapper {
             "geoCoordSys," +
             "treeType," +
             "online," +
+            "audioEncodePt," +
+            "mediaServerId," +
             "(SELECT count(0) FROM device_channel WHERE deviceId=de.deviceId) as channelCount  FROM device de" +
             "<if test=\"online != null\"> where online=${online}</if>"+
             " </script>"
@@ -200,7 +205,9 @@ public interface DeviceMapper {
             "asMessageChannel," +
             "geoCoordSys," +
             "treeType," +
-            "online " +
+            "online," +
+            "mediaServerId," +
+            "audioEncodePt" +
             " FROM device WHERE online = 1")
     List<Device> getOnlineDevices();
     @Select("SELECT " +
@@ -231,7 +238,9 @@ public interface DeviceMapper {
             "asMessageChannel," +
             "geoCoordSys," +
             "treeType," +
-            "online" +
+            "online," +
+            "mediaServerId," +
+            "audioEncodePt" +
             " FROM device WHERE ip = #{host} AND port=#{port}")
     Device getDeviceByHostAndPort(String host, int port);
 
@@ -254,6 +263,7 @@ public interface DeviceMapper {
             "<if test=\"geoCoordSys != null\">, geoCoordSys=#{geoCoordSys}</if>" +
             "<if test=\"treeType != null\">, treeType=#{treeType}</if>" +
             "<if test=\"mediaServerId != null\">, mediaServerId=#{mediaServerId}</if>" +
+            "<if test=\"audioEncodePt != null\">, audioEncodePt=#{audioEncodePt}</if>" +
             "WHERE deviceId=#{deviceId}"+
             " </script>"})
     int updateCustom(Device device);

+ 4 - 4
src/main/java/com/genersoft/iot/vmp/vmanager/gb28181/device/DeviceQuery.java

@@ -455,10 +455,10 @@ public class DeviceQuery {
 	 * @return
 	 */
 	@Operation(summary = "更新设备信息")
-	@Parameter(name = "device", description = "设备", required = true)
-	@PostMapping("/device/update/")
-	public void updateDevice(Device device){
-
+//	@RequestBody(name = "device", description = "设备", required = true)
+	@PostMapping("/device/update")
+	public void updateDevice(@RequestBody Device device){
+		logger.info("更新设备信息 audioEncodePt:{}",device.getAudioEncodePt());
 		if (device != null && device.getDeviceId() != null) {
 			deviceService.updateCustomDevice(device);
 		}
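
Since updateDevice now takes the device as a JSON request body and the trailing slash was dropped from the mapping, the call the frontend makes looks roughly like the sketch below (hypothetical host/port, authentication omitted; the /api/device/query prefix is taken from the deviceEdit.vue change further down):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UpdateDeviceSketch {
    public static void main(String[] args) throws Exception {
        // audioEncodePt carries the comma-separated payload types selected in the edit dialog.
        String json = "{\"deviceId\":\"34020000001320000001\",\"audioEncodePt\":\"8,0\"}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://127.0.0.1:18080/api/device/query/device/update"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}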

+ 173 - 114
src/main/java/com/genersoft/iot/vmp/vmanager/gb28181/play/PlayController.java

@@ -10,6 +10,7 @@ import com.genersoft.iot.vmp.conf.security.SecurityUtils;
 import com.genersoft.iot.vmp.conf.security.dto.LoginUser;
 import com.genersoft.iot.vmp.gb28181.GBEventSubscribe;
 import com.genersoft.iot.vmp.gb28181.GBHookSubscribeFactory;
+import com.genersoft.iot.vmp.gb28181.GBStore;
 import com.genersoft.iot.vmp.gb28181.HookSubscribeForKey;
 import com.genersoft.iot.vmp.gb28181.bean.BroadcastItem;
 import com.genersoft.iot.vmp.gb28181.bean.Device;
@@ -92,6 +93,9 @@ public class PlayController {
 	@Autowired
 	private IUserService userService;
 
+	@Autowired
+	private GBStore gbStore;
+
 	@Operation(summary = "开始点播")
 	@Parameter(name = "deviceId", description = "设备国标编号", required = true)
 	@Parameter(name = "channelId", description = "通道国标编号", required = true)
@@ -352,132 +356,129 @@ public class PlayController {
 	}
 
 	/**
-	 * 开始语音广播,获取设备invite,音频编码协商 步骤1
+	 * 开始语音广播
 	 * @param deviceId
 	 * @param channelId
 	 * @return
 	 */
-	@Operation(summary = "开始语音广播")
+	@Operation(summary = "开始语音广播,获取参数")
 	@Parameter(name = "deviceId", description = "设备国标编号", required = true)
 	@Parameter(name = "channelId", description = "设备国标编号", required = true)
 	@Parameter(name = "waitTime", description = "设备国标编号", required = false)
 	@GetMapping("/startBroadcast")
-	public DeferredResult<WVPResult<String>> startBroadcast(@RequestParam String deviceId,
+	public WVPResult startBroadcast(@RequestParam String deviceId,
 								   @RequestParam("channelId") String channelId,
 									@RequestParam(value = "waitTime",
 											required = false,
 											defaultValue = "5000") int waitTimeStr			   ) {
 
-		RequestMessage msg = new RequestMessage();
-		String key  = DeferredResultHolder.CALLBACK_CMD_BROADCAST + deviceId;
-		msg.setKey(key);
-		String uuid = UUID.randomUUID().toString();
-		msg.setId(uuid);
-		DeferredResult<WVPResult<String>> result = new DeferredResult<>(10*1000l);
 		WVPResult wvpResult = new WVPResult();
-		resultHolder.put(key, uuid, result);
 
+		Device device = storager.queryVideoDevice(deviceId);
+		MediaServerItem mediaServerItem = playService.getNewMediaServerItem(device);
 		String app = "audio";
 		String stream = "rtc_" + deviceId + "_" + channelId;
 		String type = "push";
-//		if(waitTimeStr.isEmpty() || waitTimeStr==""){
-//			waitTimeStr = "5000";
-//		}
-		int waitTime = waitTimeStr;
+		Map<String, Object> resultData = new HashMap<>(16);
+		LoginUser userInfo = SecurityUtils.getUserInfo();
+		String sign = Md5Utils.hash(userService.getUserByUsername(userInfo.getUsername()).getPushKey()); //获取推流鉴权密钥
 
 		//首先判断设备是否正在对讲
 		if (redisCatchStorage.isBroadcastItem(deviceId)) {
 			// 设备正在进行语音对讲
 			wvpResult.setCode(ErrorCode.ERROR_Device_Busy.getCode());
 			wvpResult.setMsg(ErrorCode.ERROR_Device_Busy.getMsg());
-			msg.setData(wvpResult);
-			resultHolder.invokeAllResult(msg);
-			return result;
+			return wvpResult;
 		}
 
-		Device device = storager.queryVideoDevice(deviceId);
-		MediaServerItem mediaServerItem = playService.getNewMediaServerItem(device);
-
 		if (mediaServerItem == null) {
 			logger.error("流媒体未找到");
 			wvpResult.setCode(ErrorCode.ERR_MEDIA.getCode());
 			wvpResult.setMsg(ErrorCode.ERR_MEDIA.getMsg());
-			msg.setData(wvpResult);
-			resultHolder.invokeAllResult(msg);
-			return result;
+			return wvpResult;
 		}
-		Map<String, Object> resultData = new HashMap<>(16);
-		LoginUser userInfo = SecurityUtils.getUserInfo();
-		String sign = Md5Utils.hash(userService.getUserByUsername(userInfo.getUsername()).getPushKey()); //获取推流鉴权密钥
+
+		// 在确认流媒体可用后再拼接webrtc推流地址,避免空指针
+		String webRtcPushUrl = String.format("https://%s:%s/index/api/webrtc?app=%s&stream=%s&type=%s&sign=%s", mediaServerItem.getIp(), mediaServerItem.getHttpSSlPort(), app, stream, type, sign);
+		resultData.put("mediaId",mediaServerItem.getId());
+		resultData.put("app",app);
+		resultData.put("stream",stream);
+		resultData.put("type",type);
+		resultData.put("sign",sign);
+		resultData.put("webRtcPushUrl", webRtcPushUrl);
+		resultData.put("audioEncodePt",device.getAudioEncodePt());
+		wvpResult.setCode(ErrorCode.SUCCESS.getCode());
+		wvpResult.setMsg(ErrorCode.SUCCESS.getMsg());
+		wvpResult.setData(resultData);
+		return wvpResult;
 		//示例 https://192.168.126.111:9443/index/api/webrtc?app=live&stream=test&type=play&sign=...
-		String webRtcPushUrl = String.format("https://%s:%s/index/api/webrtc?app=%s&stream=%s&type=%s&sign=%s", mediaServerItem.getIp(), mediaServerItem.getHttpSSlPort(), app, stream, type,sign);
 		// 下发broadcast给设备
-		playService.openBroadcast(
-				mediaServerItem,
-				device,
-				waitTime,
-				(int code, JSONObject json, SIPRequest request)->{
-					// 0 ok,1 超时,2 异常
-					// invite sdp , message data
-					// request , null
-					if(code == 1){
-						logger.warn("invite超时");
-						wvpResult.setCode(ErrorCode.ERR_TIMEOUT.getCode());
-						wvpResult.setMsg(ErrorCode.ERR_TIMEOUT.getMsg());
-					} else if (code == 2) {
-						wvpResult.setCode(ErrorCode.ERROR100.getCode());
-						wvpResult.setMsg((String) json.get("msg"));
-					} else if (code == 0) {
-						logger.info("收到设备invite信息: {}",json);
-
-						BroadcastItem broadcastItem = new BroadcastItem();
-						broadcastItem.setMediaId(mediaServerItem.getId());
-						broadcastItem.setDeviceId(deviceId);
-						broadcastItem.setApp(app);
-						broadcastItem.setStream(stream);
-						broadcastItem.setIpcIp((String) json.get("addr"));
-						broadcastItem.setIpcAudioPort((Integer) json.get("port"));
-						broadcastItem.setSsrc((String) json.get("ssrc"));
-						broadcastItem.setRequest(request);
-						broadcastItem.setAudioFormats((Vector) json.get("audioFormats"));
-						// 获取id
-						resultData.put("mediaId",mediaServerItem.getId());
-						resultData.put("app",app);
-						resultData.put("stream",stream);
-						resultData.put("type",type);
-						resultData.put("sign",sign);
-						resultData.put("webRtcPushUrl", webRtcPushUrl);
-						resultData.put("audioFormats",json.get("audioFormats"));
-						logger.info("获取webrtc推流地址:{}",webRtcPushUrl);
-						//存储invite信息和request信息至redis中
-						if(redisCatchStorage.addBroadcastItem(
-								deviceId,
-								broadcastItem
-						)){
-							logger.info("语音对讲信息存储成功");
-						}else{
-							logger.warn("无法存储数据至zlm");
-						}
-						//设置过期时间
-						wvpResult.setCode(ErrorCode.SUCCESS.getCode());
-						wvpResult.setMsg(ErrorCode.SUCCESS.getMsg());
-						wvpResult.setData(resultData);
-					}
-					msg.setData(wvpResult);
-					resultHolder.invokeAllResult(msg);
-//					return result;
-				}
-		);
-		return result;
+//		playService.openBroadcast(
+//				mediaServerItem,
+//				device,
+//				waitTime,
+//				(int code, JSONObject json, SIPRequest request)->{
+//					// 0 ok,1 超时,2 异常
+//					// invite sdp , message data
+//					// request , null
+//					if(code == 1){
+//						logger.warn("invite超时");
+//						wvpResult.setCode(ErrorCode.ERR_TIMEOUT.getCode());
+//						wvpResult.setMsg(ErrorCode.ERR_TIMEOUT.getMsg());
+//					} else if (code == 2) {
+//						wvpResult.setCode(ErrorCode.ERROR100.getCode());
+//						wvpResult.setMsg((String) json.get("msg"));
+//					} else if (code == 0) {
+//						logger.info("收到设备invite信息: {}",json);
+//
+//						BroadcastItem broadcastItem = new BroadcastItem();
+//						broadcastItem.setMediaId(mediaServerItem.getId());
+//						broadcastItem.setDeviceId(deviceId);
+//						broadcastItem.setApp(app);
+//						broadcastItem.setStream(stream);
+//						broadcastItem.setIpcIp((String) json.get("addr"));
+//						broadcastItem.setIpcAudioPort((Integer) json.get("port"));
+//						broadcastItem.setSsrc((String) json.get("ssrc"));
+//						broadcastItem.setRequest(request);
+//						broadcastItem.setAudioFormats((Vector) json.get("audioFormats"));
+//						// 获取id
+//
+//						//存储invite信息和request信息至redis中
+//						gbStore.addBroadcastStore(
+//								"broadcast_"+deviceId,
+//								broadcastItem
+//						);
+////						if(redisCatchStorage.addBroadcastItem(
+////								deviceId,
+////								broadcastItem
+////						)){
+////							logger.info("语音对讲信息存储成功");
+////						}else{
+////							logger.warn("无法存储数据至zlm");
+////						}
+//						//设置过期时间
+//
+//					}
+//					msg.setData(wvpResult);
+//					resultHolder.invokeAllResult(msg);
+////					return result;
+//				}
+//		);
+//		return result;
 	}
 
 	@Operation(summary = "开始建立语音广播连接")
 	@Parameter(name = "deviceId", description = "设备国标编号", required = true)
-	@Parameter(name = "audioCoded", description = "音频编码信息", required = false)
+	@Parameter(name = "app", description = "流的app名称")
+	@Parameter(name = "stream", description = "流名称")
+	@Parameter(name = "waitTime", description = "invite等待时长", required = false)
 	@GetMapping("/broadcast")
 	public DeferredResult<WVPResult<String>> broadcast(
 			@RequestParam("deviceId") String deviceId,
-			@RequestParam(value = "audioCoded",required = false) String audioCoded
+			@RequestParam(value = "waitTime",
+					required = false,
+					defaultValue = "5000") int waitTime,
+			@RequestParam(value = "app") String app,
+			@RequestParam(value = "stream") String stream
 	){
 		logger.info("[语音对讲] web端已经开启推流");
 		RequestMessage msg = new RequestMessage();
@@ -491,16 +492,6 @@ public class PlayController {
 		Map<String, Object> resultData = new HashMap<>(16);
 		resultHolder.put(key, uuid, result);
 
-
-		// 从redis中拉取语音对讲数据
-		BroadcastItem broadcastItem = redisCatchStorage.queryBroadcastItem(deviceId);
-		if(broadcastItem == null){
-			logger.info("[语音对讲 invite] invite交互超时");
-			wvpResult.setCode(ErrorCode.ERROR404.getCode());
-			wvpResult.setMsg("无法找到设备invite信息,可能是交互超时");
-			msg.setData(wvpResult);
-			resultHolder.invokeAllResult(msg);
-		}
 		// 检查设备是否存在
 		Device device = storager.queryVideoDevice(deviceId);
 		if (device == null){
@@ -510,6 +501,7 @@ public class PlayController {
 			msg.setData(wvpResult);
 			resultHolder.invokeAllResult(msg);
 		}
+
 		// 获取对应的媒体服务
 		MediaServerItem mediaServerItem = playService.getNewMediaServerItem(device);
 		logger.info("[语言广播] 分配的流媒体服务器为 {}",mediaServerItem.getId());
@@ -520,15 +512,16 @@ public class PlayController {
 			msg.setData(wvpResult);
 			resultHolder.invokeAllResult(msg);
 		}else{
-			playService.broadcast(
+			playService.openBroadcast(
 				mediaServerItem,
 				device,
-				broadcastItem,
-				5000,
+				waitTime,
 				(int code, JSONObject json, SIPRequest request)->{
-					// todo 处理并返回语音广播请求,应该有一个ack
+					// 0 ok,1 超时,2 异常
+					// invite sdp , message data
+					// request , null
 					if(code == 1){
-						logger.warn("等待设备ack超时");
+						logger.warn("invite超时");
 						wvpResult.setCode(ErrorCode.ERR_TIMEOUT.getCode());
 						wvpResult.setMsg(ErrorCode.ERR_TIMEOUT.getMsg());
 					} else if (code == 2) {
@@ -536,24 +529,66 @@ public class PlayController {
 						wvpResult.setMsg((String) json.get("msg"));
 					} else if (code == 0) {
 						logger.info("收到设备invite信息: {}",json);
+
+						BroadcastItem broadcastItem = new BroadcastItem();
+						broadcastItem.setMediaId(mediaServerItem.getId());
+						broadcastItem.setDeviceId(deviceId);
+						broadcastItem.setApp(app);
+						broadcastItem.setStream(stream);
+						broadcastItem.setIpcIp((String) json.get("addr"));
+						broadcastItem.setIpcAudioPort((Integer) json.get("port"));
+						broadcastItem.setSsrc((String) json.get("ssrc"));
+						broadcastItem.setRequest(request);
+						broadcastItem.setAudioFormats((Vector) json.get("audioFormats"));
 						// 获取id
-						resultData.put("mediaId",mediaServerItem.getId());
-//						resultData.put("app",app);
-//						resultData.put("stream",stream);
-//						resultData.put("type",type);
-//						resultData.put("sign",sign);
-//						resultData.put("webRtcPushUrl", webRtcPushUrl);
-//						resultData.put("audioFormats",json.get("audioFormats"));
-//						logger.info("获取webrtc推流地址:{}",webRtcPushUrl);
-
-						wvpResult.setCode(ErrorCode.SUCCESS.getCode());
-						wvpResult.setMsg(ErrorCode.SUCCESS.getMsg());
-						wvpResult.setData(resultData);
+						playService.broadcast(
+								mediaServerItem,
+								device,
+								broadcastItem,
+								5000,
+								(int _code, JSONObject _json, SIPRequest _request)->{
+									// todo 处理并返回语音广播请求,应该有一个ack
+									if(_code == 1){
+										logger.warn("等待设备ack超时");
+										wvpResult.setCode(ErrorCode.ERR_TIMEOUT.getCode());
+										wvpResult.setMsg(ErrorCode.ERR_TIMEOUT.getMsg());
+									} else if (_code == 2) {
+										wvpResult.setCode(ErrorCode.ERROR100.getCode());
+										wvpResult.setMsg((String) _json.get("msg"));
+									} else if (_code == 0) {
+										logger.info("收到设备invite信息 开始建立连接: {}",_json);
+										// 获取id
+										resultData.put("mediaId",mediaServerItem.getId());
+										wvpResult.setCode(ErrorCode.SUCCESS.getCode());
+										wvpResult.setMsg(ErrorCode.SUCCESS.getMsg());
+										wvpResult.setData(resultData);
+									}
+									msg.setData(wvpResult);
+									resultHolder.invokeAllResult(msg);
+								}
+						);
+						//存储invite信息和request信息至redis中
+						gbStore.addBroadcastStore(
+								"broadcast_"+deviceId,
+								broadcastItem
+						);
+//						if(redisCatchStorage.addBroadcastItem(
+//								deviceId,
+//								broadcastItem
+//						)){
+//							logger.info("语音对讲信息存储成功");
+//						}else{
+//							logger.warn("无法存储数据至zlm");
+//						}
+						//设置过期时间
+
 					}
 					msg.setData(wvpResult);
 					resultHolder.invokeAllResult(msg);
+//					return result;
 				}
-			);
+		);
+
 		}
 		// 获取zlm推流端口
 		// todo 回复 invite 200 给设备
@@ -561,6 +596,30 @@ public class PlayController {
 		return result;
 	}
 
+	@Operation(summary = "停止语音广播")
+	@Parameter(name = "deviceId", description = "设备国标编号", required = true)
+	@Parameter(name = "channelId", description = "设备通道编号", required = true)
+	@GetMapping("/stopBroadcast")
+	public WVPResult stopBroadcast(@RequestParam("deviceId") String deviceId,@RequestParam("channelId") String channelId){
+		BroadcastItem broadcastItem = gbStore.queryBroadcastStore(
+				"broadcast_"+deviceId
+		);
+		WVPResult wvpResult = new WVPResult();
+		if (broadcastItem == null) {
+			wvpResult.setCode(ErrorCode.ERROR404.getCode());
+			wvpResult.setMsg("无法找到设备的语音广播信息");
+			return wvpResult;
+		}
+		Device device = storager.queryVideoDevice(deviceId);
+		// 停止音频流,给设备发送bye
+		try {
+			logger.warn("[停止语音广播] {}/{}", device.getDeviceId(), channelId);
+			cmder.streamByeCmd(device, channelId, broadcastItem.getStream(), null, null);
+			wvpResult.setCode(ErrorCode.SUCCESS.getCode());
+		} catch (InvalidArgumentException | SipException | ParseException | SsrcTransactionNotFoundException e) {
+			logger.error("[命令发送失败] 停止语音广播, 发送BYE: {}", e.getMessage());
+			wvpResult.setCode(ErrorCode.ERROR100.getCode());
+			wvpResult.setMsg("[命令发送失败] 停止语音广播, 发送BYE: " + e.getMessage());
+//			throw new ControllerException(ErrorCode.ERROR100.getCode(), "命令发送失败: " + e.getMessage());
+		}
+		return wvpResult;
+	}
+
 
 	@Operation(summary = "获取所有的ssrc")
 	@GetMapping("/ssrc")

+ 54 - 7
web_src/src/assets/ZLMRTCClient.js

@@ -15,6 +15,24 @@ let ZLMRTCClient = (function (exports) {
 	  CAPTURE_STREAM_FAILED: 'CAPTURE_STREAM_FAILED'
 	};
 
+  const audioEncoding = {
+    0:{
+      name: "PCMU",
+      replaceStr:"a=rtpmap:0 PCMU/8000\n"
+    },
+    8:{
+      name: "PCMA",
+      replaceStr:"a=rtpmap:8 PCMA/8000\n"
+    },
+    9:{
+      name: "G722",
+      replaceStr:"a=rtpmap:9 G722/8000\n"
+    },
+    13:{
+      name: "CN",
+      replaceStr:"a=rtpmap:13 CN/8000\n"
+    }
+  }
 	const VERSION$1 = '1.0.1';
 	const BUILD_DATE = 'Mon Jul 04 2022 19:50:55 GMT+0800 (China Standard Time)';
 
@@ -7839,6 +7857,7 @@ let ZLMRTCClient = (function (exports) {
 
 	var axios = axios_1;
 
+
 	class RTCEndpoint extends Event$1 {
 	  constructor(options) {
 	    super('RTCPusherPlayer');
@@ -7858,10 +7877,12 @@ let ZLMRTCClient = (function (exports) {
 	        w: 0,
 	        h: 0
 	      },
+        enableCodings: [0,8,9,13],
 	      usedatachannel: false
 	    };
+      console.log(options);
 	    this.options = Object.assign({}, defaults, options);
-
+      console.log(this.options );
 	    if (this.options.debug) {
 	      setLogger();
 	    }
@@ -7918,10 +7939,8 @@ let ZLMRTCClient = (function (exports) {
 	    this.pc.createOffer().then(desc => {
 	      log(this.TAG, 'offer:', desc.sdp);
         log(this.TAG, desc);
-        let sdp = desc.sdp;
-        sdp = sdp.replace("a=rtpmap:9 G722/8000\n","");
-        sdp = sdp.replace("a=rtpmap:0 PCMU/8000\n","");
-        sdp = sdp.replace("a=rtpmap:13 CN/8000\n","");
+        let sdp = this.reWriteAudioCodingSdp(desc.sdp);
+
         // sdp = sdp.replace("a=rtpmap:9 G722/8000","");
         log(this.Tag,"testSdp:",sdp)
 	      this.pc.setLocalDescription(desc).then(() => {
@@ -8038,12 +8057,15 @@ let ZLMRTCClient = (function (exports) {
 
 	      this.pc.createOffer().then(desc => {
 	        log(this.TAG, 'offer:', desc.sdp);
-	        this.pc.setLocalDescription(desc).then(() => {
+          let sdp = this.reWriteAudioCodingSdp(desc.sdp);
+          log(this.TAG, 'new offer                            :', sdp);
+
+          this.pc.setLocalDescription(desc).then(() => {
 	          axios({
 	            method: 'post',
 	            url: this.options.zlmsdpUrl,
 	            responseType: 'json',
-	            data: desc.sdp,
+	            data: sdp,
 	            headers: {
 	              'Content-Type': 'text/plain;charset=utf-8'
 	            }
@@ -8178,6 +8200,29 @@ let ZLMRTCClient = (function (exports) {
 	    return this._localStream;
 	  }
 
+    reWriteAudioCodingSdp(sdp){
+      // 获取需要修改
+      console.log(sdp);
+      let _sdp = sdp;
+      let audioEncodingKeys = Object.keys(audioEncoding);
+      console.log(audioEncoding);
+      console.log(audioEncodingKeys);
+
+      for(let key of audioEncodingKeys){
+        // 统一按字符串比较,兼容数字数组与字符串数组形式的enableCodings
+        if(this.options.enableCodings.map(String).includes(String(key))){
+          console.log("已经启用的音频编码");
+          // continue;
+        }else{
+          console.log(key);
+          console.log(audioEncoding[key]);
+          console.log(audioEncoding[key].replaceStr);
+          _sdp = _sdp.replace(audioEncoding[key].replaceStr,"");
+        }
+      }
+      console.log(_sdp);
+      return _sdp;
+    }
+
 	}
 
 	const quickScan = [{
@@ -8267,6 +8312,8 @@ let ZLMRTCClient = (function (exports) {
 	  });
 	}
 
+
+
 	console.log('build date:', BUILD_DATE);
 	console.log('version:', VERSION$1);
 	const Events = Events$1;
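
The reWriteAudioCodingSdp helper added above strips the a=rtpmap line of every known audio payload type that is not listed in enableCodings, so the offer sent to ZLM only advertises the codecs selected for the device. The same idea as a language-neutral sketch in Java (note that browser-generated SDP normally uses \r\n line endings, so the string being removed has to match them; the m=audio line, which also lists the payload types, is left untouched here, just as in the JS helper):

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class SdpAudioFilterSketch {
    // Known audio payload types handled by the web client (mirrors audioEncoding in ZLMRTCClient.js).
    private static final Map<Integer, String> AUDIO_RTPMAP = Map.of(
            0, "a=rtpmap:0 PCMU/8000\r\n",
            8, "a=rtpmap:8 PCMA/8000\r\n",
            9, "a=rtpmap:9 G722/8000\r\n",
            13, "a=rtpmap:13 CN/8000\r\n");

    // Remove the rtpmap line of every known audio codec that is not enabled,
    // so only the selected codecs remain in the offer sent to ZLM.
    public static String filterAudioCodecs(String sdp, List<Integer> enabledPayloadTypes) {
        String result = sdp;
        for (Map.Entry<Integer, String> entry : AUDIO_RTPMAP.entrySet()) {
            if (!enabledPayloadTypes.contains(entry.getKey())) {
                result = result.replace(entry.getValue(), "");
            }
        }
        return result;
    }

    public static void main(String[] args) {
        String sdp = "m=audio 9 UDP/TLS/RTP/SAVPF 0 8 9 13\r\n"
                + "a=rtpmap:0 PCMU/8000\r\n"
                + "a=rtpmap:8 PCMA/8000\r\n"
                + "a=rtpmap:9 G722/8000\r\n"
                + "a=rtpmap:13 CN/8000\r\n";
        // A device configured with audioEncodePt = "8" keeps only the PCMA rtpmap line.
        System.out.println(filterAudioCodecs(sdp, Arrays.asList(8)));
    }
}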

+ 18 - 1
web_src/src/components/common/microphone.vue

@@ -49,6 +49,7 @@ export default {
       this.player.close();
       this.player = null;
       this.mediaStream = null;
+      this.stopBroadcast();
     }
   },
   methods: {
@@ -104,7 +105,9 @@ export default {
         // webrtc 拉流地址
         let playAudioStreamUrl = zlmSdpUrl.replace("push","play");
         console.log(playAudioStreamUrl);
-
+        // 未配置audioEncodePt时默认启用全部音频编码,避免split报错
+        let audioEncodeArr = this.pushConfig.audioEncodePt ? this.pushConfig.audioEncodePt.split(",") : ["0", "8", "9", "13"];
+        console.log("启用音频编码")
+        console.log(audioEncodeArr)
         let player = new ZLMRTCClient.Endpoint(
           {
             // element: document.getElementById('video'),// video 标签
@@ -117,6 +120,8 @@ export default {
             recvOnly: false,
             resolution: {w: 0, h: 0},
             usedatachannel: false,
+            // 启用的编码
+            enableCodings: audioEncodeArr
           }
         );
         this.player = player;
@@ -183,13 +188,16 @@ export default {
       if (err){
         console.log(err)
         this.isLoadAddr = false;
+        this.audioStartFailed(err,err.message);
         return 0;
       }
       console.log(res);
       let data = res.data;
       if(data.code===0){
         this.pushConfig = data.data;
+        await this.startRecordAudio();
       }else{
+        this.audioStartFailed(data,data.msg);
         console.log(res);
         console.error(data.msg);
       }
@@ -203,6 +211,15 @@ export default {
         method: 'get',
         url: url
       });
+    },
+    // 停止语音广播
+    stopBroadcast() {
+      let url = `/api/play/stopBroadcast`
+      url += `?deviceId=${this.deviceId}&channelId=${this.channelId}`;
+      return this.$axios.axios({
+        method: 'get',
+        url: url
+      });
     }
   }
 

+ 20 - 2
web_src/src/components/dialog/deviceEdit.vue

@@ -37,6 +37,17 @@
             </el-select>
           </el-form-item>
 
+          <el-form-item label="对讲音频编码" prop="audioEncodePt">
+            <el-select v-model="form.audioEncodePt" multiple style="float: left; width: 100%" >
+              <el-option
+                v-for="item in audioEncodeArr"
+                :key="item.val"
+                :label="item.name"
+                :value="item.val">
+              </el-option>
+            </el-select>
+          </el-form-item>
+
           <el-form-item label="字符集" prop="charset" >
             <el-select v-model="form.charset" style="float: left; width: 100%" >
                 <el-option key="GB2312" label="GB2312" value="gb2312"></el-option>
@@ -83,6 +94,7 @@
 
 <script>
 import MediaServer from '../service/MediaServer'
+import {getPtArr} from "@/map/ptMap";
 export default {
   name: "deviceEdit",
   props: {},
@@ -95,6 +107,7 @@ export default {
       isLoging: false,
       hostNames:[],
       mediaServerList: [], // 媒体节点列表
+      audioEncodeArr: getPtArr(),// 音频编码
       mediaServerObj : new MediaServer(),
       form: {},
       isEdit: false,
@@ -115,6 +128,9 @@ export default {
       this.listChangeCallback = callback;
       if (row != null) {
         this.form = row;
+        if(this.form.audioEncodePt){
+          this.form.audioEncodePt = this.form.audioEncodePt.split(",");
+        }
       }
       this.getMediaServerList();
     },
@@ -130,10 +146,12 @@ export default {
       this.form.subscribeCycleForCatalog = this.form.subscribeCycleForCatalog||0
       this.form.subscribeCycleForMobilePosition = this.form.subscribeCycleForMobilePosition||0
       this.form.mobilePositionSubmissionInterval = this.form.mobilePositionSubmissionInterval||0
+      this.form.audioEncodePt = Array.isArray(this.form.audioEncodePt) ? this.form.audioEncodePt.join(',') : (this.form.audioEncodePt || '');
+
       this.$axios.axios({
         method: 'post',
-        url:`/api/device/query/device/${this.isEdit?'update':'add'}/`,
-        params: this.form
+        url:`/api/device/query/device/${this.isEdit?'update':'add'}`,
+        data: this.form
       }).then((res) => {
         console.log(res.data)
         if (res.data.code === 0) {

+ 30 - 0
web_src/src/map/ptMap.js

@@ -0,0 +1,30 @@
+const encodeMaps = {
+  0:{
+    name: "PCMU",
+    replaceStr:"a=rtpmap:0 PCMU/8000\n"
+  },
+  8:{
+    name: "PCMA",
+    replaceStr:"a=rtpmap:8 PCMA/8000\n"
+  },
+  9:{
+    name: "G722",
+    replaceStr:"a=rtpmap:9 G722/8000\n"
+  },
+  13:{
+    name: "CN",
+    replaceStr:"a=rtpmap:13 CN/8000\n"
+  }
+}
+
+
+export function getPtArr(){
+  return Object.keys(encodeMaps).map(key=>{
+    return {
+      id: key,
+      val: key,
+      ...encodeMaps[key],
+      disabled: false,
+    };
+  })
+}