Split into composable modules + refresh logic
src/composables/useCanvasPointer.js (new file, 78 lines)
@@ -0,0 +1,78 @@
// src/composables/useCanvasPointer.js
import { ref } from "vue";

/**
 * @param {{ phone, toBuffer, getWs: (index:number)=>WebSocket|null }} deps
 * Dependency object: phone info, buffer conversion, and a WebSocket getter.
 */
export function useCanvasPointer(deps) {
  const { phone, toBuffer, getWs } = deps;

  const canvasRef = ref({}); // { [udid]: HTMLCanvasElement } - maps device ID to its canvas element
  const frameMeta = ref({}); // { [udid]: { w, h, rotation? } } - maps device ID to its frame metadata

  /**
   * Initialize the canvas
   * @param {string} udid - unique device identifier
   */
  function initCanvas(udid) {
    const canvas = canvasRef.value[udid];
    if (!canvas) return;
    const dpr = window.devicePixelRatio || 1; // device pixel ratio
    // CSS (display) size
    canvas.style.width = `${phone.value.width * 1.4}px`;
    canvas.style.height = `${phone.value.height * 1.4}px`;
    // actual pixel size
    canvas.width = phone.value.width * 1.4 * dpr;
    canvas.height = phone.value.height * 1.4 * dpr;
    const ctx = canvas.getContext("2d");
    ctx.scale(dpr, dpr);
    // Optional reference grid (currently transparent)
    ctx.strokeStyle = "#ffffff00";
    for (let x = 0; x <= phone.value.width; x += 100) {
      ctx.beginPath();
      ctx.moveTo(x, 0);
      ctx.lineTo(x, phone.value.height);
      ctx.stroke();
    }
  }

  function getCanvasCoordinate(event, udid) {
    const canvas = canvasRef.value[udid];
    const rect = canvas.getBoundingClientRect();
    const rx = (event.clientX - rect.left) / rect.width;
    const ry = (event.clientY - rect.top) / rect.height;

    const meta = frameMeta.value[udid] || { w: 320, h: 720, rotation: 0 };
    let x = rx * meta.w;
    let y = ry * meta.h;

    switch (meta.rotation ?? 0) {
      case 90: [x, y] = [meta.w - y, x]; break;
      case 180: [x, y] = [meta.w - x, meta.h - y]; break;
      case 270: [x, y] = [y, meta.h - x]; break;
    }
    x = Math.max(0, Math.min(meta.w - 1, x));
    y = Math.max(0, Math.min(meta.h - 1, y));
    return { x: Math.round(x), y: Math.round(y), w: meta.w, h: meta.h };
  }

  // Unified send: point uses frame coordinates, screenSize uses the frame width/height
  function sendPointer(udid, index, action /* 0 down, 1 up, 2 move */, x, y) {
    const meta = frameMeta.value[udid] || { w: 320, h: 720, rotation: 0 };
    const payload = {
      type: 2,
      action,
      pointerId: 0,
      position: { point: { x, y }, screenSize: { width: meta.w, height: meta.h } },
      pressure: action === 1 ? 0 : 1,
      buttons: action === 1 ? 0 : 1,
    };
    const ws = getWs(index);
    if (ws && ws.readyState === WebSocket.OPEN) {
      ws.send(toBuffer(payload));
    }
  }

  return { canvasRef, frameMeta, initCanvas, getCanvasCoordinate, sendPointer };
}
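For reference, a minimal sketch of how a component's mouse handlers might combine getCanvasCoordinate and sendPointer. The handler names and wiring below are illustrative, not part of this commit; the action codes 0/1/2 and the return shape of getCanvasCoordinate come from the composable above.

// Hypothetical handlers; the component's real handleCanvasdown/handleMouseMove are only partially shown in the hunks below.
const onCanvasDown = (udid, e, index) => {
  const { x, y } = getCanvasCoordinate(e, udid); // DOM position -> frame coordinates
  sendPointer(udid, index, 0, x, y);             // 0 = down
};
const onCanvasUp = (udid, e, index) => {
  const { x, y } = getCanvasCoordinate(e, udid);
  sendPointer(udid, index, 1, x, y);             // 1 = up
};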
src/composables/useTaskQueue.js (new file, 30 lines)
@@ -0,0 +1,30 @@
// src/composables/useTaskQueue.js
// Create per-index task queues

const _queues = new Map(); // index -> task[]

export function createTaskQueue(index) {
  if (!_queues.has(index)) _queues.set(index, []);
  return {
    enqueue(task) {
      const q = _queues.get(index);
      q.push(task);
      if (q.length === 1) task(); // run immediately when this is the only task
    },
    next() {
      const q = _queues.get(index) || [];
      q.shift();
      if (q.length > 0) q[0](); // run the next task
    },
    clear() {
      _queues.set(index, []);
    },
    getNum() {
      return _queues.get(index) || [];
    }
  };
}

export function clearAllQueues() {
  _queues.clear();
}
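A brief usage sketch (the setTimeout body is illustrative; the real contract is that every task calls next() when it finishes so the queue can advance):

const queue = createTaskQueue(0);   // one serial queue per device index
queue.enqueue(() => {
  // hypothetical async work for device 0
  setTimeout(() => {
    console.log('task 1 done, still queued:', queue.getNum().length);
    queue.next();                   // hand control to the next task
  }, 500);
});
queue.enqueue(() => {               // runs only after task 1 calls next()
  console.log('task 2');
  queue.next();
});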
src/composables/useVideoStream.js (new file, 106 lines)
@@ -0,0 +1,106 @@
// src/composables/useVideoStream.js

/**
 * Trims the playback buffer of h264-converter's MSE SourceBuffer
 * so that `buffered` does not keep growing and drive memory usage up.
 *
 * @param {Array<object>} instanceList - your instanceList (each index holds a converter)
 * @param {object} videoElementRef - your videoElement ref object (udid -> video)
 * @param {import('vue').Ref<Array>} deviceInformationRef - device list (source of the udid)
 * @param {number} index
 * @param {number} backBufferSec - how many recent seconds to keep
 * @param {number} intervalMs - how often to trim
 */
export function attachTrimmerForIndex(
  instanceList,
  videoElementRef,
  deviceInformationRef,
  index,
  backBufferSec = 10,
  intervalMs = 2000
) {
  const conv = instanceList[index]?.converter;
  if (!conv) return;

  const ensureAttach = () => {
    const ms = conv.mediaSource;
    if (!ms) return false;
    if (ms.readyState !== "open") return false;
    if (!conv.sourceBuffer) return false;
    return true;
  };

  if (conv._trimTimer) {
    clearInterval(conv._trimTimer);
    conv._trimTimer = null;
  }
  if (conv._mseListenersInstalled !== true && conv.mediaSource) {
    conv._mseListenersInstalled = true;
    conv.mediaSource.addEventListener("sourceopen", () => {
      attachTrimmerForIndex(
        instanceList,
        videoElementRef,
        deviceInformationRef,
        index,
        backBufferSec,
        intervalMs
      );
    });
    conv.mediaSource.addEventListener("sourceclose", () => {
      if (conv._trimTimer) {
        clearInterval(conv._trimTimer);
        conv._trimTimer = null;
      }
    });
    conv.mediaSource.addEventListener("error", () => {
      if (conv._trimTimer) {
        clearInterval(conv._trimTimer);
        conv._trimTimer = null;
      }
    });
  }

  if (!ensureAttach()) {
    const waitId = setInterval(() => {
      if (ensureAttach()) {
        clearInterval(waitId);
        attachTrimmerForIndex(
          instanceList,
          videoElementRef,
          deviceInformationRef,
          index,
          backBufferSec,
          intervalMs
        );
      }
    }, 300);
    return;
  }

  conv._trimTimer = setInterval(() => {
    const currentConv = instanceList[index]?.converter;
    const ms = currentConv?.mediaSource;
    const sb = currentConv?.sourceBuffer;
    const udid = deviceInformationRef.value[index]?.udid;
    const video = udid ? videoElementRef.value[udid] : null;

    if (!currentConv || !ms || ms.readyState !== "open" || !sb || !video) return;
    if (sb.updating || video.seeking || video.readyState < 2) return;

    const cur = video.currentTime || 0;
    const trimTo = Math.max(0, cur - backBufferSec);

    try {
      for (let i = 0; i < sb.buffered.length; i++) {
        const start = sb.buffered.start(i);
        const end = sb.buffered.end(i);
        if (end < trimTo - 0.25) {
          try { sb.remove(0, end); } catch { }
          break;
        }
      }
    } catch (e) {
      // ignore one-off errors (e.g. the SourceBuffer was removed)
    }
  }, intervalMs);
}
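The core idea in isolation, as a sketch against a plain video/SourceBuffer pair rather than the h264-converter internals the composable reaches into via conv.mediaSource and conv.sourceBuffer:

// Minimal trim loop; SourceBuffer.remove() is asynchronous, so always check sb.updating first.
function trimLoop(video, sb, backBufferSec = 10, intervalMs = 2000) {
  return setInterval(() => {
    if (sb.updating || video.readyState < 2) return;        // an append/remove is still in flight
    const trimTo = Math.max(0, (video.currentTime || 0) - backBufferSec);
    if (sb.buffered.length && sb.buffered.end(0) < trimTo - 0.25) {
      try { sb.remove(0, sb.buffered.end(0)); } catch { }    // the SourceBuffer may already be gone
    }
  }, intervalMs);
}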
@@ -64,7 +64,7 @@
 </template>

 <script setup>
-import { ref, onMounted, onUnmounted, onBeforeUnmount, watch, inject } from "vue";
+import { ref, onMounted, onUnmounted, onBeforeUnmount, watch, inject, nextTick } from "vue";
 import VideoConverter from "h264-converter";
 import { useRouter } from 'vue-router';
 import {
@@ -77,20 +77,22 @@ import { toBufferBtn, stringToUtf8ByteArray, getClipboard, setClipboard, bufferT
 import { createWsActions } from '@/utils/wsActions';
 import { ElMessage, ElMessageBox, ElLoading } from 'element-plus'
 import { chat, translationToChinese, translation } from "@/api/chat";
-import { update } from '@/api/account';
+import { update } from '@/api/account'; // update streamer info
 import MultiLineInputDialog from '@/components/MultiLineInputDialog.vue'; // adjust to the actual path
 import ChatDialog from '@/components/ChatDialog.vue'
-import { splitArray } from '@/utils/arrUtil'
+// import { splitArray } from '@/utils/arrUtil' // split the array to assign streamers, deprecated
 import { chooseFile } from '@/utils/fileUtil'
 import { connectSSE } from '@/utils/sseUtils'
 import { set } from "lodash";
 import { prologue, comment } from '@/api/account';

+import { createTaskQueue } from '@/composables/useTaskQueue'          // task queue
+import { useCanvasPointer } from '@/composables/useCanvasPointer'     // canvas init and click-to-coordinate conversion
+import { attachTrimmerForIndex } from '@/composables/useVideoStream'  // buffer trimmer
 const router = useRouter();
 let wsActions = null;
 let userdata = getUser();
 // inject the refresh method
-const reload = inject("reload")
+// const reload = inject("reload")

 let phone = ref({ width: 207, height: 470 });
 const openStr = base64ToBinary("ZQBwAAAAAAA8CgLQAtAAAAAAAAAAAAD/AAAAAAAAAAAAAAAA"); // command that starts the video stream
 const eitwo = base64ToBinary("BAIAAABHVFJD"); // command that requests device info
@@ -120,6 +122,7 @@ let selectedDevice = ref(999);
 let wslist = [];
 // whether playback is stopped
 let isStop = ref(false);
+
 // whether the SSE popup exists
 let isMsgPop = ref(false);
 // player list
@@ -131,8 +134,6 @@ let isMonitor = ref(false);
 let iponeCoefficient = ref([{ width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }, { width: 1, height: 1 }]);
 // whether this is sent content
 let isSend = ref(false)
-// adb task queue
-const taskQueues = new Map(); // one queue per device
 // whether the dialog is shown
 let showDialog = ref(false);
 // message-listening timer
@@ -161,7 +162,7 @@ const isMonitorOn = ref(false) // false = off, true = on
 const buttons = [
   {
     label: '刷新',
-    onClick: () => reload(),
+    onClick: () => reload({ onlySelected: selectedDevice.value !== 999, hard: true }),
     show: () => true,
     img: {
       normal: new URL('@/assets/video/leftBtn1.png', import.meta.url).href,
@@ -274,6 +275,15 @@
   }
 ]

+// Place this after the variables above are declared (it needs phone, toBuffer, wslist)
+const { canvasRef, frameMeta, initCanvas, getCanvasCoordinate, sendPointer } =
+  useCanvasPointer({
+    phone,                  // existing ref({ width, height })
+    toBuffer,               // existing utility function
+    getWs: (i) => wslist[i] // the WebSocket for the given index
+  });
+
+
 const feedState = Array(8).fill(null).map(() => ({
   processing: false,
   pending: null, // latest pending chunk (e.g. an ArrayBuffer)
@@ -301,10 +311,10 @@ function pushFrame(index, buf) {
 }
 const wsCache = new Map();

-//``````````````````````````````````````````````````````````````````````````````````
+//````````````````````````````````````````````````````````````````````````````````````````````````````````````````````````
 // Initialize the phone-display WebSocket and the video stream
-const initVideoStream = (udid, index) => {
-  //``````````````````````````````````````````````````````````````````````````````````
+const initVideoStream = async (udid, index) => {
+  //````````````````````````````````````````````````````````````````````````````````````````````````````````````````````````
   // 1. Check the cache for an existing instance
   if (wsCache.has(udid)) {
     const cached = wsCache.get(udid);
@@ -314,14 +324,22 @@ const initVideoStream = (udid, index) => {
     // If the connection is already closed, clear the cache and recreate
     wsCache.delete(udid);
   }
-  // 2. Create a dedicated instance container
-  instanceList[index] = {
-    // wsVideo: null,
-    converter: null,
-    timer: null
-  };
-  //``````````````````````````````````````````````````````````````````````````````````
+
+  // Wait for the <video> element to mount
+  const el = await waitForVideoEl(udid);
+  if (!el) {
+    console.error('[initVideoStream] video element not ready for', udid);
+    return;
+  }
+
+  // Prepare the container
+  instanceList[index] = { converter: null, timer: null };
+  //````````````````````````````````````````````````````````````````````````````````````````````````````````````````````````
-  if (!videoElement.value) return;
+  if (!videoElement.value?.[udid]) {
+    console.warn('video element missing for', udid);
+    return; // don't create the converter; avoid passing null into it
+  }
   // 1. Create the h264-converter instance
   instanceList[index].converter = new VideoConverter(videoElement.value[udid], 60, 1);
   // 2. Connect the WebSocket
@@ -329,7 +347,7 @@ const initVideoStream = (udid, index) => {
     `ws://127.0.0.1:8000/?action=proxy-adb&remote=tcp%3A8886&udid=${udid}`
   );
   wslist[index].binaryType = "arraybuffer";
-  attachTrimmerForIndex(index, 10, 2000); // attach the trimmer
+  attachTrimmerForIndex(instanceList, videoElement, deviceInformation, index, 10, 2000); // attach the trimmer
   wslist[index].onopen = () => {
     console.log("手机显示ws已开启");
     wsActions = createWsActions(wslist);
@@ -463,9 +481,6 @@
     console.log(resData.type, '坐标返回:x:', resData.x, 'y:', resData.y);
   }

-
-
-
   phoneXYinfo.value[index].id = resData.device
   if (resData.type == 'Likes') { // check whether this is a Like
     phoneXYinfo.value[index].Likes = { x: resData.x * iponeCoefficient.value[index].width, y: resData.y * iponeCoefficient.value[index].height }
@@ -719,7 +734,7 @@

   // video stream
   if (instanceList[index].converter && isshow.value) {
-    pushFrame(index, buf); // use the new pushFrame below
+    pushFrame(index, buf); // process the video-stream frame
   }
 }

@@ -738,27 +753,7 @@ const initVideoStream = (udid, index) => {
   //``````````````````````````````````````````````````````````````````````````````````
 };

-// configuration
-let canvasRef = ref({});
-// initialize the canvas
-const initCanvas = (udid) => {
-  const canvas = canvasRef.value[udid];
-  const dpr = window.devicePixelRatio || 1;
-  canvas.style.width = `${phone.value.width * 1.4}px`;
-  canvas.style.height = `${phone.value.height * 1.4}px`;
-  canvas.width = phone.value.width * 1.4 * dpr;
-  canvas.height = phone.value.height * 1.4 * dpr;
-  const ctx = canvas.getContext("2d");
-  ctx.scale(dpr, dpr);
-  // draw a reference grid (optional)
-  ctx.strokeStyle = "#ffffff00";
-  for (let x = 0; x <= phone.value.width; x += 100) {
-    ctx.beginPath();
-    ctx.moveTo(x, 0);
-    ctx.lineTo(x, phone.value.height);
-    ctx.stroke();
-  }
-};


 // mouse-down handler
 const handleCanvasdown = (udid, e, index) => {
@@ -782,59 +777,10 @@ const handleMouseMove = (udid, e, index) => {
   sendPointer(udid, index, 2, x, y);
 };

-// Unified send: point uses frame coordinates, screenSize uses the frame width/height
-function sendPointer(udid, index, action /* 0 down, 1 up, 2 move */, x, y) {
-  // const meta = frameMeta.value[udid] || { w: 320, h: 720 };
-  const meta = frameMeta.value[udid] || { w: 320, h: 720, rotation: 0 };
-  // console.log("frameMeta.value", frameMeta.value)
-  // console.log("udid", udid)
-
-  const payload = {
-    type: 2,
-    action,
-    pointerId: 0,
-    position: {
-      point: { x, y },
-      screenSize: { width: meta.w, height: meta.h },
-    },
-    pressure: action === 1 ? 0 : 1,
-    buttons: action === 1 ? 0 : 1,
-  };
-  console.log("发送坐标", payload)
-  wslist[index]?.send(toBuffer(payload));
-}
-
-// coordinate calculation
-const frameMeta = ref({});
-
-// —— Coordinate conversion: DOM -> frame coordinates
-const getCanvasCoordinate = (event, udid) => {
-  const canvas = canvasRef.value[udid];
-  const rect = canvas.getBoundingClientRect();
-
-  // mouse position as a ratio of the visible area (CSS size)
-  const rx = (event.clientX - rect.left) / rect.width;
-  const ry = (event.clientY - rect.top) / rect.height;
-
-  // current frame size (prefer videoWidth/Height, fall back to the getSize response)
-  // const meta = frameMeta.value[udid] || { w: 320, h: 720, rotation: 0 };
-  const meta = frameMeta.value[udid] || { w: 320, h: 720, rotation: 0 }; // unified
-
-  // map to frame coordinates
-  let x = rx * meta.w;
-  let y = ry * meta.h;
-
-  // handle rotation here if needed (not needed if already rotated at the render layer)
-  switch (meta.rotation ?? 0) {
-    case 90: [x, y] = [meta.w - y, x]; break;
-    case 180: [x, y] = [meta.w - x, meta.h - y]; break;
-    case 270: [x, y] = [y, meta.h - x]; break;
-  }
-
-  x = Math.max(0, Math.min(meta.w - 1, x));
-  y = Math.max(0, Math.min(meta.h - 1, y));
-  return { x: Math.round(x), y: Math.round(y), w: meta.w, h: meta.h };
-};

 // ======= alignment helpers =======
 const ALIGN_BASE = 16; // set to 32 for 32-alignment
@@ -1019,7 +965,7 @@ const ObtainDeviceInformation = () => {
     ws.send(eitwo);
   };
   // 3. Handle the received binary data
-  ws.onmessage = (event) => {
+  ws.onmessage = async (event) => {
     const data = JSON.parse(new TextDecoder('utf-8').decode(event.data).replace(/[^\x20-\x7F]/g, ''));
     try {
       console.log('数组', data)
@@ -1027,20 +973,15 @@
       deviceInformation.value = [];
       const filteredList = data.data.list.filter(item => item.state === 'device');
       // When the device list arrives, render every device
-      filteredList.forEach((item, index) => {
-        console.log(item);
-        if (item.state === "device") {
-          deviceInformation.value.push(item);
-          console.log("deviceInformation", deviceInformation.value);
-          setTimeout(() => {
-            initVideoStream(item.udid, index);
-            initCanvas(item.udid);
-            setTimeout(() => {
-              wsActions.getSize(item.udid, index)
-            }, 2000)
-          }, 300);
-        }
-      })
+      for (const item of filteredList) {
+        deviceInformation.value.push(item);
+        await nextTick(); // wait for v-for to render the <video>
+        initCanvas(item.udid); // also DOM-dependent, so keep it after nextTick
+        initVideoStream(item.udid, deviceInformation.value.length - 1);
+        // getSize is most reliable inside wslist[index].onopen;
+        // keeping the delay also works:
+        setTimeout(() => wsActions?.getSize(item.udid, deviceInformation.value.length - 1), 2000);
+      }
     } else if (data.type == "device") {
       if (data.data.device.state === "offline") {
         // Watch device info; when a device goes offline, remove its entry
@@ -1051,7 +992,7 @@
         deviceInformation.value.forEach((item, index1) => {
           // close the WebSocket connections
           wslist.forEach((item, index) => {
-            item.close();
+            new VideoConvertitem.close();
          })
          // reconnect the WebSocket
          new Promise((resolve, reject) => {
@@ -1299,37 +1240,63 @@ async function clickxy(x, y, index, type) {
   }
 }

-// create a task-queue instance
-function createTaskQueue(index) {
-  if (!taskQueues.has(index)) {
-    taskQueues.set(index, []);
-  }
-  return {
-    enqueue(task) {
-      taskQueues.get(index).push(task);
-      if (taskQueues.get(index).length === 1) {
-        console.log("执行任务第一个任务");
-        task(); // run the first task
-      }
-    },
-    next() {
-      console.log('执行等待中任务')
-      const queue = taskQueues.get(index);
-      queue.shift(); // remove the finished task
-      if (queue.length > 0) {
-        queue[0](); // run the next task
-      }
-    },
-    clear() {
-      taskQueues.set(index, []); // clear all tasks
-    },
-    getNum() {
-      return taskQueues.get(index)
-    }
-  };
+// Reset the frame-feeding state (avoids stale-frame conflicts)
+function resetFeedState(index) {
+  const st = feedState[index];
+  if (!st) return;
+  st.processing = false;
+  st.pending = null;
 }

 // send the task that checks this phone's x/y coordinates
+const reload = (opts = {}) => {
+  const { onlySelected = false, hard = false } = opts;
+  const targets = (onlySelected && selectedDevice.value !== 999)
+    ? [selectedDevice.value]
+    : deviceInformation.value.map((_, i) => i);
+
+  targets.forEach(i => refreshStream(i, hard));
+  ElMessage.success(`已刷新${onlySelected ? '当前设备' : '全部设备'}视频流`);
+};
+
+/** Rebuild one device's video decoder without touching its ws or canvas */
+function refreshStream(index, hard = false) {
+  const dev = deviceInformation.value[index];
+  if (!dev) return;
+
+  const udid = dev.udid;
+  const video = videoElement.value && videoElement.value[udid];
+  if (!video || !instanceList[index]) return;
+
+  // 1) Stop the old feeding state and destroy the old converter
+  resetFeedState(index);
+  try {
+    const conv = instanceList[index].converter;
+    if (conv && typeof conv.destroy === 'function') conv.destroy();
+  } catch (e) { }
+  instanceList[index].converter = null;
+
+  // 2) Optional "hard refresh": fully reset the <video> to avoid SourceBuffer leftovers
+  if (hard) {
+    try { video.pause && video.pause(); } catch (e) { }
+    try { video.removeAttribute && video.removeAttribute('src'); } catch (e) { }
+    try { video.load && video.load(); } catch (e) { }
+  }
+
+  // 3) Create a new converter attached to the same <video>
+  instanceList[index].converter = new VideoConverter(video, 60, 1);
+
+  // 4) Ask the backend to push a keyframe / restart encoding right away
+  try { wslist[index] && wslist[index].send(openStr); } catch (e) { }
+
+  // 5) Sync the size (does not affect the existing canvas coordinate mapping)
+  setTimeout(() => {
+    if (wsActions && typeof wsActions.getSize === 'function') {
+      wsActions.getSize(udid, index);
+    }
+  }, 300);
+}

 // preprocessing before sending a task
 function sendWsTask(index, data) {
   console.log('任务等待中', data.type);
   return new Promise((resolve) => {
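For reference, the two ways reload can now be called under its new options signature (the second call is an illustrative variant, not something this commit wires up):

// From the refresh button above: hard refresh, scoped to the selected device when one is picked
reload({ onlySelected: selectedDevice.value !== 999, hard: true });

// Hypothetical soft refresh of every device: rebuild the converters but leave each <video> element untouched
reload({ onlySelected: false, hard: false });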
@@ -1616,91 +1583,21 @@ function getTransformStyle(index) {
     ? 'translateY(-30%)'
     : 'none';
 }
-function attachTrimmerForIndex(index, backBufferSec = 10, intervalMs = 2000) {
-  const conv = instanceList[index]?.converter;
-  if (!conv) return;
-
-  // If MSE/SourceBuffer isn't ready yet, wait for sourceopen before attaching
-  const ensureAttach = () => {
-    const ms = conv.mediaSource;
-    if (!ms) return false;
-    if (ms.readyState !== 'open') return false;
-    if (!conv.sourceBuffer) return false;
-    return true;
-  };
-
-  // clear the old timer first
-  if (conv._trimTimer) {
-    clearInterval(conv._trimTimer);
-    conv._trimTimer = null;
-  }
-  if (conv._mseListenersInstalled !== true && conv.mediaSource) {
-    conv._mseListenersInstalled = true;
-    conv.mediaSource.addEventListener('sourceopen', () => {
-      // when MSE reopens, re-attach the trimmer
-      attachTrimmerForIndex(index, backBufferSec, intervalMs);
-    });
-    conv.mediaSource.addEventListener('sourceclose', () => {
-      if (conv._trimTimer) {
-        clearInterval(conv._trimTimer);
-        conv._trimTimer = null;
-      }
-    });
-    conv.mediaSource.addEventListener('error', () => {
-      if (conv._trimTimer) {
-        clearInterval(conv._trimTimer);
-        conv._trimTimer = null;
-      }
-    });
-  }
-
-  // may not be ready yet; poll and wait
-  if (!ensureAttach()) {
-    const waitId = setInterval(() => {
-      if (ensureAttach()) {
-        clearInterval(waitId);
-        attachTrimmerForIndex(index, backBufferSec, intervalMs);
-      }
-    }, 300);
-    return;
-  }
-
-  conv._trimTimer = setInterval(() => {
-    // re-read everything each tick to avoid holding a stale, removed sb reference
-    const currentConv = instanceList[index]?.converter;
-    const ms = currentConv?.mediaSource;
-    const sb = currentConv?.sourceBuffer;
-    const video = videoElement.value[deviceInformation.value[index]?.udid];
-
-    if (!currentConv || !ms || ms.readyState !== 'open' || !sb || !video) return;
-    if (sb.updating || video.seeking || video.readyState < 2) return;
-
-    const cur = video.currentTime || 0;
-    const trimTo = Math.max(0, cur - backBufferSec);
-
-    try {
-      // buffered may contain multiple ranges; only remove the earliest range that ends before trimTo
-      for (let i = 0; i < sb.buffered.length; i++) {
-        const start = sb.buffered.start(i);
-        const end = sb.buffered.end(i);
-        if (end < trimTo - 0.25) {
-          try { sb.remove(0, end); } catch { }
-          break;
-        }
-      }
-    } catch (e) {
-      // on exceptions such as "SourceBuffer has been removed ...", skip this tick and re-read sb next time
-      // console.warn('[trimmer]', e);
-    }
-  }, intervalMs);
-}


 function manualGc() {
   window.electronAPI.manualGc()
 }


+// small helper that waits until the video ref is ready
+async function waitForVideoEl(udid, tries = 20, delay = 16) {
+  for (let i = 0; i < tries; i++) {
+    const el = videoElement.value?.[udid];
+    if (el) return el;
+    await nextTick();                             // wait for the next DOM update
+    await new Promise(r => setTimeout(r, delay)); // then wait a little longer
+  }
+  return null;
+}
 </script>

 <style scoped lang="less">

tk-ai-adb.zip (new binary file, not shown)