
master: Fixed: optimized the UI and switched the API endpoint

gitboyzcf 7 months ago
parent
commit
33d21faaaf

+ 2 - 2
.env.development

@@ -4,8 +4,8 @@
 VITE_APP_TITLE = 铁塔大视野
 
 # Network requests
-VITE_APP_API_BASEURL = https://192.168.10.100
-# VITE_APP_API_BASEURL = https://192.168.211.44
+# VITE_APP_API_BASEURL = https://192.168.10.100
+VITE_APP_API_BASEURL = https://192.168.211.89
 
 # localStorage key prefix for the project
 VITE_APP_PREFIX = monitoring

+ 4 - 1
package.json

@@ -16,6 +16,9 @@
     "@kjgl77/datav-vue3": "^1.6.1",
     "@popperjs/core": "^2.11.8",
     "@vueuse/core": "^10.3.0",
+    "@ffmpeg/ffmpeg": "^0.12.10",
+    "@ffmpeg/util": "^0.12.1",
+    "native-file-system-adapter": "^3.0.1",
     "animate.css": "^4.1.1",
     "animejs": "^3.2.1",
     "axios": "^1.4.0",
@@ -67,4 +70,4 @@
     "vite": "^4.5.0",
     "vite-plugin-compression": "^0.5.1"
   }
-}
+}

+ 57 - 0
pnpm-lock.yaml

@@ -11,6 +11,12 @@ dependencies:
   '@antv/l7-maps':
     specifier: ^2.17.7
     version: 2.17.7
+  '@ffmpeg/ffmpeg':
+    specifier: ^0.12.10
+    version: 0.12.10
+  '@ffmpeg/util':
+    specifier: ^0.12.1
+    version: 0.12.1
   '@floating-ui/vue':
     specifier: ^1.0.2
     version: 1.0.2(vue@3.3.4)
@@ -65,6 +71,9 @@ dependencies:
   mp4box:
     specifier: ^0.5.2
     version: 0.5.2
+  native-file-system-adapter:
+    specifier: ^3.0.1
+    version: 3.0.1
   node-forge:
     specifier: ^1.3.1
     version: 1.3.1
@@ -936,6 +945,23 @@ packages:
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dev: true
 
+  /@ffmpeg/ffmpeg@0.12.10:
+    resolution: {integrity: sha512-lVtk8PW8e+NUzGZhPTWj2P1J4/NyuCrbDD3O9IGpSeLYtUZKBqZO8CNj1WYGghep/MXoM8e1qVY1GztTkf8YYQ==}
+    engines: {node: '>=18.x'}
+    dependencies:
+      '@ffmpeg/types': 0.12.2
+    dev: false
+
+  /@ffmpeg/types@0.12.2:
+    resolution: {integrity: sha512-NJtxwPoLb60/z1Klv0ueshguWQ/7mNm106qdHkB4HL49LXszjhjCCiL+ldHJGQ9ai2Igx0s4F24ghigy//ERdA==}
+    engines: {node: '>=16.x'}
+    dev: false
+
+  /@ffmpeg/util@0.12.1:
+    resolution: {integrity: sha512-10jjfAKWaDyb8+nAkijcsi9wgz/y26LOc1NKJradNMyCIl6usQcBbhkjX5qhALrSBcOy6TOeksunTYa+a03qNQ==}
+    engines: {node: '>=18.x'}
+    dev: false
+
   /@floating-ui/core@1.6.0:
     resolution: {integrity: sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==}
     dependencies:
@@ -2763,6 +2789,16 @@ packages:
       reusify: 1.0.4
     dev: true
 
+  /fetch-blob@3.2.0:
+    resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
+    engines: {node: ^12.20 || >= 14.13}
+    requiresBuild: true
+    dependencies:
+      node-domexception: 1.0.0
+      web-streams-polyfill: 3.3.3
+    dev: false
+    optional: true
+
   /file-entry-cache@6.0.1:
     resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
     engines: {node: ^10.12.0 || >=12.0.0}
@@ -3561,10 +3597,24 @@ packages:
     engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
     hasBin: true
 
+  /native-file-system-adapter@3.0.1:
+    resolution: {integrity: sha512-ocuhsYk2SY0906LPc3QIMW+rCV3MdhqGiy7wV5Bf0e8/5TsMjDdyIwhNiVPiKxzTJLDrLT6h8BoV9ERfJscKhw==}
+    engines: {node: '>=14.8.0'}
+    optionalDependencies:
+      fetch-blob: 3.2.0
+    dev: false
+
   /natural-compare@1.4.0:
     resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
     dev: true
 
+  /node-domexception@1.0.0:
+    resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
+    engines: {node: '>=10.5.0'}
+    requiresBuild: true
+    dev: false
+    optional: true
+
   /node-fetch-native@1.6.4:
     resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==}
     dev: true
@@ -4752,6 +4802,13 @@ packages:
     dev: false
     optional: true
 
+  /web-streams-polyfill@3.3.3:
+    resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==}
+    engines: {node: '>= 8'}
+    requiresBuild: true
+    dev: false
+    optional: true
+
   /web-worker-helper@0.0.3:
     resolution: {integrity: sha512-/TllNPjGenDwjE67M16TD9ALwuY847/zIoH7r+e5rSeG4kEa3HiMTAsUDj80yzIzhtshkv215KfsnQ/RXR3nVA==}
     dev: false

+ 3 - 0
public/config.json

@@ -0,0 +1,3 @@
+{
+  "ttTitle": "首创工地安全监理系统"
+}

+ 9 - 1
src/api/modules/system.js

@@ -114,5 +114,13 @@ export default {
       method: 'get',
       data
     })
-  }
+  },
+  // Fetch the runtime config file
+  API_GET_CONFIG() {
+    return request({
+      baseURL:'/',
+      url: '/foreground/config.json',
+      method: 'get'
+    })
+  },
 }
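
API_GET_CONFIG overrides baseURL with '/' so the request bypasses VITE_APP_API_BASEURL and reads public/config.json, which the new Vite base '/foreground/' serves at /foreground/config.json. A minimal sketch of the same lookup with plain fetch (a hypothetical standalone helper, not part of this commit); the ttTitle fallback mirrors the screenOperation store change below:

  // Hypothetical equivalent of API_GET_CONFIG without the request() wrapper.
  async function loadRuntimeConfig() {
    // Path assumes the app is deployed under the Vite base '/foreground/'.
    const res = await fetch('/foreground/config.json')
    if (!res.ok) throw new Error(`config.json request failed: ${res.status}`)
    const cfg = await res.json()
    // Fall back to the build-time title when ttTitle is missing.
    return cfg.ttTitle || import.meta.env.VITE_APP_TITLE
  }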

+ 42 - 0
src/assets/core/package/package.json

@@ -0,0 +1,42 @@
+{
+  "name": "@ffmpeg/core",
+  "version": "0.12.5",
+  "description": "FFmpeg WebAssembly version (single thread)",
+  "main": "./dist/umd/ffmpeg-core.js",
+  "exports": {
+    ".": {
+      "import": "./dist/esm/ffmpeg-core.js",
+      "require": "./dist/umd/ffmpeg-core.js"
+    },
+    "./wasm": {
+      "import": "./dist/esm/ffmpeg-core.wasm",
+      "require": "./dist/umd/ffmpeg-core.wasm"
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/ffmpegwasm/ffmpeg.wasm.git"
+  },
+  "keywords": [
+    "ffmpeg",
+    "WebAssembly",
+    "video",
+    "audio",
+    "transcode"
+  ],
+  "author": "Jerome Wu <jeromewus@gmail.com>",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/ffmpegwasm/ffmpeg.wasm/issues"
+  },
+  "engines": {
+    "node": ">=16.x"
+  },
+  "homepage": "https://github.com/ffmpegwasm/ffmpeg.wasm#readme",
+  "publishConfig": {
+    "access": "public"
+  }
+}

File diff is too large
+ 2337 - 0
src/assets/core/package/pkg/esm/ffmpeg-core.js


BIN
src/assets/core/package/pkg/esm/ffmpeg-core.wasm


File diff is too large
+ 2326 - 0
src/assets/core/package/pkg/umd/ffmpeg-core.js


BIN
src/assets/core/package/pkg/umd/ffmpeg-core.wasm


+ 74 - 3
src/assets/js/GetVideoStreaming.js

@@ -1,5 +1,7 @@
 import MP4Box from 'mp4box'
 
+self.Lx = false
+
 var ws = null
 var flag = 1
 
@@ -27,7 +29,17 @@ class MP4FileSink {
     this.#file.flush()
   }
 }
+
 var file = null
+var showData = null
+
+var SaveStep = 0
+var SaveBuffer = []
+let Mp4HeardBuffer = []
+
+var kSaveBufferSize = 0
+let Mp4HeardBufferSize = 0
+
 function WsMessage(event) {
   const dv = new DataView(event.data)
   const int32Value = dv.getInt32(0, true)
@@ -40,21 +52,45 @@ function WsMessage(event) {
       file.onSamples = onSamples
       this.fileSink = new MP4FileSink(file)
       vidoeData = dv.buffer.slice(4)
+      if (SaveStep === 0) {
+        Mp4HeardBuffer.push(event.data.slice(4))
+        Mp4HeardBufferSize += event.data.byteLength - 4
+        SaveStep = 1
+      }
       this.fileSink.write(vidoeData)
       break
     case 2:
+      vidoeData = dv.buffer.slice(4)
+      if (SaveStep === 1) {
+        Mp4HeardBuffer.push(event.data.slice(4))
+        Mp4HeardBufferSize += event.data.byteLength - 4
+        SaveStep = 2
+      }
+      this.fileSink.write(vidoeData)
+      break
     case 3:
+      vidoeData = dv.buffer.slice(4)
+      if (Lx) {
+        SaveBuffer.push(event.data.slice(4))
+        kSaveBufferSize += event.data.byteLength - 4
+      }
+      this.fileSink.write(vidoeData)
+      break
     case 4:
       vidoeData = dv.buffer.slice(4)
+      if (Lx) {
+        SaveBuffer.push(event.data.slice(4))
+        kSaveBufferSize += event.data.byteLength - 4
+      }
       this.fileSink.write(vidoeData)
       break
     case 5:
       jsonData = new TextDecoder().decode(dv.buffer.slice(4))
       if (jsonData) {
         jsonObject = JSON.parse(jsonData)
-        this.showData = Object.values(jsonObject.Res)[0]
+        showData = Object.values(jsonObject.Res)[0]
       } else {
-        this.showData = []
+        showData = []
       }
       break
   }
@@ -77,7 +113,7 @@ function onReady(info) {
 function onSamples(track_id, ref, samples) {
   for (const sample of samples) {
     var DataType = 'Samples'
-    self.postMessage({ DataType, sample })
+    self.postMessage({ DataType, sample, showData })
   }
 }
 
@@ -94,6 +130,16 @@ function description(trak) {
 }
 
 self.addEventListener('message', (message) => {
+  if (message.data.type === 'lx') {
+    self.Lx = message.data.lx
+    if (!message.data.lx) {
+      self.postMessage({
+        DataType: 'lx',
+        Data: mergeAndDownloadBuffers(Mp4HeardBuffer, SaveBuffer)
+      })
+    }
+    return
+  }
   ws = new WebSocket(message.data.url)
   ws.binaryType = 'arraybuffer'
   ws.onopen = WsOpen
@@ -101,3 +147,28 @@ self.addEventListener('message', (message) => {
   ws.onerror = WsError
   ws.onmessage = WsMessage
 })
+
+function mergeAndDownloadBuffers(bufferArray1, bufferArray2) {
+  // Allocate a new buffer to hold the merged data
+  var mergedBuffer = new Uint8Array(Mp4HeardBufferSize + kSaveBufferSize)
+
+  // Track the current write offset
+  var offset = 0
+
+  // Copy each ArrayBuffer in bufferArray1 into the merged buffer
+  bufferArray1.forEach((buffer) => {
+    mergedBuffer.set(new Uint8Array(buffer), offset)
+    offset += buffer.byteLength
+  })
+
+  // Copy each ArrayBuffer in bufferArray2 into the merged buffer
+  bufferArray2.forEach((buffer) => {
+    mergedBuffer.set(new Uint8Array(buffer), offset)
+    offset += buffer.byteLength
+  })
+  kSaveBufferSize = 0
+  SaveBuffer = []
+
+  return new Blob([mergedBuffer], { type: 'application/octet-stream' })
+
+}
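
The recording path in this worker buffers the MP4 init segments (message types 1 and 2) in Mp4HeardBuffer and, while self.Lx is true, the media segments (types 3 and 4) in SaveBuffer; sending { type: 'lx', lx: false } merges both into a single Blob and posts it back. A rough main-thread usage sketch, assuming the worker was already started with a { url } message (as useWorker does):

  // Hypothetical main-thread sketch for toggling recording on this worker.
  const streamWorker = new Worker(new URL('../assets/js/GetVideoStreaming.js', import.meta.url), { type: 'module' })
  streamWorker.postMessage({ url: 'wss://example.invalid/VideoShow' }) // placeholder URL
  let recording = false

  function toggleRecording() {
    recording = !recording
    // lx: true starts buffering segments; lx: false triggers mergeAndDownloadBuffers().
    streamWorker.postMessage({ type: 'lx', lx: recording })
  }

  streamWorker.addEventListener('message', (msg) => {
    if (msg.data.DataType === 'lx') {
      console.log('recorded blob size:', msg.data.Data.size, 'bytes') // msg.data.Data is the merged Blob
    }
  })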

+ 61 - 15
src/assets/js/renderer_2d.js

@@ -1,22 +1,68 @@
-// import { useOutsideSystemStore } from '@/stores/modules/system'
-// const systemStore = useOutsideSystemStore()
-class Canvas2DRenderer {
-  #canvas = null
-  #ctx = null
 
-  constructor(canvas) {
-    this.#canvas = canvas
-    this.#ctx = canvas.getContext('2d')
+export class Canvas2DRenderer {
+  canvas = null
+  ctx = null
+  getShowData = null
+  storage = []
+
+  constructor(canvas, getShowData, outputLayer) {
+    this.canvas = canvas
+    this.ctx = canvas.getContext('2d')
+    this.getShowData = getShowData
+    this.outputLayer = outputLayer
+  }
+  #ArraysAreEqual(arr1, arr2) {
+    if (arr1.length !== arr2.length) {
+      return false
+    }
+    return arr1.every(function (val, index) {
+      return val === arr2[index]
+    })
   }
 
+  #DrawBox(arr) {
+    if (arr.length) {
+      for (let i = 0; i < arr.length; i++) {
+        this.ctx.strokeStyle = 'red'
+        this.ctx.lineWidth = 10
+        if (this.outputLayer) {
+          let [x, y, w, h] = [
+            arr[i].x * this.outputLayer.WidthRatio,
+            arr[i].y * this.outputLayer.HeightRatio,
+            arr[i].w * this.outputLayer.WidthRatio,
+            arr[i].h * this.outputLayer.HeightRatio
+          ]
+          this.ctx.strokeRect(x, y, w, h)
+          this.storage = [x, y, w, h]
+        } else {
+          this.ctx.strokeRect(arr[i].x, arr[i].y, arr[i].w, arr[i].h)
+          this.storage = [arr[i].x, arr[i].y, arr[i].w, arr[i].h]
+        }
+      }
+    } else {
+      let [x, y, w, h] = this.storage
+      this.ctx.strokeStyle = 'transparent'
+      this.ctx.lineWidth = 10
+      this.ctx.strokeRect(x, y, w, h)
+    }
+  }
+  temp = null
   draw(frame) {
-    this.#canvas.width = frame.displayHeight
-    this.#canvas.height = frame.displayWidth
-    this.#ctx.translate(frame.displayHeight, 0);
-    this.#ctx.rotate(Math.PI / 2)
-    this.#ctx.drawImage(frame, 0, 0, frame.displayWidth, frame.displayHeight)
+    this.canvas.width = frame.displayHeight
+    this.canvas.height = frame.displayWidth
+    this.ctx.translate(frame.displayHeight, 0);
+    this.ctx.rotate(Math.PI / 2)
+    this.ctx.drawImage(frame, 0, 0, frame.displayWidth, frame.displayHeight)
+    if (self.GetImg){
+      this.ctx.canvas.convertToBlob().then((blob) => {
+        self.postMessage({ img: blob, type: 'img' })
+      })
+      self.GetImg = false
+    }
+    const showData = this.getShowData()
+    if (showData) {
+      this.#DrawBox(showData)
+    }
     frame.close()
   }
 }
-
-export { Canvas2DRenderer }
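
Canvas2DRenderer now overlays detection boxes on each frame: #DrawBox scales the incoming coordinates by the OutputLayer width/height ratios when they are available, and clears the last box by re-stroking it transparently once the detection list is empty. A small worked example of that scaling (the ratio values are made up for illustration; the real ones come from the /ly?address=/OutputLayer response used in worker.js):

  // Hypothetical illustration of the #DrawBox coordinate mapping.
  const outputLayer = { WidthRatio: 0.5, HeightRatio: 0.5 } // assumed response shape
  const det = { x: 200, y: 120, w: 80, h: 40 }              // box in stream-layer pixels
  const [x, y, w, h] = [
    det.x * outputLayer.WidthRatio,  // 100
    det.y * outputLayer.HeightRatio, // 60
    det.w * outputLayer.WidthRatio,  // 40
    det.h * outputLayer.HeightRatio  // 20
  ]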

+ 43 - 12
src/assets/js/worker.js

@@ -1,8 +1,9 @@
 import { Canvas2DRenderer } from '@/assets/js/renderer_2d.js'
+self.GetImg = false
 
 // Rendering; each animation frame may only be drawn once
 let renderer = null
-
+let showData = null
 class Queue {
   constructor() {
     this._items = []
@@ -56,13 +57,38 @@ var TimedRendering = setInterval(function () {
   if (FrameBuffer.size() > 0) {
     renderer.draw(FrameBuffer.dequeue())
   }
-}, 39)
+}, 25)
+
+function getShowData() {
+  return showData
+}
 
 var flagPost = 0
 // Listen for the start request.
 self.addEventListener('message', (message) => {
+  let { canvas, device, url, type } = message.data
+  if (type === 'jt') {
+    self.GetImg = true
+  }
   if (message.data.DataType === 'Start') {
-    renderer = new Canvas2DRenderer(message.data.canvas)
+    if (!device) {
+      renderer = new Canvas2DRenderer(canvas, getShowData, null)
+    } else {
+      // renderer = new Canvas2DRenderer(message.data.canvas)
+      // Request the output-layer-to-stream-layer ratio > the response may be unreliable =============
+      const ip = url.split('//')[1].split('/')[0]
+      const fetchUrl = `https://${ip}/ly?address=/OutputLayer`
+      fetch(fetchUrl)
+        .then((response) => response.json())
+        .then((res) => {
+          renderer = new Canvas2DRenderer(canvas, getShowData, res.data)
+        })
+        .catch((err) => {
+          alert('视频出错:', err)
+          renderer = new Canvas2DRenderer(canvas, getShowData, null)
+        })
+      // =============
+    }
     if (!VideoDecoder) throw new Error('The current browser does not support the VideoDecoder API')
   }
   if (message.data.DataType === 'Track') {
@@ -72,17 +98,22 @@ self.addEventListener('message', (message) => {
       codedWidth: message.data.codedWidth,
       description: message.data.description
     })
-    console.log(message.data);
+    console.log(message.data)
   }
   if (message.data.DataType === 'Samples') {
-    decoder.decode(
-      new EncodedVideoChunk({
-        type: message.data.type,
-        timestamp: message.data.timestamp,
-        duration: message.data.duration,
-        data: message.data.data
-      })
-    )
+    try {
+      showData = message.data.showData || null
+      decoder.decode(
+        new EncodedVideoChunk({
+          type: message.data.type,
+          timestamp: message.data.timestamp,
+          duration: message.data.duration,
+          data: message.data.data
+        })
+      )
+    } catch (error) {
+      throw new Error(error)
+    }
     if (flagPost === 3) {
       var connect = true
       self.postMessage(connect)

+ 3 - 3
src/components/Icon.vue

@@ -20,7 +20,7 @@ import IconamoonStarLight from '~icons/iconamoon/star-light'
 import IconamoonStarFill from '~icons/iconamoon/star-fill'
 import AntDesignTagOutlined from '~icons/ant-design/tag-outlined'
 import IconoirScreenshot from '~icons/iconoir/screenshot'
-import UilVideo from '~icons/uil/video'
+import CarbonVideo from '~icons/carbon/video';
 import MdiVideo3d from '~icons/mdi/video-3d'
 import EpSetUp from '~icons/ep/set-up'
 import FadArmrecording from '~icons/fad/armrecording'
@@ -117,8 +117,8 @@ onMounted(() => {
     case 'iconoir:screenshot':
       iconCom.value = IconoirScreenshot
       break
-    case 'uil:video':
-      iconCom.value = UilVideo
+    case 'carbon:video':
+      iconCom.value = CarbonVideo
       break
     case 'mdi:video-3d':
       iconCom.value = MdiVideo3d

+ 105 - 106
src/directives/modules/touch.js

@@ -3,113 +3,112 @@ import { isWap } from '@/utils'
 export default {
   mounted(el, binding, vnode) {
     if (!isWap()) return
-    let type = binding.arg // the gesture type passed in
-    let coordinate = {} // object recording coordinate points
-    let timeOutTap
-    let timeOutLong
-    let scaleSize // zoom scale
-    let displacement = {} // displacement of the move
-    // Pythagorean distance between two points
-    function getDistance(bg, end) {
-      return Math.sqrt(Math.pow(end.x - bg.x, 2) + Math.pow(end.y - bg.y, 2))
+    function point2D(x, y) {
+      return { x: x, y: y }
     }
-    // On touch start
-    el.addEventListener(
-      'touchstart',
-      function (e) {
-        // Coordinates of the first touch point
-        let x = e.touches[0].pageX
-        let y = e.touches[0].pageY
-        // A sufficiently long press is treated as the longTouch type
-        timeOutLong = setTimeout(() => {
-          timeOutLong = 0
-          if (type === 'longTouch') {
-            binding.value.func(binding.value.param)
-          }
-        }, 2000)
-        timeOutTap = setTimeout(() => {
-          timeOutTap = 0
-          if (type === 'tap' && e.touches.length === 1) {
-            binding.value.func(binding.value.param)
-          }
-        }, 200)
-        // Two fingers and the type is pinch zoom --- scaleTouch
-        if (e.touches.length > 1 && type === 'scaleTouch') {
-          // Record the distance between the two fingers
-          coordinate.start = getDistance(
-            {
-              x: e.touches[0].pageX,
-              y: e.touches[0].pageY
-            },
-            {
-              x: e.touches[1].pageX,
-              y: e.touches[1].pageY
-            }
-          )
-        }
-        // For sliding, also record the start position; only single-finger moves
-        if (type === 'slideTouch' && e.touches.length == 1) {
-          // debugger
-          displacement.start = {
-            x: e.touches[0].pageX,
-            y: e.touches[0].pageY
-          }
-        }
-      },
-      false
-    )
-    el.addEventListener(
-      'touchmove',
-      function (e) {
-        clearTimeout(timeOutTap)
-        clearTimeout(timeOutLong)
-        timeOutTap = 0
-        timeOutLong = 0
-        // Pinch-zoom type
-        if (type == 'scaleTouch' && e.touches.length === 2) {
-          // Distance between the two fingers while moving
-          coordinate.stop = getDistance(
-            {
-              x: e.touches[0].pageX,
-              y: e.touches[0].pageY
-            },
-            {
-              x: e.touches[1].pageX,
-              y: e.touches[1].pageY
-            }
-          )
-          // Compute the zoom delta
-          scaleSize = coordinate.stop / coordinate.start - 1
-          // Keep the image from zooming in or out without bound
-          // Slow the zoom down a little, hence the division
-          binding.value.func(scaleSize / 2, false)
-        }
-        // Slide type
-        if (type == 'slideTouch' && e.touches.length === 1) {
-          displacement.end = {
-            x: e.changedTouches[0].pageX,
-            y: e.changedTouches[0].pageY
-          }
-          binding.value.func({
-            x: displacement.end.x - displacement.start.x,
-            y: displacement.end.y - displacement.start.y,
-            is_endMove: false
-          })
-        }
-      },
-      false
-    )
-    el.addEventListener(
-      'touchend',
-      function (e) {
-        if (type === 'scaleTouch') {
-          binding.value.func(0, true)
-        }
-        if (type === 'slideTouch') {
-          binding.value.func({ x: 0, y: 0, is_endMove: true })
+
+    var reqAnimationFrame = (function () {
+      return (
+        window[window.Hammer.prefixed(window, 'requestAnimationFrame')] ||
+        function (callback) {
+          window.setTimeout(callback, 1000 / 60)
         }
-      },
-      false
-    )
+      )
+    })()
+
+    var tMatrix = [1, 0, 0, 1, 0, 0] // x scale, unused, unused, y scale, x translate, y translate
+
+    var initScale = 1 // initial scale
+    var mc = new window.Hammer.Manager(el)
+    var ticking = false
+    var poscenter = point2D(0, 0) // cached center point of the two fingers
+    var duration = '' // transition setting, used for the double-tap zoom effect
+    var lastTranslate = point2D(0, 0) // last recorded translation
+    var lastcenter = point2D(el.offsetWidth / 2, el.offsetHeight / 2) // center of the element, compared against the pinch center
+
+    var center = lastcenter
+    mc.add(new window.Hammer.Pan({ threshold: 0, pointers: 1 }))
+    mc.add(new window.Hammer.Pinch({ threshold: 0 }))
+    mc.add(new window.Hammer.Tap({ event: 'doubletap', taps: 2 }))
+    mc.on('panmove', onPan)
+    mc.on('panstart', onPanStart)
+    mc.on('pinchmove', onPinch)
+    mc.on('pinchstart', onPinchStart)
+    mc.on('doubletap', onDoubleTap)
+
+    function onPanStart(ev) {
+      lastTranslate = point2D(tMatrix[4], tMatrix[5]) // cache the previous translation
+    }
+    function onPan(ev) {
+      duration = ''
+      el.className = ''
+      tMatrix[4] = lastTranslate.x + ev.deltaX
+      tMatrix[5] = lastTranslate.y + ev.deltaY
+      requestElementUpdate('onpan')
+    }
+    function onPinchStart(ev) {
+      duration = ''
+      lastTranslate = point2D(tMatrix[4], tMatrix[5]) // record the previous translation
+      initScale = tMatrix[0] || 1
+      poscenter = point2D(ev.center.x, ev.center.y)
+
+      lastcenter = point2D(center.x + lastTranslate.x, center.y + lastTranslate.y) // recompute the center after scaling
+      poscenter = point2D(ev.center.x - lastcenter.x, ev.center.y - lastcenter.y)
+      console.log('center', lastcenter.x, lastcenter.y)
+
+      requestElementUpdate('onpinchStart')
+    }
+    function onPinch(ev) {
+      var nowScale = (tMatrix[0] = tMatrix[3] = initScale * ev.scale)
+      var composscal = 1 - ev.scale
+      //tMatrix[4] = poscenter.x - ((poscenter.x - lastcenter.x) *  ev.scale + lastcenter.x)  + lastTranslate.x // finally add the previous translation
+      //tMatrix[5] = poscenter.y - ((poscenter.y - lastcenter.y) *  ev.scale + lastcenter.y)  + lastTranslate.y
+      tMatrix[4] = (1 - ev.scale) * poscenter.x + lastTranslate.x
+      tMatrix[5] = (1 - ev.scale) * poscenter.y + lastTranslate.y
+      requestElementUpdate('onpinch')
+    }
+
+    function onDoubleTap(ev) {
+      duration = '.3s ease all'
+      var nowScale = tMatrix[0]
+      if (nowScale != 1 || tMatrix[4] != 0) {
+        // scale is not 1, reset it back to 1
+        tMatrix[0] = tMatrix[3] = 1
+        tMatrix[4] = tMatrix[5] = 0
+      } else {
+        var pointer = ev.center
+        var scale = 2
+        tMatrix[0] = tMatrix[3] = scale
+        //var last = point2D
+        //tMatrix[4] = pointer.x - ((pointer.x-lastcenter.x) * scale + lastcenter.x);
+        //tMatrix[5] = pointer.y - ((pointer.y-lastcenter.y) * scale + lastcenter.y);
+        tMatrix[4] = (1 - scale) * (pointer.x - center.x)
+        tMatrix[5] = (1 - scale) * (pointer.y - center.y)
+      }
+      requestElementUpdate('doubleTap')
+    }
+
+    function updateElementTransform() {
+      el.style.transition = duration
+      var tmp = tMatrix.join(',')
+      console.log(tmp)
+      el.style.transform = 'matrix(' + tmp + ')'
+      ticking = false
+    }
+
+    function requestElementUpdate() {
+      arguments && console.log(arguments[0])
+
+      if (!ticking) {
+        reqAnimationFrame(updateElementTransform)
+        ticking = true
+      }
+    }
+
+    /**
+    Initial setup
+    */
+
+    requestElementUpdate()
   }
 }
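
The rewritten directive routes pan, pinch, and double-tap gestures through Hammer.js (it expects window.Hammer to be available globally) and expresses every gesture as a single CSS transform matrix, tMatrix. The layout of that matrix, spelled out with a throwaway example (the selector here is hypothetical):

  // CSS matrix(a, b, c, d, tx, ty): a/d are the x/y scale, b/c the (unused) shear,
  // tx/ty the translation in pixels. tMatrix keeps the same order.
  const tMatrix = [2, 0, 0, 2, 30, 10] // 2x zoom, shifted 30px right and 10px down
  document.querySelector('.zoom-target').style.transform = 'matrix(' + tMatrix.join(',') + ')'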

+ 93 - 8
src/hooks/useWorker.js

@@ -1,28 +1,91 @@
+import { FFmpeg } from '@ffmpeg/ffmpeg'
+import { fetchFile, toBlobURL } from '@ffmpeg/util'
+import { showSaveFilePicker } from 'native-file-system-adapter'
+import { useOutsideSystemStore } from '@/stores/modules/system.js'
+import workerUrl from '../../node_modules/@ffmpeg/ffmpeg/dist/esm/worker.js?worker&url'
+
+const ffmpeg = new FFmpeg()
+
+;(async () => {
+  await ffmpeg.load({
+    coreURL: await toBlobURL(
+      new URL('../assets/core/package/pkg/esm/ffmpeg-core.js', import.meta.url),
+      'text/javascript'
+    ),
+    wasmURL: await toBlobURL(
+      new URL('../assets/core/package/pkg/esm/ffmpeg-core.wasm', import.meta.url),
+      'application/wasm'
+    ),
+    classWorkerURL: new URL(workerUrl, import.meta.url).toString()
+  })
+})()
+
+function formatDateTime(date) {
+  function padZero(num) {
+    return num < 10 ? '0' + num : num
+  }
+
+  var year = date.getFullYear()
+  var month = padZero(date.getMonth() + 1)
+  var day = padZero(date.getDate())
+  var hours = padZero(date.getHours())
+  var minutes = padZero(date.getMinutes())
+  var seconds = padZero(date.getSeconds())
+  var Milliseconds = date.getMilliseconds()
+
+  return `${year}${month}${day}${hours}${minutes}${seconds}${Milliseconds}`
+}
+
+async function DownloadStreamSaver(blob, fileName) {
+  const opts = {
+    suggestedName: fileName,
+    types: [{ 'image/png': ['png'] }]
+  }
+  const handle = await showSaveFilePicker(opts)
+  const ws = await handle.createWritable()
+  ws.write(blob)
+  ws.close()
+}
+
 export default function useWorker(url, className, callback = () => {}) {
+  const useSystem = useOutsideSystemStore()
 
   let canvas = document.querySelector(className)
   canvas = canvas.transferControlToOffscreen()
   const worker = new Worker(new URL('../assets/js/worker.js', import.meta.url), {
     type: 'module'
   })
-  worker.addEventListener('message', () =>{
-    callback()
+  worker.addEventListener('message', (msg) => {
+    if (msg.data.type === 'img') {
+      // saveAs(msg.data.img,`${formatDateTime(new Date())}.png`)
+      DownloadStreamSaver(msg.data.img, `${formatDateTime(new Date())}.png`)
+    } else {
+      callback()
+    }
   })
   let DataType = 'Start'
-  worker.postMessage({  DataType,canvas }, [canvas])
+  worker.postMessage({ DataType, canvas, url, device: useSystem.deviceInfo?.value || null }, [
+    canvas
+  ])
 
   const WebSocketWork = new Worker(new URL('../assets/js/GetVideoStreaming.js', import.meta.url), {
     type: 'module'
   })
   WebSocketWork.postMessage({ url })
-  WebSocketWork.addEventListener('message', (message) => {
+  WebSocketWork.addEventListener('message', async (message) => {
     if (message.data.DataType === 'Track') {
       let DataType = 'Track'
-      let codec =  message.data.track.codec.startsWith('vp08') ? 'vp8' : message.data.track.codec
+      let codec = message.data.track.codec.startsWith('vp08') ? 'vp8' : message.data.track.codec
       let codedHeight = message.data.track.video.height
       let codedWidth = message.data.track.video.width
       let description = message.data.tkinfo
-      worker.postMessage({DataType,codec,codedHeight,codedWidth,description})
+      worker.postMessage({
+        DataType,
+        codec,
+        codedHeight,
+        codedWidth,
+        description
+      })
     }
     if (message.data.DataType === 'Samples') {
       let DataType = 'Samples'
@@ -30,9 +93,31 @@ export default function useWorker(url, className, callback = () => {}) {
       let timestamp = (1e6 * message.data.sample.cts) / message.data.sample.timescale
       let duration = (1e6 * message.data.sample.duration) / message.data.sample.timescale
       let data = message.data.sample.data
-      worker.postMessage({DataType,type,timestamp,duration,data})
+      let showData = message.data.showData
+      worker.postMessage({
+        DataType,
+        type,
+        timestamp,
+        duration,
+        data,
+        showData
+      })
+    }
+    if (message.data.DataType === 'lx') {
+      ffmpeg.on('log', ({ message: msg }) => {
+        console.log(msg)
+      })
+      await ffmpeg.writeFile('test.mp4', await fetchFile(message.data.Data))
+      ffmpeg.exec(['-i', 'test.mp4', '-c', 'copy', '-map', '0', 'out.mp4'])
+      const data = await ffmpeg.readFile('out.mp4')
+      ffmpeg.deleteFile('test.mp4')
+      DownloadStreamSaver(
+        new Blob([data.buffer], { type: 'video/mp4' }),
+        `${formatDateTime(new Date())}.mp4`
+      )
+      ffmpeg.deleteFile('out.mp4')
     }
   })
 
-  return {worker, WebSocketWork}
+  return { worker, WebSocketWork }
 }
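
When the streaming worker reports a finished recording ('lx'), the merged Blob is written into ffmpeg.wasm's virtual file system, remuxed with a stream copy (-c copy -map 0) to produce a cleanly muxed MP4, and then saved through the native-file-system-adapter picker. A minimal standalone sketch of that remux step, assuming the ffmpeg instance has already finished loading; this sketch also awaits exec before reading the output:

  // Hypothetical remux helper using the @ffmpeg/ffmpeg 0.12 API and fetchFile from @ffmpeg/util.
  async function remuxToMp4(blob) {
    await ffmpeg.writeFile('in.mp4', await fetchFile(blob))
    await ffmpeg.exec(['-i', 'in.mp4', '-c', 'copy', '-map', '0', 'out.mp4']) // copy streams, no re-encode
    const data = await ffmpeg.readFile('out.mp4') // Uint8Array
    await ffmpeg.deleteFile('in.mp4')
    await ffmpeg.deleteFile('out.mp4')
    return new Blob([data.buffer], { type: 'video/mp4' })
  }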

+ 7 - 1
src/stores/modules/screenOperation.js

@@ -11,11 +11,17 @@ export const useScreenOperation = defineStore('screenOperation', {
     clearScreen: false,
     videoW: 2942,
     ratio: 0,
-    title: import.meta.env.VITE_APP_TITLE
+    title: ''
   }),
   actions: {
     setClearScreen(bool) {
       this.clearScreen = bool
+    },
+    getConfig() {
+      const { API_GET_CONFIG } = useRequest()
+      API_GET_CONFIG().then(({ ttTitle }) => {
+        this.title = ttTitle ? ttTitle : import.meta.env.VITE_APP_TITLE
+      })
     }
   }
 })

+ 2 - 1
src/stores/modules/system.js

@@ -37,7 +37,8 @@ export const useSystemStore = defineStore('systemStore', {
     event: isWap() ? 'touchstart' : 'click', // event type
     cilpInfo: {}, // crop info
     rtspVideo: '', // RTSP video
-    nvrId: '' // NVR (hard-disk recorder) id
+    nvrId: '', // NVR (hard-disk recorder) id
+    videoLoading: false // panorama loading flag
   }),
   getters: {
     ipF() {

+ 23 - 13
src/views/VideoMonitoring/VideoMonitoring.vue

@@ -5,10 +5,10 @@
       <video-com v-show="isVideo"></video-com>
       <main-box />
     </div>
+    <audio ref="alertAudioRef" loop src="/audio/alert.mp3">
+      Your browser does not support the <code>audio</code> element.
+    </audio>
   </layout-box>
-  <audio ref="alertAudioRef" loop src="/audio/alert.mp3">
-    Your browser does not support the <code>audio</code> element.
-  </audio>
 </template>
 <script setup>
 import HeaderBox from './components/Header.vue'
@@ -16,15 +16,21 @@ import MainBox from './components/MainBox.vue'
 import LayoutBox from '@/Layout/index.vue'
 import VideoCom from './components/VideoCom.vue'
 import { useOutsideSystemStore } from '@/stores/modules/system.js'
+import { useOutsideScreenOperation } from '@/stores/modules/screenOperation.js'
 import { useRatio } from '@/hooks/useRatio.js'
 import { dialogFn } from '@/utils'
 import useWorker from '@/hooks/useWorker'
+
+const useSO = useOutsideScreenOperation()
 const route = useRoute()
 const useSystem = useOutsideSystemStore()
 const isVideo = ref(true)
 
 let preRouteName = null
-let worker = null
+
+let workerObj = shallowRef(null)
+
+provide('workerObj', workerObj)
 
 const alertAudioRef = ref(null)
 const handleAudio = (newV) => {
@@ -48,21 +54,22 @@ const controlVideo = () => {
 }
 const init = () => {
  // Reset the panorama and reconnect
-  useSystem.resetQj(worker).then(() => {
+  useSystem.resetQj(workerObj.value).then(() => {
+    useRatio()
     switch (parseInt(useSystem.videoSwitch)) {
       case 1:
         useSystem.getVideoUrl(0).then(() => {
           console.log('websocket请求')
-          worker = useWorker(useSystem.videoUrl, '.pub-video', () => {
-            useRatio()
+          workerObj.value = useWorker(useSystem.videoUrl, '.pub-video', () => {
+            useSystem.videoLoading = false
           })
         })
         break
       case 2:
         useSystem.getVideoUrl(1).then(() => {
           console.log('websocketAi请求')
-          worker = useWorker(useSystem.videoUrl, '.pub-video', () => {
-            useRatio()
+          workerObj.value = useWorker(useSystem.videoUrl, '.pub-video', () => {
+            useSystem.videoLoading = false
           })
         })
         break
@@ -73,9 +80,11 @@ const init = () => {
 watch(
   [route, () => useSystem.videoSwitch],
   ([newV, vV], [prevR, prevV]) => {
+    useSystem.videoLoading = true
     if (['RemotePlayback'].includes(newV.name)) {
-      worker?.WebSocketWork.terminate()
-      worker?.worker.terminate()
+      workerObj.value?.WebSocketWork.terminate()
+      workerObj.value?.worker.terminate()
+      useSystem.videoLoading = false
     }
     if (['RemotePlayback'].includes(preRouteName)) {
       init()
@@ -109,6 +118,7 @@ watch(
 
 onMounted(() => {
   useSystem.connectAlarmWS()
+  useSO.getConfig()
   preRouteName = route.name
   dialogFn(
     'info',
@@ -124,7 +134,7 @@ onMounted(() => {
   )
 })
 onUnmounted(() => {
-  worker?.WebSocketWork.terminate()
-  worker?.worker.terminate()
+  workerObj.value?.WebSocketWork.terminate()
+  workerObj.value?.worker.terminate()
 })
 </script>
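
The worker handle is now kept in a shallowRef and provided under the 'workerObj' key, so descendants such as Header.vue can inject it and post screenshot ('jt') or recording ('lx') commands without prop drilling. A minimal consumer sketch (hypothetical child component, same key):

  // In a child component's <script setup>.
  import { inject } from 'vue'

  const workerObj = inject('workerObj')

  function takeScreenshot() {
    // worker.js sets self.GetImg and posts the next rendered frame back as a PNG Blob.
    workerObj.value?.worker.postMessage({ type: 'jt' })
  }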

+ 108 - 29
src/views/VideoMonitoring/components/Header.vue

@@ -12,7 +12,7 @@
     <div class="layout-header-box-address" flex flex-items-center>
       <div :key="i" v-for="(btn, i) in btns">
         <dv-button :bg="false" @click="open(btn)"
-          ><Icon :icon="btn.icon" width="2vw" height="2vw" />{{ btn.name }}</dv-button
+          ><Icon :icon="btn.icon" width="2vw" height="2vw" mr-1 />{{ btn.name }}</dv-button
         >
       </div>
       <!-- <i-mdi:address-marker-outline color="#0080ff" width="40" height="40" />
@@ -31,7 +31,24 @@
       flex-items-center
       color="#8ac5ff"
     >
-      <div
+      <div class="layout-header-box-right-btn" flex flex-items-center cursor-pointer gap-3>
+        <div
+          class="flex justify-center items-center"
+          :style="{ width: item.w + 'px', height: item.h + 'px' }"
+          v-for="(item, i) in listComputed"
+          :key="i"
+          v-on:[event]="toolsClick(item.type)"
+        >
+          <Icon
+            :class="[item.class]"
+            :icon="item.iCom"
+            :width="item.w"
+            :height="item.h"
+            color="#8ac5ff"
+          />
+        </div>
+      </div>
+      <!-- <div
         class="layout-header-box-right-hat"
         flex
         flex-items-center
@@ -40,8 +57,8 @@
         v-on:[event]="openModal('aqm')"
       >
         <Icon icon="lucide:hard-hat" width="2.5vw" height="2.5vw" style="color: #8ac5ff" />
-      </div>
-      <div class="layout-header-box-right-switch" flex flex-items-center cursor-pointer>
+      </div> -->
+      <!-- <div class="layout-header-box-right-switch" flex flex-items-center cursor-pointer>
         <svg
           xmlns="http://www.w3.org/2000/svg"
           v-on:[event]="toAi"
@@ -58,24 +75,7 @@
             d="M20 11V9a2 2 0 0 0-2-2H6a2 2 0 0 0-2 2v5a2 2 0 0 0 2 2h4m4 5v-4a2 2 0 1 1 4 0v4m-4-2h4m3-4v6"
           />
         </svg>
-        <!-- <n-dropdown
-          trigger="click"
-          :options="ai.options"
-          placement="buttom"
-          @select="(v) => ai.handleSelect(v, ai)"
-        >
-          <svg xmlns="http://www.w3.org/2000/svg" width="3vw" height="3vw" viewBox="0 0 24 24">
-            <path
-              fill="none"
-              :stroke="ai.color"
-              stroke-linecap="round"
-              stroke-linejoin="round"
-              stroke-width="2"
-              d="M20 11V9a2 2 0 0 0-2-2H6a2 2 0 0 0-2 2v5a2 2 0 0 0 2 2h4m4 5v-4a2 2 0 1 1 4 0v4m-4-2h4m3-4v6"
-            />
-          </svg>
-        </n-dropdown> -->
-      </div>
+      </div> -->
       <!-- <div
         class="layout-header-box-right-compass"
         flex
@@ -90,7 +90,7 @@
           <Compass />
         </n-popover>
       </div> -->
-      <div class="layout-header-box-right-tq" mx-20px>
+      <div class="layout-header-box-right-tq" mx-3>
         <Weather />
       </div>
       <div class="layout-header-box-right-icon screen-full-trigger" flex flex-items-center>
@@ -98,16 +98,16 @@
           icon="akar-icons:reduce"
           cursor-pointer
           v-if="isFullscreen"
-          width="3vw"
-          height="3vw"
+          width="20"
+          height="20"
           color="#8ac5ff"
         ></Icon>
         <Icon
           icon="akar-icons:full-screen"
           v-else
           cursor-pointer
-          width="3vw"
-          height="3vw"
+          width="20"
+          height="20"
           color="#8ac5ff"
         ></Icon>
       </div>
@@ -175,6 +175,10 @@ const ai = reactive({
   }
 })
 
+const route = useRoute()
+const state = ref(false)
+const workerObj = inject('workerObj')
+
 const btns = [
   {
     name: '全景态势',
@@ -191,7 +195,7 @@ const btns = [
     color: '#3299ff',
     icon: 'solar:playback-speed-bold',
     auth: ['PlaybackView']
-  },
+  }
   // {
   //   name: '设备维护',
   //   path: '/video_monitoring/device_management',
@@ -202,6 +206,73 @@ const btns = [
   // }
 ]
 
+const list = shallowRef([
+  // {
+  //   iCom: 'ant-design:tag-outlined',
+  //   label: '标签过滤',
+  //   color: 'white',
+  //   isPopover: true,
+  //   component: RightBqglBox
+  // },
+  {
+    iCom: 'iconoir:screenshot',
+    label: '截图',
+    color: 'white',
+    type: 'jt',
+    w: 20,
+    h: 20
+  },
+  {
+    iCom: 'carbon:video',
+    label: '录像',
+    color: 'white',
+    type: 'lx',
+    class: '',
+    w: 25,
+    h: 25
+  }
+  // {
+  //   iCom: 'mdi:video-3d',
+  //   label: '3D缩放',
+  // },
+  // {
+  //   iCom: 'ep:set-up',
+  //   label: '码流切换',
+  //   color: 'white'
+  // }
+])
+const listComputed = computed(() => {
+  return (
+    route.name == 'RemotePlayback'
+      ? list.value.filter((v) => !['gl', 'lx', 'jt', 'dropdown'].includes(v.type))
+      : list.value
+  ).map((item) => {
+    if (item.type === 'lx') {
+      if (state.value) {
+        item.iCom = 'fad:armrecording'
+        item.color = 'red'
+        item.class = 'update-style'
+      } else {
+        item.iCom = 'carbon:video'
+        item.color = 'white'
+        item.class = ''
+      }
+    }
+    return item
+  })
+})
+const toolsClick = (type) => {
+  switch (type) {
+    case 'lx':
+      state.value = !state.value
+      workerObj.value.WebSocketWork.postMessage({ type: 'lx', lx: state.value })
+      useSystem.recordingStatus = state.value
+      break
+    case 'jt':
+      workerObj.value.worker.postMessage({ type: 'jt' })
+      break
+  }
+}
 const open = (nav) => {
   if (nav.type == 'link') {
     window.open(nav.path, '_blank')
@@ -251,7 +322,15 @@ onMounted(() => {
   position: relative;
   background: url('@/assets/images/h-bg.png') no-repeat center;
   background-size: 100% 100%;
-  min-height: 8vw;
+  min-height: 5vw;
+
+  .update-style {
+    animation-name: showHide;
+    animation-duration: 1s;
+    animation-direction: alternate;
+    animation-iteration-count: infinite;
+    animation-timing-function: linear;
+  }
 
   & > span {
     position: absolute;

+ 13 - 0
src/views/VideoMonitoring/components/VideoCom.vue

@@ -25,6 +25,19 @@
       </template>
     </template>
     <!-- <tag-list></tag-list> -->
+    <div
+      v-if="useSystem.videoLoading"
+      absolute
+      top-0
+      right-0
+      left-0
+      bottom-0
+      flex
+      flex-justify-center
+      flex-items-center
+    >
+      <Icon icon="loading" color="#007bff" width="60" height="60"/>
+    </div>
     <div class="r-box position-box">
       <right-box></right-box>
     </div>

+ 22 - 14
src/views/VideoMonitoring/pages/RemotePlayback/components/PartBox.vue

@@ -1,7 +1,7 @@
 <!-- Detail -->
 <template>
   <div class="part-box-wrapper h-full w-full flex flex-col gap-6px">
-    <div
+    <!-- <div
       :class="['part-box', partActive === i ? 'active' : '']"
       v-for="(item, i) in partUrl"
       :key="item.UUID"
@@ -12,7 +12,6 @@
       <div :class="[`screen-full-target${i}`, 'w-full aspect-ratio-video']">
         <template v-if="item.UUID">
           <pub-video :new-class="'playback-video' + item.UUID" :is-scale="false" />
-          <!-- Loading -->
           <div
             v-if="item.loading"
             absolute
@@ -27,7 +26,6 @@
             <Icon icon="loading" color="#007bff" width="60" />
           </div>
         </template>
-        <!-- Action buttons -->
         <div
           class="panoramic-situation-main-btn-box"
           absolute
@@ -39,10 +37,11 @@
           <OperatingButton @closeWorker="closeWorkerBtn" :i="i" :part="item" />
         </div>
       </div>
-    </div>
+    </div> -->
   </div>
 </template>
 <script setup>
+import { v4 as uuidv4 } from 'uuid'
 import useWorker from '@/hooks/useWorker'
 import { useOutsideSystemStore } from '@/stores/modules/system.js'
 import { useDebounceFn } from '@vueuse/core'
@@ -93,27 +92,36 @@ let cloneParts = null
 
 const getWss = (type, timestamp, x = 0, y = 0, index) => {
   return new Promise((resolve, reject) => {
-    API_VIEW_PLAYBACK_POST({
+    // API_VIEW_PLAYBACK_POST({
+    //   NvrId: useSystem.nvrId,
+    //   Type: type,
+    //   UTCDiff: new Date().getTimezoneOffset(),
+    //   Timestamp: timestamp,
+    //   X: x,
+    //   Y: y
+    // }).then((res) => {
+    if (type) {
+      partUrl[index].UUID = uuidv4()
+      partUrl[index].PartCenterX = x
+      partUrl[index].PartCenterY = y
+      partUrl[index].loading = true
+    }
+    resolve({
       NvrId: useSystem.nvrId,
       Type: type,
       UTCDiff: new Date().getTimezoneOffset(),
       Timestamp: timestamp,
       X: x,
       Y: y
-    }).then((res) => {
-      if (type) {
-        partUrl[index].UUID = res.wss
-        partUrl[index].PartCenterX = x
-        partUrl[index].PartCenterY = y
-        partUrl[index].loading = true
-      }
-      resolve(res)
     })
+    // })
   })
 }
 const openWork = (res, index, cName = '.rpb-video') => {
   return new Promise((resolve) => {
-    const url = `wss://${useSystem.ip.split('//')[1]}/VideoShow/Common?UUID=${res.wss}`
+    const url = `wss://${useSystem.ip.split('//')[1]}/VideoShow/Playback/${
+      res.type ? 'Part' : 'Full'
+    }/Main?Timestamp=${res.Timestamp}`
 
     if (!cName) {
       nextTick(() => {

+ 1 - 1
vite.config.js

@@ -18,7 +18,7 @@ import deletePlugin from 'rollup-plugin-delete'
 export default ({ mode, command }) => {
   // const env = loadEnv(mode, process.cwd())
   return defineConfig({
-    base: './',
+    base: '/foreground/',
     build: {
       outDir: 'dist-tt',
       sourcemap: false,

Some files were not shown because too many files changed in this diff