add video

commit 1e23ce4721
parent 21fd3e32a6
@@ -22,6 +22,7 @@ from flandre.nodes.Midi import Midi
 from flandre.nodes.Mi import Mi
 from flandre.nodes.Recorder import Recorder
 from flandre.nodes.Web import Web
+from flandre.nodes.VideoQt import VideoQt
 from flandre.utils.Msg import KillMsg, NodeOnlineMsg, Msg1, Msg2
 from flandre.config import CONFIG_FOLDER
 
@@ -40,6 +41,7 @@ class LaunchComponent(Enum):
     Midi = Midi
     Mi = Mi
     Web = Web
+    VideoQt = VideoQt
 
 
 def launch(arg: dict[LaunchComponent, dict]):
147  flandre/nodes/VideoQt.py  Normal file
@@ -0,0 +1,147 @@
import sys
import time

import cv2
import numpy as np
from PyQt6.QtCore import QByteArray, Qt
from PyQt6.QtGui import QImage, QPixmap, QKeyEvent, QWheelEvent
from PyQt6.QtWidgets import QMainWindow, QApplication, QGraphicsPixmapItem, QGraphicsScene

from flandre.config import C
from flandre.nodes.Node import Node
from flandre.pyqt.FFmpegReceiver import FFmpegReceiver
from flandre.pyqt.Image import Ui_MainWindow
from flandre.pyqt.ZMQReceiver import ZMQReceiver
from flandre.utils.Msg import KillMsg, Msg, BMMsg, RfMatMsg, KeyPressMsg, RGB888Msg
from flandre.utils.RfMat import RfMat


class Adv(QMainWindow, Ui_MainWindow):
    def __init__(self, p: Node, parent=None):
        super(Adv, self).__init__(parent)
        self.p = p
        self.setupUi(self)
        # ZMQ receiver delivers encoded flandre messages (RfMatMsg, KillMsg, ...).
        zmq_receiver = ZMQReceiver(self)
        zmq_receiver.zmq_event.connect(self.on_zmq_event)
        zmq_receiver.start()

        # FFmpeg receiver delivers raw 1920x1080 RGB24 frames from the RTSP camera.
        ffmpeg_receiver = FFmpegReceiver(self)
        ffmpeg_receiver.zmq_event.connect(self.on_ffmpeg_event)
        ffmpeg_receiver.start()

        self.g = QGraphicsPixmapItem()
        self.s = QGraphicsScene()
        self.s.addItem(self.g)
        self.graphicsView.setScene(self.s)
        self.grey = False
        self.scale = False
        self.watermark = True
        self.zoom = 1.0
        self.need_fit = False

    def keyPressEvent(self, a0: QKeyEvent):
        t = a0.text()
        match t:
            case 'm':
                self.grey = not self.grey
            case 's':
                self.scale = not self.scale
                if not self.scale:
                    self.need_fit = True
            case 't':
                self.watermark = not self.watermark

    def wheelEvent(self, a0: QWheelEvent):
        if a0.angleDelta().y() > 0:
            self.zoom += 0.1
        if a0.angleDelta().y() < 0:
            self.zoom -= 0.1
        # Keep the zoom factor positive so resize() never receives a non-positive size.
        self.zoom = max(self.zoom, 0.1)

    def on_ffmpeg_event(self, msg: QByteArray):
        b = msg.data()
        w = 1920
        h = 1080
        qImg = QImage(
            b,
            w, h, 3 * w,
            QImage.Format.Format_RGB888
        )
        # QPixmap has no QImage constructor; convert explicitly.
        self.g.setPixmap(QPixmap.fromImage(qImg))
        self.s.setSceneRect(0.0, 0.0, w, h)
        self.graphicsView.fitInView(self.s.sceneRect(), Qt.AspectRatioMode.KeepAspectRatio)

    def on_zmq_event(self, msg: QByteArray):
        msg = Msg.decode_msg(msg.data())
        if isinstance(msg, KillMsg):
            if msg.name == '':
                self.close()
        elif isinstance(msg, RfMatMsg):
            w = msg.rfmat.w
            h = msg.rfmat.h
            d: RfMat = msg.rfmat
            d2 = (d
                  .resize((int(w * self.zoom), int(h * self.zoom)))
                  .watermark(cond=self.watermark)
                  )
            w = d2.w
            h = d2.h
            qImg = QImage(
                d2.__bytes__(),
                w, h, 1 * w,
                QImage.Format.Format_Grayscale8
            )
            self.g.setPixmap(QPixmap.fromImage(qImg))
            self.s.setSceneRect(0.0, 0.0, w, h)
            if self.scale:
                self.graphicsView.fitInView(self.s.sceneRect())
            else:
                if self.need_fit:
                    self.graphicsView.fitInView(self.s.sceneRect(), Qt.AspectRatioMode.KeepAspectRatio)
                    self.need_fit = False
        # elif isinstance(msg, RGB888Msg):
        #     w = msg.w
        #     h = msg.h
        #     d: RfMat = msg.rfmat
        #     d2 = (d
        #           .resize((int(w * self.zoom), int(h * self.zoom)))
        #           .watermark(cond=self.watermark)
        #           )
        #     w = d2.w
        #     h = d2.h
        #     if self.grey:
        #         qImg = QImage(
        #             d2.__bytes__(),
        #             w, h, w,
        #             QImage.Format.Format_Grayscale8
        #         )
        #     else:
        #         qImg = QImage(
        #             d2.__bytes__(),
        #             w, h, 3 * w,
        #             QImage.Format.Format_BGR888
        #         )
        #     self.g.setPixmap(QPixmap(qImg))
        #     self.s.setSceneRect(0.0, 0.0, w, h)
        #     if self.scale:
        #         self.graphicsView.fitInView(self.s.sceneRect())
        #     else:
        #         if self.need_fit:
        #             self.graphicsView.fitInView(self.s.sceneRect(), Qt.AspectRatioMode.KeepAspectRatio)
        #             self.need_fit = False
        #
        # RGB888Msg


class VideoQt(Node):
    topics = [KeyPressMsg]

    def __init__(self):
        super().__init__()

    def loop(self):
        app = QApplication(sys.argv)
        MainWindow = Adv(self)
        # MainWindow.move(int(px), int(py))
        # MainWindow.resize(int(sx), int(sy))
        MainWindow.show()
        app.exec()
27  flandre/pyqt/FFmpegReceiver.py  Normal file
@@ -0,0 +1,27 @@
import subprocess
import threading

from PyQt6 import QtCore

from flandre.nodes.Node import Node


class FFmpegReceiver(QtCore.QObject):
    zmq_event = QtCore.pyqtSignal('QByteArray')

    def start(self):
        threading.Thread(target=self._execute, daemon=True).start()

    def _execute(self):
        # Decode the RTSP stream to raw RGB24 frames on stdout; '-loglevel quiet' keeps
        # log output off the pipe even though stderr is merged into stdout.
        p = subprocess.Popen([
            'ffmpeg',
            '-loglevel', 'quiet',
            '-flags', 'low_delay',
            '-fflags', 'nobuffer',
            '-i', 'rtsp://admin:ab12ab12@11.6.2.4:554/h264/ch1/main/av_stream',
            '-pix_fmt', 'rgb24',
            '-f', 'rawvideo',
            '-'
        ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        while True:
            # Emit exactly one 1920x1080 RGB24 frame per read.
            self.zmq_event.emit(p.stdout.read(1920 * 1080 * 3))
27  test/test_ffmpeg_stdout.py  Normal file
@@ -0,0 +1,27 @@
import subprocess

import cv2
import numpy as np

cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)


def f1():
    p = subprocess.Popen([
        'ffmpeg',
        '-loglevel', 'quiet',
        '-flags', 'low_delay',
        '-fflags', 'nobuffer',
        '-i', 'rtsp://admin:ab12ab12@11.6.2.4:554/h264/ch1/main/av_stream',
        '-pix_fmt', 'rgb24',
        '-f', 'rawvideo',
        '-'
    ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while True:
        b = p.stdout.read(1920 * 1080 * 3)
        frame = np.frombuffer(b, dtype=np.uint8).reshape((1080, 1920, 3))
        # ffmpeg delivers RGB24 but cv2.imshow expects BGR, so convert before display.
        cv2.imshow('video', cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
        cv2.waitKey(1)


if __name__ == '__main__':
    f1()