refactor: delete legacy test

This commit is contained in:
flandre 2025-06-10 19:22:56 +08:00
parent 35d0cac7c6
commit 6a79d640b6
33 changed files with 0 additions and 1560 deletions

View File

@ -1,182 +0,0 @@
import fractions
import logging
from typing import cast
import av
import numpy as np
from av import VideoCodecContext, VideoFrame
# for e in av.codecs_available:
# print(e)
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('libav').setLevel(logging.DEBUG)
MAX_FRAME_RATE = 60
def f0():
    """Encode synthetic black frames with a manually created libx264 context.

    Runs forever; prints the byte size of each packet the encoder emits.
    """
    codec: VideoCodecContext = av.CodecContext.create('libx264', "w")
    codec.width = 640
    codec.height = 480
    codec.pix_fmt = 'yuv420p'  # common format, supported by most decoders
    codec.open()
    pts = 0
    while True:
        # BUG FIX: ndarray layout is (height, width, channels); the original
        # passed (640, 480, 3), which does not match the 640x480 context.
        f = VideoFrame.from_ndarray(np.zeros((480, 640, 3), dtype=np.uint8), format='rgb24')
        pts += 1
        f.pts = pts
        packets = codec.encode(f)
        if packets:
            # Size of the first encoded packet (packets expose the buffer protocol).
            print(len(bytes(packets[0])))
def f1():
    """Encode 100 blank RGB frames into output.mp4 via the container-level API."""
    import av
    container = av.open('output.mp4', 'w')
    # One libx264 video stream at 24 fps.
    video = container.add_stream('libx264', rate=24)
    video.width = 640
    video.height = 480
    video.pix_fmt = 'yuv420p'
    for _ in range(100):
        blank = av.VideoFrame(width=640, height=480, format='rgb24')
        # (frame content is left untouched — this is an encode smoke test)
        for pkt in video.encode(blank):
            container.mux(pkt)
    # Drain packets still buffered inside the encoder.
    for pkt in video.encode():
        container.mux(pkt)
    container.close()
def f2():
    """Encode 100 moving-dot frames to output.mp4, configuring the stream's codec context explicitly."""
    import av
    from av import VideoFrame
    from fractions import Fraction
    # 1. Create the output container.
    output_path = 'output.mp4'
    output = av.open(output_path, 'w')
    # 2. Create the video stream (libx264 encoder).
    stream = output.add_stream('libx264', rate=24)  # frame rate
    # 3. Configure the codec-context parameters.
    stream.width = 640
    stream.height = 480
    stream.pix_fmt = 'yuv420p'  # common format, supported by most decoders
    stream.codec_context.time_base = Fraction(1, 24)
    # 4. Manually open the codec context (optional; add_stream already did this).
    stream.codec_context.open()
    # 5. Encoding loop.
    for i in range(100):  # generate 100 frames
        frame = av.VideoFrame(width=640, height=480, format='rgb24')
        # Generate test data; a real application would pull frames from an image source.
        import numpy as np
        import colorsys
        cx = int(640 / 100 * i)
        cy = int(480 / 100 * i)
        rgb = (np.array(colorsys.hsv_to_rgb(i / 100.0, 1.0, 1.0)) * 255).astype(np.uint8)
        data = np.zeros((frame.height, frame.width, 3), dtype=np.uint8)
        data[cy - 10:cy + 10, cx - 10:cx + 10, :] = rgb
        frame.planes[0].update(data)
        for packet in stream.encode(frame):
            output.mux(packet)
    # 6. Flush the encoder.
    for packet in stream.encode():
        output.mux(packet)
    # 7. Close the container.
    output.close()
def f3():
    """Variant of f2 that builds the codec context by hand before adding the stream.

    NOTE(review): `av.Codec('libx264', "w")` followed by assigning
    `stream.codec_context` is unusual PyAV usage — confirm it works on the
    installed PyAV version.
    """
    import av
    from av import VideoFrame
    from fractions import Fraction
    # 1. Create the output container.
    output_path = 'output.mp4'
    output = av.open(output_path, 'w')
    # 2. Look up the encoder codec.
    codec = av.Codec('libx264', "w")
    # 3. Manually create and configure the codec context.
    ctx = av.CodecContext.create(codec, mode="w")
    ctx.width = 640
    ctx.height = 480
    ctx.pix_fmt = 'yuv420p'  # common format, supported by most decoders
    ctx.time_base = Fraction(1, 24)
    # 4. Add the video stream (using the prepared codec context).
    stream = output.add_stream('libx264', rate=24)  # by encoder name
    stream.codec_context = ctx  # attach the hand-built context to the stream
    stream.width = ctx.width
    stream.height = ctx.height
    stream.pix_fmt = ctx.pix_fmt
    # 5. Manually open the codec context (auto-opened once attached to a stream).
    ctx.open()
    # 6. Encoding loop.
    for i in range(100):  # generate 100 frames
        frame = av.VideoFrame(width=640, height=480, format='rgb24')
        # Generate test data; a real application would pull frames from an image source.
        import numpy as np
        import colorsys
        cx = int(640 / 100 * i)
        cy = int(480 / 100 * i)
        rgb = (np.array(colorsys.hsv_to_rgb(i / 100.0, 1.0, 1.0)) * 255).astype(np.uint8)
        data = np.zeros((frame.height, frame.width, 3), dtype=np.uint8)
        data[cy - 10:cy + 10, cx - 10:cx + 10, :] = rgb
        frame.planes[0].update(data)
        for packet in stream.encode(frame):
            output.mux(packet)
    # 7. Flush the encoder.
    for packet in stream.encode(None):
        output.mux(packet)
    # 8. Close the container.
    output.close()
    print(f"视频已保存到 {output_path}")
if __name__ == '__main__':
    # Only the manual codec-context experiment runs by default.
    f0()

View File

@ -1,16 +0,0 @@
import zmq
from flandre import C
from flandre.utils.RfFrame import b2t
if __name__ == '__main__':
    # Pull live RF frames and report duplicated timestamps.
    context = zmq.Context()
    device_socket = context.socket(zmq.PULL)
    device_socket.connect(C.live_push_socket)
    last_ts = 0
    while True:
        buffer = device_socket.recv()
        ts, sequence_id, encoder, s = b2t(buffer)
        # A repeated timestamp means the pusher re-sent the same frame.
        if last_ts == ts:
            print(ts)
        last_ts = ts

View File

@ -1,47 +0,0 @@
import struct
from pathlib import Path
import zmq
import time
import sys
def exit():
    """Ask the remote driver at 11.6.1.66:5556 to terminate."""
    # NOTE: shadows the builtin `exit`; kept because the dispatch in
    # __main__ selects functions by name from argv.
    ctx = zmq.Context()
    sock = ctx.socket(zmq.REQ)
    sock.connect('tcp://11.6.1.66:5556')
    # sock.send(b'file' + Path('/home/lambda/source/scarlet/flandre/config/64-1.txt').read_bytes())
    sock.send(b'exit')
def cmd(c: str):
    """Send command string *c* to the driver and print its decoded reply."""
    ctx = zmq.Context()
    sock = ctx.socket(zmq.REQ)
    sock.connect('tcp://11.6.1.66:5556')
    sock.send(c.encode())
    print(sock.recv().decode())
def file():
    """Upload a device config file to the driver ('file' prefix + raw bytes)."""
    # NOTE: shadows the (Python 2) builtin name `file`; kept for CLI dispatch.
    ctx = zmq.Context()
    sock = ctx.socket(zmq.REQ)
    sock.connect('tcp://11.6.1.66:5556')
    sock.send(b'file' + Path('/home/lambda/source/scarlet/flandre/config/64-1.txt').read_bytes())
def test():
    """Pull one data frame; print its unpacked header and payload size."""
    ctx = zmq.Context()
    sock = ctx.socket(zmq.PULL)
    sock.connect('tcp://11.6.1.66:5555')
    while True:
        s = sock.recv()
        # Header layout '=iqi' (int32, int64, int32) — presumably magic,
        # timestamp, length; confirm against the driver protocol.
        print(struct.unpack_from('=iqi', s))
        print(s[4 + 8 + 4:].__len__())
        break
if __name__ == '__main__':
    # No args: run test(); otherwise dispatch argv[1] as a function name
    # with the remaining argv entries as its arguments.
    if len(sys.argv) < 2:
        test()
    else:
        globals()[sys.argv[1]](*sys.argv[2:])

View File

@ -1,53 +0,0 @@
import logging
import multiprocessing
from BusClient import BusClient
from nodes.Beamformer import Beamformer
from nodes.Broker import Broker
from nodes.Device import Device
from nodes.ImageCV import ImageCV
from nodes.ImageFFMPEG import ImageFFMPEG
from nodes.JoyStick import Joystick
from nodes.Loader import Loader
from nodes.Mi import Mi
from nodes.Midi import Midi
from nodes.Monitor import Monitor
from nodes.Muxer import Muxer
from nodes.Recorder import Recorder
from nodes.Robot import Robot
from qtonly import kde_pyqt6_mainui
from utils.Msg import KillMsg
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # 'spawn' so children do not inherit forked CUDA/Qt state.
    multiprocessing.set_start_method('spawn')
    # NOTE(review): this Pool is created and never used — confirm why.
    multiprocessing.Pool()
    pps = []
    # One process per node; each entry is a callable run as a process target.
    ps = [
        Broker(),
        kde_pyqt6_mainui,
        Device(),
        ImageFFMPEG(),
        ImageCV(level=logging.DEBUG),
        Beamformer(level=logging.DEBUG),
        Loader(),
        Muxer(level=logging.DEBUG),
        # Midi(),
        Joystick(),
        Robot(),
        Recorder(),
        # Monitor(),
        Mi(),
    ]
    for p in ps:
        pps.append(multiprocessing.Process(target=p))
    for p in pps:
        p.start()
    # Block until a KillMsg with an empty name arrives, then kill all children.
    c = BusClient(KillMsg)
    while True:
        x: KillMsg = c.recv()
        if x.name == '':
            break
    for p in pps:
        p.kill()

View File

@ -1,11 +0,0 @@
import time
from BusClient import BusClient
from utils.Msg import KillMsg
if __name__ == '__main__':
    # Flood the bus with kill messages (smoke test for BusClient fan-out).
    c = BusClient()
    time.sleep(1)  # give the bus connection time to establish
    for i in range(100):
        c.send(KillMsg())

View File

@ -1,67 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"id": "initial_id",
"metadata": {
"collapsed": true,
"ExecuteTime": {
"end_time": "2025-01-12T12:27:14.384233Z",
"start_time": "2025-01-12T12:27:13.172285Z"
}
},
"source": [
"from beamformer.process import pwi_process\n",
"from utils.RfFile import RfFile\n",
"from utils.ScanData import ScanData\n",
"\n",
"f = RfFile.from_path('/run/media/lambda/b86dccdc-f134-464b-a310-6575ee9ae85c/cap4/trim/R1,L=30,C=PAR/S=1063,E=4.bin')\n",
"s = ScanData.from_file(f)\n",
"pwi_process()"
],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/lambda/source/scarlet/flandre/.venv/lib/python3.12/site-packages/cupyx/jit/_interface.py:173: FutureWarning: cupyx.jit.rawkernel is experimental. The interface can change in the future.\n",
" cupy._util.experimental('cupyx.jit.rawkernel')\n"
]
},
{
"ename": "TypeError",
"evalue": "ScanData.from_file() missing 1 required positional argument: 'shape'",
"output_type": "error",
"traceback": [
"\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"\u001B[0;31mTypeError\u001B[0m Traceback (most recent call last)",
"Cell \u001B[0;32mIn[2], line 6\u001B[0m\n\u001B[1;32m 3\u001B[0m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21;01mutils\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01mScanData\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m ScanData\n\u001B[1;32m 5\u001B[0m f \u001B[38;5;241m=\u001B[39m RfFile\u001B[38;5;241m.\u001B[39mfrom_path(\u001B[38;5;124m'\u001B[39m\u001B[38;5;124m/run/media/lambda/b86dccdc-f134-464b-a310-6575ee9ae85c/cap4/trim/R1,L=30,C=PAR/S=1063,E=4.bin\u001B[39m\u001B[38;5;124m'\u001B[39m)\n\u001B[0;32m----> 6\u001B[0m s \u001B[38;5;241m=\u001B[39m \u001B[43mScanData\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfrom_file\u001B[49m\u001B[43m(\u001B[49m\u001B[43mf\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 7\u001B[0m pwi_process()\n",
"\u001B[0;31mTypeError\u001B[0m: ScanData.from_file() missing 1 required positional argument: 'shape'"
]
}
],
"execution_count": 2
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@ -1,75 +0,0 @@
import base64
import threading
import time
from pathlib import Path
import cv2
import numpy as np
from tqdm import tqdm
import zmq
if __name__ == '__main__':
    # Earlier experiments (HTTP image posting, per-file pushing) kept disabled:
    # r1 = base64.b64encode(Path('/home/lambda/Pictures/drawing.png').read_bytes()).decode()
    # r1 = base64.b64encode(Path('/home/lambda/Pictures/remilia3.png').read_bytes()).decode()
    # requests.post('http://localhost:12345/sendpic/test', json=dict(value='data:image/png;base64, ' + r1))
    # for p in tqdm(Path('/home/lambda/Videos/pngs/New Folder/').glob('*.png')):
    #     r1 = base64.b64encode(p.read_bytes()).decode()
    #     requests.post('http://localhost:12345/sendpic/test', json=dict(value='data:image/png;base64, ' + r1))
    # arr = [cv2.imread(str(img)).tobytes() for img in Path('/home/lambda/Videos/pngs/New Folder/').glob('*.png')]
    # Preload every PNG as raw RGBA bytes (1080x1920x4 per frame).
    arr = []
    for img in tqdm(list(Path('/home/lambda/Videos/pngs/New Folder/').glob('*.png'))):
        img = cv2.imread(str(img))
        # img = cv2.resize(img, (1920 // 2, 1080 // 2))
        img = img.reshape(1080, 1920, 3)
        z = np.zeros((1080, 1920, 4), dtype=np.uint8)
        z[:, :, :3] = img
        img = z
        img = cv2.cvtColor(img, cv2.COLOR_BGRA2RGBA)
        arr.append(img.tobytes())
    # img = cv2.resize(img, (1920 // 4, 1080 // 4,))
    # while True:
    #     for p in Path('/home/lambda/Videos/pngs/New Folder/').glob('*.png'):
    #         img = cv2.imread(str(p))
    #         # print(img.shape)
    #         input()
    #         socket.send(img.tobytes())
    # while True:
    #     for b in arr:
    #         input()
    #         socket.send(b)
    # Shared [frame_index, advancing?] state between main thread and sender thread.
    lii = [0, True]
    def t(li):
        # Sender thread: push the current frame at ~120 fps; advance the
        # frame index only while li[1] is True (toggled from the main thread).
        context = zmq.Context()
        socket = context.socket(zmq.PUSH)
        socket.connect("tcp://localhost:5555")
        while True:
            for i, b in enumerate(arr):
                while True:
                    socket.send(arr[li[0]])
                    time.sleep(1 / 120)
                    if li[1]:
                        li[0] = i
                        break
    threading.Thread(target=t, args=(lii,)).start()
    # while True:
    #     for i, b in enumerate(arr):
    #         # input()
    #         lii[0] = i
    #         time.sleep(1 / 60)
    while True:
        # Each Enter keypress pauses/resumes frame advancement.
        input()
        lii[1] = not lii[1]

View File

@ -1,38 +0,0 @@
import cupy as cp
from flandre.beamformer.das import gen_pwi
from flandre.beamformer.dist import direct_dist
from flandre.utils.Config import DeviceConfig
from flandre.utils.Msg import ImageArgMsg
from flandre.utils.RfMat import RfMat
from flandre.utils.RfSequence import RfSequence
if __name__ == '__main__':
    # Offline beamforming of a recorded RF sequence into per-frame PNGs.
    arg = ImageArgMsg(
        sender='',
        t_end=2900,
        t_start=0,
        v2=1524,
        dct_center=1086,
        dct_bandwidth=915,
        f_rows=6002,
        beta=40,
        tgc=0,
        g8=80
    )
    dc = DeviceConfig(v2=1540, rows=5999)
    # Precompute the PWI delay-and-sum kernel for this device geometry (on GPU).
    pwi, _, la = gen_pwi(direct_dist(dc, p=cp), dc)
    seq = RfSequence('/run/media/lambda/b86dccdc-f134-464b-a310-6575ee9ae85c/us/baby789,S=(256 6002),M=PWI,U=120/')
    for i, frame in enumerate(seq.frames):
        # Processing chain — method names suggest: band filter, beamform,
        # envelope, smoothing, crop, gain compensation, rotate; confirm
        # against RfMat's implementation.
        data = RfMat.from_rf_frame(frame, device='gpu')
        data = data.dct_center(arg.dct_center, arg.dct_bandwidth)
        data = data.call(lambda m: m.astype(cp.int16))
        data = data.call(pwi)
        data = data.call(cp.asarray, order='C')
        data = data.argrelextrema()
        data = data.conv_guass(b=arg.beta * 0.01)
        data = data.crop(arg.t_start, arg.t_end)
        data = data.time_gain_compensation_global((1 - arg.g8 * (1.0 / 128)) ** 2)
        data = data.rotate90()
        data.png(f'/home/lambda/source/scarlet/flandre/@DS/test/{i}.png', pre=300)

View File

@ -1,191 +0,0 @@
import hashlib
import shutil
from pathlib import Path
from flandre.utils.RfMeta import RfFrameMeta, RfSequenceMeta
from flandre.utils.RfSequence import RfSequence
from flandre.utils.archive import to_zip
def f1():
    """Package the legacy steel-top PWI captures into one RF-sequence zip."""
    # rr = RfSequenceMeta
    tempdst = Path('/mnt/16T/private_dataset/ustemp')
    arr = []
    for (i,
         file) in enumerate(Path('/mnt/16T/private_dataset/New Folder/steel-top/').glob('*pwi.bin')):
        file = Path(file)
        # File names look like '<x>_<y>_pwi.bin'; grid indices are scaled by
        # 100 into robot coordinates (presumably micrometres — confirm).
        x, y, _ = file.name.split('_')
        r = RfFrameMeta(encoder=0, robot_x=int(x) * 100, robot_y=int(y) * 100)
        arr.append((file, r))
    # Raster order: rows by robot_y, columns by robot_x.
    arr.sort(key=lambda item: (item[1].robot_y, item[1].robot_x))
    arg = []
    for i, item in enumerate(arr):
        file, meta = item
        meta.sequence_id = i
        print(file.name, meta.sequence_id, meta.robot_x, meta.robot_y)
        # Attach the preview image when one sits next to the capture.
        pic = file.with_suffix('.png')
        farr = []
        if pic.exists():
            farr.append((pic, '.png'))
        arg.append((file, meta, farr))
    to_zip(arg, tempdst, Path('/mnt/16T/private_dataset/us/steel-top,U=30,M=PWI,S=(256 1502).zip'))
def f2():
    """Collect legacy steel-top PWI captures as (path, meta) pairs in raster order.

    Prints the sorted list; packaging (to_zip) was never wired up here.
    """
    tempdst = Path('/mnt/16T/private_dataset/ustemp')
    src = Path('/mnt/16T/private_dataset/New Folder/T1,U=30,M=FMC,S=(256 256 1502)/')
    arr = []
    for (i,
         file) in enumerate(Path('/mnt/16T/private_dataset/New Folder/steel-top/').glob('*pwi.bin')):
        file = Path(file)
        # BUG FIX: names are '<x>_<y>_pwi.bin'; the original unpacked only
        # two parts and then referenced an undefined name `y` (NameError).
        x, y, _ = file.name.split('_')
        r = RfFrameMeta(encoder=0, robot_x=int(x) * 100, robot_y=int(y) * 100)
        arr.append((file, r))
    # Raster order: rows by robot_y, columns by robot_x.
    arr.sort(key=lambda item: (item[1].robot_y, item[1].robot_x))
    print(arr)
# Scratch directory used while staging files into an archive.
WRITE_TMP = Path('/run/media/lambda/040fb2b8-1584-4683-bac8-fec3b264167d/write_cache/')
def proc(src: Path | str, dst: Path | str, fx, tmp=WRITE_TMP):
    """Stage *src* through *tmp* into archive *dst* using collector *fx*.

    *fx* maps a source directory to [(path, meta, extra_files)] entries.
    NOTE(review): the final to_zip call is commented out, so this currently
    only clears the temp dir and prints the collected entries.
    """
    src = Path(src)
    dst = Path(dst)
    assert not dst.exists()  # refuse to overwrite an existing archive
    shutil.rmtree(tmp)
    tmp.mkdir(parents=True)
    arr = fx(src)
    print(arr)
    # to_zip(arr, tmp, dst)
def f_ok(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Return every file under *src* with its parsed frame meta, ordered by sequence_id.

    Entries are (path, meta, extra_files); extra_files is always empty here.
    """
    entries: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = [
        (path, RfFrameMeta.from_name(path.stem), []) for path in src.glob('*')
    ]
    entries.sort(key=lambda entry: entry[1].sequence_id)
    return entries
def f_x(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Return files under *src* sorted by robot_x, renumbering sequence ids.

    Each entry is (path, meta, extra_files); encoder is zeroed because the
    x position, not the encoder, defines the order here.
    """
    entries: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = [
        (path, RfFrameMeta.from_name(path.stem), []) for path in src.glob('*')
    ]
    entries.sort(key=lambda entry: entry[1].robot_x)
    for seq, entry in enumerate(entries):
        entry[1].sequence_id = seq
        entry[1].encoder = 0
    return entries
def f_xy(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Like f_x, but raster-ordered by (robot_y, robot_x) before renumbering."""
    arr: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = []
    for p in src.glob('*'):
        m = RfFrameMeta.from_name(p.stem)
        arr.append((p, m, []))
    arr.sort(key=lambda item: (item[1].robot_y, item[1].robot_x))
    # Renumber in raster order and drop encoder values.
    for i, a in enumerate(arr):
        a[1].sequence_id = i
        a[1].encoder = 0
    return arr
def f_legacy4(src: Path, t, xx=100, yy=100) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Collect legacy '<x>_<y>_<t>.bin' captures under *src*.

    *xx*/*yy* scale grid indices into robot coordinates.  Returns
    (path, meta, extras) entries in raster order with fresh sequence ids;
    a sibling .png preview is attached as an extra when present.
    """
    arr = []
    for p in src.glob(f'*{t}.bin'):
        x, y, _ = p.name.split('_')
        r = RfFrameMeta(encoder=0, robot_x=int(x) * xx, robot_y=int(y) * yy)
        arr.append((p, r))
    arr.sort(key=lambda item: (item[1].robot_y, item[1].robot_x))
    arg: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = []
    for i, item in enumerate(arr):
        file, meta = item
        meta.sequence_id = i
        pic = file.with_suffix('.png')
        farr = []
        if pic.exists():
            farr.append((pic, '.png'))
        arg.append((file, meta, farr))
    return arg
def f_legacy4_pwi(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    # PWI captures on the default 100/100 grid scale.
    return f_legacy4(src, 'pwi')
def f_legacy4_tfm(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    # TFM works from the raw FMC capture files.
    return f_legacy4(src, 'fmc')
def f_legacy4_pwi_y10(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    # Variant with a 10x finer y step.
    return f_legacy4(src, 'pwi', yy=10)
def f_legacy4_tfm_y10(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    # FMC variant with a 10x finer y step.
    return f_legacy4(src, 'fmc', yy=10)
def f_cap(src: Path) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Collect '<y>_parallel.bin' captures under *src*, ordered by robot_y.

    Returns (path, meta, extras) entries with fresh sequence ids; no extra
    files are attached for this capture layout.
    """
    pairs = []
    for path in src.glob(f'*parallel.bin'):
        y, _ = path.name.split('_')
        pairs.append((path, RfFrameMeta(encoder=0, robot_y=int(y) * 100)))
    pairs.sort(key=lambda pair: pair[1].robot_y)
    result: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = []
    for seq, (path, meta) in enumerate(pairs):
        meta.sequence_id = seq
        result.append((path, meta, []))
    return result
def f_legacy_unknown(src: Path, t, xx=100, yy=100) -> list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]]:
    """Collect legacy '<x>_<y>_<t>.bin' captures under *src* (unknown provenance).

    BUG FIX: `xx` and `yy` were referenced but never defined (NameError on
    first use); they are now keyword parameters defaulting to the same
    100/100 grid scale f_legacy4 uses, keeping the original call shape valid.
    """
    arr = []
    for p in src.glob(f'*{t}.bin'):
        x, y, _ = p.name.split('_')
        r = RfFrameMeta(encoder=0, robot_x=int(x) * xx, robot_y=int(y) * yy)
        arr.append((p, r))
    # Raster order: rows by robot_y, columns by robot_x.
    arr.sort(key=lambda item: (item[1].robot_y, item[1].robot_x))
    arg: list[tuple[Path, RfFrameMeta, list[tuple[Path, str]]]] = []
    for i, item in enumerate(arr):
        file, meta = item
        meta.sequence_id = i
        # Attach a sibling .png preview when one exists.
        pic = file.with_suffix('.png')
        farr = []
        if pic.exists():
            farr.append((pic, '.png'))
        arg.append((file, meta, farr))
    return arg
if __name__ == '__main__':
    # Checksum a known capture file (sanity check for the blake2b settings).
    b2b = hashlib.blake2b(
        Path('/run/media/lambda/040fb2b8-1584-4683-bac8-fec3b264167d/c1/remilia/cap1/34_4040_fmc.bin').read_bytes(),
        digest_size=4).hexdigest()
    print(b2b)
    print('--')
    # Dump frame metadata for every TFM sequence with us == 30 in the dataset.
    for f in Path('/mnt/16T/private_dataset/us/').glob('*.zip'):
        m = RfSequenceMeta.from_path(f)
        if m.mode == RfSequenceMeta.RfSequenceMode.TFM and m.us == 30:
            # NOTE(review): the inner loop rebinds `f`, shadowing the zip
            # path — harmless here, but easy to trip over.
            for f in RfSequence(f).frames:
                print(m.commit, f.meta.sequence_id, f.meta.encoder, f.meta.robot_x, f.meta.robot_y, f.meta.blake2b)
    # RfSequence()
    # proc(
    #     '/run/media/lambda/040fb2b8-1584-4683-bac8-fec3b264167d/c2/Desktop/New folder/4d/cap1',
    #     '/mnt/16T/private_dataset/us/cap1,U=30,M=PWI,S=(256 1502).zip',
    #     f_legacy_unknown,
    # )

View File

@ -1,23 +0,0 @@
import logging
import logging
import os
import subprocess
from PyQt6 import QtWidgets
from nodes.MainUI import MainUI
def kde_pyqt6_mainui():
    # Re-launch this module as a child process with a KDE/Wayland environment
    # so Qt picks up the KDE platform theme.
    # NOTE(review): assumes PYTHONPATH is set in the parent environment —
    # raises KeyError otherwise; confirm the launcher always exports it.
    subprocess.run(['python', __file__],
                   env=dict(XDG_CURRENT_DESKTOP="KDE",
                            XDG_RUNTIME_DIR="/run/user/1000",
                            XDG_SESSION_TYPE="wayland",
                            PYTHONPATH=os.environ['PYTHONPATH'])
                   )
if __name__ == '__main__':
    # Child-process entry point: show available Qt styles, then run the UI.
    print(QtWidgets.QStyleFactory.keys())
    logging.basicConfig(level=logging.INFO)
    MainUI()()

View File

@ -1,4 +0,0 @@
import PyQt6.QtWidgets as QtWidgets
if __name__ == '__main__':
    # List the Qt widget styles available in this environment.
    print(QtWidgets.QStyleFactory.keys())

View File

@ -1,44 +0,0 @@
import struct
from pathlib import Path
import numpy as np
import zmq
import time
import sys
from matplotlib import pyplot as plt
def test():
    """Live-plot the last 100 received samples (columns 2 and 3) with matplotlib."""
    ctx = zmq.Context()
    sock = ctx.socket(zmq.PULL)
    sock.bind('tcp://0.0.0.0:5555')
    li = []
    cnt = 0
    while True:
        s = sock.recv_pyobj()
        cnt += 1
        # Decimation knob: with the threshold at 1, every sample is kept.
        if cnt == 1:
            cnt = 0
            li.append(s)
            # Sliding window of the most recent 100 samples.
            if li.__len__() > 100:
                li = li[1:]
            # print(li.__len__())
            aa = np.array(li)
            plt.cla()
            # plt.plot(aa[:,0])
            # plt.plot(aa[:,1])
            plt.plot(aa[:,2]*1000)  # scaled so both traces share one axis
            plt.plot(aa[:,3])
            plt.pause(0.0001)
    # plt.axis([0, 10, 0, 1])
    # for i in range(10):
    #     y = np.random.random()
    #     plt.scatter(i, y)
    #     plt.pause(0.05)
if __name__ == '__main__':
    test()

View File

@ -1,69 +0,0 @@
import asyncio
import json
import logging
import os
import aiohttp_cors
from aiohttp import web
from aiortc import MediaStreamTrack, RTCPeerConnection, RTCSessionDescription, RTCConfiguration, RTCRtpCodecCapability
from aiortc.contrib.media import MediaBlackhole, MediaPlayer, MediaRecorder, MediaRelay
ROOT = os.path.dirname(__file__)
web.WebSocketResponse()
logger = logging.getLogger(__name__)
pcs = set()
async def offer(request):
    """Handle a WebRTC offer POST: reply with an H.264 send-only answer.

    Registers the new peer connection in the module-level `pcs` set so
    on_shutdown can close it.
    """
    # BUG FIX: aiohttp requests expose `json()`, not `json_str()` —
    # the original raised AttributeError on every request.
    params = await request.json()
    offer = RTCSessionDescription(sdp=params["sdp"], type=params["type"])
    pc = RTCPeerConnection(RTCConfiguration([]))
    pcs.add(pc)
    # NOTE(review): the source is a .wav file yet `player.video` is attached —
    # looks like a leftover; confirm the intended media source.
    player = MediaPlayer(os.path.join(ROOT, "demo-instruct.wav"))
    rc = pc.addTransceiver(player.video, 'sendonly')
    # Constrain negotiation to baseline H.264 so browsers accept the track.
    rc.setCodecPreferences([RTCRtpCodecCapability(mimeType='video/H264',
                                                 clockRate=90000,
                                                 channels=None,
                                                 parameters={
                                                     'level-asymmetry-allowed': '1',
                                                     'packetization-mode': '1',
                                                     'profile-level-id': '42e01f'
                                                 })])
    await pc.setRemoteDescription(offer)
    answer = await pc.createAnswer()
    await pc.setLocalDescription(answer)
    return web.Response(
        content_type="application/json",
        text=json.dumps(
            {"sdp": pc.localDescription.sdp, "type": pc.localDescription.type}
        ),
    )
async def on_shutdown(app):
    """Close every live RTCPeerConnection when the web app shuts down."""
    await asyncio.gather(*(pc.close() for pc in pcs))
    pcs.clear()
if __name__ == '__main__':
    app = web.Application()
    app.on_shutdown.append(on_shutdown)
    app.router.add_post("/offer", offer)
    # Allow browser clients from any origin to POST the offer.
    cors = aiohttp_cors.setup(app, defaults={
        "*": aiohttp_cors.ResourceOptions(
            allow_credentials=True,
            expose_headers="*",
            allow_headers="*"
        )
    })
    for route in list(app.router.routes()):
        cors.add(route)
    web.run_app(
        app, access_log=None, host='0.0.0.0', port=8081
    )

View File

@ -1,8 +0,0 @@
import cupy as cp
from ctypes import CDLL
if __name__ == '__main__':
    # Force-load a specific NVRTC shared library before CuPy's first
    # kernel compile, to test which nvrtc build gets picked up.
    # libc = CDLL("libnvrtc.alt.so.12")
    libc = CDLL("libnvrtc.so.12")
    cp.zeros(1) + 1  # triggers a kernel compile/launch
    # print(libc.__dir__())

View File

@ -1,6 +0,0 @@
import subprocess
if __name__ == '__main__':
    # Probe whether the driver's control port answers within 1 second;
    # curl exit code 0 means reachable.
    code = subprocess.run(['curl', '-m', '1', 'http://11.6.1.66:5556'], stderr=subprocess.DEVNULL,
                          stdout=subprocess.DEVNULL).returncode
    print(code)

View File

@ -1,4 +0,0 @@
if __name__ == '__main__':
    # Scratch checks of tuple/str conversion and slicing edge cases.
    print(tuple(str((1, 2, 3))))
    print([1,2,3][-1:1])  # empty list: start -1 resolves past stop 1
    print([1,2,3][1:])

View File

@ -1,45 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>MainWindow</class>
<widget class="QMainWindow" name="MainWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>800</width>
<height>600</height>
</rect>
</property>
<property name="windowTitle">
<string>MainWindow</string>
</property>
<widget class="QWidget" name="centralwidget">
<widget class="QPushButton" name="b_test">
<property name="geometry">
<rect>
<x>330</x>
<y>230</y>
<width>301</width>
<height>251</height>
</rect>
</property>
<property name="text">
<string>PushButton</string>
</property>
</widget>
</widget>
<widget class="QMenuBar" name="menubar">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>800</width>
<height>30</height>
</rect>
</property>
</widget>
<widget class="QStatusBar" name="statusbar"/>
</widget>
<resources/>
<connections/>
</ui>

View File

@ -1,36 +0,0 @@
# Form implementation generated from reading ui file 'test.ui'
#
# Created by: PyQt6 UI code generator 6.8.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic6 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt6 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic6-generated form class for test.ui; regenerate rather than edit (see header warning)."""
    def setupUi(self, MainWindow):
        # Build the 800x600 window with one push button, a menu bar and a status bar.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(parent=MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.b_test = QtWidgets.QPushButton(parent=self.centralwidget)
        self.b_test.setGeometry(QtCore.QRect(330, 230, 301, 251))
        self.b_test.setObjectName("b_test")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(parent=MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 30))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(parent=MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        # Wires on_<objectName>_<signal> slots automatically.
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        # Install translatable UI strings.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.b_test.setText(_translate("MainWindow", "PushButton"))

View File

@ -1,24 +0,0 @@
import sys
from PyQt6.QtCore import pyqtSlot
from PyQt6.QtWidgets import QApplication, QMainWindow
from test2 import Ui_MainWindow
class Adv(QMainWindow, Ui_MainWindow):
    """Main window wired to the generated Ui_MainWindow form."""
    def __init__(self, parent=None):
        super(Adv, self).__init__(parent)
        self.setupUi(self)
    # Auto-connected by connectSlotsByName via the on_<object>_<signal> convention.
    @pyqtSlot(bool)
    def on_b_test_clicked(self,a):
        print('on_b_test',a)
if __name__ == '__main__':
    app = QApplication(sys.argv)
    app.setDesktopFileName('TestTest')
    MainWindow = Adv()
    MainWindow.show()
    app.exec()

View File

@ -1,32 +0,0 @@
import time
from pathlib import Path
from nodes.Device import Device
def test1():
    """End-to-end device smoke test: connect, upload a config file, disconnect."""
    d = Device()
    d.setup()
    d.connect()
    d.enable()
    print(d.setfile(
        Path('/home/lambda/source/scarlet/flandre/config/device/AA256,U=30,M=PWI,S=(256 1502).txt').read_text()))
    d.disable()
    d.disconnect()
def test2():
    """Read one data block without the connect/enable handshake."""
    d = Device()
    d.setup()
    # d.connect()
    # d.enable()
    # d.setfile(Path('/home/lambda/source/scarlet/flandre/config/device/AA256,U=30,M=PWI,S=(256 1502).txt').read_text())
    # time.sleep(2)
    r = d.get_data()
    print(r.__len__())
    # d.disable()
    # d.disconnect()
if __name__ == '__main__':
    test2()

View File

@ -1,27 +0,0 @@
import subprocess
import cv2
import numpy as np
# Window must exist before the decode loop starts feeding frames.
cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)
def f1():
    """Display a low-latency RTSP stream by piping raw RGB frames out of ffmpeg."""
    p = subprocess.Popen([
        'ffmpeg',
        '-loglevel', 'quiet',
        '-flags', 'low_delay',
        '-fflags', 'nobuffer',
        '-i', 'rtsp://admin:ab12ab12@11.6.2.4:554/h264/ch1/main/av_stream',
        '-pix_fmt', 'rgb24',
        '-f', 'rawvideo',
        '-'
    ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while True:
        # One 1080p RGB frame is exactly 1920*1080*3 bytes on the pipe.
        b = p.stdout.read(1920 * 1080 * 3)
        cv2.imshow('video', np.frombuffer(b, dtype=np.uint8).reshape((1080, 1920, 3)))
        cv2.waitKey(1)
if __name__ == '__main__':
    f1()

View File

@ -1,31 +0,0 @@
import multiprocessing
from multiprocessing import Process
from threading import Thread
import cupy as cp
class TestProcessCupy:
    """Callable used to check that CuPy works inside a spawned subprocess."""
    def __init__(self):
        pass
    def t1(self):
        print(cp.zeros((10, 10, 10)))
    def __call__(self, *args, **kwargs):
        # Process target: allocate on the GPU from within the child.
        print(cp.zeros((10, 10, 10)))
def p1():
    # import cupy as cp
    print(cp.zeros((10, 10, 10)))
    # print(cp.asarray(z))
if __name__ == '__main__':
    def ff():
        print(cp.zeros((10, 10, 10)))
    tpc = TestProcessCupy()
    p2 = p1
    z = cp.zeros((10, 10, 10))
    # 'spawn' forces a fresh CUDA context in the child process.
    multiprocessing.set_start_method('spawn')
    # p = Process(target=p2)
    p = Process(target=tpc)
    p.start()
    p.join()

View File

@ -1,34 +0,0 @@
import zmq
import time
class MyReqClient:
    """zmq REQ client that recreates its socket after a receive timeout.

    A REQ socket that never receives a reply is stuck in its send/recv
    state machine; tearing the socket down and reconnecting is the
    standard recovery.
    """

    def __init__(self, s, context=None, timeout=4000):
        if context is None:
            context = zmq.Context()
        self.context = context
        self.timeout = timeout  # poll timeout in milliseconds
        self.socket = context.socket(zmq.REQ)
        self.socket.connect(s)
        self.s = s

    def recv(self):
        """Return the reply bytes, or None after a timeout (socket is rebuilt)."""
        events = self.socket.poll(self.timeout)
        # BUG FIX: poll() returns an event mask, not a count — test the
        # POLLIN bit instead of comparing the result to the literal 1.
        # (Also dropped the stray debug print of the poll result.)
        if events & zmq.POLLIN:
            return self.socket.recv()
        # Timed out: discard the stuck REQ socket and reconnect.
        self.socket.close()
        self.socket = self.context.socket(zmq.REQ)
        self.socket.connect(self.s)
        return None

    def send(self, data):
        self.socket.send(data)

    def request(self, data: bytes):
        """Send *data* and wait for the reply (None on timeout)."""
        self.socket.send(data)
        return self.recv()
# Manual check: hammer the local echo server; None is printed on timeouts.
c = MyReqClient(f"tcp://127.0.0.1:5555")
while True:
    print(c.request(b'asd'))

View File

@ -1,17 +0,0 @@
import time
import zmq
# Deliberately slow REP server used to exercise client-side timeouts.
c = zmq.Context()
s = c.socket(zmq.REP)
# Heartbeat experiments left disabled:
# s.setsockopt(zmq.HEARTBEAT_IVL, 1000)
# s.setsockopt(zmq.HEARTBEAT_TTL, 1000)
# s.setsockopt(zmq.HEARTBEAT_TIMEOUT, 1000)
s.bind('tcp://127.0.0.1:5555')
while True:
    s.recv()
    print('recv')
    time.sleep(2)  # delay the reply to provoke client-side timeout handling
    print('send')
    s.send(b'hello')
    # break

View File

@ -1,14 +0,0 @@
import json
import cupy as cp
import numpy as np
if __name__ == '__main__':
    # Round-trip a small ndarray through raw bytes and back.
    original = np.array([
        [1, 2, 3],
        [1, 2, 3],
    ])
    print(type(original.shape))
    restored = np.frombuffer(original.tobytes(), dtype=np.dtype(str(original.dtype)))
    print(restored.reshape(original.shape))

View File

@ -1,39 +0,0 @@
import struct
import time
import cv2
import numpy as np
import zmq
from flandre import C
from flandre.nodes.Device import Device, DeviceCmd
from flandre.nodes.Mi import Mi
def t1():
    """Poll GetData over the REQ socket and display each frame with OpenCV."""
    context = zmq.Context()
    req_driver_socket = context.socket(zmq.REQ)
    req_driver_socket.connect(C.live_rep_socket)
    magic = 7355608
    cmd = DeviceCmd.GetData
    cv2.namedWindow("test", cv2.WINDOW_AUTOSIZE)
    while True:
        req_driver_socket.send(struct.pack('i', magic) + struct.pack('i', cmd.value))
        b = req_driver_socket.recv()
        print(b[4:12])
        print(struct.unpack('<I', b[8:12]))
        # Skip the reply header: int32 + int64 + int32 — presumably magic,
        # timestamp, length; confirm against the driver protocol.
        offset = 4 + 8 + 4
        # offset = 0
        arr = np.frombuffer(b, dtype=np.int16, offset=offset).reshape((256, 5002))
        cv2.imshow('test', arr)
        cv2.waitKey(1)
        # print(b.__len__())
if __name__ == '__main__':
    # Poll the encoder value five times a second.
    context = zmq.Context()
    req_driver_socket = context.socket(zmq.REQ)
    req_driver_socket.connect(C.live_rep_socket)
    magic = 7355608
    cmd = DeviceCmd.GetEncoder
    while True:
        req_driver_socket.send(struct.pack('i', magic) + struct.pack('i', cmd.value))
        print(req_driver_socket.recv())
        time.sleep(0.2)

View File

@ -1,36 +0,0 @@
import struct
import time
import cv2
import numpy as np
import zmq
from flandre import C
from flandre.nodes.Device import Device, DeviceCmd
from flandre.nodes.Mi import Mi
def t1():
    """Poll the device REP socket for data frames and display them with OpenCV."""
    context = zmq.Context()
    req_driver_socket = context.socket(zmq.REQ)
    req_driver_socket.connect(C.live_rep_socket)
    # BUG FIX: `magic` and `cmd` were referenced but never defined
    # (NameError on the first send); use the same protocol constants as the
    # sibling GetData test script. Also create the window imshow targets.
    magic = 7355608
    cmd = DeviceCmd.GetData
    cv2.namedWindow('test', cv2.WINDOW_AUTOSIZE)
    while True:
        req_driver_socket.send(struct.pack('i', magic) + struct.pack('i', cmd.value))
        b = req_driver_socket.recv()
        print(b[4:12])
        print(struct.unpack('<I', b[8:12]))
        # Skip the reply header: int32 + int64 + int32 — presumably magic,
        # timestamp, length; confirm against the driver protocol.
        offset = 4 + 8 + 4
        arr = np.frombuffer(b, dtype=np.int16, offset=offset).reshape((256, 5002))
        cv2.imshow('test', arr)
        cv2.waitKey(1)
if __name__ == '__main__':
    # Send an empty request and unpack the '=IQi' status reply
    # (uint32, uint64, int32).
    context = zmq.Context()
    req_driver_socket = context.socket(zmq.REQ)
    req_driver_socket.connect(C.live_rep_socket)
    req_driver_socket.send(b'')
    r = struct.unpack_from('=IQi', req_driver_socket.recv())
    print(r)

View File

@ -1,36 +0,0 @@
from miio.miioprotocol import MiIOProtocol
from config import SWITCH1_IP, SWITCH1_TOKEN, SWITCH2_IP, SWITCH2_TOKEN
from miio import Device
# from utils.mi import c1_disconnect, c1_connect, c2_connect
#
# if __name__ == '__main__':
# # m = MiIOProtocol(
# # SWITCH1_IP, SWITCH1_TOKEN,
# # )
# # r = m.send('get_properties', [{'did': 'MYDID', 'siid': 2, 'piid': 1}])
# # print(r[0]['value'])
# c1_connect()
# c2_connect()
import click
@click.group()
def cli():
    """Root command group for the Mi switch control CLI."""
    pass
@cli.command()
@click.option('--count', default=1, help='Number of greetings.')
@click.option('--name', prompt='Your name',
              help='The person to greet.')
def con(count, name):
    """Simple program that greets NAME for a total of COUNT times."""
    # NOTE(review): body is still the click demo; presumably meant to
    # connect the switch — confirm the intended behavior.
    for x in range(count):
        click.echo(f"Hello {name}!")
@cli.command()
def disconnect():
    # NOTE(review): echoes the click demo text, not an actual disconnect.
    click.echo('Dropped the database')
if __name__ == '__main__':
    cli()

View File

@ -1,114 +0,0 @@
import dataclasses
import hashlib
import inspect
import sys
import time
from enum import Enum
from pathlib import Path, PosixPath
@dataclasses.dataclass
class ASD:
    """Scratch dataclass used by the reflection experiments below."""
    aaa: int = 1
    bbb: int = 1
    ccc: int = 1
    @property
    def b(self):
        # Alias of aaa — used to check how properties show up in inspection.
        return self.aaa
class EE(Enum):
    # Enum members may hold arbitrary objects — here a class and an int.
    asd = ASD
    x = 1
def skip1(f):
    """Wrap method *f* so a `cond=` keyword can gate the call.

    Without `cond`, the call passes straight through.  With a truthy
    `cond`, the keyword is stripped and the call proceeds; with a falsy
    `cond`, the wrapped method is skipped and `self` is returned instead.
    """
    def wrapper(self, *args, **kwargs):
        if 'cond' not in kwargs:
            return f(self, *args, **kwargs)
        do_call = kwargs.pop('cond')
        return f(self, *args, **kwargs) if do_call else self
    return wrapper
def skip(original_class):
    """Class decorator: wrap every function attribute with skip1's cond gate."""
    def f2(self, x):
        # NOTE(review): defined but never used — looks like leftover scaffolding.
        print(x)
    for name, f in inspect.getmembers(original_class, inspect.isfunction):
        print(name, f)
        setattr(original_class, name, skip1(f))
    return original_class
class Decorator(object):
    """Parameterized class-decorator sketch; __call__ is currently a no-op."""
    def __init__(self, arg):
        self.arg = arg
    def __call__(self, cls):
        pass
    # def wrappedClass(*args):
    #     return cls(*args)
    #
    # return type("TestClass", (cls,), dict(newMethod=self.newMethod, classattr=self.arg))
    def newMethod(self, value):
        return value * 2
@skip
class DC:
    """Demo class whose methods are wrapped by @skip (so they accept cond=)."""
    def __init__(self):
        pass
    def f1(self, arg):
        print('f1', arg)
    def f2(self, arg):
        print('f2', arg)
class E1:
    # Class attribute, shared until shadowed on an instance.
    a = 1
if __name__ == '__main__':
    # A pile of disabled reflection / pattern-matching experiments; the live
    # part at the bottom shows that reading an instance attribute falls back
    # to the class attribute until it is shadowed on the instance.
    # print(sys.argv)
    # print(dir(ASD()))
    # print(ASD.__dict__)
    # print(inspect.getmembers(ASD))
    # print(inspect.getmembers(ASD))
    # for f in dataclasses.fields(ASD):
    #     print(f.name, f.type)
    # p = Path('asd')
    # pt = type(p)
    # print(pt)
    # print(pt.__name__)
    # print(type(type(p)))
    # match pt:
    #     case _x if isinstance(_x, PosixPath):
    #         print(-2)
    #     case type(__name__='PosixPath'):
    #         print(-1)
    #     case type():
    #         print(0)
    #     case Path():
    #         print(1)
    #     case PosixPath():
    #         print(3)
    #     case _:
    #         print(2)
    # dc = DC()
    # dc.f1(11,cond=False)
    # dc.f2(22)
    a = E1()
    b = E1()
    a.a = 1413  # shadows only on `a`; the class attribute is untouched
    print(b.a)  # still 1
    print()

View File

@ -1,15 +0,0 @@
import socket
def send_message(message: str, host='11.6.1.53', port=29999):
    """Send *message* over a one-shot TCP connection to (host, port).

    The socket is used as a context manager so it is closed even when
    connect() itself raises — the original try/finally only covered sendall.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client_socket:
        client_socket.connect((host, port))
        client_socket.sendall(message.encode())
        print(f"Sent: {message}")
if __name__ == "__main__":
    # Stop the robot program via its remote-control TCP interface.
    # send_message('remoteControl -on\nplay\nremoteControl -off\n')
    send_message('remoteControl -on\nstop\nremoteControl -off\n')

View File

@ -1,52 +0,0 @@
import os
import platform
# On Windows, add the FFmpeg shared-library directory to the DLL search path
# before the node imports below — presumably needed by native extensions they
# load (TODO confirm which import requires it).
if platform.system() == 'Windows':
    os.add_dll_directory(r'C:\Users\lambda\source\scarlet\ffmpeg-n7.0.1-221-g0ab20b5788-win64-gpl-shared-7.0\bin')
import logging
import multiprocessing
from BusClient import BusClient
from nodes.WebRTC import WebRTC
from nodes.Beamformer import Beamformer
from nodes.Loader import Loader
from nodes.Broker import Broker
from nodes.Device import Device
from nodes.ImageCV import ImageCV
from nodes.MainUI import MainUI
from nodes.Robot import Robot
from utils.Msg import KillMsg
def main():
    """Launch the node graph, then block until a shutdown signal arrives.

    Each node object is run in its own child process.  The parent waits on
    the bus for a KillMsg whose name is empty, then kills and reaps all
    children.  (The previous version also created an unused
    ``multiprocessing.Pool()``, leaking its worker processes, and never
    joined the children after killing them.)
    """
    logging.basicConfig(level=logging.INFO)
    multiprocessing.set_start_method('spawn')
    ps = [
        Broker(),
        WebRTC(),
        MainUI(level=logging.DEBUG),
        Device(level=logging.DEBUG),
        ImageCV(),
        Beamformer(level=logging.DEBUG),
        Loader(level=logging.DEBUG),
        Robot(),
    ]
    # One process per node; node instances are callables usable as targets.
    procs = [multiprocessing.Process(target=p) for p in ps]
    for proc in procs:
        proc.start()
    # Block until the shutdown sentinel: a KillMsg with an empty name.
    c = BusClient(KillMsg)
    while True:
        x: KillMsg = c.recv()
        if x.name == '':
            break
    for proc in procs:
        proc.kill()
    for proc in procs:
        proc.join()  # reap children so no zombies are left behind


if __name__ == '__main__':
    main()

View File

@ -1,34 +0,0 @@
import multiprocessing
import threading
import time
import zmq
from nodes.Broker import Broker
def thread2():
    """Consumer thread: print the latest pushed message once a second.

    CONFLATE=1 keeps only the newest queued message, so polling slower than
    the producer deliberately drops intermediate values.
    """
    ctx = zmq.Context()
    sock = ctx.socket(zmq.PULL)
    sock.setsockopt(zmq.CONFLATE, 1)
    sock.connect('tcp://127.0.0.1:5555')
    while True:
        print(sock.recv())
        time.sleep(1)
if __name__ == '__main__':
    # Producer side: PUSH socket bound on 5555, paired with the conflating
    # PULL consumer in thread2 to demonstrate message dropping.
    c = zmq.Context()
    push = c.socket(zmq.PUSH)
    push.bind('tcp://*:5555')
    cnt = 0
    t = threading.Thread(target=thread2)
    t.start()
    # 30 messages at 0.4 s vs. the consumer's 1 s poll: the conflating
    # socket should observe only roughly every other value.
    for i in range(30):
        cnt += 1
        push.send(str(cnt).encode())
        time.sleep(0.4)

View File

@ -1,136 +0,0 @@
import logging
import multiprocessing
import time
from pathlib import Path
import cv2
import numpy as np
from tqdm import tqdm
from nodes.Beamformer import Beamformer
from nodes.Broker import Broker
from nodes.ImageCV import ImageCV
from nodes.Loader import Loader
from nodes.MainUI import MainUI
from nodes.Node import Node
from BusClient import BusClient
from utils.Msg import Msg1, Msg2, BMMsg, TickMsg, StrMsg, KillMsg
from nodes.WebRTC import WebRTC
class M1(Node):
    """Producer node: publish an incrementing counter (as bytes) once a second."""

    def loop(self):
        n = 0
        while True:
            n += 1
            self.send(str(n).encode())
            time.sleep(1)
class M2(Node):
    # Sink node: print everything received from the bus, forever.
    def loop(self):
        while True:
            print(self.recv())
class M3(Node):
    # Frame-replay node: preloads PNG frames from a hard-coded directory into
    # memory as raw RGBA bytes, then streams them as BMMsg over the bus,
    # stopping when a KillMsg with an empty name arrives.
    topics = [StrMsg]
    def loop(self):
        arr = []
        # Preload: decode every PNG once up front so the send loop does no I/O.
        for img in tqdm(list(Path('/home/lambda/Videos/pngs/New Folder/').glob('*.png'))):
            img = cv2.imread(str(img))
            # img = cv2.resize(img, (1920 // 2, 1080 // 2))
            # Assumes every source frame is exactly 1920x1080 BGR — TODO confirm.
            img = img.reshape(1080, 1920, 3)
            # Widen BGR -> BGRA by copying into a zeroed 4-channel buffer
            # (alpha stays 0), then convert to RGBA byte order.
            z = np.zeros((1080, 1920, 4), dtype=np.uint8)
            z[:, :, :3] = img
            img = z
            img = cv2.cvtColor(img, cv2.COLOR_BGRA2RGBA)
            arr.append(img.tobytes())
        # Earlier poll-based variant, kept for reference:
        # while self.isalive:
        #     for b in arr:
        #         self.send(BMMsg(0, b))
        #         # self.pub_socket.send(b)
        #         # self.send(b)
        #         r = self.c.poller.poll(int(1000 / 60))
        #         # print(r)
        #         if r and Msg.decode_msg(r[0][0].recv()).name == '':
        #             self.isalive = False
        #             break
        #         # time.sleep(1 / 60)
        # Replay loop: recv() paces the stream — one frame sent per message
        # received; an empty-named KillMsg shuts the node down.
        while self.isalive:
            for b in arr:
                msg = self.recv()
                if isinstance(msg, KillMsg):
                    if msg.name == '':
                        self.isalive = False
                        break
                self.send(BMMsg(0, b))
                # if r and Msg.decode_msg(r[0][0].recv()).name == '':
class M4(Node):
    # Heartbeat node: publish an empty Msg1 every second.
    def loop(self):
        while True:
            self.send(Msg1())
            time.sleep(1)
class MTIME(Node):
    """Clock node: publish the current wall-clock time as a TickMsg every 10 s."""

    def loop(self):
        while True:
            self.send(TickMsg(time.time()))
            time.sleep(10)
class MLISTEN(Node):
    """Echo node: print each incoming message and answer with a TickMsg.

    Subscribes to StrMsg; an incoming KillMsg with an empty name terminates
    the loop.
    """

    topics = [StrMsg]

    def loop(self):
        while self.isalive:
            incoming = self.recv()
            print(incoming)
            if isinstance(incoming, KillMsg) and incoming.name == '':
                self.isalive = False
                break
            self.send(TickMsg(time.time()))
class M6(Node):
    # Subscribes by explicit entity id rather than by message class.
    topics = [Msg2.eid()]
    def loop(self):
        while True:
            print(self.recv())
if __name__ == '__main__':
    # Integration harness: run each node in its own spawned process, then
    # wait on the bus for a KillMsg with an empty name and kill everything.
    logging.basicConfig(level=logging.INFO)
    multiprocessing.set_start_method('spawn')
    pps = []
    ps = [
        Broker(),
        WebRTC(),
        # M3(),
        MainUI(),
        ImageCV(),
        MLISTEN(),
        Beamformer(),
        Loader(),
    ]
    # Node instances are callables usable as Process targets.
    for p in ps:
        pps.append(multiprocessing.Process(target=p))
    for p in pps:
        p.start()
    # Block until the shutdown sentinel arrives.
    c = BusClient(KillMsg)
    while True:
        x: KillMsg = c.recv()
        if x.name == '':
            break
    # NOTE(review): children are killed but never joined — zombies possible.
    for p in pps:
        p.kill()