commit ae9041e7696a13fa48ddbac1fde255dbceae8811 Author: Vladislav Ostapov Date: Thu May 2 10:48:04 2024 +0300 initial commit diff --git a/AI.cfg b/AI.cfg new file mode 100644 index 0000000..d990b51 --- /dev/null +++ b/AI.cfg @@ -0,0 +1,294 @@ +[net] +# Testing +#batch=1 +#subdivisions=1 +# Training +batch=64 +subdivisions=1 +width=416 +height=416 +channels=3 +momentum=0.9 +decay=0.0005 +angle=0 +saturation = 1.5 +exposure = 1.5 +hue=.1 + +learning_rate=0.00261 +burn_in=1000 + +max_batches = 2000200 +policy=steps +steps=1600000,1800000 +scales=.1,.1 + + +#weights_reject_freq=1001 +#ema_alpha=0.9998 +#equidistant_point=1000 +#num_sigmas_reject_badlabels=3 +#badlabels_rejection_percentage=0.2 + + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=2 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers=-1 +groups=2 +group_id=1 + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=32 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -1,-2 + +[convolutional] +batch_normalize=1 +filters=64 +size=1 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -6,-1 + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers=-1 +groups=2 +group_id=1 + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=64 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -1,-2 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -6,-1 + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers=-1 +groups=2 +group_id=1 + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -1,-2 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -6,-1 + +[maxpool] +size=2 +stride=2 + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +################################## + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + + + +[yolo] +mask = 3,4,5 +anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319 +classes=80 +num=6 +jitter=.3 +scale_x_y = 1.05 +cls_normalizer=1.0 +iou_normalizer=0.07 +iou_loss=ciou +ignore_thresh = .7 +truth_thresh = 1 +random=0 +resize=1.5 +nms_kind=greedynms +beta_nms=0.6 +#new_coords=1 +#scale_x_y = 2.0 + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = -1, 23 + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=255 +activation=linear + +[yolo] +mask = 1,2,3 +anchors = 10,14, 23,27, 37,58, 81,82, 135,169, 344,319 +classes=80 +num=6 
+jitter=.3
+scale_x_y = 1.05
+cls_normalizer=1.0
+iou_normalizer=0.07
+iou_loss=ciou
+ignore_thresh = .7
+truth_thresh = 1
+random=0
+resize=1.5
+nms_kind=greedynms
+beta_nms=0.6
+#new_coords=1
+#scale_x_y = 2.0
diff --git a/AI.weights b/AI.weights
new file mode 100644
index 0000000..27edc5d
Binary files /dev/null and b/AI.weights differ
diff --git a/Nab.wav b/Nab.wav
new file mode 100644
index 0000000..9def235
Binary files /dev/null and b/Nab.wav differ
diff --git a/New.wav b/New.wav
new file mode 100644
index 0000000..6540d5b
Binary files /dev/null and b/New.wav differ
diff --git a/Pot.wav b/Pot.wav
new file mode 100644
index 0000000..12727b2
Binary files /dev/null and b/Pot.wav differ
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0fd71b7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,151 @@
+# work-soc-streamer
+
+## File overview
+
+This section gives a short description of the project files that were modified by the developer (Мария Сухоносова).
+The description may contain inaccuracies.
+
+### File /streamer_utils.py
+
+A hand-written helper library. It contains the class SocketBlocksWrapper, a wrapper around a socket for reading/writing objects or raw data blocks to the socket.
+It is used by the other components of the project.
+
+### File /server.py
+
+The executable file of the server, i.e. the machine that waits for connections from boards and computers.
+The server's task is to listen on a port, require authorization on every incoming connection and, after successful authorization, build something resembling a local network between the relevant connections. The principle is similar to that of a VLAN-capable switch, with the difference that the server requires authorization first.
+
+Requires the file /streamer_utils.py next to it at run time.
+
+### File /client.py
+
+The executable file of the client, i.e. the program that creates a window, shows the picture and reacts to user actions.
+
+Requires the file /streamer_utils.py next to it at run time.
+
+The program starts a separate thread that works with the socket and a thread that works with the GUI.
+
+When the user, for example, presses a button in the GUI, a message of type 'command' is formed and written to the socket with a non-blocking write, provided the socket exists and is open.
+
+The socket thread tries to establish a connection to the server.
+After a successful attempt, authorization takes place (messages of type 'auth').
+After successful authorization the client waits for messages of type 'video'.
+Messages of this type are interpreted as messages carrying a picture; the picture is reconstructed from the message and updated in the GUI thread.
+
+If a connection attempt fails, another attempt is made, and so on until the program is closed.
+
+An illustrative sketch of these message objects is given below, after the descriptions of the configuration files.
+
+### File /board.py
+
+The main executable file for the computer with the camera. It is based on Final-Ochka-v1.2-beta.py:
+one class was added at the beginning of the script, and the frame-sending and class-handling logic was modified.
+
+Like the client script, it starts a thread that works with the connection. In this case the thread listens for all messages of type 'command'. As soon as such a message is received, the command handler is called, which then executes the command.
+
+### File /board-config.json
+
+A JSON file that stores the settings for connecting to the server.
+
+### File /client-config.json
+
+A JSON file that stores the client's connection and authorization settings for the server.
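+
+For reference, an illustrative sketch of the message objects exchanged through SocketBlocksWrapper. The field names are taken from the sources in this commit; the concrete values are only examples:
+
+```python
+# Board -> server: board authorization
+auth_board = {'type': 'auth', 'client-type': 'board', 'name': 'test-board'}
+
+# Client -> server: client authorization against a board account
+auth_client = {'type': 'auth', 'client-type': 'client', 'target': 'test-board', 'password': 'secret'}
+
+# Server -> board/client: authorization result ('failed' comes with a 'description' field)
+auth_response = {'type': 'auth-response', 'status': 'success'}
+
+# Client -> board: a user action
+command = {'type': 'command', 'data': {'action': 'set-class', 'class': 'person'}}
+
+# Board -> client: one JPEG-encoded frame plus the currently selected class id (-1 = all classes)
+video = {'type': 'video', 'data': b'...jpeg bytes...', 'selected-class': -1}
+```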
+
+### File /classes.json
+
+A JSON file containing an array of classes in the following format:
+
+```json
+{
+    "class": "nameOfClass",
+    "label": "Class display name",
+    "color": [R, G, B]
+}
+```
+
+The class field contains the class name. The label field contains the class name that is displayed on the client; the label field is not used yet.
+
+### File /boards.sqlite3
+
+The database file that stores all board names and the password used to connect to each board.
+It contains one single table, created with the following code:
+```sqlite
+CREATE TABLE IF NOT EXISTS users (
+    board_name TEXT NOT NULL PRIMARY KEY,
+    password TEXT NOT NULL DEFAULT ''
+);
+```
+
+## HOW TO RUN
+
+There are three executable files in this project: board.py, client.py, server.py.
+The order in which they are started does not really matter, but if you want to see the picture you need to start all three.
+
+When copying the project to different machines, make sure you take all the resources required by each particular file.
+
+### Board (the board with the camera)
+
+Start command:
+```bash
+python board.py
+```
+
+Files used:
+* board.py (executable file)
+* board-config.json
+* classes.json
+* Nab.wav
+* Pot.wav
+* Zah.wav
+* AI.cfg
+* AI.weights
+* streamer_utils.py
+
+The configuration file specifies: the board name, the server address and the server port.
+
+### Server (the computer everyone connects to)
+
+Start command:
+```bash
+python server.py
+```
+
+Files used:
+* server.py (executable file)
+* streamer_utils.py
+* boards.sqlite3
+
+### Client (the computer that shows the picture)
+
+Start command:
+```bash
+python client.py
+```
+
+Files used:
+* client.py (executable file)
+* classes.json
+* client-config.json
+* streamer_utils.py
+* logo.png
+
+The configuration file specifies the server address and port; the board name and password are entered in the login window.
+
+## How to add a board
+
+You need to edit the database, i.e. the boards.sqlite3 file.
+
+The database can be opened with any utility that supports SQLite3.
+The simplest way is to run in a terminal:
+```bash
+sqlite3 boards.sqlite3
+```
+
+To list all boards and their passwords:
+```sqlite
+SELECT board_name, password FROM users;
+```
+
+Adding/removing/updating records is done in the way native to this database, i.e. via queries (a small sketch is given below).
+
+The database allows data to be changed on the fly. Changes take effect immediately and only for new connections.
+Connections that are already established will not be dropped.
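+
+A minimal sketch of registering a new board from Python (the board name and password below are just example values):
+
+```python
+import sqlite3
+
+# Open the same database file that server.py uses.
+with sqlite3.connect("boards.sqlite3") as db:
+    # Register a new board together with its connection password.
+    db.execute(
+        "INSERT INTO users (board_name, password) VALUES (?, ?);",
+        ("new-board", "secret"),
+    )
+    # The context manager commits the transaction on exit.
+```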
+ diff --git a/Zah.wav b/Zah.wav new file mode 100644 index 0000000..adf5b7a Binary files /dev/null and b/Zah.wav differ diff --git a/__pycache__/streamer_utils.cpython-38.pyc b/__pycache__/streamer_utils.cpython-38.pyc new file mode 100644 index 0000000..8b6edaa Binary files /dev/null and b/__pycache__/streamer_utils.cpython-38.pyc differ diff --git a/board-config.json b/board-config.json new file mode 100644 index 0000000..797d75f --- /dev/null +++ b/board-config.json @@ -0,0 +1,5 @@ +{ + "name": "test-board", + "server-address": "192.168.2.91", + "server-port": 40100 +} diff --git a/board.py b/board.py new file mode 100644 index 0000000..9c1b04b --- /dev/null +++ b/board.py @@ -0,0 +1,337 @@ +# import packages +from openal import * +from imutils.video import VideoStream +import itertools +import imutils +import time +import cv2 +import json +import numpy as np +import spidev + +# Client +import io +import json +import time +import traceback +from threading import Thread +from streamer_utils import SocketBlocksWrapper, read_json_config +from PIL import Image + +spi = spidev.SpiDev() +spi.open(1, 0) + +spi.bits_per_word = 8 +spi.max_speed_hz = 500000 + +X = bool(0) # иниц-я глоб. переменной +X_New = bool(0) +X_pred = bool(0) # иниц-я глоб. переменной +startTime = float(time.time() - 10) # иниц-я глоб. переменной + + +CONFIG = read_json_config('board-config.json') +CLASSES = read_json_config('classes.json') + + +class ConnectionDaemon(Thread): + def __init__(self): + super().__init__(daemon=True) + self._sock = None + self._message_handler = None + + def set_message_handler(self, handler: callable): + self._message_handler = handler + + def __do_call_message_handler(self, res): + if self._message_handler is not None: + try: + self._message_handler(res) + except Exception: + traceback.print_exc() + + def __do_session(self): + try: + with SocketBlocksWrapper.connect(CONFIG['server-address'], CONFIG['server-port']) as sock: + print("ConnectionDaemon: open connection") + self._sock = sock + self._sock.write_object({'type': 'auth', 'client-type': 'board', 'name': CONFIG['name']}) + res = self._sock.read_object() + if res is None: + return + print(res) + if 'status' in res: + if res['status'] == 'success': + while True: + res = self._sock.read_object() + if res is None: + break + self.__do_call_message_handler(res) + except Exception: + traceback.print_exc() + finally: + self.socket = None + + def run(self): + while True: + print("ConnectionDaemon: start session...") + self.__do_session() + time.sleep(5) + + def send_frame(self, fr): + if self._sock is not None: + try: + to_send = { + 'type': 'video', + 'data': None, + "selected-class": selected_class_id + } + if fr is not None: + fr = imutils.resize(fr, width=640, height=360) + buffer = cv2.imencode('.jpg', fr, [int(cv2.IMWRITE_JPEG_QUALITY), 60])[1] + data_encode = np.array(buffer) + to_send["data"] = data_encode.tobytes() + self._sock.write_object(to_send) + except Exception: + traceback.print_exc() + + def send_image(self, img: Image): + if self._sock is not None: + try: + out = io.BytesIO() + img.save(out, format="JPEG") + self._sock.write_object({ + 'type': 'video', + 'data': out.getvalue(), + "selected-class": selected_class_id + }) + except Exception: + traceback.print_exc() + + +# камера не движется +Left = bool(0) +Right = bool(0) +Up = bool(0) +Down = bool(0) + +# -2 = нейронка отключена, -1 = включены все классы, остальное - id класса из списка CLASSES +selected_class_id = -2 + + + +# функция, которая вызывается при получении команды 
+def message_handler(msg): + global selected_class_id + global Left + global Right + global Up + global Down + print(msg) + if msg["type"] == "command": + # отлично, наше сообщение + act = msg["data"]["action"] + if act == "left": + Left = 1 + if act == "right": + Right = 1 + if act == "up": + Up = 1 + if act == "down": + Down = 1 + if act == "start": + selected_class_id = -1 + elif act == "stop": + selected_class_id = -2 + elif act == "set-class": + if selected_class_id < -1: + print("message_handler: WARMING: set class-id while board is stop") + else: + cl = msg["data"]["class"] + selected_class_id = -1 # если не найдем, будут выбраны все классы + for i in range(0, len(CLASSES)): + if CLASSES[i]["class"] == cl: + selected_class_id = i + break + + +print("============ Initialize connection daemon ============") +connection_daemon = ConnectionDaemon() +connection_daemon.set_message_handler(message_handler) +connection_daemon.start() + + + +def notify(): + global startTime + endTime = time.time() + if endTime - startTime > 1.5: # прошло 1.5 секунды + # if 1>0: #режим прерывания сообщений + global X + global X_New + global X_pred + if X == 0 and X_pred == 1: # поменялось на 0 + source = oalOpen("Pot.wav") # Потерян + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + if X==1 and X_pred==1 and X_New==0 and (endTime - startTime > 6): + source = oalOpen("Nab.wav") #Потерян + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + if X==1 and X_pred==1 and X_New==1: + source = oalOpen("New.wav") #new object + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + elif X == 1 and X_pred == 0: # поменялось на 1 + source = oalOpen("Zah.wav") # Захвачен + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + X_pred = X # обновляем предыдущее значение + + +print("[INFO] loading model...") +net = cv2.dnn_DetectionModel('AI.cfg', 'AI.weights') + +#net.setPreferableBackend(cv2.dnn.DNN_BACKEND_INFERENCE_ENGINE) +#net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD) + +picSize_X = 640 +picSize_Y = 480 +net.setInputSize(128, 128) +net.setInputScale(1.0 / 255) +net.setInputSwapRB(True) + +print("[INFO] starting video stream...") +vs = VideoStream(src=0).start() +# warm up the camera for a couple of seconds +time.sleep(2.0) + +MAX_sX = 0 +MAX_sY = 0 +MAX_eX = 0 +MAX_eY = 0 + +centr_X = 0 +centr_Y = 0 + +pred_centr_X = 0 +pred_centr_Y = 0 + +while True: + if selected_class_id >= 0: + t0 = time.time() + frame = vs.read() + #frame = imutils.resize(frame, width=1280, height=720) + #(h, w) = frame.shape[:2] + + S_MAX = 0 + X = 0 + X_New = 0 + # находим объекты и возвращаем их параметры + classes, confidences, boxes = net.detect(frame, confThreshold=0.18, nmsThreshold=0.5) + # создаём рамки и надписи + for classId, confidence, box in zip(list(itertools.chain(classes)), list(itertools.chain(confidences)), boxes): + # if classId == 39: # вот так делать не стоит, работать такое точно не будет + if selected_class_id == -1 or classId == selected_class_id: + X = 1 + + label = f"{CLASSES[classId]['class']}" + label = '%s: %.2f' % (label, confidence) + color = CLASSES[classId]["color"] + + labelSize, baseLine = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) + left, top, width, heigth = box + S = width * heigth + print ('S =', S, 'pics') + if S>S_MAX: + S_MAX = S + MAX_sX = left + MAX_sY = top + MAX_eX = left + width + MAX_eY = top + heigth + MAX_label = label + print("Object detected: ", label) + + if (X == 1): + # Draw a rectangle across 
the boundary of the object + cv2.rectangle(frame, (MAX_sX, MAX_sY), (MAX_eX, MAX_eY), color, 2) + y = MAX_sY - 15 if MAX_sY - 15 > 15 else MAX_sY + 15 + # Put a text outside the rectangular detection + # Choose the font of your choice: FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PL> + cv2.putText(frame, MAX_label, (MAX_sX, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) + + centr_X = (MAX_sX+MAX_eX)/2 + centr_Y = (MAX_sY+MAX_eY)/2 + + if (abs(centr_X-pred_centr_X) > picSize_X/4 or abs(centr_Y-pred_centr_Y) > picSize_Y/4): + X_New = 1 + + if (X == 1 and Left == 0 and Right == 0 and Up == 0 and Down == 0): + if (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + txData = [0b00000111] #Вправо + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + txData = [0b00000110] #Влево + spi.xfer(txData) + elif (centr_Y > (picSize_Y/2+picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + txData = [0b00001101] #Вверх + spi.xfer(txData) + elif (centr_Y < (picSize_Y/2-picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + txData = [0b00001001] #Вниз + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + txData = [0b00001010] #Влево/вниз + spi.xfer(txData) + elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + txData = [0b00001011] #Вправо/вниз + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + txData = [0b00001110] #Влево/вверх + spi.xfer(txData) + elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + txData = [0b00001111] #Вправо/вверх + spi.xfer(txData) + else: + txData = [0b00000101] #Центр + spi.xfer(txData) + elif (Left == 0 and Right == 1 and Up == 0 and Down == 0): + txData = [0b00000111] #Вправо + spi.xfer(txData) + elif (Left == 1 and Right == 0 and Up == 0 and Down == 0): + txData = [0b00000110] #Влево + spi.xfer(txData) + elif (Left == 0 and Right == 0 and Up == 1 and Down == 0): + txData = [0b00001001] #Вверх + spi.xfer(txData) + elif (Left == 0 and Right == 0 and Up == 0 and Down == 1): + txData = [0b00001101] #Вниз + spi.xfer(txData) + + pred_centr_X = centr_X + pred_centr_Y = centr_Y + + # обнуление + Left = 0 + Right = 0 + Up = 0 + Down = 0 + My_FPS = 1 / (time.time() - t0) + FPS_label = 'FPS=%2.f' % My_FPS + labelSize, baseLine = cv2.getTextSize(FPS_label, cv2.FONT_HERSHEY_SIMPLEX, 1.5, 1) + cv2.rectangle(frame, (4, 4), (4 + labelSize[0], 4 + labelSize[1] + baseLine), (255, 0, 155), cv2.FILLED) + cv2.putText(frame, FPS_label, (4, 4 + labelSize[1]), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 0, 0)) + notify() + + # отправка фрейма на сервер + connection_daemon.send_frame(frame) + + else: + # отправка раз в секунду пустого фрейма + connection_daemon.send_frame(None) + time.sleep(1) + +spi.close() +# Destroy windows and cleanup +cv2.destroyAllWindows() +# Stop the video stream +vs.stop() diff --git a/board5.py b/board5.py new file mode 100644 index 0000000..f679e47 --- /dev/null +++ b/board5.py @@ -0,0 +1,350 @@ +# import packages +from openal import * +from imutils.video import VideoStream +import itertools +import imutils +import time +import cv2 +import json +import numpy as np +import spidev + +# Client +import io +import json +import time +import traceback +from threading 
import Thread +from streamer_utils import SocketBlocksWrapper, read_json_config +from PIL import Image + +spi = spidev.SpiDev() +spi.open(1, 0) + +spi.bits_per_word = 8 +spi.max_speed_hz = 500000 + +X = bool(0) # иниц-я глоб. переменной +X_New = bool(0) +X_pred = bool(0) # иниц-я глоб. переменной +startTime = float(time.time() - 10) # иниц-я глоб. переменной + + +CONFIG = read_json_config('board-config.json') +CLASSES = read_json_config('classes.json') + + +class ConnectionDaemon(Thread): + def __init__(self): + super().__init__(daemon=True) + self._sock = None + self._message_handler = None + + def set_message_handler(self, handler: callable): + self._message_handler = handler + + def __do_call_message_handler(self, res): + if self._message_handler is not None: + try: + self._message_handler(res) + except Exception: + traceback.print_exc() + + def __do_session(self): + try: + with SocketBlocksWrapper.connect(CONFIG['server-address'], CONFIG['server-port']) as sock: + print("ConnectionDaemon: open connection") + self._sock = sock + self._sock.write_object({'type': 'auth', 'client-type': 'board', 'name': CONFIG['name']}) + res = self._sock.read_object() + if res is None: + return + print(res) + if 'status' in res: + if res['status'] == 'success': + while True: + res = self._sock.read_object() + if res is None: + break + self.__do_call_message_handler(res) + except Exception: + traceback.print_exc() + finally: + self.socket = None + + def run(self): + while True: + print("ConnectionDaemon: start session...") + self.__do_session() + time.sleep(5) + + def send_frame(self, fr): + if self._sock is not None: + try: + to_send = { + 'type': 'video', + 'data': None, + "selected-class": selected_class_id + } + if fr is not None: + fr = imutils.resize(fr, width=640, height=360) + buffer = cv2.imencode('.jpg', fr, [int(cv2.IMWRITE_JPEG_QUALITY), 60])[1] + data_encode = np.array(buffer) + to_send["data"] = data_encode.tobytes() + self._sock.write_object(to_send) + except Exception: + traceback.print_exc() + + def send_image(self, img: Image): + if self._sock is not None: + try: + out = io.BytesIO() + img.save(out, format="JPEG") + self._sock.write_object({ + 'type': 'video', + 'data': out.getvalue(), + "selected-class": selected_class_id + }) + except Exception: + traceback.print_exc() + + +# камера не движется +Left = bool(0) +Right = bool(0) +Up = bool(0) +Down = bool(0) +lazer = bool(0) + +# -2 = нейронка отключена, -1 = включены все классы, остальное - id класса из списка CLASSES +selected_class_id = -2 + + + +# функция, которая вызывается при получении команды +def message_handler(msg): + global selected_class_id + global Left + global Right + global Up + global Down + global lazer + print(msg) + if msg["type"] == "command": + # отлично, наше сообщение + act = msg["data"]["action"] + if act == "lazerOn": + lazer = 1 + if act == "lazerOff": + lazer = 0 + if act == "left": + Left = 1 + if act == "right": + Right = 1 + if act == "up": + Up = 1 + if act == "down": + Down = 1 + if act == "start": + selected_class_id = -1 + elif act == "stop": + selected_class_id = -2 + elif act == "set-class": + if selected_class_id < -1: + print("message_handler: WARMING: set class-id while board is stop") + else: + cl = msg["data"]["class"] + selected_class_id = -1 # если не найдем, будут выбраны все классы + for i in range(0, len(CLASSES)): + if CLASSES[i]["class"] == cl: + selected_class_id = i + break + + +print("============ Initialize connection daemon ============") +connection_daemon = ConnectionDaemon() 
+connection_daemon.set_message_handler(message_handler) +connection_daemon.start() + + + +def notify(): + global startTime + endTime = time.time() + if endTime - startTime > 1.5: # прошло 1.5 секунды + # if 1>0: #режим прерывания сообщений + global X + global X_New + global X_pred + if X == 0 and X_pred == 1: # поменялось на 0 + source = oalOpen("Pot.wav") # Потерян + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + if X==1 and X_pred==1 and X_New==0 and (endTime - startTime > 6): + source = oalOpen("Nab.wav") #Потерян + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + if X==1 and X_pred==1 and X_New==1: + source = oalOpen("New.wav") #new object + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + elif X == 1 and X_pred == 0: # поменялось на 1 + source = oalOpen("Zah.wav") # Захвачен + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + X_pred = X # обновляем предыдущее значение + + +print("[INFO] loading model...") +net = cv2.dnn_DetectionModel('AI.cfg', 'AI.weights') + +#net.setPreferableBackend(cv2.dnn.DNN_BACKEND_INFERENCE_ENGINE) +#net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD) + +picSize_X = 640 +picSize_Y = 480 +net.setInputSize(128, 128) +net.setInputScale(1.0 / 255) +net.setInputSwapRB(True) + +print("[INFO] starting video stream...") +vs = VideoStream(src=0).start() +# warm up the camera for a couple of seconds +time.sleep(2.0) + +MAX_sX = 0 +MAX_sY = 0 +MAX_eX = 0 +MAX_eY = 0 + +centr_X = 0 +centr_Y = 0 + +pred_centr_X = 0 +pred_centr_Y = 0 + +while True: + if lazer == 1: + txData = [0b11000000] #Вкл + spi.xfer(txData) + elif lazer == 0: + txData = [0b10000000] #Выкл + spi.xfer(txData) + if selected_class_id >= -1: + t0 = time.time() + frame = vs.read() + #frame = imutils.resize(frame, width=1280, height=720) + #(h, w) = frame.shape[:2] + + S_MAX = 0 + X = 0 + X_New = 0 + # находим объекты и возвращаем их параметры + classes, confidences, boxes = net.detect(frame, confThreshold=0.18, nmsThreshold=0.5) + # создаём рамки и надписи + for classId, confidence, box in zip(list(itertools.chain(classes)), list(itertools.chain(confidences)), boxes): + # if classId == 39: # вот так делать не стоит, работать такое точно не будет + if selected_class_id == -1 or classId == selected_class_id: + X = 1 + + label = f"{CLASSES[classId]['class']}" + label = '%s: %.2f' % (label, confidence) + color = CLASSES[classId]["color"] + + labelSize, baseLine = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) + left, top, width, heigth = box + S = width * heigth + print ('S =', S, 'pics') + if S>S_MAX: + S_MAX = S + MAX_sX = left + MAX_sY = top + MAX_eX = left + width + MAX_eY = top + heigth + MAX_label = label + print("Object detected: ", label) + + # TODO этот кусок кода перенести чуть выше и сделать так, чтобы он корректно отрисовывал все найденые объекты, а не только один как сейчас + if (X == 1): + # Draw a rectangle across the boundary of the object + cv2.rectangle(frame, (MAX_sX, MAX_sY), (MAX_eX, MAX_eY), color, 2) + y = MAX_sY - 15 if MAX_sY - 15 > 15 else MAX_sY + 15 + # Put a text outside the rectangular detection + # Choose the font of your choice: FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PL> + cv2.putText(frame, MAX_label, (MAX_sX, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) + + centr_X = (MAX_sX+MAX_eX)/2 + centr_Y = (MAX_sY+MAX_eY)/2 + + if (abs(centr_X-pred_centr_X) > picSize_X/4 or abs(centr_Y-pred_centr_Y) > picSize_Y/4): + X_New = 1 + + if (X == 1 and Left == 0 and Right == 0 and Up == 0 and 
Down == 0): + if (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + txData = [0b00000111] #Вправо + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + txData = [0b00000110] #Влево + spi.xfer(txData) + elif (centr_Y > (picSize_Y/2+picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + txData = [0b00001101] #Вверх + spi.xfer(txData) + elif (centr_Y < (picSize_Y/2-picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + txData = [0b00001001] #Вниз + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + txData = [0b00001010] #Влево/вниз + spi.xfer(txData) + elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + txData = [0b00001011] #Вправо/вниз + spi.xfer(txData) + elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + txData = [0b00001110] #Влево/вверх + spi.xfer(txData) + elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + txData = [0b00001111] #Вправо/вверх + spi.xfer(txData) + else: + txData = [0b00000101] #Центр + spi.xfer(txData) + elif (Left == 0 and Right == 1 and Up == 0 and Down == 0): + txData = [0b00000111] #Вправо + spi.xfer(txData) + elif (Left == 1 and Right == 0 and Up == 0 and Down == 0): + txData = [0b00000110] #Влево + spi.xfer(txData) + elif (Left == 0 and Right == 0 and Up == 1 and Down == 0): + txData = [0b00001001] #Вверх + spi.xfer(txData) + elif (Left == 0 and Right == 0 and Up == 0 and Down == 1): + txData = [0b00001101] #Вниз + spi.xfer(txData) + + pred_centr_X = centr_X + pred_centr_Y = centr_Y + + # обнуление + Left = 0 + Right = 0 + Up = 0 + Down = 0 + My_FPS = 1 / (time.time() - t0) + FPS_label = 'FPS=%2.f' % My_FPS + labelSize, baseLine = cv2.getTextSize(FPS_label, cv2.FONT_HERSHEY_SIMPLEX, 1.5, 1) + cv2.rectangle(frame, (4, 4), (4 + labelSize[0], 4 + labelSize[1] + baseLine), (255, 0, 155), cv2.FILLED) + cv2.putText(frame, FPS_label, (4, 4 + labelSize[1]), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 0, 0)) + notify() + + # отправка фрейма на сервер + connection_daemon.send_frame(frame) + + else: + # отправка раз в секунду пустого фрейма, типа нет видео + connection_daemon.send_frame(None) + time.sleep(1) + +spi.close() +# Destroy windows and cleanup +cv2.destroyAllWindows() +# Stop the video stream +vs.stop() diff --git a/boards.sqlite3 b/boards.sqlite3 new file mode 100644 index 0000000..937ec58 Binary files /dev/null and b/boards.sqlite3 differ diff --git a/classes.json b/classes.json new file mode 100644 index 0000000..0f95667 --- /dev/null +++ b/classes.json @@ -0,0 +1,722 @@ +[ + { + "class": "person", + "label": "человек", + "color": [ + 8, + 191, + 82 + ] + }, + { + "class": "bicycle", + "label": "велосипед", + "color": [ + 213, + 80, + 133 + ] + }, + { + "class": "car", + "label": "машина", + "color": [ + 63, + 126, + 162 + ] + }, + { + "class": "motorbike", + "label": "motorbike", + "color": [ + 183, + 90, + 201 + ] + }, + { + "class": "aeroplane", + "label": "aeroplane", + "color": [ + 6, + 14, + 236 + ] + }, + { + "class": "bus", + "label": "автобус", + "color": [ + 213, + 87, + 227 + ] + }, + { + "class": "train", + "label": "поезд", + "color": [ + 193, + 123, + 6 + ] + }, + { + "class": "truck", + "label": "truck", + "color": [ + 116, + 122, + 201 + ] 
+ }, + { + "class": "boat", + "label": "лодка", + "color": [ + 194, + 224, + 88 + ] + }, + { + "class": "traffic light", + "label": "traffic light", + "color": [ + 46, + 164, + 10 + ] + }, + { + "class": "fire hydrant", + "label": "fire hydrant", + "color": [ + 181, + 19, + 144 + ] + }, + { + "class": "stop sign", + "label": "stop sign", + "color": [ + 17, + 245, + 50 + ] + }, + { + "class": "parking meter", + "label": "parking meter", + "color": [ + 185, + 144, + 158 + ] + }, + { + "class": "bench", + "label": "bench", + "color": [ + 93, + 179, + 129 + ] + }, + { + "class": "bird", + "label": "птица", + "color": [ + 97, + 55, + 110 + ] + }, + { + "class": "cat", + "label": "кошка", + "color": [ + 151, + 228, + 14 + ] + }, + { + "class": "dog", + "label": "собака", + "color": [ + 105, + 169, + 98 + ] + }, + { + "class": "horse", + "label": "лошадь", + "color": [ + 29, + 183, + 166 + ] + }, + { + "class": "sheep", + "label": "овца", + "color": [ + 43, + 245, + 65 + ] + }, + { + "class": "cow", + "label": "короча", + "color": [ + 33, + 34, + 24 + ] + }, + { + "class": "elephant", + "label": "слон", + "color": [ + 191, + 42, + 137 + ] + }, + { + "class": "bear", + "label": "мишка", + "color": [ + 151, + 21, + 242 + ] + }, + { + "class": "zebra", + "label": "зебра", + "color": [ + 211, + 10, + 237 + ] + }, + { + "class": "giraffe", + "label": "giraffe", + "color": [ + 94, + 156, + 124 + ] + }, + { + "class": "backpack", + "label": "backpack", + "color": [ + 255, + 194, + 176 + ] + }, + { + "class": "umbrella", + "label": "umbrella", + "color": [ + 173, + 196, + 240 + ] + }, + { + "class": "handbag", + "label": "handbag", + "color": [ + 3, + 157, + 60 + ] + }, + { + "class": "tie", + "label": "tie", + "color": [ + 47, + 19, + 83 + ] + }, + { + "class": "suitcase", + "label": "suitcase", + "color": [ + 84, + 62, + 207 + ] + }, + { + "class": "frisbee", + "label": "frisbee", + "color": [ + 140, + 135, + 50 + ] + }, + { + "class": "skis", + "label": "skis", + "color": [ + 37, + 133, + 177 + ] + }, + { + "class": "snowboard", + "label": "snowboard", + "color": [ + 88, + 128, + 229 + ] + }, + { + "class": "sports ball", + "label": "sports ball", + "color": [ + 39, + 30, + 120 + ] + }, + { + "class": "kite", + "label": "kite", + "color": [ + 104, + 15, + 104 + ] + }, + { + "class": "baseball bat", + "label": "baseball bat", + "color": [ + 136, + 0, + 226 + ] + }, + { + "class": "baseball glove", + "label": "baseball glove", + "color": [ + 129, + 16, + 120 + ] + }, + { + "class": "skateboard", + "label": "skateboard", + "color": [ + 245, + 31, + 8 + ] + }, + { + "class": "surfboard", + "label": "surfboard", + "color": [ + 15, + 23, + 32 + ] + }, + { + "class": "tennis racket", + "label": "tennis racket", + "color": [ + 191, + 175, + 44 + ] + }, + { + "class": "bottle", + "label": "бутылка", + "color": [ + 130, + 81, + 23 + ] + }, + { + "class": "wine glass", + "label": "wine glass", + "color": [ + 52, + 204, + 75 + ] + }, + { + "class": "cup", + "label": "cup", + "color": [ + 217, + 4, + 5 + ] + }, + { + "class": "fork", + "label": "fork", + "color": [ + 22, + 155, + 17 + ] + }, + { + "class": "knife", + "label": "knife", + "color": [ + 195, + 230, + 217 + ] + }, + { + "class": "spoon", + "label": "spoon", + "color": [ + 196, + 155, + 208 + ] + }, + { + "class": "bowl", + "label": "bowl", + "color": [ + 53, + 79, + 142 + ] + }, + { + "class": "banana", + "label": "banana", + "color": [ + 151, + 207, + 131 + ] + }, + { + "class": "apple", + "label": "apple", + "color": [ + 199, + 225, + 68 + ] + }, + { 
+ "class": "sandwich", + "label": "sandwich", + "color": [ + 193, + 158, + 167 + ] + }, + { + "class": "orange", + "label": "orange", + "color": [ + 74, + 189, + 95 + ] + }, + { + "class": "broccoli", + "label": "broccoli", + "color": [ + 48, + 234, + 238 + ] + }, + { + "class": "carrot", + "label": "carrot", + "color": [ + 225, + 113, + 215 + ] + }, + { + "class": "hot dog", + "label": "hot dog", + "color": [ + 68, + 168, + 87 + ] + }, + { + "class": "pizza", + "label": "pizza", + "color": [ + 163, + 151, + 216 + ] + }, + { + "class": "donut", + "label": "donut", + "color": [ + 211, + 179, + 218 + ] + }, + { + "class": "cake", + "label": "cake", + "color": [ + 45, + 98, + 135 + ] + }, + { + "class": "chair", + "label": "chair", + "color": [ + 11, + 22, + 204 + ] + }, + { + "class": "sofa", + "label": "sofa", + "color": [ + 187, + 207, + 214 + ] + }, + { + "class": "pottedplant", + "label": "pottedplant", + "color": [ + 88, + 7, + 174 + ] + }, + { + "class": "bed", + "label": "bed", + "color": [ + 4, + 180, + 42 + ] + }, + { + "class": "diningtable", + "label": "diningtable", + "color": [ + 39, + 112, + 122 + ] + }, + { + "class": "toilet", + "label": "toilet", + "color": [ + 99, + 23, + 252 + ] + }, + { + "class": "tvmonitor", + "label": "tvmonitor", + "color": [ + 147, + 33, + 230 + ] + }, + { + "class": "laptop", + "label": "laptop", + "color": [ + 240, + 9, + 130 + ] + }, + { + "class": "mouse", + "label": "mouse", + "color": [ + 83, + 215, + 128 + ] + }, + { + "class": "remote", + "label": "remote", + "color": [ + 112, + 73, + 202 + ] + }, + { + "class": "keyboard", + "label": "keyboard", + "color": [ + 222, + 219, + 122 + ] + }, + { + "class": "cell phone", + "label": "cell phone", + "color": [ + 95, + 241, + 55 + ] + }, + { + "class": "microwave", + "label": "microwave", + "color": [ + 1, + 207, + 104 + ] + }, + { + "class": "oven", + "label": "oven", + "color": [ + 68, + 0, + 254 + ] + }, + { + "class": "toaster", + "label": "toaster", + "color": [ + 69, + 118, + 241 + ] + }, + { + "class": "sink", + "label": "sink", + "color": [ + 147, + 186, + 199 + ] + }, + { + "class": "refrigerator", + "label": "refrigerator", + "color": [ + 140, + 150, + 173 + ] + }, + { + "class": "book", + "label": "book", + "color": [ + 151, + 28, + 129 + ] + }, + { + "class": "clock", + "label": "clock", + "color": [ + 87, + 114, + 56 + ] + }, + { + "class": "vase", + "label": "vase", + "color": [ + 228, + 65, + 145 + ] + }, + { + "class": "scissors", + "label": "scissors", + "color": [ + 106, + 245, + 80 + ] + }, + { + "class": "teddy bear", + "label": "teddy bear", + "color": [ + 56, + 115, + 221 + ] + }, + { + "class": "hair drier", + "label": "hair drier", + "color": [ + 213, + 201, + 111 + ] + }, + { + "class": "toothbrush", + "label": "toothbrush", + "color": [ + 125, + 106, + 34 + ] + } +] \ No newline at end of file diff --git a/client-config.json b/client-config.json new file mode 100644 index 0000000..a7aa600 --- /dev/null +++ b/client-config.json @@ -0,0 +1,4 @@ +{ + "server-address": "192.168.2.91", + "server-port": 40100 +} diff --git a/client.py b/client.py new file mode 100644 index 0000000..ed67d73 --- /dev/null +++ b/client.py @@ -0,0 +1,383 @@ +#!/bin/python + +import io +import time +import traceback +from threading import Thread, Event, Lock +import gi +from PIL import Image + +from streamer_utils import SocketBlocksWrapper, read_json_config + +gi.require_version("Gtk", "3.0") +from gi.repository import Gtk, GLib, GdkPixbuf, Gdk + +CLASSES = read_json_config('classes.json') +CONFIG = 
read_json_config('client-config.json') + + +class ConnectionDaemon: + def __init__(self, address, port, object_receive_callback: callable): + self._address = address + self._port = port + self._object_receive_callback = object_receive_callback + + self._sock = None + self._lock = Lock() + + self.__login = None + self.__password = None + + # объект события, только для ожидания авторизации + self.__auth_event = Event() + self.__auth_callback = None + + self.__auth_callback_is_done = False + self.__auth_success = False + + self._conn_thread = Thread(target=self.__run, daemon=True) + self._conn_thread.start() + + def auth(self, login, password, callback: callable): + with self._lock: + self.__login = login + self.__password = password + self.__auth_callback = callback + self.__auth_event.set() + + def __do_session(self, login, password, auth_callback: callable): + try: + with SocketBlocksWrapper.connect(CONFIG["server-address"], CONFIG["server-port"]) as sock: + with self._lock: + self._sock = sock + sock.write_object( + { + 'type': 'auth', + 'client-type': 'client', + 'target': login, + 'password': password + }) + res = sock.read_object() + print(res) + if 'status' in res: + if res['status'] == 'success': + self.__auth_success = True + if auth_callback is not None: + auth_callback(True, "success") + self.__auth_callback_is_done = True + while True: + res = sock.read_object() + if res is None: + break + with self._lock: + callback = self._object_receive_callback + callback(res) + else: + self.__auth_callback_is_done = True + if auth_callback is not None: + auth_callback(False, res['description']) + else: + raise Exception("'status' is not defined in response") + except Exception: + traceback.print_exc() + finally: + with self._lock: + self._sock = None + + def __run(self): + # нужно ждать данных авторизации + while True: + if not self.__auth_success: + self.__auth_event.wait() + with self._lock: + login, password, callback = self.__login, self.__password, self.__auth_callback + self.__auth_callback_is_done = False + self.__auth_event.clear() + + if login is None or password is None: + continue + + # делаем сессию + self.__do_session(login, password, callback) + + if not self.__auth_callback_is_done and callback is not None and callable(callback): + callback(False, "Ошибка ввода-вывода") + + if not self.__auth_success: + with self._lock: + if not self.__auth_event.is_set(): + self.__login = None + self.__password = None + else: + time.sleep(5) + print("Try to reconnect...") + + def send_object(self, obj: dict): + try: + self._sock.write_object(obj) + except Exception as e: + # traceback.print_exc() + print(f"Failed to send command {obj} ({e})") + + +class MainWindow(Gtk.Window): + def __create_main_widget(self): + # основная "коробка" + root_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL) + root_box.connect("key-press-event", self.on_key_press) + + self.image = Gtk.Image() + self.image.set_vexpand(True) + self.image.set_can_focus(True) + + no_connection_label = Gtk.Label(label="Нет подключения к плате") + no_connection_label.set_vexpand(True) + no_connection_label.set_hexpand(True) + + no_video_label = Gtk.Label(label="Плата подключена\nОжидание действия пользователя") + no_video_label.set_vexpand(True) + no_video_label.set_hexpand(True) + + self._image_stack = Gtk.Stack() + self._image_stack.set_transition_type(Gtk.StackTransitionType.SLIDE_DOWN) + self._image_stack.add_named(no_connection_label, "no-connection") + self._image_stack.add_named(no_video_label, "no-video") + 
self._image_stack.add_named(self.image, "video") + + root_box.pack_start(self._image_stack, expand=True, fill=True, padding=0) + + # нижняя сторона окна, панелька с управлением + toolbar = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=4) + toolbar.set_halign(Gtk.Align.CENTER) + + button_start = Gtk.Button(label="Старт") + button_start.connect("clicked", lambda widget: self.__send_action('start')) + + button_stop = Gtk.Button(label="Стоп") + button_stop.connect("clicked", lambda widget: self.__send_action('stop')) + + button_lazer_start = Gtk.Button(label="Старт лазер") + button_lazer_start.connect("clicked", lambda widget: self.__send_action('lazerOn')) + + button_lazer_stop = Gtk.Button(label="Стоп лазер") + button_lazer_stop.connect("clicked", lambda widget: self.__send_action('lazerOff')) + + self.select_class = Gtk.ComboBoxText() + self.select_class.connect("changed", self.on_class_select) + + sorted_list = [c for c in CLASSES] + self.select_class.append_text("<__ (все) __>") + for c in sorted_list: + self.select_class.append_text(c["label"]) + + toolbar.pack_start(button_start, expand=False, fill=False, padding=0) + toolbar.pack_start(button_stop, expand=False, fill=False, padding=0) + toolbar.pack_start(self.select_class, expand=False, fill=False, padding=0) + toolbar.pack_start(button_lazer_start, expand=False, fill=False, padding=0) + toolbar.pack_start(button_lazer_stop, expand=False, fill=False, padding=0) + + root_box.pack_start(toolbar, expand=False, fill=True, padding=0) + + return root_box + + def __connection_auth_handler(self, result, message): + def gui_function(): + self._spinner.stop() + self.__password_entry.set_editable(True) + self.__login_entry.set_editable(True) + self.__button_auth.set_sensitive(True) + if result: + self.root.set_visible_child_name("main") + else: + dialog = Gtk.MessageDialog( + transient_for=self, + flags=0, + message_type=Gtk.MessageType.ERROR, + buttons=Gtk.ButtonsType.OK, + text=message, + title="Ошибка аутентификации" + ) + dialog.run() + dialog.destroy() + + GLib.idle_add(lambda: gui_function()) + + def __auth_handler(self, widget): + self._spinner.start() + self.__password_entry.set_editable(False) + self.__login_entry.set_editable(False) + self.__button_auth.set_sensitive(False) + self._daemon.auth(self.__login_entry.get_text(), + self.__password_entry.get_text(), + self.__connection_auth_handler) + + def __create_auth_widget(self): + hbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL) + hbox.set_halign(Gtk.Align.CENTER) + hbox.set_valign(Gtk.Align.CENTER) + + logo = Gtk.Image.new_from_file("logo.png") + logo_label = Gtk.Label(label="Ведапроект") + self.__button_auth = Gtk.Button(label="Вход") + self.__button_auth.connect("clicked", self.__auth_handler) + + label_login = Gtk.Label(label="Логин") + self.__login_entry = Gtk.Entry(can_default=True) + + label_password = Gtk.Label(label="Пароль") + self.__password_entry = Gtk.Entry() + self.__password_entry.set_visibility(False) + + self.__login_entry.connect("activate", lambda widget: self.__password_entry.grab_focus()) + self.__password_entry.connect("activate", self.__auth_handler) + + hbox.pack_start(logo, expand=False, fill=False, padding=0) + hbox.pack_start(logo_label, expand=False, fill=False, padding=10) + + hbox.pack_start(label_login, expand=False, fill=False, padding=10) + hbox.pack_start(self.__login_entry, expand=False, fill=False, padding=0) + + hbox.pack_start(label_password, expand=False, fill=False, padding=10) + hbox.pack_start(self.__password_entry, expand=False, 
fill=False, padding=0) + + hbox.pack_start(self.__button_auth, expand=False, fill=False, padding=10) + + # спинер, хрень которая будет показывать что идет процесс подключения + self._spinner = Gtk.Spinner() + hbox.pack_start(self._spinner, True, True, 0) + + return hbox + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + self._remote_selected_class = False + + self._visible_image_child = "no-connection" + + self._daemon = ConnectionDaemon(CONFIG["server-address"], CONFIG["server-port"], self.__on_object_received) + + self.set_default_size(600, 400) + self.set_title("Ведапроект") + + # основная "коробка" + self.root = Gtk.Stack() + self.root.set_transition_type(Gtk.StackTransitionType.SLIDE_LEFT) + self.root.add_named(self.__create_auth_widget(), "auth") + self.root.add_named(self.__create_main_widget(), "main") + self.add(self.root) + + # ---------------------- Handlers -------------------------- + + def _send_command(self, cmd): + self._daemon.send_object({'type': 'command', 'data': cmd}) + + def __send_action(self, act): + self._send_command({'action': act}) + + def __get_selected_class(self): + label = self.select_class.get_active_text() + for i in range(0, len(CLASSES)): + if CLASSES[i]["label"] == label: + # теперь метка "не выбрано" имеет индекс 0, поэтому все остальные смещены на +1 + return i + + return None + + def on_class_select(self, widget): + c = self.__get_selected_class() + if self._remote_selected_class: + self._remote_selected_class = False + else: + if c is None: + cl = "__all__" + else: + cl = CLASSES[c]["class"] + print(f'select class: id={c}, {cl}') + self._send_command({'action': 'set-class', 'class': cl}) + + def on_key_press(self, window, event_key: Gdk.EventKey): + commands = { + "up": [Gdk.KEY_w, Gdk.KEY_W], # Gdk.KEY_Up + "left": [Gdk.KEY_a, Gdk.KEY_A], # Gdk.KEY_Left + "down": [Gdk.KEY_s, Gdk.KEY_S], # Gdk.KEY_Down + "right": [Gdk.KEY_d, Gdk.KEY_D], # Gdk.KEY_Right + "start": Gdk.KEY_z, + "stop": Gdk.KEY_x, + } + keyval = event_key.get_keyval() + action = None + if keyval[0]: + keyval = keyval[1] + # прошерстим команды + for key in commands: + if type(commands[key]) == list: + if keyval in commands[key]: + action = key + break + else: + if commands[key] == keyval: + action = key + break + + if action is not None: + self._send_command({'action': action}) + + def __update_image(self, image): + need_frame = "no-video" + if image is not None: + need_frame = "video" + data = image.tobytes() + w, h = image.size + data = GLib.Bytes.new(data) + pix = GdkPixbuf.Pixbuf.new_from_bytes(data, GdkPixbuf.Colorspace.RGB, False, 8, w, h, w * 3) + self.image.set_from_pixbuf(pix) + + if self._visible_image_child is not need_frame: + self._have_image = need_frame + self._image_stack.set_visible_child_name(need_frame) + + # print(f"{datetime.datetime.now()} MyWindow: image received") + + def __update_selected_class(self, cl): + c = self.__get_selected_class() + + if c is None: + c = 0 + else: + c += 1 + + if cl < -1: + cl = 0 + else: + cl += 1 + + if c != cl: + if cl != c: + self._remote_selected_class = True + # то же самое, из-за метки все классы уехали на +1 + self.select_class.set_active(cl) + + def __on_object_received(self, res): + if res["type"] == "video": + if res["data"] is None: + GLib.idle_add(lambda: self.__update_image(None)) + else: + data = io.BytesIO(res["data"]) + img = Image.open(data) + GLib.idle_add(lambda: self.__update_image(img)) + + if 'selected-class' in res: + GLib.idle_add(lambda: self.__update_selected_class(res['selected-class'])) + + 
+def main(): + """ Run the main application""" + win = MainWindow() + win.connect("destroy", Gtk.main_quit) + win.show_all() + Gtk.main() + + +if __name__ == '__main__': + main() diff --git a/host_test.py b/host_test.py new file mode 100644 index 0000000..e1eb4c0 --- /dev/null +++ b/host_test.py @@ -0,0 +1,337 @@ +# import packages +from openal import * +from imutils.video import VideoStream +import itertools +import imutils +import time +import cv2 +import json +import numpy as np +import spidev + +# Client +import io +import json +import time +import traceback +from threading import Thread +from streamer_utils import SocketBlocksWrapper, read_json_config +from PIL import Image + +#spi = spidev.SpiDev() +#spi.open(1, 0) + +#spi.bits_per_word = 8 +#spi.max_speed_hz = 500000 + +X = bool(0) # иниц-я глоб. переменной +X_New = bool(0) +X_pred = bool(0) # иниц-я глоб. переменной +startTime = float(time.time() - 10) # иниц-я глоб. переменной + + +CONFIG = read_json_config('board-config.json') +CLASSES = read_json_config('classes.json') + + +class ConnectionDaemon(Thread): + def __init__(self): + super().__init__(daemon=True) + self._sock = None + self._message_handler = None + + def set_message_handler(self, handler: callable): + self._message_handler = handler + + def __do_call_message_handler(self, res): + if self._message_handler is not None: + try: + self._message_handler(res) + except Exception: + traceback.print_exc() + + def __do_session(self): + try: + with SocketBlocksWrapper.connect(CONFIG['server-address'], CONFIG['server-port']) as sock: + print("ConnectionDaemon: open connection") + self._sock = sock + self._sock.write_object({'type': 'auth', 'client-type': 'board', 'name': CONFIG['name']}) + res = self._sock.read_object() + if res is None: + return + print(res) + if 'status' in res: + if res['status'] == 'success': + while True: + res = self._sock.read_object() + if res is None: + break + self.__do_call_message_handler(res) + except Exception: + traceback.print_exc() + finally: + self.socket = None + + def run(self): + while True: + print("ConnectionDaemon: start session...") + self.__do_session() + time.sleep(5) + + def send_frame(self, fr): + if self._sock is not None: + try: + to_send = { + 'type': 'video', + 'data': None, + "selected-class": selected_class_id + } + if fr is not None: + fr = imutils.resize(fr, width=640, height=360) + buffer = cv2.imencode('.jpg', fr, [int(cv2.IMWRITE_JPEG_QUALITY), 60])[1] + data_encode = np.array(buffer) + to_send["data"] = data_encode.tobytes() + self._sock.write_object(to_send) + except Exception: + traceback.print_exc() + + def send_image(self, img: Image): + if self._sock is not None: + try: + out = io.BytesIO() + img.save(out, format="JPEG") + self._sock.write_object({ + 'type': 'video', + 'data': out.getvalue(), + "selected-class": selected_class_id + }) + except Exception: + traceback.print_exc() + + +# камера не движется +Left = bool(0) +Right = bool(0) +Up = bool(0) +Down = bool(0) + +# -2 = нейронка отключена, -1 = включены все классы, остальное - id класса из списка CLASSES +selected_class_id = -2 + + + +# функция, которая вызывается при получении команды +def message_handler(msg): + global selected_class_id + global Left + global Right + global Up + global Down + print(msg) + if msg["type"] == "command": + # отлично, наше сообщение + act = msg["data"]["action"] + if act == "left": + Left = 1 + if act == "right": + Right = 1 + if act == "up": + Up = 1 + if act == "down": + Down = 1 + if act == "start": + selected_class_id = -1 + 
elif act == "stop": + selected_class_id = -2 + elif act == "set-class": + if selected_class_id < -1: + print("message_handler: WARMING: set class-id while board is stop") + else: + cl = msg["data"]["class"] + selected_class_id = -1 # если не найдем, будут выбраны все классы + for i in range(0, len(CLASSES)): + if CLASSES[i]["class"] == cl: + selected_class_id = i + break + + +print("============ Initialize connection daemon ============") +connection_daemon = ConnectionDaemon() +connection_daemon.set_message_handler(message_handler) +connection_daemon.start() + + + +def notify(): + global startTime + endTime = time.time() + if endTime - startTime > 1.5: # прошло 1.5 секунды + # if 1>0: #режим прерывания сообщений + global X + global X_New + global X_pred + if X == 0 and X_pred == 1: # поменялось на 0 + source = oalOpen("Pot.wav") # Потерян + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + if X==1 and X_pred==1 and X_New==0 and (endTime - startTime > 6): + source = oalOpen("Nab.wav") #Потерян + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + if X==1 and X_pred==1 and X_New==1: + source = oalOpen("New.wav") #new object + source.play() #воспр. 1 раз + startTime = time.time() #отсчёт времени + elif X == 1 and X_pred == 0: # поменялось на 1 + source = oalOpen("Zah.wav") # Захвачен + source.play() # воспр. 1 раз + startTime = time.time() # отсчёт времени + X_pred = X # обновляем предыдущее значение + + +print("[INFO] loading model...") +net = cv2.dnn_DetectionModel('AI.cfg', 'AI.weights') + +#net.setPreferableBackend(cv2.dnn.DNN_BACKEND_INFERENCE_ENGINE) +#net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD) + +picSize_X = 640 +picSize_Y = 480 +net.setInputSize(128, 128) +net.setInputScale(1.0 / 255) +net.setInputSwapRB(True) + +print("[INFO] starting video stream...") +vs = VideoStream(src=0).start() +# warm up the camera for a couple of seconds +time.sleep(2.0) + +MAX_sX = 0 +MAX_sY = 0 +MAX_eX = 0 +MAX_eY = 0 + +centr_X = 0 +centr_Y = 0 + +pred_centr_X = 0 +pred_centr_Y = 0 + +while True: + if selected_class_id >= 0: + t0 = time.time() + frame = vs.read() + #frame = imutils.resize(frame, width=1280, height=720) + #(h, w) = frame.shape[:2] + + S_MAX = 0 + X = 0 + X_New = 0 + # находим объекты и возвращаем их параметры + classes, confidences, boxes = net.detect(frame, confThreshold=0.18, nmsThreshold=0.5) + # создаём рамки и надписи + for classId, confidence, box in zip(list(itertools.chain(classes)), list(itertools.chain(confidences)), boxes): + # if classId == 39: # вот так делать не стоит, работать такое точно не будет + if selected_class_id == -1 or classId == selected_class_id: + X = 1 + + label = f"{CLASSES[classId]['class']}" + label = '%s: %.2f' % (label, confidence) + color = CLASSES[classId]["color"] + + labelSize, baseLine = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) + left, top, width, heigth = box + S = width * heigth + print ('S =', S, 'pics') + if S>S_MAX: + S_MAX = S + MAX_sX = left + MAX_sY = top + MAX_eX = left + width + MAX_eY = top + heigth + MAX_label = label + print("Object detected: ", label) + + if (X == 1): + # Draw a rectangle across the boundary of the object + cv2.rectangle(frame, (MAX_sX, MAX_sY), (MAX_eX, MAX_eY), color, 2) + y = MAX_sY - 15 if MAX_sY - 15 > 15 else MAX_sY + 15 + # Put a text outside the rectangular detection + # Choose the font of your choice: FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PL> + cv2.putText(frame, MAX_label, (MAX_sX, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) + + centr_X = 
(MAX_sX+MAX_eX)/2 + centr_Y = (MAX_sY+MAX_eY)/2 + + if (abs(centr_X-pred_centr_X) > picSize_X/4 or abs(centr_Y-pred_centr_Y) > picSize_Y/4): + X_New = 1 + + # if (X == 1 and Left == 0 and Right == 0 and Up == 0 and Down == 0): + # if (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + # txData = [0b00000111] #Вправо + # spi.xfer(txData) + # elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2+picSize_Y/10) and centr_Y > (picSize_Y/2-picSize_Y/10)): + # txData = [0b00000110] #Влево + # spi.xfer(txData) + # elif (centr_Y > (picSize_Y/2+picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + # txData = [0b00001101] #Вверх + # spi.xfer(txData) + # elif (centr_Y < (picSize_Y/2-picSize_Y/10) and centr_X < (picSize_X/2+picSize_X/10) and centr_X > (picSize_X/2-picSize_X/10)): + # txData = [0b00001001] #Вниз + # spi.xfer(txData) + # elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + # txData = [0b00001010] #Влево/вниз + # spi.xfer(txData) + # elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y < (picSize_Y/2-picSize_Y/10)): + # txData = [0b00001011] #Вправо/вниз + # spi.xfer(txData) + # elif (centr_X < (picSize_X/2-picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + # txData = [0b00001110] #Влево/вверх + # spi.xfer(txData) + # elif (centr_X > (picSize_X/2+picSize_X/10) and centr_Y > (picSize_Y/2+picSize_Y/10)): + # txData = [0b00001111] #Вправо/вверх + # spi.xfer(txData) + # else: + # txData = [0b00000101] #Центр + # spi.xfer(txData) + # elif (Left == 0 and Right == 1 and Up == 0 and Down == 0): + # txData = [0b00000111] #Вправо + # spi.xfer(txData) + # elif (Left == 1 and Right == 0 and Up == 0 and Down == 0): + # txData = [0b00000110] #Влево + # spi.xfer(txData) + # elif (Left == 0 and Right == 0 and Up == 1 and Down == 0): + # txData = [0b00001001] #Вверх + # spi.xfer(txData) + # elif (Left == 0 and Right == 0 and Up == 0 and Down == 1): + # txData = [0b00001101] #Вниз + # spi.xfer(txData) + + pred_centr_X = centr_X + pred_centr_Y = centr_Y + + # обнуление + Left = 0 + Right = 0 + Up = 0 + Down = 0 + My_FPS = 1 / (time.time() - t0) + FPS_label = 'FPS=%2.f' % My_FPS + labelSize, baseLine = cv2.getTextSize(FPS_label, cv2.FONT_HERSHEY_SIMPLEX, 1.5, 1) + cv2.rectangle(frame, (4, 4), (4 + labelSize[0], 4 + labelSize[1] + baseLine), (255, 0, 155), cv2.FILLED) + cv2.putText(frame, FPS_label, (4, 4 + labelSize[1]), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 0, 0)) + notify() + + # отправка фрейма на сервер + connection_daemon.send_frame(frame) + + else: + # отправка раз в секунду пустого фрейма + connection_daemon.send_frame(None) + time.sleep(1) + +spi.close() +# Destroy windows and cleanup +cv2.destroyAllWindows() +# Stop the video stream +vs.stop() diff --git a/logo.png b/logo.png new file mode 100644 index 0000000..5edcf78 Binary files /dev/null and b/logo.png differ diff --git a/server.py b/server.py new file mode 100644 index 0000000..093e46a --- /dev/null +++ b/server.py @@ -0,0 +1,222 @@ +#!/bin/python + +import traceback +from datetime import datetime, timedelta +import socket +from threading import Thread, Lock +from streamer_utils import SocketBlocksWrapper + +import sqlite3 +db_connection = sqlite3.connect("boards.sqlite3", check_same_thread=False) + +LISTEN = ('', 40100) +MAX_ATTEMPTS = 3 +BLOCK_TIME_MINUTES = 3 + +__log_lock = Lock() + + +def _log(message, owner="__base__"): + with __log_lock: + 
print(f"[{datetime.now().strftime('%H:%M:%S.%f')[:-3]}] {owner}: {message}") + + +class ServerWorker(Thread): + def __init__(self, factory, conn, addr): + super().__init__(daemon=True) + self.conn = conn + self._log_name = f"Worker-{addr}" + self.factory = factory + self._client_type = None + self._client_name = None + self._cursor = db_connection.cursor() + + def _auth_board(self, obj): + if 'name' not in obj: + return True, 'missing field "name"!' + + self._cursor.execute("SELECT board_name, password FROM users WHERE board_name = ?;", (obj['name'], )) + res = self._cursor.fetchone() + if res is None: + return True, 'this board is not registered!' + + self._client_name = obj['name'] + self._client_type = "board" + return False, None + + def _auth_client(self, obj): + if 'target' not in obj: + return True, 'missing field "target"!' + if 'password' not in obj: + return True, 'missing field "password"!' + + self._cursor.execute("SELECT board_name, password, blocked_until, wrong_attempts FROM users " + "WHERE board_name = ?;", + (obj['target'], )) + res = self._cursor.fetchone() + + # сначала проверка на то, что плата существует + if res is None: + return True, 'Аккаунт не найден!' + + # теперь на то, что аккаунт не заблокирован + now = datetime.now() + delta = res[2] - int(round(now.timestamp())) + print(delta) + if delta >= 0: + return True, f'Доступ отклонен: аккаунт заблокирован! Разблокировка через {delta} секунд' + + # проверка пароля + if res[1] != obj['password']: + if res[3] >= MAX_ATTEMPTS - 1: + dt = now + timedelta(minutes=BLOCK_TIME_MINUTES) + t = int(round(dt.timestamp())) + self._cursor.execute("UPDATE users SET wrong_attempts = 0, blocked_until = ? WHERE board_name = ?;", + (t, res[0])) + db_connection.commit() + return True, f'Доступ отклонен: аккаунт заблокирован! Разблокировка через {BLOCK_TIME_MINUTES} минут(ы)' + else: + self._cursor.execute("UPDATE users SET wrong_attempts = wrong_attempts + 1 WHERE board_name = ?;", + (res[0], )) + db_connection.commit() + return True, 'Доступ отклонен: неверный пароль!' + + # обновление неудачных попыток в случае если пароль верный + if res[3] != 0: + self._cursor.execute("UPDATE users SET wrong_attempts = 0 WHERE board_name = ?;", + (res[0], )) + db_connection.commit() + + self._client_name = obj['target'] + self._client_type = "client" + return False, None + + def _auth(self): + _log("Wait for auth...", self._log_name) + err = True + + obj = self.conn.read_object() + if type(obj) != dict: + description = "invalid object type!" + elif 'type' not in obj: + description = 'missing field "type"!' + elif obj['type'] != 'auth': + description = 'field "type" must have value "auth"!' + elif 'client-type' not in obj: + description = 'missing field "client-type"!' + elif obj['client-type'] != 'board' and obj['client-type'] != 'client': + description = f'unsupported client type: "{obj["client-type"]}"' + else: + if obj['client-type'] == 'board': + err, description = self._auth_board(obj) + else: + err, description = self._auth_client(obj) + + response = { + 'type': 'auth-response' + } + + if err: + response["status"] = "failed" + response["description"] = description + else: + response["status"] = "success" + + _log(f"auth {response['status']}! 
+
+        self.conn.write_object(response)
+        return not err
+
+    def run(self):
+        try:
+            with self.conn:
+                if self._auth():
+                    while True:
+                        recv = self.conn.read_object()
+                        if recv is None:
+                            break
+                        # _log(f"received {recv['type']} frame", self._log_name)
+                        self.factory.route_packet(self, recv)
+
+        except Exception:
+            traceback.print_exc()
+
+        finally:
+            self.factory.remove_connection(self)
+
+            _log("Close connection!", self._log_name)
+
+    def get_name(self):
+        return self._client_name
+
+    def get_log_name(self):
+        return self._log_name
+
+    def send_packet(self, packet):
+        self.conn.write_object(packet)
+        # _log("send routed packet", self._log_name)
+
+
+class ServerWorkerFactory:
+    def __init__(self):
+        self._lock = Lock()
+        self._connections = []
+
+    def add_connection(self, conn, addr):
+        with self._lock:
+            worker = ServerWorker(self, conn, addr)
+            worker.start()
+            self._connections.append(worker)
+
+    def remove_connection(self, conn: ServerWorker):
+        with self._lock:
+            if conn in self._connections:
+                _log(f"remove connection {conn.get_log_name()}", "ServerWorkerFactory")
+                self._connections.remove(conn)
+
+    def route_packet(self, owner: ServerWorker, data):
+        connections = None
+        with self._lock:
+            if owner in self._connections:
+                connections = self._connections.copy()
+
+        if connections is not None:
+            name = owner.get_name()
+            for c in connections:
+                if c == owner:
+                    continue
+                if c.get_name() == name:
+                    c.send_packet(data)
+
+
+def server_listener():
+    _log("============ SERVER ============")
+    _log("Creating table...")
+    cur = db_connection.cursor()
+    cur.execute("""CREATE TABLE IF NOT EXISTS users (
+        board_name TEXT NOT NULL PRIMARY KEY,
+        password TEXT DEFAULT '' NOT NULL,
+        blocked_until INT DEFAULT 0 NOT NULL,
+        wrong_attempts INT DEFAULT 0 NOT NULL
+    );""")
+    db_connection.commit()
+
+    with socket.socket() as sock:
+        sock.bind(LISTEN)
+        _log("socket bind success", "ServerTask")
+        sock.listen(8)
+
+        _log("socket listen...", "ServerTask")
+        worker = ServerWorkerFactory()
+
+        while True:
+            connection, addr = sock.accept()
+            _log(f"connected from {addr}", "ServerTask")
+            worker.add_connection(SocketBlocksWrapper(connection), addr)
+
+
+if __name__ == '__main__':
+    server_listener()
+
+
+
diff --git a/streamer_utils.py b/streamer_utils.py
new file mode 100644
index 0000000..2760032
--- /dev/null
+++ b/streamer_utils.py
@@ -0,0 +1,70 @@
+import pickle
+import socket
+import json
+
+
+def read_json_config(filename: str):
+    with open(filename, 'r') as file:
+        return json.loads(file.read())
+
+
+TCP_CHUNK_SIZE = 16384
+
+
+class SocketBlocksWrapper:
+    def __init__(self, s, address: str = None, port: int = None):
+        self.sock = s
+
+    def get_socket(self):
+        return self.sock
+
+    def _read_block(self):
+        arr = self.sock.recv(8)
+        if len(arr) < 8:
+            return None
+        data_size = int.from_bytes(arr, 'little')
+
+        # the payload has to be read in chunks: a single recv() call is not
+        # guaranteed to return the whole block
+        received_size = 0
+        data = bytearray()
+        while received_size < data_size:
+            to_recv = data_size - received_size
+            if to_recv > TCP_CHUNK_SIZE:
+                to_recv = TCP_CHUNK_SIZE
+            r = self.sock.recv(to_recv)
+            if len(r) == 0:
+                # the peer closed the connection in the middle of a block
+                return None
+            received_size += len(r)
+            data += r
+        return data
+
+    def read_object(self):
+        raw = self._read_block()
+        if raw is None:
+            return None
+        return pickle.loads(raw)
+
+    def _write_block(self, data: bytes):
+        data_size = len(data).to_bytes(8, 'little')
+        to_send = bytearray(data_size)
+        to_send += data
+        # print(f"libstreamer: write {len(to_send)} bytes")
+        self.sock.sendall(to_send)  # sendall() makes sure the whole length-prefixed block is written
+
+    def write_object(self, obj):
+        to_send = pickle.dumps(obj)
+        self._write_block(to_send)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.sock.close()
+
+    @staticmethod
+    def connect(host: str, port: int):
+        sock = socket.socket()
+        sock.connect((host, port))
+        return SocketBlocksWrapper(sock)
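
For reference, the wire format implemented in streamer_utils.py is an 8-byte little-endian length prefix followed by a pickled Python object, and server.py only routes objects after a successful 'auth' handshake. The sketch below is illustrative and not part of the repository: it assumes the server is reachable on localhost:40100 (the port from LISTEN in server.py) and that a board account named "board-1" with password "secret" exists in boards.sqlite3; both names are made up and must be replaced with real values. Since the protocol is pickle-based, it should only be exposed to trusted peers.

# minimal_client.py -- illustrative sketch, not part of the repository
from streamer_utils import SocketBlocksWrapper

HOST, PORT = "localhost", 40100              # port taken from LISTEN in server.py

with SocketBlocksWrapper.connect(HOST, PORT) as conn:
    # the server routes nothing until the connection is authorized
    conn.write_object({
        "type": "auth",
        "client-type": "client",
        "target": "board-1",                 # hypothetical board name
        "password": "secret",                # hypothetical password
    })
    response = conn.read_object()
    if response is None or response.get("status") != "success":
        raise SystemExit(f"auth failed: {response}")

    # after a successful handshake the server forwards every object sent by
    # the board with the same name; here we just print the message types
    while True:
        msg = conn.read_object()
        if msg is None:
            break                            # connection closed
        print("received:", msg.get("type"))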