From 9b6bbd23421044af9eb45893dcbb7a0a20dc8a59 Mon Sep 17 00:00:00 2001
From: laemmerzahlkr91200
Date: Wed, 26 Nov 2025 16:58:44 +0100
Subject: [PATCH] Test + alternative input option
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Test area and plan B for input
---
 gesture_input_osc_kris.py | 171 ++++++++++++++++++++++++++++++++++++++
 test_touch_area.py        |  77 +++++++++++++++++
 2 files changed, 248 insertions(+)
 create mode 100644 gesture_input_osc_kris.py
 create mode 100644 test_touch_area.py

diff --git a/gesture_input_osc_kris.py b/gesture_input_osc_kris.py
new file mode 100644
index 0000000..a94f0c3
--- /dev/null
+++ b/gesture_input_osc_kris.py
@@ -0,0 +1,171 @@
+import cv2
+import numpy as np
+import mediapipe as mp
+import time
+import json
+from pythonosc.udp_client import SimpleUDPClient
+
+# ===========================
+# OSC Setup
+# ===========================
+OSC_IP = "127.0.0.1"
+OSC_PORT = 5005
+client = SimpleUDPClient(OSC_IP, OSC_PORT)
+
+# ===========================
+# Game Screen Size
+# ===========================
+SCREEN_WIDTH = 900
+SCREEN_HEIGHT = 600
+
+# ===========================
+# Load Calibration
+# ===========================
+try:
+    with open("calibration.json", "r") as f:
+        src_points = np.array(json.load(f), dtype=np.float32)
+    print("📐 Calibration loaded:", src_points)
+except (FileNotFoundError, json.JSONDecodeError):
+    print("❌ No usable calibration.json found! Touch mapping will be wrong!")
+    src_points = np.array([[0, 0], [1, 0], [1, 1], [0, 1]], dtype=np.float32)
+
+dst_points = np.array([
+    [0, 0],
+    [SCREEN_WIDTH, 0],
+    [SCREEN_WIDTH, SCREEN_HEIGHT],
+    [0, SCREEN_HEIGHT]
+], dtype=np.float32)
+
+H, _ = cv2.findHomography(src_points, dst_points)
+
+def map_point_homography(x, y):
+    p = np.array([[x, y]], dtype=np.float32)
+    p = np.array([p])
+    mapped = cv2.perspectiveTransform(p, H)[0][0]
+    return int(mapped[0]), int(mapped[1])
+
+# ===========================
+# Mediapipe Setup (optional)
+# ===========================
+mp_hands = mp.solutions.hands
+mp_draw = mp.solutions.drawing_utils
+hands = mp_hands.Hands(max_num_hands=1, min_detection_confidence=0.6)
+
+# ===========================
+# CAMERA
+# ===========================
+cap = cv2.VideoCapture(0)
+if not cap.isOpened():
+    print("❌ Touch camera not available!")
+    exit(1)
+
+print("\n🟦 TOP-DOWN TOUCH DETECTION MODE")
+print("   Camera above surface → looking down")
+print("   Fingertip = dark spot on bright surface")
+print("   Touch = fingertip stable and near surface\n")
+
+# ===========================
+# PARAMETERS
+# ===========================
+MIN_AREA = 250    # Minimum fingertip blob size
+MAX_AREA = 7000   # Maximum fingertip blob size (a whole hand is too big)
+THRESH = 160      # Threshold for binary inversion (dark finger on bright background)
+DEBOUNCE = 0.18   # Minimum time between two touch events (seconds)
+
+last_touch_time = 0
+
+# ===========================
+# MAIN LOOP
+# ===========================
+while True:
+    ret, frame = cap.read()
+    if not ret:
+        break
+
+    frame = cv2.flip(frame, 1)
+    h, w, _ = frame.shape
+
+    # -------- Mediapipe landmark (not used for touch, but helps stabilize)
+    rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+    res = hands.process(rgb_frame)
+
+    fingertip_hint = None
+
+    if res.multi_hand_landmarks:
+        lm = res.multi_hand_landmarks[0]
+        mp_draw.draw_landmarks(frame, lm, mp_hands.HAND_CONNECTIONS)
+
+        # Landmark 8 = index fingertip
+        fx = int(lm.landmark[8].x * w)
+        fy = int(lm.landmark[8].y * h)
+        fingertip_hint = (fx, fy)
+
+        cv2.circle(frame, fingertip_hint, 5, (0, 255, 255), -1)
+
+    # -------- THRESHOLD FOR TOP-DOWN (dark fingertip)
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+    blur = cv2.GaussianBlur(gray, (11, 11), 0)
+
+    # Inverted threshold = dark objects → white
+    _, binary = cv2.threshold(blur, THRESH, 255, cv2.THRESH_BINARY_INV)
+
+    kernel = np.ones((5, 5), np.uint8)
+    binary = cv2.morphologyEx(binary, cv2.MORPH_OPEN, kernel)
+
+    contours, _ = cv2.findContours(binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+
+    touch_point = None
+
+    # -------- find the correct fingertip blob
+    for c in contours:
+        area = cv2.contourArea(c)
+        if area < MIN_AREA or area > MAX_AREA:
+            continue
+
+        M = cv2.moments(c)
+        if M["m00"] == 0:
+            continue
+
+        cx = int(M["m10"] / M["m00"])
+        cy = int(M["m01"] / M["m00"])
+
+        # match to mediapipe if available (more stable)
+        if fingertip_hint:
+            if abs(cx - fingertip_hint[0]) < 100 and abs(cy - fingertip_hint[1]) < 100:
+                touch_point = (cx, cy)
+                break
+        else:
+            touch_point = (cx, cy)
+            break
+
+    # -------- FOUND TOUCH
+    if touch_point is not None:
+        now = time.time()
+        if now - last_touch_time > DEBOUNCE:
+            last_touch_time = now
+
+            tx, ty = touch_point
+            sx, sy = map_point_homography(tx, ty)
+
+            # keep inside game window
+            sx = max(0, min(SCREEN_WIDTH, sx))
+            sy = max(0, min(SCREEN_HEIGHT, sy))
+
+            client.send_message("/touch", [sx, sy])
+            print(f"📨 TOUCH → {sx}, {sy}")
+
+            # debug draw
+            cv2.circle(frame, (tx, ty), 12, (0, 255, 0), 2)
+            cv2.putText(frame, "TOUCH", (tx + 10, ty),
+                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
+
+    # -------- windows
+    cv2.imshow("Top-Down Touch", frame)
+    cv2.imshow("Binary", binary)
+
+    if cv2.waitKey(1) & 0xFF == 27:
+        break
+
+cap.release()
+cv2.destroyAllWindows()
+
diff --git a/test_touch_area.py b/test_touch_area.py
new file mode 100644
index 0000000..6ee5315
--- /dev/null
+++ b/test_touch_area.py
@@ -0,0 +1,77 @@
+import pygame
+from pythonosc import dispatcher, osc_server
+import threading
+# Run with: python test_touch_area.py
+SCREEN_WIDTH = 900
+SCREEN_HEIGHT = 600
+
+# Last received touch point
+touch_pos = None
+
+def osc_touch(address, x, y):
+    global touch_pos
+    touch_pos = (int(x), int(y))
+    print("Touch received:", touch_pos)
+
+
+def start_osc():
+    disp = dispatcher.Dispatcher()
+    disp.map("/touch", osc_touch)
+
+    server = osc_server.ThreadingOSCUDPServer(("127.0.0.1", 5005), disp)
+    print("🔌 OSC touch test listening on port 5005")
+    server.serve_forever()
+
+
+def draw_quadrants(screen):
+    """Colored quadrants for orientation."""
+    colors = [(50, 50, 200), (200, 50, 50), (50, 180, 50), (200, 200, 50)]
+    rects = [
+        pygame.Rect(0, 0, SCREEN_WIDTH//2, SCREEN_HEIGHT//2),
+        pygame.Rect(SCREEN_WIDTH//2, 0, SCREEN_WIDTH//2, SCREEN_HEIGHT//2),
+        pygame.Rect(0, SCREEN_HEIGHT//2, SCREEN_WIDTH//2, SCREEN_HEIGHT//2),
+        pygame.Rect(SCREEN_WIDTH//2, SCREEN_HEIGHT//2, SCREEN_WIDTH//2, SCREEN_HEIGHT//2),
+    ]
+    for i, r in enumerate(rects):
+        pygame.draw.rect(screen, colors[i], r)
+        pygame.draw.rect(screen, (0, 0, 0), r, 2)
+
+
+def main():
+    pygame.init()
+    screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
+    pygame.display.set_caption("🧪 Touch test area")
+
+    font = pygame.font.SysFont(None, 28)
+
+    clock = pygame.time.Clock()
+
+    # Start the OSC server in the background
+    threading.Thread(target=start_osc, daemon=True).start()
+
+    global touch_pos
+
+    while True:
+        for event in pygame.event.get():
+            if event.type == pygame.QUIT:
+                return
+
+        screen.fill((30, 30, 30))
+
+        # Show the quadrants
+        draw_quadrants(screen)
+
+        # Show the touch point
+        if touch_pos is not None:
+            pygame.draw.circle(screen, (255, 255, 255), touch_pos, 12)
+            pygame.draw.circle(screen, (0, 0, 0), touch_pos, 12, 2)
+
+            txt = font.render(f"{touch_pos[0]}, {touch_pos[1]}", True, (255, 255, 255))
+            screen.blit(txt, (touch_pos[0] + 15, touch_pos[1] - 10))
+
+        pygame.display.flip()
+        clock.tick(60)
+
+
+if __name__ == "__main__":
+    main()
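
Note: gesture_input_osc_kris.py expects a calibration.json next to the script. Judging from the loader in the patch, it must contain the four corners of the touch surface in camera pixel coordinates, in the same order as dst_points (top-left, top-right, bottom-right, bottom-left). A minimal sketch for producing such a file; the corner coordinates below are made-up placeholders, not measured values:

    import json

    # Placeholder corners of the play area as seen by the touch camera,
    # ordered top-left, top-right, bottom-right, bottom-left to match dst_points.
    # Replace with values from your own calibration step.
    corners = [[112, 84], [531, 80], [540, 402], [105, 410]]

    with open("calibration.json", "w") as f:
        json.dump(corners, f)

To try the two scripts together, start test_touch_area.py first (it listens on 127.0.0.1:5005), then gesture_input_osc_kris.py; both sides already agree on the /touch OSC address and the 900x600 coordinate space.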