From 4ed8120690ab82df61c901fe57166dc257c323d5 Mon Sep 17 00:00:00 2001
From: Elise Angela Bwemba
Date: Wed, 19 Nov 2025 17:17:33 +0000
Subject: [PATCH] Update game and gesture recognition
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 Finished_Memory_Mouse.py | 142 ++++++++++++++++++---------------------
 gesture_input_osc.py     | 100 +++++++++++++++++----------
 2 files changed, 128 insertions(+), 114 deletions(-)

diff --git a/Finished_Memory_Mouse.py b/Finished_Memory_Mouse.py
index fca0051..7da0f43 100644
--- a/Finished_Memory_Mouse.py
+++ b/Finished_Memory_Mouse.py
@@ -28,7 +28,6 @@ def start_osc_server():
     disp.map("/touch", osc_touch)
     disp.map("/clap", osc_clap)
 
-    # ⚠ Port an deine Gesture-App anpassen:
     # Dein gesture_input_osc sendet an 127.0.0.1:5005 → hier auch 5005
     server = osc_server.ThreadingOSCUDPServer(("127.0.0.1", 5005), disp)
     print("🔊 OSC server läuft auf Port 5005")
@@ -103,12 +102,10 @@ class MemoryGame:
         self.pair_count = 6
         self.player_mode = 2
 
-        # Confirmation phase
+        # "Confirmation phase" = we are waiting for a clap
         self.awaiting_confirmation = False
-        self.confirmation_result = None
-        self.correct_answer_expected = None
         self.confirmation_start_time = None
-        self.confirmation_time_limit = 5
+        self.confirmation_time_limit = 5  # seconds
 
         # Visuals
         self.card_back = None
@@ -169,6 +166,8 @@ class MemoryGame:
         self.found_pairs = 0
         self.current_player = 0
         self.scores = [0, 0]
+        self.awaiting_confirmation = False
+        self.confirmation_start_time = None
 
         cols = 4
         rows = (len(self.cards) + cols - 1) // cols
@@ -214,6 +213,11 @@ class MemoryGame:
         screen.blit(title, (20, 20))
         screen.blit(score_text, (20, 50))
 
+        # Hint: when 2 cards are face up, wait for a clap
+        if self.awaiting_confirmation and len(self.selected) == 2:
+            hint = self.small_font.render("👏 Clap to confirm the pair!", True, (255, 255, 0))
+            screen.blit(hint, (SCREEN_WIDTH // 2 - hint.get_width() // 2, 80))
+
         for i, rect in enumerate(self.card_rects):
             if self.matched[i]:
                 pygame.draw.rect(screen, MATCH_COLOR, rect)
@@ -233,34 +237,13 @@ class MemoryGame:
             )
             screen.blit(label, label.get_rect(center=(rect.centerx, rect.centery + 20)))
 
-        if self.awaiting_confirmation:
-            self.draw_confirmation_box(screen)
-
         pygame.display.flip()
 
-    def draw_confirmation_box(self, screen):
-        box = pygame.Rect(SCREEN_WIDTH // 2 - 150, SCREEN_HEIGHT // 2 - 320, 300, 160)
-        pygame.draw.rect(screen, (250, 250, 250), box)
-        pygame.draw.rect(screen, (0, 0, 0), box, 3)
-        text = self.font.render("Is that correct?", True, (0, 0, 0))
-        screen.blit(text, (box.centerx - text.get_width() // 2, box.y + 20))
-
-        elapsed = time.time() - self.confirmation_start_time
-        remaining = max(0, self.confirmation_time_limit - elapsed)
-        timer = self.small_font.render(f"{remaining:.1f}s left", True, (150, 0, 0))
-        screen.blit(timer, (box.centerx - timer.get_width() // 2, box.y + 60))
-
-        self.yes_rect = pygame.Rect(box.x + 50, box.y + 90, 80, 40)
-        self.no_rect = pygame.Rect(box.x + 170, box.y + 90, 80, 40)
-        pygame.draw.rect(screen, (0, 200, 0), self.yes_rect)
-        pygame.draw.rect(screen, (200, 0, 0), self.no_rect)
-        screen.blit(self.font.render("Yes", True, (255, 255, 255)), self.yes_rect.move(20, 5))
-        screen.blit(self.font.render("No", True, (255, 255, 255)), self.no_rect.move(25, 5))
-
     # -------------------------------
     # Interaction Logic
     # -------------------------------
     def handle_click(self, pos):
+        # Menus: navigate by touch
         if self.state in ["mode", "continent", "americas", "difficulty", "pairs", "timer"]:
             for rect, option in self.buttons:
                 if rect.collidepoint(pos):
@@ -297,12 +280,11 @@ class MemoryGame:
                 self.setup_game()
                 self.state = "game"
             return
+
+        # Game: only open cards when we are NOT currently waiting for a clap
        elif self.state == "game":
-            if self.awaiting_confirmation:
-                if self.yes_rect.collidepoint(pos):
-                    self.confirmation_result = "yes"
-                elif self.no_rect.collidepoint(pos):
-                    self.confirmation_result = "no"
+            if self.awaiting_confirmation or len(self.selected) >= 2:
+                # While we wait for a clap, do not open any more cards
                 return
 
             for i, rect in enumerate(self.card_rects):
@@ -318,19 +300,49 @@ class MemoryGame:
         """Verarbeite die aktuellen OSC-Eingaben (Touch & Clap)."""
         global touch_x, touch_y, clap_trigger
 
-        # TOUCH: als Klick ins Spiel
+        # TOUCH: treat as a click in the game (menu or card)
         if touch_x is not None and touch_y is not None:
             pos = (int(touch_x), int(touch_y))
             self.handle_click(pos)
-            # Reset, damit es nicht mehrfach feuert
-            touch_x, touch_y = None, None
+            touch_x, touch_y = None, None  # Reset
 
-        # CLAP: bestätige "Yes", wenn wir gerade fragen
+        # CLAP: if 2 cards are open and we are waiting → evaluate the pair
         if clap_trigger:
-            if self.awaiting_confirmation and self.confirmation_result is None:
-                self.confirmation_result = "yes"
+            if self.awaiting_confirmation and len(self.selected) == 2:
+                self.resolve_pair()
             clap_trigger = False
 
+    # -------------------------------
+    # Evaluate the pair (after clap or timeout)
+    # -------------------------------
+    def resolve_pair(self):
+        """Check the current card pair and update scores / current player."""
+        if len(self.selected) != 2:
+            return
+
+        a, b = self.selected
+        text_a = self.cards[a]["text"]
+        text_b = self.cards[b]["text"]
+        is_match = self.pair_map.get(text_a) == text_b
+
+        if is_match:
+            self.matched[a] = self.matched[b] = True
+            self.scores[self.current_player] += 1
+            self.found_pairs += 1
+        else:
+            # Wrong pair: flip the cards back and deduct a point
+            self.revealed[a] = self.revealed[b] = False
+            self.scores[self.current_player] -= 1
+
+        # Reset for the next turn
+        self.selected = []
+        self.awaiting_confirmation = False
+        self.confirmation_start_time = None
+
+        # Switch players after every pair (as before)
+        if self.player_mode == 2:
+            self.current_player = 1 - self.current_player
+
     # -------------------------------
     # Game Logic
     # -------------------------------
@@ -338,47 +350,22 @@ class MemoryGame:
         if self.state != "game":
             return
 
+        # When two cards are face up → wait for a clap
         if len(self.selected) == 2 and not self.awaiting_confirmation:
-            a, b = self.selected
-            text_a = self.cards[a]["text"]
-            text_b = self.cards[b]["text"]
-            is_match = self.pair_map.get(text_a) == text_b
-            self.correct_answer_expected = "yes" if is_match else "no"
             self.awaiting_confirmation = True
             self.confirmation_start_time = time.time()
 
-        # Timeout logic
-        if self.awaiting_confirmation and time.time() - self.confirmation_start_time > self.confirmation_time_limit:
-            a, b = self.selected
-            self.revealed[a] = self.revealed[b] = False
-            self.awaiting_confirmation = False
-            self.selected = []
-            if self.player_mode == 2:
-                self.current_player = 1 - self.current_player
-            return
-
-        # Player response
-        if self.awaiting_confirmation and self.confirmation_result:
-            a, b = self.selected
-            expected = self.correct_answer_expected
-            player_correct = self.confirmation_result == expected
-
-            if player_correct:
-                if expected == "yes":
-                    self.matched[a] = self.matched[b] = True
-                    self.scores[self.current_player] += 1
-                    self.found_pairs += 1
-                else:
+        # Timeout: no clap for too long → flip the cards back, switch players
+        if self.awaiting_confirmation and self.confirmation_start_time is not None:
+            if time.time() - self.confirmation_start_time > self.confirmation_time_limit:
+                if len(self.selected) == 2:
+                    a, b = self.selected
                     self.revealed[a] = self.revealed[b] = False
-            else:
-                self.scores[self.current_player] -= 1
-                self.revealed[a] = self.revealed[b] = False
-
-            self.awaiting_confirmation = False
-            self.confirmation_result = None
-            self.selected = []
-            if self.player_mode == 2:
-                self.current_player = 1 - self.current_player
+                self.selected = []
+                self.awaiting_confirmation = False
+                self.confirmation_start_time = None
+                if self.player_mode == 2:
+                    self.current_player = 1 - self.current_player
 
     # -------------------------------
     # Winner Screen
     # -------------------------------
@@ -413,9 +400,10 @@ class MemoryGame:
         for event in pygame.event.get():
             if event.type == pygame.QUIT:
                 self.running = False
-            elif event.type == pygame.MOUSEBUTTONDOWN:
-                # Maus bleibt optional als Eingabe
-                self.handle_click(event.pos)
+            # Ignore the mouse completely (no more clicking)
+            # elif event.type == pygame.MOUSEBUTTONDOWN:
+            #     self.handle_click(event.pos)
+            # To use the mouse for debugging, uncomment the lines above.
 
         # 💡 HIER werden jetzt *jeden Frame* die OSC-Eingaben verarbeitet
         self.process_osc_input()
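Note: the game-side hunks above register osc_touch and osc_clap via disp.map(...) and read the module-level globals touch_x, touch_y and clap_trigger, but those handlers sit outside the changed lines and are not shown in this patch. A minimal sketch of what that receiving side presumably looks like, assuming the handlers simply store the latest values in those globals (the daemon-thread wiring is an assumption, not taken from the patch):

# Sketch only (not part of the patch): receiving side in Finished_Memory_Mouse.py
import threading
from pythonosc import dispatcher, osc_server

touch_x, touch_y = None, None   # latest touch position in game-window pixels
clap_trigger = False            # set on /clap, cleared again by process_osc_input()

def osc_touch(address, x, y):
    # gesture_input_osc.py sends "/touch" with [sx, sy]
    global touch_x, touch_y
    touch_x, touch_y = x, y

def osc_clap(address, *args):
    # "/clap" carries a single 1; only its arrival matters
    global clap_trigger
    clap_trigger = True

def start_osc_server():
    disp = dispatcher.Dispatcher()
    disp.map("/touch", osc_touch)
    disp.map("/clap", osc_clap)
    server = osc_server.ThreadingOSCUDPServer(("127.0.0.1", 5005), disp)
    print("🔊 OSC server läuft auf Port 5005")
    # Assumption: served from a daemon thread so the Pygame loop keeps running.
    threading.Thread(target=server.serve_forever, daemon=True).start()

python-osc calls each mapped handler with the OSC address first, followed by the message arguments, which is why osc_touch receives the two coordinates sent as [sx, sy].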
diff --git a/gesture_input_osc.py b/gesture_input_osc.py
index d27b4d5..ee6a16e 100644
--- a/gesture_input_osc.py
+++ b/gesture_input_osc.py
@@ -1,87 +1,112 @@
 import cv2
 import mediapipe as mp
-import json, time, math, numpy as np
+import numpy as np
+import math, time
 from pythonosc import udp_client
 
-# Verbindung zum Spiel herstellen
+# -------------------------------
+# SETTINGS
+# -------------------------------
+TOUCH_CAM_INDEX = 1    # your touch camera
+GESTURE_CAM_INDEX = 0  # your clap / gesture camera
+
+GAME_SCREEN_WIDTH = 900   # must match your Pygame window!
+GAME_SCREEN_HEIGHT = 600
+
 client = udp_client.SimpleUDPClient("127.0.0.1", 5005)
 
-# Hilfsfunktion zur Kalibrierung (wie zuvor)
-def map_to_screen(x, y, calib_points, screen_size=(800, 600)):
-    pts_src = np.array(calib_points, dtype=np.float32)
-    pts_dst = np.array([
-        [0, 0],
-        [screen_size[0], 0],
-        [screen_size[0], screen_size[1]],
-        [0, screen_size[1]]
-    ], dtype=np.float32)
-    M = cv2.getPerspectiveTransform(pts_src, pts_dst)
-    p = np.array([[[x, y]]], dtype=np.float32)
-    mapped = cv2.perspectiveTransform(p, M)[0][0]
-    return int(mapped[0]), int(mapped[1])
+# -------------------------------
+# MAIN FUNCTION
+# -------------------------------
+def run_gesture_input():
 
-def run_gesture_input(touch_cam_index=0, gesture_cam_index=1, screen_size=(800, 600)):
     mp_hands = mp.solutions.hands
     mp_draw = mp.solutions.drawing_utils
+
     hands_touch = mp_hands.Hands(max_num_hands=1, min_detection_confidence=0.6)
     hands_gesture = mp_hands.Hands(max_num_hands=2, min_detection_confidence=0.6)
 
-    # Kalibrierung laden
-    try:
-        calib_points = json.load(open("calibration.json"))
-        print("📄 Kalibrierung geladen:", calib_points)
-    except:
-        print("⚠️ Keine calibration.json – zuerst calibrate_touch.py ausführen!")
-        return
+    # Open the cameras
+    cam_touch = cv2.VideoCapture(TOUCH_CAM_INDEX)
+    cam_gesture = cv2.VideoCapture(GESTURE_CAM_INDEX)
+
+    if not cam_touch.isOpened():
+        print("❌ Touch camera could NOT be opened!")
+    else:
+        print(f"✅ Touch camera opened (index {TOUCH_CAM_INDEX})")
+
+    if not cam_gesture.isOpened():
+        print("❌ Gesture camera could NOT be opened!")
+    else:
+        print(f"✅ Gesture camera opened (index {GESTURE_CAM_INDEX})")
 
-    cam_touch = cv2.VideoCapture(touch_cam_index)
-    cam_gesture = cv2.VideoCapture(gesture_cam_index)
     prev_clap_time = 0
    clap_cooldown = 1.5
 
     while True:
+
         ok1, frame_touch = cam_touch.read()
-        ok2, frame_gest = cam_gesture.read()
+        ok2, frame_gest = cam_gesture.read()
+
         if not ok1 or not ok2:
-            print("⚠️ Kamera nicht verfügbar"); break
+            print("⚠️ One of the cameras is not delivering frames.")
+            break
 
         frame_touch = cv2.flip(frame_touch, 1)
         frame_gest = cv2.flip(frame_gest, 1)
 
-        # --- Touch-Erkennung ---
+        # ---------------------------------------
+        # TOUCH (index fingertip) without calibration
+        # ---------------------------------------
         rgb_t = cv2.cvtColor(frame_touch, cv2.COLOR_BGR2RGB)
         res_t = hands_touch.process(rgb_t)
-        h, w, _ = frame_touch.shape
+
+        th, tw, _ = frame_touch.shape
+
         if res_t.multi_hand_landmarks:
             lm = res_t.multi_hand_landmarks[0]
             mp_draw.draw_landmarks(frame_touch, lm, mp_hands.HAND_CONNECTIONS)
-            fx = int(lm.landmark[8].x * w)
-            fy = int(lm.landmark[8].y * h)
-            sx, sy = map_to_screen(fx, fy, calib_points, screen_size)
+
+            fx = int(lm.landmark[8].x * tw)
+            fy = int(lm.landmark[8].y * th)
+
+            # simple scaling to the game window
+            sx = int(fx * (GAME_SCREEN_WIDTH / tw))
+            sy = int(fy * (GAME_SCREEN_HEIGHT / th))
+
+            # Fingertip near the bottom of the frame? (touch)
             if lm.landmark[8].y > 0.8:
                 client.send_message("/touch", [sx, sy])
-            cv2.putText(frame_touch, f"Touch ({sx},{sy})", (40, 60),
-                        cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 255, 0), 2)
+            cv2.putText(frame_touch, f"Touch {sx},{sy}", (40, 60),
+                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
 
-        # --- Clap-Erkennung ---
+        # ---------------------------------------
+        # CLAP (two hands)
+        # ---------------------------------------
         rgb_g = cv2.cvtColor(frame_gest, cv2.COLOR_BGR2RGB)
         res_g = hands_gesture.process(rgb_g)
+
         gh, gw, _ = frame_gest.shape
+
         if res_g.multi_hand_landmarks and len(res_g.multi_hand_landmarks) == 2:
             h1, h2 = res_g.multi_hand_landmarks
+
             x1 = np.mean([p.x for p in h1.landmark]) * gw
             y1 = np.mean([p.y for p in h1.landmark]) * gh
             x2 = np.mean([p.x for p in h2.landmark]) * gw
             y2 = np.mean([p.y for p in h2.landmark]) * gh
+
             dist = math.hypot(x2 - x1, y2 - y1)
+
             if dist < 100 and (time.time() - prev_clap_time) > clap_cooldown:
-                client.send_message("/clap", 1)
                 prev_clap_time = time.time()
-                cv2.putText(frame_gest, "👏", (int(gw / 2) - 20, 80),
+                client.send_message("/clap", 1)
+                cv2.putText(frame_gest, "👏", (int(gw/2)-20, 80),
                             cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 255), 3)
 
         cv2.imshow("Touch-Cam", frame_touch)
         cv2.imshow("Gesture-Cam", frame_gest)
+
         if cv2.waitKey(5) & 0xFF == 27:
             break
 
@@ -89,5 +114,6 @@ def run_gesture_input(touch_cam_index=0, gesture_cam_index=1, screen_size=(800,
     cam_gesture.release()
     cv2.destroyAllWindows()
 
+
 if __name__ == "__main__":
     run_gesture_input()
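To check the game-side wiring without any camera attached, the same two OSC messages can be sent by hand with the python-osc client that gesture_input_osc.py already uses. A small, hypothetical test script, not part of this patch; the coordinates are placeholders and must be adjusted to where cards actually sit in the assumed 900x600 window:

# test_osc_send.py – hypothetical helper, not included in this patch.
# Sends the same messages gesture_input_osc.py would send, so the /touch and
# /clap handling in the game can be exercised without a camera.
import time
from pythonosc import udp_client

client = udp_client.SimpleUDPClient("127.0.0.1", 5005)  # same target as gesture_input_osc.py

# "Touch" two card positions (placeholder coordinates) ...
client.send_message("/touch", [220, 260])
time.sleep(0.5)
client.send_message("/touch", [450, 260])
time.sleep(0.5)

# ... then "clap" once so the game evaluates the open pair via resolve_pair().
client.send_message("/clap", 1)

Start Finished_Memory_Mouse.py first so its OSC server is listening on 127.0.0.1:5005, then run the script.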