Change to game and gesture recognition

Elise Angela Bwemba 2025-12-03 13:21:34 +00:00
parent 9b6bbd2342
commit 316a2be7f0
2 changed files with 114 additions and 33 deletions

View File

@@ -30,7 +30,7 @@ def start_osc_server():
# Dein gesture_input_osc sendet an 127.0.0.1:5005 → hier auch 5005
server = osc_server.ThreadingOSCUDPServer(("127.0.0.1", 5005), disp)
- print("🔊 OSC server läuft auf Port 5005")
+ print(" OSC server läuft auf Port 5005")
server.serve_forever()
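For context, the /touch and /clap messages that gesture_input_osc sends to 127.0.0.1:5005 are picked up by the dispatcher passed into this ThreadingOSCUDPServer. A minimal sketch of that wiring with python-osc, where the handler names and the queue hand-off are illustrative assumptions rather than the project's actual code:

import queue
from pythonosc import dispatcher, osc_server

osc_events = queue.Queue()          # hand-off from the OSC thread to the game loop (assumed)

def on_touch(address, x, y):
    # /touch carries the already scaled screen coordinates [sx, sy]
    osc_events.put(("touch", int(x), int(y)))

def on_clap(address, value):
    osc_events.put(("clap",))

disp = dispatcher.Dispatcher()
disp.map("/touch", on_touch)
disp.map("/clap", on_clap)

server = osc_server.ThreadingOSCUDPServer(("127.0.0.1", 5005), disp)
server.serve_forever()              # typically run in a background thread, as in start_osc_server()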
@@ -68,10 +68,10 @@ def build_card_back(image_path, size):
img = pygame.transform.smoothscale(img, (w, h))
surf.blit(img, (0, 0))
except pygame.error as e:
- print(f"⚠️ Error loading {image_path}: {e}")
+ print(f" Error loading {image_path}: {e}")
surf.fill((127, 127, 200))
else:
- print(f"⚠️ Image not found: {image_path}")
+ print(f"Image not found: {image_path}")
surf.fill((127, 127, 200))
return surf
@@ -115,7 +115,7 @@ class MemoryGame:
# -------------------------------
def load_cards(self, filename):
if not os.path.exists(filename):
- print(f"⚠️ File not found: {filename}")
+ print(f" File not found: {filename}")
return []
pairs = []
with open(filename, "r", encoding="utf-8") as f:
@@ -140,7 +140,7 @@ class MemoryGame:
self.deck += self.load_cards(base + "-Dependent.txt")
if not self.deck:
- print("⚠️ No cards loaded. Check your text files.")
+ print("No cards loaded. Check your text files.")
sys.exit()
random.shuffle(self.deck)
@@ -243,6 +243,10 @@ class MemoryGame:
# Interaction Logic
# -------------------------------
def handle_click(self, pos):
+ # ✅ Blockiere Touch über dem Spielfeld (Menü-Bereich oben)
+ if self.state == "game" and pos[1] < 80:
+ return
# Menüs: mit Touch durchklicken
if self.state in ["mode", "continent", "americas", "difficulty", "pairs", "timer"]:
for rect, option in self.buttons:
@@ -372,14 +376,14 @@ class MemoryGame:
# -------------------------------
def display_winner(self, screen):
if self.player_mode == 1:
- text = f"🏆 Final Score: {self.scores[0]}"
+ text = f" Final Score: {self.scores[0]}"
else:
if self.scores[0] > self.scores[1]:
- text = "🏆 Player 1 Wins!"
+ text = " Player 1 Wins!"
elif self.scores[1] > self.scores[0]:
- text = "🏆 Player 2 Wins!"
+ text = " Player 2 Wins!"
else:
- text = "🤝 Draw!"
+ text = " Draw!"
win_text = self.font.render(text, True, (255, 255, 0))
rect = win_text.get_rect(center=(SCREEN_WIDTH // 2, SCREEN_HEIGHT // 2))
screen.blit(win_text, rect)
@@ -405,7 +409,7 @@ class MemoryGame:
# self.handle_click(event.pos)
# Wenn du zum Debuggen Maus willst, obige Zeilen entkommentieren.
- # 💡 HIER werden jetzt *jeden Frame* die OSC-Eingaben verarbeitet
+ # HIER werden jetzt *jeden Frame* die OSC-Eingaben verarbeitet
self.process_osc_input()
# Menü + Spiel zeichnen
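The comment above stresses that the OSC input is now processed once per frame via self.process_osc_input(). That method is not part of this hunk; a hedged sketch of one common pattern, draining the queue filled by the handlers sketched earlier (the queue name and the handle_clap helper are assumptions):

def process_osc_input(self):
    # consume everything that arrived since the last frame
    while not osc_events.empty():
        event = osc_events.get_nowait()
        if event[0] == "touch":
            self.handle_click((event[1], event[2]))   # reuses the click logic above
        elif event[0] == "clap":
            self.handle_clap()                        # hypothetical helper for the /clap gesture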

View File

@@ -7,18 +7,29 @@ from pythonosc import udp_client
# -------------------------------
# SETTINGS
# -------------------------------
- TOUCH_CAM_INDEX = 1 # deine Touch-Kamera
- GESTURE_CAM_INDEX = 0 # deine Clap / Gesture Kamera
+ TOUCH_CAM_INDEX = 0 # deine Touch-Kamera
+ GESTURE_CAM_INDEX = 2 # deine Clap / Gesture Kamera
GAME_SCREEN_WIDTH = 900 # muss zu deinem Pygame-Fenster passen!
GAME_SCREEN_HEIGHT = 600
+ # Wie "streng" ist der Touch?
+ STILL_REQUIRED = 1.0 # Sekunden, die der Finger fast still sein muss
+ MOVE_TOLERANCE = 25 # maximal erlaubte Bewegung (Pixel)
# OSC Client → sendet ans Spiel
client = udp_client.SimpleUDPClient("127.0.0.1", 5005)
# -------------------------------
# MAIN FUNCTION
# -------------------------------
+ # Globale Zustände
+ last_finger_pos = None
+ finger_still_start = None
+ prev_touch_time = 0.0
+ prev_clap_time = 0.0
def run_gesture_input():
+ global last_finger_pos, finger_still_start
+ global prev_touch_time, prev_clap_time
mp_hands = mp.solutions.hands
mp_draw = mp.solutions.drawing_utils
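Further down, the fingertip position is scaled from camera pixels into this 900 x 600 game window with sx = fx * (GAME_SCREEN_WIDTH / tw) and sy = fy * (GAME_SCREEN_HEIGHT / th). A small worked example, assuming a 640 x 480 camera frame for illustration (the real tw and th come from frame_touch.shape):

GAME_SCREEN_WIDTH, GAME_SCREEN_HEIGHT = 900, 600   # from the settings above
tw, th = 640, 480    # assumed camera frame size, illustration only
fx, fy = 320, 240    # fingertip in camera pixels (frame centre)
sx = int(fx * (GAME_SCREEN_WIDTH / tw))    # 320 * (900 / 640) = 450
sy = int(fy * (GAME_SCREEN_HEIGHT / th))   # 240 * (600 / 480) = 300
print(sx, sy)        # -> 450 300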
@@ -31,61 +42,126 @@ def run_gesture_input():
cam_gesture = cv2.VideoCapture(GESTURE_CAM_INDEX)
if not cam_touch.isOpened():
print(" Touch-Kamera konnte NICHT geöffnet werden!")
print(" Touch-Kamera konnte NICHT geöffnet werden!")
else:
print(f"Touch-Kamera geöffnet (Index {TOUCH_CAM_INDEX})")
print(f"Touch-Kamera geöffnet (Index {TOUCH_CAM_INDEX})")
if not cam_gesture.isOpened():
print(" Gesture-Kamera konnte NICHT geöffnet werden!")
print(" Gesture-Kamera konnte NICHT geöffnet werden!")
else:
print(f"Gesture-Kamera geöffnet (Index {GESTURE_CAM_INDEX})")
print(f"Gesture-Kamera geöffnet (Index {GESTURE_CAM_INDEX})")
prev_clap_time = 0
clap_cooldown = 1.5
while True:
ok1, frame_touch = cam_touch.read()
ok2, frame_gest = cam_gesture.read()
if not ok1 or not ok2:
print("⚠️ Eine Kamera liefert kein Bild.")
print(" Eine Kamera liefert kein Bild.")
break
frame_touch = cv2.flip(frame_touch, 1)
- frame_gest = cv2.flip(frame_gest, 1)
+ frame_gest = cv2.flip(frame_gest, 1)
# ---------------------------------------
- # TOUCH (Zeigefinger) ohne Kalibrierung
+ # TOUCH (Zeigefinger) mit STILLSTAND
# ---------------------------------------
rgb_t = cv2.cvtColor(frame_touch, cv2.COLOR_BGR2RGB)
res_t = hands_touch.process(rgb_t)
- th, tw, _ = frame_touch.shape
+ th, tw, _ = frame_touch.shape #h= Höhe, w = Breite
if res_t.multi_hand_landmarks:
lm = res_t.multi_hand_landmarks[0]
mp_draw.draw_landmarks(frame_touch, lm, mp_hands.HAND_CONNECTIONS)
# Finger muss nach UNTEN zeigen (8 tiefer als 5)
if lm.landmark[8].y < lm.landmark[5].y:
last_finger_pos = None
finger_still_start = None
continue
fx = int(lm.landmark[8].x * tw)
fy = int(lm.landmark[8].y * th)
# einfache Skalierung auf dein Spiel-Fenster
sx = int(fx * (GAME_SCREEN_WIDTH / tw))
sy = int(fy * (GAME_SCREEN_HEIGHT / th))
# Finger unten? (Touch)
if lm.landmark[8].y > 0.8:
client.send_message("/touch", [sx, sy])
cv2.putText(frame_touch, f"Touch {sx},{sy}", (40, 60),
cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
now = time.time()
current_pos = (fx, fy)
# erster Messpunkt
if last_finger_pos is None:
#erster Punkt
last_finger_pos = current_pos
finger_still_start = now
else:
dist = math.hypot(current_pos[0] - last_finger_pos[0],
current_pos[1] - last_finger_pos[1])
if dist < MOVE_TOLERANCE:
#Finger ist "ruhig"
if finger_still_start is None:
finger_still_start = now
else:
still_time = now - finger_still_start
if still_time >= STILL_REQUIRED and (now - prev_touch_time) > 0.5:
client.send_message("/touch", [sx, sy])
print(f"👉 STABILER TOUCH bei {sx},{sy} nach {still_time:.2f}s")
prev_touch_time = now
finger_still_start = None
else:
finger_still_start = now
# IMMER aktualisieren
last_finger_pos = current_pos
# Finger visualisieren
cv2.circle(frame_touch, (fx, fy), 10, (0, 255, 0), -1)
cv2.putText(frame_touch, f"{sx},{sy}", (fx + 10, fy - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
now = time.time()
current_pos = (fx, fy)
if last_finger_pos is None:
# erster Punkt
last_finger_pos = current_pos
finger_still_start = now
else:
dist = math.hypot(current_pos[0] - last_finger_pos[0],
current_pos[1] - last_finger_pos[1])
if dist < MOVE_TOLERANCE:
# Finger ist "ruhig"
if finger_still_start is None:
finger_still_start = now
else:
still_time = now - finger_still_start
if still_time >= STILL_REQUIRED and (now - prev_touch_time) > 0.5:
# JETZT: stabiler Touch → sende genau 1 Klick
client.send_message("/touch", [sx, sy])
print(f"STABILER TOUCH bei {sx},{sy} nach {still_time:.2f}s")
print("SCREEN COORD:", sx, sy)
prev_touch_time = now
# reset, damit der nächste Touch erst nach neuer Bewegung kommt
finger_still_start = None
else:
# Finger hat sich deutlich bewegt → Timer neu starten
finger_still_start = now
last_finger_pos = current_pos
else:
# keine Hand → Reset
last_finger_pos = None
finger_still_start = None
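The touch handling above only fires once the fingertip has stayed within MOVE_TOLERANCE pixels for STILL_REQUIRED seconds, and at most one touch is accepted per 0.5 s. The same debounce, distilled into a small stand-alone helper for reference (a sketch that mirrors the logic in this diff, not the file's actual structure):

import math
import time

STILL_REQUIRED = 1.0   # seconds the finger has to stay almost still
MOVE_TOLERANCE = 25    # maximum allowed movement in pixels
RETRIGGER_GAP = 0.5    # minimum gap between two accepted touches (hard-coded 0.5 above)

last_pos = None
still_start = None
last_touch_time = 0.0

def stable_touch(pos):
    """Feed the current fingertip position; returns True when a stable touch should fire."""
    global last_pos, still_start, last_touch_time
    now = time.time()
    fired = False
    if last_pos is not None and math.hypot(pos[0] - last_pos[0], pos[1] - last_pos[1]) < MOVE_TOLERANCE:
        if still_start is None:
            still_start = now                      # finger just became still
        elif now - still_start >= STILL_REQUIRED and now - last_touch_time > RETRIGGER_GAP:
            last_touch_time = now
            still_start = None                     # require a fresh still period for the next touch
            fired = True
    else:
        still_start = now                          # first sample or clear movement: restart the timer
    last_pos = pos
    return fired

Losing the hand corresponds to resetting last_pos and still_start to None, exactly as in the else branch above.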
# ---------------------------------------
# CLAP (zwei Hände)
# ---------------------------------------
rgb_g = cv2.cvtColor(frame_gest, cv2.COLOR_BGR2RGB)
res_g = hands_gesture.process(rgb_g)
gh, gw, _ = frame_gest.shape
if res_g.multi_hand_landmarks and len(res_g.multi_hand_landmarks) == 2:
@@ -101,6 +177,7 @@ def run_gesture_input():
if dist < 100 and (time.time() - prev_clap_time) > clap_cooldown:
prev_clap_time = time.time()
client.send_message("/clap", 1)
print("👏 SEND /clap")
cv2.putText(frame_gest, "👏", (int(gw/2)-20, 80),
cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 255), 3)
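The clap branch sends /clap once both detected hands come within 100 px of each other and the 1.5 s cooldown has passed. A hedged sketch of that distance test; the reference point per hand (here landmark 9, the middle-finger base) is an assumption, since its computation is not visible in this hunk:

import math
import time

CLAP_DISTANCE_PX = 100   # threshold used in the diff
CLAP_COOLDOWN = 1.5      # seconds, as set above
prev_clap_time = 0.0

def check_clap(multi_hand_landmarks, frame_w, frame_h):
    """Return True when two hands are close enough together and the cooldown has elapsed."""
    global prev_clap_time
    if not multi_hand_landmarks or len(multi_hand_landmarks) != 2:
        return False
    a = multi_hand_landmarks[0].landmark[9]        # assumed reference point on hand 1
    b = multi_hand_landmarks[1].landmark[9]        # assumed reference point on hand 2
    dist = math.hypot((a.x - b.x) * frame_w, (a.y - b.y) * frame_h)
    now = time.time()
    if dist < CLAP_DISTANCE_PX and now - prev_clap_time > CLAP_COOLDOWN:
        prev_clap_time = now
        return True
    return False

On a hit the caller would send client.send_message("/clap", 1), which is what the game's /clap handler reacts to.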