"""Hand-based screen-corner calibration.

Tracks the index fingertip (MediaPipe hand landmark 8) in the mirrored
webcam feed. The user points at each of the four screen corners in turn;
lowering the finger (normalized fingertip y > 0.8, i.e. bottom of the
frame) confirms the current corner. After four corners the pixel
coordinates are written to ``calibration.json`` and the script exits.
ESC quits at any time.
"""
import json

import cv2
import mediapipe as mp

mp_hands = mp.solutions.hands
mp_draw = mp.solutions.drawing_utils
# One hand is enough for pointing; 0.6 keeps detection reasonably stable.
hands = mp_hands.Hands(max_num_hands=1, min_detection_confidence=0.6)
cap = cv2.VideoCapture(0)

points = []  # collected corner pixel coordinates, in `labels` order
labels = ["oben-links", "oben-rechts", "unten-rechts", "unten-links"]
print("📏 Zeige nacheinander jede Ecke (Finger runter zum Bestätigen)")

try:
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        # Mirror so on-screen movement matches the user's hand movement.
        frame = cv2.flip(frame, 1)
        rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        res = hands.process(rgb)
        h, w, _ = frame.shape
        if res.multi_hand_landmarks:
            lm = res.multi_hand_landmarks[0]
            mp_draw.draw_landmarks(frame, lm, mp_hands.HAND_CONNECTIONS)
            # Landmark 8 = index fingertip; convert normalized coords to pixels.
            x = int(lm.landmark[8].x * w)
            y = int(lm.landmark[8].y * h)
            cv2.circle(frame, (x, y), 10, (0, 255, 255), -1)
            if len(points) < 4:
                cv2.putText(frame, f"Zeige {labels[len(points)]}", (40, 40),
                            cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
                # Finger lowered into the bottom 20% of the frame => confirm.
                if lm.landmark[8].y > 0.8:
                    print(f"✅ {labels[len(points)]} gespeichert")
                    points.append((x, y))
                    # Pause so one dip doesn't register as several corners.
                    cv2.waitKey(1000)
                    if len(points) == 4:
                        # `with` ensures the file is flushed and closed
                        # (the original leaked the handle from open()).
                        with open("calibration.json", "w") as f:
                            json.dump(points, f)
                        print("📄 calibration.json gespeichert ✅")
                        break
        cv2.imshow("Kalibrierung", frame)
        if cv2.waitKey(5) & 0xFF == 27:  # ESC quits without saving
            break
finally:
    # Release resources even if an exception interrupts the loop.
    hands.close()
    cap.release()
    cv2.destroyAllWindows()