
localization.py 4.3KB

import numpy as np
import os


class Localization:
    def __init__(self):
        self.scale_factor = 0.003
        self.k_nearest = 15
        #self.table = np.load('fingerprinting_tables/Julian_1cm_precision_corrected_antenas.npy')  # corrected table made by Ihm (No ferrite)
        #self.table = np.load('fingerprinting_tables/SmallManualFingerprint_Ferrite.npy')  # manual fingerprinting table (Ferrite) [A bit off]
        #self.table = np.load('fingerprinting_tables/Julian_BThesis_table2_switchedAnt5&6 and 7&8.npy')  # Switched Ant5<->6 and 7<->8 in Excel (No Ferrite) [Does not work!]
        self.table = np.load('fingerprinting_tables/Julian_THIS_ONE_IS_IT.npy')  # 2cm precision, this definitely worked (No ferrite)
        self.data = np.load('recorded_data/current_recording.npy')
    def localize(self, fv):
        """Input:  - fv [type = numpy array (may also be a list) [15, 1]]:
                     A feature vector containing the antenna voltages of one sample in the following order:
                     frame1, frame2, ... frame8, main1, main2, ... main8
                   - scale_factor [type = float]:
                     Multiplied with fv to adjust for the difference in constants between the real world
                     and the measurements (resistance, coil windings, amplification factors, ...).
                   - k_nearest [type = int]:
                     How many neighbours the k-nearest localization takes into account when averaging
                     the position at the end.
           Output: - position [type = np.array[3]]:
                     The estimated position (x, y, z) of the object in this sample.
        """
        #print("fv =", fv)
        feature_vector = fv * self.scale_factor
        # Squared differences between the scaled sample and the feature columns of every table entry
        squared_differences = np.square(self.table[:, 9:] - feature_vector)
        # Squared Euclidean distance of the sample to every table entry
        euclidean_distances = np.sum(squared_differences, 1)
        order = np.argsort(euclidean_distances)
        minDist = np.sqrt(euclidean_distances[order[0]])
        maxDist = np.sqrt(euclidean_distances[order[self.k_nearest - 1]])
        dsum = 0.0
        position = np.array([0.0, 0.0, 0.0])
        for idx in order[:self.k_nearest]:
            # Linear weight: 1 for the nearest neighbour, 0 for the k-th nearest
            d = (maxDist - np.sqrt(euclidean_distances[idx])) / (maxDist - minDist)
            dsum += d
            position += self.table[idx][:3] * d
        position /= dsum
        #print(position)
        return position
    def localize_all_samples(self, input_path, output_path):
        # Load data
        data = np.load('recorded_data/' + input_path + ".npy")
        # Just user feedback
        print("Start calculating positions from: recorded_data/" + input_path + ".npy")
        print("With: scale_factor=", self.scale_factor, ", k_nearest=", self.k_nearest, ", every 10th sample")
        data = data[::10, :]  # taking only every 10th sample
        # Normal localization
        positions = np.empty((np.shape(data)[0], 3), dtype=float)
        for i in range(np.shape(data)[0]):
            fv = data[i, 3:]
            positions[i, :] = self.localize(fv)
            #print("loc progress=", i)
        # Save result
        np.save('calculated_positions/' + output_path, positions)
        print("Saved result in: calculated_positions/" + output_path + ".npy")
    def localize_averaged_samples(self, input_path, output_path):
        # Load data
        data = np.load('recorded_data/' + input_path + ".npy")
        # Just user feedback
        print("Start calculating positions from: recorded_data/" + input_path + ".npy")
        print("With: scale_factor=", self.scale_factor, ", k_nearest=", self.k_nearest)
        # Average the recorded samples before localization
        mean_data = np.zeros(np.shape(data))
        mean_data[:, :] = np.mean(data, axis=0)  # average all recorded samples
        fv = mean_data[0, 3:]  # all rows of mean_data are identical now, so just take the one at index 0
        positions = self.localize(fv)  # we get a single position out
        print("Averaged position: x=", positions[0], ", y=", positions[1], ", z=", positions[2])
        # Save result
        np.save('calculated_positions/' + output_path, positions)
        print("Saved result in: calculated_positions/" + output_path + ".npy")