Commit 3018cff

refactor: Adapt scripts to work with new data structure
Signed-off-by: Gilbert Tanner <gilberttanner.work@gmail.com>
1 parent 4a50520 commit 3018cff

8 files changed: 85 additions, 73 deletions

.gitignore

Lines changed: 2 additions & 1 deletion
@@ -160,4 +160,5 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
-.vscode/
+.vscode/
+recordings/

README.md

Lines changed: 7 additions & 3 deletions
@@ -42,21 +42,25 @@ Data collection can be done using the [`record_data.py` script](uMyo_python_tool

 Usage:
 ```bash
-usage: record_data.py [-h] [--output OUTPUT] [--num_sensors NUM_SENSORS] [-p PORT]
+usage: record_data.py [-h] [--output_folder OUTPUT_FOLDER] [--subject SUBJECT] [--gesture GESTURE] [--num_sensors NUM_SENSORS]
+                      [-p PORT]

 Record data from uMyo

 options:
   -h, --help            show this help message and exit
-  --output OUTPUT       Output file to save sensor data
+  --output_folder OUTPUT_FOLDER
+                        Output file to save sensor data
+  --subject SUBJECT     Name of the subject
+  --gesture GESTURE     Name of the gesture
   --num_sensors NUM_SENSORS
                         Number of sensors to record
   -p PORT, --port PORT  USB receiving station port
 ```

 Example
 ```bash
-python uMyo_python_tools/record_data.py --num_sensors 5 --output recordings/23_02_25/data/person_open.csv -p /dev/ttyUSB0
+python uMyo_python_tools/record_data.py --num_sensors 5 --output_folder recordings/session-24_02_25 --subject person --gesture fist
 ```

 ## Training

requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -7,3 +7,4 @@ scikit_learn
 scipy
 torch
 datasets
+huggingface-hub
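
The new `huggingface-hub` dependency presumably backs the remote-dataset path used elsewhere in this commit (`load_remote_dataset()` in the notebook). As a rough illustration only, not code from this repo, pulling a recordings dataset from the Hub could look like the sketch below; the repo id is a placeholder.

```python
from huggingface_hub import snapshot_download

# Placeholder repo id -- the actual dataset repository is not named in this commit.
local_path = snapshot_download(
    repo_id="your-user/uMyo-recordings",
    repo_type="dataset",
    local_dir="recordings",
)
print(local_path)  # folder the dataset snapshot was downloaded into
```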

training/scripts/gesture-recognition.ipynb

Lines changed: 25 additions & 12 deletions
@@ -94,7 +94,6 @@
 "GESTURES_TYPE = parameters.GESTURES_TYPE\n",
 "\n",
 "DATA_DIR = \"../../recordings/\"\n",
-"SENSOR_PLACEMENT = 0\n",
 "\n",
 "lf = parameters.LF\n",
 "hf = parameters.HF\n",
@@ -203,7 +202,7 @@
 "    df = dataset['raw'].to_pandas()\n",
 "    df.columns = range(df.shape[1])\n",
 "    df_fft = dataset['fft'].to_pandas()\n",
-"    df.columns = range(df.shape[1])\n",
+"    df_fft.columns = range(df.shape[1])\n",
 "    dfs[GESTURES.index(recording[\"gesture\"])].append(df)\n",
 "    if USE_FFT:\n",
 "        fft_dfs[GESTURES.index(recording[\"gesture\"])].append(df_fft)"
@@ -220,25 +219,25 @@
 "    return len(df.columns) >= num_sensors * NUM_READINGS\n",
 "\n",
 "def load_local_dataset(dfs=dfs, fft_dfs=fft_dfs):\n",
-"    # structure: recordings/date/data/*.csv\n",
 "    recordings = []\n",
-"    baseline_file = None\n",
 "    \n",
-"    pattern = re.compile(rf\"({'|'.join(TEST_SUBJECTS)})_({'|'.join(GESTURES)})\\d*_{SENSOR_PLACEMENT}.csv\")\n",
+"    pattern = re.compile(rf\"({'|'.join(TEST_SUBJECTS)})_({'|'.join(GESTURES)}).csv\")\n",
 "    \n",
 "    for date_dir, _, filenames in os.walk(DATA_DIR):\n",
-"        if date_dir.endswith('data') and any(day in date_dir for day in DAYS) and not 'static' in date_dir:\n",
+"        if \"raw\" in date_dir and any(day in date_dir for day in DAYS):\n",
 "            for filename in filenames:\n",
 "                if pattern.match(filename):\n",
 "                    gesture = next((gesture for gesture in GESTURES if gesture in filename), None)\n",
-"                    \n",
+"\n",
 "                    emg_data_file = os.path.join(date_dir, filename)\n",
+"                    fft_data_file = os.path.join(date_dir.replace('raw', 'fft'), f'fft_{filename}')\n",
+"                    imu_data_file = os.path.join(date_dir.replace('raw', 'imu'), f'imu_{filename}')\n",
 "                    if _check_sensors_present(emg_data_file):\n",
 "                        recordings.append({\n",
 "                            \"gesture\": gesture,\n",
 "                            \"raw_data_filepath\": emg_data_file,\n",
-"                            \"imu_data_filepath\": os.path.join(date_dir, f'imu_{filename}') if os.path.exists(os.path.join(date_dir, f'imu_{filename}')) else None,\n",
-"                            'fft_data_filepath': os.path.join(date_dir, f'fft_{filename}') if os.path.exists(os.path.join(date_dir, f'fft_{filename}')) else None,\n",
+"                            \"imu_data_filepath\": imu_data_file if os.path.exists(imu_data_file) else None,\n",
+"                            'fft_data_filepath': fft_data_file if os.path.exists(fft_data_file) else None,\n",
 "                        })\n",
 "                    else:\n",
 "                        print(f\"File {emg_data_file} has the wrong number of sensors. Should be {NUM_SENSORS}. Skipping.\")\n",
@@ -265,8 +264,22 @@
 "source": [
 "if DATASET_SOURCE == parameters.DATASET.HUGGING_FACE:\n",
 "    load_remote_dataset()\n",
+"elif DATASET_SOURCE == parameters.DATASET.LOCAL:\n",
+"    load_local_dataset()\n",
 "else:\n",
-"    load_local_dataset()"
+"    load_remote_dataset()\n",
+"    dfs_remote = dfs\n",
+"    fft_dfs_remote = fft_dfs\n",
+"    \n",
+"    # Reset global variables\n",
+"    dfs = [[] for _ in range(NUM_CLASSES)]\n",
+"    fft_dfs = [[] for _ in range(NUM_CLASSES)]\n",
+"    \n",
+"    load_local_dataset()\n",
+"    # Combine\n",
+"    for i in range(NUM_CLASSES):\n",
+"        dfs[i] = dfs_remote[i] + dfs[i]\n",
+"        fft_dfs[i] = fft_dfs_remote[i] + fft_dfs[i]"
 ]
 },
 {
@@ -1882,7 +1895,7 @@
 "sourceType": "notebook"
 },
 "kernelspec": {
-"display_name": "base",
+"display_name": "Python 3",
 "language": "python",
 "name": "python3"
 },
@@ -1896,7 +1909,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.12.3"
+"version": "3.11.4"
 }
 },
 "nbformat": 4,

uMyo_python_tools/offline_classifier.py

Lines changed: 17 additions & 31 deletions
@@ -18,48 +18,34 @@
 USE_FFT = True
 
 
-def check_sensors_present(file, num_sensors=NUM_SENSORS):
+def _check_sensors_present(file, num_sensors=NUM_SENSORS):
     df = pd.read_csv(file)
     return len(df.columns) >= num_sensors * NUM_READINGS
 
 
-def load_data(data_dir, test_subjects, gestures, window_size, sensor_placement=0):
+def load_data(data_dir, test_subjects, gestures, window_size):
     recordings = []
 
-    pattern = re.compile(
-        rf"({'|'.join(test_subjects)})_({'|'.join(gestures)})\d*_{sensor_placement}.csv"
-    )
-
+    pattern = re.compile(rf"({'|'.join(test_subjects)})_({'|'.join(gestures)}).csv")
+
     for date_dir, _, filenames in os.walk(data_dir):
-        if date_dir.endswith("data") and not "static" in date_dir:
+        if "raw" in date_dir:
             for filename in filenames:
                 if pattern.match(filename):
-                    gesture = next(
-                        (gesture for gesture in gestures if gesture in filename), None
-                    )
+                    gesture = next((gesture for gesture in gestures if gesture in filename), None)
 
                     emg_data_file = os.path.join(date_dir, filename)
-                    if check_sensors_present(emg_data_file):
-                        recordings.append(
-                            {
-                                "gesture": gesture,
-                                "raw_data_filepath": emg_data_file,
-                                "imu_data_filepath": (
-                                    os.path.join(date_dir, f"imu_{filename}")
-                                    if os.path.exists(
-                                        os.path.join(date_dir, f"imu_{filename}")
-                                    )
-                                    else None
-                                ),
-                                "fft_data_filepath": (
-                                    os.path.join(date_dir, f"fft_{filename}")
-                                    if os.path.exists(
-                                        os.path.join(date_dir, f"fft_{filename}")
-                                    )
-                                    else None
-                                ),
-                            }
-                        )
+                    fft_data_file = os.path.join(date_dir.replace('raw', 'fft'), f'fft_{filename}')
+                    imu_data_file = os.path.join(date_dir.replace('raw', 'imu'), f'imu_{filename}')
+                    if _check_sensors_present(emg_data_file):
+                        recordings.append({
+                            "gesture": gesture,
+                            "raw_data_filepath": emg_data_file,
+                            "imu_data_filepath": imu_data_file if os.path.exists(imu_data_file) else None,
+                            'fft_data_filepath': fft_data_file if os.path.exists(fft_data_file) else None,
+                        })
+                    else:
+                        print(f"File {emg_data_file} has the wrong number of sensors. Should be {NUM_SENSORS}. Skipping.")
 
     dfs = [[] for _ in range(len(gestures))]
     fft_dfs = [[] for _ in range(len(gestures))]
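
The filename pattern in `load_data` now expects the new `<subject>_<gesture>.csv` naming with no placement suffix. A quick, self-contained check; the subject and gesture values here are made up for illustration and stand in for `TEST_SUBJECTS` / `GESTURES` from `parameters.py`:

```python
import re

# Hypothetical values standing in for TEST_SUBJECTS / GESTURES.
test_subjects = ("person",)
gestures = ("baseline", "fist")

pattern = re.compile(rf"({'|'.join(test_subjects)})_({'|'.join(gestures)}).csv")

print(bool(pattern.match("person_fist.csv")))    # True  -- new naming: <subject>_<gesture>.csv
print(bool(pattern.match("person_fist_0.csv")))  # False -- old naming with the placement suffix
```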

uMyo_python_tools/parameters.py

Lines changed: 1 addition & 0 deletions
@@ -16,6 +16,7 @@
 class DATASET(Enum):
     HUGGING_FACE = 1
     LOCAL = 2
+    BOTH = 3
 
 GESTURES_TYPE = ("dynamic_gestures", "static_gestures")
 GESTURES = ("baseline", "fist", "peace", "up", "down", "lift")

uMyo_python_tools/record_data.py

Lines changed: 20 additions & 10 deletions
@@ -4,23 +4,32 @@
 
 import serial
 import umyo_parser
-
 from parameters import IDS
 
 MAX_DATA_LAG = 400
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Record data from uMyo")
     parser.add_argument(
-        "--output",
+        "--output_folder",
         type=str,
-        default="sensor_data.csv",
+        default="../recordings",
         help="Output file to save sensor data",
     )
+    parser.add_argument(
+        "--subject",
+        type=str,
+        help="Name of the subject",
+    )
+    parser.add_argument(
+        "--gesture",
+        type=str,
+        help="Name of the gesture",
+    )
     parser.add_argument(
         "--num_sensors",
         type=int,
-        default=4,
+        default=5,
         help="Number of sensors to record",
     )
     parser.add_argument("-p", "--port", type=str, default="COM7", help="USB receiving station port")
@@ -35,9 +44,10 @@
         timeout=0,
     )
 
-    output_path = pathlib.Path(args.output)
-    if output_path.parent.exists() is False:
-        output_path.parent.mkdir(parents=True)
+    output_folder = pathlib.Path(args.output_folder)
+    (output_folder / args.subject / 'raw').mkdir(parents=True, exist_ok=True)
+    (output_folder / args.subject / 'fft').mkdir(parents=True, exist_ok=True)
+    (output_folder / args.subject / 'imu').mkdir(parents=True, exist_ok=True)
 
     recordings = 0
     last_data_ids = [0] * args.num_sensors
@@ -93,11 +103,11 @@
             flattened_fft = [item for sublist in spectrum_data for item in sublist]
             flattened_imu = [item for sublist in imu_data for item in sublist]
 
-            with open(output_path, "a") as f:
+            with open(output_folder / args.subject / "raw" / f"{args.subject}_{args.gesture}.csv", "a") as f:
                 f.write(",".join(map(str, flattened_data)) + "\n")
-            with open(output_path.parent / ("fft_" + output_path.name), "a") as f:
+            with open(output_folder / args.subject / "fft" / f"fft_{args.subject}_{args.gesture}.csv", "a") as f:
                 f.write(",".join(map(str, flattened_fft)) + "\n")
-            with open(output_path.parent / ("imu_" + output_path.name), "a") as f:
+            with open(output_folder / args.subject / "imu" / f"imu_{args.subject}_{args.gesture}.csv", "a") as f:
                 f.write(",".join(map(str, flattened_imu)) + "\n")
             recordings += 1
             print("Recordings done: ", recordings)

uMyo_python_tools/visualize_data.py

Lines changed: 12 additions & 16 deletions
@@ -196,9 +196,9 @@ def preprocess_data(data):
     data = _remove_outliers(data)
     return data
 
-def show_and_get_baseline(folder, subject, placement):
+def show_and_get_baseline(folder, subject):
     global power_noise, power_noise_filtered, baseline
-    file = os.path.join(folder, subject + "_baseline_" + str(placement) + ".csv")
+    file = os.path.join(folder, f"{subject}_baseline.csv")
     df = pd.read_csv(file, header=None)
     sensor1 = np.array(df.iloc[:, :8]).flatten() #* (1/fs)
     sensor1 = sensor1 - np.mean(sensor1)
@@ -248,8 +248,8 @@ def show_and_get_baseline(folder, subject):
     plt.subplots_adjust(hspace=0.5)
     return np.vstack([sensor1, sensor2, sensor3, sensor4])
 
-def show_all_gestures(gestures_files, folder, subject, placement):
-    baseline = show_and_get_baseline(folder, subject, placement) # show neutral gesture for reference
+def show_all_gestures(gestures_files, folder, subject):
+    baseline = show_and_get_baseline(folder, subject) # show neutral gesture for reference
     gestures_data = [[] for _ in range(len(gestures_files))]
 
     for i, file in enumerate(gestures_files):
@@ -292,16 +292,16 @@ def show_all_gestures(gestures_files, folder, subject):
         sensor4 = preprocess_data(gestures_data[i][3])
 
         b1 = baseline[0]
-        b1 = np.append(b1, np.zeros(len(sensor1)-len(b1)))
+        b1 = np.append(b1, np.zeros(len(sensor1)-len(b1))) if len(sensor1) > len(b1) else b1[:len(sensor1)]
         sensor1 -= b1
         b2 = baseline[1]
-        b2 = np.append(b2, np.zeros(len(sensor2)-len(b2)))
+        b2 = np.append(b2, np.zeros(len(sensor2)-len(b2))) if len(sensor2) > len(b2) else b2[:len(sensor2)]
         sensor2 -= b2
         b3 = baseline[2]
-        b3 = np.append(b3, np.zeros(len(sensor3)-len(b3)))
+        b3 = np.append(b3, np.zeros(len(sensor3)-len(b3))) if len(sensor3) > len(b3) else b3[:len(sensor3)]
         sensor3 -= b3
         b4 = baseline[3]
-        b4 = np.append(b4, np.zeros(len(sensor4)-len(b4)))
+        b4 = np.append(b4, np.zeros(len(sensor4)-len(b4))) if len(sensor4) > len(b4) else b4[:len(sensor4)]
         sensor4 -= b4
 
         axs[4].set_title("Filtered sensor 1")
@@ -417,22 +417,18 @@ def show_psd(gesture_files):
 parser = argparse.ArgumentParser(description="Visualize data from uMyo")
 
 parser.add_argument(
-    "--folder", type=str, default="../recordings/01_12_24_initial_placement_test/data", help="Folder with recordings"
+    "--folder", type=str, default="../recordings/session-24_02_25", help="Folder with recordings"
 )
 parser.add_argument(
     "--subject", type=str, default="nad", help="Test subject name"
 )
-parser.add_argument(
-    "--placement", type=int, default=0, help="Placement version of the sensors"
-)
 
 args = parser.parse_args()
 
-folder = args.folder
+raw_folder = os.path.join(args.folder, args.subject, 'raw')
 subject = args.subject
-placement = args.placement
 gestures_files = [
-    os.path.join(folder, file) for file in os.listdir(folder) if file.endswith(str(placement)+".csv") and file.startswith(subject)
+    os.path.join(raw_folder, file) for file in os.listdir(raw_folder) if file.endswith(".csv") and file.startswith(subject)
 ]
 
-show_all_gestures(gestures_files, folder, subject, placement)
+show_all_gestures(gestures_files, raw_folder, subject)
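
The baseline-subtraction change above guards against a gesture recording being shorter than the baseline (previously `np.zeros()` would receive a negative length and raise). A minimal sketch of the pad-or-truncate behaviour, with a hypothetical helper name; the script inlines this logic for each of the four sensors:

```python
import numpy as np

def match_length(baseline: np.ndarray, signal: np.ndarray) -> np.ndarray:
    """Zero-pad or truncate `baseline` so it has the same length as `signal`."""
    if len(signal) > len(baseline):
        return np.append(baseline, np.zeros(len(signal) - len(baseline)))
    return baseline[: len(signal)]

print(match_length(np.ones(3), np.zeros(5)))  # [1. 1. 1. 0. 0.]  -- padded
print(match_length(np.ones(5), np.zeros(3)))  # [1. 1. 1.]        -- truncated
```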
