
Commit be59da9

clean the package space
1 parent 19e3eae commit be59da9

22 files changed: +261 -134 lines changed

README.md

+5 -1

@@ -1,5 +1,9 @@

# Interaction ProMPs

Interaction ProMPs generate a collaborative robot motion based on a prediction made from a set of partial human-motion observations.

![generalization](./docs/media/generalization.png "generalization.png")

The approach also works in multi-task scenarios. This package uses EMG signals to enhance task recognition.

We are not yet sure whether the EMG signals are correlated with the robot motion; we will confirm this later.

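For context, the prediction step above is the standard ProMP conditioning operation: a Gaussian prior over trajectory weights is conditioned on the partially observed human motion, and the collaborative robot motion is then generated from the conditioned distribution. The snippet below is only a minimal NumPy sketch of that step, not code from this package; every name in it (`mean_w`, `cov_w`, `Phi_obs`, `y_obs`, `sigma_obs`) is a hypothetical placeholder.

```python
# Minimal sketch (not part of this package): conditioning a ProMP weight
# prior on a partial observation of the human motion.
import numpy as np

num_basis = 10
mean_w = np.zeros(num_basis)                 # prior mean of the weight vector
cov_w = np.eye(num_basis)                    # prior covariance of the weights
Phi_obs = np.random.random((5, num_basis))   # basis functions at the observed time steps
y_obs = np.random.random(5)                  # partially observed human trajectory
sigma_obs = 0.01                             # observation noise variance

# Gaussian conditioning: posterior over weights given the partial observation
S = sigma_obs * np.eye(len(y_obs)) + Phi_obs.dot(cov_w).dot(Phi_obs.T)
K = cov_w.dot(Phi_obs.T).dot(np.linalg.inv(S))
mean_post = mean_w + K.dot(y_obs - Phi_obs.dot(mean_w))
cov_post = cov_w - K.dot(Phi_obs).dot(cov_w)

# The robot trajectory is then generated from the conditioned weight distribution.
```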
datasets/guilherme_datasets/main.m

+60
@@ -0,0 +1,60 @@

% Read and plot data from human-robot collaboration
% taskProMP{1}: task 1
% taskProMP{2}: task 2
% taskProMP{3}: task 3
%
% Trajectories of both human and robot are time aligned.
% Each column is one degree of freedom.
% Each row is a time step.
% data{1}(:,1:3)   xyz coordinates of the human wrist recorded via optical
%                  marker tracking
% data{1}(:,4:10)  joint angles of the KUKA lightweight arm, starting from
%                  the base towards the end effector
% data{1}(:,11:13) xyz coordinates of the robot end-effector

function main()

clear; clc; close all;
dbstop if error;

load('taskProMP.mat');

number_of_tasks = numel(taskProMP);

% plot every demonstration of every task
for j=1:number_of_tasks
    plot_data(taskProMP{j});
end

% export every demonstration of every task as csv
for j=1:number_of_tasks
    gen_csv(taskProMP{j}, j);
end

end


function plot_data(data)
h1 = figure; axis equal; grid on; hold on;
xlabel 'x (m)';
ylabel 'y (m)';
zlabel 'z (m)';
view([-1 1 1])
title('Blue: Human wrist. Red: robot hand')

number_of_training_data = numel(data);
for k=1:number_of_training_data
    plot3(data{k}(:,1), data{k}(:,2), data{k}(:,3), 'b');
    plot3(data{k}(:,11), data{k}(:,12), data{k}(:,13), 'r');
end
end

% created by Longxin to output csv files that can be read by Python
function gen_csv(data, task_idx)
number_of_training_data = numel(data);
for k=1:number_of_training_data
    % keep the human wrist xyz (columns 1:3) and robot end-effector xyz (columns 11:13)
    data_temp = [data{k}(:,1), data{k}(:,2), data{k}(:,3), data{k}(:,11), data{k}(:,12), data{k}(:,13)];
    file_name = ['./csv_datasets/', 'task', num2str(task_idx), '_', num2str(k)];
    csvwrite(file_name, data_temp);
end
end
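As a usage note, gen_csv above writes each demonstration as a six-column, comma-separated file named ./csv_datasets/task<j>_<k> (no extension is appended). Below is a minimal sketch of reading one such file back in Python, assuming the column layout produced by gen_csv; it is not a file from this commit.

```python
# Minimal sketch (not a file from this commit): read one csv written by gen_csv.
import numpy as np

traj = np.loadtxt('./csv_datasets/task1_1', delimiter=',')  # path as hard-coded in main.m
human_wrist_xyz = traj[:, 0:3]  # columns 1-3: human wrist position
robot_ee_xyz = traj[:, 3:6]     # columns 4-6: robot end-effector position
print(traj.shape)
```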
bag_to_csv.sh

@@ -0,0 +1,49 @@

#!/usr/bin/env bash

function usage() {
    echo
    echo "Usage:"
    echo "bag_to_csv.sh <input bag> <output directory>"
    echo
    echo "<input bag> - the input bag file"
    echo "<output directory> - the directory to output the csv in"
    echo
    echo "e.g."
    echo "bag_to_csv.sh input.bag output_dir"
    echo
}

if [[ $# -ne 2 ]]; then
    echo "Invalid parameters"
    usage
    exit 1
fi

# make the output directory, named after the bag file (extension stripped)
output_dir=$2/$(basename "$1" | sed 's/\..*$//')
mkdir -p "$output_dir"

# find the line where the topic list starts
topics_line=$(rosbag info "$1" | grep -n "topics:" | cut -f1 -d:)

# get the number of lines in the rosbag info output
end_line=$(rosbag info "$1" | wc -l)

# get the topic on the "topics:" line itself
topics[0]=$(rosbag info "$1" | sed -n "${topics_line},${topics_line} p" | awk '{ print $2; }')

# get the remaining topics (one per line until the end of the output)
for (( i=$topics_line + 1; i <= $end_line; i++ )); do
    topics[$i]=$(rosbag info "$1" | sed -n "$i,$i p" | awk '{ print $1; }')
done

# dump each topic of the bag into its own csv file
for topic in ${topics[@]}; do
    # create the csv output directory, mirroring the topic namespace
    dir_name=$(dirname $topic)
    base_name=$(basename $topic)

    mkdir -p "$output_dir/$dir_name"

    echo "Creating csv: $output_dir/$dir_name/$base_name.csv"
    rostopic echo -b "$1" -p "$topic" > "$output_dir/$dir_name/$base_name.csv"
done
@@ -0,0 +1,23 @@

#!/usr/bin/env python
import glob
import os
import subprocess

# paths of this script and of the raw data set
file_path = os.path.dirname(__file__)
datasets_raw_path = '../datasets/tape/raw'


def main():
    # run the bag_to_csv.sh script for each rosbag file in the datasets path
    task_path_list = glob.glob(os.path.join(datasets_raw_path, "*"))
    for task_path in task_path_list:
        demo_path_list = glob.glob(os.path.join(task_path, "*.bag"))
        for demo_path in demo_path_list:
            subprocess.Popen([os.path.join(file_path, './bag_to_csv.sh') + ' ' +
                              demo_path + ' ' +
                              os.path.join(task_path, 'csv')], shell=True)  # non-blocking call
            # call .wait() on the Popen object above for the blocking variant

if __name__ == '__main__':
    main()
@@ -0,0 +1,32 @@

#!/usr/bin/python
import numpy as np
import pandas as pd
import glob
import os
from sklearn.externals import joblib
import scipy.signal as signal
from scipy.ndimage.filters import gaussian_filter1d
import scipy.io as sio


# load the MATLAB data set described in main.m
data_mat = sio.loadmat('./../taskProMP.mat')
data = data_mat['taskProMP'][0]

datasets_raw = []
for task_idx in range(len(data)):
    demo_temp = []
    for demo_idx in range(len(data[task_idx])):
        # the info of interest: convert the object arrays to float
        demo_temp.append({
            'stamp': demo_idx,
            'left_hand': data[task_idx][demo_idx][0][:, 0:3].astype(float),     # human wrist xyz (MATLAB columns 1:3)
            'left_joints': data[task_idx][demo_idx][0][:, 10:13].astype(float)  # robot end-effector xyz (MATLAB columns 11:13)
        })
    datasets_raw.append(demo_temp)

print('Saving the datasets as pkl ...')
joblib.dump(datasets_raw, '../datasets_raw.pkl')
@@ -0,0 +1,25 @@

#!/usr/bin/python
import numpy as np
import pandas as pd
import glob
import os
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

datasets_path = '../datasets/raw/rect/2018-01-18-10-12-41/multiModal_states.csv'
data_csv = pd.read_csv(datasets_path)
# select the three columns of the multimodal states csv that are plotted as xyz below
data = data_csv.values[:, 207:210].astype(float)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

ax.scatter(data[:, 0], data[:, 1], data[:, 2], c='r', marker='o')
plt.xlim([-0.5, 1.5])
plt.ylim([-0.5, 1.5])
ax.set_zlim(-0.5, 1.5)
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')

plt.show()
@@ -0,0 +1,29 @@

#!/usr/bin/python
import numpy as np
from scipy.stats import multivariate_normal as mvn
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D


# fit a 2-D Gaussian to random samples and plot its pdf over a grid
sample = np.random.random((2, 50))*100

cov = np.cov(sample)
mean = np.mean(sample, 1)
# 20 x 20 grid covering +-10 around the sample mean
x, y = np.mgrid[(mean[0]-10):(mean[0]+10):1.0, (mean[1]-10):(mean[1]+10):1.0]

x = x.reshape(400, 1)[:, 0]
y = y.reshape(400, 1)[:, 0]

test = [[x[i], y[i]] for i in range(400)]

h = mvn.pdf(test, mean, cov)

fig = plt.figure()
ax = Axes3D(fig)
ax.scatter3D(x, y, h)
# ax.plot_surface(x, y, h)
# ax.plot_surface(x, y, h, color='b')

plt.show()
@@ -0,0 +1,32 @@

#!/usr/bin/python
import numpy as np
import matplotlib.pyplot as plt
import os
from sklearn.externals import joblib

joint_num = 3
datasets_path = '../datasets/tape/'
datasets_raw = joblib.load(os.path.join(datasets_path, 'pkl/datasets_raw.pkl'))
datasets_filtered = joblib.load(os.path.join(datasets_path, 'pkl/datasets_filtered.pkl'))
num_demo = 15
publish_rate = 50.0
info = 'left_hand'

# plot the raw data, one subplot per degree of freedom
fig = plt.figure(0)
fig.suptitle('the raw data of ' + info)
for demo_idx in range(num_demo):
    for joint_idx in range(joint_num):
        ax = fig.add_subplot(joint_num, 1, 1 + joint_idx)
        data = datasets_raw[0][demo_idx][info][:, joint_idx]
        plt.plot(np.array(range(len(data)))/publish_rate, data)

# plot the filtered data
fig = plt.figure(2)
fig.suptitle('the filtered data of ' + info)
for demo_idx in range(num_demo):
    for joint_idx in range(joint_num):
        ax = fig.add_subplot(joint_num, 1, 1 + joint_idx)
        data = datasets_filtered[0][demo_idx][info][:, joint_idx]
        plt.plot(np.array(range(len(data)))/publish_rate, data)

plt.show()
984 KB (binary file not shown)

datasets/handover_20171228/info/cfg/datasets.cfg

+5
@@ -2,3 +2,8 @@
 data_index0: 0, 1, 2, 3, 10, 23, 24
 data_index1: 0, 1, 5, 7, 8, 14, 24
 data_index2: 0, 12, 14, 15, 19, 22, 23
+
+[index_12]
+data_index0: 0, 1, 3, 10, 11, 14, 17, 18, 21, 22, 23, 24
+data_index1: 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 14, 24
+data_index2: 0, 1, 2, 3, 4, 5, 6, 8, 15, 19, 22, 23
@@ -1,4 +1,3 @@
 [index_12]
 data_index0: 0,1,3,4,14,15,17,18,19,21,22,23,25,27,28
-#5,6,7,8,9,13
-# 0,1,3,4,5,6,7,8,9,13,14,15,17,18,19,21,22,23,25,27,28
+#5,6,7,8,9,13
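The cfg sections above are plain INI-style option lists, so they can be read with Python's standard ConfigParser. The sketch below is only an illustration under that assumption; the commit does not show how the package actually parses these files.

```python
# Minimal sketch (not code from this package): parse an index list from datasets.cfg.
try:
    import configparser                  # Python 3
except ImportError:
    import ConfigParser as configparser  # Python 2

cfg = configparser.ConfigParser()
cfg.read('datasets/handover_20171228/info/cfg/datasets.cfg')

# e.g. the [index_12] list for the first data index
index_12_task0 = [int(i) for i in cfg.get('index_12', 'data_index0').split(',')]
print(index_12_task0)
```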

docs/media/generalization.png

319 KB