Add groundtruth to each pickle, if available.
Change the pickle dump format to [rss_map, groundtruth_loc].
This commit is contained in:
parent
8f5ca06ce5
commit
1841b28ca3
|
|
@ -0,0 +1,5 @@
|
|||
10a4be8db0d2, 53, 21
|
||||
10a4bec51ff4, 3, 48
|
||||
10a4beca20ad, 30, 5
|
||||
98fc11691fc5, 35, 24
|
||||
9c8ecd102ace, 59, 46
|
||||
|
|
@ -87,6 +87,7 @@ def blocking_display_rss_map(rss_map: np.ndarray, visualize: bool = False, outpu
|
|||
def convert_to_pickle_rss(
|
||||
fp: str,
|
||||
orientation: int,
|
||||
labels: list = None, # the right groundtruth in rss map pixels
|
||||
visualize: bool = False,
|
||||
output_map: bool = False,
|
||||
filters: int = None,
|
||||
|
|
@ -167,7 +168,7 @@ def convert_to_pickle_rss(
|
|||
rss_map[i, j] = max(np.median(data_fullfilled), -85.0)
|
||||
|
||||
filepath = fp.replace(
|
||||
".csv", "{}_pkt_{}_map{}_{}"
|
||||
".csv", "{}_pkttype_{}_map{}_{}"
|
||||
.format(
|
||||
"_s{}".format(np.random.randint(0, 999999)) if sampling else "",
|
||||
pkt_types[0][0],
|
||||
|
|
@ -180,7 +181,7 @@ def convert_to_pickle_rss(
|
|||
blocking_display_rss_map(rss_map, visualize=visualize, output_map=output_map, fp=filepath)
|
||||
|
||||
with open("{}.pickle".format(filepath), "wb") as f:
|
||||
pickle.dump(rss_map, f)
|
||||
pickle.dump([rss_map, labels], f)
|
||||
|
||||
|
||||
def extract_dev_from_combined(fp, minimalCounts=100, cleanup=True):
|
||||
|
|
|
|||
|
|
@ -13,11 +13,29 @@ from libs.parser_post import get_locs_from_parsed_sig_data
|
|||
from libs.parser_post import extract_dev_from_combined
|
||||
|
||||
|
||||
def get_groundtruth_dict(f_gt):
    """Parse a groundtruth file into a {device_addr: [x, y]} mapping.

    Each data line is expected as ``addr,x,y``.  Lines containing ``#`` are
    treated as comments and skipped.

    :param f_gt: path to the groundtruth file, or None when unavailable.
    :return: dict mapping address string to ``[loc_x, loc_y]`` floats;
        empty dict when ``f_gt`` is None.
    """
    gt = {}
    if f_gt is None:
        return gt
    with open(f_gt, 'r') as f:
        for line in f:
            # Skip comment lines and blank lines.  A blank line (e.g. the
            # trailing newline at EOF) would otherwise raise IndexError when
            # indexing tmp[1].
            if '#' in line or not line.strip():
                continue
            tmp = line.rstrip('\n').split(',')
            addr = tmp[0]
            loc_x = float(tmp[1])
            loc_y = float(tmp[2])
            gt[addr] = [loc_x, loc_y]
    return gt
|
||||
|
||||
|
||||
def get_files(folder):
|
||||
files = os.listdir(folder)
|
||||
f_map_image = None
|
||||
f_loc_est = None
|
||||
f_sig_data = None
|
||||
f_groundtruth = None
|
||||
is_csi = False
|
||||
for file in files:
|
||||
if '.pcap' in file:
|
||||
|
|
@ -28,7 +46,9 @@ def get_files(folder):
|
|||
f_loc_est = "{0}/{1}".format(folder, file)
|
||||
elif 'map.ppm' in file:
|
||||
f_map_image = "{0}/{1}".format(folder, file)
|
||||
return f_map_image, f_loc_est, f_sig_data, is_csi
|
||||
elif 'gt.txt' in file:
|
||||
f_groundtruth = "{0}/{1}".format(folder, file)
|
||||
return f_map_image, f_loc_est, f_sig_data, f_groundtruth, is_csi
|
||||
|
||||
|
||||
def generate_floorplan_map(f_map, f_loc, f_sig_extracted, is_csi):
|
||||
|
|
@ -67,7 +87,8 @@ def generate_floorplan_map(f_map, f_loc, f_sig_extracted, is_csi):
|
|||
|
||||
def convert_to_pickle(
|
||||
filepaths,
|
||||
orientation,
|
||||
orientation,
|
||||
groundtruth=None,
|
||||
filters=None,
|
||||
visualize=False,
|
||||
is_csi=False,
|
||||
|
|
@ -90,6 +111,7 @@ def convert_to_pickle(
|
|||
for fff in range(0, 6):
|
||||
convert_to_pickle_rss(
|
||||
filepath, orientation,
|
||||
labels=groundtruth.get(os.path.splitext(os.path.basename(filepath))[0], None),
|
||||
visualize=visualize,
|
||||
output_map=output_map,
|
||||
filters=fff,
|
||||
|
|
@ -98,6 +120,7 @@ def convert_to_pickle(
|
|||
else:
|
||||
convert_to_pickle_rss(
|
||||
filepath, orientation,
|
||||
labels=groundtruth.get(os.path.splitext(os.path.basename(filepath))[0], None),
|
||||
visualize=visualize,
|
||||
output_map=output_map,
|
||||
filters=filters,
|
||||
|
|
@ -112,7 +135,7 @@ def main(args):
|
|||
if not os.path.isdir(args.folder):
|
||||
print("Err: folder {} does not exist".format(args.folder))
|
||||
sys.exit(2)
|
||||
f_map, f_loc, f_sig, is_csi = get_files(args.folder)
|
||||
f_map, f_loc, f_sig, f_gt, is_csi = get_files(args.folder)
|
||||
if f_loc is None or f_sig is None:
|
||||
print("Err: desired files not exist")
|
||||
sys.exit(2)
|
||||
|
|
@ -122,11 +145,14 @@ def main(args):
|
|||
f_sig_combined = combine_sig_loc(f_sig_parsed, f_loc)
|
||||
f_sig_extracted = extract_dev_from_combined(f_sig_combined, minimalCounts=5000)
|
||||
|
||||
gts = get_groundtruth_dict(f_gt)
|
||||
|
||||
if args.pickle:
|
||||
# f_sig_extracted = [x for x in f_sig_extracted if '98fc11691fc5' in x]
|
||||
convert_to_pickle(
|
||||
f_sig_extracted,
|
||||
args.orientation,
|
||||
groundtruth=gts,
|
||||
filters=args.filters,
|
||||
visualize=args.visualize,
|
||||
is_csi=is_csi,
|
||||
|
|
|
|||
|
|
@ -1,189 +0,0 @@
|
|||
import csv
|
||||
import numpy as np
|
||||
import sys
|
||||
import pickle
|
||||
import matplotlib.pyplot as plt
|
||||
import bisect
|
||||
# Print full arrays (no truncation) when arrays are dumped for debugging.
np.set_printoptions(threshold=sys.maxsize)

# Module-level state shared by the parsing helpers below.
types = []                      # distinct packet-type ids seen in the csv (column 8)
type_values = []                # occurrence count for each entry of `types` (parallel list)
xy_locs_rss = np.empty([1,3])   # rows of [x, y, rss]; NOTE: the initial row is uninitialized
rss_map = np.empty([64, 64])    # 64x64 grid of averaged rss values, filled by parse_map()
picked_type = -1                # most frequent packet type, set by find_type()
|
||||
|
||||
def main():
    """Entry point: parse CLI args, build the RSS map, then dump and/or show it.

    usage: python3 rss_map_parser.py path/to/file.csv <-u|-d|-l|-r> <-d|-v|-dv>
    """
    # Guard against missing argv[1]: the original read sys.argv[1] before any
    # length check and crashed with IndexError when run with no arguments.
    if len(sys.argv) < 2 or sys.argv[1] == "-help":
        print("usage: python3 rss_map_parser.py path/to/file/filename.csv -u/-d/-l/-r (when dot at upper/lower/left/right edge) -d/-v/-dv (download/&visulize parsed rss map)")
        return
    if len(sys.argv) < 4:
        # Replaces the dead `len(pathtofile) == 0` check (argv entries are
        # never empty strings in practice; missing entries raise instead).
        print("error: missing arguments[1] indicating the csv file path")
        return

    pathtofile = sys.argv[1]
    orientation = sys.argv[2]
    operation = sys.argv[3]

    # Determine the dominant packet type, then load samples with the
    # coordinate mapping matching the dot position.
    find_type(pathtofile)
    readers = {
        '-r': readCSV_r,
        '-d': readCSV_d,
        '-l': readCSV_l,
        '-u': readCSV_u,
    }
    reader = readers.get(orientation)
    if reader is None:
        print("error: missing arguments[2] indicating the dot position")
        return
    reader(pathtofile)
    parse_map()

    # '-d' downloads (pickles) the map, '-v' visualizes it, '-dv' does both.
    if operation == '-d':
        write_pickle(pathtofile)
    elif operation == '-v':
        visualize_map()
    elif operation == '-dv':
        write_pickle(pathtofile)
        visualize_map()
    else:
        print("error: missing arguments[3] indicating the operation (e.g., download & visulize)")
        return
|
||||
|
||||
def find_type(pathtofile):
    """Tally packet types (csv column 8) and record the most frequent one.

    Populates the module-level `types` / `type_values` parallel lists and
    sets `picked_type` to the type with the highest count.
    """
    global type_values
    global picked_type
    with open(pathtofile) as csvfile:
        for row in csv.reader(csvfile, delimiter=','):
            # Header rows carry 'y' in the second column.
            if row[1] == 'y':
                continue
            pkt_type = row[8]
            if pkt_type in types:
                type_values[types.index(pkt_type)] += 1
            else:
                # First occurrence (covers the empty-list case too).
                types.append(pkt_type)
                type_values.append(1)

    picked_type = types[type_values.index(max(type_values))]
|
||||
|
||||
def readCSV_u(pathtofile):
    """Load (x, y, rss) samples for the picked type, dot at the UPPER edge.

    Upper-edge orientation negates both coordinates.  Samples are appended to
    the global xy_locs_rss, which is then sorted by x and transposed so that
    row 0 = x, row 1 = y, row 2 = rss.
    """
    global xy_locs_rss
    global picked_type
    samples = []
    with open(pathtofile) as csvfile:
        for row in csv.reader(csvfile, delimiter=','):
            if row[8] == picked_type:
                samples.append([-float(row[0]), -float(row[1]), float(row[4])])

    # One vstack instead of per-row vstacks; final array is identical.
    if samples:
        xy_locs_rss = np.vstack([xy_locs_rss] + samples)
    xy_locs_rss = np.transpose(sorted(xy_locs_rss, key=lambda r: r[0]))
|
||||
|
||||
def readCSV_d(pathtofile):
    """Load (x, y, rss) samples for the picked type, dot at the LOWER edge.

    Lower-edge orientation keeps coordinates as-is.  Samples are appended to
    the global xy_locs_rss, which is then sorted by x and transposed so that
    row 0 = x, row 1 = y, row 2 = rss.
    """
    global xy_locs_rss
    global picked_type
    samples = []
    with open(pathtofile) as csvfile:
        for row in csv.reader(csvfile, delimiter=','):
            if row[8] == picked_type:
                samples.append([float(row[0]), float(row[1]), float(row[4])])

    # One vstack instead of per-row vstacks; final array is identical.
    if samples:
        xy_locs_rss = np.vstack([xy_locs_rss] + samples)
    xy_locs_rss = np.transpose(sorted(xy_locs_rss, key=lambda r: r[0]))
|
||||
|
||||
def readCSV_l(pathtofile):
    """Load (x, y, rss) samples for the picked type, dot at the LEFT edge.

    Left-edge orientation swaps axes: x = -csv_y, y = csv_x.  Samples are
    appended to the global xy_locs_rss, which is then sorted by x and
    transposed so that row 0 = x, row 1 = y, row 2 = rss.
    """
    global xy_locs_rss
    global picked_type
    samples = []
    with open(pathtofile) as csvfile:
        for row in csv.reader(csvfile, delimiter=','):
            if row[8] == picked_type:
                samples.append([-float(row[1]), float(row[0]), float(row[4])])

    # One vstack instead of per-row vstacks; final array is identical.
    if samples:
        xy_locs_rss = np.vstack([xy_locs_rss] + samples)
    xy_locs_rss = np.transpose(sorted(xy_locs_rss, key=lambda r: r[0]))
|
||||
|
||||
def readCSV_r(pathtofile):
    """Load (x, y, rss) samples for the picked type, dot at the RIGHT edge.

    Right-edge orientation swaps axes: x = csv_y, y = -csv_x.  Samples are
    appended to the global xy_locs_rss, which is then sorted by x and
    transposed so that row 0 = x, row 1 = y, row 2 = rss.
    """
    global xy_locs_rss
    global picked_type
    samples = []
    with open(pathtofile) as csvfile:
        for row in csv.reader(csvfile, delimiter=','):
            if row[8] == picked_type:
                samples.append([float(row[1]), -float(row[0]), float(row[4])])

    # One vstack instead of per-row vstacks; final array is identical.
    if samples:
        xy_locs_rss = np.vstack([xy_locs_rss] + samples)
    xy_locs_rss = np.transpose(sorted(xy_locs_rss, key=lambda r: r[0]))
|
||||
|
||||
def parse_map():
    """Average the scattered (x, y, rss) samples into the 64x64 rss_map grid.

    Assumes xy_locs_rss is the (3, N) array produced by one of the readCSV_*
    helpers: row 0 = x (sorted ascending), row 1 = y, row 2 = rss.  Each grid
    cell averages the samples falling in its (slightly widened, overlapping)
    x/y window; cells with no samples default to -85.  Finally the map is
    transposed.
    """
    global rss_map
    rss_sum_count = 0
    rss_sum_temp = 0
    for i in range(64):
        for ii in range(64):
            # x-window for column i (0.1 per cell, widened by 0.1 on each
            # side so neighbouring cells share samples).
            upper_bound_x = (-3.2+0.1*i+0.1)
            lower_bound_x = (-3.2+0.1*(i-1)-0.1)
            # Row 0 is sorted by the readCSV_* helpers, so bisect narrows
            # the candidate slice in O(log n).
            lower_bound_iii = bisect.bisect_left(xy_locs_rss[0,:], lower_bound_x)
            upper_bound_iii = bisect.bisect_right(xy_locs_rss[0,:], upper_bound_x, lo=lower_bound_iii)
            nums = xy_locs_rss[:, lower_bound_iii:upper_bound_iii]
            for iii in range(len(nums[0,:])):
                # y-window for row ii, widened by 0.025 on each side.
                if nums[1,iii]>(5.8-0.1*ii-0.025) and nums[1,iii]<=(5.8-0.1*(ii-1)+0.025):
                    rss_sum_temp += nums[2,iii]
                    rss_sum_count += 1
            if rss_sum_count != 0:
                rss_map[i,ii] = rss_sum_temp / rss_sum_count
            else:
                # No samples in this cell: fall back to the -85 floor value.
                rss_map[i,ii] = -85
            # Reset the accumulators for the next cell.
            rss_sum_temp = 0
            rss_sum_count = 0

    rss_map = np.transpose(rss_map)
|
||||
|
||||
def write_pickle(pathtofile):
    """Serialize the global rss_map to "<pathtofile>.pickle".

    :param pathtofile: base path; ".pickle" is appended to form the output file.
    """
    # Context manager guarantees the file is closed even if pickling raises
    # (the original open/close pair leaked the handle on error).
    with open(pathtofile + ".pickle", 'wb') as file:
        pickle.dump(rss_map, file)
|
||||
|
||||
def visualize_map():
    """Render the global rss_map as a heat map (blocks until the window closes)."""
    plt.imshow(rss_map, cmap='hot', interpolation='nearest')
    plt.colorbar()
    plt.show()
|
||||
|
||||
# Guard the entry point so importing this module no longer runs the CLI.
if __name__ == "__main__":
    main()
|
||||
|
||||
#def parse_horizontal_map():
|
||||
# global rss_map
|
||||
# rss_sum_count = 0
|
||||
# rss_sum_temp = 0
|
||||
# for i in range(64):
|
||||
# for ii in range(64):
|
||||
# upper_bound_x = (-5.8+0.1*i+0.025)
|
||||
# lower_bound_x = (-5.8+0.1*(i-1)-0.025)
|
||||
# lower_bound_iii = bisect.bisect_left(xy_locs_rss[0,:], lower_bound_x)
|
||||
# upper_bound_iii = bisect.bisect_right(xy_locs_rss[0,:], upper_bound_x, lo=lower_bound_iii)
|
||||
# nums = xy_locs_rss[:, lower_bound_iii:upper_bound_iii]
|
||||
# for iii in range(len(nums[0,:])):
|
||||
# if nums[1,iii]>(3.2-0.1*ii-0.075) and nums[1,iii]<=(3.2-0.1*(ii-1)+0.075):
|
||||
# rss_sum_temp += nums[2,iii]
|
||||
# rss_sum_count += 1
|
||||
# if rss_sum_count != 0:
|
||||
# rss_map[i,ii] = rss_sum_temp / rss_sum_count
|
||||
# else:
|
||||
# rss_map[i,ii] = -85
|
||||
# rss_sum_temp = 0
|
||||
# rss_sum_count = 0
|
||||
|
||||
# rss_map = np.transpose(rss_map)
|
||||
Loading…
Reference in New Issue