Commit 26a94b62 authored by Bryson Howell's avatar Bryson Howell

Prototype for script to cluster incident locations and download feature maps.

parent b7a4e114
......@@ -21,8 +21,6 @@ import csv
import os
import glob
import matlab.engine
def trim_extent(x_pts, y_pts, scaled_extent):
# trim data to only within extents
rm_mask = np.logical_or(x_pts < 0, x_pts > scaled_extent) # mask to remove points (x axis)
......@@ -301,6 +299,13 @@ def grab_features(anchor_point, extent, sample_dist = 10, case_name = 'blah', he
if __name__ == "__main__":
#Note - this is using importmap_py.m to save data in a Matlab matrix.
#Why don't we just do this in Numpy, or pickle....
#above function gives us csv files anyways.
ics = [
[37.197730, -80.585233,'kentland'],
[36.891640, -81.524214,'hmpark'],
......
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from scipy.cluster.vq import kmeans, whiten
from scipy.io import loadmat
import pandas as pd
from arcgis_terrain import get_terrain_map, lat_lon2meters
from feature_set import grab_features
#Bryson Howell (blhowell@vt.edu), 7/10/24
#Script made to create 3D Gridworld environments for RL experiments
#Script made to create a set of SAR datasets.
#Formats data into a csv that can be loaded as a Pandas dataframe.
......@@ -43,19 +46,55 @@ def incident_list():
#new_df = pd.DataFrame(columns=incidents.columns)
#new_df.insert(0,'key')
#print(new_df.columns)
return
#My way of checking matlab datasets without using matlab
def investigate():
    """Inspect a Matlab .mat dataset from Python, without needing Matlab.

    Loads the hard-coded kentland feature file with scipy.io.loadmat and
    returns the resulting dict of Matlab variables so they can be examined
    interactively (previously the loaded data was silently discarded).

    Returns:
        dict: variable-name -> array mapping produced by ``loadmat``.
    """
    data_dir = './matlab_data/'  # renamed: 'dir' shadowed the builtin
    filename = 'BW_LFandInac_Zelev_kentland.mat'
    # appendmat=False because the filename already carries the .mat extension
    matdict = loadmat(data_dir + filename, appendmat=False)
    return matdict
#From the set of LPM coordinates, download and save map/linear features for areas as necessary
#Also make images to show where areas are.
def collect_terrain():
extent_download = 200000 #Size of maps to dowload, in meters (big...)
extent_test = 20000 #Size of experiment area. 20km = 3000 grid cells
#Collection of map centers, in meters. Use to check if we need to download a new area
centers = []
ipp_list = []
#Used to group search incidents in the same area together. Added to DF at end
keys = []
listname = './lp_data/incident_locations.csv'
samples = 1
incidents = pd.read_csv(listname,nrows=samples)
#samples = 1
#incidents = pd.read_csv(listname,nrows=samples)
incidents = pd.read_csv(listname)
samples = incidents.shape[0]
#try kmeans=
latlons = incidents[["IPP_lat","IPP_lon"]].to_numpy(dtype=np.float32())
#latlons = np.concatenate(([incidents['IPP_lat']],[incidents['IPP_lon']]),axis=0)
print(latlons[0])
latlons = whiten(latlons)
clusters, distortions = kmeans(whiten(latlons),4)
plt.scatter(latlons[:,0],latlons[:,1])
plt.scatter(clusters[:,0],clusters[:,1],c='r')
plt.show()
return
#This gives a loose clustering algorithm, such that we know maps can be made from groups
#Loop through search incidents
for index, row in incidents.iterrows():
ipp_point = [row['IPP_lat'],row['IPP_lon']]
......@@ -63,17 +102,76 @@ def collect_terrain():
#Convert to meters
ipp_xy = lat_lon2meters(ipp_point[0],ipp_point[1])
find_xy = lat_lon2meters(find_point[0],find_point[1])
#See if IPP has at least 1500 cells on either side.
#First find nearest center point
#See if we need to download a new map
if(len(ipp_list) == 0):
ipp_list.append(ipp_xy)
keys.append(len(ipp_list)-1)
else:
#Find nearest downloaded center point
ipp_array = np.asarray(ipp_list)
distance = np.sum((ipp_array - ipp_xy)**2,axis=1)
closest = np.argmin(distance)
#print(distance[closest])
target = ipp_array[closest]
#Now see if map centered on IPP will fit:
topedge = (ipp_xy[1] + 0.5*extent_test <= target[1] + 0.5*extent_download)
botedge = (ipp_xy[1] - 0.5*extent_test >= target[1] - 0.5*extent_download)
rightedge = (ipp_xy[0] + 0.5*extent_test <= target[0] + 0.5*extent_download)
leftedge = (ipp_xy[0] - 0.5*extent_test >= target[0] - 0.5*extent_download)
if(not(topedge and botedge and rightedge and leftedge)):
#print("Downloading new map")
ipp_list.append(ipp_xy)
keys.append(len(ipp_list)-1)
#Add key to incident locations so we know the general area
else:
keys.append(closest)
print("Found %d maps to collect." % len(ipp_list))
print(keys)
#Add group keys to dataframe
incidents.insert(0,'area',keys)
#Now, if there's only one key in an area we can avoid downloading a lot.
for i in range(0,len(ipp_list)):
#Download local map
if(keys.count(i) <= 1):
size = extent_test
else:
size = extent_download
#For visualizing...
draw = True
if(draw):
for i in range(0,len(ipp_list)):
#Draw download area
rect = matplotlib.patches.Rectangle((target[0]-0.5*extent_download,target[1]-0.5*extent_download),extent_download,extent_download,facecolor="#bec8d1")
#Draw local experiments
#Plot
fig = plt.figure()
ax = fig.add_subplot(111)
rect = matplotlib.patches.Rectangle((target[0]-0.5*extent_download,target[1]-0.5*extent_download),extent_download,extent_download,facecolor="#bec8d1")
rect2 = matplotlib.patches.Rectangle((ipp_xy[0]-0.5*extent_test,ipp_xy[1]-0.5*extent_test),extent_test,extent_download,facecolor="red")
ax.add_patch(rect)
ax.add_patch(rect2)
plt.show()
return
#Load GIS Features for new area
#grab_features()
#Load elevation
res = 10 #Resolution of GIS collection
size = 6000 #Maximum size of map. Is this is latlon, grid cells, ?
#res = 10 #Resolution of GIS collection
#size = 6000 #Maximum size of map. Is this is latlon, grid cells, ?
#heading is direction of trajectories from north (default 0 degrees?)
[e,e_interp,x,y,data,lat_lon] = get_terrain_map(ipp_point, sample_dist = res, extent = size, show_plot = True)
#[e,e_interp,x,y,data,lat_lon] = get_terrain_map(ipp_point, sample_dist = res, extent = size, show_plot = True)
#Load Linear Features. Probably need to convert some Matlab code...
......@@ -81,7 +179,7 @@ def collect_terrain():
return
#return
#Collect GIS maps and layers for SAR initial positions
......
import time

import matlab.engine

from feature_set import grab_features
#Downloads features with feature_set.py, then uses Matlab to save as csv files.
#Used to be in feature_set.py
#Not yet tested: requires a local Matlab installation.
#Bryson Howell, 7/12/24
if __name__ == "__main__":
    # Initial planning points: [lat, lon, case_name]. Commented entries are
    # additional incident sites kept for future collection runs.
    ics = [
        [37.197730, -80.585233, 'kentland'],
        [36.891640, -81.524214, 'hmpark'],
        # [38.29288, -78.65848, 'brownmountain'],
        # [38.44706, -78.46993, 'devilsditch'],
        # [37.67752, -79.33887, 'punchbowl'],
        # [37.99092, -78.52798, 'biscuitrun'],
        # [37.82520, -79.081910, 'priest'],
        # [34.12751, -116.93247, 'sanbernardino'],
        # [31.92245, -109.9673, '[31.92245, -109.9673]'],
        # [31.9024, -109.2785, '[31.9024, -109.2785]'],
        # [31.42903, -110.2933, '[31.42903, -110.2933]'],
        # [34.55, -111.6333, '[34.55, -111.6333]'],
        # [34.6, -112.55, '[34.6, -112.55]'],
        # [34.82167, -111.8067, '[34.82167, -111.8067]'],
        # [33.3975, -111.3478, '[33.3975, -111.3478]'],
        # [33.70542, -111.338, '[33.70542, -111.338]'],
        # [31.39708, -111.2064, '[31.39708, -111.2064]'],
        # [32.4075, -110.825, '[32.4075, -110.825]'],
        # [34.89333, -111.8633, '[34.89333, -111.8633]'],
        # [34.94833, -111.795, '[34.94833, -111.795]'],
        # [31.72262, -110.1878, '[31.72262, -110.1878]'],
        # [33.39733, -111.348, '[33.39733, -111.348]'],
        # [34.63042, -112.5553, '[34.63042, -112.5553]'],
        # [34.55977, -111.6539, '[34.55977, -111.6539]'],
        # [34.90287, -111.8131, '[34.90287, -111.8131]'],
        # [34.86667, -111.8833, '[34.86667, -111.8833]'],
        # [32.43543, -110.7893, '[32.43543, -110.7893]'],
        # [32.40917, -110.7098, '[32.40917, -110.7098]'],
        # [35.33068, -111.7111, '[35.33068, -111.7111]'],
        # [32.01237, -109.3157, '[32.01237, -109.3157]'],
        # [31.85073, -109.4219, '[31.85073, -109.4219]'],
        # [34.88683, -111.784, '[34.88683, -111.784]'],
        # [32.41977, -110.7473, '[32.41977, -110.7473]'],
        # [33.60398, -112.5151, '[33.60398, -112.5151]'],
        # [33.3968, -111.3481, '[33.3968, -111.3481]'],
        # [33.52603, -111.3905, '[33.52603, -111.3905]'],
        # [32.33333, -110.8528, '[32.33333, -110.8528]'],
        # [32.33583, -110.9102, '[32.33583, -110.9102]'],
        # [32.337, -110.9167, '[32.337, -110.9167]'],
        # [35.08133, -111.0711, '[35.08133, -111.0711]'],
        # [33.25, -113.5093, '[33.25, -113.5093]'],
        # [31.50572, -110.6762, '[31.50572, -110.6762]'],
        # [34.91667, -111.8, '[34.91667, -111.8]'],
        # [35.1938, -114.057, '[35.1938, -114.057]'],
        # [33.39715, -111.3479, '[33.39715, -111.3479]'],
        # [33.37055, -111.1152, '[33.37055, -111.1152]'],
        # [34.0927, -111.4246, '[34.0927, -111.4246]'],
        # [31.83522, -110.3567, '[31.83522, -110.3567]'],
        # [35.24375, -111.5997, '[35.24375, -111.5997]'],
        # [34.82513, -111.7875, '[34.82513, -111.7875]'],
        # [33.39705, -111.3479, '[33.39705, -111.3479]'],
        # [33.38885, -111.3657, '[33.38885, -111.3657]'],
        # [32.82142, -111.2021, '[32.82142, -111.2021]'],
        # [34.97868, -111.8964, '[34.97868, -111.8964]'],
        # [33.47802, -111.4377, '[33.47802, -111.4377]'],
        # [34.82387, -111.7751, '[34.82387, -111.7751]'],
        # [34.9253, -111.7341, '[34.9253, -111.7341]'],
        # [34.09278, -111.421, '[34.09278, -111.421]'],
        # [36.23878, -112.6892, '[36.23878, -112.6892]'],
        # [31.33447, -109.8186, '[31.33447, -109.8186]'],
        # [36.37473, -106.6795, '[36.37473, -106.6795]'],
        # [42.02893, -74.33659, '[42.02893, -74.33659]'],
        # [41.54819, -80.33056, '[41.54819, -80.33056]'],
        # [42.0097, -74.42595, '[42.0097, -74.42595]'],
        # [42.17965, -74.21362, '[42.17965, -74.21362]'],
        # [42.55121, -73.4874, '[42.55121, -73.4874]'],
        # [42.01362, -80.35002, '[42.01362, -80.35002]'],
        # [42.74271, -73.45475, '[42.74271, -73.45475]'],
        # [42.1549, -74.20523, '[42.1549, -74.20523]'],
        # [42.90324, -73.8047, '[42.90324, -73.8047]'],
        # [44.16065, -73.85545, '[44.16065, -73.85545]'],
        # [43.42736, -74.4481, '[43.42736, -74.4481]'],
        # [44.18, -72.99531, '[44.18, -72.99531]'],
        # [43.52022, -73.59601, '[43.52022, -73.59601]'],
        # [44.12531, -73.78635, '[44.12531, -73.78635]'],
        # [43.73413, -74.25577, '[43.73413, -74.25577]'],
        # [43.06902, -74.48481, '[43.06902, -74.48481]'],
        # [43.8756, -74.43076, '[43.8756, -74.43076]'],
        # [43.41544, -74.4148, '[43.41544, -74.4148]'],
        # [43.42473, -73.73209, '[43.42473, -73.73209]'],
        # [43.59779, -74.55354, '[43.59779, -74.55354]'],
        # [43.4449, -74.4086, '[43.4449, -74.4086]'],
        # [43.4332, -74.41433, '[43.4332, -74.41433]'],
        # [43.4539, -74.52317, '[43.4539, -74.52317]'],
        # [43.63354, -74.55927, '[43.63354, -74.55927]'],
        # [44.19204, -74.26329, '[44.19204, -74.26329]'],
        # [44.31349, -74.56818, '[44.31349, -74.56818]'],
        # [43.42498, -74.41496, '[43.42498, -74.41496]'],
        # [43.33195, -74.49095, '[43.33195, -74.49095]'],
        # [43.95385, -75.15748, '[43.95385, -75.15748]'],
        # [44.12993, -74.58844, '[44.12993, -74.58844]'],
        # [44.28656, -74.61429, '[44.28656, -74.61429]'],
        # [43.51063, -74.57393, '[43.51063, -74.57393]'],
        # [44.19013, -74.81336, '[44.19013, -74.81336]'],
        # [43.65649, -76.00019, '[43.65649, -76.00019]'],
        # [42.42501, -76.47478, '[42.42501, -76.47478]'],
        # [42.31735, -76.47791, '[42.31735, -76.47791]'],
        # [42.34432, -77.47638, '[42.34432, -77.47638]'],
        # [42.25618, -77.78988, '[42.25618, -77.78988]'],
        # [42.39831, -72.88675, '[42.39831, -72.88675]'],
        # [48.1103, -121.4917, '[48.1103, -121.4917]'],
        # [48.64606, -122.4247, '[48.64606, -122.4247]']
    ]
    # You should be running this script from the base dir
    base_dir = './'
    # NOTE(review): commented-out per-case os.mkdir scaffolding removed;
    # confirm grab_features(save_to_folder=True) creates its output dirs.
    eng = matlab.engine.start_matlab()  # engine for running matlab
    try:
        for i, ics_pt in enumerate(ics):
            anchor_point = [float(ics_pt[0]), float(ics_pt[1])]
            extent = 10e3            # map extent in meters
            save_flag = True         # save downloaded layers to disk
            plot_flag = True
            file_extension = 'temp'
            sample_dist = int(extent / 100)
            heading = 0              # heading is direction of trajectories from north (default 0 degrees?)
            start_time = time.time()
            grab_features(anchor_point=anchor_point, extent=extent, sample_dist=sample_dist,
                          case_name=str(ics_pt[2]), heading=heading, save_files=save_flag,
                          save_to_folder=True, file_id=file_extension, plot_data=plot_flag)
            time.sleep(1)  # wait for... files to settle?
            # run matlab: convert the saved layers to a Matlab matrix
            if save_flag:
                res = eng.importmap_py(str(ics_pt[2]), base_dir)
            # Fixed: report 1-based progress (was 0-based, so it never showed len/len).
            print("------- total time = {} seconds, iteration {}/{} ------".format(
                time.time() - start_time, i + 1, len(ics)))
    finally:
        # Fixed: always release the Matlab engine, even if a download fails.
        eng.quit()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment