Test rectifying individual pings #120
CameronBodine committed Jun 24, 2024
1 parent 5efb74f commit 9d98868
Showing 5 changed files with 248 additions and 154 deletions.
36 changes: 25 additions & 11 deletions src/class_portstarObj.py
@@ -348,7 +348,8 @@ def _createMosaicTransect(self,
overview=True,
threadCnt=cpu_count(),
son=True,
maxChunk = 50):
maxChunk = 50,
cog=True):
'''
Main function to combine exported rectified sonograms into a mosaic. If
overview=True, overviews of the mosaic will be built, enhancing view
@@ -384,6 +385,13 @@ def _createMosaicTransect(self,
# maxChunk = 50 # Max chunks per mosaic. Limits each mosaic file size.
self.imgsToMosaic = [] # List to store files to mosaic.

if cog:
chunkField = 'chunk_id'
else:
chunkField = 'chunk_id_2'

print(chunkField)

if son:
if self.port.rect_wcp: # Mosaic wcp sonograms if previously exported
self.port._loadSonMeta()
@@ -393,7 +401,7 @@ def _createMosaicTransect(self,

port = []
for name, group in df.groupby('transect'):
chunks = pd.unique(group['chunk_id'])
chunks = pd.unique(group[chunkField])
port_transect = []
for chunk in chunks:
img_path = os.path.join(portPath, '*{}.tif'.format(chunk))
@@ -408,7 +416,7 @@ def _createMosaicTransect(self,

star = []
for name, group in df.groupby('transect'):
chunks = pd.unique(group['chunk_id'])
chunks = pd.unique(group[chunkField])
star_transect = []
for chunk in chunks:
img_path = os.path.join(starPath, '*{}.tif'.format(chunk))
@@ -427,12 +435,15 @@ def _createMosaicTransect(self,

port = []
for name, group in df.groupby('transect'):
chunks = pd.unique(group['chunk_id'])
chunks = pd.unique(group[chunkField])
port_transect = []
for chunk in chunks:
img_path = os.path.join(portPath, '*{}.tif'.format(chunk))
img = glob(img_path)[0]
port_transect.append(img)
try:
img_path = os.path.join(portPath, '*{}.tif'.format(chunk))
img = glob(img_path)[0]
port_transect.append(img)
except:
# No rectified image was exported for this chunk; skip it
pass
port.append(port_transect)

self.star._loadSonMeta()
@@ -442,12 +453,15 @@ def _createMosaicTransect(self,

star = []
for name, group in df.groupby('transect'):
chunks = pd.unique(group['chunk_id'])
chunks = pd.unique(group[chunkField])
star_transect = []
for chunk in chunks:
img_path = os.path.join(starPath, '*{}.tif'.format(chunk))
img = glob(img_path)[0]
star_transect.append(img)
try:
img_path = os.path.join(starPath, '*{}.tif'.format(chunk))
img = glob(img_path)[0]
star_transect.append(img)
except:
# No rectified image was exported for this chunk; skip it
pass
star.append(star_transect)

srcToMosaic = [list(itertools.chain(*i)) for i in zip(port, star)]
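
For context, the list comprehension closing this hunk pairs each transect's port images with its starboard images and flattens each pair into one source list per mosaic; the new chunkField switch simply decides whether images are matched on 'chunk_id' (cog=True) or the per-ping 'chunk_id_2'. A minimal sketch of the zip/chain idiom with hypothetical file names, not the project's code:

import itertools

# One list of rectified images per transect, per side (hypothetical paths)
port = [['p_t0_c0.tif', 'p_t0_c1.tif'], ['p_t1_c0.tif']]
star = [['s_t0_c0.tif', 's_t0_c1.tif'], ['s_t1_c0.tif']]

# zip() pairs the port/star lists transect by transect;
# chain(*i) flattens each pair into one flat list per mosaic
srcToMosaic = [list(itertools.chain(*i)) for i in zip(port, star)]
# [['p_t0_c0.tif', 'p_t0_c1.tif', 's_t0_c0.tif', 's_t0_c1.tif'],
#  ['p_t1_c0.tif', 's_t1_c0.tif']]
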
55 changes: 46 additions & 9 deletions src/class_rectObj.py
@@ -447,7 +447,8 @@ def _applyPosOffset(self, x_offset, y_offset):
#===========================================
def _getRangeCoords(self,
flip = False,
filt = 25):
filt = 25,
cog = True):
'''
Humminbird SSS stores one set of geographic coordinates where each ping
originates (assuming the GPS is located directly above the sonar transducer).
@@ -520,7 +521,7 @@ def _getRangeCoords(self,
rotate *= -1

# Calculate ping bearing and normalize to range 0-360
cog = True
# cog = False
if cog:
sDF[ping_bearing] = (sDF['cog']+rotate) % 360
else:
@@ -586,7 +587,20 @@ def _getRangeCoords(self,

##########################################
# Smooth and interpolate range coordinates
self._interpRangeCoords(filt)
if cog:
self._interpRangeCoords(filt)
else:
sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'range', 'range_lon', 'range_lat', 'range_e', 'range_n']].copy()
sDF.rename(columns={'lons': 'trk_lons', 'lats': 'trk_lats', 'utm_es': 'trk_utm_es', 'utm_ns': 'trk_utm_ns', 'cog': 'trk_cog', 'range_lat':'range_lats', 'range_lon':'range_lons', 'range_e':'range_es', 'range_n':'range_ns'}, inplace=True)
sDF['chunk_id_2'] = sDF.index.astype(int)

###########################################
# Overwrite Trackline_Smth_son.beamName.csv
# outCSV = os.path.join(self.metaDir, "Trackline_Smth_"+self.beamName+".csv")
outCSV = self.smthTrkFile
sDF.to_csv(outCSV, index=True, float_format='%.14f')

# sys.exit()
gc.collect()
self._pickleSon()
return #self
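
The bearing logic earlier in _getRangeCoords computes each ping's bearing as the track bearing (course over ground when cog=True, otherwise instrument heading) plus a fixed per-side rotation, wrapped to 0-360; with cog=False the smoothing step is skipped and the per-ping range coordinates are written straight back to the trackline CSV, with chunk_id_2 taken from the row index. A hedged sketch of the wrap arithmetic (the plus/minus 90 degree rotation per beam side is an assumption):

def ping_bearing(track_bearing, rotate):
    # track_bearing: cog or instrument heading, in degrees
    # rotate: e.g. +90 (starboard) or -90 (port), an assumed convention
    return (track_bearing + rotate) % 360

ping_bearing(350.0, 90.0)    # 80.0, wraps past north
ping_bearing(10.0, -90.0)    # 280.0, Python's % keeps the result in [0, 360)
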
@@ -1122,6 +1136,7 @@ def _exportCovShp(self,
def _rectSonParallel(self,
chunk,
filt=50,
cog=True,
wgs=False,
son=True):
'''
@@ -1209,7 +1224,14 @@ def _rectSonParallel(self,
self._loadSonMeta()

sonMetaAll = self.sonMetaDF
isChunk = sonMetaAll['chunk_id']==chunk
if cog:
isChunk = sonMetaAll['chunk_id']==chunk
else:
isChunk = sonMetaAll['chunk_id_2']==chunk
# next = sonMetaAll['chunk_id_2']==(chunk+1)
# isChunk = pd.concat([isChunk, next], ignore_index=True)
isChunk.iloc[chunk+1] = True # also flag the next ping so the selection spans two consecutive rows

sonMeta = sonMetaAll[isChunk].reset_index()

# Update class attributes based on current chunk
@@ -1219,7 +1241,7 @@

if son:
# Open image to rectify
self._getScanChunkSingle(chunk)
self._getScanChunkSingle(chunk, cog)
else:
# Rectifying substrate classification
pass
@@ -1234,6 +1256,9 @@
del self.shadowMask

img = self.sonDat
# if not cog:
# # Zero out second ping
# img[:,1] = 0

# For each ping, we need the pixel coordinates where the sonar
## originates on the trackline, and where it terminates based on the
@@ -1248,9 +1273,12 @@
# Create mask for filtering array. This makes fitting PiecewiseAffineTransform
## more efficient
mask = np.zeros(len(pixAll), dtype=bool) # Create mask same size as pixAll
mask[0::filt] = 1 # Filter row coordinates
mask[1::filt] = 1 # Filter column coordinates
mask[-2], mask[-1] = 1, 1 # Make sure we keep last row/col coordinates
if cog:
mask[0::filt] = 1 # Filter row coordinates
mask[1::filt] = 1 # Filter column coordinates
mask[-2], mask[-1] = 1, 1 # Make sure we keep last row/col coordinates
else:
mask[:] = 1

# Filter pix
pix = pixAll[mask]
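
The mask above thins the control points handed to PiecewiseAffineTransform: with cog=True only every filt-th coordinate pair (plus the final pair) is kept, while per-ping rectification (cog=False) keeps every point. A small sketch of the thinning, assuming pixAll is an (N, 2) array of pixel coordinates:

import numpy as np

pixAll = np.arange(20).reshape(10, 2)   # hypothetical (row, col) pairs
filt = 4

mask = np.zeros(len(pixAll), dtype=bool)
mask[0::filt] = True                    # keep elements 0, filt, 2*filt, ...
mask[1::filt] = True                    # ...plus each one's neighbor
mask[-2], mask[-1] = True, True         # always keep the last two points
pix = pixAll[mask]                      # here, 6 of the 10 points survive
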
@@ -1262,7 +1290,16 @@

# Open smoothed trackline/range extent file
trkMeta = pd.read_csv(trkMetaFile)
trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
if cog:
trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
else:
# trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
# next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
# trkMeta = pd.concat([trkMeta, next], ignore_index=True)
isChunk = trkMeta['chunk_id_2']==chunk
isChunk.iloc[chunk+1] = True # also flag the next ping's row
trkMeta = trkMeta[isChunk].reset_index(drop=False)

pix_m = self.pixM # Get pixel size

# Get range (outer extent) coordinates [xR, yR] to transposed numpy arrays
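
A pattern that recurs throughout this file, isChunk.iloc[chunk+1] = True, widens each single-ping selection to two consecutive rows, presumably the minimum needed to define a geometry between neighboring pings. A sketch of the selection, assuming (as the diff suggests) that chunk_id_2 equals the row position:

import pandas as pd

df = pd.DataFrame({'chunk_id_2': range(5), 'ping': list('abcde')})
chunk = 2

isChunk = df['chunk_id_2'] == chunk   # boolean Series with one True
isChunk.iloc[chunk + 1] = True        # also take the following ping
sel = df[isChunk].reset_index(drop=False)
sel['chunk_id_2'].tolist()            # [2, 3]

Note the final ping has no successor row, so a caller would presumably stop one index short of the end.
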
30 changes: 29 additions & 1 deletion src/class_sonObj.py
Expand Up @@ -1909,6 +1909,7 @@ def _doSpdCor(self, chunk, lbl_set=1, spdCor=1, maxCrop=0, son=True, integer=Tru
# ======================================================================
def _getScanChunkSingle(self,
chunk,
cog=True,
filterIntensity = False,
remWater = False):
'''
@@ -1946,7 +1947,11 @@
sonMetaAll = pd.read_csv(self.sonMetaFile)

# Filter df by chunk
isChunk = sonMetaAll['chunk_id']==chunk
if cog:
isChunk = sonMetaAll['chunk_id']==chunk
else:
isChunk = sonMetaAll['chunk_id_2']==chunk
isChunk.iloc[chunk+1] = True # also flag the next ping's row
sonMeta = sonMetaAll[isChunk].reset_index()

# Update class attributes based on current chunk
@@ -1994,6 +1999,29 @@ def _getChunkID(self):

del self.sonMetaDF, df
return chunks

# ======================================================================
def _getChunkID_Update(self):
'''
Utility to load chunk IDs from the son object and return them in a list,
using each ping's row index as its chunk ID.
'''

# Load son metadata csv to df
self._loadSonMeta()

# # Get unique chunk id's
# df = self.sonMetaDF.groupby(['chunk_id', 'index']).size().reset_index().rename(columns={0:'count'})
# chunks = pd.unique(df['chunk_id']).astype(int)

# Use index as chunk id
df = self.sonMetaDF
chunks = df.index.values.astype(int)

df['chunk_id_2'] = chunks
self._saveSonMetaCSV(df)

del self.sonMetaDF, df
return chunks

# ======================================================================
def _addZero(self, chunk):
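
Taken together, _getChunkID_Update makes each ping its own chunk by writing the row index into the new chunk_id_2 column. A hypothetical driver sketch, not from this commit (assuming a rectObj/sonObj instance named son that exposes both methods):

son = ...  # a rectObj/sonObj instance, assumed already initialized
chunks = son._getChunkID_Update()         # [0, 1, ..., n_pings - 1]
for chunk in chunks[:-1]:                 # skip the final ping: no successor row
    son._rectSonParallel(chunk, cog=False)
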