
Commit

fixed tif crs error, analysis ready!
ksharonin committed Apr 15, 2024
1 parent a0835c1 commit c9be770
Showing 6 changed files with 5,250 additions and 191 deletions.
5 changes: 4 additions & 1 deletion Input_FEDS.py
@@ -190,9 +190,12 @@ def __set_api_polygons(self):

# set/to crs based on usr input
try:
df = df.set_crs(self._crs)
# TRY: set to 9311 and then convert
df = df.set_crs(4326)

except Exception as e:
logging.error(f'Encountered {e}, no FEDS geom found. Retry with different dates / region')
# df = df.to_crs(5070)
df = df.to_crs(self._crs)

# apply finalized fire perim: take highest indices of duplicate fire ids
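
The change above swaps the user-supplied CRS for a fixed EPSG:4326 declaration before converting. A minimal standalone sketch of that pattern, assuming a GeoPandas frame with no CRS attached; the function name, sample geometry, and EPSG:5070 target are illustrative, not taken from the repository:

import logging
import geopandas as gpd
from shapely.geometry import box

def to_analysis_crs(df, target_crs):
    """Label raw FEDS output as WGS84 lon/lat, then reproject to the analysis CRS."""
    try:
        # set_crs only declares what the coordinates already are; it does not transform them
        df = df.set_crs(4326)
    except Exception as e:
        logging.error(f"Encountered {e}; frame may already carry a CRS")
    # to_crs performs the actual coordinate transformation
    return df.to_crs(target_crs)

# illustrative usage with one dummy lon/lat polygon
sample = gpd.GeoDataFrame(geometry=[box(-120.0, 38.0, -119.5, 38.5)])
print(to_analysis_crs(sample, 5070).crs)  # EPSG:5070, the CONUS Albers hinted at by the commented-out to_crs(5070)
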
24 changes: 15 additions & 9 deletions Input_Reference.py
@@ -221,8 +221,8 @@ def __set_polygon_arcgis_online(self):
# manually move filtering due to bug
df_date = datetime.datetime.fromisoformat(self._usr_start)
df_year = df_date.year
df = gdf.set_crs(self._crs, allow_override=True)
df = gdf.to_crs(self._crs)
df = gdf.set_crs(4326, allow_override=True)
df = df.to_crs(self._crs)

# condition based on custom time col
# TODO: improve handling by making a special dict mapping for arcgis cases
@@ -275,7 +275,8 @@ def __set_polygon_arcgis_online(self):

gdf = gdf[gdf.geometry != None]
gdf = gdf[gdf.GIS_ACRES != 0]
gdf = gdf.set_crs(self._crs, allow_override=True)
gdf = gdf.set_crs(4326, allow_override=True)
gdf = gdf.to_crs(self._crs)

assert gdf.shape[0] != 0, "Invalid shape identified in ArcGIS API read"

@@ -298,8 +299,10 @@ def __set_polygon_arcgis_online(self):
# outcast non matches in year self._usr_start
gdf = gdf[gdf.DATE_CUR_STAMP.dt.year == int(self._usr_start[:4])]

gdf = gdf.set_crs(self._crs, allow_override=True)
# gdf = gdf.to_crs(self._crs)
gdf = gdf.set_crs(4326, allow_override=True)
# gdf = gdf.to_crs(5070)
gdf = gdf.to_crs(self._crs)


gdf['index'] = gdf.index

@@ -335,7 +338,7 @@ def filter_custom_local(self, df):
df_year = df_date.year

# crs management
df = gdf.set_crs(self._crs, allow_override=True)
gdf = gdf.set_crs(4326, allow_override=True)
df = gdf.to_crs(self._crs)

# geom validity
@@ -386,7 +389,8 @@ def filter_nifc_interagency_history_local(self, df):
# actions as docstring specifies
df = df[df.geometry != None]
df = df[df.GIS_ACRES != 0]
df = df.set_crs(self._crs, allow_override=True)
df = df.set_crs(4326, allow_override=True)
df = df.to_crs(self._crs)

if df.shape[0] == 0:
assert 1 == 0, "Not possible"
@@ -413,7 +417,8 @@ def filter_WFIGS_current_interagency_fire_perimeters(self, df):
"""
df_date = datetime.fromisoformat(self._usr_start)
df_year = df_date.year
df = df.set_crs(self._crs, allow_override=True)
df = df.set_crs(4326, allow_override=True)
df = df.to_crs(self._crs)

df['DATE_NOT_NONE'] = df.apply(lambda row : getattr(row, 'poly_PolygonDateTime') is not None, axis = 1)
df = df[df.DATE_NOT_NONE == True]
@@ -433,7 +438,8 @@ def filter_geomac(self, df):

df_date = datetime.fromisoformat(self._usr_start)
df_year = df_date.year
df = df.set_crs(self._crs, allow_override=True)
df = df.set_crs(4326, allow_override=True)
df = df.to_crs(self._crs)

df['DATE_NOT_NONE'] = df.apply(lambda row : getattr(row, 'perimeterdatetime') is not None, axis = 1)
df = df[df.DATE_NOT_NONE == True]
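
Every reference filter above repeats one idiom: drop null-geometry and zero-acre rows, force the frame to EPSG:4326 with allow_override=True, then reproject to the requested CRS. A rough, self-contained illustration of why the override flag matters; the toy frame, its values, and the EPSG:5070 target are assumptions for the example, while GIS_ACRES mirrors the attribute used in the diff:

import geopandas as gpd
from shapely.geometry import Point

# toy reference frame: one valid perimeter, one null geometry, one zero-acre row
gdf = gpd.GeoDataFrame(
    {"GIS_ACRES": [120.0, 50.0, 0.0]},
    geometry=[Point(-105.0, 40.0), None, Point(-104.0, 41.0)],
    crs=4269,  # pretend the source shipped labelled as NAD83
)

gdf = gdf[gdf.geometry.notna()]  # drop rows with no geometry
gdf = gdf[gdf.GIS_ACRES != 0]    # drop empty perimeters
# a plain set_crs(4326) would raise ValueError here because a CRS is already set;
# allow_override=True relabels the frame without touching the coordinates
gdf = gdf.set_crs(4326, allow_override=True)
gdf = gdf.to_crs(5070)           # reproject to the analysis CRS

print(len(gdf), gdf.crs)         # 1 EPSG:5070
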
88 changes: 17 additions & 71 deletions Output_Calculation.py
@@ -16,7 +16,7 @@
import logging
import csv
import rasterio
import rioxarray
import rioxarray as rio
import matplotlib.pyplot as plt
import pyproj

@@ -461,7 +461,7 @@ def tif_analysis(self, tif_path: str, req_calc: str, date_restrict = None):
mass_results = []

# rioxarray attempt
xds = rioxarray.open_rasterio(tif_path)
xds = rio.open_rasterio(tif_path)
xds_lonlat = xds.rio.reproject(dst_crs)

for pair in indices:
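
For orientation, the reprojection this hunk relies on is a one-liner in rioxarray. A minimal sketch, assuming dst_crs is an EPSG string such as "EPSG:4326"; the path is a placeholder:

import rioxarray as rio

xds = rio.open_rasterio("example.tif")       # placeholder GeoTIFF path
xds_lonlat = xds.rio.reproject("EPSG:4326")  # warp the grid into lon/lat
print(xds_lonlat.rio.crs, xds_lonlat.rio.bounds())
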
@@ -614,15 +614,12 @@ def write_to_csv(self):
ref_polygons = self._ref_input.polygons

# exp: hardcoded tif path
tif_path = "/projects/CONUS-Down/LF2020_SlpD_220_CONUS/Tif/LC20_SlpD_220.tif"
tif_path = '/projects/shared-buckets/gsfc_landslides/LANDFIRE/LF2022_FBFM40_220_CONUS/Tif/LC22_F40_220.tif'

# "/projects/CONUS-Down/LF2020_SlpD_220_CONUS/Tif/LC20_SlpD_220.tif"
# "/projects/shared-buckets/gsfc_landslides/LANDFIRE/LF2022_FBFM40_220_CONUS/Tif/LC22_F40_220.tif"

print("VERBOSE: OPEN RASTER")
xds = rioxarray.open_rasterio(tif_path)
# xds_lonlat = xds.rio.reproject(self._feds_input._crs)
print("VERBOSE: RASTER CRS")
xds_crs = xds.rio.crs
evt20 = rio.open_rasterio(tif_path)

with open(file_name, 'w', newline='') as csvfile:
# erase prev content
@@ -650,22 +647,6 @@ def write_to_csv(self):
writer.writeheader()
num_rows = len(calculations['index_pairs'])

# with rasterio.open(tif_path) as src:
# masked_tif, _ = mask(src, diff_area.geometry, crop=True)

# transform, width, height = calculate_default_transform(src.crs, self._feds_input._crs, src.width, src.height, *src.bounds)
# kwargs = src.meta.copy()
# kwargs.update({
# 'crs': self._feds_input._crs,
# 'transform': transform,
# 'width': width,
# 'height': height
# })

# read and reproject the GeoTIFF data
# reprojected_src = src.read(1, out_shape=(height, width), resampling=Resampling.nearest)


for i in range(num_rows):
# skip None values for write in
if all( value is None for value in
@@ -683,14 +664,16 @@ def write_to_csv(self):

else:
# poly extraction
print(f'VERBOSE: BOUNDS OF ALL FEDS SET: {feds_polygons.total_bounds}')

feds_poly = feds_polygons[feds_polygons['index'] == calculations['index_pairs'][i][0]]
ref_poly = ref_polygons[ref_polygons['index'] == calculations['index_pairs'][i][1]]
print(f'VERBOSE: BOUNDS OF SINGLE POLY: {feds_poly.geometry.bounds}')

# timestamp extract
feds_time = feds_poly.t.values[0]
ref_time = ref_poly['DATE_CUR_STAMP'].values[0]

# print(f"DEBUG type for feds time {type(feds_time)}")

# convert for abso
feds_time_int = datetime.strptime(feds_time, "%Y-%m-%dT%H:%M:%S")
ref_time_str = str(ref_time)
@@ -717,59 +700,22 @@ def write_to_csv(self):
####### TIF DATA ########

diff_area = feds_poly.symmetric_difference(ref_poly, align=False)
# reproject to tif crs
print("VERBOSE: PROJECT DIFF_AREA")
print(f"VERBOSE: OLD CRS: {diff_area.crs}")

## PLOT ##
fig, ax = plt.subplots(figsize=(15, 15))

# plot tif bounds
bounds = xds.rio.bounds()
minx, miny, maxx, maxy = bounds
rect = plt.Rectangle((minx, miny), maxx - minx, maxy - miny, linewidth=1, edgecolor='blue', facecolor='none', label='Raster Bounds')
ax.add_patch(rect)

feds_plot = diff_area.plot(ax=ax, color="red",edgecolor="black", linewidth=10, label="FEDS Fire Estimate")

# diff = self.transform_polygon(diff_area)
diff = diff_area.to_crs(xds_crs)
# diff = diff_area.to_crs(4269)

ref_plot = diff.plot(ax=ax, color="gold", edgecolor="black", linewidth=10, hatch='\\', alpha=0.7, label="Reference Nearest Date + Intersection")
ax.set_title("PROJECTION VISUAL", fontsize=17)
ax.set_xlabel("Longitude", fontsize=14)
ax.set_ylabel("Latitude", fontsize=14)
plt.grid(True)
plt.show()

## END PLOT ##

print(f"VERBOSE: NEW CRS: {diff.crs}")
print(f"VERBOSE: TIF CRS: {xds_crs}")
minx, miny, maxx, maxy = diff_area.to_crs(evt20.rio.crs).total_bounds

# gen mask
# OLD MASK
print("VERBOSE: MASK TIF")
# masked_tif = xds.rio.clip(diff_area)

minx, miny, maxx, maxy = diff.total_bounds

print('VERBOSE: GEOM BOUNDS')
print('VERBOSE: NEW DIFF_AREA BOUNDS')
print(minx, miny, maxx, maxy)

print('VERBOSE: TIF BOUNDS')
print(xds.rio.bounds())
masked_tif = xds.sel(x=slice(minx, maxx), y=slice(miny, maxy))
print(evt20.rio.bounds())

# append requested val generated from usr req
assert(masked_tif is not None)
lf_subset = evt20.sel(x=slice(minx,maxx), y=slice(maxy,miny)).squeeze()

# calculations
print("VERBOSE: CALCULATION EXTRACTION")
mean_tif = np.nanmean(masked_tif)
median_tif = np.nanmedian(masked_tif)
unique_vals = np.unique(masked_tif)
vals_use = lf_subset.values
mean_tif = np.mean(vals_use)
median_tif = np.median(vals_use)
unique_vals = np.unique(vals_use)

##############################

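The rewritten write_to_csv path reduces to: take the symmetric difference of the FEDS and reference perimeters, reproject it into the raster's CRS, slice the LANDFIRE grid to that bounding box, and summarize the pixel values. A hedged standalone sketch of that flow; the function name, nan-aware statistics, and commented usage are illustrative choices, not the commit's exact code:

import numpy as np
import rioxarray as rio

def summarize_raster_over_diff(feds_poly, ref_poly, tif_path):
    """Bounding-box summary of a GeoTIFF over the symmetric difference of two perimeters."""
    raster = rio.open_rasterio(tif_path)

    # area where the two perimeters disagree, expressed in the raster's CRS
    diff_area = feds_poly.symmetric_difference(ref_poly, align=False)
    minx, miny, maxx, maxy = diff_area.to_crs(raster.rio.crs).total_bounds

    # north-up rasters store y in descending order, hence slice(maxy, miny)
    subset = raster.sel(x=slice(minx, maxx), y=slice(maxy, miny)).squeeze()

    vals = subset.values
    return {
        "mean": float(np.nanmean(vals)),      # nan-aware in case nodata is mapped to NaN
        "median": float(np.nanmedian(vals)),
        "unique": np.unique(vals),
    }

# hypothetical usage against the LANDFIRE layer hardcoded in the commit
# stats = summarize_raster_over_diff(feds_poly, ref_poly,
#     '/projects/shared-buckets/gsfc_landslides/LANDFIRE/LF2022_FBFM40_220_CONUS/Tif/LC22_F40_220.tif')

A bounding-box slice also counts pixels outside the difference polygon itself; an exact clip (e.g. rioxarray's rio.clip, which the commented-out masked_tif line gestures at) would tighten the statistics, but the slice is what the commit ships.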

0 comments on commit c9be770
