Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes for uninitialized variables (snan testing) #579

Merged
merged 11 commits into from
Mar 25, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions cicecore/cicedynB/general/ice_flux.F90
Original file line number Diff line number Diff line change
Expand Up @@ -654,9 +654,7 @@ subroutine init_coupler_flux
enddo
enddo

#ifndef CICE_IN_NEMO
sst (:,:,:) = Tf(:,:,:) ! sea surface temp (C)
#endif
qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2)
hmix (:,:,:) = c20 ! ocean mixed layer depth (m)
hwater(:,:,:) = bathymetry(:,:,:) ! ocean water depth (m)
Expand Down
240 changes: 157 additions & 83 deletions cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90

Large diffs are not rendered by default.

226 changes: 154 additions & 72 deletions cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions cicecore/cicedynB/infrastructure/ice_blocks.F90
Original file line number Diff line number Diff line change
Expand Up @@ -252,8 +252,8 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, &
!*** set last physical point if padded domain

else if (j_global(j,n) == ny_global .and. &
j > all_blocks(n)%jlo .and. &
j < all_blocks(n)%jhi) then
j >= all_blocks(n)%jlo .and. &
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If I understand correctly, this change allows jlo=jhi, i.e. 1x1 domains, which weren't allowed before?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Correct. This is where the 1x1 blocks failed. The 1x1 and 2x2 are still producing different results from the other block sizes and decomps, but they are running and part of our test suite now.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just to elaborate a bit further. Before this fix, jhi was not set properly if the active block size was 1. It remained the default (ny_block - nghost), see line 194. This section of code is where the lo/hi indices are set for padded blocks. It worked fine as long as the block size was at least 2.

j < all_blocks(n)%jhi) then
all_blocks(n)%jhi = j ! last physical point in padded domain
endif
end do
Expand Down Expand Up @@ -300,8 +300,8 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, &
!*** last physical point in padded domain

else if (i_global(i,n) == nx_global .and. &
i > all_blocks(n)%ilo .and. &
i < all_blocks(n)%ihi) then
i >= all_blocks(n)%ilo .and. &
i < all_blocks(n)%ihi) then
all_blocks(n)%ihi = i
endif
end do
Expand Down
76 changes: 39 additions & 37 deletions cicecore/cicedynB/infrastructure/ice_grid.F90
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ module ice_grid
use ice_domain, only: blocks_ice, nblocks, halo_info, distrb_info, &
ew_boundary_type, ns_boundary_type, init_domain_distribution
use ice_fileunits, only: nu_diag, nu_grid, nu_kmt, &
get_fileunit, release_fileunit
get_fileunit, release_fileunit, flush_fileunit
use ice_gather_scatter, only: gather_global, scatter_global
use ice_read_write, only: ice_read, ice_read_nc, ice_read_global, &
ice_read_global_nc, ice_open, ice_open_nc, ice_close_nc
Expand Down Expand Up @@ -384,11 +384,9 @@ subroutine init_grid2
! T-grid cell and U-grid cell quantities
!-----------------------------------------------------------------

! tarea(:,:,:) = c0

!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -486,7 +484,7 @@ subroutine init_grid2
!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, &
!$OMP angle_0,angle_w,angle_s,angle_sw)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -642,7 +640,7 @@ subroutine popgrid
kmt(:,:,:) = c0
!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -785,7 +783,7 @@ subroutine popgrid_nc
kmt(:,:,:) = c0
!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -1104,7 +1102,7 @@ subroutine latlongrid

!$OMP PARALLEL DO PRIVATE(iblk,this_block,ilo,ihi,jlo,jhi,i,j)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -1198,15 +1196,9 @@ subroutine rectgrid
if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
file=__FILE__, line=__LINE__)

!$OMP PARALLEL DO PRIVATE(iblk,i,j)
do iblk = 1, nblocks
do j = 1, ny_block
do i = 1, nx_block
ANGLE(i,j,iblk) = c0 ! "square with the world"
enddo
enddo
enddo
!$OMP END PARALLEL DO
hm (:,:,:) = c0
Copy link
Contributor

@TillRasmussen TillRasmussen Mar 18, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it necessary to initialize to zero in all of the block (also valid for kmt) and angle?
Wouldn't it be more correct to update only from ilo to ihi and jlo to jhi, and then use ice_HaloUpdate to fill the halo?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are a few different grid initializations (popgrid, popgrid_nc, latlon, rectangular, etc), and they don't all initialize the variables in the same way. I believe zero-ing these out here is required for the rectangular grid option that is used for the box setups. It is not needed for the popgrid option that we use for our more standard gx3+ testing. It could be that a better solution is to make all the grid options behave in the same way, but it wasn't clear to me what those requirements should be, so I went this direction.

kmt(:,:,:) = c0
angle(:,:,:) = c0 ! "square with the world"

allocate(work_g1(nx_global,ny_global))

Expand Down Expand Up @@ -1396,7 +1388,7 @@ subroutine cpomgrid
kmt(:,:,:) = c0
!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -1636,11 +1628,10 @@ subroutine makemask
!-----------------------------------------------------------------

bm = c0
! uvm = c0

!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand All @@ -1663,12 +1654,19 @@ subroutine makemask
field_loc_center, field_type_scalar)
call ice_timer_stop(timer_bound)

!$OMP PARALLEL DO PRIVATE(iblk,i,j)
!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
do j = 1, ny_block
do i = 1, nx_block
tmask(i,j,iblk) = .false.
umask(i,j,iblk) = .false.
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
jhi = this_block%jhi

! needs to cover halo (no halo update for logicals)
tmask(:,:,iblk) = .false.
umask(:,:,iblk) = .false.
do j = jlo-nghost, jhi+nghost
do i = ilo-nghost, ihi+nghost
if ( hm(i,j,iblk) > p5) tmask(i,j,iblk) = .true.
if (uvm(i,j,iblk) > p5) umask(i,j,iblk) = .true.
enddo
Expand All @@ -1684,11 +1682,14 @@ subroutine makemask
tarean(:,:,iblk) = c0
tareas(:,:,iblk) = c0

do j = 1, ny_block
do i = 1, nx_block
do j = jlo,jhi
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are the ghost cells for these masks filled later?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

They are not, but the halos are never accessed, so there is no error when we initialize with snan. There are probably many arrays that have uninitialized memory, but that's fine. In many ways, that's what we want. We want to only initialize memory that we think is used. That way we can trap code (or learn more about our codes) when an snan actually triggers an error. If we initialized everything explicitly to zero just for the sake of doing so, the snan testing wouldn't tell us anything.

do i = ilo,ihi

if (ULAT(i,j,iblk) >= -puny) lmask_n(i,j,iblk) = .true. ! N. Hem.
if (ULAT(i,j,iblk) < -puny) lmask_s(i,j,iblk) = .true. ! S. Hem.
if (ULAT(i,j,iblk) >= -puny) then
lmask_n(i,j,iblk) = .true. ! N. Hem.
else
lmask_s(i,j,iblk) = .true. ! S. Hem.
endif

! N hemisphere area mask (m^2)
if (lmask_n(i,j,iblk)) tarean(i,j,iblk) = tarea(i,j,iblk) &
Expand Down Expand Up @@ -1743,7 +1744,7 @@ subroutine Tlatlon
!$OMP x1,y1,z1,x2,y2,z2,x3,y3,z3,x4,y4,z4, &
!$OMP tx,ty,tz,da)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -1915,7 +1916,7 @@ subroutine to_ugrid(work1,work2)

!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -2000,7 +2001,7 @@ subroutine to_tgrid(work1, work2)

!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -2073,7 +2074,7 @@ subroutine gridbox_corners

!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
do iblk = 1, nblocks
this_block = get_block(blocks_ice(iblk),iblk)
this_block = get_block(blocks_ice(iblk),iblk)
ilo = this_block%ilo
ihi = this_block%ihi
jlo = this_block%jlo
Expand Down Expand Up @@ -2400,8 +2401,9 @@ subroutine get_bathymetry
do iblk = 1, nblocks
do j = 1, ny_block
do i = 1, nx_block
k = min(nint(kmt(i,j,iblk)),nlevel)
if (k > puny) bathymetry(i,j,iblk) = depth(k)
k = nint(kmt(i,j,iblk))
if (k > nlevel) call abort_ice(subname//' kmt gt nlevel error')
if (k > 0) bathymetry(i,j,iblk) = depth(k)
enddo
enddo
enddo
Expand Down Expand Up @@ -2491,8 +2493,8 @@ subroutine get_bathymetry_popfile
do iblk = 1, nblocks
do j = 1, ny_block
do i = 1, nx_block
k = kmt(i,j,iblk)
if (k > nlevel) call abort_ice(subname//' kmt/nlevel error')
k = nint(kmt(i,j,iblk))
if (k > nlevel) call abort_ice(subname//' kmt gt nlevel error')
if (k > 0) bathymetry(i,j,iblk) = depth(k)
enddo
enddo
Expand Down
5 changes: 5 additions & 0 deletions cicecore/cicedynB/infrastructure/ice_restoring.F90
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,11 @@ subroutine ice_HaloRestore_init
vsnon_rest(nx_block,ny_block,ncat,max_blocks), &
trcrn_rest(nx_block,ny_block,ntrcr,ncat,max_blocks))

aicen_rest(:,:,:,:) = c0
vicen_rest(:,:,:,:) = c0
vsnon_rest(:,:,:,:) = c0
trcrn_rest(:,:,:,:,:) = c0

!-----------------------------------------------------------------------
! initialize
! halo cells have to be filled manually at this stage
Expand Down
3 changes: 2 additions & 1 deletion cicecore/drivers/standalone/cice/CICE_RunMod.F90
Original file line number Diff line number Diff line change
Expand Up @@ -628,11 +628,12 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, &

real (kind=dbl_kind) :: &
puny, & !
Lsub, & !
rLsub ! 1/Lsub

character(len=*), parameter :: subname = '(sfcflux_to_ocn)'

call icepack_query_parameters(puny_out=puny)
call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub)
call icepack_warnings_flush(nu_diag)
if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
file=__FILE__, line=__LINE__)
Expand Down
8 changes: 4 additions & 4 deletions cicecore/shared/ice_init_column.F90
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,7 @@ subroutine init_shortwave
do i = ilo, ihi

if (aicen(i,j,n,iblk) > puny) then

alvdf(i,j,iblk) = alvdf(i,j,iblk) &
+ alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk)
alidf(i,j,iblk) = alidf(i,j,iblk) &
Expand All @@ -417,7 +417,7 @@ subroutine init_shortwave
+ alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk)
alidr(i,j,iblk) = alidr(i,j,iblk) &
+ alidrn(i,j,n,iblk)*aicen(i,j,n,iblk)

netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) &
+ swvdf(i,j,iblk) + swidf(i,j,iblk)
if (netsw > puny) then ! sun above horizon
Expand All @@ -428,12 +428,12 @@ subroutine init_shortwave
albpnd(i,j,iblk) = albpnd(i,j,iblk) &
+ albpndn(i,j,n,iblk)*aicen(i,j,n,iblk)
endif

apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) &
+ apeffn(i,j,n,iblk)*aicen(i,j,n,iblk)
snowfrac(i,j,iblk) = snowfrac(i,j,iblk) &
+ snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk)

endif ! aicen > puny

enddo ! i
Expand Down
Loading