Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update velocity and bedmachine in shop #1753

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions docs/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,8 @@ OGGM Shop
shop.millan22.thickness_to_gdir
shop.millan22.velocity_to_gdir
shop.millan22.compile_millan_statistics
shop.bedmachine.bedmachine_to_gdir
shop.bedmachine.compile_bedmachine_statistics
shop.rgitopo.init_glacier_directories_from_rgitopo
shop.rgitopo.select_dem_from_dir
shop.rgitopo.dem_quality_check
Expand Down
22 changes: 20 additions & 2 deletions oggm/cli/prepro_levels.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,8 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
select_source_from_dir=None, keep_dem_folders=False,
add_consensus_thickness=False, add_itslive_velocity=False,
add_millan_thickness=False, add_millan_velocity=False,
add_hugonnet_dhdt=False, add_glathida=False,
add_hugonnet_dhdt=False, add_bedmachine=False,
add_glathida=False,
start_level=None, start_base_url=None, max_level=5,
logging_level='WORKFLOW',
dynamic_spinup=False, err_dmdtda_scaling_factor=0.2,
Expand Down Expand Up @@ -159,6 +160,9 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
add_hugonnet_dhdt : bool
adds (reprojects) the hugonnet dhdt maps to the glacier
directories. With elev_bands=True, the data will also be binned.
add_bedmachine : bool
adds (reprojects) the bedmachine ice thickness maps to the glacier
directories. With elev_bands=True, the data will also be binned.
add_glathida : bool
adds (reprojects) the glathida thickness data to the glacier
directories. Data points are stored as csv files.
Expand Down Expand Up @@ -481,6 +485,10 @@ def _time_log():
from oggm.shop.hugonnet_maps import hugonnet_to_gdir
workflow.execute_entity_task(hugonnet_to_gdir, gdirs)
bin_variables.append('hugonnet_dhdt')
if add_bedmachine:
from oggm.shop.bedmachine import bedmachine_to_gdir
workflow.execute_entity_task(bedmachine_to_gdir, gdirs)
bin_variables.append('bedmachine_ice_thickness')
if add_glathida:
from oggm.shop.glathida import glathida_to_gdir
workflow.execute_entity_task(glathida_to_gdir, gdirs)
Expand Down Expand Up @@ -550,6 +558,10 @@ def _time_log():
from oggm.shop.hugonnet_maps import compile_hugonnet_statistics
opath = os.path.join(sum_dir, 'hugonnet_statistics_{}.csv'.format(rgi_reg))
compile_hugonnet_statistics(gdirs, path=opath)
if add_bedmachine:
from oggm.shop.bedmachine import compile_bedmachine_statistics
opath = os.path.join(sum_dir, 'bedmachine_statistics_{}.csv'.format(rgi_reg))
compile_bedmachine_statistics(gdirs, path=opath)
if add_glathida:
from oggm.shop.glathida import compile_glathida_statistics
opath = os.path.join(sum_dir, 'glathida_statistics_{}.csv'.format(rgi_reg))
Expand Down Expand Up @@ -899,7 +911,12 @@ def parse_args(args):
'With --elev-bands, the data will also be '
'binned.')
parser.add_argument('--add-hugonnet-dhdt', nargs='?', const=True, default=False,
help='adds (reprojects) the millan dhdt '
help='adds (reprojects) the hugonnet dhdt '
'maps to the glacier directories. '
'With --elev-bands, the data will also be '
'binned.')
parser.add_argument('--add-bedmachine', nargs='?', const=True, default=False,
help='adds (reprojects) the Bedmachine ice thickness '
'maps to the glacier directories. '
'With --elev-bands, the data will also be '
'binned.')
Expand Down Expand Up @@ -995,6 +1012,7 @@ def parse_args(args):
add_itslive_velocity=args.add_itslive_velocity,
add_millan_velocity=args.add_millan_velocity,
add_hugonnet_dhdt=args.add_hugonnet_dhdt,
add_bedmachine=args.add_bedmachine,
add_glathida=args.add_glathida,
dynamic_spinup=dynamic_spinup,
err_dmdtda_scaling_factor=args.err_dmdtda_scaling_factor,
Expand Down
2 changes: 1 addition & 1 deletion oggm/core/gis.py
Original file line number Diff line number Diff line change
Expand Up @@ -2087,7 +2087,7 @@ def rgi7g_to_complex(gdir, rgi7g_file=None, rgi7c_to_g_links=None):
if vn in nc.variables:
v = nc.variables[vn]
else:
v = nc.createVariable(vn, 'i2', ('y', 'x', ))
v = nc.createVariable(vn, 'i4', ('y', 'x', ))
v.units = '-'
v.long_name = 'Sub-entities glacier mask (number is index)'
v[:] = mask
142 changes: 142 additions & 0 deletions oggm/shop/bedmachine.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
import logging
import warnings
from packaging.version import Version

import numpy as np
import pandas as pd
import xarray as xr
import os

try:
import rasterio
from rasterio.warp import reproject, Resampling, calculate_default_transform
from rasterio import MemoryFile
try:
# rasterio V > 1.0
from rasterio.merge import merge as merge_tool
except ImportError:
from rasterio.tools.merge import merge as merge_tool
except ImportError:
pass


from oggm import utils, cfg

# Module logger
log = logging.getLogger(__name__)

# NSIDC download locations for the BedMachine products.
# Both require NASA Earthdata (urs.earthdata.nasa.gov) credentials.
aa_base_url = ('https://n5eil01u.ecs.nsidc.org/MEASURES/'
               'NSIDC-0756.003/1970.01.01/BedMachineAntarctica-v3.nc')
grl_base_url = ('https://n5eil01u.ecs.nsidc.org/ICEBRIDGE/IDBMG4.005/'
                '1993.01.01/BedMachineGreenland-v5.nc')


@utils.entity_task(log, writes=['gridded_data'])
def bedmachine_to_gdir(gdir):
    """Add the Bedmachine ice thickness maps to this glacier directory.

    For Antarctica: BedMachineAntarctica-v3.nc
    For Greenland: BedMachineGreenland-v5.nc

    The data is reprojected (linear interpolation) onto the glacier grid
    and stored as ``bedmachine_ice_thickness`` in ``gridded_data``.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process

    Raises
    ------
    NotImplementedError
        if the glacier is not in RGI region 05 (Greenland) or
        19 (Antarctica), the only regions covered by BedMachine.
    """

    if gdir.rgi_region == '19':
        file_url = aa_base_url
    elif gdir.rgi_region == '05':
        file_url = grl_base_url
    else:
        raise NotImplementedError('Bedmachine data not available '
                                  f'for this region: {gdir.rgi_region}')

    # NASA Earthdata login is required for the NSIDC servers
    file_local = utils.download_with_authentication(file_url,
                                                    'urs.earthdata.nasa.gov')

    with xr.open_dataset(file_local) as ds:
        # salem needs the projection info as an attribute
        ds.attrs['pyproj_srs'] = ds.proj4

        # Subset to the glacier domain before interpolating
        # (margin of 10 pixels for safe linear interpolation at the edges)
        x0, x1, y0, y1 = gdir.grid.extent_in_crs(ds.proj4)
        dsroi = ds.salem.subset(corners=((x0, y0), (x1, y1)), crs=ds.proj4, margin=10)
        thick = gdir.grid.map_gridded_data(dsroi['thickness'].data,
                                           grid=dsroi.salem.grid,
                                           interp='linear')
        # Ice-free pixels are set to NaN (np.NaN was removed in NumPy 2.0,
        # use the lowercase np.nan alias)
        thick[thick <= 0] = np.nan

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:

        vn = 'bedmachine_ice_thickness'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice thickness from BedMachine'
        v.long_name = ln
        v.data_source = file_url
        v[:] = thick.data


@utils.entity_task(log)
def bedmachine_statistics(gdir):
    """Gather statistics about the Bedmachine data interpolated to this glacier.

    Returns a dict with the glacier identifiers and, when gridded
    Bedmachine data is available, the covered area, the fraction of the
    gridded glacier area that is covered, and the ice volume.
    """

    # Defaults reported when no Bedmachine data is available for this glacier
    stats = {
        'rgi_id': gdir.rgi_id,
        'rgi_region': gdir.rgi_region,
        'rgi_subregion': gdir.rgi_subregion,
        'rgi_area_km2': gdir.rgi_area_km2,
        'bedmachine_area_km2': 0,
        'bedmachine_perc_cov': 0,
        'bedmachine_vol_km3': np.nan,
    }

    try:
        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
            # Grid-cell area in km2 (dx is in m)
            cell_km2 = gdir.grid.dx ** 2 * 1e-6
            thick = ds['bedmachine_ice_thickness'].where(ds['glacier_mask'], np.nan).load()
            glacier_km2 = ds['glacier_mask'].sum() * cell_km2
            covered_km2 = float((~thick.isnull()).sum() * cell_km2)
            stats['bedmachine_area_km2'] = covered_km2
            # Coverage relative to the gridded (not RGI) glacier area
            stats['bedmachine_perc_cov'] = float(covered_km2 / glacier_km2)
            stats['bedmachine_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
    except (FileNotFoundError, AttributeError, KeyError):
        # No gridded data (or no bedmachine variable): keep the defaults
        pass

    return stats


@utils.global_task(log)
def compile_bedmachine_statistics(gdirs, filesuffix='', path=True):
    """Gather as much statistics as possible about a list of glaciers.

    It can be used to do result diagnostics and other stuffs.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    filesuffix : str
        add suffix to output file
    path : str, bool
        Set to "True" in order to store the info in the working directory
        Set to a path to store the file to your chosen location
    """
    from oggm.workflow import execute_entity_task

    # Collect one stats dict per glacier (runs in parallel if configured)
    rows = execute_entity_task(bedmachine_statistics, gdirs)
    df = pd.DataFrame(rows).set_index('rgi_id')

    if path:
        if path is True:
            # Default location: the working directory
            fname = 'bedmachine_statistics' + filesuffix + '.csv'
            df.to_csv(os.path.join(cfg.PATHS['working_dir'], fname))
        else:
            df.to_csv(path)

    return df
3 changes: 2 additions & 1 deletion oggm/shop/bedtopo.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,9 +97,10 @@ def consensus_statistics(gdir):
try:
with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
thick = ds['consensus_ice_thickness'].where(ds['glacier_mask'], np.nan).load()
gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
d['consensus_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
d['consensus_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
d['consensus_perc_cov'] = float(d['consensus_area_km2'] / gdir.rgi_area_km2)
d['consensus_perc_cov'] = float(d['consensus_area_km2'] / gridded_area)
except (FileNotFoundError, AttributeError, KeyError):
pass

Expand Down
3 changes: 2 additions & 1 deletion oggm/shop/cook23.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,12 +133,13 @@ def cook23_statistics(gdir):
try:
with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
thick = ds['cook23_thk'].where(ds['glacier_mask'], np.nan).load()
gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
with warnings.catch_warnings():
# For operational runs we ignore the warnings
warnings.filterwarnings('ignore', category=RuntimeWarning)
d['cook23_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
d['cook23_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
d['cook23_perc_cov'] = float(d['cook23_area_km2'] / gdir.rgi_area_km2)
d['cook23_perc_cov'] = float(d['cook23_area_km2'] / gridded_area)

except (FileNotFoundError, AttributeError, KeyError):
pass
Expand Down
12 changes: 2 additions & 10 deletions oggm/shop/hugonnet_maps.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,6 @@
except ImportError:
pass

try:
import salem
except ImportError:
pass

try:
import geopandas as gpd
except ImportError:
pass

from oggm import utils, cfg

Expand Down Expand Up @@ -224,8 +215,9 @@ def hugonnet_statistics(gdir):
try:
with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
dhdt = ds['hugonnet_dhdt'].where(ds['glacier_mask'], np.nan).load()
gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
d['hugonnet_area_km2'] = float((~dhdt.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
d['hugonnet_perc_cov'] = float(d['hugonnet_area_km2'] / gdir.rgi_area_km2)
d['hugonnet_perc_cov'] = float(d['hugonnet_area_km2'] / gridded_area)
with warnings.catch_warnings():
# This can trigger an empty mean warning
warnings.filterwarnings("ignore", category=RuntimeWarning)
Expand Down
Loading