cable_mpimaster.F90 Source File


This file depends on the following source files (direct dependencies):

bgcdriver.F90, cable_carbon.F90, cable_checks.F90, cable_climate.F90, cable_common.F90, cable_cru_TRENDY.F90, cable_define_types.F90, cable_driver_common.F90, cable_input.F90, cable_iovars.F90, cable_LUC_EXPT.F90, cable_mpicommon.F90, cable_output.F90, cable_phys_constants_mod.F90, cable_plume_mip.F90, cable_write.F90, casa_cable.F90, casa_dimension.F90, casa_inout.F90, casa_ncdf.F90, casa_offline_inout.F90, casa_param.F90, casa_phenology.F90, casa_variable.F90, cbl_model_driver_offline.F90, landuse3.F90, landuse_constant.F90, POP.F90, pop_constants.F90, pop_def.F90, pop_mpi.F90, pop_types.F90, POPLUC.F90

Source Code

!==============================================================================
! This source code is part of the
! Australian Community Atmosphere Biosphere Land Exchange (CABLE) model.
! This work is licensed under the CSIRO Open Source Software License
! Agreement (variation of the BSD / MIT License).
!
! You may not use this file except in compliance with this License.
! A copy of the License (CSIRO_BSD_MIT_License_v2.0_CABLE.txt) is located
! in each directory containing CABLE code.
!
! ==============================================================================
! Purpose: Offline driver for the MPI master process in a CABLE global run
!
! Contact: Bernard.Pak@csiro.au
!
! History: Since 1.4b: capability to run global offline (ncciy = YEAR);
!          inclusion of call to CASA-CNP (icycle > 0);
!          soil_snow_type is now ssnow (instead of ssoil)
!
!          MPI wrapper developed by Maciej Golebiewski (2012)
!          Modified from cable_serial.F90 in CABLE-2.0_beta r171 by B Pak
!
! ==============================================================================
! Uses:           mpi
!                 cable_mpicommon
!                 cable_def_types_mod
!                 cable_IO_vars_module
!                 cable_common_module
!                 cable_input_module
!                 cable_output_module
!                 cable_cbm_module
!                 casadimension
!                 casavariable
!                 phenvariable
!                 casa_cable
!                 casa_inout_module
!
! CALLs:       point2constants
!              open_met_file
!              load_parameters
!              open_output_file
!              get_met_data
!              write_output
!              casa_poolout
!              casa_fluxout
!              create_restart
!              close_met_file
!              close_output_file
!              prepareFiles
!              find_extents
!              master_decomp
!              master_cable_params
!              master_casa_params
!              master_intypes
!              master_outtypes
!              master_casa_types
!              master_restart_types
!              master_send_input
!              master_receive
!              master_end
!              master_casa_dump_types
!              master_casa_LUC_types
!
! input  file: [SiteName].nc
!              poolcnpIn[SiteName].csv -- for CASA-CNP only
!              gridinfo_CSIRO_1x1.nc
!              def_veg_params.txt
!              def_soil_params.txt -- nearly redundant, can be switched on
!              restart_in.nc -- not strictly required
!
! output file: log_cable.txt
!              out_cable.nc
!              restart_out.nc
!              poolcnpOut.csv -- from CASA-CNP
!==============================================================================
MODULE cable_mpimaster

  USE cable_driver_common_mod, ONLY : &
    vegparmnew,                       &
    spinup,                           &
    spincasa,                         &
    CASAONLY,                         &
    l_landuse,                        &
    delsoilM,                         &
    delsoilT,                         &
    delgwM,                           &
    LALLOC,                           &
    prepareFiles,                     &
    renameFiles,                      &
    LUCdriver
  USE cable_mpicommon
  USE cable_IO_vars_module, ONLY : NO_CHECK
  USE casa_cable
  USE casa_inout_module
  USE cable_checks_module, ONLY: constant_check_range

  IMPLICIT NONE

  SAVE

  PRIVATE

  ! number of workers; set in master_decomp
  INTEGER :: wnp

  ! TODO: m3d_t, mat_t and vec_t to be factored out from here and from
  ! master_outtypes
  ! MPI: slices of 3D arrays
  INTEGER, ALLOCATABLE, DIMENSION(:,:) :: m3d_t
  ! MPI: slices of matrices (2D)
  INTEGER, ALLOCATABLE, DIMENSION(:,:) :: mat_t

  ! MPI: parts of vectors (1D)
  ! MPI: vec_t dimension is wnp, as each worker gets a single hindexed
  ! type with nvec blocks
  INTEGER, ALLOCATABLE, DIMENSION(:) :: vec_t

  ! MPI derived datatype handles for sending input data to the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: inp_ts

  ! MPI derived datatype handles for receiving output from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: recv_ts

  ! master's struct for receiving restart data from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: restart_ts

  ! CASA related derived types

  ! MPI derived datatype handles for receiving casa results from the workers
  ! and restart values
  INTEGER, ALLOCATABLE, DIMENSION(:) :: casa_ts

  ! MPI derived datatype handles for send/receiving casa dump values from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: casa_dump_ts

  ! MPI derived datatype handles for send/receiving casa pool values (needed for LUC)
  !  from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: casa_LUC_ts

  !CLN  ! MPI derived datatype handles for receiving casa restart values from the workers
  !CLN  INTEGER, ALLOCATABLE, DIMENSION(:) :: casa_restart_ts

  ! climate derived type
  INTEGER, ALLOCATABLE, DIMENSION(:) :: climate_ts

  ! POP related derived types

  ! MPI derived datatype handles for receiving POP results from the workers
  INTEGER :: pop_ts

  ! MPI: isend request array for scattering input data to the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: inp_req
  ! MPI: isend status array for scattering input data to the workers
  INTEGER, ALLOCATABLE, DIMENSION(:,:) :: inp_stats

  ! MPI: irecv request array for gathering results from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:) :: recv_req
  ! MPI: irecv status array for gathering results from the workers
  INTEGER, ALLOCATABLE, DIMENSION(:,:) :: recv_stats

  ! MPI: landpoints decomposition; global info used by the master process
  TYPE(lpdecomp_t), ALLOCATABLE, DIMENSION(:) :: wland

  PUBLIC :: mpidrv_master

CONTAINS

  SUBROUTINE mpidrv_master (comm, trunk_sumbal, dels, koffset, kend, PLUME, CRU)

    USE mpi

    USE cable_def_types_mod
    USE cable_IO_vars_module, ONLY: logn,gswpfile,ncciy,leaps,globalMetfile, &
         output,check,&
         patch_type,landpt,&
         timeunits, output, &
         calendar
    USE cable_common_module,  ONLY: ktau_gl, kend_gl, knode_gl, cable_user,     &
         cable_runtime, fileName,            &
         CurYear,    &
         IS_LEAPYEAR, calcsoilalbedo,                &
         kwidth_gl
    USE casa_ncdf_module, ONLY: is_casa_time
    ! physical constants
    USE cable_phys_constants_mod, ONLY : CTFRZ   => TFRZ
    USE cable_phys_constants_mod, ONLY : CEMLEAF => EMLEAF
    USE cable_phys_constants_mod, ONLY : CEMSOIL => EMSOIL
    USE cable_phys_constants_mod, ONLY : CSBOLTZ => SBOLTZ
    USE cable_input_module,   ONLY: open_met_file,load_parameters,              &
         get_met_data,close_met_file
    USE cable_output_module,  ONLY: create_restart,open_output_file,            &
         write_output,close_output_file
    USE cable_write_module,   ONLY: nullify_write
    USE cable_cbm_module
    USE cable_climate_mod


    ! modules related to CASA-CNP
    USE casadimension,        ONLY: icycle,mplant,mlitter,msoil,mwood
    USE casavariable,         ONLY: casafile, casa_biome, casa_pool, casa_flux,  &
         casa_met, casa_balance, zero_sum_casa, update_sum_casa
    USE phenvariable,         ONLY: phen_variable
    USE casa_cable
    USE casa_inout_module

    !CLN added
    ! modules related to POP
    USE POP_Types,            ONLY: POP_TYPE
    USE POP_Constants,        ONLY: HEIGHT_BINS, NCOHORT_MAX

    ! LUC_EXPT only
    USE CABLE_LUC_EXPT, ONLY: LUC_EXPT_TYPE, LUC_EXPT_INIT
    USE POPLUC_Types, ONLY : POPLUC_Type
    USE POPLUC_Module, ONLY:  WRITE_LUC_OUTPUT_NC, WRITE_LUC_OUTPUT_GRID_NC, &
         POP_LUC_CASA_transfer,  WRITE_LUC_RESTART_NC, POPLUC_set_patchfrac

    ! PLUME-MIP only
    USE CABLE_PLUME_MIP,      ONLY: PLUME_MIP_TYPE, PLUME_MIP_GET_MET,&
         PLUME_MIP_INIT
    USE CABLE_CRU,            ONLY: CRU_TYPE, CRU_GET_SUBDIURNAL_MET

    USE landuse_constant,     ONLY: mstate,mvmax,mharvw
    USE landuse_variable
    USE bgcdriver_mod, ONLY : bgcdriver
    USE casa_offline_inout_module, ONLY : WRITE_CASA_RESTART_NC, WRITE_CASA_OUTPUT_NC
    IMPLICIT NONE

    ! MPI:
    INTEGER               :: comm ! MPI communicator for comms with the workers
    DOUBLE PRECISION, INTENT(IN) :: trunk_sumbal
      !! Reference value for quasi-bitwise reproducibility checks.
    REAL, INTENT(INOUT) :: dels !! Time step size in seconds
    INTEGER, INTENT(INOUT) :: koffset !! Timestep to start at
    INTEGER, INTENT(INOUT) :: kend !! No. of time steps in run
    TYPE(PLUME_MIP_TYPE), INTENT(IN) :: PLUME
    TYPE(CRU_TYPE), INTENT(IN) :: CRU

    ! timing variables
    INTEGER, PARAMETER ::  kstart = 1   ! start of simulation

    INTEGER        ::                                                           &
         ktau,       &  ! incremented each timestep; resets if spinning up
         ktau_tot = 0,   &  ! total timesteps taken by the model; NOT reset when spinning up
                                !CLN      kstart = 1, &  ! timestep to start at
         ktauday,    &  ! day counter for CASA-CNP
         idoy,       &  ! day of year (1:365) counter for CASA-CNP
         nyear,      &  ! year counter for CASA-CNP
         ctime = 0,  &  ! day count for casacnp
         YYYY,       &  !
         LOY,        &  ! Length of Year
         maxdiff(2)     ! location of maximum in convergence test

    CHARACTER :: dum*9, str1*9, str2*9, str3*9  ! dummy char for fileName generation

    ! CABLE variables
    TYPE (met_type)       :: met     ! met input variables: see below for imet in MPI variables
    TYPE (air_type)       :: air     ! air property variables
    TYPE (canopy_type)    :: canopy  ! vegetation variables
    TYPE (radiation_type) :: rad     ! radiation variables
    TYPE (roughness_type) :: rough   ! roughness variables
    TYPE (balances_type)  :: bal     ! energy and water balance variables
    TYPE (soil_snow_type) :: ssnow   ! soil and snow variables
    TYPE (climate_type)   :: climate     ! climate variables

    ! CABLE parameters
    TYPE (soil_parameter_type) :: soil ! soil parameters
    TYPE (veg_parameter_type)  :: veg  ! vegetation parameters: see below for iveg in MPI variables

    TYPE (sum_flux_type)  :: sum_flux ! cumulative flux variables
    TYPE (bgc_pool_type)  :: bgc  ! carbon pool variables

    ! CASA-CNP variables
    TYPE (casa_biome)     :: casabiome
    TYPE (casa_pool)      :: casapool
    TYPE (casa_flux)      :: casaflux
    TYPE (casa_pool)      :: sum_casapool
    TYPE (casa_flux)      :: sum_casaflux
    TYPE (casa_met)       :: casamet
    TYPE (casa_balance)   :: casabal
    TYPE (phen_variable)  :: phen
    TYPE (POP_TYPE)       :: POP
    TYPE(POPLUC_TYPE) :: POPLUC
    TYPE (LUC_EXPT_TYPE) :: LUC_EXPT
    TYPE (landuse_mp)     :: lucmp
    CHARACTER             :: cyear*4
    CHARACTER             :: ncfile*99

    LOGICAL, SAVE           :: &
         spinConv      = .FALSE., & ! has spinup converged?
         CALL1         = .TRUE.

    ! temporary storage for soil moisture/temp. in spin up mode
    REAL, ALLOCATABLE, DIMENSION(:,:)  :: &
         soilMtemp,                         &
         soilTtemp

    REAL, ALLOCATABLE, DIMENSION(:) :: &
         GWtemp

    ! MPI:
    TYPE (met_type)       :: imet  ! read ahead met input variables
    TYPE (veg_parameter_type) :: iveg  ! MPI read ahead vegetation parameters
    LOGICAL :: loop_exit     ! MPI: exit flag for bcast to workers
    INTEGER :: iktau    ! read ahead index of time step = 1 ..  kend
    INTEGER :: oktau    ! ktau = 1 ..  kend for output
    INTEGER :: icomm ! separate dupes of MPI communicator for send and recv
    INTEGER :: ocomm ! separate dupes of MPI communicator for send and recv
    INTEGER :: ierr
    INTEGER :: rank, off, cnt

    DOUBLE PRECISION, SAVE ::                                                         &
         new_sumbal   = 0.0, &
         new_sumfpn   = 0.0, &
         new_sumfe    = 0.0

    INTEGER :: count_bal = 0
    INTEGER :: nkend=0

    INTEGER :: kk,m,np,ivt
    INTEGER, PARAMETER :: mloop = 30   ! CASA-CNP PreSpinup loops
    REAL    :: etime

! for landuse
    integer     mlon,mlat
    real(r_2), dimension(:,:,:),   allocatable,  save  :: luc_atransit
    real(r_2), dimension(:,:),     allocatable,  save  :: luc_fharvw
    real(r_2), dimension(:,:,:),   allocatable,  save  :: luc_xluh2cable
    real(r_2), dimension(:),       allocatable,  save  :: arealand
    integer,   dimension(:,:),     allocatable,  save  :: landmask
    integer,   dimension(:),       allocatable,  save  :: cstart,cend,nap  
    real(r_2), dimension(:,:,:),   allocatable,  save  :: patchfrac_new    



    ! END header

    ! outer loop - spinup loop no. ktau_tot :
    ktau     = 0
    SPINLOOP:DO
      YEARLOOP: DO YYYY= CABLE_USER%YearStart,  CABLE_USER%YearEnd

        CurYear = YYYY

        !ccc Set the calendar attribute: depends on the value of `leaps`,
        ! which itself depends on the MetType and is set during initialisation.
        calendar = "noleap"
        LOY = 365
        IF ( leaps ) THEN
          calendar = "standard"
        ENDIF
        IF ( leaps .AND. IS_LEAPYEAR( YYYY ) ) THEN
          LOY = 366
        ENDIF
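        ! Example (informational only): with leaps = .TRUE. and YYYY = 2000
        ! (a leap year), calendar = "standard" and LOY = 366; with
        ! leaps = .FALSE. every year uses calendar = "noleap" and LOY = 365.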

        SELECT CASE (TRIM(cable_user%MetType))
        CASE ('gswp')
          ncciy = CurYear

          WRITE(*,*) 'Looking for global offline run info.'
          CALL prepareFiles(ncciy)
          CALL open_met_file( dels, koffset, kend, spinup, CTFRZ )

        CASE ('plum')
          ! PLUME experiment setup using WATCH
          IF ( .NOT. PLUME%LeapYears ) LOY = 365
          kend = NINT(24.0*3600.0/dels) * LOY

        CASE ('cru')
          ! TRENDY experiment using CRU-NCEP
          LOY = 365
          kend = NINT(24.0*3600.0/dels) * LOY

        CASE ('gswp3')
          ncciy = CurYear
          WRITE(*,*) 'Looking for global offline run info.'
          CALL open_met_file( dels, koffset, kend, spinup, CTFRZ )

        CASE ('site')
          STOP 'MetType "site" can only be used in serial'

        CASE DEFAULT
          IF ( globalMetfile%l_gpcc ) THEN
            ncciy = CurYear
            WRITE(*,*) 'Looking for global offline run info.'
            CALL open_met_file( dels, koffset, kend, spinup, CTFRZ )

          ENDIF
        END SELECT

        CALL MPI_Bcast (kend, 1, MPI_INTEGER, 0, comm, ierr)

        ! some things (e.g. CASA-CNP) only need to be done once per day
        ktauday=INT(24.0*3600.0/dels)
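        ! Worked example: for a typical half-hourly run dels = 1800.0 s, so
        ! ktauday = INT(24.0*3600.0/1800.0) = 48 steps per day; for 3-hourly
        ! met forcing (dels = 10800.0 s) ktauday = 8.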

        ! Check where parameters and initialisations should be loaded from.
        ! If they can be found in either the met file or restart file, they will
        ! be loaded from there, with the met file taking precedence. Otherwise,
        ! they are chosen from a coarse global grid of veg and soil types, based
        ! on the lat/lon coordinates. CABLE's main variables are also allocated here.
        IF ( CALL1 ) THEN

          IF (cable_user%POPLUC) THEN
            CALL LUC_EXPT_INIT (LUC_EXPT)
          ENDIF
          ! vh_js !
          CALL load_parameters( met, air, ssnow, veg,climate,bgc, &
               soil, canopy, rough, rad, sum_flux, &
               bal, logn, vegparmnew, casabiome, casapool, &
               casaflux, sum_casapool, sum_casaflux, &
               casamet, casabal, phen, POP, spinup, &
               CEMSOIL, CTFRZ, LUC_EXPT, POPLUC )

          IF (check%ranges /= NO_CHECK) THEN
            WRITE (*, *) "Checking parameter ranges"
            CALL constant_check_range(soil, veg, 0, met)
          END IF

          IF (CABLE_USER%POPLUC .AND. TRIM(CABLE_USER%POPLUC_RunType) .EQ. 'static') &
            CABLE_USER%POPLUC= .FALSE.

          ! Open output file:
          IF (.NOT.CASAONLY) THEN
            IF ( TRIM(filename%out) .EQ. '' ) THEN
              IF ( CABLE_USER%YEARSTART .GT. 0 ) THEN
                WRITE( dum, FMT="(I4,'_',I4)")CABLE_USER%YEARSTART, &
                  CABLE_USER%YEAREND
                filename%out = TRIM(filename%path)//'/'//&
                  TRIM(cable_user%RunIden)//'_'//&
                  TRIM(dum)//'_cable_out.nc'
              ELSE
                filename%out = TRIM(filename%path)//'/'//&
                  TRIM(cable_user%RunIden)//'_cable_out.nc'
              ENDIF
            ENDIF
            CALL nullify_write() ! nullify pointers
            CALL open_output_file( dels, soil, veg, bgc, rough, met)
          ENDIF

          ssnow%otss_0 = ssnow%tgg(:,1)
          ssnow%otss = ssnow%tgg(:,1)
          ssnow%tss = ssnow%tgg(:,1)
          canopy%fes_cor = 0.
          canopy%fhs_cor = 0.
          met%ofsd = 0.1


          IF (.NOT.spinup) spinConv=.TRUE.

          ! MPI: above was standard serial code
          ! now it's time to initialize the workers

          ! MPI: bcast to workers so that they don't need to open the met
          ! file themselves
          CALL MPI_Bcast (dels, 1, MPI_REAL, 0, comm, ierr)

          ! MPI: need to know extents before creating datatypes
          CALL find_extents

          ! MPI: calculate and broadcast landpoint decomposition to the workers
          CALL master_decomp(comm, mland)

          ! MPI: set up stuff for new irecv isend code that separates completion
          ! from posting of requests
          ! wnp is set in master_decomp above
          ALLOCATE (inp_req(wnp))
          ALLOCATE (inp_stats(MPI_STATUS_SIZE, wnp))
          ALLOCATE (recv_req(wnp))
          ALLOCATE (recv_stats(MPI_STATUS_SIZE, wnp))
          CALL MPI_Comm_dup (comm, icomm, ierr)
          CALL MPI_Comm_dup (comm, ocomm, ierr)
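          ! Rationale (sketch): duplicating the communicator gives the input
          ! scatter (icomm) and the result gather (ocomm) separate message
          ! spaces, so read-ahead sends posted on icomm can never be matched
          ! against the workers' result traffic on ocomm.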



          ! MPI: data set in load_parameter is now scattered out to the
          ! workers

          CALL master_cable_params(comm, met,air,ssnow,veg,bgc,soil,canopy,&
                                   rough,rad,sum_flux,bal)


          IF (cable_user%call_climate) THEN
            CALL master_climate_types(comm, climate, ktauday)
          ENDIF

          ! MPI: mvtype and mstype are sent out here instead of inside master_casa_params
          !      so that the old CABLE carbon module can use them. (BP May 2013)
          CALL MPI_Bcast (mvtype, 1, MPI_INTEGER, 0, comm, ierr)
          CALL MPI_Bcast (mstype, 1, MPI_INTEGER, 0, comm, ierr)

          ! MPI: casa parameters scattered only if cnp module is active
          IF (icycle>0) THEN
            ! MPI:
            CALL master_casa_params (comm,casabiome,casapool,casaflux,casamet,&
                                     casabal,phen)

            IF ( CABLE_USER%CALL_POP ) CALL master_pop_types (comm,pop)
          END IF

          ! MPI: allocate read ahead buffers for input met and veg data
          CALL alloc_cbm_var (imet, mp)
          CALL alloc_cbm_var (iveg, mp)

          ! MPI: create inp_t types to scatter input data to the workers
          ! at the start of every timestep
          !CALL master_intypes (comm,met,veg)
          ! for read ahead use the new variables
          CALL master_intypes (comm,imet,iveg)

          ! MPI: create recv_t types to receive results from the workers
          ! at the end of every timestep
          CALL master_outtypes (comm,met,canopy,ssnow,rad,bal,air,soil,veg)

          ! MPI: create type for receiving casa results
          ! only if cnp module is active

          IF (icycle>0) THEN
            CALL master_casa_types (comm, casapool, casaflux, &
                 casamet, casabal, phen)

            IF ( CABLE_USER%CASA_DUMP_READ .OR. CABLE_USER%CASA_DUMP_WRITE ) &
              CALL master_casa_dump_types( comm, casamet, casaflux, phen )
            WRITE(*,*) 'cable_mpimaster, POPLUC: ' ,  CABLE_USER%POPLUC
            IF ( CABLE_USER%POPLUC ) &
              CALL master_casa_LUC_types( comm, casapool, casabal)

          END IF

          ! MPI: create type to send restart data back to the master
          ! only if restart file is to be created
          IF(output%restart) THEN
            CALL master_restart_types (comm, canopy, air)
          END IF

          !  CALL zero_sum_casa(sum_casapool, sum_casaflux)
          !  count_sum_casa = 0

          ! CALL master_sumcasa_types(comm, sum_casapool, sum_casaflux)
          IF( icycle>0 .AND. spincasa) THEN
            PRINT *, 'EXT spincasacnp enabled with mloop= ', mloop, dels, kstart, kend
            CALL master_spincasacnp(dels,kstart,kend,mloop,veg,soil,casabiome,casapool, &
                 casaflux,casamet,casabal,phen,POP,climate,icomm, ocomm)
            SPINconv = .FALSE.
            CASAONLY                   = .TRUE.
            ktau_gl = 0
            ktau = 0

          ELSEIF ( casaonly .AND. (.NOT. spincasa) .AND. cable_user%popluc) THEN
            CALL master_CASAONLY_LUC(dels,kstart,kend,veg,casabiome,casapool, &
                 casaflux,casamet,casabal,phen,POP,climate,LUC_EXPT, POPLUC, &
                 icomm, ocomm)
            SPINconv = .FALSE.
            ktau_gl = 0
            ktau = 0
          ENDIF

          ! MPI: mostly original serial code follows...
        ENDIF ! CALL1

        ! set the globally (WRT code) accessible time-stepping variables,
        ! shared through USE cable_common_module
        ktau_gl = 0
        kwidth_gl = INT(dels)
        kend_gl = kend
        knode_gl = 0

        ! MPI: separate time step counters for reading and writing
        ! (ugly, I know)
        iktau = ktau_gl
        oktau = ktau_gl

        ! MPI: read ahead
        iktau = iktau + 1

        ! MPI: flip ktau_gl
        !tmp_kgl = ktau_gl
        ktau_gl = iktau
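        ! Read-ahead pipeline (sketch): two counters are kept so that, inside
        ! KTAULOOP below, the results for step oktau are gathered from the
        ! workers and the met input for the next step iktau (= oktau + 1) is
        ! scattered back out, e.g.
        !   CALL master_receive    (ocomm, oktau, recv_ts)  ! output of step oktau
        !   CALL master_send_input (icomm, inp_ts, iktau)   ! input for step iktau
        ! ktau_gl tracks the read-ahead index while met data are fetched and
        ! is reset to oktau before output is written.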

        IF (.NOT.casaonly) THEN
          SELECT CASE (TRIM(cable_user%MetType))
          CASE ('plum')
            CALL PLUME_MIP_GET_MET(PLUME, iMET, YYYY, 1, kend, &
                 (YYYY.EQ.CABLE_USER%YearEnd .AND. 1.EQ.kend))

          CASE ('cru')

            CALL CRU_GET_SUBDIURNAL_MET(CRU, imet, YYYY, 1, kend, &
                 (YYYY.EQ.CABLE_USER%YearEnd))

          CASE ('site')
            STOP 'MetType "site" can only be used in serial'

          CASE DEFAULT
            CALL get_met_data( spinup, spinConv, imet, soil,                 &
                 rad, iveg, kend, dels, CTFRZ, iktau+koffset,                &
                 kstart+koffset )

          END SELECT
        ENDIF


        !  IF ( CASAONLY .AND. IS_CASA_TIME("dread", yyyy, iktau, kstart, koffset, &
        !       kend, ktauday, logn) ) THEN
        !     WRITE(CYEAR,FMT="(I4)")CurYear + INT((ktau-kstart+koffset)/(LOY*ktauday))
        !     ncfile  = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'
        !     casa_it = NINT( REAL(iktau / ktauday) )
        !     CALL read_casa_dump( ncfile, casamet, casaflux,phen, casa_it, kend, .FALSE. )
        !  ENDIF

        canopy%oldcansto=canopy%cansto

        ! Zero out LAI where there is no vegetation according to the vegetation index
        WHERE ( iveg%iveg(:) .GE. 14 ) iveg%vlai = 0.



        IF ( .NOT. CASAONLY ) THEN
          ! MPI: scatter input data to the workers

          CALL master_send_input (icomm, inp_ts, iktau)

          !  CALL MPI_Waitall (wnp, inp_req, inp_stats, ierr)
        ELSE
          CALL master_send_input (icomm, casa_dump_ts, iktau)
          CALL MPI_Waitall (wnp, inp_req, inp_stats, ierr)
        ENDIF

        IF (spincasa.OR. casaonly) THEN
          EXIT
        ENDIF
        !IF (.NOT.spincasa) THEN
        ! time step loop over ktau
        KTAULOOP:DO ktau=kstart, kend - 1
          !         ! increment total timestep counter
          !         ktau_tot = ktau_tot + 1
          iktau = iktau + 1
          oktau = oktau + 1

          WRITE(logn,*) 'Progress -',REAL(ktau)/REAL(kend)*100.0

          met%year = imet%year
          met%doy = imet%doy

          ! update the globally (WRT code) accessible time-step counters,
          ! shared through USE cable_common_module
          ktau_tot= ktau_tot + 1
          ktau_gl = iktau

          ! some things (e.g. CASA-CNP) only need to be done once per day
          idoy =INT( MOD((REAL(ktau+koffset)/REAL(ktauday)),REAL(LOY)))
          IF ( idoy .EQ. 0 ) idoy = LOY

          ! needed for CASA-CNP
          nyear =INT((kend-kstart+1)/(LOY*ktauday))
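          ! Worked example (assuming ktauday = 48, koffset = 0, LOY = 365):
          ! at ktau = 96, 96/48 = 2.0 and MOD(2.0, 365.0) = 2.0, so idoy = 2;
          ! at the last step of the year (ktau = 365*48) the MOD yields 0 and
          ! idoy is reset to LOY by the check above. A one-year run then gives
          ! nyear = INT(365*48/(365*48)) = 1.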

          ! Get met data and LAI, set time variables.
          ! Rainfall input may be augmented for spinup purposes:
          !          met%ofsd = met%fsd(:,1) + met%fsd(:,2)
          SELECT CASE (TRIM(cable_user%MetType))
          CASE ('plum')
            CALL PLUME_MIP_GET_MET(PLUME, iMET, YYYY, iktau, kend, &
                 (YYYY.EQ.CABLE_USER%YearEnd .AND. iktau.EQ.kend))

          CASE ('cru')
            CALL CRU_GET_SUBDIURNAL_MET(CRU, imet, YYYY, iktau, kend, &
                 (YYYY.EQ.CABLE_USER%YearEnd) )
          CASE DEFAULT

            CALL get_met_data( spinup, spinConv, imet, soil,                 &
                 rad, iveg, kend, dels, CTFRZ, iktau+koffset,                &
                 kstart+koffset )

          END SELECT
          IF ( (TRIM(cable_user%MetType) .NE. 'gswp') .AND. &
               (TRIM(cable_user%MetType) .NE. 'gswp3') ) CurYear = met%year(1)

!$        IF ( CASAONLY .AND. IS_CASA_TIME("dread", yyyy, iktau, kstart, koffset, &
!$              kend, ktauday, logn) )  THEN
!$          ! CLN READ FROM FILE INSTEAD !
!$          WRITE(CYEAR,FMT="(I4)")CurYear + INT((ktau-kstart+koffset)/(LOY*ktauday))
!$          ncfile  = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'
!$          casa_it = NINT( REAL(iktau / ktauday) )
!$          CALL read_casa_dump( ncfile, casamet, casaflux, casa_it, kend, .FALSE. )
!$        ENDIF


!$        ! At first time step of year, set tile area according to updated LU areas
!$        IF (ktau == 1 .and. CABLE_USER%POPLUC) THEN
!$          CALL POPLUC_set_patchfrac(POPLUC,LUC_EXPT)
!$        ENDIF


          IF ( .NOT. CASAONLY ) THEN

            IF ( icycle > 0 ) THEN
              ! receive casa update from worker
              CALL master_receive (ocomm, oktau, casa_ts)

              CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)
              ! receive casa dump requirements from worker
              IF ( ((.NOT.spinup).OR.(spinup.AND.spinConv)) .AND.   &
                    ( IS_CASA_TIME("dwrit", yyyy, oktau, kstart, &
                    koffset, kend, ktauday, logn) ) ) THEN
                CALL master_receive ( ocomm, oktau, casa_dump_ts )

                ! CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)
              ENDIF
            ENDIF

            ! MPI: receive this time step's results from the workers
            CALL master_receive (ocomm, oktau, recv_ts)
            ! CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)


            ! MPI: scatter input data to the workers
            CALL master_send_input (icomm, inp_ts, iktau)
            !  CALL MPI_Waitall (wnp, inp_req, inp_stats, ierr)

!$          IF ( ((.NOT.spinup).OR.(spinup.AND.spinConv)) .AND.   &
!$                ( IS_CASA_TIME("dwrit", yyyy, oktau, kstart, &
!$                               koffset, kend, ktauday, logn) ) ) THEN
!$            WRITE(CYEAR,FMT="(I4)") CurYear + INT((ktau-kstart)/(LOY*ktauday))
!$            ncfile = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'
!$            CALL write_casa_dump( ncfile, casamet , casaflux, idoy, &
!$                kend/ktauday )
!$
!$          ENDIF

            IF (((.NOT.spinup).OR.(spinup.AND.spinConv)).AND. &
                  MOD((ktau-kstart+1),ktauday)==0) THEN
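              ! (true at the last timestep of each day, e.g. with ktauday = 48
              ! and kstart = 1 at ktau = 48, 96, ...)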
              IF ( CABLE_USER%CASA_DUMP_WRITE )  THEN


                !CLN CHECK FOR LEAP YEAR
                WRITE(CYEAR,FMT="(I4)") CurYear + INT((ktau-kstart)/(LOY*ktauday))
                ncfile = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'
                CALL write_casa_dump( ncfile, casamet , casaflux, phen, climate, idoy, &
                     kend/ktauday )

              ENDIF
            ENDIF

          ELSE IF ( MOD((ktau-kstart+1+koffset),ktauday)==0 ) THEN

            CALL master_send_input (icomm, casa_dump_ts, iktau )
            !    CALL MPI_Waitall (wnp, inp_req, inp_stats, ierr)

          ENDIF

          !             CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)
          !             CALL MPI_Waitall (wnp, inp_req, inp_stats, ierr)

          met%ofsd = met%fsd(:,1) + met%fsd(:,2)
          canopy%oldcansto=canopy%cansto

          ! Zero out LAI where there is no vegetation according to the vegetation index
          WHERE ( iveg%iveg(:) .GE. 14 ) iveg%vlai = 0.
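          ! WHERE performs the masked assignment elementwise over all patches;
          ! the threshold reflects the convention that vegetation types >= 14
          ! are non-vegetated surface types. A minimal sketch of the idiom
          ! (illustrative only, hypothetical data):
          !
          !   INTEGER :: vtype(5) = (/ 2, 14, 7, 16, 1 /)
          !   REAL    :: lai(5)   = (/ 1.2, 0.8, 3.1, 0.5, 2.0 /)
          !   WHERE ( vtype >= 14 ) lai = 0.   ! zeroes elements 2 and 4 only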

          ! Write time step's output to file if either: we're not spinning up
          ! or we're spinning up and the spinup has converged:
          ! MPI: TODO: pull mass and energy balance calculation from write_output
          ! and refactor into worker code

          ktau_gl = oktau

          IF ((.NOT.spinup).OR.(spinup.AND.spinConv)) THEN
            IF (icycle >0) THEN
              IF ( IS_CASA_TIME("write", yyyy, oktau, kstart, &
                                koffset, kend, ktauday, logn) ) THEN
                ctime = ctime +1


                CALL WRITE_CASA_OUTPUT_NC (veg, casamet, casapool, casabal, casaflux, &
                     CASAONLY, ctime, &
                     ( ktau.EQ.kend .AND. YYYY .EQ.cable_user%YearEnd ) )
              ENDIF
            ENDIF


            IF ( (.NOT. CASAONLY).AND. spinConv  ) THEN

              SELECT CASE (TRIM(cable_user%MetType))
              CASE ('plum', 'cru', 'gswp', 'gswp3')
                CALL write_output( dels, ktau_tot, met, canopy, casaflux, casapool, &
                     casamet,ssnow,         &
                     rad, bal, air, soil, veg, CSBOLTZ,     &
                     CEMLEAF, CEMSOIL )
              CASE DEFAULT
                CALL write_output( dels, ktau, met, canopy, casaflux, casapool, &
                     casamet, ssnow,   &
                     rad, bal, air, soil, veg, CSBOLTZ, CEMLEAF, CEMSOIL )

              END SELECT
            END IF
          ENDIF

          !---------------------------------------------------------------------!
          ! Check this run against standard for quasi-bitwise reproducibility   !
          ! Check triggered by cable_user%consistency_check=.TRUE. in cable.nml !
          !---------------------------------------------------------------------!
          IF(cable_user%consistency_check) THEN

            count_bal = count_bal +1;
            new_sumbal = new_sumbal + SUM(bal%wbal)/mp +  SUM(bal%ebal)/mp
            new_sumfpn = new_sumfpn + SUM(canopy%fpn)/mp
            new_sumfe = new_sumfe + SUM(canopy%fe)/mp
!$                    if (ktau == kend-1) PRINT*, "time-space-averaged energy & water balances"
!$                    if (ktau == kend-1) PRINT*,"Ebal_tot[Wm-2], Wbal_tot[mm]", &
!$                         sum(bal%ebal_tot)/mp/count_bal, sum(bal%wbal_tot)/mp/count_bal
!$                    if (ktau == kend-1) PRINT*, "time-space-averaged latent heat and net photosynthesis"
!$                    if (ktau == kend-1) PRINT*, "sum_fe[Wm-2], sum_fpn[umol/m2/s]",  &
!$                         new_sumfe/count_bal, new_sumfpn/count_bal

            ! check for NaNs in biophysical outputs and abort if there are any
            IF (ANY( canopy%fe.NE. canopy%fe)) THEN
              DO kk=1,mp

                IF (canopy%fe(kk).NE. canopy%fe(kk)) THEN
                  WRITE(*,*) 'NaN in evap flux,', kk, patch(kk)%latitude, patch(kk)%longitude
                  WRITE(*,*) 'fe NaN', kk, ktau,met%qv(kk), met%precip(kk),met%precip_sn(kk), &
                    met%fld(kk), met%fsd(kk,:), met%tk(kk), met%ua(kk), &
                    ssnow%potev(kk), met%pmb(kk), &
                    canopy%ga(kk), ssnow%tgg(kk,:), canopy%fwsoil(kk), &
                    rad%fvlai(kk,:) ,  rad%fvlai(kk,1), &
                    rad%fvlai(kk,2), canopy%vlaiw(kk)

                  CALL MPI_Abort(comm, 0, ierr)
                ENDIF

              ENDDO

            ENDIF
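            ! The comparison canopy%fe /= canopy%fe above is the standard IEEE
            ! NaN test: NaN is the only value that compares unequal to itself.
            ! With an F2003 compiler the same check can be written more
            ! explicitly (illustrative sketch, not used here):
            !
            !   USE, INTRINSIC :: ieee_arithmetic, ONLY: ieee_is_nan
            !   IF ( ANY( ieee_is_nan(canopy%fe) ) ) CALL MPI_Abort(comm, 0, ierr)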
            IF(ktau==(kend-1)) THEN

              nkend = nkend+1
              IF( ABS(new_sumbal-trunk_sumbal) < 1.e-7) THEN

                PRINT *, ""
                PRINT *, &
                      "NB. Offline-parallel runs spinup cycles:", nkend
                PRINT *, &
                      "Internal check shows this version reproduces the trunk sumbal"

              ELSE

                PRINT *, ""
                PRINT *, &
                      "NB. Offline-parallel runs spinup cycles:", nkend
                PRINT *, &
                      "Internal check shows in this version new_sumbal != trunk sumbal"
                PRINT *, "The difference is: ", new_sumbal - trunk_sumbal
                PRINT *, &
                      "Writing new_sumbal to the file:", TRIM(filename%new_sumbal)

                !CLN                      OPEN( 12, FILE = filename%new_sumbal )
                !CLN                      WRITE( 12, '(F20.7)' ) new_sumbal  ! written by previous trunk version
                !CLN                      CLOSE(12)

              ENDIF

            ENDIF

          ENDIF

          CALL1 = .FALSE.

          !WRITE(*,*) " ktauloop end ", ktau, CurYear


        END DO KTAULOOP ! END Do loop over timestep ktau

        CALL1 = .FALSE.


        ! MPI: tail of the read-ahead pipeline: receive the last time step's
        ! results and write them
        met%year = imet%year
        met%doy  = imet%doy
        oktau    = oktau + 1
        ktau_tot = ktau_tot + 1
        ktau_gl  = oktau

        IF ( .NOT. CASAONLY ) THEN

          IF ( icycle >0 ) THEN

            CALL master_receive (ocomm, oktau, casa_ts)

            IF ( ((.NOT.spinup).OR.(spinup.AND.spinConv)) .AND. &
                 ( IS_CASA_TIME("dwrit", yyyy, oktau, kstart, &
                 koffset, kend, ktauday, logn) ) ) THEN
              CALL master_receive ( ocomm, oktau, casa_dump_ts )

            ENDIF

          ENDIF


          CALL master_receive (ocomm, oktau, recv_ts)

        ENDIF

        met%ofsd = met%fsd(:,1) + met%fsd(:,2)
        canopy%oldcansto=canopy%cansto

        IF ( (TRIM(cable_user%MetType) .EQ. "gswp") .OR. (TRIM(cable_user%MetType) .EQ. "gswp3") ) &
          CALL close_met_file

        IF (icycle>0 .AND.   cable_user%CALL_POP)  THEN
          WRITE(*,*)  'before annual calcs'

          IF (CABLE_USER%POPLUC) THEN


            ! master receives casa updates required for LUC calculations here
            CALL master_receive (ocomm, 0, casa_LUC_ts)

            ! Dynamic LUC
            CALL LUCdriver( casabiome,casapool,casaflux,POP,LUC_EXPT, POPLUC, veg )


            ! transfer POP updates to workers
            off = 1
            DO rank = 1, wnp
              IF ( rank .GT. 1 ) off = off + wland(rank-1)%npop_iwood
              cnt = wland(rank)%npop_iwood
              CALL MPI_Send( POP%pop_grid(off), cnt, pop_ts, rank, 0, icomm, ierr )
            END DO
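            ! Each worker receives one contiguous slice of POP%pop_grid: off
            ! is the 1-based start index and cnt the slice length, accumulated
            ! from the per-worker npop_iwood counts. E.g. with counts
            ! (/ 5, 3, 4 /) the three sends cover pop_grid(1:5),
            ! pop_grid(6:8) and pop_grid(9:12).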
          ENDIF

          ! one annual time-step of POP (worker calls POP here)
          !CALL POPdriver(casaflux,casabal,veg, POP)
          CALL master_receive_pop(POP, ocomm)

          IF (CABLE_USER%POPLUC) THEN
            ! Dynamic LUC: update casa pools according to LUC transitions
            CALL POP_LUC_CASA_transfer(POPLUC,POP,LUC_EXPT,casapool,casabal,casaflux,ktauday)
            ! Dynamic LUC: write output
            IF (output%grid(1:3) == 'lan') THEN
              CALL WRITE_LUC_OUTPUT_NC( POPLUC, YYYY, ( YYYY.EQ.cable_user%YearEnd ))
            ELSE
              CALL WRITE_LUC_OUTPUT_GRID_NC( POPLUC, YYYY, ( YYYY.EQ.cable_user%YearEnd ))
              !CALL WRITE_LUC_OUTPUT_NC( POPLUC, YYYY, ( YYYY.EQ.cable_user%YearEnd ))
            ENDIF

          ENDIF

          IF (CABLE_USER%POPLUC) &
            ! send updates for CASA pools, resulting from LUC
            CALL master_send_input (icomm, casa_LUC_ts, nyear)


        ENDIF
        WRITE(*,*) 'after annual calcs'


        ! WRITE OUTPUT
        IF((.NOT.spinup).OR.(spinup.AND.spinConv)) THEN
          IF(icycle >0) THEN
            ctime = ctime +1
            CALL WRITE_CASA_OUTPUT_NC (veg, casamet, casapool, casabal, casaflux, &
                 CASAONLY, ctime, ( ktau.EQ.kend .AND. YYYY .EQ.               &
                 cable_user%YearEnd ) )
            IF ( cable_user%CALL_POP ) THEN

              ! CALL master_receive_pop(POP, ocomm)
              ! CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)
              IF ( TRIM(cable_user%POP_out).EQ.'epi' ) THEN
                CALL POP_IO( pop, casamet, CurYear, 'WRITE_EPI', &
                     (CurYear.EQ.CABLE_USER%YearEnd) )
              ENDIF


            ENDIF
          END IF

!$        IF ( CABLE_USER%CASA_DUMP_WRITE )  THEN
!$          !CLN CHECK FOR LEAP YEAR
!$          WRITE(CYEAR,FMT="(I4)") CurYear + INT((ktau-kstart)/(LOY*ktauday))
!$          ncfile = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'
!$          CALL write_casa_dump( ncfile, casamet , casaflux, idoy, &
!$               kend/ktauday )
!$
!$        ENDIF

          IF (((.NOT.spinup).OR.(spinup.AND.spinConv)).AND. &
               MOD((ktau-kstart+1),ktauday)==0) THEN
            IF ( CABLE_USER%CASA_DUMP_WRITE )  THEN
              !CLN CHECK FOR LEAP YEAR
              WRITE(CYEAR,FMT="(I4)") CurYear + INT((ktau-kstart)/(LOY*ktauday))
              ncfile = TRIM(casafile%c2cdumppath)//'c2c_'//CYEAR//'_dump.nc'

              CALL write_casa_dump( ncfile, casamet , casaflux, phen, climate, LOY, &
                   kend/ktauday )

            ENDIF
          ENDIF

          IF ( (.NOT. CASAONLY) .AND. spinConv ) THEN
            SELECT CASE (TRIM(cable_user%MetType))
            CASE ('plum', 'cru', 'gswp', 'gswp3')
              CALL write_output( dels, ktau_tot, met, canopy, casaflux, casapool, casamet, &
                   ssnow, rad, bal, air, soil, veg, CSBOLTZ, CEMLEAF, CEMSOIL )
            CASE DEFAULT
              CALL write_output( dels, ktau, met, canopy, casaflux, casapool, casamet, &
                   ssnow, rad, bal, air, soil, veg, CSBOLTZ, CEMLEAF, CEMSOIL )

            END SELECT
          END IF

          IF(cable_user%consistency_check) THEN

            count_bal = count_bal +1;
            new_sumbal = new_sumbal + SUM(bal%wbal)/mp +  SUM(bal%ebal)/mp
            new_sumfpn = new_sumfpn + SUM(canopy%fpn)/mp
            new_sumfe = new_sumfe + SUM(canopy%fe)/mp
            IF (ktau == kend) THEN
              PRINT*
              PRINT*, "time-space-averaged energy & water balances"
              PRINT*, "Ebal_tot[Wm-2], Wbal_tot[mm per timestep]", &
                SUM(bal%ebal_tot)/mp/count_bal, SUM(bal%wbal_tot)/mp/count_bal
              PRINT*, "time-space-averaged latent heat and net photosynthesis"
              PRINT*, "sum_fe[Wm-2], sum_fpn[umol/m2/s]", &
                new_sumfe/count_bal, new_sumfpn/count_bal
              WRITE(logn,*)
              WRITE(logn,*) "time-space-averaged energy & water balances"
              WRITE(logn,*) "Ebal_tot[Wm-2], Wbal_tot[mm per timestep]", &
                SUM(bal%ebal_tot)/mp/count_bal, SUM(bal%wbal_tot)/mp/count_bal
              WRITE(logn,*) "time-space-averaged latent heat and net photosynthesis"
              WRITE(logn,*) "sum_fe[Wm-2], sum_fpn[umol/m2/s]", &
                new_sumfe/count_bal, new_sumfpn/count_bal
            END IF



          ENDIF



        END IF
        ! set tile area according to updated LU areas
        IF (CABLE_USER%POPLUC) THEN
          CALL POPLUC_set_patchfrac(POPLUC,LUC_EXPT)
        ENDIF


      END DO YEARLOOP

      IF (spincasa.OR.casaonly) THEN
        EXIT
      ENDIF

      ! NB (jhan): this test is insufficient: it does not cover the case
      ! where spinup=.FALSE. but CASA_dump.nc is still wanted (spinConv=.TRUE.).
      ! See whether the spinup (if one is being conducted) has converged:
      IF(spinup.AND..NOT.spinConv.AND. .NOT. CASAONLY) THEN

        ! Write to screen and log file:
        WRITE(*,'(A18,I3,A24)') ' Spinning up: run ',INT(ktau_tot/kend),      &
             ' of data set complete...'
        WRITE(logn,'(A18,I3,A24)') ' Spinning up: run ',INT(ktau_tot/kend),   &
             ' of data set complete...'

        ! IF not 1st run through whole dataset:
        IF( INT( ktau_tot/kend ) > 1 ) THEN

          ! evaluate spinup
          IF (ANY( ABS(ssnow%wb-soilMtemp)>delsoilM).OR.                     &
              ANY(ABS(ssnow%tgg-soilTtemp)>delsoilT) .OR. &
              MAXVAL(ABS(ssnow%GWwb-GWtemp),dim=1)>delgwM)  THEN

            ! No complete convergence yet
            !               PRINT *, 'ssnow%wb : ', ssnow%wb
            !               PRINT *, 'soilMtemp: ', soilMtemp
            !               PRINT *, 'ssnow%tgg: ', ssnow%tgg
            !               PRINT *, 'soilTtemp: ', soilTtemp
            maxdiff = MAXLOC(ABS(ssnow%wb-soilMtemp))
            PRINT *, 'Example location of moisture non-convergence: ',maxdiff
            PRINT *, 'ssnow%wb : ', ssnow%wb(maxdiff(1),maxdiff(2))
            PRINT *, 'soilMtemp: ', soilMtemp(maxdiff(1),maxdiff(2))
            maxdiff = MAXLOC(ABS(ssnow%tgg-soilTtemp))
            PRINT *, 'Example location of temperature non-convergence: ',maxdiff
            PRINT *, 'ssnow%tgg: ', ssnow%tgg(maxdiff(1),maxdiff(2))
            PRINT *, 'soilTtemp: ', soilTtemp(maxdiff(1),maxdiff(2))

            IF (cable_user%gw_model) THEN
              maxdiff(1) = MAXLOC(ABS(ssnow%GWwb-GWtemp),dim=1)
              PRINT *,'ssnow%GWwb: ', ssnow%GWwb(maxdiff(1))
              PRINT *, 'GWtemp: ', GWtemp(maxdiff(1))
            ENDIF


          ELSE ! spinup has converged

            spinConv = .TRUE.
            ! Write to screen and log file:
            WRITE(*,'(A33)') ' Spinup has converged - final run'
            WRITE(logn,'(A52)')                                             &
                 ' Spinup has converged - final run - writing all data'
            WRITE(logn,'(A37,F8.5,A28)')                                    &
                 ' Criteria: Change in soil moisture < ',             &
                 delsoilM, ' in any layer over whole run'
            WRITE(logn,'(A40,F8.5,A28)' )                                   &
                 '           Change in soil temperature < ',          &
                     delsoilT, ' in any layer over whole run'
          END IF
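          ! The test above declares convergence only when every layer of
          ! every patch changes by less than delsoilM (moisture), delsoilT
          ! (temperature) and delgwM (groundwater) over a full pass through
          ! the forcing; equivalently (illustrative restatement):
          !
          !   converged = ALL( ABS(ssnow%wb  - soilMtemp) <= delsoilM ) .AND. &
          !               ALL( ABS(ssnow%tgg - soilTtemp) <= delsoilT ) .AND. &
          !               MAXVAL( ABS(ssnow%GWwb - GWtemp) ) <= delgwM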

        ELSE ! allocate variables for storage

          IF (.NOT.ALLOCATED(soilMtemp)) ALLOCATE(  soilMtemp(mp,ms) )
          IF (.NOT.ALLOCATED(soilTtemp)) ALLOCATE(  soilTtemp(mp,ms) )
          IF (.NOT.ALLOCATED(GWtemp))    ALLOCATE(  GWtemp(mp) )

        END IF

        IF (cable_user%max_spins .GT. 0) THEN
          IF (INT( ktau_tot/kend ) .GT. cable_user%max_spins ) THEN
            spinConv = .TRUE.
            ! Write to screen and log file:
            WRITE(*,*) ' Spinup exceeded max ',cable_user%max_spins,' cycles '
            WRITE(*,*) ' Forcing the final run without spin up convergence '
            WRITE(logn,*) ' Spinup exceeded max ',cable_user%max_spins,' cycles '
            WRITE(logn,*) ' Forcing the final run without spin up convergence '
          END IF
        END IF

        IF ( YYYY.GT. CABLE_USER%YearEnd ) THEN
          ! store soil moisture and temperature
          soilTtemp = ssnow%tgg
          soilMtemp = REAL(ssnow%wb)
        END IF
        ! MPI:
        loop_exit = .FALSE.

      ELSE

        ! if not spinning up, or spin up has converged, exit:
        ! EXIT
        ! MPI:
        loop_exit = .TRUE.

      END IF

      ! MPI: let the workers know whether it's time to quit
      CALL MPI_Bcast (loop_exit, 1, MPI_LOGICAL, 0, comm, ierr)

      IF (loop_exit) THEN
        EXIT
      END IF
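      ! The broadcast above is collective: every worker posts a matching
      ! MPI_Bcast and receives the master's loop_exit flag, so master and
      ! workers leave SPINLOOP in the same iteration. The worker side
      ! presumably mirrors this (illustrative sketch of the matching call):
      !
      !   CALL MPI_Bcast (loop_exit, 1, MPI_LOGICAL, 0, comm, ierr)
      !   IF (loop_exit) EXIT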

    END DO SPINLOOP

    IF ( SpinConv .AND. .NOT. CASAONLY) THEN
      ! Close output file and deallocate main variables:
      CALL close_output_file( bal, air, bgc, canopy, met,                         &
           rad, rough, soil, ssnow,                            &
           sum_flux, veg )
    ENDIF

    IF (icycle > 0 .AND. (.NOT.spincasa).AND. (.NOT.casaonly)) THEN
      ! MPI: gather casa results from all the workers

      CALL master_receive (ocomm, ktau_gl, casa_ts)

      !CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)

!$     CALL casa_poolout( ktau, veg, soil, casabiome,                           &
!$          casapool, casaflux, casamet, casabal, phen )
      CALL casa_fluxout( nyear, veg, soil, casabal, casamet)

      if(.not.l_landuse) then
        CALL write_casa_restart_nc ( casamet, casapool,casaflux,phen,CASAONLY )
      endif

      !CALL write_casa_restart_nc ( casamet, casapool, met, CASAONLY )


      IF (.not.l_landuse.and.CABLE_USER%CALL_POP .AND.POP%np.GT.0 ) THEN
        IF ( CASAONLY .OR. cable_user%POP_fromZero &
             .OR.TRIM(cable_user%POP_out).EQ.'ini' ) THEN
          CALL POP_IO( pop, casamet, CurYear+1, 'WRITE_INI', .TRUE.)

        ELSE
          CALL POP_IO( pop, casamet, CurYear+1, 'WRITE_RST', .TRUE.)
        ENDIF
      END IF
      IF (.not.l_landuse.and.cable_user%POPLUC .AND. .NOT. CASAONLY ) THEN
        CALL WRITE_LUC_RESTART_NC ( POPLUC, YYYY )
      ENDIF


    END IF

    ! Write restart file if requested:
    IF(output%restart .AND. (.NOT. CASAONLY)) THEN
      ! MPI: TODO: receive variables that are required by create_restart
      ! but not write_output
      !CALL receive_restart (comm,ktau,dels,soil,veg,ssnow, &
      !       &              canopy,rough,rad,bgc,bal)
      ! gol124: how about call master_receive (comm, ktau, restart_ts)
      ! instead of a separate receive_restart sub?
      CALL master_receive (comm, ktau_gl, restart_ts)

      !       CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)

      if(.not.l_landuse) then
        CALL create_restart( logn, dels, ktau, soil, veg, ssnow,                 &
             canopy, rough, rad, bgc, bal, met  )
      endif

      IF (cable_user%CALL_climate) THEN
        CALL master_receive (comm, ktau_gl, climate_ts)

        !CALL MPI_Waitall (wnp, recv_req, recv_stats, ierr)


        CALL WRITE_CLIMATE_RESTART_NC ( climate, ktauday )
      END IF

    END IF


    IF(l_landuse.and. .not. CASAONLY) then
      mlon = maxval(landpt(1:mland)%ilon)
      mlat = maxval(landpt(1:mland)%ilat)
      allocate(luc_atransit(mland,mvmax,mvmax))
      allocate(luc_fharvw(mland,mharvw))
      allocate(luc_xluh2cable(mland,mvmax,mstate))
      allocate(landmask(mlon,mlat))
      allocate(arealand(mland))
      allocate(patchfrac_new(mlon,mlat,mvmax))
      allocate(cstart(mland),cend(mland),nap(mland))

      do m=1,mland
        cstart(m) = landpt(m)%cstart
        cend(m)   = landpt(m)%cend
        nap(m)    = landpt(m)%nap
      enddo

      call landuse_data(mlon,mlat,landmask,arealand,luc_atransit,luc_fharvw,luc_xluh2cable)

      call  landuse_driver(mlon,mlat,landmask,arealand,ssnow,soil,veg,bal,canopy,  &
                           phen,casapool,casabal,casamet,casabiome,casaflux,bgc,rad, &
                           cstart,cend,nap,lucmp)

      print *, 'writing new gridinfo: landuse'
      print *, 'new patch information. mland= ',mland

      do m=1,mland
        do np=cstart(m),cend(m)
          ivt=lucmp%iveg(np)
          if(ivt<1.or.ivt>mvmax) then
            print *, 'landuse: error in vegtype',m,np,ivt
            stop
          endif
          patchfrac_new(landpt(m)%ilon,landpt(m)%ilat,ivt) = lucmp%patchfrac(np)
        enddo
      enddo
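      ! The loop above re-grids the patch vector onto the (lon, lat, vegtype)
      ! cube: each active patch np of landpoint m deposits its area fraction
      ! into the cell of its vegetation type ivt.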

      call create_new_gridinfo(filename%type,filename%gridnew,mlon,mlat,landmask,patchfrac_new)

      print *, 'writing casapools: land use'
      call WRITE_LANDUSE_CASA_RESTART_NC(cend(mland), lucmp, CASAONLY )

      print *, 'writing cable restart: land use'
      call create_landuse_cable_restart(logn, dels, ktau, soil, cend(mland),lucmp,cstart,cend,nap, met)

      print *, 'deallocating'
      call landuse_deallocate_mp(cend(mland),ms,msn,nrb,mplant,mlitter,msoil,mwood,lucmp)
    ENDIF



    ! Close met data input file:
    IF ( TRIM(cable_user%MetType) .NE. "gswp"  .AND. &
         TRIM(cable_user%MetType) .NE. "gswp3" .AND. &
         TRIM(cable_user%MetType) .NE. "plum"  .AND. &
         TRIM(cable_user%MetType) .NE. "cru") CALL close_met_file

    ! Close log file
    CLOSE(logn)

    CALL CPU_TIME(etime)
    PRINT *, 'Master End. ', etime, ' seconds'
    ! MPI: cleanup
    CALL master_end (icycle, output%restart)

    RETURN

  END SUBROUTINE mpidrv_master

  ! ============== PRIVATE SUBROUTINES USED ONLY BY THE MPI MASTER ===============


  ! MPI: calculates and sends grid decomposition info to the workers
  SUBROUTINE master_decomp (comm, mland)

    USE mpi

    USE cable_IO_vars_module, ONLY : landpt

    IMPLICIT NONE

    INTEGER, INTENT(IN)   :: comm ! MPI communicator to talk to the workers
    INTEGER, INTENT(IN)   :: mland ! total number of landpoints in the global grid

    INTEGER :: lpw  ! average number of landpoints per worker
    INTEGER :: rank, rest, nxt, pcnt, ierr, i, tmp
    INTEGER :: patchcnt  ! sum of patches for a range of landpoints

    ! how many workers do we have?
    CALL MPI_Comm_size (comm, wnp, ierr)
    wnp = wnp - 1

    ALLOCATE (wland(wnp), STAT=ierr)
    IF (ierr /= 0) THEN
       PRINT*, 'master_decomp: allocation of wland failed, aborting'
       CALL MPI_Abort(comm, 0, ierr)
    END IF

    ! MPI: calculate landpoint distribution among the workers
    ! this version distributes landpoints rather than active patches,
    ! but perhaps this will be easy to change in the future?
    lpw = mland / wnp
    rest = MOD(mland, wnp)
    nxt = 1
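    ! Block decomposition: every worker gets lpw landpoints and the first
    ! 'rest' workers get one extra, so counts differ by at most 1. E.g.
    ! mland = 10 over wnp = 3 workers gives lpw = 3, rest = 1 and per-worker
    ! counts (/ 4, 3, 3 /) starting at landpoints 1, 5 and 8.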

    DO rank = 1, wnp
       wland(rank)%landp0 = nxt
       pcnt = lpw
       ! MPI: poor man's load balancing:
       ! - difference between number of landpoints assigned to
       ! different workers is 1 at most
       ! - inherent load balance because calculations are done on "patches"
       ! whose number differ between landpoints
       ! TODO: - the above to be addressed in the next version
       IF (rest > 0) THEN
          pcnt = pcnt + 1
          rest = rest - 1
       END IF
       wland(rank)%nland = pcnt

       ! MPI: let each worker know their assignment
       ! in this version of cable workers care only about the number of points
       ! CALL MPI_Send (nxt, 1, MPI_INTEGER, rank, 0, comm, ierr)

       CALL MPI_Send (pcnt, 1, MPI_INTEGER, rank, 0, comm, ierr)

       ! MPI: should be the same as landpt(nxt)%cstart
       wland(rank)%patch0 = landpt(nxt)%cstart
       ! MPI: calculate no of patches for pcnt landpoint starting from nxt
       ! MPI: TODO: workers only need to know the number of their patches
       ! or maybe not (if they need patch displacement in the input)

       ! MPI: find number of patches in all landpoints assigned to this
       ! worker (difference between last patch of last landpoint and first of
       ! first)
       patchcnt = landpt(nxt+pcnt-1)%cend - landpt(nxt)%cstart + 1
       wland(rank)%npatch = patchcnt

       ! MPI: development check
       tmp = 0
       DO i = 1, pcnt
          tmp = tmp + landpt(nxt+i-1)%nap
       END DO
       IF (patchcnt /= tmp) THEN
          WRITE (*,*) 'invalid patch number for worker ', &
               &           patchcnt,tmp,rank
          CALL MPI_Abort (comm, 0, ierr)
       END IF

       ! MPI: at this point workers can't determine patches on their own
       ! so we need to send it explicitly
       ! in this version of cable workers care only about the number of patches
       ! CALL MPI_Send (wland(rank)%patch0, 1, MPI_INTEGER, rank, 0, comm, ierr)
       CALL MPI_Send (wland(rank)%npatch, 1, MPI_INTEGER, rank, 0, comm, ierr)

       nxt = nxt + pcnt
    END DO

    RETURN

  END SUBROUTINE master_decomp


  ! MPI: creates param_t type for the master to scatter the default parameters
  ! to the workers
  ! then sends the parameters
  ! and finally frees the MPI type
  SUBROUTINE master_cable_params (comm,met,air,ssnow,veg,bgc,soil,canopy,&
       rough,rad,sum_flux,bal)

    USE mpi

    USE cable_def_types_mod
    USE cable_IO_vars_module
    ! switch soil colour albedo calc - Ticket #27
    USE cable_common_module,  ONLY: calcsoilalbedo

    IMPLICIT NONE

    ! subroutine arguments

    INTEGER, INTENT(IN) :: comm ! MPI communicator

    TYPE (met_type), INTENT(INOUT) :: met
    TYPE (air_type), INTENT(INOUT) :: air
    TYPE (soil_snow_type),      INTENT(INOUT) :: ssnow
    TYPE (veg_parameter_type),  INTENT(INOUT) :: veg
    TYPE (bgc_pool_type),       INTENT(INOUT) :: bgc
    TYPE (soil_parameter_type), INTENT(INOUT) :: soil
    TYPE (canopy_type),         INTENT(INOUT) :: canopy
    TYPE (roughness_type),      INTENT(INOUT) :: rough
    TYPE (radiation_type),      INTENT(INOUT) :: rad
    TYPE (sum_flux_type),       INTENT(INOUT) :: sum_flux
    TYPE (balances_type),       INTENT(INOUT) :: bal


    ! local vars

    ! temp arrays for marshalling all fields into a single struct
    INTEGER, ALLOCATABLE, DIMENSION(:) :: blen
    INTEGER(KIND=MPI_ADDRESS_KIND), ALLOCATABLE, DIMENSION(:) :: displs
    INTEGER, ALLOCATABLE, DIMENSION(:) :: types

    ! temp vars for verifying block count and total length of param_ts
    INTEGER(KIND=MPI_ADDRESS_KIND) :: text, tmplb
    INTEGER :: tsize, localtotal, remotetotal

    INTEGER :: ierr
    INTEGER, ALLOCATABLE, DIMENSION(:) :: param_ts

    INTEGER(KIND=MPI_ADDRESS_KIND) :: r1stride, r2stride, istride
    INTEGER :: r1len, r2len, I1LEN, llen ! block lengths
    INTEGER :: bidx ! block index
    INTEGER :: ntyp ! total number of blocks
    INTEGER :: rank

    INTEGER :: landpt_t, patch_t

    INTEGER :: nxt, pcnt, off, cnt


    ! create MPI types for exchanging slices of landpt and patch arrays
    CALL decomp_types (landpt_t, patch_t)

    ! MPI: TODO: replace sends with isends
    DO rank = 1, wnp

       ! MPI: send worker's landpts
       nxt = wland(rank)%landp0
       pcnt = wland(rank)%nland
       CALL MPI_Send (landpt(nxt), pcnt, landpt_t, rank, 0, comm, ierr)

       ! MPI: send worker's patch
       nxt = wland(rank)%patch0
       pcnt = wland(rank)%npatch
       CALL MPI_Send (patch(nxt), pcnt, patch_t, rank, 0, comm, ierr)

    END DO

    ! MPI: TODO: free landpt_t and patch_t types?

    ntyp = nparam

    ! vars intro for Ticket #27
    IF (calcsoilalbedo) THEN
       ntyp = nparam + 1
    END IF

    ALLOCATE (param_ts(wnp))

    ALLOCATE (blen(ntyp))
    ALLOCATE (displs(ntyp))
    ALLOCATE (types(ntyp))

    ! MPI: array strides for multi-dimensional types
    r1stride = mp * extr1
    r2stride = mp * extr2
    istride  = mp * extid
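    ! The strides encode the column-major (mp, n) layout of CABLE arrays:
    ! within a column, consecutive patches are contiguous; corresponding
    ! elements of adjacent columns are mp elements apart. extr1, extr2 and
    ! extid are the byte extents of the default REAL, REAL(r_2) and INTEGER
    ! kinds, so e.g. with mp = 100 patches and 4-byte default reals,
    ! r1stride = 400 bytes.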

    ! default type is byte, to be overridden for multi-D types
    types = MPI_BYTE

    ! total size of input data sent to all workers
    localtotal = 0

    ! create a separate MPI derived datatype for each worker
    DO rank = 1, wnp

       ! starting patch and number for each worker rank
       off = wland(rank)%patch0
       cnt = wland(rank)%npatch

       r1len = cnt * extr1
       r2len = cnt * extr2
       I1LEN  = cnt * extid
       llen  = cnt * extl

       bidx = 0

       ! the order of variables follows argument list
       ! the order of fields within follows alloc_*_type subroutines

       ! ----------- met --------------

       bidx = bidx + 1
       CALL MPI_Get_address (met%ca(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%year(off), displs(bidx), ierr)
       blen(bidx) = I1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (met%moy(off), displs(bidx), ierr)
       blen(bidx) = I1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (met%doy(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%hod(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%fsd(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (swb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
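       ! This is the pattern used for every multi-dimensional field below:
       ! MPI_Type_create_hvector (count, blocklen, stride, MPI_BYTE, newtype)
       ! describes count blocks (one per column of the (mp, count) array,
       ! here count = swb for met%fsd), each blocklen = cnt patches * extent
       ! bytes long, spaced one full column (stride bytes) apart. blen(bidx)
       ! is set to 1 because the hvector type already carries the full length.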

       bidx = bidx + 1
       CALL MPI_Get_address (met%fld(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%precip(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%precip_sn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%tk(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%tvair(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%tvrad(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%pmb(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%ua(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%qv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%qvair(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%da(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%dva(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (met%coszen(off), displs(bidx), ierr)
       blen(bidx) = r1len


       ! ----------- air --------------

       bidx = bidx + 1
       CALL MPI_Get_address (air%rho(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%volm(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%rlam(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%qsat(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%epsi(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%visc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%psyc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%dsatdk(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (air%cmolar(off), displs(bidx), ierr)
       blen(bidx) = r1len


       ! ----------- ssnow --------------

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%dtmlt(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (3, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%pudsto(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%pudsmx(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%albsoilsn(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%cls(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%dfn_dtg(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%dfh_dtg(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%dfe_dtg(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%dfe_ddq(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !INH - REV_CORR new variable
       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%ddq_dtg(off), displs(bidx), ierr)
       blen(bidx) = r1len


       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%evapsn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%fwtop(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%fwtop1(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%fwtop2(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%fwtop3(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%gammzz(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%isflag(off), displs(bidx), ierr)
       blen(bidx) = I1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%osnowd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%potev(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%pwb_min(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%runoff(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%rnof1(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%rnof2(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%rtsoil(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%sconds(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (msn, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = msn * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%sdepth(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (msn, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = msn * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%smass(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (msn, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = msn * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%snage(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%snowd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%smelt(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%ssdn(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (msn, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = msn * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%ssdnn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%tgg(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%tggsn(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (msn, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = msn * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%tss(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wb(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wbfice(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wbice(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wblf(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       ! additional  for sli
       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%S(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1


       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%Tsoil(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%thetai(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1


       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%snowliq(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (3, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%Tsurface(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%h0(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%nsnow(off), displs(bidx), ierr)
       blen(bidx) = I1len
       ! end additional for sli



       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wbtot(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wb_lake(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%sinfil(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%evapfbl(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%qstss(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%wetfac(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%owetfac(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%t_snwlr(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%tggav(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%otss(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (ssnow%otss_0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       ! ----------- veg --------------

       bidx = bidx + 1
       CALL MPI_Get_address (veg%canst1(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%dleaf(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%ejmax(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%frac4(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%froot(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ms, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%hc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%iveg(off), displs(bidx), ierr)
       blen(bidx) = I1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (veg%meth(off), displs(bidx), ierr)
       ! Maciej: veg%meth is REAL
       !     blen(bidx) = I1LEN
       blen(bidx) = R1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (veg%rp20(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%rpcoef(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%shelrb(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%wai(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%vegcf(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%tminvj(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%tmaxvj(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%vbeta(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%xalbnir(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%vcmax(off), displs(bidx), ierr)
       blen(bidx) = r1len

       ! bidx = bidx + 1
       ! CALL MPI_Get_address (veg%vlai(off), displs(bidx), ierr)
       ! blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%xfang(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%extkn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%deciduous(off), displs(bidx), ierr)
       ! Maciej: deciduous is logical
       !     blen(bidx) = r1len
       blen(bidx) = llen

       bidx = bidx + 1
       CALL MPI_Get_address (veg%a1gs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%d0gs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%alpha(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%convex(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%cfrd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%gswmin(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%conkc0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%conko0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%ekc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%eko(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%clitt(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%zr(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%gamma(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%refl(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (2, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (veg%taul(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (2, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1


       bidx = bidx + 1
       CALL MPI_Get_address (veg%disturbance_interval(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (2, i1len, istride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (veg%disturbance_intensity(off,1), displs(bidx), ierr)
       ! Maciej: disturbance_intensity is REAL(r_2)
       !     CALL MPI_Type_create_hvector (2, r1len, r1stride, MPI_BYTE, &
       !          &                             types(bidx), ierr)
       CALL MPI_Type_create_hvector (2, r2len, r2stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       ! Ticket #56, adding veg parms for Medlyn model
       bidx = bidx + 1
       CALL MPI_Get_address (veg%g0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%g1(off), displs(bidx), ierr)
       blen(bidx) = r1len
       ! Ticket #56, finish adding new veg parms



       ! ----------- bgc --------------

       bidx = bidx + 1
       CALL MPI_Get_address (bgc%cplant(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ncp, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ncp * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bgc%csoil(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (ncs, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ncs * r1len

       ! constant * ncp, each worker gets the same copy of whole array
       bidx = bidx + 1
       CALL MPI_Get_address (bgc%ratecp, displs(bidx), ierr)
       blen(bidx) = ncp * extr1

       ! constant * ncs, each worker gets the same copy of whole array
       bidx = bidx + 1
       CALL MPI_Get_address (bgc%ratecs, displs(bidx), ierr)
       blen(bidx) = ncs * extr1

       ! ----------- soil --------------

       bidx = bidx + 1
       CALL MPI_Get_address (soil%albsoil(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%bch(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%c3(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%clay(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%cnsd(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%css(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%hsbh(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%hyds(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%i2bp3(off), displs(bidx), ierr)
       ! Maciej: i2bp3 is REAL
       !     blen(bidx) = I1LEN
       blen(bidx) = R1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (soil%ibp2(off), displs(bidx), ierr)
       ! Maciej: ibp2 is REAL
       !     blen(bidx) = I1LEN
       blen(bidx) = R1LEN

       bidx = bidx + 1
       CALL MPI_Get_address (soil%isoilm(off), displs(bidx), ierr)
       ! Maciej isoilm is INTEGER
       !     blen(bidx) = r1len
       blen(bidx) = i1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%rhosoil(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (veg%rs20(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%sand(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%sfc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%silt(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%ssat(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%sucs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%swilt(off), displs(bidx), ierr)
       blen(bidx) = r1len

       ! the next two are extra for sli

       bidx = bidx + 1
       CALL MPI_Get_address (soil%zeta(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (soil%fsatmax(off), displs(bidx), ierr)
       blen(bidx) = r2len

       ! end extra sli

       ! constant * ms, each worker gets the same copy of whole array
       bidx = bidx + 1
       CALL MPI_Get_address (soil%zse, displs(bidx), ierr)
       blen(bidx) = ms * extr1

       ! constant * (ms+1), each worker gets the same copy of whole array
       bidx = bidx + 1
       CALL MPI_Get_address (soil%zshh, displs(bidx), ierr)
       blen(bidx) = (ms + 1) * extr1
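       ! Unlike the per-patch fields above, soil%zse and soil%zshh (like
       ! bgc%ratecp and bgc%ratecs earlier) are global constants: no hvector
       ! is needed and the whole array is replicated to every worker simply
       ! by giving its full byte length in blen.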

       ! vars intro for Ticket #27
       IF (calcsoilalbedo) THEN
          bidx = bidx + 1
          CALL MPI_Get_address (soil%soilcol(off), displs(bidx), ierr)
          blen(bidx) = r1len
       END IF

       ! ----------- canopy --------------

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fess(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fesp(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%cansto(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%oldcansto(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%cduv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%delwc(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%dewmm(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%dgdtg(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fe(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fh(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fpn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%frp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%frpw(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%frpr(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%frs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fnee(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%frday(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fnv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fev(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fevc(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fevw(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !  bidx = bidx + 1
       !  CALL MPI_Get_address (canopy%potev_c(off), displs(bidx), ierr)
       !  blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fhv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fhvw(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fns(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !INH - REV_CORR new variable - temporary?
       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fns_cor(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fes(off), displs(bidx), ierr)
       blen(bidx) = r2len

       !INH - REV_CORR new variable - temporary?
       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fes_cor(off), displs(bidx), ierr)
       blen(bidx) = r2len

       !INH - SSEB new variable - temporary?
       !bidx = bidx + 1
       !CALL MPI_Get_address (canopy%fescor_upp(off), displs(bidx), ierr)
       !blen(bidx) = r2len

       !INH - SSEB new variable - temporary?
       !bidx = bidx + 1
       !CALL MPI_Get_address (canopy%fescor_low(off), displs(bidx), ierr)
       !blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fhs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !INH - REV_CORR - temporary?
       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fhs_cor(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fwet(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%ga(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !INH - REV_CORR - temporary?
       bidx = bidx + 1
       CALL MPI_Get_address (canopy%ga_cor(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%ghflux(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%precis(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%qscrn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%rnet(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%segg(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%sghflux(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%spill(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%through(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%tscrn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%tv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%us(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%uscrn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%vlaiw(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%rghlai(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%wcint(off), displs(bidx), ierr)
       blen(bidx) = r1len

       !  bidx = bidx + 1
       !  CALL MPI_Get_address (canopy%rwater(off,1), displs(bidx), ierr)
       !  CALL MPI_Type_create_hvector (ms, r1len, r1stride, MPI_BYTE, &
       !  &                             types(bidx), ierr)
       !  blen(bidx) = 1
       !  !blen(bidx) = ms * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%evapfbl(off,1), displs(bidx), ierr)
       ! MPI: gol124: changed to r1 when Bernard ported to CABLE_r491
       CALL MPI_Type_create_hvector (ms, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = ms * r2len
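       ! Editor's note: evapfbl is 2-D (mp landpoints x ms soil layers), so a
       ! plain byte count cannot describe this worker's patch of it.
       ! MPI_Type_create_hvector (count, blocklength, stride, oldtype, newtype)
       ! builds a strided type instead: ms blocks of r1len bytes (the worker's
       ! slice of one soil layer), each presumably r1stride bytes apart (the
       ! byte length of one full mp column, as set up earlier in this file).
       ! blen is then 1 because the hvector type already spans the whole
       ! field. A minimal sketch of the same pattern for a hypothetical
       ! mp x k REAL field foo2d:
       !   CALL MPI_Get_address (foo2d(off,1), displs(bidx), ierr)
       !   CALL MPI_Type_create_hvector (k, r1len, r1stride, MPI_BYTE, &
       !        &                        types(bidx), ierr)
       !   blen(bidx) = 1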

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%epot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fnpp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fevw_pot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%gswx_T(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%cdtq(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%wetfac_cs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%fwsoil(off), displs(bidx), ierr)
       blen(bidx) = r2len

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%gswx(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1

       bidx = bidx + 1
       CALL MPI_Get_address (canopy%zetar(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (niter, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
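
       ! Editor's note: the two hvectors above cover the remaining 2-D canopy
       ! fields: gswx is mp x mf (one stomatal conductance per sun/shade leaf
       ! class) and zetar is mp x niter, which presumably stores the
       ! Monin-Obukhov stability parameter for each pass of the fixed-count
       ! stability iteration.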


       ! ------- rough -------

       bidx = bidx + 1
       CALL MPI_Get_address (rough%coexp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%disp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%hruff(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%hruff_grmx(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%rt0us(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%rt1usa(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%rt1usb(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%rt1(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%term2(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%term3(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%term5(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%term6(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%usuh(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%za_uv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%za_tq(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%z0m(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%zref_uv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%zref_tq(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%zruffs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%z0soilsn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rough%z0soil(off), displs(bidx), ierr)
       blen(bidx) = r1len

       ! ------- rad -------
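
       ! Editor's note on the counts used below: nrb is the number of
       ! radiation bands and mf the number of sun/shade leaf classes (in
       ! CABLE these are typically 3 and 2), while swb counts only the
       ! shortwave bands (typically 2) -- see the Maciej notes further down,
       ! where fields dimensioned with swb and nrb had been mixed up.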

       bidx = bidx + 1
       CALL MPI_Get_address (rad%albedo(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%extkb(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%extkd2(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%extkd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%flws(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%fvlai(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = mf * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%gradis(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = mf * r2len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%latitude(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%lwabv(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%qcan(off,1,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf*nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       ! blen(bidx) = mf * nrb * r1len
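
       ! Editor's note: qcan is 3-D (mp x mf x nrb). Fortran arrays are
       ! column-major, so the mf*nrb landpoint columns sit consecutively in
       ! memory, each r1stride bytes apart; a single hvector with count
       ! mf*nrb therefore covers the whole field without nesting datatypes.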

       bidx = bidx + 1
       CALL MPI_Get_address (rad%qssabs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%rhocdf(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%rniso(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = mf * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%scalex(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (mf, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = mf * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%transd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%trad(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%reffdf(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%reffbm(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%extkbm(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%extkdm(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%fbeam(off,1), displs(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%cexpkbm(off,1), displs(bidx), ierr)
       ! Maciej: cexpkbm is mp*swb
       !     CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
       !          &                             types(bidx), ierr)
       CALL MPI_Type_create_hvector (swb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%cexpkdm(off,1), displs(bidx), ierr)
       ! Maciej: cexpkdm is mp*swb
       !     CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
       !          &                             types(bidx), ierr)
       CALL MPI_Type_create_hvector (swb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
       !blen(bidx) = nrb * r1len

       bidx = bidx + 1
       CALL MPI_Get_address (rad%rhocbm(off,1), displs(bidx), ierr)
       ! Maciej: rhocbm is mp*nrb
       !     CALL MPI_Type_create_hvector (swb, r1len, r1stride, MPI_BYTE, &
       !          &                             types(bidx), ierr)
       CALL MPI_Type_create_hvector (nrb, r1len, r1stride, MPI_BYTE, &
            &                             types(bidx), ierr)
       blen(bidx) = 1
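
       ! Editor's note: as with cexpkbm/cexpkdm above, the hvector count must
       ! match the field's actual trailing dimension (nrb here, swb there): a
       ! count that is too large makes the type read past the field, one too
       ! small silently leaves part of it untransferred.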

       bidx = bidx + 1
       CALL MPI_Get_address (rad%transb(off), displs(bidx), ierr)
       blen(bidx) = r1len

       ! ------- sum_flux -----

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumpn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumrp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumrpw(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumrpr(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumrs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumrd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%dsumpn(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%dsumrp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%dsumrs(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%dsumrd(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumxrp(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (sum_flux%sumxrs(off), displs(bidx), ierr)
       blen(bidx) = r1len
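
       ! Editor's note: every section of this routine keeps appending to the
       ! same blen/displs/types triplet. Once the final field is registered,
       ! the triplet is presumably combined into a single per-worker derived
       ! type and committed, along these lines (recv_t is a hypothetical
       ! handle name; the real call sits later in this routine):
       !   CALL MPI_Type_create_struct (bidx, blen, displs, types, recv_t, ierr)
       !   CALL MPI_Type_commit (recv_t, ierr)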

       ! ------- bal ----

       bidx = bidx + 1
       CALL MPI_Get_address (bal%drybal(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%ebal(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%ebal_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%ebal_cncheck(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%ebal_tot_cncheck(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%evap_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%osnowd0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%precip_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%rnoff_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%wbal(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%wbal_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%wbtot0(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%wetbal(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%owbtot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%evapc_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1
       CALL MPI_Get_address (bal%evaps_tot(off), displs(bidx), ierr)
       blen(bidx) = r1len

       bidx = bidx + 1