#include <petscdraw.h>

#include "pism/util/array/Array.hh"
#include "pism/util/array/Array_impl.hh"

#include "pism/util/Time.hh"
#include "pism/util/Grid.hh"
#include "pism/util/ConfigInterface.hh"
#include "pism/util/error_handling.hh"
#include "pism/util/io/io_helpers.hh"
#include "pism/util/Logger.hh"
#include "pism/util/Profiling.hh"
#include "pism/util/petscwrappers/VecScatter.hh"
#include "pism/util/petscwrappers/Viewer.hh"
#include "pism/util/Context.hh"
#include "pism/util/VariableMetadata.hh"
#include "pism/util/io/File.hh"
#include "pism/util/pism_utilities.hh"
#include "pism/util/io/IO_Flags.hh"
ierr = DMGlobalToLocalBegin(dm, source, INSERT_VALUES, destination);
PISM_CHK(ierr, "DMGlobalToLocalBegin");

ierr = DMGlobalToLocalEnd(dm, source, INSERT_VALUES, destination);
PISM_CHK(ierr, "DMGlobalToLocalEnd");
Array::Array(std::shared_ptr<const Grid> grid,
             const std::string &name,
             Kind ghostedp,
             size_t dof,
             size_t stencil_width,
             const std::vector<double> &zlevels) {
const auto &config = grid->ctx()->config();

auto max_stencil_width = static_cast<size_t>(config->get_number("grid.max_stencil_width"));
auto system = m_impl->grid->ctx()->unit_system();

for (unsigned int j = 0; j < m_impl->dof; ++j) {
if (zlevels.size() > 1) {
  m_impl->bsearch_accel = gsl_interp_accel_alloc();
  if (m_impl->bsearch_accel == NULL) {
    throw RuntimeError(PISM_ERROR_LOCATION,
                       "Failed to allocate a GSL interpolation accelerator");
  }
}
return {(int)grid->My(), (int)grid->Mx()};

"invalid interpolation type: %d", (int)type);
ierr = VecMin(vec(), NULL, &min);

ierr = VecMax(vec(), NULL, &max);

return NORM_INFINITY;
PetscErrorCode ierr = VecAXPY(vec(), alpha, x.vec());

PetscErrorCode ierr = VecShift(vec(), alpha);

PetscErrorCode ierr = VecScale(vec(), alpha);
this->get_dof(destination_da, destination, 0, N);

unsigned int count) const {
double ***result_a = static_cast<double ***>(tmp_res.get()),
       ***source_a = static_cast<double ***>(tmp_v.get());

for (auto p = m_impl->grid->points(); p; p.next()) {
  const int i = p.i(), j = p.j();
  PetscErrorCode ierr =
      PetscMemcpy(result_a[j][i], &source_a[j][i][start], count * sizeof(PetscScalar));
unsigned int count) {

double ***source_a = static_cast<double ***>(tmp_src.get()),
       ***result_a = static_cast<double ***>(tmp_v.get());

for (auto p = m_impl->grid->points(); p; p.next()) {
  const int i = p.i(), j = p.j();
  PetscErrorCode ierr =
      PetscMemcpy(&result_a[j][i][start], source_a[j][i], count * sizeof(PetscScalar));
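// Illustration (not from the original source): get_dof()/set_dof() above rely
// on PETSc's DOF-interleaved layout, where a[j][i] points at `dof` contiguous
// values for grid point (i, j). Copying `count` components starting at offset
// `start` is therefore a single contiguous copy per grid point:
#include <cstring>

static void copy_components(double *point_data, const double *src,
                            size_t start, size_t count) {
  // same effect as PetscMemcpy(&point_data[start], src, count * sizeof(PetscScalar))
  std::memcpy(point_data + start, src, count * sizeof(double));
}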
PetscErrorCode ierr = 0;
if (m_impl->ghosted) {
  ierr = DMCreateLocalVector(*m_impl->da, m_impl->v.rawptr());
  PISM_CHK(ierr, "DMCreateLocalVector");
} else {
  ierr = DMCreateGlobalVector(*m_impl->da, m_impl->v.rawptr());
  PISM_CHK(ierr, "DMCreateGlobalVector");
}
if (not default_value.exists()) {
  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
                                "Can't find '%s' in the regridding file '%s'.",
                                variable.get_name().c_str(), filename.c_str());
}
std::string spacer(variable.get_name().size(), ' ');
log.message(2,
            "  absent %s / %-10s\n"
            "         %s \\ not found; using default constant %7.2f (%s)\n",
            variable.get_name().c_str(), variable.get_string("long_name").c_str(),
            spacer.c_str(),
            c(default_value), variable.get_string("output_units").c_str());

PetscErrorCode ierr = VecSet(output, default_value);
static std::array<double, 2> compute_range(MPI_Comm com, const double *data,
                                           size_t data_size) {
  double min_result = data[0], max_result = data[0];
  for (size_t k = 0; k < data_size; ++k) {
    min_result = std::min(min_result, data[k]);
    max_result = std::max(max_result, data[k]);
  }

  std::array<double, 2> result;
  GlobalMin(com, &min_result, &result[0], 1);
  GlobalMax(com, &max_result, &result[1], 1);
  return result;
}
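// Usage sketch (hypothetical): computing the global range of this rank's
// values; `local_values` stands in for the raw array of a PETSc Vec.
static void example_range(MPI_Comm com) {
  std::vector<double> local_values = {1.0, -3.0, 2.5};
  std::array<double, 2> range =
      compute_range(com, local_values.data(), local_values.size());
  // range[0] is the minimum and range[1] the maximum over all ranks
}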
auto log = grid()->ctx()->log();

auto da2 = grid()->get_dm(1, 0);

for (unsigned int j = 0; j < ndof(); ++j) {

auto V = file.find_variable(variable.get_name(), variable["standard_name"]);

input_grid.report(*log, 4, variable.unit_system());

const size_t data_size = grid()->xm() * grid()->ym() * levels().size();
if ((not std::isfinite(min)) or (not std::isfinite(max))) {
  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
                                "Variable '%s' ('%s') contains numbers that are not finite (NaN or infinity)",
                                variable.get_name().c_str(), variable.get_string("long_name").c_str());
}
variable.report_range(*log, min, max, V.found_using_standard_name);

auto log = grid()->ctx()->log();
log->message(4, "  Reading %s...\n", m_impl->name.c_str());

auto da2 = grid()->get_dm(1, 0);

for (unsigned int j = 0; j < ndof(); ++j) {

for (unsigned int j = 0; j < ndof(); ++j) {
assert(N < m_impl->dof);

assert(N < m_impl->dof);

log->message(3, "[%s] Writing %s...\n",

for (unsigned int j = 0; j < ndof(); ++j) {

log->message(3, "[%s] Writing %s...\n",
PetscInt X_size, Y_size;

ierr = VecGetSize(vec(), &X_size);

ierr = VecGetSize(other.vec(), &Y_size);

if (X_size != Y_size) {
PISM_CHK(ierr, "DMDAVecGetArrayDOF");

"Array::end_access(): a == NULL (looks like begin_access() was not called)");

PISM_CHK(ierr, "DMDAVecRestoreArrayDOF");

PISM_CHK(ierr, "DMDAVecRestoreArray");
ierr = DMLocalToLocalBegin(*dm(), vec(), INSERT_VALUES, vec());
PISM_CHK(ierr, "DMLocalToLocalBegin");

ierr = DMLocalToLocalEnd(*dm(), vec(), INSERT_VALUES, vec());
PISM_CHK(ierr, "DMLocalToLocalEnd");

PetscErrorCode ierr = VecSet(vec(), c);
double ghost_width = 0;

bool out_of_range = (i < m_impl->grid->xs() - ghost_width) ||
                    (i > m_impl->grid->xs() + m_impl->grid->xm() - 1 + ghost_width) ||
                    (j < m_impl->grid->ys() - ghost_width) ||
                    (j > m_impl->grid->ys() + m_impl->grid->ym() - 1 + ghost_width);
PetscErrorCode ierr = VecStrideNormAll(vec(), type, result.data());

PetscErrorCode ierr = VecNorm(vec(), type, result.data());
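// Note (illustration, not from the original): for fields with ndof() > 1 the
// branch above uses VecStrideNormAll(), which computes one norm per
// interleaved component, while VecNorm() treats the whole Vec as one field.
// E.g., for a hypothetical two-component velocity field:
//   std::vector<double> norms = velocity.norm(NORM_2);
//   // norms[0] is the norm of the first component, norms[1] of the second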
765 "Array::norm_all(...): NORM_1_AND_2"
766 " not implemented (called as %s.norm_all(...))",
782 case NORM_INFINITY: {
790 "Array::norm_all(...): unknown norm type"
791 " (called as %s.norm_all(...))",
void Array::read(const std::string &filename, unsigned int time) {

this->read(file, time);

this->regrid(file, default_value);
m_impl->grid->ctx()->log()->message(3, "  [%s] Regridding %s...\n",

m_impl->grid->ctx()->profiling().begin("io.regridding");

m_impl->grid->ctx()->profiling().end("io.regridding");

if (time_spent > 1.0) {
  m_impl->grid->ctx()->log()->message(3, " done in %f seconds.\n", time_spent);
} else {
  m_impl->grid->ctx()->log()->message(3, " done.\n");
}
megabyte = pow(2, 20),
N = static_cast<double>(size()),
mb_double = static_cast<double>(sizeof(double)) * N / megabyte,
mb_float = static_cast<double>(sizeof(float)) * N / megabyte;

std::string spacer(timestamp.size(), ' ');
if (time_spent > 1) {
  m_impl->grid->ctx()->log()->message(3,
      "  [%s] Done writing %s (%f Mb double, %f Mb float)\n"
      "   %s  in %f seconds (%f minutes).\n"
      "   %s  Effective throughput: double: %f Mb/s, float: %f Mb/s.\n",
      timestamp.c_str(), m_impl->name.c_str(), mb_double, mb_float,
      spacer.c_str(), time_spent, time_spent / minute,
      spacer.c_str(),
      mb_double / time_spent, mb_float / time_spent);
} else {
  m_impl->grid->ctx()->log()->message(3, " done.\n");
}
while (not m_vecs.empty()) {
  m_vecs.back()->end_access();

for (const auto *j : vecs) {
  assert(j != nullptr);

for (const auto *v : vecs) {
  assert(v != nullptr);

return Mx * My * Mz * dof;
ierr = PetscObjectQuery((PetscObject)dm()->get(), "v_proc0", (PetscObject *)&v_proc0);

if (v_proc0 == NULL) {

ierr = DMDACreateNaturalVector(*dm(), natural_work.rawptr());
PISM_CHK(ierr, "DMDACreateNaturalVector");
ierr = PetscObjectCompose((PetscObject)dm()->get(), "natural_work",
                          (PetscObject)((::Vec)natural_work));
PISM_CHK(ierr, "PetscObjectCompose");
ierr = VecScatterCreateToZero(natural_work, scatter_to_zero.rawptr(),
                              &v_proc0);
PISM_CHK(ierr, "VecScatterCreateToZero");
ierr = PetscObjectCompose((PetscObject)dm()->get(), "scatter_to_zero",
                          (PetscObject)((::VecScatter)scatter_to_zero));
PISM_CHK(ierr, "PetscObjectCompose");

ierr = PetscObjectCompose((PetscObject)dm()->get(), "v_proc0",
                          (PetscObject)v_proc0);
PISM_CHK(ierr, "PetscObjectCompose");
ierr = VecDuplicate(v_proc0, &result);

return std::shared_ptr<petsc::Vec>(new petsc::Vec(result));
PetscErrorCode ierr = 0;
VecScatter scatter_to_zero = NULL;
Vec natural_work = NULL;

ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
                        (PetscObject *)&scatter_to_zero);
PISM_CHK(ierr, "PetscObjectQuery");

ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
                        (PetscObject *)&natural_work);
PISM_CHK(ierr, "PetscObjectQuery");
if (natural_work == NULL || scatter_to_zero == NULL) {

ierr = DMDAGlobalToNaturalBegin(*dm(), parallel, INSERT_VALUES, natural_work);
PISM_CHK(ierr, "DMDAGlobalToNaturalBegin");

ierr = DMDAGlobalToNaturalEnd(*dm(), parallel, INSERT_VALUES, natural_work);
PISM_CHK(ierr, "DMDAGlobalToNaturalEnd");

ierr = VecScatterBegin(scatter_to_zero, natural_work, onp0,
                       INSERT_VALUES, SCATTER_FORWARD);

ierr = VecScatterEnd(scatter_to_zero, natural_work, onp0,
                     INSERT_VALUES, SCATTER_FORWARD);
PetscErrorCode ierr;

VecScatter scatter_to_zero = NULL;
Vec natural_work = NULL;

ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
                        (PetscObject *)&scatter_to_zero);
PISM_CHK(ierr, "PetscObjectQuery");

ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
                        (PetscObject *)&natural_work);
PISM_CHK(ierr, "PetscObjectQuery");
if (natural_work == NULL || scatter_to_zero == NULL) {

ierr = VecScatterBegin(scatter_to_zero, onp0, natural_work,
                       INSERT_VALUES, SCATTER_REVERSE);

ierr = VecScatterEnd(scatter_to_zero, onp0, natural_work,
                     INSERT_VALUES, SCATTER_REVERSE);

ierr = DMDANaturalToGlobalBegin(*dm(), natural_work, INSERT_VALUES, parallel);
PISM_CHK(ierr, "DMDANaturalToGlobalBegin");

ierr = DMDANaturalToGlobalEnd(*dm(), natural_work, INSERT_VALUES, parallel);
PISM_CHK(ierr, "DMDANaturalToGlobalEnd");
MPI_Comm_rank(com, &rank);

uint64_t result = 0;

PetscErrorCode ierr = VecGetLocalSize(*v, &size);

MPI_Bcast(&result, 1, MPI_UINT64_T, 0, com);
static_assert(sizeof(double) == 2 * sizeof(uint32_t),
              "Cannot compile Array::fletcher64() (sizeof(double) != 2 * sizeof(uint32_t))");

MPI_Status mpi_stat;
const int checksum_tag = 42;

MPI_Comm_rank(com, &rank);

MPI_Comm_size(com, &comm_size);

PetscInt local_size = 0;
PetscErrorCode ierr = VecGetLocalSize(vec(), &local_size);
PISM_CHK(ierr, "VecGetLocalSize");
std::vector<uint64_t> sums(comm_size);

for (int r = 1; r < comm_size; ++r) {
  MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, checksum_tag, com, &mpi_stat);

MPI_Send(&sum, 1, MPI_UINT64_T, 0, checksum_tag, com);

MPI_Bcast(&sum, 1, MPI_UINT64_T, 0, com);
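// Sketch (not from the original source): the rank-0 gather plus broadcast
// used by Array::fletcher64() above, reduced to its MPI skeleton.
static uint64_t reduce_checksums(MPI_Comm com, uint64_t local_sum) {
  int rank = 0, comm_size = 0;
  MPI_Comm_rank(com, &rank);
  MPI_Comm_size(com, &comm_size);
  const int tag = 42;
  if (rank == 0) {
    std::vector<uint64_t> sums(comm_size);
    sums[0] = local_sum;
    for (int r = 1; r < comm_size; ++r) {
      MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, tag, com, MPI_STATUS_IGNORE);
    }
    // hash the gathered per-rank sums (as the original does with fletcher64())
    local_sum = fletcher64((const uint32_t *)sums.data(), 2 * sums.size());
  } else {
    MPI_Send(&local_sum, 1, MPI_UINT64_T, 0, tag, com);
  }
  // make the combined checksum available on every rank
  MPI_Bcast(&local_sum, 1, MPI_UINT64_T, 0, com);
  return local_sum;
}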
log->message(1, "%s%s: %s\n", prefix, m_impl->name.c_str(), checksum(serial).c_str());
void Array::view(std::vector<std::shared_ptr<petsc::Viewer> > viewers) const {

PetscErrorCode ierr;

"cannot 'view' a 3D field '%s'",

for (unsigned int i = 0; i < ndof(); ++i) {

output_units.c_str());

PetscViewer v = *viewers[i].get();
PetscDraw draw = NULL;
ierr = PetscViewerDrawGetDraw(v, 0, &draw);
PISM_CHK(ierr, "PetscViewerDrawGetDraw");

ierr = PetscDrawSetTitle(draw, title.c_str());
PISM_CHK(ierr, "PetscDrawSetTitle");

units, output_units);
double bounds[2] = {0.0, 0.0};
ierr = VecMin(tmp, NULL, &bounds[0]);
PISM_CHK(ierr, "VecMin");
ierr = VecMax(tmp, NULL, &bounds[1]);
PISM_CHK(ierr, "VecMax");

if (bounds[0] == bounds[1]) {

ierr = PetscViewerDrawSetBounds(v, 1, bounds);
PISM_CHK(ierr, "PetscViewerDrawSetBounds");

ierr = VecView(tmp, v);
void convert_vec(petsc::Vec &v, units::System::Ptr system,
                 const std::string &spec1, const std::string &spec2) {

PetscInt data_size = 0;
PetscErrorCode ierr = VecGetLocalSize(v, &data_size);