Array.cc
1// Copyright (C) 2008--2024 Ed Bueler, Constantine Khroulev, and David Maxwell
2//
3// This file is part of PISM.
4//
5// PISM is free software; you can redistribute it and/or modify it under the
6// terms of the GNU General Public License as published by the Free Software
7// Foundation; either version 3 of the License, or (at your option) any later
8// version.
9//
10// PISM is distributed in the hope that it will be useful, but WITHOUT ANY
11// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
13// details.
14//
15// You should have received a copy of the GNU General Public License
16// along with PISM; if not, write to the Free Software
17// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18
19#include <cassert>
20
21#include <cmath>
22#include <cstddef>
23#include <memory>
24#include <petscdraw.h>
25#include <string>
26#include <vector>
27
28#include "pism/util/array/Array.hh"
29#include "pism/util/array/Array_impl.hh"
30
31#include "pism/util/ConfigInterface.hh"
32#include "pism/util/Grid.hh"
33#include "pism/util/Time.hh"
34
35#include "pism/util/Context.hh"
36#include "pism/util/Logger.hh"
37#include "pism/util/Profiling.hh"
38#include "pism/util/VariableMetadata.hh"
39#include "pism/util/error_handling.hh"
40#include "pism/util/io/File.hh"
41#include "pism/util/io/IO_Flags.hh"
42#include "pism/util/io/io_helpers.hh"
43#include "pism/util/petscwrappers/VecScatter.hh"
44#include "pism/util/petscwrappers/Viewer.hh"
45#include "pism/util/pism_utilities.hh"
46
47#include "pism/util/InputInterpolation.hh"
48
49namespace pism {
50
51namespace array {
52
53void global_to_local(petsc::DM &dm, Vec source, Vec destination) {
54 PetscErrorCode ierr;
55
56 ierr = DMGlobalToLocalBegin(dm, source, INSERT_VALUES, destination);
57 PISM_CHK(ierr, "DMGlobalToLocalBegin");
58
59 ierr = DMGlobalToLocalEnd(dm, source, INSERT_VALUES, destination);
60 PISM_CHK(ierr, "DMGlobalToLocalEnd");
61}
62
63Array::Array(std::shared_ptr<const Grid> grid, const std::string &name, Kind ghostedp, size_t dof,
64 size_t stencil_width, const std::vector<double> &zlevels) {
65 m_impl = new Impl();
66 m_array = nullptr;
67
68 m_impl->name = name;
69 m_impl->grid = grid;
70 m_impl->ghosted = (ghostedp == WITH_GHOSTS);
71 m_impl->dof = dof;
72 m_impl->zlevels = zlevels;
73
74 const auto &config = grid->ctx()->config();
75
76 auto max_stencil_width = static_cast<size_t>(config->get_number("grid.max_stencil_width"));
77 if ((dof != 1) or (stencil_width > max_stencil_width)) {
78 // use the requested stencil width *if* we have to
79 m_impl->da_stencil_width = stencil_width;
80 } else {
81 // otherwise use the "standard" stencil width
82 m_impl->da_stencil_width = max_stencil_width;
83 }
84
85 auto system = m_impl->grid->ctx()->unit_system();
86 if (m_impl->dof > 1) {
87 // dof > 1: this is a 2D vector
88 for (unsigned int j = 0; j < m_impl->dof; ++j) {
89 m_impl->metadata.push_back({ system, pism::printf("%s[%d]", name.c_str(), j) });
90 }
91 } else {
92 // dof == 1: this covers both 2D and 3D scalar fields
93 m_impl->metadata = { { system, name, zlevels } };
94 }
95
96 if (zlevels.size() > 1) {
97 m_impl->bsearch_accel = gsl_interp_accel_alloc();
98 if (m_impl->bsearch_accel == NULL) {
99 throw RuntimeError(PISM_ERROR_LOCATION, "Failed to allocate a GSL interpolation accelerator");
100 }
101 }
102}
103
104Array::~Array() {
105 assert(m_impl->access_counter == 0);
106
107 if (m_impl->bsearch_accel != nullptr) {
108 gsl_interp_accel_free(m_impl->bsearch_accel);
109 m_impl->bsearch_accel = nullptr;
110 }
111
112 delete m_impl;
113 m_impl = nullptr;
114}
115
116//! \brief Get the object state counter.
117/*!
118 * This method returns the "revision number" of an Array.
119 *
120 * It can be used to determine if a field was updated and whether a certain
121 * computation needs to be re-done. One example is computing the smoothed bed
122 * for the SIA computation, which is only necessary if the bed deformation code
123 * fired.
124 *
125 * See also inc_state_counter().
126 */
127int Array::state_counter() const {
128 return m_impl->state_counter;
129}
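/* Illustration (added; not part of Array.cc): a minimal sketch of the caching
 * pattern this counter enables. The class `SmoothedBed`, its member
 * `m_last_bed_counter`, and `recompute()` are hypothetical names.
 *
 * @code
 * void SmoothedBed::update(const array::Array &bed) {
 *   if (bed.state_counter() != m_last_bed_counter) {
 *     recompute(bed);                           // bed changed: redo the work
 *     m_last_bed_counter = bed.state_counter();
 *   }
 *   // otherwise: the cached result is still valid
 * }
 * @endcode
 */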
130
131std::shared_ptr<const Grid> Array::grid() const {
132 return m_impl->grid;
133}
134
135unsigned int Array::ndof() const {
136 return m_impl->dof;
137}
138
139const std::vector<double> &Array::levels() const {
140 return m_impl->zlevels;
141}
142
143//! \brief Increment the object state counter.
144/*!
145 * See the documentation of get_state_counter(). This method is the
146 * *only* way to manually increment the state counter. It is also
147 * automatically updated by Array methods that are known to
148 * change stored values.
149 */
150void Array::inc_state_counter() {
151 m_impl->state_counter += 1;
152}
153
154//! Returns the number of spatial dimensions.
155unsigned int Array::ndims() const {
156 return m_impl->zlevels.size() > 1 ? 3 : 2;
157}
158
159std::vector<int> Array::shape() const {
160
161 auto grid = m_impl->grid;
162
163 if (ndims() == 3) {
164 return { (int)grid->My(), (int)grid->Mx(), (int)levels().size() };
165 }
166
167 if (ndof() == 1) {
168 return { (int)grid->My(), (int)grid->Mx() };
169 }
170
171 return { (int)grid->My(), (int)grid->Mx(), (int)ndof() };
172}
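/* Illustration (added): shape() lists dimensions in {y, x, z-or-dof} order.
 * For example, on a grid with Mx = 61 and My = 121, a 3D field with 41
 * vertical levels has shape {121, 61, 41}, while a 2-component 2D field has
 * shape {121, 61, 2}.
 */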
173
174void Array::set_begin_access_use_dof(bool flag) {
175 m_impl->begin_access_use_dof = flag;
176}
177
178void Array::set_interpolation_type(InterpolationType type) {
179 if (not(type == LINEAR or type == NEAREST)) {
180 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid interpolation type: %d", (int)type);
181 }
182 m_impl->interpolation_type = type;
183}
184
185/** Convert from `int` to PETSc's `NormType`.
186 *
187 *
188 * @param[in] input norm type as an integer
189 *
190 * @return norm type as PETSc's `NormType`.
191 */
192static NormType int_to_normtype(int input) {
193 switch (input) {
194 case NORM_1:
195 return NORM_1;
196 case NORM_2:
197 return NORM_2;
198 case NORM_INFINITY:
199 return NORM_INFINITY;
200 default:
201 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid norm type: %d", input);
202 }
203}
204
205//! Result: v <- v + alpha * x. Calls VecAXPY.
206void Array::add(double alpha, const Array &x) {
207 checkCompatibility("add", x);
208
209 PetscErrorCode ierr = VecAXPY(vec(), alpha, x.vec());
210 PISM_CHK(ierr, "VecAXPY");
211
212 inc_state_counter(); // mark as modified
213}
214
215//! Result: v[j] <- v[j] + alpha for all j. Calls VecShift.
216void Array::shift(double alpha) {
217 PetscErrorCode ierr = VecShift(vec(), alpha);
218 PISM_CHK(ierr, "VecShift");
219
220 inc_state_counter(); // mark as modified
221}
222
223//! Result: v <- v * alpha. Calls VecScale.
224void Array::scale(double alpha) {
225 PetscErrorCode ierr = VecScale(vec(), alpha);
226 PISM_CHK(ierr, "VecScale");
227
228 inc_state_counter(); // mark as modified
229}
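/* Illustration (added; not part of Array.cc): add(), shift() and scale()
 * compose into affine updates. A sketch computing v <- 2*v + 0.5*x + 3 for
 * two compatible arrays `v` and `x`:
 *
 * @code
 * v.scale(2.0);    // v <- 2*v
 * v.add(0.5, x);   // v <- v + 0.5*x
 * v.shift(3.0);    // v <- v + 3
 * @endcode
 *
 * Each call increments the state counter, so dependent computations see one
 * consistent "modified" signal after the sequence.
 */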
230
231//! Copies v to a global vector 'destination'. Ghost points are discarded.
232/*! This is potentially dangerous: make sure that `destination` has the same
233 dimensions as the current Array.
234
235 DMLocalToGlobalBegin/End is broken in PETSc 3.5, so we roll our
236 own.
237 */
238void Array::copy_to_vec(std::shared_ptr<petsc::DM> destination_da, petsc::Vec &destination) const {
239 // m_dof > 1 for vector, staggered grid 2D fields, etc. In this case
240 // zlevels.size() == 1. For 3D fields, m_dof == 1 (all 3D fields are
241 // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
242 // DM object. So we want the bigger of the two numbers here.
243 unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
244
245 this->get_dof(destination_da, destination, 0, N);
246}
247
248void Array::get_dof(std::shared_ptr<petsc::DM> da_result, petsc::Vec &result, unsigned int start,
249 unsigned int count) const {
250 if (start >= m_impl->dof) {
251 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
252 }
253
254 petsc::DMDAVecArrayDOF tmp_res(da_result, result), tmp_v(dm(), vec());
255
256 double ***result_a = static_cast<double ***>(tmp_res.get()),
257 ***source_a = static_cast<double ***>(tmp_v.get());
258
259 ParallelSection loop(m_impl->grid->com);
260 try {
261 for (auto p = m_impl->grid->points(); p; p.next()) {
262 const int i = p.i(), j = p.j();
263 PetscErrorCode ierr =
264 PetscMemcpy(result_a[j][i], &source_a[j][i][start], count * sizeof(PetscScalar));
265 PISM_CHK(ierr, "PetscMemcpy");
266 }
267 } catch (...) {
268 loop.failed();
269 }
270 loop.check();
271}
272
273void Array::set_dof(std::shared_ptr<petsc::DM> da_source, petsc::Vec &source, unsigned int start,
274 unsigned int count) {
275 if (start >= m_impl->dof) {
276 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
277 }
278
279 petsc::DMDAVecArrayDOF tmp_src(da_source, source), tmp_v(dm(), vec());
280
281 double ***source_a = static_cast<double ***>(tmp_src.get()),
282 ***result_a = static_cast<double ***>(tmp_v.get());
283
284 ParallelSection loop(m_impl->grid->com);
285 try {
286 for (auto p = m_impl->grid->points(); p; p.next()) {
287 const int i = p.i(), j = p.j();
288 PetscErrorCode ierr =
289 PetscMemcpy(&result_a[j][i][start], source_a[j][i], count * sizeof(PetscScalar));
290 PISM_CHK(ierr, "PetscMemcpy");
291 }
292 } catch (...) {
293 loop.failed();
294 }
295 loop.check();
296
297 inc_state_counter(); // mark as modified
298}
299
300//! @brief Get the stencil width of the current Array. Returns 0
301//! if ghosts are not available.
302unsigned int Array::stencil_width() const {
303 if (m_impl->ghosted) {
304 return m_impl->da_stencil_width;
305 }
306
307 return 0;
308}
309
310petsc::Vec &Array::vec() const {
311 if (m_impl->v.get() == nullptr) {
312 PetscErrorCode ierr = 0;
313 if (m_impl->ghosted) {
314 ierr = DMCreateLocalVector(*dm(), m_impl->v.rawptr());
315 PISM_CHK(ierr, "DMCreateLocalVector");
316 } else {
317 ierr = DMCreateGlobalVector(*dm(), m_impl->v.rawptr());
318 PISM_CHK(ierr, "DMCreateGlobalVector");
319 }
320 }
321 return m_impl->v;
322}
323
324std::shared_ptr<petsc::DM> Array::dm() const {
325 if (m_impl->da == nullptr) {
326 // dof > 1 for vector, staggered grid 2D fields, etc. In this case zlevels.size() ==
327 // 1. For 3D fields, dof == 1 (all 3D fields are scalar) and zlevels.size()
328 // corresponds to dof of the underlying PETSc DM object.
329 auto da_dof = std::max((unsigned int)m_impl->zlevels.size(), m_impl->dof);
330
331 // initialize the da member:
332 m_impl->da = grid()->get_dm(da_dof, m_impl->da_stencil_width);
333 }
334 return m_impl->da;
335}
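/* Illustration (added): the dof of the underlying DM is
 * max(zlevels.size(), dof). A 3D scalar field with 41 vertical levels gets
 * da_dof = max(41, 1) = 41; a 2D staggered-grid field with dof = 2 and
 * zlevels.size() = 1 gets da_dof = max(1, 2) = 2.
 */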
336
337//! Sets the variable name to `name`.
338/**
339 * This is the "overall" name of a field. This is **not** the same as the
340 * NetCDF variable name. Use `metadata(...).set_name(...)` to set that.
341 */
342void Array::set_name(const std::string &name) {
343 m_impl->name = name;
344}
345
346//! @brief Get the name of an Array object.
347/**
348 * This is the name used to refer to this object in PISM (e.g. via the
349 * Vars class), **not** the name of the corresponding NetCDF variable.
350 * (The problem is that one Array instance may correspond to
351 * several NetCDF variables. Use metadata(...).get_name() to get the
352 * name of NetCDF variables an Array is saved to.)
353 */
354const std::string &Array::get_name() const {
355 return m_impl->name;
356}
357
358void set_default_value_or_stop(const VariableMetadata &variable, io::Default default_value,
359 const Logger &log, Vec output) {
360
361 if (default_value.exists()) {
362 // If it is optional, fill with the provided default value.
363 log.message(2, " variable %s (%s) is not found: using the default constant\n",
364 variable.get_name().c_str(), variable.get_string("long_name").c_str());
365
366 PetscErrorCode ierr = VecSet(output, default_value);
367 PISM_CHK(ierr, "VecSet");
368 } else {
369 // if it's critical, print an error message and stop
370 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Can't find variable '%s'",
371 variable.get_name().c_str());
372 }
373}
374
375//! Gets an Array from a file `file`, interpolating onto the current grid.
376/*! Stops if the variable was not found and no default value was provided.
377 */
378void Array::regrid_impl(const File &file, io::Default default_value) {
379
380 assert(ndims() == 2);
381
382 auto log = grid()->ctx()->log();
383
384 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
385 // and we just need a global Vec):
386 auto dm_for_io = ndof() == 1 ? dm() : grid()->get_dm(1, 0);
387
388 // a temporary one-component vector that uses a compatible domain decomposition
389 petsc::TemporaryGlobalVec tmp(dm_for_io);
390
391 for (unsigned int j = 0; j < ndof(); ++j) {
392 auto variable = metadata(j);
393
394 auto V = file.find_variable(variable.get_name(), variable["standard_name"]);
395
396 if (V.exists) {
397 auto interp = grid()->get_interpolation(levels(), file, V.name, m_impl->interpolation_type);
398
399 int last_record = -1;
400 interp->regrid(variable, file, last_record, *grid(), tmp);
401 } else {
402 set_default_value_or_stop(variable, default_value, *log, tmp);
403 }
404
405 set_dof(dm_for_io, tmp, j);
406 } // end of the loop over degrees of freedom
407
408 // The calls above only set the values owned by a processor, so we need to
409 // communicate if m_has_ghosts == true:
410 if (m_impl->ghosted) {
411 update_ghosts();
412 }
413}
414
415//! Reads appropriate NetCDF variable(s) into an Array.
416void Array::read_impl(const File &file, const unsigned int time) {
417
418 auto log = grid()->ctx()->log();
419 log->message(4, " Reading %s...\n", m_impl->name.c_str());
420
421 if (ndof() == 1) {
422 // This takes care of scalar variables (both 2D and 3D).
423 if (m_impl->ghosted) {
424 petsc::TemporaryGlobalVec tmp(dm());
425
426 petsc::VecArray tmp_array(tmp);
427 io::read_spatial_variable(metadata(0), *grid(), file, time, tmp_array.get());
428
429 global_to_local(*dm(), tmp, vec());
430 } else {
431 petsc::VecArray v_array(vec());
432 io::read_spatial_variable(metadata(0), *grid(), file, time, v_array.get());
433 }
434 return;
435 }
436
437 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
438 // and we just need a global Vec):
439 auto da2 = grid()->get_dm(1, 0);
440
441 // A temporary one-component vector, distributed across processors
442 // the same way v is
443 petsc::TemporaryGlobalVec tmp(da2);
444
445 for (unsigned int j = 0; j < ndof(); ++j) {
446
447 {
448 petsc::VecArray tmp_array(tmp);
449 io::read_spatial_variable(metadata(j), *grid(), file, time, tmp_array.get());
450 }
451
452 set_dof(da2, tmp, j);
453 }
454
455 // The calls above only set the values owned by a processor, so we need to
456 // communicate if m_impl->ghosted is true:
457 if (m_impl->ghosted) {
458 update_ghosts();
459 }
460}
461
462//! \brief Define variables corresponding to an Array in a file opened using `file`.
463void Array::define(const File &file, io::Type default_type) const {
464 for (unsigned int j = 0; j < ndof(); ++j) {
465 io::Type type = metadata(j).get_output_type();
466 if (type == io::PISM_NAT) {
467 type = default_type;
468 }
469
470 io::define_spatial_variable(metadata(j), *grid(), file, type);
471 }
472}
473
474//! @brief Returns a reference to the SpatialVariableMetadata object
475//! containing metadata for the component N.
476SpatialVariableMetadata &Array::metadata(unsigned int N) {
477 assert(N < m_impl->dof);
478 return m_impl->metadata[N];
479}
480
481const SpatialVariableMetadata& Array::metadata(unsigned int N) const {
482 assert(N < m_impl->dof);
483 return m_impl->metadata[N];
484}
485
486//! Writes an Array to a NetCDF file.
487void Array::write_impl(const File &file) const {
488 auto log = m_impl->grid->ctx()->log();
489 auto time = timestamp(m_impl->grid->com);
490
491 // The simplest case:
492 if (ndof() == 1) {
493 log->message(3, "[%s] Writing %s...\n",
494 time.c_str(), metadata(0).get_name().c_str());
495
496 if (m_impl->ghosted) {
497 petsc::TemporaryGlobalVec tmp(dm());
498
499 this->copy_to_vec(dm(), tmp);
500
501 petsc::VecArray tmp_array(tmp);
502
503 io::write_spatial_variable(metadata(0), *grid(), file, tmp_array.get());
504 } else {
505 petsc::VecArray v_array(vec());
506 io::write_spatial_variable(metadata(0), *grid(), file, v_array.get());
507 }
508 return;
509 }
510
511 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
512 // and we just need a global Vec):
513 auto da2 = m_impl->grid->get_dm(1, 0);
514
515 // a temporary one-component vector, distributed across processors
516 // the same way v is
517 petsc::TemporaryGlobalVec tmp(da2);
518
519 for (unsigned int j = 0; j < ndof(); ++j) {
520 get_dof(da2, tmp, j);
521
522 petsc::VecArray tmp_array(tmp);
523 log->message(3, "[%s] Writing %s...\n",
524 time.c_str(), metadata(j).get_name().c_str());
525 io::write_spatial_variable(metadata(j), *grid(), file, tmp_array.get());
526 }
527}
528
529//! Dumps a variable to a file, overwriting this file's contents (for debugging).
530void Array::dump(const char filename[]) const {
531 File file(m_impl->grid->com, filename,
532 string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
533 io::PISM_READWRITE_CLOBBER);
534
535 if (not m_impl->metadata[0].get_time_independent()) {
536 io::define_time(file, *m_impl->grid->ctx());
537 io::append_time(file, *m_impl->grid->ctx()->config(), m_impl->grid->ctx()->time()->current());
538 }
539
540 define(file, io::PISM_DOUBLE);
541 write(file);
542}
543
544//! Checks if two Arrays have compatible sizes, dimensions and numbers of degrees of freedom.
545void Array::checkCompatibility(const char* func, const Array &other) const {
546 PetscErrorCode ierr;
547 // We have to use PetscInt because of VecGetSize below.
548 PetscInt X_size, Y_size;
549
550 if (m_impl->dof != other.m_impl->dof) {
551 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): operands have different numbers of degrees of freedom",
552 func);
553 }
554
555 ierr = VecGetSize(vec(), &X_size);
556 PISM_CHK(ierr, "VecGetSize");
557
558 ierr = VecGetSize(other.vec(), &Y_size);
559 PISM_CHK(ierr, "VecGetSize");
560
561 if (X_size != Y_size) {
562 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): incompatible Vec sizes (called as %s.%s(%s))",
563 func, m_impl->name.c_str(), func, other.m_impl->name.c_str());
564 }
565}
566
567//! Checks if an Array is allocated and calls DAVecGetArray.
568void Array::begin_access() const {
569
570 if (m_impl->access_counter < 0) {
571 throw RuntimeError(PISM_ERROR_LOCATION, "Array::begin_access(): m_access_counter < 0");
572 }
573
574 if (m_impl->access_counter == 0) {
575 PetscErrorCode ierr;
576 if (m_impl->begin_access_use_dof) {
577 ierr = DMDAVecGetArrayDOF(*dm(), vec(), &m_array);
578 PISM_CHK(ierr, "DMDAVecGetArrayDOF");
579 } else {
580 ierr = DMDAVecGetArray(*dm(), vec(), &m_array);
581 PISM_CHK(ierr, "DMDAVecGetArray");
582 }
583 }
584
585 m_impl->access_counter += 1;
586}
587
588//! Checks if an Array is allocated and calls DAVecRestoreArray.
589void Array::end_access() const {
590 PetscErrorCode ierr;
591
592 if (m_array == NULL) {
594 "Array::end_access(): a == NULL (looks like begin_access() was not called)");
595 }
596
597 if (m_impl->access_counter < 0) {
598 throw RuntimeError(PISM_ERROR_LOCATION, "Array::end_access(): m_access_counter < 0");
599 }
600
601 m_impl->access_counter -= 1;
602 if (m_impl->access_counter == 0) {
603 if (m_impl->begin_access_use_dof) {
604 ierr = DMDAVecRestoreArrayDOF(*dm(), vec(), &m_array);
605 PISM_CHK(ierr, "DMDAVecRestoreArrayDOF");
606 } else {
607 ierr = DMDAVecRestoreArray(*dm(), vec(), &m_array);
608 PISM_CHK(ierr, "DMDAVecRestoreArray");
609 }
610 m_array = NULL;
611 }
612}
613
614//! Updates ghost points.
615void Array::update_ghosts() {
616 PetscErrorCode ierr;
617 if (not m_impl->ghosted) {
618 return;
619 }
620
621 ierr = DMLocalToLocalBegin(*dm(), vec(), INSERT_VALUES, vec());
622 PISM_CHK(ierr, "DMLocalToLocalBegin");
623
624 ierr = DMLocalToLocalEnd(*dm(), vec(), INSERT_VALUES, vec());
625 PISM_CHK(ierr, "DMLocalToLocalEnd");
626}
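/* Illustration (added; not part of Array.cc): the usual pattern with a
 * ghosted array. Loops write only owned points; one update_ghosts() call
 * then refreshes the overlap. Here `v` stands for a hypothetical ghosted
 * array::Scalar.
 *
 * @code
 * v.begin_access();
 * for (auto p = grid->points(); p; p.next()) {
 *   v(p.i(), p.j()) = ...;   // owned points only
 * }
 * v.end_access();
 * v.update_ghosts();         // communicate the overlap between subdomains
 * @endcode
 */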
627
628//! Result: v[j] <- c for all j.
629void Array::set(const double c) {
630 PetscErrorCode ierr = VecSet(vec(),c);
631 PISM_CHK(ierr, "VecSet");
632
633 inc_state_counter(); // mark as modified
634}
635
636void Array::check_array_indices(int i, int j, unsigned int k) const {
637 double ghost_width = 0;
638 if (m_impl->ghosted) {
639 ghost_width = m_impl->da_stencil_width;
640 }
641 // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
642 // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
643 // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
644 // DM object. So we want the bigger of the two numbers here.
645 unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
646
647 bool out_of_range = (i < m_impl->grid->xs() - ghost_width) ||
648 (i > m_impl->grid->xs() + m_impl->grid->xm() + ghost_width) ||
649 (j < m_impl->grid->ys() - ghost_width) ||
650 (j > m_impl->grid->ys() + m_impl->grid->ym() + ghost_width) ||
651 (k >= N);
652
653 if (out_of_range) {
654 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s(%d, %d, %d) is out of bounds",
655 m_impl->name.c_str(), i, j, k);
656 }
657
658 if (m_array == NULL) {
659 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s: begin_access() was not called", m_impl->name.c_str());
660 }
661}
662
663//! Computes the norm of all the components of an Array.
664/*!
665See comment for range(); because local Vecs are VECSEQ, needs a reduce operation.
666See src/trypetsc/localVecMax.c.
667 */
668std::vector<double> Array::norm(int n) const {
669 std::vector<double> result(m_impl->dof);
670
671 NormType type = int_to_normtype(n);
672
673 if (m_impl->dof > 1) {
674 PetscErrorCode ierr = VecStrideNormAll(vec(), type, result.data());
675 PISM_CHK(ierr, "VecStrideNormAll");
676 } else {
677 PetscErrorCode ierr = VecNorm(vec(), type, result.data());
678 PISM_CHK(ierr, "VecNorm");
679 }
680
681 if (m_impl->ghosted) {
682 // needs a reduce operation; use GlobalMax() if NORM_INFINITY,
683 // otherwise GlobalSum; take care in the NORM_2 case
684 switch (type) {
685 case NORM_1_AND_2: {
687 "Array::norm_all(...): NORM_1_AND_2"
688 " not implemented (called as %s.norm_all(...))",
689 m_impl->name.c_str());
690 }
691 case NORM_1: {
692 for (unsigned int k = 0; k < m_impl->dof; ++k) {
693 result[k] = GlobalSum(m_impl->grid->com, result[k]);
694 }
695 return result;
696 }
697 case NORM_2: {
698 for (unsigned int k = 0; k < m_impl->dof; ++k) {
699 // undo sqrt in VecNorm before sum; sum up; take sqrt
700 result[k] = sqrt(GlobalSum(m_impl->grid->com, result[k] * result[k]));
701 }
702 return result;
703 }
704 case NORM_INFINITY: {
705 for (unsigned int k = 0; k < m_impl->dof; ++k) {
706 result[k] = GlobalMax(m_impl->grid->com, result[k]);
707 }
708 return result;
709 }
710 default: {
712 "Array::norm_all(...): unknown norm type"
713 " (called as %s.norm_all(...))",
714 m_impl->name.c_str());
715 }
716 } // switch
717 } else {
718 return result;
719 }
720}
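/* Illustration (added): why the NORM_2 branch squares before summing. Each
 * rank's VecNorm sees only its local (VECSEQ) patch, and the global 2-norm
 * of a vector split into patches v_r satisfies
 *
 *   ||v||_2 = sqrt( sum_r ||v_r||_2^2 ),
 *
 * hence result[k] = sqrt(GlobalSum(result[k]^2)) above. NORM_1 reduces with
 * a plain sum and NORM_INFINITY with a max.
 */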
721
722void Array::write(const std::string &filename) const {
723 // We expect the file to be present and ready to write into.
724 File file(m_impl->grid->com, filename,
725 string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
726 io::PISM_READWRITE);
727
728 this->write(file);
729}
730
731void Array::read(const std::string &filename, unsigned int time) {
732 File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
733 this->read(file, time);
734}
735
736void Array::regrid(const std::string &filename, io::Default default_value) {
737 File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
738 this->regrid(file, default_value);
739}
740
741static void check_range(petsc::Vec &v,
742 const SpatialVariableMetadata &metadata,
743 const std::string &filename,
744 const Logger &log,
745 bool report_range) {
746 PetscErrorCode ierr = 0;
747
748 double min = 0.0;
749 double max = 0.0;
750
751 ierr = VecMin(v, NULL, &min);
752 PISM_CHK(ierr, "VecMin");
753 ierr = VecMax(v, NULL, &max);
754 PISM_CHK(ierr, "VecMax");
755
756 // Check the range and warn the user if needed:
757 metadata.check_range(filename, min, max);
758 if (report_range) {
759 metadata.report_range(log, min, max);
760 }
761}
762
763
764/** Read a field from a file, interpolating onto the current grid.
765 *
766 * In the `CRITICAL` mode (no default value provided), stop if the
767 * variable could not be found in the input file; `default_value` is
768 * ignored.
769 *
770 * In the `OPTIONAL` mode, fill this Array with `default_value` if the
771 * variable could not be found in the input file.
772 *
773 * In the `CRITICAL_FILL_MISSING` mode, replace missing values matching
774 * the `_FillValue` attribute with `default_value`; stop if the variable
775 * could not be found.
776 *
777 * In the `OPTIONAL_FILL_MISSING` mode, replace missing values matching
778 * the `_FillValue` attribute with `default_value`; fill the whole Array
779 * with `default_value` if the variable could not be found.
780 *
781 * @param file input file
782 * @param default_value default value; its meaning depends on the
783 * regridding mode (see above)
784 */
789void Array::regrid(const File &file, io::Default default_value) {
790 auto log = m_impl->grid->ctx()->log();
791
792 log->message(3, " [%s] Regridding %s...\n", timestamp(m_impl->grid->com).c_str(),
793 m_impl->name.c_str());
794 double start_time = get_time(m_impl->grid->com);
795 m_impl->grid->ctx()->profiling().begin("io.regridding");
796 try {
797 this->regrid_impl(file, default_value);
798 inc_state_counter(); // mark as modified
799
800 if (ndims() == 2) {
801 for (unsigned k = 0; k < ndof(); ++k) {
802 auto dm = grid()->get_dm(1, 0);
803
804 petsc::TemporaryGlobalVec v(dm);
805
806 get_dof(dm, v, k);
807
808 check_range(v, metadata(k), file.name(), *log, m_impl->report_range);
809 }
810 } else {
811 check_range(vec(), metadata(0), file.name(), *log, m_impl->report_range);
812 }
813
814
815 } catch (RuntimeError &e) {
816 e.add_context("regridding '%s' from '%s'",
817 this->get_name().c_str(), file.name().c_str());
818 throw;
819 }
820 m_impl->grid->ctx()->profiling().end("io.regridding");
821 double
822 end_time = get_time(m_impl->grid->com),
823 time_spent = end_time - start_time;
824
825 if (time_spent > 1.0) {
826 log->message(3, " done in %f seconds.\n", time_spent);
827 } else {
828 log->message(3, " done.\n");
829 }
830}
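/* Illustration (added; not part of Array.cc): typical regrid() calls,
 * assuming io::Default converts from double and io::Default::Nil() denotes
 * "no default value" (both are assumptions about the io::Default interface):
 *
 * @code
 * thickness.regrid("input.nc", io::Default::Nil()); // critical: stop if missing
 * bmelt.regrid("input.nc", io::Default(0.0));       // optional: fill with 0.0
 * @endcode
 */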
831
832void Array::read(const File &file, const unsigned int time) {
833 this->read_impl(file, time);
834 inc_state_counter(); // mark as modified
835}
836
837void Array::write(const File &file) const {
838 define(file, io::PISM_DOUBLE);
839
840 MPI_Comm com = m_impl->grid->com;
841 double start_time = get_time(com);
842 write_impl(file);
843 double end_time = get_time(com);
844
845 const double
846 minute = 60.0, // one minute in seconds
847 time_spent = end_time - start_time,
848 megabyte = pow(2, 20),
849 N = static_cast<double>(size()),
850 mb_double = static_cast<double>(sizeof(double)) * N / megabyte,
851 mb_float = static_cast<double>(sizeof(float)) * N / megabyte;
852
853 std::string timestamp = pism::timestamp(com);
854 std::string spacer(timestamp.size(), ' ');
855 if (time_spent > 1) {
856 m_impl->grid->ctx()->log()->message(3,
857 "\n"
858 " [%s] Done writing %s (%f Mb double, %f Mb float)\n"
859 " %s in %f seconds (%f minutes).\n"
860 " %s Effective throughput: double: %f Mb/s, float: %f Mb/s.\n",
861 timestamp.c_str(), m_impl->name.c_str(), mb_double, mb_float,
862 spacer.c_str(), time_spent, time_spent / minute,
863 spacer.c_str(),
864 mb_double / time_spent, mb_float / time_spent);
865 } else {
866 m_impl->grid->ctx()->log()->message(3, " done.\n");
867 }
868}
869
870AccessScope::AccessScope() {
871 // empty
872}
873
874AccessScope::~AccessScope() {
875 while (not m_vecs.empty()) {
876 try {
877 m_vecs.back()->end_access();
878 m_vecs.pop_back();
879 } catch (...) {
880 handle_fatal_errors(MPI_COMM_SELF);
881 }
882 }
883}
884
885AccessScope::AccessScope(std::initializer_list<const PetscAccessible *> vecs) {
886 for (const auto *j : vecs) {
887 assert(j != nullptr);
888 add(*j);
889 }
890}
891
892AccessScope::AccessScope(const PetscAccessible &vec) {
893 add(vec);
894}
895
896void AccessScope::add(const PetscAccessible &vec) {
897 vec.begin_access();
898 m_vecs.push_back(&vec);
899}
900
901void AccessScope::add(const std::vector<const PetscAccessible*> &vecs) {
902 for (const auto *v : vecs) {
903 assert(v != nullptr);
904 add(*v);
905 }
906}
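/* Illustration (added; not part of Array.cc): AccessScope is the RAII
 * counterpart of begin_access()/end_access(); the destructor above drains
 * m_vecs so every add()-ed array is released. A sketch with two hypothetical
 * arrays:
 *
 * @code
 * {
 *   array::AccessScope scope{ &thickness, &bed }; // begin_access() on both
 *   // ... read or modify thickness and bed ...
 * } // end_access() runs for both here
 * @endcode
 */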
907
908//! Return the total number of elements in the *owned* part of an array.
909size_t Array::size() const {
910 // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
911 // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
912 // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
913 // DM object.
914
915 size_t
916 Mx = m_impl->grid->Mx(),
917 My = m_impl->grid->My(),
918 Mz = m_impl->zlevels.size(),
919 dof = m_impl->dof;
920
921 return Mx * My * Mz * dof;
922}
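/* Illustration (added): on a 61x61 grid, a 2D scalar field has
 * size() = 61 * 61 * 1 * 1 = 3721 elements; a 3D field with 41 levels has
 * 61 * 61 * 41 * 1 = 152561. Ghost points are not counted.
 */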
923
924/*! Allocate a copy on processor zero and the scatter needed to move data.
925 */
926std::shared_ptr<petsc::Vec> Array::allocate_proc0_copy() const {
927 PetscErrorCode ierr;
928 Vec v_proc0 = NULL;
929 Vec result = NULL;
930
931 ierr = PetscObjectQuery((PetscObject)dm()->get(), "v_proc0", (PetscObject*)&v_proc0);
932 PISM_CHK(ierr, "PetscObjectQuery");
933
934 if (v_proc0 == NULL) {
935
936 // natural_work will be destroyed at the end of scope, but it will
937 // only decrement the reference counter incremented by
938 // PetscObjectCompose below.
939 petsc::Vec natural_work;
940 // create a work vector with natural ordering:
941 ierr = DMDACreateNaturalVector(*dm(), natural_work.rawptr());
942 PISM_CHK(ierr, "DMDACreateNaturalVector");
943
944 // this increments the reference counter of natural_work
945 ierr = PetscObjectCompose((PetscObject)dm()->get(), "natural_work",
946 (PetscObject)((::Vec)natural_work));
947 PISM_CHK(ierr, "PetscObjectCompose");
948
949 // scatter_to_zero will be destroyed at the end of scope, but it
950 // will only decrement the reference counter incremented by
951 // PetscObjectCompose below.
952 petsc::VecScatter scatter_to_zero;
953
954 // initialize the scatter to processor 0 and create storage on processor 0
955 ierr = VecScatterCreateToZero(natural_work, scatter_to_zero.rawptr(),
956 &v_proc0);
957 PISM_CHK(ierr, "VecScatterCreateToZero");
958
959 // this increments the reference counter of scatter_to_zero
960 ierr = PetscObjectCompose((PetscObject)dm()->get(), "scatter_to_zero",
961 (PetscObject)((::VecScatter)scatter_to_zero));
962 PISM_CHK(ierr, "PetscObjectCompose");
963
964 // this increments the reference counter of v_proc0
965 ierr = PetscObjectCompose((PetscObject)dm()->get(), "v_proc0",
966 (PetscObject)v_proc0);
967 PISM_CHK(ierr, "PetscObjectCompose");
968
969 // We DO NOT call VecDestroy(v_proc0): the petsc::Vec wrapper will
970 // take care of this.
971 result = v_proc0;
972 } else {
973 // We DO NOT call VecDestroy(result): the petsc::Vec wrapper will
974 // take care of this.
975 ierr = VecDuplicate(v_proc0, &result);
976 PISM_CHK(ierr, "VecDuplicate");
977 }
978 return std::shared_ptr<petsc::Vec>(new petsc::Vec(result));
979}
980
981void Array::put_on_proc0(petsc::Vec &parallel, petsc::Vec &onp0) const {
982 PetscErrorCode ierr = 0;
983 VecScatter scatter_to_zero = NULL;
984 Vec natural_work = NULL;
985
986 ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
987 (PetscObject*)&scatter_to_zero);
988 PISM_CHK(ierr, "PetscObjectQuery");
989
990 ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
991 (PetscObject*)&natural_work);
992 PISM_CHK(ierr, "PetscObjectQuery");
993
994 if (natural_work == NULL || scatter_to_zero == NULL) {
995 throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling put_on_proc0");
996 }
997
998 ierr = DMDAGlobalToNaturalBegin(*dm(), parallel, INSERT_VALUES, natural_work);
999 PISM_CHK(ierr, "DMDAGlobalToNaturalBegin");
1000
1001 ierr = DMDAGlobalToNaturalEnd(*dm(), parallel, INSERT_VALUES, natural_work);
1002 PISM_CHK(ierr, "DMDAGlobalToNaturalEnd");
1003
1004 ierr = VecScatterBegin(scatter_to_zero, natural_work, onp0,
1005 INSERT_VALUES, SCATTER_FORWARD);
1006 PISM_CHK(ierr, "VecScatterBegin");
1007
1008 ierr = VecScatterEnd(scatter_to_zero, natural_work, onp0,
1009 INSERT_VALUES, SCATTER_FORWARD);
1010 PISM_CHK(ierr, "VecScatterEnd");
1011}
1012
1013
1014//! Puts a local array::Scalar on processor 0.
1015void Array::put_on_proc0(petsc::Vec &onp0) const {
1016 if (m_impl->ghosted) {
1017 petsc::TemporaryGlobalVec tmp(dm());
1018 this->copy_to_vec(dm(), tmp);
1019 put_on_proc0(tmp, onp0);
1020 } else {
1021 put_on_proc0(vec(), onp0);
1022 }
1023}
1024
1025void Array::get_from_proc0(petsc::Vec &onp0, petsc::Vec &parallel) const {
1026 PetscErrorCode ierr;
1027
1028 VecScatter scatter_to_zero = NULL;
1029 Vec natural_work = NULL;
1030 ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1031 (PetscObject*)&scatter_to_zero);
1032 PISM_CHK(ierr, "PetscObjectQuery");
1033
1034 ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1035 (PetscObject*)&natural_work);
1036 PISM_CHK(ierr, "PetscObjectQuery");
1037
1038 if (natural_work == NULL || scatter_to_zero == NULL) {
1039 throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling get_from_proc0");
1040 }
1041
1042 ierr = VecScatterBegin(scatter_to_zero, onp0, natural_work,
1043 INSERT_VALUES, SCATTER_REVERSE);
1044 PISM_CHK(ierr, "VecScatterBegin");
1045
1046 ierr = VecScatterEnd(scatter_to_zero, onp0, natural_work,
1047 INSERT_VALUES, SCATTER_REVERSE);
1048 PISM_CHK(ierr, "VecScatterEnd");
1049
1050 ierr = DMDANaturalToGlobalBegin(*dm(), natural_work, INSERT_VALUES, parallel);
1051 PISM_CHK(ierr, "DMDANaturalToGlobalBegin");
1052
1053 ierr = DMDANaturalToGlobalEnd(*dm(), natural_work, INSERT_VALUES, parallel);
1054 PISM_CHK(ierr, "DMDANaturalToGlobalEnd");
1055}
1056
1057//! Gets a local array::Array from processor 0.
1058void Array::get_from_proc0(petsc::Vec &onp0) {
1059 if (m_impl->ghosted) {
1060 petsc::TemporaryGlobalVec tmp(dm());
1061 get_from_proc0(onp0, tmp);
1062 global_to_local(*dm(), tmp, vec());
1063 } else {
1064 get_from_proc0(onp0, vec());
1065 }
1066 inc_state_counter(); // mark as modified
1067}
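/* Illustration (added; not part of Array.cc): the rank-0 round trip used by
 * serial algorithms. `rank` comes from MPI_Comm_rank. A sketch:
 *
 * @code
 * auto work = v.allocate_proc0_copy(); // storage on rank 0
 * v.put_on_proc0(*work);               // gather (natural ordering)
 * if (rank == 0) {
 *   // ... operate on the serial Vec *work ...
 * }
 * v.get_from_proc0(*work);             // scatter back; marks v as modified
 * @endcode
 */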
1068
1069/*!
1070 * Copy data to rank 0 and compute the checksum.
1071 *
1072 * Does not use ghosts. Results should be independent of the parallel domain
1073 * decomposition.
1074 */
1075uint64_t Array::fletcher64_serial() const {
1076
1077 auto v = allocate_proc0_copy();
1078 put_on_proc0(*v);
1079
1080 MPI_Comm com = m_impl->grid->ctx()->com();
1081
1082 int rank = 0;
1083 MPI_Comm_rank(com, &rank);
1084
1085 uint64_t result = 0;
1086 if (rank == 0) {
1087 petsc::VecArray array(*v);
1088
1089 PetscInt size = 0;
1090 PetscErrorCode ierr = VecGetLocalSize(*v, &size);
1091 PISM_CHK(ierr, "VecGetLocalSize");
1092
1093 result = pism::fletcher64((uint32_t*)array.get(), static_cast<size_t>(size) * 2);
1094 }
1095 MPI_Bcast(&result, 1, MPI_UINT64_T, 0, com);
1096
1097 return result;
1098}
1099
1100/*!
1101 * Compute a checksum of a vector.
1102 *
1103 * The result depends on the number of processors used.
1104 *
1105 * We assume that sizeof(double) == 2 * sizeof(uint32_t), i.e. double uses 64 bits.
1106 */
1107uint64_t Array::fletcher64() const {
1108 static_assert(sizeof(double) == 2 * sizeof(uint32_t),
1109 "Cannot compile Array::fletcher64() (sizeof(double) != 2 * sizeof(uint32_t))");
1110
1111 MPI_Status mpi_stat;
1112 const int checksum_tag = 42;
1113
1114 MPI_Comm com = m_impl->grid->ctx()->com();
1115
1116 int rank = 0;
1117 MPI_Comm_rank(com, &rank);
1118
1119 int comm_size = 0;
1120 MPI_Comm_size(com, &comm_size);
1121
1122 PetscInt local_size = 0;
1123 PetscErrorCode ierr = VecGetLocalSize(vec(), &local_size); PISM_CHK(ierr, "VecGetLocalSize");
1124 uint64_t sum = 0;
1125 {
1126 petsc::VecArray v(vec());
1127 // compute checksums for local patches on all ranks
1128 sum = pism::fletcher64((uint32_t*)v.get(), static_cast<size_t>(local_size) * 2);
1129 }
1130
1131 if (rank == 0) {
1132 std::vector<uint64_t> sums(comm_size);
1133
1134 // gather checksums of patches on rank 0
1135 sums[0] = sum;
1136 for (int r = 1; r < comm_size; ++r) {
1137 MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, checksum_tag, com, &mpi_stat);
1138 }
1139
1140 // compute the checksum of checksums
1141 sum = pism::fletcher64((uint32_t*)sums.data(), static_cast<size_t>(comm_size) * 2);
1142 } else {
1143 MPI_Send(&sum, 1, MPI_UINT64_T, 0, checksum_tag, com);
1144 }
1145
1146 // broadcast to all ranks
1147 MPI_Bcast(&sum, 1, MPI_UINT64_T, 0, com);
1148
1149 return sum;
1150}
1151
1152std::string Array::checksum(bool serial) const {
1153 if (serial) {
1154 // unsigned long long is supposed to be at least 64 bit long
1155 return pism::printf("%016llx", (unsigned long long int)this->fletcher64_serial());
1156 }
1157 // unsigned long long is supposed to be at least 64 bit long
1158 return pism::printf("%016llx", (unsigned long long int)this->fletcher64());
1159}
1160
1161void Array::print_checksum(const char *prefix, bool serial) const {
1162 auto log = m_impl->grid->ctx()->log();
1163
1164 log->message(1, "%s%s: %s\n", prefix, m_impl->name.c_str(), checksum(serial).c_str());
1165}
1166
1167//! \brief View a 2D vector field using existing PETSc viewers.
1168void Array::view(std::vector<std::shared_ptr<petsc::Viewer> > viewers) const {
1169 PetscErrorCode ierr;
1170
1171 if (ndims() == 3) {
1173 "cannot 'view' a 3D field '%s'",
1174 get_name().c_str());
1175 }
1176
1177 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
1178 // and we just need a global Vec):
1179 auto da2 = m_impl->grid->get_dm(1, 0);
1180
1181 petsc::TemporaryGlobalVec tmp(da2);
1182
1183 for (unsigned int i = 0; i < ndof(); ++i) {
1184 std::string
1185 long_name = m_impl->metadata[i]["long_name"],
1186 units = m_impl->metadata[i]["units"],
1187 output_units = m_impl->metadata[i]["output_units"],
1188 title = pism::printf("%s (%s)",
1189 long_name.c_str(),
1190 output_units.c_str());
1191
1192 PetscViewer v = *viewers[i].get();
1193
1194 PetscDraw draw = NULL;
1195 ierr = PetscViewerDrawGetDraw(v, 0, &draw);
1196 PISM_CHK(ierr, "PetscViewerDrawGetDraw");
1197
1198 ierr = PetscDrawSetTitle(draw, title.c_str());
1199 PISM_CHK(ierr, "PetscDrawSetTitle");
1200
1201 get_dof(da2, tmp, i);
1202
1203 convert_vec(tmp, m_impl->metadata[i].unit_system(),
1204 units, output_units);
1205
1206 double bounds[2] = {0.0, 0.0};
1207 ierr = VecMin(tmp, NULL, &bounds[0]); PISM_CHK(ierr, "VecMin");
1208 ierr = VecMax(tmp, NULL, &bounds[1]); PISM_CHK(ierr, "VecMax");
1209
1210 if (bounds[0] == bounds[1]) {
1211 bounds[0] = -1.0;
1212 bounds[1] = 1.0;
1213 }
1214
1215 ierr = PetscViewerDrawSetBounds(v, 1, bounds);
1216 PISM_CHK(ierr, "PetscViewerDrawSetBounds");
1217
1218 ierr = VecView(tmp, v);
1219 PISM_CHK(ierr, "VecView");
1220 }
1221}
1222
1223} // end of namespace array
1224
1225void convert_vec(petsc::Vec &v, units::System::Ptr system,
1226 const std::string &spec1, const std::string &spec2) {
1227 units::Converter c(system, spec1, spec2);
1228
1229 // has to be a PetscInt because of the VecGetLocalSize() call
1230 PetscInt data_size = 0;
1231 PetscErrorCode ierr = VecGetLocalSize(v, &data_size);
1232 PISM_CHK(ierr, "VecGetLocalSize");
1233
1234 petsc::VecArray data(v);
1235 c.convert_doubles(data.get(), data_size);
1236}
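/* Illustration (added; not part of Array.cc): convert_vec() re-scales data
 * in place via units::Converter. For example, converting a velocity field
 * from internal to output units (the unit specs here are assumptions):
 *
 * @code
 * convert_vec(tmp, metadata.unit_system(), "m s^-1", "m year^-1");
 * @endcode
 *
 * view() above uses exactly this call to convert from `units` to
 * `output_units` before plotting.
 */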
1237
1238} // end of namespace pism