const QGauss<dim> quadrature_formula(fe.degree + 1);
FEValues<dim> fe_values(fe,
                        quadrature_formula,
                        update_gradients |
                        update_quadrature_points |
                        update_JxW_values);

const unsigned int dofs_per_cell = fe.dofs_per_cell;
const unsigned int n_q_points    = quadrature_formula.size();

Vector<double>              cell_residual(dofs_per_cell);
std::vector<Tensor<1, dim>> gradients(n_q_points);

std::vector<types::global_dof_index> local_dof_indices(dofs_per_cell);
print_status_update(std::string("Starting looping over cells in residual\n"), false);
for (auto cell = dof_handler.begin_active(); cell != dof_handler.end(); ++cell)
  {
    if (cell->is_locally_owned())
      {
        cell_residual = 0;
        fe_values.reinit(cell);
        fe_values.get_function_gradients(evaluation_point, gradients);

        for (unsigned int q_point = 0; q_point < n_q_points; ++q_point)
          {
            const double coeff = 1. / std::sqrt(1 + gradients[q_point] *
                                                    gradients[q_point]);
            for (unsigned int i = 0; i < dofs_per_cell; ++i)
              cell_residual(i) -= (fe_values.shape_grad(i, q_point)
                                   * coeff
                                   * gradients[q_point]
                                   * fe_values.JxW(q_point));
          }

        cell->get_dof_indices(local_dof_indices);
        hanging_node_constraints.distribute_local_to_global(cell_residual,
                                                            local_dof_indices,
                                                            residual);
      }
  }
residual.compress(VectorOperation::add); // Have to call it after distribute_local_to_global()
print_status_update(std::string("Setting boundary dofs in residual calculation\n"), true);
std::vector<bool> boundary_dofs(dof_handler.n_locally_owned_dofs());
DoFTools::extract_boundary_dofs(dof_handler,
                                ComponentMask(),
                                boundary_dofs);
print_status_update(std::string("Boundary dofs extracted\n"), true);
print_status_update(std::string("Residual size is " + std::to_string(residual.size())
                                + " and boundary dofs size is "
                                + std::to_string(boundary_dofs.size()) + "\n"),
                    true);
for (unsigned int i = 0; i < dof_handler.n_locally_owned_dofs(); ++i)
  if (boundary_dofs[i] == true)
    residual(i) = 0;
residual.compress(VectorOperation::insert); // Have to call it after setting the boundary elements
print_status_update(std::string("Returning l2 norm: " + std::to_string(residual.l2_norm()) + "\n"), true); // Crashes here
// At the end of the function, we return the norm of the residual:
return residual.l2_norm();
Running this with mpirun, I get:

----------------------------------------------------
Exception on processing:
--------------------------------------------------------
An error occurred in line <1774> of file </opt/dealII/include/deal.II/lac/trilinos_vector.h> in function
dealii::TrilinosWrappers::MPI::Vector::real_type dealii::TrilinosWrappers::MPI::Vector::l2_norm() const
The violated condition was:
ierr == 0
Additional information:
An error with error number -1 occurred while calling a Trilinos function
--------------------------------------------------------
Aborting!
----------------------------------------------------
ERROR: Uncaught exception in MPI_InitFinalize on proc 1. Skipping MPI_Finalize() to avoid a deadlock.
-------------------------------------------------------
Primary job terminated normally, but 1 process returned
a non-zero exit code.. Per user-direction, the job has been aborted.
-------------------------------------------------------
--------------------------------------------------------------------------
mpirun detected that one or more processes exited with non-zero status, thus causing
the job to be terminated. The first process to do so was:
Process name: [[51822,1],0]
Exit code: 1
--------------------------------------------------------------------------
The crash happens when residual is declared as

LinearAlgebraTrilinos::MPI::Vector residual;
IndexSet solution_relevant_partitioning(dof_handler.n_dofs());
DoFTools::extract_locally_relevant_dofs(dof_handler, solution_relevant_partitioning);
residual.reinit(solution_relevant_partitioning, MPI_COMM_WORLD);
but it works when declaring it locally in the function with

LinearAlgebraTrilinos::MPI::Vector local_residual(dof_handler.locally_owned_dofs(),
                                                  MPI_COMM_WORLD);

What is the difference here?
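For context, this is my understanding of the two layouts from the documentation (a sketch only; owned_vector and ghosted_vector are made-up names, and dof_handler is assumed to live on a parallel triangulation):

#include <deal.II/base/index_set.h>
#include <deal.II/dofs/dof_tools.h>
#include <deal.II/lac/generic_linear_algebra.h>

IndexSet locally_owned_dofs = dof_handler.locally_owned_dofs();
IndexSet locally_relevant_dofs;
DoFTools::extract_locally_relevant_dofs(dof_handler, locally_relevant_dofs);

// Non-ghosted: each entry is stored on exactly one process. This layout
// is writable, so assembly and reductions such as l2_norm() work:
LinearAlgebraTrilinos::MPI::Vector owned_vector(locally_owned_dofs,
                                                MPI_COMM_WORLD);

// Ghosted: built on an overlapping index set, so it additionally stores
// read-only copies of entries owned by other processes. Good for reading
// (e.g. in get_function_gradients), but not for writing:
LinearAlgebraTrilinos::MPI::Vector ghosted_vector(locally_owned_dofs,
                                                  locally_relevant_dofs,
                                                  MPI_COMM_WORLD);

// Ghost values are refreshed by assigning a non-ghosted vector:
ghosted_vector = owned_vector;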
With the locally declared vector, the loop

for (unsigned int i = 0; i < dof_handler.n_locally_owned_dofs(); ++i)
  if (boundary_dofs[i] == true)
    residual(i) = 0;

now runs, but the subsequent line

evaluation_point.add(alpha, newton_update);

fails with the message
--------------------------------------------------------
An error occurred in line <1927> of file </opt/dealII/include/deal.II/lac/trilinos_vector.h> in function
void dealii::TrilinosWrappers::MPI::Vector::add(dealii::TrilinosScalar, const dealii::TrilinosWrappers::MPI::Vector&)
The violated condition was:
!has_ghost_elements()
Additional information:
You are trying an operation on a vector that is only allowed if the vector has no ghost elements, but the vector you are operating on does have ghost elements. Specifically, vectors with ghost elements are read-only and cannot appear in operations that write into these vectors.
See the glossary entry on 'Ghosted vectors' for more information.
Stacktrace:
-----------
#0 main: dealii::TrilinosWrappers::MPI::Vector::add(double, dealii::TrilinosWrappers::MPI::Vector const&)
#1 main: Step15::MinimalSurfaceProblem<2>::compute_residual(double)
#2 main: Step15::MinimalSurfaceProblem<2>::run()
#3 main: main
--------------------------------------------------------
Calling MPI_Abort now.
To break execution in a GDB session, execute 'break MPI_Abort' before running. You can also put the following into your ~/.gdbinit:
set breakpoint pending on
break MPI_Abort
set breakpoint pending auto
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 1 in communicator MPI_COMM_WORLD
with errorcode 255.
Looks as if I have to check both vectors for ghost elements before operating on them.
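If I understand it correctly, the usual pattern is to keep the ghosted vector purely for reading and do all updates on a non-ghosted companion, roughly like this (a sketch only; distributed_evaluation_point is a name made up for illustration, while present_solution, newton_update and evaluation_point are the step-15 members):

// Writable companion vector without ghost entries (hypothetical name):
LinearAlgebraTrilinos::MPI::Vector distributed_evaluation_point(
  dof_handler.locally_owned_dofs(), MPI_COMM_WORLD);

// Optional sanity check before any write-type operation:
Assert(!distributed_evaluation_point.has_ghost_elements(),
       ExcMessage("Write operations need a vector without ghost elements."));

// Do the update on the non-ghosted vector; add() is allowed here:
distributed_evaluation_point = present_solution;
distributed_evaluation_point.add(alpha, newton_update);

// Copy back into the ghosted member that get_function_gradients() reads;
// the assignment also refreshes the ghost entries:
evaluation_point = distributed_evaluation_point;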
Thanks!