
Commit 4e5fb42

Add OverlappingCouplingGhostingTest
This tests that the sparsity pattern is built correctly. At this point, correctness is judged by whether or not PETSc throws a new-nonzero malloc error when we assemble into the global matrix. It would be nice to make this a richer test.
1 parent 718f201 commit 4e5fb42
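
For reference, the malloc error this test watches for is PETSc's new-nonzero allocation check: once a matrix has been preallocated from a sparsity pattern, inserting an entry outside that pattern aborts assembly when MAT_NEW_NONZERO_ALLOCATION_ERR is set, as libMesh does for its PETSc matrices. A minimal standalone sketch of that mechanism (not part of this commit; the sizes and preallocation are made up, and a recent PETSc with the PetscCall() macro is assumed):

#include <petscmat.h>

int main(int argc, char ** argv)
{
  PetscCall(PetscInitialize(&argc, &argv, nullptr, nullptr));

  // Preallocate a 4x4 sequential AIJ matrix with room for only
  // one nonzero per row
  Mat A;
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, 4, 4, 1, nullptr, &A));

  // Error out, rather than silently reallocating, if an insertion
  // falls outside the preallocated sparsity pattern
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));

  const PetscInt row = 0, col0 = 0, col3 = 3;
  const PetscScalar one = 1.0;

  // Within the preallocation: fine
  PetscCall(MatSetValues(A, 1, &row, 1, &col0, &one, INSERT_VALUES));

  // A second nonzero in row 0 was never preallocated, so this call
  // fails with a "new nonzero caused a malloc" error -- the failure
  // mode the test below relies on
  PetscCall(MatSetValues(A, 1, &row, 1, &col3, &one, INSERT_VALUES));

  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}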

File tree

1 file changed: +177 -0 lines changed

tests/base/overlapping_coupling_test.C

Lines changed: 177 additions & 0 deletions
@@ -17,6 +17,7 @@
 #include <libmesh/mesh_refinement.h>
 #include <libmesh/mesh_modification.h>
 #include <libmesh/partitioner.h>
+#include <libmesh/sparse_matrix.h>
 
 
 #include "test_comm.h"
@@ -619,11 +620,187 @@ private:
 
 };
 
+
+// This test class exercises the sparsity pattern augmented
+// by the OverlappingCouplingFunctor
+class OverlappingCouplingGhostingTest : public CppUnit::TestCase,
+                                        public OverlappingTestBase
+{
+public:
+  CPPUNIT_TEST_SUITE( OverlappingCouplingGhostingTest );
+
+  CPPUNIT_TEST( testSparsityCouplingMatrix );
+  CPPUNIT_TEST( testSparsityNullCouplingMatrix );
+  CPPUNIT_TEST( testSparsityNullCouplingMatrixUnifRef );
+
+  CPPUNIT_TEST_SUITE_END();
+
+public:
+
+  void setUp()
+  {}
+
+  void tearDown()
+  { this->clear(); }
+
+  void testSparsityCouplingMatrix()
+  {
+    this->run_sparsity_pattern_test(0, true);
+  }
+
+  void testSparsityNullCouplingMatrix()
+  {
+    this->run_sparsity_pattern_test(0, false);
+  }
+
+  void testSparsityNullCouplingMatrixUnifRef()
+  {
+    this->run_sparsity_pattern_test(1, false);
+  }
+
+private:
+
+  void run_sparsity_pattern_test(const unsigned int n_refinements, bool build_coupling_matrix)
+  {
+    this->build_quad_mesh(n_refinements);
+    this->init(*_mesh);
+
+    std::unique_ptr<CouplingMatrix> coupling_matrix;
+    if (build_coupling_matrix)
+      this->setup_coupling_matrix(coupling_matrix);
+
+    LinearImplicitSystem & system = _es->get_system<LinearImplicitSystem>("SimpleSystem");
+
+    // If we don't add this coupling functor and properly recompute the
+    // sparsity pattern, then PETSc will throw a malloc error when we
+    // try to assemble into the global matrix
+    OverlappingCouplingFunctor coupling_functor(system);
+    coupling_functor.set_coupling_matrix(coupling_matrix);
+
+    DofMap & dof_map = system.get_dof_map();
+    dof_map.add_coupling_functor(coupling_functor);
+    dof_map.reinit_send_list(system.get_mesh());
+
+    // Update the current local solution
+    system.current_local_solution = libMesh::NumericVector<libMesh::Number>::build(system.comm());
+    system.current_local_solution->init(system.n_dofs(), system.n_local_dofs(),
+                                        dof_map.get_send_list(), false,
+                                        libMesh::GHOSTED);
+    system.solution->localize(*(system.current_local_solution), dof_map.get_send_list());
+
+    // Now that we've added the coupling functor, we need to
+    // recompute the sparsity pattern
+    dof_map.clear_sparsity();
+    dof_map.compute_sparsity(system.get_mesh());
+
+    // Now that we've recomputed the sparsity pattern, we need
+    // to reinitialize the system matrix.
+    libMesh::SparseMatrix<libMesh::Number> & matrix = system.get_matrix("System Matrix");
+    libmesh_assert(dof_map.is_attached(matrix));
+    matrix.init();
+
+    std::unique_ptr<PointLocatorBase> point_locator = _mesh->sub_point_locator();
+
+    const unsigned int u_var = system.variable_number("U");
+    const unsigned int v_var = system.variable_number("V");
+
+    DenseMatrix<Number> K12, K21;
+
+    FEMContext subdomain_one_context(system);
+    FEMContext subdomain_two_context(system);
+
+    // Add the normally coupled parts of the matrix
+    for (const auto & elem : _mesh->active_local_subdomain_elements_ptr_range(1))
+      {
+        subdomain_one_context.pre_fe_reinit(system, elem);
+        subdomain_one_context.elem_fe_reinit();
+
+        std::vector<dof_id_type> & rows = subdomain_one_context.get_dof_indices();
+
+        // Fill with ones in case PETSc ignores the zeros at some point
+        std::fill( subdomain_one_context.get_elem_jacobian().get_values().begin(),
+                   subdomain_one_context.get_elem_jacobian().get_values().end(),
+                   1 );
+
+        // Insert the Jacobian for the dofs of this element
+        system.matrix->add_matrix( subdomain_one_context.get_elem_jacobian(), rows );
+      }
+
+    for (const auto & elem : _mesh->active_local_subdomain_elements_ptr_range(2))
+      {
+        // A little extra unit testing on the range iterator
+        CPPUNIT_ASSERT_EQUAL(2, (int)elem->subdomain_id());
+
+        const std::vector<libMesh::Point> & qpoints =
+          subdomain_two_context.get_element_fe(u_var)->get_xyz();
+
+        // Set up the context for the current element
+        subdomain_two_context.pre_fe_reinit(system, elem);
+        subdomain_two_context.elem_fe_reinit();
+
+        // We're only assembling rows for the dofs on subdomain 2 (U,L), so
+        // the current element will have all those dof_indices.
+        std::vector<dof_id_type> & rows = subdomain_two_context.get_dof_indices();
+
+        std::fill( subdomain_two_context.get_elem_jacobian().get_values().begin(),
+                   subdomain_two_context.get_elem_jacobian().get_values().end(),
+                   1 );
+
+        // Insert the Jacobian for the normally coupled dofs of this element
+        system.matrix->add_matrix( subdomain_two_context.get_elem_jacobian(), rows );
+
+        std::set<subdomain_id_type> allowed_subdomains;
+        allowed_subdomains.insert(1);
+
+        // Now loop over the quadrature points, find the subdomain-one element
+        // that overlaps the current subdomain-two element, and add a local
+        // element matrix with the coupling to the global matrix, to try to
+        // trip any issues with sparsity pattern construction
+        for ( const auto & qp : qpoints )
+          {
+            const Elem * overlapping_elem = (*point_locator)( qp, &allowed_subdomains );
+            CPPUNIT_ASSERT(overlapping_elem);

+            // Set up the context for the overlapping element
+            subdomain_one_context.pre_fe_reinit(system, overlapping_elem);
+            subdomain_one_context.elem_fe_reinit();
+
+            // We're only coupling to the "V" variable, so we only need those dof indices
+            std::vector<dof_id_type> & v_indices = subdomain_one_context.get_dof_indices(v_var);
+            std::vector<dof_id_type> columns(rows);
+            columns.insert( columns.end(), v_indices.begin(), v_indices.end() );
+
+            // The resize also zeros the matrix, so we could insert zeros for
+            // this test, but we fill with ones in case PETSc ignores the zeros
+            K21.resize( rows.size(), columns.size() );
+            std::fill(K21.get_values().begin(), K21.get_values().end(), 1);
+
+            // Adding this local matrix to the global matrix would trip a PETSc
+            // malloc error if the sparsity pattern hadn't been correctly built
+            // to include the overlapping coupling.
+            system.matrix->add_matrix(K21, rows, columns);
+
+            // Now add the other part of the overlapping coupling
+            K12.resize(v_indices.size(), rows.size());
+            std::fill(K12.get_values().begin(), K12.get_values().end(), 1);
+            system.matrix->add_matrix(K12, v_indices, rows);
+          }
+      } // end element loop
+
+    // We need to make sure to close the matrix for this test. There could still
+    // be PETSc malloc errors tripped here if we didn't allocate the off-processor
+    // part of the sparsity pattern correctly.
+    system.matrix->close();
+  }
+
+};
 #endif // LIBMESH_HAVE_PETSC
 
 
 CPPUNIT_TEST_SUITE_REGISTRATION( OverlappingFunctorTest );
 
 #ifdef LIBMESH_HAVE_PETSC
 CPPUNIT_TEST_SUITE_REGISTRATION( OverlappingAlgebraicGhostingTest );
+CPPUNIT_TEST_SUITE_REGISTRATION( OverlappingCouplingGhostingTest );
 #endif
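
The OverlappingCouplingFunctor itself is defined earlier in this file, so for readers of this diff it may help to recall the shape of the libMesh GhostingFunctor interface that run_sparsity_pattern_test() exercises. A rough sketch under current libMesh, with an invented class name and an invented face-neighbor coupling rule standing in for the real point-locator overlap search:

#include <libmesh/ghosting_functor.h>
#include <libmesh/coupling_matrix.h>
#include <libmesh/elem.h>

using namespace libMesh;

class SketchCouplingFunctor : public GhostingFunctor
{
public:
  explicit SketchCouplingFunctor (const CouplingMatrix * coupling = nullptr)
    : _coupling(coupling) {}

  // For each element in [range_begin, range_end), report any extra
  // elements whose dofs should couple to it, along with the variable
  // coupling to use; the DofMap folds this into the sparsity pattern
  // and the send list.
  virtual void operator() (const MeshBase::const_element_iterator & range_begin,
                           const MeshBase::const_element_iterator & range_end,
                           processor_id_type p,
                           map_type & coupled_elements) override
  {
    for (auto it = range_begin; it != range_end; ++it)
      {
        const Elem * elem = *it;

        // Stand-in for a real overlap search: couple each element to
        // its off-processor face neighbors
        for (auto s : elem->side_index_range())
          {
            const Elem * neighbor = elem->neighbor_ptr(s);
            if (neighbor && neighbor->processor_id() != p)
              coupled_elements.emplace(neighbor, _coupling);
          }
      }
  }

private:
  const CouplingMatrix * _coupling; // nullptr couples all variable pairs
};

Once such a functor is attached with dof_map.add_coupling_functor(), both the communication and sparsity data structures are stale, which is why the test calls reinit_send_list(), clear_sparsity(), and compute_sparsity() before re-initializing the system matrix.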

0 commit comments