27 #include "petsc_utils.h"
44 logger = f_open("electrics.log", param_globals::experiment != 4 ? "w" : "r");
59 if (param_globals::bidomain || param_globals::extracell_monodomain_stim) {
68   param_globals::dt, 0, "elec::ref_dt", "TS");
88 if(strlen(param_globals::phie_rec_ptf) > 0)
95 if (param_globals::prepacing_bcl > 0)
105 m->regions.resize(param_globals::num_gregions);
108 log_msg(logger, 0, 0, "Setting up %s tissue properties for %d regions ..", grid_name,
109         param_globals::num_gregions);
114 for (size_t i=0; i<m->regions.size(); i++, reg++) {
115   if(!strcmp(param_globals::gregion[i].name, "")) {
116     snprintf(buf, sizeof buf, ", gregion_%d", int(i));
117     param_globals::gregion[i].name = dupstr(buf);
120   reg->regname  = strdup(param_globals::gregion[i].name);
122   reg->nsubregs = param_globals::gregion[i].num_IDs;
129   reg->subregtags[j] = param_globals::gregion[i].ID[j];
136   emat->InVal[0] = param_globals::gregion[i].g_il;
137   emat->InVal[1] = param_globals::gregion[i].g_it;
138   emat->InVal[2] = param_globals::gregion[i].g_in;
140   emat->ExVal[0] = param_globals::gregion[i].g_el;
141   emat->ExVal[1] = param_globals::gregion[i].g_et;
142   emat->ExVal[2] = param_globals::gregion[i].g_en;
144   emat->BathVal[0] = param_globals::gregion[i].g_bath;
145   emat->BathVal[1] = param_globals::gregion[i].g_bath;
146   emat->BathVal[2] = param_globals::gregion[i].g_bath;
149   for (int j=0; j<3; j++) {
150     emat->InVal[j]   *= 1e-3 * param_globals::gregion[i].g_mult;
151     emat->ExVal[j]   *= 1e-3 * param_globals::gregion[i].g_mult;
152     emat->BathVal[j] *= 1e-3 * param_globals::gregion[i].g_mult;
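// Editor's note (illustrative sketch, not the openCARP assembly code): the per-region
// eigenvalues set above are later combined with the local fiber direction f into a
// conductivity tensor. Ignoring the sheet-normal eigenvalue for brevity, an axially
// symmetric tensor would be built as sigma = g_t*I + (g_l - g_t)*f*f^T:

#include <array>

using vec3 = std::array<double,3>;
using mat3 = std::array<std::array<double,3>,3>;

// g_l: longitudinal eigenvalue, g_t: transverse eigenvalue, f: unit fiber direction
static mat3 conductivity_tensor(double g_l, double g_t, const vec3 &f)
{
  mat3 sigma{};
  for (int a = 0; a < 3; a++) {
    for (int b = 0; b < 3; b++)
      sigma[a][b] = (g_l - g_t) * f[a] * f[b];  // anisotropic part along the fiber
    sigma[a][a] += g_t;                         // isotropic transverse part
  }
  return sigma;
}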
162 const char* file = g == Electrics::intra_grid ? param_globals::gi_scale_vec : param_globals::ge_scale_vec;
167 log_msg(0,4,0, "%s warning: number of %s conductivity scaling entries does not match number of elements!",
191 void Electrics::setup_mappings()
200 log_msg(logger, 0, 0, "%s: Setting up intracellular algebraic-to-nodal scattering.", __func__);
204 log_msg(logger, 0, 0, "%s: Setting up intracellular PETSc to canonical permutation.", __func__);
210 log_msg(logger, 0, 0, "%s: Setting up extracellular algebraic-to-nodal scattering.", __func__);
212 log_msg(logger, 0, 0, "%s: Setting up extracellular PETSc to canonical permutation.", __func__);
214 log_msg(logger, 0, 0, "%s: Setting up intra-to-extra scattering.", __func__);
218 bool check_i2e = false;
219 if(check_i2e && extra_exists) {
238 for(size_t i=0; i<intra_alg_nod.size(); i++) {
240 id[lpidx] = intra_ref_nbr[intra_alg_nod[i]];
245 for(size_t i=0; i<extra_alg_nod.size(); i++) {
247 ed[lpidx] = extra_ref_nbr[extra_alg_nod[i]];
251 i2e_testvec->set(-1.0);
252 i2e.forward(*intra_testvec, *i2e_testvec);
255 for(size_t i=0; i<extra_alg_nod.size(); i++) {
256   auto id = i2e_testvec->get(i);
257   auto ed = extra_testvec->get(i);
258   if(id > -1 && id != ed)
263 log_msg(0,5,0, "Electrics mapping test failed!");
265 log_msg(0,5,0, "Electrics mapping test succeeded!");
289 stimulate_extracellular();
291 if(param_globals::bidomain == BIDOMAIN)
299 stimulate_intracellular();
309 if(param_globals::bidomain == BIDOMAIN)
338 double curtime = timing(t2, t1);
368 log_msg(NULL, 0, 0, "Balancing stimulus %d with %d %s-wise.", balance_from, balance_to,
369         is_current(stimuli[balance_from].phys.type) ? "current" : "voltage");
371 stimulus & from = stimuli[balance_from];
372 stimulus & to = stimuli[balance_to];
391 void Electrics::balance_electrodes()
393 for(int i=0; i<param_globals::num_stim; i++) {
394 if(param_globals::stim[i].crct.balance != -1) {
395 int from = param_globals::stim[i].crct.balance;
403 void Electrics::setup_stimuli()
407 bool dumpTrace = true;
411 stimuli.resize(param_globals::num_stim);
412 for(int i=0; i<param_globals::num_stim; i++)
429 double val; s.value(val);
434 for(size_t i=0; i<idx.size(); i++) {
437 v[vidx] = add ? b + val : val;
442 void Electrics::stimulate_intracellular()
453 if(param_globals::operator_splitting) {
480 if(illum_vec == NULL) {
481 log_msg(0,5,0, "Cannot apply illumination stim: global vector not present!");
496 void Electrics::clamp_Vm() {
498 if(s.phys.type == Vm_clmp && s.is_active())
503 void Electrics::stimulate_extracellular()
505 if(param_globals::bidomain) {
510 if(dbcs_have_updated && time_not_final)
516 if(s.is_active() && s.phys.type == I_ex)
534 for(int pid=0; pid < mpi_size; pid++) {
535   if(mpi_rank == pid) {
537     buffsize = sndbuff.size();
540   MPI_Bcast(&buffsize, sizeof(size_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
542   MPI_Bcast(sndbuff.data(), buffsize*sizeof(mesh_int_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
544 mesh_int_t start = layout[mpi_rank], stop = layout[mpi_rank+1];
547 if(i >= start && i < stop)
571 for(int pid=0; pid < mpi_size; pid++) {
572   if(mpi_rank == pid) {
574     buffsize = sndbuff.size();
577   MPI_Bcast(&buffsize, sizeof(size_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
579   MPI_Bcast(sndbuff.data(), buffsize*sizeof(mesh_int_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
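// Editor's note: the two loops above share a broadcast-and-filter pattern in which every
// rank announces the stimulus indices it found locally and each receiver keeps only the
// indices falling into its own ownership range. A minimal self-contained sketch of that
// pattern (placeholder names share_indices/layout, plain MPI, not the openCARP code):

#include <mpi.h>
#include <vector>
#include <cstddef>

void share_indices(const std::vector<long> &local_idx,  // indices found on this rank
                   const std::vector<long> &layout,     // ownership ranges, size nranks+1
                   std::vector<long> &owned_idx, MPI_Comm comm)
{
  int rank, nranks;
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &nranks);

  const long start = layout[rank], stop = layout[rank + 1];

  for (int pid = 0; pid < nranks; pid++) {
    // rank pid announces how many indices it will send, then sends the indices
    size_t bufsize = (rank == pid) ? local_idx.size() : 0;
    MPI_Bcast(&bufsize, sizeof(size_t), MPI_BYTE, pid, comm);

    std::vector<long> buf(bufsize);
    if (rank == pid) buf = local_idx;
    MPI_Bcast(buf.data(), int(bufsize * sizeof(long)), MPI_BYTE, pid, comm);

    // keep only the indices this rank owns
    for (long idx : buf)
      if (idx >= start && idx < stop) owned_idx.push_back(idx);
  }
}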
653 void Electrics::setup_output()
661 restr_i, param_globals::num_io_nodes > 0);
663 if(param_globals::dataout_i)
666 if(param_globals::bidomain) {
668 restr_e, param_globals::num_io_nodes > 0);
670 if(param_globals::dataout_i)
672 if(param_globals::dataout_e)
681 if(param_globals::num_trace) {
683   open_trace(ion.miif, param_globals::num_trace, param_globals::trace_node, NULL, &imesh);
690 void Electrics::dump_matrices()
692 std::string bsname = param_globals::dump_basename;
698 if ( param_globals::parab_solve==1 ) {
700   fn = bsname + "_Ki_CN.bin";
703   fn = bsname + "_Ki.bin";
706   fn = bsname + "_Mi.bin";
709 if ( param_globals::bidomain ) {
710   fn = bsname + "_Kie.bin";
713   fn = bsname + "_Me.bin";
730 val = std::nan("NaN");
744 s_unit = stimuli[sidx].pulse.wave.f_unit;
757 for(size_t i = 0; i<stimuli.size(); i++)
781 double* reg_kappa = new double[miif.N_IIF];
783 for(int i=0; i<miif.N_IIF; i++)
784   reg_kappa[i] = k * miif.IIF[i]->cgeom().SVratio * ir[i].volFrac;
786 double *kd = kappa.ptr();
788 for(int i = 0; i < miif.numNode; i++)
789   kd[i] = reg_kappa[(int) miif.IIFmask[i]];
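// Editor's note: in words, get_kappa() computes a per-region constant
//   kappa_r = k * SVratio_r * volFrac_r,
// i.e. the prescribed factor k scaled by the cell surface-to-volume ratio and volume
// fraction of IMP region r, and expands it into a nodal vector through the per-node
// region mask IIFmask. The meaning of k (e.g. a capacitance or time-step factor)
// depends on the caller and is not fixed here.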
805 for(size_t i=0; i < m.regions.size(); i++) {
811 void Electrics::setup_solvers()
817 if (param_globals::bidomain) {
822 if(param_globals::dump2MatLab)
840 double min_diff = 1e100;
842 for(int i=0; i<param_globals::num_tsav; i++)
844 double diff = fabs(param_globals::tsav[i] - time);
845 if(min_diff > diff) {
854 return param_globals::tsav_ext[min_idx];
857 void Electrics::checkpointing()
866 snprintf(save_fnm, sizeof save_fnm, "%s.%s.roe", param_globals::write_statef, tsav_ext);
874 snprintf(save_fnm, sizeof save_fnm, "checkpoint.%.1f.roe", tm.time);
883 stats.init_logger("ell_stats.dat");
906 phie_mat->init(extra_mesh, dpn, dpn, max_row_entries);
908 mass_e ->init(extra_mesh, dpn, dpn, param_globals::mass_lumping ? 1 : max_row_entries);
918 rebuild_stiffness(mtype, stimuli, logger);
919 rebuild_mass(logger);
928 int log_flag = param_globals::output_level > 1 ? ECHO : 0;
946 phie_mat->scale(-1.0);
949 log_msg(logger,0,log_flag, "Computed elliptic stiffness matrix in %.3f seconds.", dur);
953 log_msg(logger,0,log_flag, "Elliptic lhs matrix enforcing Dirichlet boundaries.");
959 dbc->recompute_dbcs();
961 dbc->enforce_dbc_lhs();
964 log_msg(logger,0,log_flag, "Elliptic lhs matrix Dirichlet enforcing done in %.3f seconds.", dur);
967 log_msg(logger,1,ECHO, "Elliptic lhs matrix is singular!");
969 phie_mat_has_nullspace = true;
976 setup_linear_solver(logger);
979 log_msg(logger,0,log_flag, "Initializing elliptic solver in %.5f seconds.", dur);
985 int log_flag = param_globals::output_level > 1 ? ECHO : 0;
994 if(param_globals::mass_lumping) {
1001 log_msg(logger,0,log_flag, "Computed elliptic mass matrix in %.3f seconds.", dur);
1007 tol = param_globals::cg_tol_ellip;
1008 max_it = param_globals::cg_maxit_ellip;
1010 std::string solver_file;
1011 solver_file = param_globals::ellip_options_file;
1012 lin_solver->setup_solver(*phie_mat, tol, max_it, param_globals::cg_norm_ellip,
1013                          "elliptic PDE", phie_mat_has_nullspace,
1014                          logger, solver_file.c_str(),
1015                          "-ksp_type cg -pc_type hypre -pc_hypre_type boomeramg "
1016                          "-pc_hypre_boomeramg_max_iter 1 "
1017                          "-pc_hypre_boomeramg_strong_threshold 0.0 -options_left");
1025 if (phiesrc->mag() > 0.0) {
1026 mass_e->mult(*phiesrc, *currtmp);
1027 phiesrc->deep_copy(*currtmp);
1030 Ki.mult(Vmv, tmp_i);
1033 i2e->forward(tmp_i, *phiesrc, add);
1036 dbc->enforce_dbc_rhs(*phiesrc);
1039 (*lin_solver)(*phie, *phiesrc);
1042 auto dur = timing(t1, t0);
1043 lin_solver->time += dur;
1044 stats.slvtime += dur;
1045 stats.update_iter(lin_solver->niter);
1046 if(lin_solver->reason < 0) {
1047   log_msg(0, 5, 0, "%s solver diverged. Reason: %s.", lin_solver->name.c_str(),
1048           petsc_get_converged_reason_str(lin_solver->reason));
1053 i2e->backward(*phie, *phie_i, add);
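// Editor's note: the sequence above is, roughly, the discrete elliptic bidomain step
//   K_(i+e) * phi_e = -K_i * Vm  (+ extracellular current stimuli),
// i.e. the intracellular stiffness matrix is applied to Vm, the result is scattered onto
// the extracellular grid as the right-hand side, Dirichlet values are enforced, the
// (sign-flipped, see phie_mat->scale(-1.0) above) system is solved for phi_e, and phi_e
// is finally scattered back to the intracellular grid as phie_i.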
1061 dbc->enforce_dbc_rhs(*phiesrc);
1064 (*lin_solver)(*phie, *phiesrc);
1067 auto dur = timing(t1, t0);
1068 lin_solver->time += dur;
1069 stats.slvtime += dur;
1070 stats.update_iter(lin_solver->niter);
1072 if(lin_solver->reason < 0) {
1073   log_msg(0, 5, 0, "%s solver diverged. Reason: %s.", lin_solver->name.c_str(),
1074           petsc_get_converged_reason_str(lin_solver->reason));
1082 i2e->backward(*phie, *phie_i, add);
1090 stats.init_logger("par_stats.dat");
1098 if(!(vm_ptr != NULL && iion_ptr != NULL)) {
1099   log_msg(0,5,0, "%s error: global Vm and Iion vectors not properly set up! Ionics seem invalid! Aborting!",
1105 Vmv->shallow_copy(*vm_ptr);
1107 IIon->shallow_copy(*iion_ptr);
1118 if(!param_globals::operator_splitting)
1128 rhs_parab->init(intra_mesh, dpn, dpn, max_row_entries);
1129 mass_i ->init(intra_mesh, dpn, dpn, param_globals::mass_lumping ? 1 : max_row_entries);
1137 double start, end, period;
1143 int log_flag = param_globals::output_level > 1 ? ECHO : 0;
1152 if( (param_globals::bidomain == MONODOMAIN && param_globals::bidm_eqv_mono) ||
1167 log_msg(logger,0,log_flag, "Computed parabolic stiffness matrix in %.3f seconds.", dur);
1170 if(param_globals::mass_lumping)
1176 mass_i->mult_LR(*kappa_i, *empty);
1179 log_msg(logger,0,log_flag, "Computed parabolic mass matrix in %.3f seconds.", dur);
1184 if(parab_tech != EXPLICIT) {
1185 lhs_parab->duplicate(*rhs_parab);
1187 bool same_nonzero = param_globals::mass_lumping == false;
1189 if (parab_tech==CN) {
1190 lhs_parab->scale(-param_globals::theta);
1191 lhs_parab->add_scaled_matrix(*mass_i, 1.0, same_nonzero);
1193 else if (parab_tech==O2dT) {
1194 lhs_parab->scale(-0.5);
1196 lhs_parab->add_scaled_matrix(*mass_i, 1.0, same_nonzero);
1197 lhs_parab->add_scaled_matrix(*mass_i, 1.0, same_nonzero);
1198 lhs_parab->add_scaled_matrix(*mass_i, 1.0, same_nonzero);
1203 mass_i->get_diagonal(*inv_mass_diag);
1205 SF_real* p = inv_mass_diag->ptr();
1207 for(int i=0; i<inv_mass_diag->lsize(); i++)
1210 inv_mass_diag->release_ptr(p);
1213 if(parab_tech == CN || parab_tech == O2dT) {
1217 setup_linear_solver(logger);
1220 log_msg(logger,0,log_flag, "Initializing parabolic solver in %.5f seconds.", dur);
1223 period = timing(end, start);
1226 void parabolic_solver::setup_linear_solver(FILE_SPEC logger)
1228 tol = param_globals::cg_tol_parab;
1229 max_it = param_globals::cg_maxit_parab;
1231 std::string solver_file;
1232 solver_file = param_globals::parab_options_file;
1233 lin_solver->setup_solver(*lhs_parab, tol, max_it, param_globals::cg_norm_parab,
1234                          "parabolic PDE", false, logger, solver_file.c_str(),
1235                          "-pc_type bjacobi -sub_pc_type ilu -ksp_type cg");
1240 switch (parab_tech) {
1241   case CN:   solve_CN(phie_i);   break;
1242   case O2dT: solve_O2dT(phie_i); break;
1243   default:   solve_EF(phie_i);   break;
1247 void parabolic_solver::solve_CN(sf_vec & phie_i)
1251 if (param_globals::bidomain == BIDOMAIN) {
1252 tmp_i1->deep_copy(phie_i);
1253 tmp_i1->add_scaled(*Vmv, 1.0 - param_globals::theta);
1254 rhs_parab->mult(*tmp_i1, *tmp_i2);
1257 rhs_parab->mult(*Vmv, *tmp_i2);
1258 *tmp_i2 *= 1.0 - param_globals::theta;
1261 mass_i->mult(*Vmv, *tmp_i1);
1265 if(!param_globals::operator_splitting)
1266 tmp_i1->add_scaled(*Irhs, -1.0);
1270 (*lin_solver)(*Vmv, *tmp_i1);
1272 if(lin_solver->reason < 0) {
1273   log_msg(0, 5, 0, "%s solver diverged. Reason: %s.", lin_solver->name.c_str(),
1274           petsc_get_converged_reason_str(lin_solver->reason));
1279 auto dur = timing(t1, t0);
1280 lin_solver->time += dur;
1281 stats.slvtime += dur;
1282 stats.update_iter(lin_solver->niter);
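// Editor's note: writing M for the (possibly lumped) intracellular mass matrix and K for
// the parabolic stiffness matrix (with time-step and kappa scaling assumed to be folded
// into K), the Crank-Nicolson branch above roughly solves
//   (M - theta*K) * Vm^(n+1) = M * Vm^n + (1 - theta)*K * Vm^n - Irhs  [+ phie coupling],
// which matches the lhs assembly in rebuild_matrices (scale by -theta, then add M) and
// the rhs construction in solve_CN.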
1285 void parabolic_solver::solve_O2dT(sf_vec & phie_i)
1289 if (param_globals::bidomain == BIDOMAIN) {
1290 tmp_i2->deep_copy(phie_i);
1291 tmp_i2->add_scaled(*Vmv, 0.5);
1292 rhs_parab->mult(*tmp_i2, *tmp_i1);
1295 rhs_parab->mult(*Vmv, *tmp_i1);
1299 mass_i->mult(*Vmv, *tmp_i2);
1300 tmp_i1->add_scaled(*tmp_i2, 4.0);
1301 mass_i->mult(*old_vm, *tmp_i2);
1303 tmp_i1->add_scaled(*tmp_i2, -1.0);
1309 (*lin_solver)(*Vmv, *tmp_i1);
1312 stats.slvtime += timing(t1, t0);
1313 stats.update_iter(lin_solver->niter);
1316 void parabolic_solver::solve_EF(sf_vec & phie_i)
1322 if (param_globals::bidomain == BIDOMAIN) {
1323 tmp_i2->deep_copy(phie_i);
1325 rhs_parab->mult(*tmp_i2, *tmp_i1);
1328 rhs_parab->mult(*Vmv, *tmp_i1);
1331 *tmp_i1 *= *inv_mass_diag;
1332 Vmv->add_scaled(*tmp_i1, 1.0);
1334 if(param_globals::operator_splitting == false)
1335 Vmv->add_scaled(*Irhs, -1.0);
1338 stats.slvtime += timing(t1, t0);
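// Editor's note: the explicit (EF) branch correspondingly performs the update
//   Vm^(n+1) = Vm^n + M_l^(-1) * K * Vm^n  [- Irhs],
// using the precomputed inverse of the lumped mass diagonal (inv_mass_diag) in place of
// a linear solve.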
1343 char* prvSimDir = strlen(param_globals::start_statef) ?
1346 const char* extn = ".dat";
1349 int addLATs = param_globals::compute_APD ? 2 : 0;
1351 bool have_sentinel = param_globals::t_sentinel > 0.0;
1352 bool need_to_add_sentinel = have_sentinel && (param_globals::sentinel_ID < 0);
1354 addLATs += need_to_add_sentinel ? 1 : 0;
1355 acts.resize(param_globals::num_LATs + addLATs);
1358 for (int i = 0; i < param_globals::num_LATs; i++ )
1361 if (param_globals::lats[i].method <= 0 || (param_globals::lats[i].measurand == PHIE && !param_globals::bidomain)) {
1362   log_msg(NULL, 3, 0, "Phie-based LAT measurement requires bidomain >= 1. Ignoring lats[%d].", i);
1366 acts[j].method = (ActMethod)param_globals::lats[i].method;
1367 acts[j].threshold = param_globals::lats[i].threshold;
1368 acts[j].mode = param_globals::lats[i].mode;
1369 acts[j].all = param_globals::lats[i].all;
1370 acts[j].measurand = (PotType)param_globals::lats[i].measurand;
1371 acts[j].ID = param_globals::lats[i].ID;
1372 acts[j].fout = NULL;
1374 if(param_globals::lats[i].all) {
1375   acts[j].fname = (char*) malloc((strlen(param_globals::lats[i].ID)+strlen(extn)+1)*sizeof(char));
1376   snprintf(acts[j].fname, strlen(param_globals::lats[i].ID)+strlen(extn)+1, "%s%s", param_globals::lats[i].ID, extn);
1379   char prfx[] = "init_acts_";
1380 int max_len = strlen(prfx) + strlen(param_globals::lats[i].ID) + strlen(extn) + 1;
1382   acts[j].fname = (char*) malloc(max_len*sizeof(char));
1383   snprintf(acts[j].fname, max_len, "%s%s%s", prfx, param_globals::lats[i].ID, extn);
1387 if(prvSimDir != NULL) {
1388 int len_fname = strlen(prvSimDir)+strlen(acts[j].fname)+2;
1389   acts[j].prv_fname = (char*) malloc(len_fname*sizeof(char));
1390   snprintf(acts[j].prv_fname, len_fname, "%s/%s", prvSimDir, acts[j].fname);
1396 if(param_globals::compute_APD) {
1398 acts[j].threshold = param_globals::actthresh;
1401 acts[j].measurand = VM;
1403 acts[j].fout = NULL;
1404 acts[j].fname = dupstr("vm_activation.dat");
1408 acts[j].threshold = param_globals::recovery_thresh;
1411 acts[j].measurand = VM;
1413 acts[j].fout = NULL;
1414 acts[j].fname = dupstr("vm_repolarisation.dat");
1420 sntl.activated = have_sentinel;
1421 sntl.t_start = param_globals::t_sentinel_start;
1422 sntl.t_window = param_globals::t_sentinel;
1424 sntl.ID = param_globals::sentinel_ID;
1426 if(need_to_add_sentinel) {
1429 acts[j].threshold = param_globals::actthresh;
1432 acts[j].measurand = VM;
1434 acts[j].fout = NULL;
1435 acts[j].fname = dupstr("vm_sentinel.dat");
1441 if(prvSimDir) free(prvSimDir);
1449 log_msg(logger, 0, 0, "LAT detector [%2d]", idx);
1450 log_msg(logger, 0, 0, "-----------------\n");
1453 log_msg(logger, 0, 0, "All: %s", act.all ? "All" : "Only first");
1454 log_msg(logger, 0, 0, "Method: %s", act.method==ACT_DT ? "Derivative" : "Threshold crossing");
1456 char buf[64], gt[2], sgn[2];
1457 snprintf(sgn, sizeof sgn, "%s", act.mode ? "-" : "+");
1458 snprintf(gt,  sizeof gt,  "%s", act.mode ? "<" : ">");
1461 snprintf(buf, sizeof buf, "Maximum %sdf/dt %s %.2f mV", sgn, gt, act.threshold);
1463 snprintf(buf, sizeof buf, "Intersection %sdf/dt with %.2f", sgn, act.threshold);
1465 log_msg(logger, 0, 0, "Mode: %s", buf);
1476 for(size_t i = 0; i < acts.size(); ++i) {
1479 acts[i].phi->shallow_copy(!acts[i].measurand ? vm : phie);
1480 acts[i].offset = offset;
1483 *acts[i].phip = *acts[i].phi;
1486 if (acts[i].method == ACT_DT) {
1490   log_msg(NULL,2,0, "Detection of -df/dt|max not implemented, +df/dt|max will be detected.");
1494 acts[i].ibuf   = (int *)   malloc(acts[i].phi->lsize()*sizeof(int));
1495 acts[i].actbuf = (double *)malloc(acts[i].phi->lsize()*sizeof(double));
1498 SF::init_vector(&acts[i].tm, acts[i].phi->gsize(), acts[i].phi->lsize());
1499 acts[i].tm->set(-1.);
1502 if(acts[i].prv_fname != NULL) {
1504 size_t nread = acts[i].tm->read_ascii(acts[i].prv_fname);
1507   log_msg(NULL,2,ECHO, "Warning: Initialization of LAT[%2d] failed.", i);
1515 if(acts[i].prv_fname!=NULL) {
1519   log_msg(NULL,2,0, "Copying over of previous activation file not implemented.\n");
       f_close(in);
1522   log_msg(NULL,3,0, "Warning: Initialization in %s - \n"
1523                     "Failed to read activation file %s.\n", __func__, acts[i].prv_fname);
1527 acts[i].fout = f_open(acts[i].fname, acts[i].prv_fname==NULL ? "w" : "a");
1544 for (int i=0; i<nlacts; i++)
1545   fprintf(fp->fd, "%d\t%.6f\n", ibuf[i], act_tbuf[i]);
1552 for (int j=1; j<numProc; j++) {
1555 MPI_Recv(&acts, 1, MPI_INT, j, 110, PETSC_COMM_WORLD, &status);
1561 MPI_Recv(buf_inds.data(), acts, MPI_INT,    j, 110, PETSC_COMM_WORLD, &status);
1562 MPI_Recv(buf_acts.data(), acts, MPI_DOUBLE, j, 110, PETSC_COMM_WORLD, &status);
1564 for(int ii=0; ii<acts; ii++)
1565   fprintf(fp->fd, "%d\t%.6f\n", buf_inds[ii], buf_acts[ii]);
1573 MPI_Send(&nlacts, 1, MPI_INT, 0, 110, PETSC_COMM_WORLD);
1575 MPI_Send(ibuf, nlacts, MPI_INT, 0, 110, PETSC_COMM_WORLD);
1576 MPI_Send(act_tbuf, nlacts, MPI_DOUBLE, 0, 110, PETSC_COMM_WORLD);
1580 MPI_Bcast(&gacts, 1, MPI_INT, 0, PETSC_COMM_WORLD);
1589 for(Activation* aptr = acts.data(); aptr != acts.end(); aptr++)
1592 switch (aptr->method) {
1594 lacts = check_cross_threshold(*aptr->phi, *aptr->phip, tm,
1595 aptr->ibuf, aptr->actbuf, aptr->threshold, aptr->mode);
1599 lacts = check_mx_derivative (*aptr->phi, *aptr->phip, tm,
1600 aptr->ibuf, aptr->actbuf, *aptr->dvp0, *aptr->dvp1,
1601 aptr->threshold, aptr->mode);
1609 a = aptr->tm->ptr();
1613 for(int j=0; j<lacts; j++) {
1615 int nodal_idx = this->petsc_to_nodal.forward_map(aptr->ibuf[j]);
1616 aptr->ibuf[j] = canon_nbr[nodal_idx] + aptr->offset;
1619 if(a[aptr->ibuf[j]] == -1)
1620 a[aptr->ibuf[j]] = aptr->actbuf[j];
1627 aptr->tm->release_ptr(a);
1629 MPI_Allreduce(MPI_IN_PLACE, &lacts, 1, MPI_INT, MPI_SUM, PETSC_COMM_WORLD);
1632 aptr->nacts = nacts;
1641 static int savequitFlag = 0;
1642 int numNodesActivated = -1;
1644 if(sntl.activated) {
1646 if(sntl.t_quiesc < 0. && sntl.t_window >= 0.0 ) {
1647 log_msg(0,0,ECHO | NONL, "================================================================================================\n");
1648 log_msg(0,0,ECHO | NONL, "%s() WARNING: simulation is configured to savequit() after %.2f ms of quiescence\n", __func__, sntl.t_window);
1649 log_msg(0,0,ECHO | NONL, "================================================================================================\n");
1650 sntl.t_quiesc = 0.0;
1653 if(tm >= sntl.t_start && !savequitFlag)
1655 numNodesActivated = acts[sntl.ID].nacts;
1657 if(numNodesActivated) sntl.t_quiesc = 0.0;
1658 else sntl.t_quiesc += dt;
1660 if(sntl.t_window >= 0.0 && sntl.t_quiesc > sntl.t_window && !savequitFlag) {
1667 return numNodesActivated;
1673 int LAT_detector::check_cross_threshold(sf_vec & vm, sf_vec & vmp, double tm,
1674                                         int *ibuf, double *actbuf, float threshold, int mode)
1678 int lsize = vm.lsize();
1679 int nacts = 0, gnacts = 0;
1681 for (int i=0; i<lsize; i++) {
1683   bool triggered = false;
1685 triggered = p[i] <= threshold && c[i] > threshold; }
1687 triggered = p[i] >= threshold && c[i] < threshold;
1692 double tact = tm - param_globals::dt + (threshold-p[i])/(c[i]-p[i])*sgn*param_globals::dt;
1694 actbuf[nacts] = tact;
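// Editor's note: once a crossing is detected, the activation time is linearly
// interpolated between the previous and current sample,
//   t_act = t - dt + dt * (threshold - Vm_prev) / (Vm_cur - Vm_prev),
// with the sign factor sgn flipping the fraction for negative-slope (mode) crossings.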
1705 int LAT_detector::check_mx_derivative(sf_vec & vm, sf_vec & vmp, double tm,
1706                                       int *ibuf, double *actbuf, sf_vec & dvp0, sf_vec & dvp1,
1707                                       float threshold, int mode)
1709 int nacts = 0, gnacts = 0;
1710 double tact, dt2 = 2 * param_globals::dt;
1711 int lsize = vm.lsize();
1720 for (int i=0; i<lsize; i++ ) {
1722 dvdt = dv/param_globals::dt;
1723 ddv0 = pd1[i]-pd0[i];
1726 if (dvdt>=threshold && ddv0>0 && ddv1<0) {
1727 tact = tm-dt2+(ddv0/(ddv0-ddv1))*param_globals::dt;
1729 actbuf[nacts] = tact;
1753 bool forward = true;
1755 for (size_t i = 0; i < acts.size(); i++) {
1757   (*sc)(*acts[i].tm, forward);
1758   acts[i].tm->write_ascii(acts[i].fname, false);
1763 void Electrics::prepace() {
1767 float stimstr = 60.;
1769 log_msg(NULL, 0, 0, "Using activation times from file %s to distribute prepacing states\n",
1770         param_globals::prepacing_lats);
1771 log_msg(NULL, 1, 0, "Assuming stimulus strength %f uA/uF with duration %f ms for prepacing\n",
1782 size_t numread = read_lats->read_ascii(param_globals::prepacing_lats);
1784 log_msg(NULL, 5, 0, "Failed reading required LATs! Skipping prepacing!");
1793 bool forward = false;
1794 (*sc)(*read_lats, forward);
1798 PetscReal* lp = read_lats->ptr();
1799 for(int i=0; i<read_lats->lsize(); i++)
1800 if(lp[i] < 0.0) lp[i] = param_globals::tend + 10.0;
1802 read_lats->release_ptr(lp);
1807 SF_real LATmin = read_lats->min();
1810 log_msg(0,3,0,
"LAT data is not complete. Skipping prepacing.");
1814 SF_real offset = floor(LATmin / param_globals::prepacing_bcl) * param_globals::prepacing_bcl;
1815 SF_real last_tm = param_globals::prepacing_bcl * param_globals::prepacing_beats;
1818 *read_lats += -offset;
1820 *read_lats += last_tm;
1823 SF_real *save_tm = read_lats->ptr();
1826 for (int ii = 0; ii < miif->N_IIF; ii++) {
1827   if (!miif->N_Nodes[ii]) continue;
1831   for (int kk = 0; kk < miif->N_Nodes[ii]; kk++) {
1832     sorted_save[kk].v1 = save_tm[miif->NodeLists[ii][kk]];
1833     sorted_save[kk].v2 = kk;
1835   std::sort(sorted_save.begin(), sorted_save.end());
1837   size_t lastidx = sorted_save.size() - 1;
1838   int paced = sorted_save[lastidx].v2;
1841   for (double t = 0; t < sorted_save[lastidx].v1; t += param_globals::dt) {
1842     if (fmod(t, param_globals::prepacing_bcl) < stimdur &&
1843         t < param_globals::prepacing_bcl * param_globals::prepacing_beats - 1)
1844       miif->ldata[ii][limpet::Vm][paced] += stimstr * param_globals::dt;
1849     miif->ldata[ii][limpet::Vm][paced] -= miif->ldata[ii][limpet::Iion][paced] * param_globals::dt;
1850     vm[miif->NodeLists[ii][paced]] = miif->ldata[ii][limpet::Vm][paced];
1852 while (csav < miif->N_Nodes[ii] - 1 && t >= sorted_save[csav].v1)
1857 while (csav < miif->N_Nodes[ii] - 1)
1860   for (int k = 0; k < miif->N_Nodes[ii]; k++) vm[miif->NodeLists[ii][k]] = miif->ldata[ii][limpet::Vm][k];
1863 read_lats->release_ptr(save_tm);
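// Editor's note: prepacing thus operates on one representative cell per IMP region: the
// node with the latest activation time is paced with a simple forward-Euler update
// (Vm += stimstr*dt while the stimulus is on, Vm -= Iion*dt every step), and as the
// pacing time passes the LAT of each remaining node (the sorted_save ordering above)
// that node presumably receives a copy of the paced cell's current state, so all nodes
// end up in a state consistent with their activation phase.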
1897 float minDist = 2. / param_globals::imp_region[0].cellSurfVolRatio;
1900 int numpts = rcv.pts.size() / 3;
1903 for (int j=0; j<numpts; j++) {
1909   for (size_t i = 0; i<alg_nod.size(); i++)
1913     cpt = imesh.xyz.data()+loc_nodal_idx*3;
1915     double r = dist(fpt, cpt) + minDist;
1916 dp[loc_petsc_idx] /= r;
1922 if ( (j>=r_start) && (j<r_end) )
1923 ph_r[j-r_start] = phi;
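// Editor's note: the loop above approximates the recovered extracellular potential at
// each recovery point x_j as a sum of point sources located at the intracellular nodes,
//   phi_e(x_j) ~ sum_i  w_i / (|x_j - x_i| + d_min),
// where the weights w_i come from the transmembrane source term and d_min is the
// minimum-distance guard derived from the cell surface-to-volume ratio (minDist above).
// Constant prefactors of the infinite-volume-conductor formula are omitted here.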
1945          param_globals::phie_recovery_file, "mV");
1959 if(vm_igb.x() != vm->gsize()) {
1960   log_msg(0,4,0, "%s error: Vm dimension does not fit to %s file. Aborting recovery! \n",
1961           __func__, param_globals::vofile);
1973 FILE* fd = static_cast<FILE*>(vm_igb.fileptr());
1980 assert(petsc_to_canonical != NULL);
1983 for(int i=0; i<num_io; i++) {
1984   log_msg(0,0,0, "Step %d / %d", i+1, num_io);
1987   if(nread != size_t(vm->gsize())) {
1988     log_msg(0,3,0, "%s warning: read incomplete data slice! Aborting!", __func__);
1994   bool forward = false;
1995 (*petsc_to_canonical)(*vm, forward);
2014 const std::string basename = param_globals::phie_rec_ptf;
2037 assert(param_globals::bidomain == BIDOMAIN);
2062 stimuli.resize(param_globals::num_stim);
2064 for(int i=0; i<param_globals::num_stim; i++) {
2084 if(param_globals::dump2MatLab) {
2085 std::string bsname = param_globals::dump_basename;
2089 fn = bsname + "_Kie.bin";
2100 restr_e, param_globals::num_io_nodes > 0);
2101 if(param_globals::dataout_e)
2106 restr_i, param_globals::num_io_nodes > 0);
2107 if(param_globals::dataout_i)
2122 log_msg(0,0,0, "Solving Laplace problem ..");
2128 log_msg(0,0,0, "Done in %.5f seconds.", dur);
2151 if(sidx != -1) stimuli[sidx].value(val);
2152 else           val = std::nan("NaN");
2160 if(sidx != -1) s_unit = stimuli[sidx].pulse.wave.f_unit;
2173 if(is_current(s.phys.type) && s.phys.total_current) {
2175   if (s.phys.type == I_ex) {
2181 float scale = 1.e12/vol;
2183 s.pulse.strength *= scale;
2186 "%s [Stimulus %d]: current density scaled to %.4g uA/cm^3\n",
2187 s.name.c_str(), s.idx, s.pulse.strength*1.e12);
2189   else if (s.phys.type == I_tm) {
2195     if(alg_idx_map.size() == 0) {
2198       alg_idx_map[n] = lidx;
2205     if(alg_idx_map.count(n)) {
2209       surf = vol * miif->IIF[r]->cgeom().SVratio * param_globals::imp_region[r].volFrac;
2215     surf = get_global(surf, MPI_MAX, PETSC_COMM_WORLD);
2218 s.pulse.strength /= surf;
2220 "%s [Stimulus %d]: current density scaled to %.4g uA/cm^2\n",
2221 s.name.c_str(), s.idx, s.pulse.strength);
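// Editor's note: for "total current" stimuli the requested current is converted into a
// density before application. For extracellular injection (I_ex) the divisor is the
// electrode volume, the 1e12 factor presumably accounting for the um^3-to-cm^3
// conversion (1 cm^3 = 1e12 um^3), yielding uA/cm^3; for transmembrane stimuli (I_tm)
// the divisor is the membrane surface estimated as vol * SVratio * volFrac, yielding
// uA/cm^2.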
SF::scattering * get_permutation(const int mesh_id, const int perm_id, const int dpn)
Get the PETSC to canonical permutation scattering for a given mesh and number of dpn.
SF_real gBath
Bath conductivity.
bool have_dbc_stims(const SF::vector< stimulus > &stimuli)
return whether any stimuli require Dirichlet boundary conditions
int ** NodeLists
local partitioned node lists for each IMP stored
stim_electrode electrode
electrode geometry
dbc_manager * dbc
dbcs require a dbc manager
void init_solver(SF::abstract_linear_solver< T, S > **sol)
igb_output_manager output_manager
class handling the igb output
gvec_data gvec
datastruct holding global IMP state variable output
int * subregtags
FEM tags forming this region.
char * regname
name of region
The mesh storage class. It contains both element and vertex data.
void log_stats(double tm, bool cflg)
virtual void release_ptr(S *&p)=0
vector< T > & get_numbering(SF_nbr nbr_type)
Get the vector defining a certain numbering.
cond_t g
rule to build conductivity tensor
stim_pulse pulse
stimulus wave form
char * dupstr(const char *old_str)
description of material properties in a mesh
void init(sf_vec &vm, sf_vec &phie, int offset)
initializes all datastructs after electric solver setup
physMaterial * material
material parameter description
sf_vec * Irhs
weighted transmembrane currents
void read_indices(SF::vector< T > &idx, const std::string filename, const hashmap::unordered_map< mesh_int_t, mesh_int_t > &dd_map, MPI_Comm comm)
Read indices from a file.
const char * get_tsav_ext(double time)
void setup_phie_recovery_data(phie_recovery_data &data)
void local_petsc_to_nodal_mapping(const meshdata< T, S > &mesh, index_mapping< T > &petsc_to_nodal)
MPI_Comm comm
the parallel mesh is defined on an MPI world
void recover_phie_std(sf_vec &vm, phie_recovery_data &rcv)
SF::vector< mesh_real_t > pts
The phie recovery locations.
void layout_from_count(const T count, vector< T > &layout, MPI_Comm comm)
void compute_surface_mesh(const meshdata< T, S > &mesh, const SF_nbr numbering, const hashmap::unordered_set< T > &tags, meshdata< T, S > &surfmesh)
Compute the surface of a given mesh.
PETSc numbering of nodes.
cond_t
description of electrical tissue properties
#define PETSC_TO_CANONICAL
Permute algebraic data from PETSC to canonical ordering.
physMat_t material_type
ID of physics material.
void solve(sf_mat &Ki, sf_vec &Vmv, sf_vec &tmp_i)
overlapping_layout< T > pl
nodal parallel layout
int calls
# calls for this interval, this is incremented externally
IIF_Mask_t * IIFmask
region for each node
elliptic_solver ellip_solver
Solver for the elliptic bidomain equation.
double timer_val(const int timer_id)
figure out current value of a signal linked to a given timer
void solve(sf_vec &phie_i)
void get_kappa(sf_vec &kappa, IMPregion *ir, limpet::MULTI_IF &miif, double k)
compute the vector
virtual T lsize() const =0
FILE_SPEC logger
The logger of the physic, each physic should have one.
timer_manager * tm_manager
a manager for the various physics timers
double ExVal[3]
extracellular conductivity eigenvalues
std::string timer_unit(const int timer_id)
figure out units of a signal linked to a given timer
The nodal numbering of the reference mesh (the one stored on HD).
void unique_resize(vector< T > &_P)
bool is_current(stim_t type)
uses current as stimulation
void setup_dataout(const int dataout, std::string dataout_vtx, mesh_t grid, SF::vector< mesh_int_t > *&restr, bool async=false)
const char * name
The name of the physic, each physic should have one.
sf_mat * lhs_parab
lhs matrix (CN) to solve parabolic
int all
determine all or first instants of activation
int add_eq_timer(double istart, double iend, int ntrig, double iintv, double idur, const char *iname, const char *poolname=nullptr)
Add a equidistant step timer to the array of timers.
SF::vector< mesh_int_t > vertices
size_t read_ascii(FILE *fd)
double time_step
global reference time step
void savequit()
save state and quit simulator
void constant_total_stimulus_current(SF::vector< stimulus > &stimuli, sf_mat &mass_i, sf_mat &mass_e, limpet::MULTI_IF *miif, FILE_SPEC logger)
const char * get_mesh_type_name(mesh_t t)
get a char* to the name of a mesh type
vector< S > xyz
node coordinates
T * data()
Pointer to the vector's start.
sf_vec * tmp_i1
scratch vector for i-grid
mesh_t
The enum identifying the different meshes we might want to load.
bool trigger(int ID) const
SF_real get_volume_from_nodes(sf_mat &mass, SF::vector< mesh_int_t > &local_idx)
double InVal[3]
intracellular conductivity eigenvalues
int N_IIF
how many different IIF's
void init_stim_info(void)
void assemble_lumped_matrix(abstract_matrix< T, S > &mat, meshdata< mesh_int_t, mesh_real_t > &domain, matrix_integrator< mesh_int_t, mesh_real_t > &integrator)
void close_files_and_cleanup()
close file descriptors
PotType measurand
quantity being monitored
generic_timing_stats IO_stats
void init_logger(const char *filename)
void init_vector(SF::abstract_vector< T, S > **vec)
void rebuild_mass(FILE_SPEC logger)
void region_mask(mesh_t meshspec, SF::vector< RegionSpecs > ®spec, SF::vector< int > ®ionIDs, bool mask_elem, const char *reglist)
classify elements/points as belonging to a region
std::vector< IonIfBase * > IIF
array of IIF's
#define ELEM_PETSC_TO_CANONICAL
Permute algebraic element data from PETSC to canonical ordering.
int mode
toggle mode from standard to reverse
bool value(double &v) const
Get the current value if the stimulus is active.
char * get_file_dir(const char *file)
bool is_init(const abstract_vector< T, S > *v)
sf_mat * phie_mat
lhs matrix to solve elliptic
const T * end() const
Pointer to the vector's end.
waveform_t wform
wave form of stimulus
parabolic_solver parab_solver
Solver for the parabolic bidomain equation.
void compute_IIF(limpet::IonIfBase &pIF, limpet::GlobalData_t **impdata, int n)
Tissue level electrics, main Electrics physics class.
SF::scattering * get_scattering(const int from, const int to, const SF::SF_nbr nbr, const int dpn)
Get a scattering from the global scatter registry.
bool using_legacy_stimuli
flag storing whether legacy stimuli are used
virtual void mult(const abstract_vector< T, S > &x, abstract_vector< T, S > &b) const =0
event detection data structures
void dump_state(char *, float, opencarp::mesh_t gid, bool, unsigned int)
sf_vec * Vmv
global Vm vector
double tot_time
total time, this is incremented externally
virtual void write(const char *filename) const =0
Submesh nodal numbering: The globally ascending sorted reference indices are reindexed.
void assemble_sv_gvec(gvec_data &gvecs, limpet::MULTI_IF *miif)
int max_nodal_edgecount(const meshdata< T, S > &mesh)
Compute the maximum number of node-to-node edges for a mesh.
region based variations of arbitrary material parameters
GlobalData_t *** ldata
data local to each IMP
void forward(abstract_vector< T, S > &in, abstract_vector< T, S > &out, bool add=false)
Forward scattering.
int check_acts(double tm)
check activations at sim time tm
double BathVal[3]
bath conductivity eigenvalues
virtual void set(const vector< T > &idx, const vector< S > &vals, const bool additive=false)=0
std::string name
label stimulus
ActMethod method
method to check whether activation occurred
int timer_idx
the timer index received from the timer manager
void write_data()
write registered data to disk
long d_time
current time instance index
sf_sol * lin_solver
petsc or ginkgo lin_solver
sf_mesh & get_mesh(const mesh_t gt)
Get a mesh by specifying the gridID.
void register_output_sync(sf_vec *inp_data, const mesh_t inp_meshid, const int dpn, const char *name, const char *units, const SF::vector< mesh_int_t > *idx=NULL, bool elem_data=false)
float threshold
threshold for detection of activation
SF::scattering * register_permutation(const int mesh_id, const int perm_id, const int dpn)
Register a permutation between two orderings for a mesh.
size_t read_binary(FILE *fd)
int idx
index in global input stimulus array
void init_matrix(SF::abstract_matrix< T, S > **mat)
manager for dirichlet boundary conditions
#define ALG_TO_NODAL
Scatter algebraic to nodal.
double timer_val(const int timer_id)
figure out current value of a signal linked to a given timer
void destroy()
Currently we only need to close the file logger.
void set_elec_tissue_properties(MaterialType *mtype, Electrics::grid_t g, FILE_SPEC logger)
Fill the RegionSpec of an electrics grid with the associated inputs from the param parameters...
void set_cond_type(MaterialType &m, cond_t type)
void make_global(const vector< T > &vec, vector< T > &out, MPI_Comm comm)
make a parallel vector global
SF::vector< double > el_scale
optionally provided per-element params scale
void get_time(double &tm)
Basic_physic * get_physics(physic_t p, bool error_if_missing)
Convenience function to get a physics.
int write_trace()
write traces to file
bool dbc_update()
check if dbcs have updated
void initialize()
Initialize the Electrics.
int timer_id
timer for stimulus
void open_trace(MULTI_IF *MIIF, int n_traceNodes, int *traceNodes, int *label, opencarp::sf_mesh *imesh)
Set up ionic model traces at some global node numbers.
T get_global(T in, MPI_Op OP, MPI_Comm comm=PETSC_COMM_WORLD)
Do a global reduction on a variable.
int * N_Nodes
#nodes for each IMP
size_t root_count_ascii_lines(std::string file, MPI_Comm comm)
count the lines in an ASCII file
void print_act_log(FILE_SPEC logger, const SF::vector< Activation > &acts, int idx)
int check_quiescence(double tm, double dt)
check for quiescence
V timing(V &t2, const V &t1)
void compute_restr_idx(sf_mesh &mesh, SF::vector< mesh_int_t > &inp_idx, SF::vector< mesh_int_t > &idx)
sig::time_trace wave
wave form of stimulus pulse
void setup(int idx)
Setup from a param stimulus index.
SF::vector< stimulus > stimuli
the electrical stimuli
sf_vec * dphi
Auxiliary vectors.
void rebuild_matrices(MaterialType *mtype, limpet::MULTI_IF &miif, FILE_SPEC logger)
MaterialType mtype[2]
the material types of intra_grid and extra_grid grids.
void update_cwd()
save the current working directory to curdir so that we can switch back to it if needed.
void assemble_matrix(abstract_matrix< T, S > &mat, meshdata< mesh_int_t, mesh_real_t > &domain, matrix_integrator< mesh_int_t, mesh_real_t > &integrator)
Generalized matrix assembly.
opencarp::sf_vec * gdata[NUM_IMP_DATA_TYPES]
data used by all IMPs
void dup_IMP_node_state(IonIfBase &IF, int from, int to, GlobalData_t **localdata)
hm_int count(const K &key) const
Check if key exists.
sf_mat * mass_e
mass matrix for RHS elliptic calc
sf_vec * phie_rec
The phie recovery output vector buffer.
size_t size() const
The current size of the vector.
void log_stats(double tm, bool cflg)
void rebuild_stiffness(MaterialType *mtype, SF::vector< stimulus > &stimuli, FILE_SPEC logger)
int nsubregs
#subregions forming this region
Electrical stimulation functions.
void log_msg(FILE_SPEC out, int level, unsigned char flag, const char *fmt,...)
void init_sv_gvec(gvec_data &GVs, limpet::MULTI_IF *miif, sf_vec &tmpl, igb_output_manager &output_manager)
bool mesh_is_registered(const mesh_t gt)
check whether an SF mesh is set
stim_t type
type of stimulus
void sample_wave_form(stim_pulse &sp, int idx)
sample a signal given in analytic form
virtual void get_ownership_range(T &start, T &stop) const =0
void output_initial_activations()
output one nodal vector of initial activation time
void dump_trace(MULTI_IF *MIIF, limpet::Real time)
grid_t
An electrics grid identifier to distinguish between intra and extra grids.
int numNode
local number of nodes
const T * begin() const
Pointer to the vector's start.
void f_close(FILE_SPEC &f)
Close a FILE_SPEC.
std::vector< base_timer * > timers
vector containing individual timers
const meshdata< mesh_int_t, mesh_real_t > * mesh
the connected mesh
std::string timer_unit(const int timer_id)
figure out units of a signal linked to a given timer
void balance_electrode(SF::vector< stimulus > &stimuli, int balance_from, int balance_to)
sf_mat * mass_i
lumped for parabolic problem
sf_vec * phie_i
phi_e on intracellular grid
sf_vec * IIon
ionic currents
V dist(const vec3< V > &p1, const vec3< V > &p2)
void register_output(sf_vec *inp_data, const mesh_t inp_meshid, const int dpn, const char *name, const char *units, const SF::vector< mesh_int_t > *idx=NULL, bool elem_data=false)
Register a data vector for output.
LAT_detector lat
the activation time detector
void assign(InputIterator s, InputIterator e)
Assign a memory range.
int output_all_activations(FILE_SPEC fp, int *ibuf, double *act_tbuf, int nlacts)
std::int32_t SF_int
Use the general std::int32_t as int type.
void rebuild_matrices(MaterialType *mtype, SF::vector< stimulus > &stimuli, FILE_SPEC logger)
double strength
strength of stimulus
size_t g_numelem
global number of elements
stim_protocol ptcl
applied stimulation protocol used
double SF_real
Use the general double as real type.
void binary_sort(vector< T > &_V)
void translate(int id)
convert legacy definitions to new format
stim_physics phys
physics of stimulus
void apply_stim_to_vector(const stimulus &s, sf_vec &vec, bool add)
virtual void get(const vector< T > &idx, S *out)=0
SF::vector< RegionSpecs > regions
array with region params
void resize(size_t n)
Resize a vector.
sf_vec * get_data(datavec_t d)
Retrieve a petsc data vector from the data registry.
int get_size(MPI_Comm comm=PETSC_COMM_WORLD)
SF::scattering * register_scattering(const int from, const int to, const SF::SF_nbr nbr, const int dpn)
Register a scattering between two grids, or between algebraic and nodal representation of data on the ...
int stimidx_from_timeridx(const SF::vector< stimulus > &stimuli, const int timer_id)
determine link between timer and stimulus
sf_mat * rhs_parab
rhs matrix to solve parabolic
bool is_dbc(stim_t type)
whether stimulus is a Dirichlet type; implies boundary conditions on the matrix
void backward(abstract_vector< T, S > &in, abstract_vector< T, S > &out, bool add=false)
Backward scattering.
phie_recovery_data phie_rcv
struct holding helper data for phie recovery
centralize time management and output triggering
int get_rank(MPI_Comm comm=PETSC_COMM_WORLD)
void compute_restr_idx_async(sf_mesh &mesh, SF::vector< mesh_int_t > &inp_idx, SF::vector< mesh_int_t > &idx)
int postproc_recover_phie()
T local_nodal_to_local_petsc(const meshdata< T, S > &mesh, int rank, T local_nodal)
virtual T gsize() const =0
LAT_detector()
constructor, sets up basic datastructs from global_params
#define UM2_to_CM2
convert um^2 to cm^2
long d_end
final index in multiples of dt
FILE_SPEC f_open(const char *fname, const char *mode)
Open a FILE_SPEC.
void read_points(const std::string basename, const MPI_Comm comm, vector< S > &pts, vector< T > &ptsidx)
Read the points and insert them into a list of meshes.
Container for a PETSc VecScatter.
int add_singlestep_timer(double tg, double idur, const char *iname, const char *poolname=nullptr)