#include "petsc_utils.h"
logger = f_open("electrics.log", param_globals::experiment != 4 ? "w" : "r");
if (param_globals::bidomain || param_globals::extracell_monodomain_stim) {
param_globals::dt, 0, "elec::ref_dt", "TS");
if(strlen(param_globals::phie_rec_ptf) > 0)
if (param_globals::prepacing_bcl > 0)
m->regions.resize(param_globals::num_gregions);
log_msg(logger, 0, 0, "Setting up %s tissue properties for %d regions ..", grid_name, param_globals::num_gregions);
for (size_t i=0; i<m->regions.size(); i++, reg++) {
  if(!strcmp(param_globals::gregion[i].name, "")) {
    snprintf(buf, sizeof buf, ", gregion_%d", int(i));
    param_globals::gregion[i].name = dupstr(buf);
  reg->regname = strdup(param_globals::gregion[i].name);
  reg->nsubregs = param_globals::gregion[i].num_IDs;
  reg->subregtags[j] = param_globals::gregion[i].ID[j];
  log_msg(NULL, 3, ECHO, "Warning: not all %u IDs provided for gregion[%u]!\n", reg->nsubregs, i);
emat->InVal[0] = param_globals::gregion[i].g_il;
emat->InVal[1] = param_globals::gregion[i].g_it;
emat->InVal[2] = param_globals::gregion[i].g_in;
emat->ExVal[0] = param_globals::gregion[i].g_el;
emat->ExVal[1] = param_globals::gregion[i].g_et;
emat->ExVal[2] = param_globals::gregion[i].g_en;
emat->BathVal[0] = param_globals::gregion[i].g_bath;
emat->BathVal[1] = param_globals::gregion[i].g_bath;
emat->BathVal[2] = param_globals::gregion[i].g_bath;
for (int j=0; j<3; j++) {
  emat->InVal[j]   *= 1e-3 * param_globals::gregion[i].g_mult;
  emat->ExVal[j]   *= 1e-3 * param_globals::gregion[i].g_mult;
  emat->BathVal[j] *= 1e-3 * param_globals::gregion[i].g_mult;
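// Note: each conductivity eigenvalue is scaled by the per-region multiplier
// g_mult; the 1e-3 factor is presumably the unit conversion from the S/m
// conductivities given in the input to internal units on micrometer-scale
// meshes (1 S/m = 1e-3 mS/um).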
const char* file = g == Electrics::intra_grid ? param_globals::gi_scale_vec : param_globals::ge_scale_vec;
log_msg(0, 4, 0, "%s warning: number of %s conductivity scaling entries does not match number of elements!",
void Electrics::setup_mappings()
log_msg(logger, 0, 0, "%s: Setting up intracellular algebraic-to-nodal scattering.", __func__);
log_msg(logger, 0, 0, "%s: Setting up intracellular PETSc to canonical permutation.", __func__);
log_msg(logger, 0, 0, "%s: Setting up extracellular algebraic-to-nodal scattering.", __func__);
log_msg(logger, 0, 0, "%s: Setting up extracellular PETSc to canonical permutation.", __func__);
log_msg(logger, 0, 0, "%s: Setting up intra-to-extra scattering.", __func__);
bool check_i2e = false;
if(check_i2e && extra_exists) {
  SF_real* id = intra_testvec->ptr();
  for(size_t i=0; i<intra_alg_nod.size(); i++) {
    id[lpidx] = intra_ref_nbr[intra_alg_nod[i]];
  intra_testvec->release_ptr(id);
  SF_real* ed = extra_testvec->ptr();
  for(size_t i=0; i<extra_alg_nod.size(); i++) {
    ed[lpidx] = extra_ref_nbr[extra_alg_nod[i]];
  extra_testvec->release_ptr(ed);
  i2e_testvec->set(-1.0);
  i2e.forward(*intra_testvec, *i2e_testvec);
  for(size_t i=0; i<extra_alg_nod.size(); i++) {
    auto id = i2e_testvec->get(i);
    auto ed = extra_testvec->get(i);
    if(id > -1 && id != ed)
  log_msg(0,5,0, "Electrics mapping test failed!");
  log_msg(0,5,0, "Electrics mapping test succeeded!");
stimulate_extracellular();
if(param_globals::bidomain == BIDOMAIN)
stimulate_intracellular();
if(param_globals::dump_data & DUMP_IC)
if(param_globals::bidomain == BIDOMAIN)
if(param_globals::bidomain && (param_globals::dump_data & DUMP_IACT)) {
if(param_globals::dump_data & DUMP_IC) {
  Ic[i] = (Ic[i] - Vmv[i]) / (-time_step);
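// Here Ic presumably still holds the Vm values from before the update, so
// (Ic[i] - Vmv[i]) / (-time_step) is the finite-difference estimate
// (Vm_new - Vm_old) / dt, i.e. the capacitive current dumped with DUMP_IC.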
double curtime = timing(t2, t1);
log_msg(NULL, 0, 0, "Balancing stimulus %d with %d %s-wise.", balance_from, balance_to,
        is_current(stimuli[balance_from].phys.type) ? "current" : "voltage");
stimulus & from = stimuli[balance_from];
stimulus & to   = stimuli[balance_to];
void Electrics::balance_electrodes()
for(int i=0; i<param_globals::num_stim; i++) {
  if(param_globals::stim[i].crct.balance != -1) {
    int from = param_globals::stim[i].crct.balance;
void Electrics::setup_stimuli()
bool dumpTrace = true;
stimuli.resize(param_globals::num_stim);
for(int i=0; i<param_globals::num_stim; i++)
s.pulse.wave.write_trace(s.name + ".trc");
double val; s.value(val);
for (size_t i = 0; i < idx.size(); i++) {
vec.set(local_idx, val, add, true);
void Electrics::stimulate_intracellular()
if(param_globals::operator_splitting) {
*ps.tmp_i1 = *ps.IIon;
*ps.tmp_i1 -= *ps.Irhs;
ps.mass_i->mult(*ps.tmp_i1, *ps.Irhs);
*ps.Irhs = *ps.tmp_i1;
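// The parabolic right-hand side current is assembled as tmp_i1 = IIon - Irhs
// (Irhs presumably holding the stimulus currents at this point) and then
// either weighted by the intracellular mass matrix via ps.mass_i->mult(...)
// or copied over directly.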
if(illum_vec == NULL) {
log_msg(0,5,0, "Cannot apply illumination stim: global vector not present!");
void Electrics::clamp_Vm() {
if(s.phys.type == Vm_clmp && s.is_active())
void Electrics::stimulate_extracellular()
if(param_globals::bidomain) {
if(dbcs_have_updated && time_not_final)
for(const stimulus & s : stimuli) {
  if(s.is_active() && s.phys.type == I_ex)
for(int pid=0; pid < mpi_size; pid++) {
  if(mpi_rank == pid) {
    buffsize = sndbuff.size();
  MPI_Bcast(&buffsize, sizeof(size_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
  MPI_Bcast(sndbuff.data(), buffsize*sizeof(mesh_int_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
  mesh_int_t start = layout[mpi_rank], stop = layout[mpi_rank+1];
  if(i >= start && i < stop)
for(int pid=0; pid < mpi_size; pid++) {
  if(mpi_rank == pid) {
    buffsize = sndbuff.size();
  MPI_Bcast(&buffsize, sizeof(size_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
  MPI_Bcast(sndbuff.data(), buffsize*sizeof(mesh_int_t), MPI_BYTE, pid, PETSC_COMM_WORLD);
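// Both loops use the same rank-by-rank broadcast to make every rank's local
// index list visible to all ranks. A minimal, self-contained sketch of the
// pattern (function and variable names here are hypothetical, not openCARP
// API):
#include <mpi.h>
#include <cstddef>
#include <vector>

static void allgather_index_lists(const std::vector<int>& local,
                                  std::vector<int>& global, MPI_Comm comm)
{
  int rank, size;
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);

  for (int pid = 0; pid < size; pid++) {
    // root announces how many entries it will send
    size_t n = (rank == pid) ? local.size() : 0;
    MPI_Bcast(&n, sizeof(size_t), MPI_BYTE, pid, comm);

    // then broadcasts the payload itself
    std::vector<int> buf = (rank == pid) ? local : std::vector<int>(n);
    MPI_Bcast(buf.data(), int(n * sizeof(int)), MPI_BYTE, pid, comm);

    global.insert(global.end(), buf.begin(), buf.end());
  }
}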
void Electrics::setup_output()
restr_i, param_globals::num_io_nodes > 0);
if(param_globals::dataout_i)
if(param_globals::bidomain) {
restr_e, param_globals::num_io_nodes > 0);
if(param_globals::dataout_i)
if(param_globals::dataout_e)
if(param_globals::dump_data & DUMP_IC) {
if(param_globals::num_trace) {
open_trace(ion.miif, param_globals::num_trace, param_globals::trace_node, NULL, &imesh);
void Electrics::dump_matrices()
std::string bsname = param_globals::dump_basename;
if ( param_globals::parab_solve==1 ) {
  fn = bsname + "_Ki_CN.bin";
fn = bsname + "_Ki.bin";
fn = bsname + "_Mi.bin";
if ( param_globals::bidomain ) {
  fn = bsname + "_Kie.bin";
  fn = bsname + "_Me.bin";
val = std::nan("NaN");
s_unit = stimuli[sidx].pulse.wave.f_unit;
for(size_t i = 0; i<stimuli.size(); i++)
double* reg_kappa = new double[miif.N_IIF];
for(int i=0; i<miif.N_IIF; i++)
  reg_kappa[i] = k * miif.IIF[i]->cgeom().SVratio * ir[i].volFrac;
double *kd = kappa.ptr();
for(int i = 0; i < miif.numNode; i++)
  kd[i] = reg_kappa[(int) miif.IIFmask[i]];
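// get_kappa() thus builds a nodal scaling vector: per ionic-model region the
// constant kappa_r = k * (surface-to-volume ratio) * (volume fraction) is
// precomputed, then distributed to each node via the region mask IIFmask.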
for(size_t i=0; i < m.regions.size(); i++) {
void Electrics::setup_solvers()
if (param_globals::bidomain) {
if(param_globals::dump2MatLab)
double min_diff = 1e100;
for(int i=0; i<param_globals::num_tsav; i++)
double diff = fabs(param_globals::tsav[i] - time);
if(min_diff > diff) {
return param_globals::tsav_ext[min_idx];
void Electrics::checkpointing()
snprintf(save_fnm, sizeof save_fnm, "%s.%s.roe", param_globals::write_statef, tsav_ext);
snprintf(save_fnm, sizeof save_fnm, "checkpoint.%.1f.roe", tm.time);
mass_e->init(extra_mesh, dpn, dpn, param_globals::mass_lumping ? 1 : max_row_entries);
int log_flag = param_globals::output_level > 1 ? ECHO : 0;
log_msg(logger, 0, log_flag, "Computed elliptic stiffness matrix in %.3f seconds.", dur);
log_msg(logger, 0, log_flag, "Elliptic lhs matrix enforcing Dirichlet boundaries.");
log_msg(logger, 0, log_flag, "Elliptic lhs matrix Dirichlet enforcing done in %.3f seconds.", dur);
log_msg(logger, 1, ECHO, "Elliptic lhs matrix is singular!");
setup_linear_solver(logger);
log_msg(logger, 0, log_flag, "Initializing elliptic solver in %.5f seconds.", dur);
int log_flag = param_globals::output_level > 1 ? ECHO : 0;
if(param_globals::mass_lumping) {
log_msg(logger, 0, log_flag, "Computed elliptic mass matrix in %.3f seconds.", dur);
void elliptic_solver::setup_linear_solver(FILE_SPEC logger)
tol    = param_globals::cg_tol_ellip;
max_it = param_globals::cg_maxit_ellip;
std::string default_opts;
std::string solver_file;
solver_file = param_globals::ellip_options_file;
if (param_globals::flavor == std::string("ginkgo")) {
  default_opts = std::string(
    "type": "solver::Cg",
    "type": "Iteration",
    "type": "ResidualNorm",
    "reduction_factor": 1e-4
    "type": "solver::Multigrid",
    "type": "multigrid::Pgm",
    "deterministic": true
    "type": "Iteration",
    "type": "preconditioner::Schwarz",
    "type": "preconditioner::Ilu"
    "min_coarse_rows": 8,
    "default_initial_guess": "zero"
} else if (param_globals::flavor == std::string("petsc")) {
  default_opts = std::string("-ksp_type cg -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_max_iter 1 -pc_hypre_boomeramg_strong_threshold 0.0 -options_left");
logger, solver_file.c_str(), default_opts.c_str());
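// For both backends the defaults above are only fallbacks: setup_solver()
// receives the per-equation options file (ellip_options_file here,
// parab_options_file below) alongside default_opts, so a user-supplied file
// can override the built-in CG/multigrid (ginkgo JSON) or CG/boomeramg
// (PETSc) configuration. The parabolic setup_linear_solver() follows the
// same pattern with cg_tol_parab / cg_maxit_parab.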
Ki.mult(Vmv, tmp_i);
auto dur = timing(t1, t0);
auto dur = timing(t1, t0);
if(!(vm_ptr != NULL && iion_ptr != NULL)) {
log_msg(0,5,0, "%s error: global Vm and Iion vectors not properly set up! Ionics seem invalid! Aborting!",
if(!param_globals::operator_splitting)
mass_i->init(intra_mesh, dpn, dpn, param_globals::mass_lumping ? 1 : max_row_entries);
double start, end, period;
int log_flag = param_globals::output_level > 1 ? ECHO : 0;
if( (param_globals::bidomain == MONODOMAIN && param_globals::bidm_eqv_mono) ||
log_msg(logger, 0, log_flag, "Computed parabolic stiffness matrix in %.3f seconds.", dur);
if(param_globals::mass_lumping)
log_msg(logger, 0, log_flag, "Computed parabolic mass matrix in %.3f seconds.", dur);
bool same_nonzero = param_globals::mass_lumping == false;
setup_linear_solver(logger);
log_msg(logger, 0, log_flag, "Initializing parabolic solver in %.5f seconds.", dur);
period = timing(end, start);
void parabolic_solver::setup_linear_solver(FILE_SPEC logger)
tol    = param_globals::cg_tol_parab;
max_it = param_globals::cg_maxit_parab;
std::string default_opts;
std::string solver_file;
solver_file = param_globals::parab_options_file;
if (param_globals::flavor == std::string("ginkgo")) {
  default_opts = std::string(
    "type": "solver::Cg",
    "type": "Iteration",
    "type": "ResidualNorm",
    "reduction_factor": 1e-4
    "type": "preconditioner::Schwarz",
    "type": "preconditioner::Ilu"
} else if (param_globals::flavor == std::string("petsc")) {
  default_opts = std::string("-pc_type bjacobi -sub_pc_type ilu -ksp_type cg");
"parabolic PDE", false, logger, solver_file.c_str(), default_opts.c_str());
case CN:   solve_CN(phie_i);   break;
case O2dT: solve_O2dT(phie_i); break;
default:   solve_EF(phie_i);   break;
void parabolic_solver::solve_CN(sf_vec & phie_i)
if (param_globals::bidomain == BIDOMAIN) {
*tmp_i2 *= 1.0 - param_globals::theta;
if(!param_globals::operator_splitting)
auto dur = timing(t1, t0);
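// The (1 - theta) weighting above is the explicit part of the
// Crank-Nicolson / theta scheme, which in its usual form reads
//   (M - theta*dt*K) v^{n+1} = (M + (1-theta)*dt*K) v^n + dt*rhs,
// with theta = 0.5 giving classical Crank-Nicolson (a sketch of the scheme,
// not the exact matrices assembled here).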
void parabolic_solver::solve_O2dT(sf_vec & phie_i)
if (param_globals::bidomain == BIDOMAIN) {
void parabolic_solver::solve_EF(sf_vec & phie_i)
if (param_globals::bidomain == BIDOMAIN) {
if(param_globals::operator_splitting == false)
char* prvSimDir = strlen(param_globals::start_statef) ?
const char* extn = ".dat";
int addLATs = param_globals::compute_APD ? 2 : 0;
bool have_sentinel = param_globals::t_sentinel > 0.0;
bool need_to_add_sentinel = have_sentinel && (param_globals::sentinel_ID < 0);
addLATs += need_to_add_sentinel ? 1 : 0;
acts.resize(param_globals::num_LATs + addLATs);
for (int i = 0; i < param_globals::num_LATs; i++)
if (param_globals::lats[i].method <= 0 || (param_globals::lats[i].measurand == PHIE && !param_globals::bidomain)) {
  log_msg(NULL, 3, 0, "Phie-based LAT measurement requires bidomain >= 1. Ignoring lats[%d].", i);
acts[j].threshold = param_globals::lats[i].threshold;
acts[j].mode = param_globals::lats[i].mode;
acts[j].all = param_globals::lats[i].all;
acts[j].measurand = (PotType)param_globals::lats[i].measurand;
acts[j].ID = param_globals::lats[i].ID;
acts[j].fout = NULL;
if(param_globals::lats[i].all) {
  acts[j].fname = (char*) malloc((strlen(param_globals::lats[i].ID)+strlen(extn)+1)*sizeof(char));
  snprintf(acts[j].fname, strlen(param_globals::lats[i].ID)+strlen(extn)+1, "%s%s", param_globals::lats[i].ID, extn);
char prfx[] = "init_acts_";
int max_len = strlen(prfx) + strlen(param_globals::lats[i].ID) + strlen(extn) + 1;
acts[j].fname = (char*) malloc(max_len*sizeof(char));
snprintf(acts[j].fname, max_len, "%s%s%s", prfx, param_globals::lats[i].ID, extn);
if(prvSimDir != NULL) {
  int len_fname = strlen(prvSimDir)+strlen(acts[j].fname)+2;
  acts[j].prv_fname = (char*) malloc(len_fname*sizeof(char));
  snprintf(acts[j].prv_fname, len_fname, "%s/%s", prvSimDir, acts[j].fname);
if(param_globals::compute_APD) {
acts[j].threshold = param_globals::actthresh;
acts[j].fout = NULL;
acts[j].threshold = param_globals::recovery_thresh;
acts[j].fname = dupstr("vm_repolarisation.dat");
sntl.ID = param_globals::sentinel_ID;
if(need_to_add_sentinel) {
acts[j].threshold = param_globals::actthresh;
acts[j].fout = NULL;
if(prvSimDir) free(prvSimDir);
const Activation & act = acts[idx];
log_msg(logger, 0, 0, "LAT detector [%2d]", idx);
log_msg(logger, 0, 0, "-----------------\n");
log_msg(logger, 0, 0, "Measurand: %s", act.measurand ? "Phie" : "Vm");
log_msg(logger, 0, 0, "All: %s", act.all ? "All" : "Only first");
log_msg(logger, 0, 0, "Method: %s", act.method==ACT_DT ? "Derivative" : "Threshold crossing");
char buf[64], gt[2], sgn[2];
snprintf(sgn, sizeof sgn, "%s", act.mode ? "-" : "+");
snprintf(gt, sizeof gt, "%s", act.mode ? "<" : ">");
snprintf(buf, sizeof buf, "Maximum %sdf/dt %s %.2f mV", sgn, gt, act.threshold);
snprintf(buf, sizeof buf, "Intersection %sdf/dt with %.2f", sgn, act.threshold);
log_msg(logger, 0, 0, "Mode: %s", buf);
log_msg(logger, 0, 0, "Threshold: %.2f mV\n", act.threshold);
log_msg(0,0,5, "There seems to be no EP defined. LAT detector requires active EP! Aborting LAT setup!");
for(size_t i = 0; i < acts.size(); ++i) {
acts[i].phi->shallow_copy(!acts[i].measurand ? vm : phie);
acts[i].offset = offset;
log_msg(NULL, 2, 0, "Detection of -df/dt|max not implemented, +df/dt|max will be detected.");
acts[i].ibuf = (int *)malloc(acts[i].phi->lsize()*sizeof(int));
acts[i].actbuf = (double *)malloc(acts[i].phi->lsize()*sizeof(double));
acts[i].tm->set(-1.);
if(acts[i].prv_fname != NULL) {
  size_t nread = acts[i].tm->read_ascii(acts[i].prv_fname);
  log_msg(NULL, 2, ECHO, "Warning: Initialization of LAT[%2d] failed.", i);
if(acts[i].prv_fname != NULL) {
  log_msg(NULL, 2, 0, "Copying over of previous activation file not implemented.\n");
  f_close(in);
  log_msg(NULL, 3, 0, "Warning: Initialization in %s - \n"
                      "Failed to read activation file %s.\n", __func__, acts[i].prv_fname);
for (int i=0; i<nlacts; i++)
  fprintf(fp->fd, "%d\t%.6f\n", ibuf[i], act_tbuf[i]);
for (int j=1; j<numProc; j++) {
  MPI_Recv(&acts, 1, MPI_INT, j, 110, PETSC_COMM_WORLD, &status);
  MPI_Recv(buf_inds.data(), acts, MPI_INT, j, 110, PETSC_COMM_WORLD, &status);
  MPI_Recv(buf_acts.data(), acts, MPI_DOUBLE, j, 110, PETSC_COMM_WORLD, &status);
  for(int ii=0; ii<acts; ii++)
    fprintf(fp->fd, "%d\t%.6f\n", buf_inds[ii], buf_acts[ii]);
MPI_Send(&nlacts, 1, MPI_INT, 0, 110, PETSC_COMM_WORLD);
MPI_Send(ibuf, nlacts, MPI_INT, 0, 110, PETSC_COMM_WORLD);
MPI_Send(act_tbuf, nlacts, MPI_DOUBLE, 0, 110, PETSC_COMM_WORLD);
MPI_Bcast(&gacts, 1, MPI_INT, 0, PETSC_COMM_WORLD);
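// output_all_activations() funnels everything through rank 0: the root
// writes its own activations, then receives each worker's count and
// index/time buffers in turn (message tag 110) and appends them to the file,
// while the workers send and wait. The final broadcast shares the global
// activation count gacts with all ranks.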
switch (aptr->method) {
lacts = check_cross_threshold(*aptr->phi, *aptr->phip, tm,
                              aptr->ibuf, aptr->actbuf, aptr->threshold, aptr->mode);
lacts = check_mx_derivative(*aptr->phi, *aptr->phip, tm,
                            aptr->ibuf, aptr->actbuf, *aptr->dvp0, *aptr->dvp1,
                            aptr->threshold, aptr->mode);
a = aptr->tm->ptr();
for(int j=0; j<lacts; j++) {
  aptr->ibuf[j] = canon_nbr[nodal_idx] + aptr->offset;
  if(a[aptr->ibuf[j]] == -1)
    a[aptr->ibuf[j]] = aptr->actbuf[j];
aptr->tm->release_ptr(a);
MPI_Allreduce(MPI_IN_PLACE, &lacts, 1, MPI_INT, MPI_SUM, PETSC_COMM_WORLD);
aptr->nacts = nacts;
static int savequitFlag = 0;
int numNodesActivated = -1;
log_msg(0, 0, ECHO | NONL, "================================================================================================\n");
log_msg(0, 0, ECHO | NONL, "%s() WARNING: simulation is configured to savequit() after %.2f ms of quiescence\n", __func__, sntl.t_window);
log_msg(0, 0, ECHO | NONL, "================================================================================================\n");
return numNodesActivated;
int LAT_detector::check_cross_threshold(sf_vec & vm, sf_vec & vmp, double tm,
                                        int *ibuf, double *actbuf, float threshold, int mode)
int lsize = vm.lsize();
int nacts = 0, gnacts = 0;
for (int i=0; i<lsize; i++) {
  bool triggered = false;
  triggered = p[i] <= threshold && c[i] > threshold; }
  triggered = p[i] >= threshold && c[i] < threshold;
  double tact = tm - param_globals::dt + (threshold-p[i])/(c[i]-p[i])*sgn*param_globals::dt;
  actbuf[nacts] = tact;
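// The activation time is found by linear interpolation between the previous
// sample p[i] (at tm - dt) and the current sample c[i] (at tm):
//   tact = (tm - dt) + dt * (threshold - p[i]) / (c[i] - p[i])
// e.g. p = -80 mV, c = 0 mV, threshold = -60 mV, dt = 0.1 ms at tm = 10 ms
// gives tact = 9.9 + 0.1 * 20/80 = 9.925 ms (the sgn factor in the line
// above is ignored in this example).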
int LAT_detector::check_mx_derivative(sf_vec & vm, sf_vec & vmp, double tm,
                                      int *ibuf, double *actbuf, sf_vec & dvp0, sf_vec & dvp1,
                                      float threshold, int mode)
int nacts = 0, gnacts = 0;
double tact, dt2 = 2 * param_globals::dt;
int lsize = vm.lsize();
for (int i=0; i<lsize; i++) {
  dvdt = dv/param_globals::dt;
  ddv0 = pd1[i]-pd0[i];
  if (dvdt>=threshold && ddv0>0 && ddv1<0) {
    tact = tm-dt2+(ddv0/(ddv0-ddv1))*param_globals::dt;
    actbuf[nacts] = tact;
vmp.release_ptr(p);
dvp0.release_ptr(pd0);
dvp1.release_ptr(pd1);
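// Maximum-derivative detection: dvdt must exceed the threshold while the
// second difference of Vm changes sign (ddv0 > 0, ddv1 < 0), i.e. dV/dt has
// just passed through a local maximum; the zero crossing of the second
// difference is then located by linear interpolation, two time steps (dt2)
// back from tm.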
bool forward = true;
for (size_t i = 0; i < acts.size(); i++) {
  (*sc)(*acts[i].tm, forward);
  acts[i].tm->write_ascii(acts[i].fname, false);
void Electrics::prepace() {
log_msg(NULL, 0, 0, "Using activation times from file %s to distribute prepacing states\n",
        param_globals::prepacing_lats);
log_msg(NULL, 0, 0, "Assuming stimulus strength %f uA/uF with duration %f ms for prepacing\n",
        param_globals::prepacing_stimstr, param_globals::prepacing_stimdur);
size_t numread = read_lats->read_ascii(param_globals::prepacing_lats);
log_msg(NULL, 5, 0, "Failed reading required LATs! Skipping prepacing!");
bool forward = false;
(*sc)(*read_lats, forward);
PetscReal* lp = read_lats->ptr();
for(int i=0; i<read_lats->lsize(); i++)
  if(lp[i] < 0.0) lp[i] = param_globals::tend + 10.0;
read_lats->release_ptr(lp);
SF_real LATmin = read_lats->min();
log_msg(0, 3, 0, "LAT data is not complete. Skipping prepacing.");
SF_real offset = floor(LATmin / param_globals::prepacing_bcl) * param_globals::prepacing_bcl;
SF_real last_tm = param_globals::prepacing_bcl * param_globals::prepacing_beats;
*read_lats += -offset;
*read_lats += last_tm;
SF_real *save_tm = read_lats->ptr();
for (int ii = 0; ii < miif->N_IIF; ii++) {
  if (!miif->N_Nodes[ii]) continue;
  for (int kk = 0; kk < miif->N_Nodes[ii]; kk++) {
    sorted_save[kk].v1 = save_tm[miif->NodeLists[ii][kk]];
    sorted_save[kk].v2 = kk;
  std::sort(sorted_save.begin(), sorted_save.end());
  size_t lastidx = sorted_save.size() - 1;
  int paced = sorted_save[lastidx].v2;
  for (double t = 0; t < sorted_save[lastidx].v1; t += param_globals::dt) {
    if (fmod(t, param_globals::prepacing_bcl) < param_globals::prepacing_stimdur &&
        t < param_globals::prepacing_bcl * param_globals::prepacing_beats - 1)
      miif->ldata[ii][limpet::Vm][paced] += param_globals::prepacing_stimstr * param_globals::dt;
    miif->ldata[ii][limpet::Vm][paced] -= miif->ldata[ii][limpet::Iion][paced] * param_globals::dt;
    vm[miif->NodeLists[ii][paced]] = miif->ldata[ii][limpet::Vm][paced];
    while (csav < miif->N_Nodes[ii] - 1 && t >= sorted_save[csav].v1)
  while (csav < miif->N_Nodes[ii] - 1)
  for (int k = 0; k < miif->N_Nodes[ii]; k++) vm[miif->NodeLists[ii][k]] = miif->ldata[ii][limpet::Vm][k];
read_lats->release_ptr(save_tm);
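// Prepacing strategy: per ionic-model region only one representative node
// ("paced", the one with the latest activation time) is actually integrated
// with forward-Euler updates (stimulus charge injection plus -Iion*dt);
// nodes with earlier activation times appear to receive a copy of the paced
// node's state once the running time passes their own LAT, so the whole
// region ends up phase-shifted according to the activation map.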
if (!rcv.pts.size())
log_msg(0,0,5, "There seems to be no EP defined. Phie recovery requires active EP! Aborting!");
sf_mat & Ki = *elec->parab_solver.rhs_parab;
vm.get_ownership_range(start, end);
rcv.phie_rec->get_ownership_range(r_start, r_end);
SF_real *ph_r = rcv.phie_rec->ptr();
float minDist = 2. / param_globals::imp_region[0].cellSurfVolRatio;
Ki.mult(vm, *rcv.Im);
int numpts = rcv.pts.size() / 3;
for (int j=0; j<numpts; j++) {
  fpt = rcv.pts.data() + j*3;
  *rcv.dphi = *rcv.Im;
  SF_real* dp = rcv.dphi->ptr();
  for (size_t i = 0; i<alg_nod.size(); i++)
    cpt = imesh.xyz.data()+loc_nodal_idx*3;
    double r = dist(fpt, cpt) + minDist;
    dp[loc_petsc_idx] /= r;
  rcv.dphi->release_ptr(dp);
  SF_real phi = rcv.dphi->sum() / 4. / M_PI / rcv.gBath;
  if ( (j>=r_start) && (j<r_end) )
    ph_r[j-r_start] = phi;
rcv.phie_rec->release_ptr(ph_r);
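// This implements the standard point-source phie recovery: with the
// transmembrane currents Im = Ki * Vm acting as point sources in an
// unbounded bath of conductivity gBath, the extracellular potential at
// recovery point x_j is
//   phie(x_j) = 1/(4*pi*gBath) * sum_i Im_i / |x_j - x_i|,
// where minDist regularizes the singularity for points close to a source
// node.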
log_msg(0,0,5, "There seems to be no EP defined. Phie recovery requires active EP! Aborting!");
phie_recovery_data & phie_rcv = elec->phie_rcv;
elec->output_manager.close_files_and_cleanup();
igb_output_manager phie_rec_out;
phie_rec_out.register_output(phie_rcv.phie_rec, phie_recv_msh, 1,
                             param_globals::phie_recovery_file, "mV");
if(vm_igb.x() != vm->gsize()) {
log_msg(0,4,0, "%s error: Vm dimension does not match %s file. Aborting recovery!\n",
        __func__, param_globals::vofile);
FILE* fd = static_cast<FILE*>(vm_igb.fileptr());
assert(petsc_to_canonical != NULL);
for(int i=0; i<num_io; i++) {
  log_msg(0,0,0, "Step %d / %d", i+1, num_io);
  size_t nread = vm->read_binary<float>(fd);
  if(nread != size_t(vm->gsize())) {
    log_msg(0,3,0, "%s warning: read incomplete data slice! Aborting!", __func__);
  bool forward = false;
  (*petsc_to_canonical)(*vm, forward);
  phie_rec_out.write_data();
phie_rec_out.close_files_and_cleanup();
log_msg(0,0,5, "There seems to be no EP defined. Phie recovery requires active EP! Aborting!");
const std::string basename = param_globals::phie_rec_ptf;
data.gBath = static_cast<elecMaterial*>(intra_regions[0].material)->BathVal[0];
assert(param_globals::bidomain == BIDOMAIN);
stimuli.resize(param_globals::num_stim);
for(int i=0; i<param_globals::num_stim; i++) {
if(param_globals::dump2MatLab) {
  std::string bsname = param_globals::dump_basename;
  fn = bsname + "_Kie.bin";
restr_e, param_globals::num_io_nodes > 0);
if(param_globals::dataout_e)
restr_i, param_globals::num_io_nodes > 0);
if(param_globals::dataout_i)
log_msg(0,0,0, "Solving Laplace problem ..");
log_msg(0,0,0, "Done in %.5f seconds.", dur);
if(sidx != -1) stimuli[sidx].value(val);
else val = std::nan("NaN");
if(sidx != -1) s_unit = stimuli[sidx].pulse.wave.f_unit;
for(stimulus & s : stimuli) {
  if(is_current(s.phys.type) && s.phys.total_current) {
    if (s.phys.type == I_ex) {
      float scale = 1.e12/vol;
      s.pulse.strength *= scale;
      "%s [Stimulus %d]: current density scaled to %.4g uA/cm^3\n",
      s.name.c_str(), s.idx, s.pulse.strength);
    else if (s.phys.type == I_tm) {
      if(alg_idx_map.size() == 0) {
        alg_idx_map[n] = lidx;
      if(alg_idx_map.count(n)) {
        surf = vol * miif->IIF[r]->cgeom().SVratio * param_globals::imp_region[r].volFrac;
      surf = get_global(surf, MPI_MAX, PETSC_COMM_WORLD);
      s.pulse.strength /= surf;
      "%s [Stimulus %d]: current density scaled to %.4g uA/cm^2\n",
      s.name.c_str(), s.idx, s.pulse.strength);
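// Total-current stimuli are converted to densities so that the delivered
// current stays constant regardless of electrode size: extracellular (I_ex)
// strengths are divided by the electrode volume (the 1e12 factor presumably
// converting the um^3 mesh volume to cm^3, yielding uA/cm^3), while
// transmembrane (I_tm) strengths are divided by the membrane surface
// estimated as volume * SVratio * volFrac, yielding uA/cm^2.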