// Per-thread accumulator for a scalar objective value; used with
// maybe_parallel_for so each thread sums into its own slot, and the
// partial values are reduced after the parallel region (no contention).
10 class LocalThreadScalarStorage
15 LocalThreadScalarStorage()
// Per-thread accumulator for a matrix quantity (adjoint RHS columns,
// gradient vectors); reduced by summation after the parallel region.
21 class LocalThreadMatStorage
// NOTE(review): `col` defaults to 1 so the same storage can hold a plain
// vector (gradient) — presumably row/col size the internal mat; confirm
// against the constructor body, which is outside this view.
26 class LocalThreadMatStorage — constructor fragment follows.
26 LocalThreadMatStorage(
const int row,
const int col = 1)
// Quadrature weights for integrating the objective over the time steps.
35 std::vector<double> weights;
// Trapezoidal rule: endpoint weights are dt/2 (interior weights — set
// outside this view — are presumably dt each).
43 weights[0] =
dt_ / 2.;
44 weights[weights.size() - 1] =
dt_ / 2.;
// Composite Simpson's rule: endpoint weights are dt/3.
48 weights[0] =
dt_ / 3.;
49 weights[weights.size() - 1] =
dt_ / 3.;
// Interior nodes alternate: 4dt/3 on one parity, 2dt/3 on the other.
// NOTE(review): `i < weights.size() - 1` mixes signed int with size_t —
// fine for non-empty weights, but worth a cast or std::ssize.
50 for (
int i = 1; i < weights.size() - 1; i++)
53 weights[i] =
dt_ * 4. / 3.;
// Composite Simpson's rule: even-parity interior weight is 2*dt/3
// (was dt_ * 2. / 4. — i.e. dt/2, a trapezoidal-style weight that breaks
// the Simpson pattern dt/3, 4dt/3, 2dt/3, ..., 4dt/3, dt/3 established by
// the surrounding branches).
55 weights[i] =
dt_ * 2. / 3.;
// Explicit step selection: spread a unit total weight uniformly over the
// user-listed time steps. NOTE(review): no dt_ factor here — presumably
// the weights vector was zero-initialized before this loop; confirm.
66 for (
const int step :
steps_)
// Step 0 is excluded (step > 0) — presumably the initial condition does
// not contribute. int vs size_t comparison in the upper bound.
68 assert(step > 0 && step < weights.size());
69 weights[step] += 1. /
steps_.size();
// Parallel chunk [start, end): accumulate the weighted per-step objective
// values into this thread's local storage.
86 for (
int i = start; i < end; i++)
90 const double tmp =
obj_->value_unweighted_step(i,
x);
// Total contribution of step i = quadrature weight * objective weight * value.
91 local_storage.val += (weights[i] *
obj_->weight()) * tmp;
// Serial reduction: sum the per-thread partial values into the result.
96 for (
const LocalThreadScalarStorage &local_storage : storage)
97 value += local_storage.val;
// One column per time step: column i holds the weighted adjoint RHS for step i.
// NOTE(review): `terms` is accumulated with += in the reduction below, but no
// zero-initialization is visible here — a default-constructed MatrixXd is 0x0;
// confirm a setZero/resize happens before the reduction.
103 Eigen::MatrixXd terms;
// Parallel chunk [start, end): fill this thread's local matrix.
112 for (
int i = start; i < end; i++)
116 local_storage.mat.col(i) = weights[i] *
obj_->compute_adjoint_rhs_step(i,
x, state);
// If the objective at step i also depends on the previous step's solution,
// fold that contribution into column i-1.
117 if (
obj_->depends_on_step_prev() && i > 0)
// Accumulate (do NOT overwrite): col(i-1) already holds the weighted RHS
// assigned when the loop visited step i-1. Plain `=` here discarded that
// term. At a chunk boundary (i == start) col(i-1) is untouched in this
// thread's zero-initialized local mat, so += is correct there too, with
// the cross-thread sum happening in the reduction over `storage`.
118 local_storage.mat.col(i - 1) += weights[i] *
obj_->compute_adjoint_rhs_step_prev(i,
x, state);
// Serial reduction: sum every thread's local matrix into `terms`.
122 for (
const LocalThreadMatStorage &local_storage : storage)
123 terms += local_storage.mat;
// Gradient has one entry per design variable; start from zero since the
// per-thread contributions below are summed into it.
129 gradv.setZero(
x.size());
// Parallel chunk [start, end): accumulate weighted per-step partial gradients.
137 for (
int i = start; i < end; i++)
// `tmp` (declared outside this view) receives step i's unweighted partial gradient.
141 obj_->compute_partial_gradient_step(i,
x, tmp);
142 local_storage.mat += weights[i] * tmp;
// Serial reduction: sum every thread's local accumulator into gradv.
146 for (
const LocalThreadMatStorage &local_storage : storage)
147 gradv += local_storage.mat;
// Thin forwarding wrappers: this form delegates step validity, line-search
// hooks, and solution-change notifications straight to the wrapped objective.
158 return obj_->is_step_valid(x0, x1);
// Largest admissible step length between x0 and x1, per the wrapped objective.
163 return obj_->max_step_size(x0, x1);
168 obj_->line_search_begin(x0, x1);
173 obj_->line_search_end();
// Called after each solver step with the step data.
178 obj_->post_step(data);
// Notify the objective that the full solution vector changed.
184 obj_->solution_changed(new_x);
// Notify the objective that the solution at time step i changed.
190 obj_->solution_changed_step(i, new_x);
195 return obj_->is_step_collision_free(x0, x1);
Main class that contains the PolyFEM solver and all of its state.
auto & get_local_thread_storage(Storages &storage, int thread_id)
auto create_thread_storage(const LocalStorage &initial_local_storage)
void maybe_parallel_for(int size, const std::function< void(int, int, int)> &partial_for)