Doxygen 1.9.1
Toolkit for Adaptive Stochastic Modeling and Non-Intrusive ApproximatioN: Tasmanian v8.2 (development)
tsgOptimizationUtils.hpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2022, Miroslav Stoyanov & Weiwei Kong
3  *
4  * This file is part of
5  * Toolkit for Adaptive Stochastic Modeling And Non-Intrusive ApproximatioN: TASMANIAN
6  *
7  * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
8  * conditions are met:
9  *
10  * 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
11  *
12  * 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions
13  * and the following disclaimer in the documentation and/or other materials provided with the distribution.
14  *
15  * 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse
16  * or promote products derived from this software without specific prior written permission.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
19  * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20  * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
21  * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
22  * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
23  * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
24  * POSSIBILITY OF SUCH DAMAGE.
25  *
26  * UT-BATTELLE, LLC AND THE UNITED STATES GOVERNMENT MAKE NO REPRESENTATIONS AND DISCLAIM ALL WARRANTIES, BOTH EXPRESSED AND
27  * IMPLIED. THERE ARE NO EXPRESS OR IMPLIED WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, OR THAT THE USE OF
28  * THE SOFTWARE WILL NOT INFRINGE ANY PATENT, COPYRIGHT, TRADEMARK, OR OTHER PROPRIETARY RIGHTS, OR THAT THE SOFTWARE WILL
29  * ACCOMPLISH THE INTENDED RESULTS OR THAT THE SOFTWARE OR ITS USE WILL NOT RESULT IN INJURY OR DAMAGE. THE USER ASSUMES
30  * RESPONSIBILITY FOR ALL LIABILITIES, PENALTIES, FINES, CLAIMS, CAUSES OF ACTION, AND COSTS AND EXPENSES, CAUSED BY, RESULTING
31  * FROM OR ARISING OUT OF, IN WHOLE OR IN PART THE USE, STORAGE OR DISPOSAL OF THE SOFTWARE.
32  */
33 
34 #ifndef __TASMANIAN_OPTIM_ENUMERATES_HPP
35 #define __TASMANIAN_OPTIM_ENUMERATES_HPP
36 
37 #include "TasmanianDREAM.hpp"
38 
58 namespace TasOptimization {
59 
69  double residual;
70 };
71 
/*!
 * \brief Checks that a variable's actual size matches its expected size.
 *
 * \param method_name  Name of the calling method, used only to build the error message.
 * \param var_name     Name of the variable being checked, used only to build the error message.
 * \param var_size     The actual size of the variable.
 * \param exp_size     The size the variable is expected to have.
 *
 * \throws std::runtime_error if \b var_size differs from \b exp_size.
 *
 * Note: the strings are taken by const reference; the original signature took them by
 * value, which copied both strings on every call (clang-tidy performance-unnecessary-value-param).
 */
inline void checkVarSize(const std::string &method_name, const std::string &var_name, const int var_size, const int exp_size) {
    if (var_size != exp_size) {
        // Error text is part of the observable behavior; kept verbatim.
        throw std::runtime_error("Size of " + var_name + " (" + std::to_string(var_size) + ") in the function " + method_name +
                                 "() is not equal to its expected value of (" + std::to_string(exp_size) + ")");
    }
}
86 
87 // Functions used in optimization.
88 
/*!
 * \brief Generic non-batched objective function signature: maps a single point \b x to its objective value.
 */
using ObjectiveFunctionSingle = std::function<double(const std::vector<double> &x)>;

/*!
 * \brief Generic batched objective function signature: \b x_batch holds points in contiguous strides
 * and the value of each point is written into the corresponding entry of \b fval_batch.
 */
using ObjectiveFunction = std::function<void(const std::vector<double> &x_batch, std::vector<double> &fval_batch)>;

/*!
 * \brief Wraps a single-point objective \b f_single into a batched TasOptimization::ObjectiveFunction.
 *
 * \param num_dimensions  Number of entries per point in the flattened batch.
 * \param f_single        Objective evaluated one point at a time.
 *
 * \returns A batched objective that slices the flat input into points of size \b num_dimensions
 *          and evaluates \b f_single on each slice. The output vector \b fval_batch must already
 *          be sized to hold one value per point; it is written in place, never resized.
 */
inline ObjectiveFunction makeObjectiveFunction(const int num_dimensions, const ObjectiveFunctionSingle f_single) {
    return [num_dimensions, f_single](const std::vector<double> &x_batch, std::vector<double> &fval_batch)->void {
        const int num_batch = static_cast<int>(x_batch.size() / num_dimensions);
        std::vector<double> point(num_dimensions); // scratch buffer reused across the batch
        for (int j = 0; j < num_batch; j++) {
            const auto first = x_batch.begin() + j * num_dimensions;
            std::copy(first, first + num_dimensions, point.begin());
            fval_batch[j] = f_single(point);
        }
    };
}
143 
/*!
 * \brief Generic non-batched gradient function signature: writes the gradient at \b x_single into \b grad.
 */
using GradientFunctionSingle = std::function<void(const std::vector<double> &x_single, std::vector<double> &grad)>;

/*!
 * \brief Generic non-batched projection function signature: writes the projection of \b x_single into \b proj.
 */
using ProjectionFunctionSingle = std::function<void(const std::vector<double> &x_single, std::vector<double> &proj)>;

/*!
 * \brief Identity projection: copies \b x element-by-element into \b y.
 * \b y must already be at least as large as \b x; it is written in place and never resized.
 */
inline void identity(const std::vector<double> &x, std::vector<double> &y) { std::copy_n(x.begin(), x.size(), y.begin()); }
182 
/*!
 * \brief Computes the stationarity residual used as a stopping criterion (e.g., in gradient descent).
 *
 * \param x       Current iterate.
 * \param x0      Previous iterate (same length as \b x — not checked here).
 * \param gx      Gradient at \b x.
 * \param gx0     Gradient at \b x0.
 * \param lambda  Step size of the update that produced \b x.
 *
 * \returns The Euclidean norm of the per-coordinate subdifferential estimate
 *          (x0[i] - x[i]) / lambda + gx[i] - gx0[i].
 */
inline double computeStationarityResidual(const std::vector<double> &x, const std::vector<double> &x0, const std::vector<double> &gx,
                                          const std::vector<double> &gx0, const double lambda) {
    double sum_of_squares = 0.0;
    const size_t num_entries = x.size();
    for (size_t k = 0; k < num_entries; k++) {
        // Evaluation order matches the reference formula exactly: ((x0-x)/lambda + gx) - gx0.
        const double component = (x0[k] - x[k]) / lambda + gx[k] - gx0[k];
        sum_of_squares += component * component;
    }
    return std::sqrt(sum_of_squares);
}
203 
204 
205 } // End namespace
206 
207 #endif
DiffeRential Evolution Adaptive Metropolis methods.
std::function< double(const std::vector< double > &x)> ObjectiveFunctionSingle
Generic non-batched objective function signature.
Definition: tsgOptimizationUtils.hpp:102
std::function< void(const std::vector< double > &x_single, std::vector< double > &grad)> GradientFunctionSingle
Generic non-batched gradient function signature.
Definition: tsgOptimizationUtils.hpp:159
void identity(const std::vector< double > &x, std::vector< double > &y)
Generic identity projection function.
Definition: tsgOptimizationUtils.hpp:181
std::function< void(const std::vector< double > &x_single, std::vector< double > &proj)> ProjectionFunctionSingle
Generic non-batched projection function signature.
Definition: tsgOptimizationUtils.hpp:175
ObjectiveFunction makeObjectiveFunction(const int num_dimensions, const ObjectiveFunctionSingle f_single)
Creates a TasOptimization::ObjectiveFunction object from a TasOptimization::ObjectiveFunctionSingle o...
Definition: tsgOptimizationUtils.hpp:133
void checkVarSize(const std::string method_name, const std::string var_name, const int var_size, const int exp_size)
Definition: tsgOptimizationUtils.hpp:80
double computeStationarityResidual(const std::vector< double > &x, const std::vector< double > &x0, const std::vector< double > &gx, const std::vector< double > &gx0, const double lambda)
Definition: tsgOptimizationUtils.hpp:194
std::function< void(const std::vector< double > &x_batch, std::vector< double > &fval_batch)> ObjectiveFunction
Generic batched objective function signature.
Definition: tsgOptimizationUtils.hpp:124
Encapsulates the Tasmanian Optimization module.
Definition: TasmanianOptimization.hpp:86
Definition: tsgOptimizationUtils.hpp:65
int performed_iterations
The number of iterations performed by the current optimization call.
Definition: tsgOptimizationUtils.hpp:67
double residual
The current residual, e.g., the stationarity residual for the gradient descent.
Definition: tsgOptimizationUtils.hpp:69