-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlambda_strategy.html
422 lines (397 loc) · 24.1 KB
/
lambda_strategy.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.1">
<title>selection.lambda_strategy API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin-bottom:1em}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
})</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>selection.lambda_strategy</code></h1>
</header>
<section id="section-intro">
</section>
<section>
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-classes">Classes</h2>
<dl>
<dt id="selection.lambda_strategy.ConstantStrategy"><code class="flex name class">
<span>class <span class="ident">ConstantStrategy</span></span>
<span>(</span><span>max_lambda, n_iterations=20)</span>
</code></dt>
<dd>
<div class="desc"><p>Initialize a ConstantStrategy object.
Assumes that all values of lambda are the same and performs a binary search to find the optimal lambda.</p>
<p>Parameters:
- max_lambda (float): The maximum value for lambda.
- n_iterations (int, optional): The number of iterations. Default is 20.</p></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class ConstantStrategy(Strategy):
    def __init__(self, max_lambda, n_iterations=20):
        """
        Initialize a ConstantStrategy object.
        Assumes that all values of lambda are the same and performs a binary search to find the optimal lambda.

        Parameters:
        - max_lambda (float): The maximum value for lambda.
        - n_iterations (int, optional): The number of iterations. Default is 20.
        """
        super().__init__()
        self.max_lambda = max_lambda
        self.n_iterations = n_iterations

    def compute_lambdas(self, current_optimal_lambdas, execute_function, cost_target, args_execute_function):
        logger.debug("Computing lambdas with constant strategy")
        # Each bound is a (lambda, cost, quality) triple; None marks "not yet evaluated".
        lambda_min = (0, None, None)
        lambda_max = (self.max_lambda, None, None)
        max_lambda = self.max_lambda
        # Grow the upper bound exponentially until the cost constraint is satisfied
        # (capped at 1e10 so the loop always terminates).
        while max_lambda < 1e10:
            lambdas = [max_lambda for _ in range(len(current_optimal_lambdas))]
            output_dict = execute_function(lambdas, *args_execute_function)
            cost = output_dict['cost']
            if cost > cost_target:
                logger.info(f'Max lambda {max_lambda} is too low, increasing max lambda')
                lambda_min = (max_lambda, cost, output_dict['quality'])
                max_lambda *= 2
                lambda_max = (max_lambda, None, None)
            else:
                lambda_max = (max_lambda, cost, output_dict['quality'])
                break
        # Do binary search to find the optimal lambda
        for _ in range(self.n_iterations):
            lambda_mid = (lambda_min[0] + lambda_max[0]) / 2
            logger.debug(f"Lambda mid: {lambda_mid}")
            lambdas = [lambda_mid for _ in range(len(current_optimal_lambdas))]
            output_dict = execute_function(lambdas, *args_execute_function)
            cost = output_dict['cost']
            if cost < cost_target:
                lambda_max = (lambda_mid, cost, output_dict['quality'])
            else:
                lambda_min = (lambda_mid, cost, output_dict['quality'])
            logger.debug(f"Cost: {cost}")
            logger.debug(f"Quality: {output_dict['quality']}")
        # If the upper bound was never evaluated (e.g. the growth loop hit its cap
        # without ever meeting the cost target), evaluate it once so the returned
        # cost/quality are real numbers rather than None.
        if lambda_max[2] is None:
            lambdas = [lambda_max[0] for _ in range(len(current_optimal_lambdas))]
            output_dict = execute_function(lambdas, *args_execute_function)
            lambda_max = (lambda_max[0], output_dict['cost'], output_dict['quality'])
        logger.info(f"Final Lambda: {lambda_max[0]}")
        logger.info(f"Final Cost: {lambda_max[1]}")
        logger.info(f"Final Quality: {lambda_max[2]}")
        return [lambda_max[0] for _ in range(len(current_optimal_lambdas))], lambda_max[1], lambda_max[2]</code></pre>
</details>
<h3>Ancestors</h3>
<ul class="hlist">
<li><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></li>
</ul>
<h3>Inherited members</h3>
<ul class="hlist">
<li><code><b><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></b></code>:
<ul class="hlist">
<li><code><a title="selection.lambda_strategy.Strategy.compute_lambdas" href="#selection.lambda_strategy.Strategy.compute_lambdas">compute_lambdas</a></code></li>
</ul>
</li>
</ul>
</dd>
<dt id="selection.lambda_strategy.HyperoptStrategy"><code class="flex name class">
<span>class <span class="ident">HyperoptStrategy</span></span>
<span>(</span><span>max_lambda, n_searches=100, max_factor=4, from_scratch=False, optimize_max_depth=False)</span>
</code></dt>
<dd>
<div class="desc"><p>Initialize the HyperoptStrategy object.
Uses the hyperopt library to optimize the lambda values.</p>
<p>Parameters:
- max_lambda (int): The maximum lambda value.
- n_searches (int, optional): The number of searches to perform. Defaults to 100.
- max_factor (int, optional): The maximum factor to increase the prior optimal lambdas by. Defaults to 4.
- from_scratch (bool, optional): Whether to start from scratch and ignore the prior optimal lambdas.
Defaults to False.
- optimize_max_depth (bool, optional): Whether to optimize the maximum depth. Defaults to False.</p></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class HyperoptStrategy(Strategy):
    def __init__(self, max_lambda, n_searches=100, max_factor=4, from_scratch=False,
                 optimize_max_depth=False):
        """
        Initialize the HyperoptStrategy object.
        Uses the hyperopt library to optimize the lambda values.

        Parameters:
        - max_lambda (int): The maximum lambda value.
        - n_searches (int, optional): The number of searches to perform. Defaults to 100.
        - max_factor (int, optional): The maximum factor to increase the prior optimal lambdas by. Defaults to 4.
        - from_scratch (bool, optional): Whether to start from scratch and ignore the prior optimal lambdas.
          Defaults to False.
        - optimize_max_depth (bool, optional): Whether to optimize the maximum depth. Defaults to False.
        """
        super().__init__()
        self.max_lambda = max_lambda
        self.n_searches = n_searches
        self.max_factor = max_factor
        self.from_scratch = from_scratch
        self.optimize_max_depth = optimize_max_depth
        self.all_results = []  # every evaluated configuration, recorded by objective()

    def objective(self, lambdas, cost_init, lambda_tradeoff, execute_function, *args):
        # Hyperopt minimizes this value: negative quality plus a penalty proportional
        # to how far the cost overshoots the baseline cost_init.
        if self.optimize_max_depth:
            # The last search variable encodes the depth; positions past it are disabled (None).
            lambdas, max_depth = lambdas[:-1], int(lambdas[-1])
            lambdas = [lambda_ if i < max_depth else None for i, lambda_ in enumerate(lambdas)]
        output_dict = execute_function(lambdas, *args)
        output_dict['lambdas'] = lambdas
        self.all_results.append(output_dict)
        return -output_dict['quality'] + lambda_tradeoff * max(0, output_dict['cost'] - cost_init)

    def compute_lambdas(self, current_optimal_lambdas, execute_function, cost_target, args_execute_function):
        logger.debug("Computing lambdas with hyperopt strategy")
        self.all_results = []
        space = []
        max_init_val = max([lambda_ for lambda_ in current_optimal_lambdas if lambda_ is not None])
        for i in range(len(current_optimal_lambdas)):
            if current_optimal_lambdas[i] is None and not self.optimize_max_depth:
                space.append(hp.choice(f'lambda_{i}', [None]))
            else:
                # NOTE(review): with the default max_factor=4 this is max(0, -3 * max_init_val) == 0,
                # so the branch below always falls back to self.max_lambda and max_factor never
                # widens the search range. Given the docstring ("factor to increase the prior
                # optimal lambdas by"), (1 + self.max_factor) may have been intended - confirm.
                max_val = max(0, (1 - self.max_factor) * max_init_val)
                if max_val == 0 or self.from_scratch or self.max_lambda == 1:
                    max_val = self.max_lambda
                space.append(hp.uniform(f'lambda_{i}', 0, max_val))
        if self.optimize_max_depth:
            space.append(hp.choice('max_depth', [i for i in range(1, len(current_optimal_lambdas) + 1)]))
        # Baseline run with the current lambdas fixes the cost budget and the
        # quality/cost trade-off weight used inside the objective.
        results_init = execute_function(current_optimal_lambdas, *args_execute_function)
        cost_init = results_init['cost']
        lambda_tradeoff = results_init['quality'] / cost_init
        objective = lambda x: self.objective(x, cost_init, lambda_tradeoff, execute_function, *args_execute_function)
        best = fmin(objective, space, algo=tpe.suggest, max_evals=self.n_searches,
                    rstate=np.random.default_rng(0))
        # Keep only configurations that respect the cost target, then pick the best quality;
        # if none qualify, fall back to the current lambdas and the baseline results.
        all_results_low_cost = [result for result in self.all_results if result['cost'] < cost_target]
        if len(all_results_low_cost) == 0:
            return current_optimal_lambdas, cost_init, results_init['quality']
        best_lambda_index = np.argmax([result['quality'] for result in all_results_low_cost])
        best_lambdas = all_results_low_cost[best_lambda_index]['lambdas']
        cost_best = all_results_low_cost[best_lambda_index]['cost']
        quality_best = all_results_low_cost[best_lambda_index]['quality']
        logger.info(f"Final Lambdas: {best_lambdas}")
        logger.info(f"Final Cost: {cost_best}")
        logger.info(f"Final Quality: {quality_best}")
        return best_lambdas, cost_best, quality_best</code></pre>
</details>
<h3>Ancestors</h3>
<ul class="hlist">
<li><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></li>
</ul>
<h3>Methods</h3>
<dl>
<dt id="selection.lambda_strategy.HyperoptStrategy.objective"><code class="name flex">
<span>def <span class="ident">objective</span></span>(<span>self, lambdas, cost_init, lambda_tradeoff, execute_function, *args)</span>
</code></dt>
<dd>
<div class="desc"></div>
</dd>
</dl>
<h3>Inherited members</h3>
<ul class="hlist">
<li><code><b><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></b></code>:
<ul class="hlist">
<li><code><a title="selection.lambda_strategy.Strategy.compute_lambdas" href="#selection.lambda_strategy.Strategy.compute_lambdas">compute_lambdas</a></code></li>
</ul>
</li>
</ul>
</dd>
<dt id="selection.lambda_strategy.RepetitiveConstantStrategy"><code class="flex name class">
<span>class <span class="ident">RepetitiveConstantStrategy</span></span>
<span>(</span><span>max_lambda, n_iterations=20)</span>
</code></dt>
<dd>
<div class="desc"><p>Initialize the RepetitiveConstantStrategy object.
Assumes that lambda values are of the form [lambda, …, lambda, None, …, None]
and performs a binary search to find the optimal lambda and number of None values.</p>
<p>Parameters:
- max_lambda (float): The maximum value for lambda.
- n_iterations (int, optional): The number of iterations. Default is 20.</p></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class RepetitiveConstantStrategy(Strategy):
    def __init__(self, max_lambda, n_iterations=20):
        """
        Initialize the RepetitiveConstantStrategy object.
        Assumes that lambda values are of the form [lambda, ..., lambda, None, ..., None]
        and performs a binary search to find the optimal lambda and number of None values.

        Parameters:
        - max_lambda (float): The maximum value for lambda.
        - n_iterations (int, optional): The number of iterations. Default is 20.
        """
        super().__init__()
        self.max_lambda = max_lambda
        self.n_iterations = n_iterations

    def compute_lambdas(self, current_optimal_lambdas, execute_function, cost_target, args_execute_function):
        logger.debug("Computing lambdas with repetitive constant strategy")
        # Best (lambdas, quality, cost) seen across all prefix lengths.
        optimal_lambdas = None
        optimal_value = None
        optimal_cost = None
        # Do binary search to find the optimal lambda
        for i in range(len(current_optimal_lambdas)):
            lambda_min = (0, None, None)
            lambda_max = (self.max_lambda, None, None)
            max_lambda = self.max_lambda
            # Grow the upper bound until the cost constraint is met (capped at 1e10).
            # NOTE(review): this probe fills EVERY position with max_lambda, while the
            # binary search below only fills positions j &lt;= i - confirm the full fill
            # is intended rather than the i-limited prefix.
            while max_lambda < 1e10:
                lambdas = [max_lambda for _ in range(len(current_optimal_lambdas))]
                output_dict = execute_function(lambdas, *args_execute_function)
                cost = output_dict['cost']
                if cost > cost_target:
                    logger.info(f'Max lambda {max_lambda} is too low, increasing max lambda')
                    lambda_min = (max_lambda, cost, output_dict['quality'])
                    max_lambda *= 2
                    lambda_max = (max_lambda, None, None)
                else:
                    lambda_max = (max_lambda, cost, output_dict['quality'])
                    break
            logger.debug(f"Step: {i}")
            for _ in range(self.n_iterations):
                lambda_mid = (lambda_min[0] + lambda_max[0]) / 2
                logger.debug(f"Lambda mid: {lambda_mid}")
                # Only the first i+1 positions carry the candidate lambda; the rest are disabled.
                lambdas = [lambda_mid if j <= i else None for j in range(len(current_optimal_lambdas))]
                output_dict = execute_function(lambdas, *args_execute_function)
                cost = output_dict['cost']
                if cost < cost_target:
                    lambda_max = (lambda_mid, cost, output_dict['quality'])
                else:
                    lambda_min = (lambda_mid, cost, output_dict['quality'])
                logger.debug(f"Cost: {cost}")
                logger.debug(f"Quality: {output_dict['quality']}")
            # Keep the highest-quality prefix length seen so far.
            if optimal_value is None or (lambda_max[2] is not None and lambda_max[2] > optimal_value):
                optimal_lambdas = [lambda_max[0] if j <= i else None for j in range(len(current_optimal_lambdas))]
                optimal_value = lambda_max[2]
                optimal_cost = lambda_max[1]
        logger.info(f"Final Lambda: {optimal_lambdas}")
        logger.info(f"Final Cost: {optimal_cost}")
        logger.info(f"Final Quality: {optimal_value}")
        return optimal_lambdas, optimal_cost, optimal_value</code></pre>
</details>
<h3>Ancestors</h3>
<ul class="hlist">
<li><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></li>
</ul>
<h3>Inherited members</h3>
<ul class="hlist">
<li><code><b><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></b></code>:
<ul class="hlist">
<li><code><a title="selection.lambda_strategy.Strategy.compute_lambdas" href="#selection.lambda_strategy.Strategy.compute_lambdas">compute_lambdas</a></code></li>
</ul>
</li>
</ul>
</dd>
<dt id="selection.lambda_strategy.Strategy"><code class="flex name class">
<span>class <span class="ident">Strategy</span></span>
</code></dt>
<dd>
<div class="desc"><p>Initializes an instance of the Strategy class. These strategies optimize the lambda parameters.</p></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class Strategy:
    def __init__(self):
        """
        Initializes an instance of the Strategy class. These strategies optimize the lambda parameters.
        """
        pass

    def compute_lambdas(self, current_optimal_lambdas, execute_function,
                        cost_target, args_execute_function):
        """
        Compute the lambdas for model selection.

        Parameters:
        - current_optimal_lambdas (list): The current optimal lambdas in the selection algorithm.
        - execute_function (function): The function to execute for each lambda which returns both cost and quality.
        - cost_target (float): The target cost for model selection.
        - args_execute_function (tuple): The arguments to pass to the execute_function.

        Raises:
        - NotImplementedError: This method is not implemented yet.
        """
        raise NotImplementedError</code></pre>
</details>
<h3>Subclasses</h3>
<ul class="hlist">
<li><a title="selection.lambda_strategy.ConstantStrategy" href="#selection.lambda_strategy.ConstantStrategy">ConstantStrategy</a></li>
<li><a title="selection.lambda_strategy.HyperoptStrategy" href="#selection.lambda_strategy.HyperoptStrategy">HyperoptStrategy</a></li>
<li><a title="selection.lambda_strategy.RepetitiveConstantStrategy" href="#selection.lambda_strategy.RepetitiveConstantStrategy">RepetitiveConstantStrategy</a></li>
</ul>
<h3>Methods</h3>
<dl>
<dt id="selection.lambda_strategy.Strategy.compute_lambdas"><code class="name flex">
<span>def <span class="ident">compute_lambdas</span></span>(<span>self, current_optimal_lambdas, execute_function, cost_target, args_execute_function)</span>
</code></dt>
<dd>
<div class="desc"><p>Compute the lambdas for model selection.</p>
<p>Parameters:
- current_optimal_lambdas (list): The current optimal lambdas in the selection algorithm.
- execute_function (function): The function to execute for each lambda which returns both cost and quality.
- cost_target (float): The target cost for model selection.
- args_execute_function (tuple): The arguments to pass to the execute_function.</p>
<p>Raises:
- NotImplementedError: This method is not implemented yet.</p></div>
</dd>
</dl>
</dd>
</dl>
</section>
</article>
<nav id="sidebar">
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="selection" href="index.html">selection</a></code></li>
</ul>
</li>
<li><h3><a href="#header-classes">Classes</a></h3>
<ul>
<li>
<h4><code><a title="selection.lambda_strategy.ConstantStrategy" href="#selection.lambda_strategy.ConstantStrategy">ConstantStrategy</a></code></h4>
</li>
<li>
<h4><code><a title="selection.lambda_strategy.HyperoptStrategy" href="#selection.lambda_strategy.HyperoptStrategy">HyperoptStrategy</a></code></h4>
<ul class="">
<li><code><a title="selection.lambda_strategy.HyperoptStrategy.objective" href="#selection.lambda_strategy.HyperoptStrategy.objective">objective</a></code></li>
</ul>
</li>
<li>
<h4><code><a title="selection.lambda_strategy.RepetitiveConstantStrategy" href="#selection.lambda_strategy.RepetitiveConstantStrategy">RepetitiveConstantStrategy</a></code></h4>
</li>
<li>
<h4><code><a title="selection.lambda_strategy.Strategy" href="#selection.lambda_strategy.Strategy">Strategy</a></code></h4>
<ul class="">
<li><code><a title="selection.lambda_strategy.Strategy.compute_lambdas" href="#selection.lambda_strategy.Strategy.compute_lambdas">compute_lambdas</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.1</a>.</p>
</footer>
</body>
</html>