-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathindex.html
390 lines (358 loc) · 50.7 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>DiffSharp: Differentiable Tensor Programming Made Simple
</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="author" content="Atılım Güneş Baydin, Don Syme, Barak A. Pearlmutter, Jeffrey Siskind, and DiffSharp contributors">
<meta name="description" content="DiffSharp is a tensor library with support for differentiable programming. It is designed for use in machine learning, probabilistic programming, optimization and other domains.">
<script src="https://code.jquery.com/jquery-1.8.0.js"></script>
<script src="https://code.jquery.com/ui/1.8.23/jquery-ui.js"></script>
<script src="https://netdna.bootstrapcdn.com/twitter-bootstrap/2.2.1/js/bootstrap.min.js"></script>
<script type="text/javascript" async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<link href="https://netdna.bootstrapcdn.com/twitter-bootstrap/2.2.1/css/bootstrap-combined.min.css" rel="stylesheet">
<link type="text/css" rel="stylesheet" href="https://diffsharp.github.io/content/fsdocs-default.css" />
<script src="https://diffsharp.github.io/content/fsdocs-tips.js" type="text/javascript"></script>
<!-- BEGIN SEARCH BOX: this adds support for the search box -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/JavaScript-autoComplete/1.0.4/auto-complete.css" />
<!-- END SEARCH BOX: this adds support for the search box -->
</head>
<body>
<div class="container">
<!-- <div class="masthead">
<ul class="nav nav-pills pull-right">
<li><a href="https://fsharp.org">fsharp.org</a></li>
</ul>
<h3 class="muted">DiffSharp</h3>
</div> -->
<!-- <hr /> -->
<div class="row">
<div class="col-xs-12" style="height:10px;"></div>
</div>
<div class="row">
<div class="span3" id="fsdocs-nav">
<a href="index.html"><img class="logo" src="https://diffsharp.github.io/img/diffsharp-logo-text.png"/></a>
<!-- BEGIN SEARCH BOX: this adds support for the search box -->
<div id="header">
<div class="searchbox">
<label for="search-by">
<i class="fas fa-search"></i>
</label>
<input data-search-input="" id="search-by" type="search" placeholder="Search..." />
<span data-search-clear="">
<i class="fas fa-times"></i>
</span>
</div>
</div>
<!-- END SEARCH BOX: this adds support for the search box -->
<ul class="nav nav-list" id="menu">
<!-- <li class="nav-header">DiffSharp</li> -->
<!-- <li class="divider"></li> -->
<li><a href="https://diffsharp.github.io/index.html">Home</a></li>
<li><a href="https://github.com/DiffSharp/DiffSharp/">GitHub</a></li>
<li><a href="https://github.com/DiffSharp/DiffSharp/blob/dev/LICENSE">License (BSD)</a></li>
<li class="nav-header">Getting Started</li>
<!-- <li class="divider"></li> -->
<li><a href="https://diffsharp.github.io/install.html">Install</a></li>
<li><a href="https://diffsharp.github.io/quickstart.html">Quickstart</a></li>
<!-- <li><a href="https://diffsharp.github.io/tensors.html">Tensors</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/differentiable-programming.html">Differentiable Programming</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/nested-derivatives.html">Nested Derivatives</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/models.html">Models</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/optimization.html">Optimization</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/probability-distributions.html">Probability Distributions</a></li> -->
<li class="nav-header">Tutorials</li>
<!-- <li class="divider"></li> -->
<!-- <li><a href="https://diffsharp.github.io/tutorial-classifier.html">Classifier</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/tutorial-gan.html">GAN</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/tutorial-vae.html">VAE</a></li> -->
<!-- <li><a href="https://diffsharp.github.io/tutorial-language.html">Language Models</a></li> -->
<li><a href="https://github.com/DiffSharp/DiffSharp/tree/dev/examples">More Examples</a></li>
<li class="nav-header">API Documentation</li>
<li><a href="https://diffsharp.github.io/reference/index.html">API Reference</a></li>
<li><a href="https://diffsharp.github.io/extensions.html">Extensions</a></li>
<!-- <li class="nav-header">Examples</li> -->
<!-- <li class="divider"></li> -->
<!-- <li class="nav-header">Machine Learning</li> -->
<!-- <li><a href="https://diffsharp.github.io/examples-topic1.html">Topic 1</a></li> -->
<!-- <li class="divider"></li>
<li class="nav-header">Authors</li>
<li><a href="http://www.robots.ox.ac.uk/~gunes/">Atılım Güneş Baydin</a></li>
<li><a href="http://www.bcl.hamilton.ie/~barak/">Barak A. Pearlmutter</a></li>
<li><a href="https://www.microsoft.com/en-us/research/people/dsyme/">Don Syme</a></li> -->
</ul>
</div>
<div class="span9" id="fsdocs-content">
<p><a href="https://colab.research.google.com/github/DiffSharp/diffsharp.github.io/blob/master/index.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Colab" /></a> 
<a href="https://mybinder.org/v2/gh/diffsharp/diffsharp.github.io/master?filepath=index.ipynb"><img src="img/badge-binder.svg" alt="Binder" /></a> 
<a href="index.fsx"><img src="img/badge-script.svg" alt="Script" /></a> 
<a href="index.ipynb"><img src="img/badge-notebook.svg" alt="Notebook" /></a></p>
<h1><a name="DiffSharp-Differentiable-Tensor-Programming-Made-Simple" class="anchor" href="#DiffSharp-Differentiable-Tensor-Programming-Made-Simple">DiffSharp: Differentiable Tensor Programming Made Simple</a></h1>
<p>DiffSharp is a tensor library with support for <a href="https://en.wikipedia.org/wiki/Differentiable_programming">differentiable programming</a>.
It is designed for use in machine learning, probabilistic programming, optimization and other domains.</p>
<button class="button" style="vertical-align:middle" onclick="window.location.href='https://diffsharp.github.io/install.html'"><span>Install »</span></button>
<h2><a name="Key-Features" class="anchor" href="#Key-Features">Key Features</a></h2>
<p>🗹 Nested and mixed-mode differentiation</p>
<p>🗹 Common optimizers, model elements, differentiable probability distributions</p>
<p>🗹 F# for robust functional programming</p>
<p>🗹 PyTorch familiar naming and idioms, efficient LibTorch CUDA/C++ tensors with GPU support</p>
<p>🗹 Linux, macOS, Windows supported</p>
<p>🗹 Use interactive notebooks in Jupyter and Visual Studio Code</p>
<p>🗹 100% open source</p>
<h2><a name="Differentiable-Programming" class="anchor" href="#Differentiable-Programming">Differentiable Programming</a></h2>
<p>DiffSharp provides world-leading automatic differentiation capabilities for tensor code, including composable gradients, Hessians, Jacobians, directional derivatives, and matrix-free Hessian- and Jacobian-vector products over arbitrary user code. This goes beyond conventional tensor libraries such as PyTorch and TensorFlow, allowing the use of nested forward and reverse differentiation up to any level.</p>
<p>With DiffSharp, you can compute higher-order derivatives efficiently and differentiate functions that are internally making use of differentiation and gradient-based optimization.</p>
<br />
<img src="img/anim-intro-2.gif" width="85%" />
<h2><a name="Practical-Familiar-and-Efficient" class="anchor" href="#Practical-Familiar-and-Efficient">Practical, Familiar and Efficient</a></h2>
<p>DiffSharp comes with a <a href="https://pytorch.org/cppdocs/">LibTorch</a> backend, using the same C++ and CUDA implementations for tensor computations that power <a href="https://pytorch.org/">PyTorch</a>. On top of these raw tensors (LibTorch's ATen, excluding autograd), DiffSharp implements its own computation graph and differentiation capabilities. It is tested on Linux, macOS, and Windows, and it supports CUDA and GPUs.</p>
<p>The DiffSharp API is designed to be similar to <a href="https://pytorch.org/docs/stable/index.html">the PyTorch Python API</a> through very similar naming and idioms, and where elements have similar names the PyTorch documentation can generally be used as a guide.</p>
<p>DiffSharp uses <a href="https://dot.net/fsharp">the incredible F# programming language</a> for tensor programming. F# code is generally faster and more robust than equivalent Python code, while still being succinct and compact like Python, making it an ideal modern AI and machine learning implementation language. This allows fluent and productive code for tensor programming.</p>
<br />
<iframe width="85%" src="https://www.youtube.com/embed/_QnbV6CAWXc" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
<h2><a name="Interactive-Notebooks" class="anchor" href="#Interactive-Notebooks">Interactive Notebooks</a></h2>
<p>All documentation pages in this website are interactive notebooks which you can execute directly in your browser without installing anything in your local machine.</p>
<p>Using the buttons <a href="https://colab.research.google.com/github/DiffSharp/diffsharp.github.io/blob/master/index.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Colab" /></a> <a href="https://mybinder.org/v2/gh/diffsharp/diffsharp.github.io/master?filepath=index.ipynb"><img src="img/badge-binder.svg" alt="Binder" /></a> on the top of each page, you can execute the page as an interactive notebook running on cloud servers provided by <a href="https://colab.research.google.com/">Google Colab</a> and <a href="https://mybinder.org/">Binder</a>.</p>
<p>Using the buttons <a href="index.fsx"><img src="img/badge-script.svg" alt="Script" /></a>
<a href="index.ipynb"><img src="img/badge-notebook.svg" alt="Notebook" /></a> you can also download a page as a script or an interactive notebook, which you can execute locally in <a href="https://jupyter.org/">Jupyter</a> or <a href="https://code.visualstudio.com/">Visual Studio Code</a> using <a href="https://github.com/dotnet/interactive">dotnet interactive</a>.</p>
<h2><a name="Example" class="anchor" href="#Example">Example</a></h2>
<p>Define and add two tensors:</p>
<pre class="fssnip highlighted"><code lang="fsharp"><span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 10)" onmouseover="showTip(event, 'fs1', 10)" class="id">DiffSharp</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs7', 11)" onmouseover="showTip(event, 'fs7', 11)" class="id">t1</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 12)" onmouseover="showTip(event, 'fs2', 12)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 13)" onmouseover="showTip(event, 'fs8', 13)" class="id">tensor</span> <span class="pn">[</span> <span class="o">0.0</span> <span class="o">..</span><span class="n">0.2</span><span class="o">..</span> <span class="n">1.0</span> <span class="pn">]</span> <span class="c">// Gives [0., 0.2, 0.4, 0.6, 0.8, 1.]</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs9', 14)" onmouseover="showTip(event, 'fs9', 14)" class="id">t2</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 15)" onmouseover="showTip(event, 'fs2', 15)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 16)" onmouseover="showTip(event, 'fs8', 16)" class="id">tensor</span> <span class="pn">[</span> <span class="n">1</span><span class="pn">,</span> <span class="n">2</span><span class="pn">,</span> <span class="n">3</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">5</span><span class="pn">,</span> <span class="n">6</span> <span class="pn">]</span>
<span onmouseout="hideTip(event, 'fs7', 17)" onmouseover="showTip(event, 'fs7', 17)" class="id">t1</span> <span class="o">+</span> <span onmouseout="hideTip(event, 'fs9', 18)" onmouseover="showTip(event, 'fs9', 18)" class="id">t2</span>
</code></pre>
<table class="pre"><tr><td><pre><code>tensor([1.0000, 2.2000, 3.4000, 4.6000, 5.8000, 7.0000])</code></pre></td></tr></table>
<p>Compute a convolution:</p>
<pre class="fssnip highlighted"><code lang="fsharp"><span class="k">let</span> <span onmouseout="hideTip(event, 'fs10', 19)" onmouseover="showTip(event, 'fs10', 19)" class="id">t3</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 20)" onmouseover="showTip(event, 'fs2', 20)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 21)" onmouseover="showTip(event, 'fs8', 21)" class="id">tensor</span> <span class="pn">[</span><span class="pn">[</span><span class="pn">[</span><span class="pn">[</span><span class="n">0.0</span> <span class="o">..</span> <span class="n">10.0</span><span class="pn">]</span><span class="pn">]</span><span class="pn">]</span><span class="pn">]</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs11', 22)" onmouseover="showTip(event, 'fs11', 22)" class="id">t4</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 23)" onmouseover="showTip(event, 'fs2', 23)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 24)" onmouseover="showTip(event, 'fs8', 24)" class="id">tensor</span> <span class="pn">[</span><span class="pn">[</span><span class="pn">[</span><span class="pn">[</span><span class="o">0.0</span> <span class="o">..</span><span class="n">0.1</span><span class="o">..</span> <span class="n">1.0</span><span class="pn">]</span><span class="pn">]</span><span class="pn">]</span><span class="pn">]</span>
<span onmouseout="hideTip(event, 'fs10', 25)" onmouseover="showTip(event, 'fs10', 25)" class="id">t3</span><span class="pn">.</span><span class="id">conv2d</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs11', 26)" onmouseover="showTip(event, 'fs11', 26)" class="id">t4</span><span class="pn">)</span>
</code></pre>
<table class="pre"><tr><td><pre><code>tensor([[[[38.5000]]]])</code></pre></td></tr></table>
<p>Take the gradient of a vector-to-scalar function:</p>
<pre class="fssnip highlighted"><code lang="fsharp"><span class="k">let</span> <span onmouseout="hideTip(event, 'fs12', 27)" onmouseover="showTip(event, 'fs12', 27)" class="fn">f</span> <span class="pn">(</span><span onmouseout="hideTip(event, 'fs13', 28)" onmouseover="showTip(event, 'fs13', 28)" class="fn">x</span><span class="pn">:</span> <span onmouseout="hideTip(event, 'fs14', 29)" onmouseover="showTip(event, 'fs14', 29)" class="rt">Tensor</span><span class="pn">)</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs13', 30)" onmouseover="showTip(event, 'fs13', 30)" class="fn">x</span><span class="pn">.</span><span class="id">exp</span><span class="pn">(</span><span class="pn">)</span><span class="pn">.</span><span class="id">sum</span><span class="pn">(</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs2', 31)" onmouseover="showTip(event, 'fs2', 31)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs15', 32)" onmouseover="showTip(event, 'fs15', 32)" class="id">grad</span> <span onmouseout="hideTip(event, 'fs12', 33)" onmouseover="showTip(event, 'fs12', 33)" class="fn">f</span> <span class="pn">(</span><span onmouseout="hideTip(event, 'fs2', 34)" onmouseover="showTip(event, 'fs2', 34)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 35)" onmouseover="showTip(event, 'fs8', 35)" class="id">tensor</span><span class="pn">(</span><span class="pn">[</span><span class="n">1.8</span><span class="pn">,</span> <span class="n">2.5</span><span class="pn">]</span><span class="pn">)</span><span class="pn">)</span>
</code></pre>
<table class="pre"><tr><td><pre><code>tensor([ 6.0496, 12.1825])</code></pre></td></tr></table>
<p>Compute a nested derivative (checking for <a href="https://doi.org/10.1007/s10990-008-9037-1">perturbation confusion</a>):</p>
<pre class="fssnip highlighted"><code lang="fsharp"><span class="k">let</span> <span onmouseout="hideTip(event, 'fs16', 36)" onmouseover="showTip(event, 'fs16', 36)" class="id">x0</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 37)" onmouseover="showTip(event, 'fs2', 37)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 38)" onmouseover="showTip(event, 'fs8', 38)" class="id">tensor</span><span class="pn">(</span><span class="n">1.</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs17', 39)" onmouseover="showTip(event, 'fs17', 39)" class="id">y0</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 40)" onmouseover="showTip(event, 'fs2', 40)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs8', 41)" onmouseover="showTip(event, 'fs8', 41)" class="id">tensor</span><span class="pn">(</span><span class="n">2.</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs2', 42)" onmouseover="showTip(event, 'fs2', 42)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs18', 43)" onmouseover="showTip(event, 'fs18', 43)" class="id">diff</span> <span class="pn">(</span><span class="k">fun</span> <span onmouseout="hideTip(event, 'fs13', 44)" onmouseover="showTip(event, 'fs13', 44)" class="fn">x</span> <span class="k">-></span> <span onmouseout="hideTip(event, 'fs13', 45)" onmouseover="showTip(event, 'fs13', 45)" class="fn">x</span> <span class="o">*</span> <span onmouseout="hideTip(event, 'fs2', 46)" onmouseover="showTip(event, 'fs2', 46)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs18', 47)" onmouseover="showTip(event, 'fs18', 47)" class="id">diff</span> <span class="pn">(</span><span class="k">fun</span> <span onmouseout="hideTip(event, 'fs19', 48)" onmouseover="showTip(event, 'fs19', 48)" class="fn">y</span> <span class="k">-></span> <span onmouseout="hideTip(event, 'fs13', 49)" onmouseover="showTip(event, 'fs13', 49)" class="fn">x</span> <span class="o">*</span> <span onmouseout="hideTip(event, 'fs19', 50)" onmouseover="showTip(event, 'fs19', 50)" class="fn">y</span><span class="pn">)</span> <span onmouseout="hideTip(event, 'fs17', 51)" onmouseover="showTip(event, 'fs17', 51)" class="id">y0</span><span class="pn">)</span> <span onmouseout="hideTip(event, 'fs16', 52)" onmouseover="showTip(event, 'fs16', 52)" class="id">x0</span>
</code></pre>
<table class="pre"><tr><td><pre><code>tensor(2.)</code></pre></td></tr></table>
<p>Define a model and optimize it:</p>
<pre class="fssnip highlighted"><code lang="fsharp"><span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 53)" onmouseover="showTip(event, 'fs1', 53)" class="id">DiffSharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs20', 54)" onmouseover="showTip(event, 'fs20', 54)" class="id">Data</span>
<span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 55)" onmouseover="showTip(event, 'fs1', 55)" class="id">DiffSharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs21', 56)" onmouseover="showTip(event, 'fs21', 56)" class="id">Model</span>
<span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 57)" onmouseover="showTip(event, 'fs1', 57)" class="id">DiffSharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs22', 58)" onmouseover="showTip(event, 'fs22', 58)" class="m">Compose</span>
<span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 59)" onmouseover="showTip(event, 'fs1', 59)" class="id">DiffSharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs23', 60)" onmouseover="showTip(event, 'fs23', 60)" class="id">Util</span>
<span class="k">open</span> <span onmouseout="hideTip(event, 'fs1', 61)" onmouseover="showTip(event, 'fs1', 61)" class="id">DiffSharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs24', 62)" onmouseover="showTip(event, 'fs24', 62)" class="id">Optim</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs25', 63)" onmouseover="showTip(event, 'fs25', 63)" class="id">epochs</span> <span class="o">=</span> <span class="n">2</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs26', 64)" onmouseover="showTip(event, 'fs26', 64)" class="id">batchSize</span> <span class="o">=</span> <span class="n">32</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs27', 65)" onmouseover="showTip(event, 'fs27', 65)" class="id">numBatches</span> <span class="o">=</span> <span class="n">5</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs28', 66)" onmouseover="showTip(event, 'fs28', 66)" class="id">trainSet</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs29', 67)" onmouseover="showTip(event, 'fs29', 67)" class="fn">MNIST</span><span class="pn">(</span><span class="s">"../data"</span><span class="pn">,</span> <span class="fn">train</span><span class="o">=</span><span class="k">true</span><span class="pn">,</span> <span class="fn">transform</span><span class="o">=</span><span onmouseout="hideTip(event, 'fs30', 68)" onmouseover="showTip(event, 'fs30', 68)" class="fn">id</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs31', 69)" onmouseover="showTip(event, 'fs31', 69)" class="id">trainLoader</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs28', 70)" onmouseover="showTip(event, 'fs28', 70)" class="id">trainSet</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs32', 71)" onmouseover="showTip(event, 'fs32', 71)" class="id">loader</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs26', 72)" onmouseover="showTip(event, 'fs26', 72)" class="fn">batchSize</span><span class="o">=</span><span onmouseout="hideTip(event, 'fs26', 73)" onmouseover="showTip(event, 'fs26', 73)" class="id">batchSize</span><span class="pn">,</span> <span class="fn">shuffle</span><span class="o">=</span><span class="k">true</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs33', 74)" onmouseover="showTip(event, 'fs33', 74)" class="id">validSet</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs29', 75)" onmouseover="showTip(event, 'fs29', 75)" class="fn">MNIST</span><span class="pn">(</span><span class="s">"../data"</span><span class="pn">,</span> <span class="fn">train</span><span class="o">=</span><span class="k">false</span><span class="pn">,</span> <span class="fn">transform</span><span class="o">=</span><span onmouseout="hideTip(event, 'fs30', 76)" onmouseover="showTip(event, 'fs30', 76)" class="fn">id</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs34', 77)" onmouseover="showTip(event, 'fs34', 77)" class="id">validLoader</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs33', 78)" onmouseover="showTip(event, 'fs33', 78)" class="id">validSet</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs32', 79)" onmouseover="showTip(event, 'fs32', 79)" class="id">loader</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs26', 80)" onmouseover="showTip(event, 'fs26', 80)" class="fn">batchSize</span><span class="o">=</span><span onmouseout="hideTip(event, 'fs26', 81)" onmouseover="showTip(event, 'fs26', 81)" class="id">batchSize</span><span class="pn">,</span> <span class="fn">shuffle</span><span class="o">=</span><span class="k">false</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs35', 82)" onmouseover="showTip(event, 'fs35', 82)" class="id">encoder</span> <span class="o">=</span>
<span onmouseout="hideTip(event, 'fs36', 83)" onmouseover="showTip(event, 'fs36', 83)" class="fn">Conv2d</span><span class="pn">(</span><span class="n">1</span><span class="pn">,</span> <span class="n">32</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">2</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 84)" onmouseover="showTip(event, 'fs2', 84)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs37', 85)" onmouseover="showTip(event, 'fs37', 85)" class="id">relu</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs36', 86)" onmouseover="showTip(event, 'fs36', 86)" class="fn">Conv2d</span><span class="pn">(</span><span class="n">32</span><span class="pn">,</span> <span class="n">64</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">2</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 87)" onmouseover="showTip(event, 'fs2', 87)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs37', 88)" onmouseover="showTip(event, 'fs37', 88)" class="id">relu</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs36', 89)" onmouseover="showTip(event, 'fs36', 89)" class="fn">Conv2d</span><span class="pn">(</span><span class="n">64</span><span class="pn">,</span> <span class="n">128</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">2</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 90)" onmouseover="showTip(event, 'fs2', 90)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs38', 91)" onmouseover="showTip(event, 'fs38', 91)" class="id">flatten</span><span class="pn">(</span><span class="n">1</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs39', 92)" onmouseover="showTip(event, 'fs39', 92)" class="id">decoder</span> <span class="o">=</span>
<span onmouseout="hideTip(event, 'fs2', 93)" onmouseover="showTip(event, 'fs2', 93)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs40', 94)" onmouseover="showTip(event, 'fs40', 94)" class="id">unflatten</span><span class="pn">(</span><span class="n">1</span><span class="pn">,</span> <span class="pn">[</span><span class="n">128</span><span class="pn">;</span><span class="n">1</span><span class="pn">;</span><span class="n">1</span><span class="pn">]</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs41', 95)" onmouseover="showTip(event, 'fs41', 95)" class="fn">ConvTranspose2d</span><span class="pn">(</span><span class="n">128</span><span class="pn">,</span> <span class="n">64</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">2</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 96)" onmouseover="showTip(event, 'fs2', 96)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs37', 97)" onmouseover="showTip(event, 'fs37', 97)" class="id">relu</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs41', 98)" onmouseover="showTip(event, 'fs41', 98)" class="fn">ConvTranspose2d</span><span class="pn">(</span><span class="n">64</span><span class="pn">,</span> <span class="n">32</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">3</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 99)" onmouseover="showTip(event, 'fs2', 99)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs37', 100)" onmouseover="showTip(event, 'fs37', 100)" class="id">relu</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs41', 101)" onmouseover="showTip(event, 'fs41', 101)" class="fn">ConvTranspose2d</span><span class="pn">(</span><span class="n">32</span><span class="pn">,</span> <span class="n">1</span><span class="pn">,</span> <span class="n">4</span><span class="pn">,</span> <span class="n">2</span><span class="pn">)</span>
<span class="pn">--></span> <span onmouseout="hideTip(event, 'fs2', 102)" onmouseover="showTip(event, 'fs2', 102)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs42', 103)" onmouseover="showTip(event, 'fs42', 103)" class="id">sigmoid</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs43', 104)" onmouseover="showTip(event, 'fs43', 104)" class="id">model</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs44', 105)" onmouseover="showTip(event, 'fs44', 105)" class="fn">VAE</span><span class="pn">(</span><span class="pn">[</span><span class="n">1</span><span class="pn">;</span><span class="n">28</span><span class="pn">;</span><span class="n">28</span><span class="pn">]</span><span class="pn">,</span> <span class="n">64</span><span class="pn">,</span> <span onmouseout="hideTip(event, 'fs35', 106)" onmouseover="showTip(event, 'fs35', 106)" class="id">encoder</span><span class="pn">,</span> <span onmouseout="hideTip(event, 'fs39', 107)" onmouseover="showTip(event, 'fs39', 107)" class="id">decoder</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs45', 108)" onmouseover="showTip(event, 'fs45', 108)" class="id">lr</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs2', 109)" onmouseover="showTip(event, 'fs2', 109)" class="rt">dsharp</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs46', 110)" onmouseover="showTip(event, 'fs46', 110)" class="id">tensor</span><span class="pn">(</span><span class="n">0.001</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs47', 111)" onmouseover="showTip(event, 'fs47', 111)" class="id">optimizer</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs48', 112)" onmouseover="showTip(event, 'fs48', 112)" class="fn">Adam</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs43', 113)" onmouseover="showTip(event, 'fs43', 113)" class="id">model</span><span class="pn">,</span> <span onmouseout="hideTip(event, 'fs45', 114)" onmouseover="showTip(event, 'fs45', 114)" class="fn">lr</span><span class="o">=</span><span onmouseout="hideTip(event, 'fs45', 115)" onmouseover="showTip(event, 'fs45', 115)" class="id">lr</span><span class="pn">)</span>
<span class="k">for</span> <span onmouseout="hideTip(event, 'fs49', 116)" onmouseover="showTip(event, 'fs49', 116)" class="fn">epoch</span> <span class="o">=</span> <span class="n">1</span> <span class="k">to</span> <span onmouseout="hideTip(event, 'fs25', 117)" onmouseover="showTip(event, 'fs25', 117)" class="id">epochs</span> <span class="k">do</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs50', 118)" onmouseover="showTip(event, 'fs50', 118)" class="fn">batches</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs31', 119)" onmouseover="showTip(event, 'fs31', 119)" class="id">trainLoader</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs51', 120)" onmouseover="showTip(event, 'fs51', 120)" class="id">epoch</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs27', 121)" onmouseover="showTip(event, 'fs27', 121)" class="id">numBatches</span><span class="pn">)</span>
<span class="k">for</span> <span onmouseout="hideTip(event, 'fs52', 122)" onmouseover="showTip(event, 'fs52', 122)" class="fn">i</span><span class="pn">,</span> <span onmouseout="hideTip(event, 'fs13', 123)" onmouseover="showTip(event, 'fs13', 123)" class="fn">x</span><span class="pn">,</span> <span class="id">_</span> <span class="k">in</span> <span onmouseout="hideTip(event, 'fs50', 124)" onmouseover="showTip(event, 'fs50', 124)" class="fn">batches</span> <span class="k">do</span>
<span onmouseout="hideTip(event, 'fs43', 125)" onmouseover="showTip(event, 'fs43', 125)" class="id">model</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs53', 126)" onmouseover="showTip(event, 'fs53', 126)" class="id">reverseDiff</span><span class="pn">(</span><span class="pn">)</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs54', 127)" onmouseover="showTip(event, 'fs54', 127)" class="fn">l</span> <span class="o">=</span> <span onmouseout="hideTip(event, 'fs43', 128)" onmouseover="showTip(event, 'fs43', 128)" class="id">model</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs55', 129)" onmouseover="showTip(event, 'fs55', 129)" class="id">loss</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs13', 130)" onmouseover="showTip(event, 'fs13', 130)" class="fn">x</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs54', 131)" onmouseover="showTip(event, 'fs54', 131)" class="fn">l</span><span class="pn">.</span><span class="id">reverse</span><span class="pn">(</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs47', 132)" onmouseover="showTip(event, 'fs47', 132)" class="id">optimizer</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs56', 133)" onmouseover="showTip(event, 'fs56', 133)" class="id">step</span><span class="pn">(</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs57', 134)" onmouseover="showTip(event, 'fs57', 134)" class="fn">print</span> <span class="s">$"Epoch: {</span><span onmouseout="hideTip(event, 'fs49', 135)" onmouseover="showTip(event, 'fs49', 135)" class="fn">epoch</span><span class="s">} minibatch: {</span><span onmouseout="hideTip(event, 'fs52', 136)" onmouseover="showTip(event, 'fs52', 136)" class="fn">i</span><span class="s">} loss: {</span><span onmouseout="hideTip(event, 'fs54', 137)" onmouseover="showTip(event, 'fs54', 137)" class="fn">l</span><span class="s">}"</span>
<span class="k">let</span> <span onmouseout="hideTip(event, 'fs58', 138)" onmouseover="showTip(event, 'fs58', 138)" class="id">validLoss</span> <span class="o">=</span>
<span onmouseout="hideTip(event, 'fs34', 139)" onmouseover="showTip(event, 'fs34', 139)" class="id">validLoader</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs51', 140)" onmouseover="showTip(event, 'fs51', 140)" class="id">epoch</span><span class="pn">(</span><span class="pn">)</span>
<span class="o">|></span> <span onmouseout="hideTip(event, 'fs59', 141)" onmouseover="showTip(event, 'fs59', 141)" class="m">Seq</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs60', 142)" onmouseover="showTip(event, 'fs60', 142)" class="id">sumBy</span> <span class="pn">(</span><span class="k">fun</span> <span class="pn">(</span><span class="id">_</span><span class="pn">,</span> <span onmouseout="hideTip(event, 'fs13', 143)" onmouseover="showTip(event, 'fs13', 143)" class="fn">x</span><span class="pn">,</span> <span class="id">_</span><span class="pn">)</span> <span class="k">-></span> <span onmouseout="hideTip(event, 'fs43', 144)" onmouseover="showTip(event, 'fs43', 144)" class="id">model</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs55', 145)" onmouseover="showTip(event, 'fs55', 145)" class="id">loss</span><span class="pn">(</span><span onmouseout="hideTip(event, 'fs13', 146)" onmouseover="showTip(event, 'fs13', 146)" class="fn">x</span><span class="pn">,</span> <span class="fn">normalize</span><span class="o">=</span><span class="k">false</span><span class="pn">)</span><span class="pn">)</span>
<span onmouseout="hideTip(event, 'fs57', 147)" onmouseover="showTip(event, 'fs57', 147)" class="fn">print</span> <span class="s">$"Validation loss: {</span><span onmouseout="hideTip(event, 'fs58', 148)" onmouseover="showTip(event, 'fs58', 148)" class="id">validLoss</span><span class="o">/</span><span onmouseout="hideTip(event, 'fs33', 149)" onmouseover="showTip(event, 'fs33', 149)" class="id">validSet</span><span class="pn">.</span><span onmouseout="hideTip(event, 'fs61', 150)" onmouseover="showTip(event, 'fs61', 150)" class="id">length</span><span class="s">}"</span>
</code></pre>
<p>Numerous other model definition, differentiation, and training patterns are supported. See the tutorials in the left-hand menu and <a href="https://github.com/DiffSharp/DiffSharp/tree/dev/examples">examples</a> on GitHub.</p>
<h2><a name="More-Information" class="anchor" href="#More-Information">More Information</a></h2>
<p>DiffSharp is developed by <a href="http://www.robots.ox.ac.uk/~gunes/">Atılım Güneş Baydin</a>, <a href="https://www.microsoft.com/en-us/research/people/dsyme/">Don Syme</a>
and other contributors, having started as a project supervised by the automatic differentiation wizards <a href="https://scholar.google.com/citations?user=AxFrw0sAAAAJ&amp;hl=en">Barak Pearlmutter</a> and <a href="https://scholar.google.com/citations?user=CgSBtPYAAAAJ&amp;hl=en">Jeffrey Siskind</a>.</p>
<p>Please join us <a href="https://github.com/DiffSharp/DiffSharp">on GitHub</a>!</p>
<div class="fsdocs-tip" id="fs1">namespace DiffSharp</div>
<div class="fsdocs-tip" id="fs2">type dsharp =
static member abs: input: Tensor -> Tensor
static member acos: input: Tensor -> Tensor
static member add: a: Tensor * b: Tensor -> Tensor
static member arange: endVal: float * ?startVal: float * ?step: float * ?device: Device * ?dtype: Dtype * ?backend: Backend -> Tensor + 1 overload
static member arangeLike: input: Tensor * endVal: float * ?startVal: float * ?step: float * ?device: Device * ?dtype: Dtype * ?backend: Backend -> Tensor + 1 overload
static member argmax: input: Tensor -> int[] + 1 overload
static member argmin: input: Tensor -> int[] + 1 overload
static member asin: input: Tensor -> Tensor
static member atan: input: Tensor -> Tensor
static member backends: unit -> Backend list
...<br /><em><summary>
Tensor operations
</summary></em></div>
<div class="fsdocs-tip" id="fs3">static member DiffSharp.dsharp.config: unit -> DiffSharp.Device * DiffSharp.Dtype * DiffSharp.Backend * DiffSharp.Printer<br />static member DiffSharp.dsharp.config: configuration: (DiffSharp.Device * DiffSharp.Dtype * DiffSharp.Backend * DiffSharp.Printer) -> unit<br />static member DiffSharp.dsharp.config: ?device: DiffSharp.Device * ?dtype: DiffSharp.Dtype * ?backend: DiffSharp.Backend * ?printer: DiffSharp.Printer -> unit</div>
<div class="fsdocs-tip" id="fs4">Multiple items<br />module Backend
from DiffSharp<br /><em><summary>
Contains functions and settings related to backend specifications.
</summary></em><br /><br />--------------------<br />type Backend =
| Reference
| Torch
| Other of name: string * code: int
override ToString: unit -> string
member Name: string<br /><em><summary>
Represents a backend for DiffSharp tensors
</summary></em></div>
<div class="fsdocs-tip" id="fs5">union case DiffSharp.Backend.Reference: DiffSharp.Backend<br /><em><summary>
The reference backend
</summary></em></div>
<div class="fsdocs-tip" id="fs6">static member DiffSharp.dsharp.seed: ?seed: int -> unit</div>
<div class="fsdocs-tip" id="fs7">val t1: Tensor</div>
<div class="fsdocs-tip" id="fs8">static member dsharp.tensor: value: obj * ?device: Device * ?dtype: Dtype * ?backend: Backend -> Tensor</div>
<div class="fsdocs-tip" id="fs9">val t2: Tensor</div>
<div class="fsdocs-tip" id="fs10">val t3: Tensor</div>
<div class="fsdocs-tip" id="fs11">val t4: Tensor</div>
<div class="fsdocs-tip" id="fs12">val f: x: Tensor -> Tensor</div>
<div class="fsdocs-tip" id="fs13">val x: Tensor</div>
<div class="fsdocs-tip" id="fs14">type Tensor =
private | TensorC of primalRaw: RawTensor
| TensorF of primal: Tensor * derivative: Tensor * nestingTag: uint32
| TensorR of primal: Tensor * derivative: Tensor ref * parentOp: TensorOp * fanout: uint32 ref * nestingTag: uint32
interface IConvertible
interface IComparable
override Equals: other: obj -> bool
override GetHashCode: unit -> int
member GetSlice: bounds: int[,] -> Tensor
override ToString: unit -> string
member abs: unit -> Tensor
member acos: unit -> Tensor
member add: b: Tensor -> Tensor + 1 overload
member addSlice: location: seq<int> * b: Tensor -> Tensor
...<br /><em><summary>
Represents a multi-dimensional data type containing elements of a single data type.
</summary><br /><example>
A tensor can be constructed from a list or sequence using <see cref="M:DiffSharp.dsharp.tensor(System.Object)" /><code>
let t = dsharp.tensor([[1.; -1.]; [1.; -1.]])
</code></example></em></div>
<div class="fsdocs-tip" id="fs15">static member dsharp.grad: f: (Tensor -> Tensor) -> x: Tensor -> Tensor</div>
<div class="fsdocs-tip" id="fs16">val x0: Tensor</div>
<div class="fsdocs-tip" id="fs17">val y0: Tensor</div>
<div class="fsdocs-tip" id="fs18">static member dsharp.diff: f: (Tensor -> Tensor) -> x: Tensor -> Tensor</div>
<div class="fsdocs-tip" id="fs19">val y: Tensor</div>
<div class="fsdocs-tip" id="fs20">namespace DiffSharp.Data</div>
<div class="fsdocs-tip" id="fs21">namespace DiffSharp.Model</div>
<div class="fsdocs-tip" id="fs22">module Compose
from DiffSharp</div>
<div class="fsdocs-tip" id="fs23">namespace DiffSharp.Util</div>
<div class="fsdocs-tip" id="fs24">namespace DiffSharp.Optim</div>
<div class="fsdocs-tip" id="fs25">val epochs: int</div>
<div class="fsdocs-tip" id="fs26">val batchSize: int</div>
<div class="fsdocs-tip" id="fs27">val numBatches: int</div>
<div class="fsdocs-tip" id="fs28">val trainSet: MNIST</div>
<div class="fsdocs-tip" id="fs29">Multiple items<br />type MNIST =
inherit Dataset
new: path: string * ?urls: seq<string> * ?train: bool * ?transform: (Tensor -> Tensor) * ?targetTransform: (Tensor -> Tensor) * ?n: int -> MNIST
override item: i: int -> Tensor * Tensor
member classNames: string[]
member classes: int
override length: int<br /><br />--------------------<br />new: path: string * ?urls: seq<string> * ?train: bool * ?transform: (Tensor -> Tensor) * ?targetTransform: (Tensor -> Tensor) * ?n: int -> MNIST</div>
<div class="fsdocs-tip" id="fs30">val id: x: 'T -> 'T<br /><em><summary>The identity function</summary><br /><param name="x">The input value.</param><br /><returns>The same value.</returns><br /><example id="id-example"><code lang="fsharp">
id 12 // Evaluates to 12
id "abc" // Evaluates to "abc"
</code></example></em></div>
<div class="fsdocs-tip" id="fs31">val trainLoader: DataLoader</div>
<div class="fsdocs-tip" id="fs32">member Dataset.loader: batchSize: int * ?shuffle: bool * ?dropLast: bool * ?device: Device * ?dtype: Dtype * ?backend: Backend * ?targetDevice: Device * ?targetDtype: Dtype * ?targetBackend: Backend -> DataLoader</div>
<div class="fsdocs-tip" id="fs33">val validSet: MNIST</div>
<div class="fsdocs-tip" id="fs34">val validLoader: DataLoader</div>
<div class="fsdocs-tip" id="fs35">val encoder: Model<Tensor,Tensor></div>
<div class="fsdocs-tip" id="fs36">Multiple items<br />type Conv2d =
inherit Model
new: inChannels: int * outChannels: int * ?kernelSize: int * ?stride: int * ?padding: int * ?dilation: int * ?kernelSizes: seq<int> * ?strides: seq<int> * ?paddings: seq<int> * ?dilations: seq<int> * ?bias: bool -> Conv2d
override ToString: unit -> string
override forward: value: Tensor -> Tensor
member bias: Tensor
member weight: Tensor<br /><em><summary>A model that applies a 2D convolution over an input signal composed of several input planes</summary></em><br /><br />--------------------<br />new: inChannels: int * outChannels: int * ?kernelSize: int * ?stride: int * ?padding: int * ?dilation: int * ?kernelSizes: seq<int> * ?strides: seq<int> * ?paddings: seq<int> * ?dilations: seq<int> * ?bias: bool -> Conv2d</div>
<div class="fsdocs-tip" id="fs37">static member dsharp.relu: input: Tensor -> Tensor</div>
<div class="fsdocs-tip" id="fs38">static member dsharp.flatten: startDim: int * ?endDim: int -> (Tensor -> Tensor)<br />static member dsharp.flatten: input: Tensor * ?startDim: int * ?endDim: int -> Tensor</div>
<div class="fsdocs-tip" id="fs39">val decoder: Model<Tensor,Tensor></div>
<div class="fsdocs-tip" id="fs40">static member dsharp.unflatten: dim: int * unflattenedShape: seq<int> -> (Tensor -> Tensor)<br />static member dsharp.unflatten: input: Tensor * dim: int * unflattenedShape: seq<int> -> Tensor</div>
<div class="fsdocs-tip" id="fs41">Multiple items<br />type ConvTranspose2d =
inherit Model
new: inChannels: int * outChannels: int * ?kernelSize: int * ?stride: int * ?padding: int * ?dilation: int * ?kernelSizes: seq<int> * ?strides: seq<int> * ?paddings: seq<int> * ?dilations: seq<int> * ?bias: bool -> ConvTranspose2d
override ToString: unit -> string
override forward: value: Tensor -> Tensor
member bias: Tensor
member weight: Tensor<br /><em><summary>A model that applies a 2D transposed convolution operator over an input image composed of several input planes.</summary></em><br /><br />--------------------<br />new: inChannels: int * outChannels: int * ?kernelSize: int * ?stride: int * ?padding: int * ?dilation: int * ?kernelSizes: seq<int> * ?strides: seq<int> * ?paddings: seq<int> * ?dilations: seq<int> * ?bias: bool -> ConvTranspose2d</div>
<div class="fsdocs-tip" id="fs42">static member dsharp.sigmoid: input: Tensor -> Tensor</div>
<div class="fsdocs-tip" id="fs43">val model: VAE</div>
<div class="fsdocs-tip" id="fs44">Multiple items<br />type VAE =
inherit VAEBase
new: xShape: seq<int> * zDim: int * encoder: Model * decoder: Model -> VAE
override ToString: unit -> string
override decode: z: Tensor -> Tensor
override encode: x: Tensor -> Tensor * Tensor<br /><em><summary>Variational auto-encoder</summary></em><br /><br />--------------------<br />new: xShape: seq<int> * zDim: int * encoder: Model * decoder: Model -> VAE</div>
<div class="fsdocs-tip" id="fs45">val lr: Tensor</div>
<div class="fsdocs-tip" id="fs46">static member dsharp.tensor: ?device: Device * ?dtype: Dtype * ?backend: Backend -> ('a -> Tensor)<br />static member dsharp.tensor: value: obj * ?device: Device * ?dtype: Dtype * ?backend: Backend -> Tensor</div>
<div class="fsdocs-tip" id="fs47">val optimizer: Adam</div>
<div class="fsdocs-tip" id="fs48">Multiple items<br />type Adam =
inherit Optimizer
new: model: Model * ?lr: Tensor * ?beta1: Tensor * ?beta2: Tensor * ?eps: Tensor * ?weightDecay: Tensor * ?reversible: bool -> Adam
override updateRule: name: string -> t: Tensor -> Tensor<br /><em><summary>TBD</summary></em><br /><br />--------------------<br />new: model: Model * ?lr: Tensor * ?beta1: Tensor * ?beta2: Tensor * ?eps: Tensor * ?weightDecay: Tensor * ?reversible: bool -> Adam</div>
<div class="fsdocs-tip" id="fs49">val epoch: int</div>
<div class="fsdocs-tip" id="fs50">val batches: seq<int * Tensor * Tensor></div>
<div class="fsdocs-tip" id="fs51">member DataLoader.epoch: ?numBatches: int -> seq<int * Tensor * Tensor></div>
<div class="fsdocs-tip" id="fs52">val i: int</div>
<div class="fsdocs-tip" id="fs53">member ModelBase.reverseDiff: ?nestingTag: uint32 -> unit</div>
<div class="fsdocs-tip" id="fs54">val l: Tensor</div>
<div class="fsdocs-tip" id="fs55">member VAEBase.loss: x: Tensor * ?normalize: bool -> Tensor</div>
<div class="fsdocs-tip" id="fs56">member Optimizer.step: unit -> unit</div>
<div class="fsdocs-tip" id="fs57">val print: x: 'a -> unit<br /><em><summary>
Print the given value to the console using the '%A' printf format specifier
</summary></em></div>
<div class="fsdocs-tip" id="fs58">val validLoss: Tensor</div>
<div class="fsdocs-tip" id="fs59">Multiple items<br />module Seq
from DiffSharp.Util<br /><em><summary>
Contains extensions to the F# Seq module.
</summary></em><br /><br />--------------------<br />module Seq
from Microsoft.FSharp.Collections<br /><em><summary>Contains operations for working with values of type <see cref="T:Microsoft.FSharp.Collections.seq`1" />.</summary></em></div>
<div class="fsdocs-tip" id="fs60">val sumBy: projection: ('T -> 'U) -> source: seq<'T> -> 'U (requires member (+) and member get_Zero)<br /><em><summary>Returns the sum of the results generated by applying the function to each element of the sequence.</summary><br /><remarks>The generated elements are summed using the <c>+</c> operator and <c>Zero</c> property associated with the generated type.</remarks><br /><param name="projection">A function to transform items from the input sequence into the type that will be summed.</param><br /><param name="source">The input sequence.</param><br /><returns>The computed sum.</returns><br /><example id="sumby-1"><code lang="fsharp">
let input = [ "aa"; "bbb"; "cc" ]
input |&gt; Seq.sumBy (fun s -&gt; s.Length)
</code>
Evaluates to <c>7</c>.
</example></em></div>
<div class="fsdocs-tip" id="fs61">property MNIST.length: int with get</div>
</div>
</div>
<div class="row">
<div class="span3"></div>
<div class="span9">
<hr>
<p style="height:50px; display: table-cell; vertical-align: bottom;">© Copyright 2021, DiffSharp Contributors.</p>
<br>
</div>
</div>
</div>
<!-- BEGIN SEARCH BOX: this adds support for the search box -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/JavaScript-autoComplete/1.0.4/auto-complete.css" />
<script type="text/javascript">var fsdocs_search_baseurl = 'https://diffsharp.github.io/'</script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/lunr.js/2.3.8/lunr.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/JavaScript-autoComplete/1.0.4/auto-complete.min.js"></script>
<script type="text/javascript" src="https://diffsharp.github.io/content/fsdocs-search.js"></script>
<!-- END SEARCH BOX: this adds support for the search box -->
</body>
</html>