-
Notifications
You must be signed in to change notification settings - Fork 0
/
my_thesis.out
70 lines (70 loc) · 4.67 KB
/
my_thesis.out
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
\BOOKMARK [0][-]{chapter*.2}{Abstract}{}% 1
\BOOKMARK [0][-]{chapter.1}{Introduction}{}% 2
\BOOKMARK [1][-]{section.1.1}{Sparse Data is Very Common in Machine Learning}{chapter.1}% 4
\BOOKMARK [1][-]{section.1.2}{The Advantages of Sparse Data}{chapter.1}% 5
\BOOKMARK [2][-]{subsection.1.2.1}{A Real Case of Sparse Dataset}{section.1.2}% 6
\BOOKMARK [1][-]{section.1.3}{Solution: Sparse Representation and Operations}{chapter.1}% 7
\BOOKMARK [0][-]{chapter.2}{Sparsity and Formats}{}% 8
\BOOKMARK [1][-]{section.2.1}{Definition}{chapter.2}% 9
\BOOKMARK [1][-]{section.2.2}{Formats}{chapter.2}% 10
\BOOKMARK [2][-]{subsection.2.2.1}{Matrices}{section.2.2}% 11
\BOOKMARK [2][-]{subsection.2.2.2}{Tensors - Multi-dimensional arrays}{section.2.2}% 12
\BOOKMARK [0][-]{chapter.3}{The Deeplearning4j Library}{}% 13
\BOOKMARK [1][-]{section.3.1}{Architecture of the library}{chapter.3}% 14
\BOOKMARK [1][-]{section.3.2}{The Importance of Nd4j in the Library}{chapter.3}% 15
\BOOKMARK [1][-]{section.3.3}{Nd4j Needs a Sparse Representation}{chapter.3}% 16
\BOOKMARK [0][-]{chapter.4}{Structure of a Multi-dimensional Array}{}% 17
\BOOKMARK [1][-]{section.4.1}{Storing an Array}{chapter.4}% 18
\BOOKMARK [2][-]{subsection.4.1.1}{Data Buffer}{section.4.1}% 19
\BOOKMARK [2][-]{subsection.4.1.2}{Parameters of an Array}{section.4.1}% 20
\BOOKMARK [1][-]{section.4.2}{Views}{chapter.4}% 21
\BOOKMARK [1][-]{section.4.3}{Indexes}{chapter.4}% 22
\BOOKMARK [1][-]{section.4.4}{Operations}{chapter.4}% 23
\BOOKMARK [0][-]{chapter.5}{Implementation}{}% 24
\BOOKMARK [1][-]{section.5.1}{Hierarchy of Arrays}{chapter.5}% 25
\BOOKMARK [1][-]{section.5.2}{Limitations and Constraints}{chapter.5}% 26
\BOOKMARK [2][-]{subsection.5.2.1}{Storing Off-heap}{section.5.2}% 27
\BOOKMARK [2][-]{subsection.5.2.2}{Workspaces}{section.5.2}% 28
\BOOKMARK [2][-]{subsection.5.2.3}{DataBuffer Length}{section.5.2}% 29
\BOOKMARK [1][-]{section.5.3}{CSR Matrices Implementation}{chapter.5}% 30
\BOOKMARK [2][-]{subsection.5.3.1}{Structure}{section.5.3}% 31
\BOOKMARK [2][-]{subsection.5.3.2}{Put a Value}{section.5.3}% 32
\BOOKMARK [2][-]{subsection.5.3.3}{Get a Sub-array}{section.5.3}% 33
\BOOKMARK [2][-]{subsection.5.3.4}{Limits of this Format}{section.5.3}% 34
\BOOKMARK [1][-]{section.5.4}{COO Tensors}{chapter.5}% 35
\BOOKMARK [2][-]{subsection.5.4.1}{Naive Implementation}{section.5.4}% 36
\BOOKMARK [2][-]{subsection.5.4.2}{Reverse the Coordinates}{section.5.4}% 37
\BOOKMARK [2][-]{subsection.5.4.3}{Put a Value}{section.5.4}% 38
\BOOKMARK [2][-]{subsection.5.4.4}{More Parameters are Needed to Define the Tensors}{section.5.4}% 39
\BOOKMARK [2][-]{subsection.5.4.5}{Sparse Indexes Translation}{section.5.4}% 40
\BOOKMARK [2][-]{subsection.5.4.6}{Computations of the Parameters}{section.5.4}% 41
\BOOKMARK [2][-]{subsection.5.4.7}{Final Implementation}{section.5.4}% 42
\BOOKMARK [2][-]{subsection.5.4.8}{Get a Sub-array}{section.5.4}% 43
\BOOKMARK [0][-]{chapter.6}{Operations}{}% 44
\BOOKMARK [1][-]{section.6.1}{Basic Linear Algebra Subprograms \(BLAS\)}{chapter.6}% 45
\BOOKMARK [1][-]{section.6.2}{Backends}{chapter.6}% 46
\BOOKMARK [2][-]{subsection.6.2.1}{In-Place Routines}{section.6.2}% 47
\BOOKMARK [2][-]{subsection.6.2.2}{Level 1 Routines}{section.6.2}% 48
\BOOKMARK [2][-]{subsection.6.2.3}{Level 2 and Level 3 Routines}{section.6.2}% 49
\BOOKMARK [1][-]{section.6.3}{Libnd4j}{chapter.6}% 50
\BOOKMARK [0][-]{chapter.7}{Results}{}% 51
\BOOKMARK [1][-]{section.7.1}{Storing a Huge Array is now Possible}{chapter.7}% 52
\BOOKMARK [0][-]{chapter.8}{Future}{}% 53
\BOOKMARK [1][-]{section.8.1}{Operations}{chapter.8}% 54
\BOOKMARK [1][-]{section.8.2}{Support of the GPU backend}{chapter.8}% 55
\BOOKMARK [1][-]{section.8.3}{Make the Sparse Array Compliant with the API}{chapter.8}% 56
\BOOKMARK [1][-]{section.8.4}{Support More Sparse Formats}{chapter.8}% 57
\BOOKMARK [1][-]{section.8.5}{Tensor Contraction Indexing}{chapter.8}% 58
\BOOKMARK [1][-]{section.8.6}{Optimizations}{chapter.8}% 59
\BOOKMARK [0][-]{chapter.9}{Conclusion}{}% 60
\BOOKMARK [0][-]{appendix.A}{Appendices}{}% 61
\BOOKMARK [1][-]{section.A.1}{Algorithm Execution Example}{appendix.A}% 62
\BOOKMARK [2][-]{subsection.A.1.1}{Sparse Offset Computation Algorithm}{section.A.1}% 63
\BOOKMARK [2][-]{subsection.A.1.2}{Indexes Translation Algorithm}{section.A.1}% 64
\BOOKMARK [1][-]{section.A.2}{Code Snippets}{appendix.A}% 65
\BOOKMARK [2][-]{subsection.A.2.1}{Extract a sub-array of a CSR matrix}{section.A.2}% 66
\BOOKMARK [2][-]{subsection.A.2.2}{Put a Value into a CSR matrix}{section.A.2}% 67
\BOOKMARK [2][-]{subsection.A.2.3}{Add a value into a buffer}{section.A.2}% 68
\BOOKMARK [0][-]{appendix*.23}{Acknowledgements}{}% 69
\BOOKMARK [0][-]{appendix*.25}{Bibliography}{}% 70