Skip to content

Layers #54

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 38 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 37 commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
ac67f77
added a graph class
Dec 18, 2024
5f36230
gtest dir fix
Dec 19, 2024
4867937
CMake fix 1.0
Feb 25, 2025
fb57554
CMake fix 2.0
Feb 25, 2025
f2afeea
CMake fix 2.0
Feb 25, 2025
82f0dc4
clang-tidy, clang-format and ubuntu-build fix 1.0
Feb 27, 2025
2f1141a
clang-tidy, clang-format, ubuntu-build and cmake fix 2.0
Feb 27, 2025
fdfe528
CMake, builts, clang format/tidy fix 3.0
Feb 27, 2025
116cf4a
tensor add 1.0
Mar 4, 2025
55a9ee8
graph pr fix
Mar 23, 2025
b3e62d4
tensor class v0.1
Mar 23, 2025
f65be87
tensor v0.2
Mar 24, 2025
7e9a389
tensor v0.3
Mar 24, 2025
f2547da
tensor v0.3
Mar 25, 2025
a89b0ba
tensor v0.4
Mar 25, 2025
9dcf39d
tensor built fix
Mar 25, 2025
fbf472e
tensor fix
Mar 25, 2025
ad52dd0
codecov fix
Mar 30, 2025
2fc6832
macos-clang-build
Mar 30, 2025
0efcb04
clang-format
Mar 30, 2025
4d68495
clang-format 2
Mar 30, 2025
2c2d708
clang-tidy
Mar 30, 2025
7d84878
errors fix
Mar 30, 2025
d07c3d8
errors fix 2
Mar 30, 2025
96b403c
error fix 3
Mar 30, 2025
7be04f0
error fix 4
Mar 30, 2025
aaa5d80
errors fix 5
Mar 30, 2025
0ca0331
errors fix 7
Mar 30, 2025
ff1a8f8
add layer, change graph
Mar 31, 2025
abd51c9
clang-tidy fix
Mar 31, 2025
e901675
clang-tidy and clang-format fix
Mar 31, 2025
f379e51
clang-format and clang-tidy fix 2
Mar 31, 2025
b185542
clang-format fix
Mar 31, 2025
5ae7ad8
clang-format fix 2
Mar 31, 2025
e1350f0
tensor fix
Apr 7, 2025
3155cd8
clang-tidy fix
Apr 7, 2025
9de22dc
add layer's mocks, change graph
Lepsps May 13, 2025
d64ad06
throws fix
Jul 8, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ jobs:
submodules: recursive
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y cmake ninja-build ccache scons
- name: ccache
uses: hendrikmuhs/[email protected]
Expand All @@ -37,6 +38,7 @@ jobs:
submodules: recursive
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y cmake ninja-build ccache scons
- name: ccache
uses: hendrikmuhs/[email protected]
Expand Down Expand Up @@ -81,6 +83,7 @@ jobs:
submodules: true
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y cmake ninja-build ccache gcovr lcov scons
- uses: actions/checkout@v4
with:
Expand All @@ -102,7 +105,7 @@ jobs:
cmake --build build --parallel
- name: Test
run: |
build/bin/run_tests
build/test/run_test
env:
CTEST_OUTPUT_ON_FAILURE: 1
- name: Generate lcov Coverage Data
Expand Down
19 changes: 13 additions & 6 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,15 +1,22 @@
cmake_minimum_required(VERSION 3.20)
set(CMAKE_CXX_STANDARD 11)

project(cpp_template)

include(cmake/configure.cmake)
set(ProjectName "itlab")
project(${ProjectName})

include_directories(include)

enable_testing()

add_subdirectory(3rdparty)
add_subdirectory(app)
add_subdirectory(include)

add_subdirectory(3rdparty/googletest)
add_subdirectory(src)
add_subdirectory(test)

# REPORT
message( STATUS "")
message( STATUS "General configuration for ${PROJECT_NAME}")
message( STATUS "======================================")
message( STATUS "")
message( STATUS " Configuration: ${CMAKE_BUILD_TYPE}")
message( STATUS "")
39 changes: 39 additions & 0 deletions include/graph/graph.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
#ifndef GRAPH_H
#define GRAPH_H

#include <string>  // FIX: was missing — std::string is used by getLayersTypeVector()
#include <unordered_map>
#include <vector>

#include "./layer/layer.h"
#include "./tensor/tensor.h"

/// Directed graph of Layer nodes identified by integer ids.
/// Layers are referenced by non-owning pointers; the destructor's cleanup
/// policy is defined in the .cpp — TODO(review): confirm ownership there.
class Network {
 private:
  // Layer id -> layer pointer (non-owning, presumably — confirm in .cpp).
  std::unordered_map<int, Layer*> layers_;
  // Tensor fed to the start layer by run().
  Tensor<double> inputTensor_;
  // Destination for the final layer's output; non-owning pointer set by
  // setOutput() — NOTE(review): confirm it is never dangling when run() fires.
  Tensor<double>* outputTensor_;
  int start_ = -1;  // id of the input layer; -1 = unset
  int end_ = -1;    // id of the output layer; -1 = unset
  // BFS worker shared by hasPath() and inference(); semantics of `flag` and
  // the optional visit-order output vector are defined in the .cpp.
  bool bfs_helper(int start, int vert, bool flag,
                  std::vector<int>* v_ord) const;

 public:
  Network();

  /// Adds `lay` to the graph, optionally wiring edges from `inputs` ids and
  /// to `outputs` ids. Returns false on failure (exact rule in the .cpp).
  bool addLayer(Layer& lay, const std::vector<int>& inputs = {},
                const std::vector<int>& outputs = {});
  void addEdge(Layer& layPrev, Layer& layNext);
  void removeEdge(Layer& layPrev, Layer& layNext);
  void removeLayer(Layer& lay);
  int getLayers() const;  // number of layers
  int getEdges() const;   // number of edges
  bool isEmpty() const;
  bool hasPath(Layer& layPrev, Layer& layNext) const;
  // Returns layer ids reachable from `start` (traversal order per .cpp).
  std::vector<int> inference(int start) const;
  void setInput(Layer& lay, Tensor<double>& vec);
  void setOutput(Layer& lay, Tensor<double>& vec);
  // Executes the graph from the configured input to the configured output.
  void run();
  // Type names of all layers, for introspection/diagnostics.
  std::vector<std::string> getLayersTypeVector() const;
  ~Network();
};

#endif  // GRAPH_H
31 changes: 31 additions & 0 deletions include/layer/layer.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#ifndef LAYER_H
#define LAYER_H

#include <list>
#include <string>  // FIX: was missing — std::string appears in the interface

#include "./tensor/tensor.h"

/// Construction attributes for a Layer; currently only the graph id.
struct LayerAttributes {
  int id = -1;  // -1 marks "not yet assigned"
};

/// Abstract base class for all network layers.
/// Derived classes implement exec(), get_output_shape() and get_type_name().
class Layer {
 protected:
  // FIX: Layer() = default previously left id_ indeterminate; initialize to
  // -1 to match the LayerAttributes default.
  int id_ = -1;

 public:
  Layer() = default;
  explicit Layer(const LayerAttributes& attrs) : id_(attrs.id) {}
  virtual ~Layer() = default;
  void setID(int id) { id_ = id; }
  int getID() const { return id_; }
  // Human-readable description; default implementation lives in the .cpp.
  virtual std::string getInfoString() const;
  // Computes this layer's output from `input` into `output`.
  virtual void exec(const Tensor<double>& input, Tensor<double>& output) = 0;
  virtual Shape get_output_shape() = 0;

  virtual std::string get_type_name() const = 0;
  void addNeighbor(Layer* neighbor);
  void removeNeighbor(Layer* neighbor);
  // Adjacency list of downstream layers; public so Network can traverse it —
  // NOTE(review): consider making this private with an accessor.
  std::list<Layer*> neighbors_;
};
#endif  // LAYER_H
104 changes: 104 additions & 0 deletions include/tensor/tensor.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
#ifndef TENSOR_H
#define TENSOR_H

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

// Dimensions of a tensor plus the cached product of all dimensions.
struct Shape {
  std::vector<size_t> dimensions;  // size of each axis, outermost first
  size_t total_elements;           // product of dimensions (cached)

  Shape() : dimensions(), total_elements(0) {}
  // Defined out of line (in the .cpp); computes total_elements from dims.
  // NOTE(review): single-argument and not explicit — implicit conversion from
  // std::vector<size_t> is possible; confirm that is intended.
  Shape(std::vector<size_t> dims);
  size_t get_rank() const;  // number of dimensions
};

// Memory layout tag: NCHW, NHWC, or generic N-dimensional (row-major).
enum Layout : std::uint8_t { kNchw, kNhwc, kNd };

// N-dimensional dense tensor with a configurable memory layout.
// Data is stored flat in `data`; at()/get_linear_index() map multi-indices
// to flat offsets according to `layout`.
template <typename T>
class Tensor {
 public:
  Shape shape;          // dimensions and element count
  Layout layout;        // memory layout used by get_linear_index()
  std::vector<T> data;  // flat element storage, sized to shape.total_elements

  Tensor() : shape(), layout(Layout::kNd), data() {}
  Tensor(const Shape &sh, Layout l = Layout::kNd);
  Tensor(std::vector<size_t> dims, Layout l = Layout::kNd);
  // Maps a multi-dimensional index to a flat offset into `data`;
  // throws on rank mismatch or out-of-range indices.
  size_t get_linear_index(const std::vector<size_t> &indices) const;
  T &at(const std::vector<size_t> &indices);
  const T &at(const std::vector<size_t> &indices) const;
};

// Constructs a tensor of the given shape; storage is value-initialized
// (zero for arithmetic T) and sized to shape.total_elements.
template <typename T>
Tensor<T>::Tensor(const Shape &sh, Layout l) : shape(sh), layout(l), data(sh.total_elements) {}

// Convenience overload: builds the Shape from a raw dimension list, then
// delegates to the Shape-based constructor.
template <typename T>
Tensor<T>::Tensor(std::vector<size_t> dims, Layout l) : Tensor(Shape(std::move(dims)), l) {}

// Maps `indices` (one per dimension) to a flat offset into `data`.
//
// Throws:
//  - std::runtime_error if indices.size() != rank
//  - std::out_of_range  if any index exceeds its dimension
//  - std::logic_error   on invalid rank-0 access or bad kNhwc usage
template <typename T>
size_t Tensor<T>::get_linear_index(const std::vector<size_t> &indices) const {
  if (indices.size() != shape.get_rank()) {
    throw std::runtime_error("Incorrect number of indices provided.");
  }
  for (size_t i = 0; i < indices.size(); ++i) {
    if (indices[i] >= shape.dimensions[i]) {
      // FIX: the original line had a stray ')' (a syntax error) and built a
      // message that never named the offending axis.
      throw std::out_of_range("Index out of range for dimension " +
                              std::to_string(i));
    }
  }

  size_t linear_index = 0;
  size_t N = shape.get_rank();

  if (N == 0) {
    // Rank-0 access: a scalar (1 element) or an empty tensor both map to
    // offset 0 when no indices are given; anything else is a logic error.
    if (shape.total_elements == 1 && indices.empty())
      return 0;
    if (shape.total_elements == 0 && indices.empty())
      return 0;
    throw std::logic_error("Invalid access to rank-0 tensor or empty tensor.");
  }

  if (N == 4 && layout == Layout::kNhwc) {
    // Defensive: N should equal dimensions.size(); kept from the original.
    if (shape.dimensions.size() != 4) {
      throw std::logic_error(
          "kNhwc layout is specified for a tensor not of rank 4.");
    }

    // Dimensions are stored in {N, C, H, W} order regardless of layout;
    // only the memory order changes to N, H, W, C here.
    size_t C_dim = shape.dimensions[1];
    size_t H_dim = shape.dimensions[2];
    size_t W_dim = shape.dimensions[3];

    linear_index = indices[0] * (H_dim * W_dim * C_dim) +
                   indices[2] * (W_dim * C_dim) + indices[3] * (C_dim) +
                   indices[1];
  } else {
    // Generic row-major mapping: stride of axis i is the product of all
    // dimensions after i.
    for (size_t i = 0; i < N; ++i) {
      size_t term_stride = 1;
      for (size_t j = i + 1; j < N; ++j) {
        term_stride *= shape.dimensions[j];
      }
      linear_index += indices[i] * term_stride;
    }
  }
  return linear_index;
}

// Mutable element access: validates `indices` via get_linear_index(), which
// throws on rank mismatch or an out-of-range index.
template <typename T>
T &Tensor<T>::at(const std::vector<size_t> &indices) {
  const size_t flat_offset = get_linear_index(indices);
  return data[flat_offset];
}

// Read-only element access; identical validation and mapping as the
// mutable overload above.
template <typename T>
const T &Tensor<T>::at(const std::vector<size_t> &indices) const {
  const size_t flat_offset = get_linear_index(indices);
  return data[flat_offset];
}

#endif  // TENSOR_H
7 changes: 7 additions & 0 deletions src/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Collect all project headers and sources.
# NOTE(review): GLOB_RECURSE is not re-evaluated when files are added/removed;
# prefer explicit file lists or the CONFIGURE_DEPENDS flag.
file(GLOB_RECURSE HEADER_FILES "${CMAKE_SOURCE_DIR}/include/*.h")
file(GLOB_RECURSE SOURCE_FILES "${CMAKE_SOURCE_DIR}/src/*.cpp")

# Headers are passed to add_library so IDEs display them alongside sources.
# FIX: removed a redundant target_sources() call that added ${HEADER_FILES}
# to the target a second time.
add_library(${ProjectName} STATIC ${SOURCE_FILES} ${HEADER_FILES})

target_include_directories(${ProjectName} PUBLIC ${CMAKE_SOURCE_DIR}/src)
Loading
Loading