Skip to content

autograd #

fn add_gate #

fn add_gate[T]() &AddGate[T]

fn cos_gate #

fn cos_gate[T](a &Variable[T]) &CosGate[T]

fn ctx #

fn ctx[T]() &Context[T]

Contexts can only be initialized as empty, and a generic type must be provided

fn divide_gate #

fn divide_gate[T](a &Variable[T], b &Variable[T]) &DivideGate[T]

fn exp_gate #

fn exp_gate[T](a &Variable[T]) &ExpGate[T]

fn gate_backward #

fn gate_backward[T](gate Gate, payload &Payload[T]) ![]&vtl.Tensor[T]

Todo: Implement this somehow :D

fn gate_cache #

fn gate_cache[T](gate Gate, mut result Variable[T], args ...CacheParam) !

Todo: Implement this somehow :D

fn matmul_gate #

fn matmul_gate[T](a &Variable[T], b &Variable[T]) &MatMulGate[T]

fn multiply_gate #

fn multiply_gate[T](a &Variable[T], b &Variable[T]) &MultiplyGate[T]

fn node #

fn node[T](gate Gate, parents []&Variable[T], payload &Payload[T], name string) &Node[T]

node

fn payload #

fn payload[T](variable &Variable[T]) &Payload[T]

fn pow_gate #

fn pow_gate[T](a &Variable[T], b &Variable[T]) &PowGate[T]

fn register #

fn register[T](name string, gate Gate, result &Variable[T], parents []&Variable[T]) !

fn sin_gate #

fn sin_gate[T](a &Variable[T]) &SinGate[T]

fn subtract_gate #

fn subtract_gate[T]() &SubstractGate[T]

fn tan_gate #

fn tan_gate[T](a &Variable[T]) &TanGate[T]

fn variable #

fn variable[T](context &Context[T], value &vtl.Tensor[T], data VariableData) &Variable[T]

variable

interface CacheParam #

interface CacheParam {}

interface Gate #

interface Gate {
	// backward(payload &Payload) []&vtl.Tensor
	// cache(mut result Variable, args ...CacheParam)
}

Gate is an object that can cache the result of an operation, as well as backpropagate a payload along the computational graph

Structs that implement this interface can add instance variables if additional caching is needed, and these need to be populated when writing the cached operation

Todo: Make this generic once it works as expected

fn (AddGate[T]) backward #

fn (g &AddGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (AddGate[T]) cache #

fn (g &AddGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (Context[T]) len #

fn (ctx &Context[T]) len() int

fn (Context[T]) push #

fn (mut ctx Context[T]) push[T](node &Node[T])

fn (Context[T]) last #

fn (ctx &Context[T]) last[T]() !&Node[T]

fn (Context[T]) pop #

fn (mut ctx Context[T]) pop[T]() !&Node[T]

fn (Context[T]) variable #

fn (ctx &Context[T]) variable[T](value &vtl.Tensor[T], data ContextVariableData) &Variable[T]

fn (Context[T]) str #

fn (ctx &Context[T]) str() string

fn (CosGate[T]) backward #

fn (g &CosGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (CosGate[T]) cache #

fn (g &CosGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (DivideGate[T]) backward #

fn (g &DivideGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (DivideGate[T]) cache #

fn (g &DivideGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (ExpGate[T]) backward #

fn (g &ExpGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (ExpGate[T]) cache #

fn (g &ExpGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (MatMulGate[T]) backward #

fn (g &MatMulGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (MatMulGate[T]) cache #

fn (g &MatMulGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (MultiplyGate[T]) backward #

fn (g &MultiplyGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (MultiplyGate[T]) cache #

fn (g &MultiplyGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (PowGate[T]) backward #

fn (g &PowGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (PowGate[T]) cache #

fn (g &PowGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (SinGate[T]) backward #

fn (g &SinGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (SinGate[T]) cache #

fn (g &SinGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (SubstractGate[T]) backward #

fn (g &SubstractGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (SubstractGate[T]) cache #

fn (g &SubstractGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (TanGate[T]) backward #

fn (g &TanGate[T]) backward[T](payload &Payload[T]) ![]&vtl.Tensor[T]

fn (TanGate[T]) cache #

fn (g &TanGate[T]) cache[T](mut result Variable[T], args ...CacheParam) !

fn (Variable[T]) add #

fn (v &Variable[T]) add[T](other &Variable[T]) !&Variable[T]

add Adds two variables together.

fn (Variable[T]) backprop #

fn (mut v Variable[T]) backprop[T]() !

backprop Back propagates an operation along a computational graph. This operation will destroy the computational graph, populating the gradients for all variables that are predecessors of the Variable this is called on. Even if this is called on the first node in a graph, it will destroy all descendants of this variable stored by the Context

fn (Variable[T]) cos #

fn (v &Variable[T]) cos[T]() !&Variable[T]

cos Cosine of a variable.

fn (Variable[T]) divide #

fn (v &Variable[T]) divide[T](other &Variable[T]) !&Variable[T]

divide Divides two variables.

fn (Variable[T]) exp #

fn (v &Variable[T]) exp[T]() !&Variable[T]

exp Exponentiates a variable.

fn (Variable[T]) is_grad_needed #

fn (v &Variable[T]) is_grad_needed() bool

fn (Variable[T]) matmul #

fn (v &Variable[T]) matmul[T](other &Variable[T]) !&Variable[T]

matmul Multiplies two matrices.

fn (Variable[T]) multiply #

fn (v &Variable[T]) multiply[T](other &Variable[T]) !&Variable[T]

multiply Multiplies two variables.

fn (Variable[T]) pow #

fn (v &Variable[T]) pow[T](other &Variable[T]) !&Variable[T]

pow Raises a variable to a power.

fn (Variable[T]) sin #

fn (v &Variable[T]) sin[T]() !&Variable[T]

sin Sine of a variable.

fn (Variable[T]) slice #

fn (v &Variable[T]) slice[T](idx ...[]int) !&Variable[T]

fn (Variable[T]) slice_hilo #

fn (v &Variable[T]) slice_hilo[T](idx1 []int, idx2 []int) !&Variable[T]

fn (Variable[T]) str #

fn (v &Variable[T]) str() string

fn (Variable[T]) subtract #

fn (v &Variable[T]) subtract[T](other &Variable[T]) !&Variable[T]

subtract Subtracts two variables.

fn (Variable[T]) tan #

fn (v &Variable[T]) tan[T]() !&Variable[T]

tan Tan of a variable.

struct AddGate #

struct AddGate[T] {}

struct Context #

@[heap]
struct Context[T] {
pub mut:
	// A list of all variables present in an operation.
	// This list can contain duplicates
	nodes []&Node[T]
	// If no_grad is set to true, operations will not
	// be cached, and backpropagation will not be possible
	no_grad bool
}

Context keeps track of the computational graph for a number of operations. Variables that interact with each other must belong to the same context, or state will be lost while tracking operations done.

struct ContextVariableData #

@[params]
struct ContextVariableData {
pub:
	requires_grad bool = true
}

struct CosGate #

struct CosGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
}

struct DivideGate #

struct DivideGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
	b &Variable[T] = unsafe { nil }
}

struct ExpGate #

struct ExpGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
}

struct MatMulGate #

struct MatMulGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
	b &Variable[T] = unsafe { nil }
}

struct MultiplyGate #

struct MultiplyGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
	b &Variable[T] = unsafe { nil }
}

struct Node #

@[heap]
struct Node[T] {
pub:
	// A Gate containing a backwards and cache function for
	// a node
	gate Gate
pub mut:
	// The variables that created this node
	parents []&Variable[T]
	// Wrapper around a Tensor, contains operation data
	payload &Payload[T] = unsafe { nil }
	// Debug use only, contains a name for a node
	name string
}

Node is a member of a computational graph that contains a reference to a gate, as well as the parents of the operation and the payload that resulted from the operation.

struct Payload #

@[heap]
struct Payload[T] {
pub:
	// Contents of the payload
	variable &Variable[T] = unsafe { nil }
}

Payload is a simple wrapper around a Variable. It is only abstracted out to be a bit more explicit that it is being passed around through an operation

struct PowGate #

struct PowGate[T] {
	a &Variable[T] = unsafe { nil }
	b &Variable[T] = unsafe { nil }
}

struct SinGate #

struct SinGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
}

struct SubstractGate #

struct SubstractGate[T] {}

struct TanGate #

struct TanGate[T] {
pub:
	a &Variable[T] = unsafe { nil }
}

struct Variable #

@[heap]
struct Variable[T] {
pub mut:
	// The value of the Variable.  This should not be edited outside
	// of Variable operations, as other edits will not be tracked
	// and will lead to incorrect results
	value &vtl.Tensor[T] = unsafe { nil }
	// The graph the variable is associated with.  This is a reference,
	// as a variable does not own its context
	context &Context[T] = unsafe { nil }
	// The gradient of the Variable.  This is set as a reference to
	// the value of a Variable unless `backprop` has been called, in
	// which case all related Variables will have their gradient
	// updated correctly
	grad &vtl.Tensor[T] = unsafe { nil }
	// If set to true, this variable will track its operations,
	// otherwise it will act similar to a vtl.Tensor, only calculating
	// forward operations
	requires_grad bool
}

Variable is an abstraction of a vtl.Tensor that tracks the operations done to the vtl.Tensor. It also keeps track of the gradient of the operation if a Variable needs to backpropagate. This is the fundamental object used in automatic differentiation, as well as the neural network aspects of VTL

struct VariableData #

@[params]
struct VariableData {
	requires_grad bool = true
}