Darknet/YOLO v6.0-37-gb57f9029
Object Detection Framework
 
Loading...
Searching...
No Matches
Apple MPS Backend

MPS/Metal inference entry points and helpers. More...

Namespaces

namespace  Darknet
 The namespace for the C++ Darknet API.
 

Functions

bool mps_avgpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 
static bool mps_avgpool_forward (const Darknet::Layer &l, const float *input, float *output)
 
bool mps_connected_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute connected layer forward using MPS (batchnorm/activation).
 
static bool mps_connected_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 
bool mps_convolution_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute convolution forward using MPS.
 
static bool mps_convolution_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 
static bool mps_convolution_forward (const Darknet::Layer &l, const float *input, float *output)
 
static bool mps_convolution_forward (const Darknet::Layer &l, const float *input, float *output, const char **reason)
 
void mps_flush_deferred_output (const Darknet::Layer *layer)
 
void mps_flush_output_if_needed (const Darknet::Layer *layer, float *output)
 
bool mps_gemm (int TA, int TB, int M, int N, int K, float ALPHA, float *A, int lda, float *B, int ldb, float BETA, float *C, int ldc)
 Try to execute GEMM using MPS.
 
bool mps_global_avgpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to execute global avgpool forward using MPS.
 
static bool mps_global_avgpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 
bool mps_is_available ()
 Returns true if MPS is available and initialized.
 
bool mps_is_output_deferred (const Darknet::Layer *layer)
 
bool mps_layer_can_run (const Darknet::Layer &l, bool train)
 
bool mps_maxpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 
static bool mps_maxpool_forward (const Darknet::Layer &l, const float *input, float *output)
 
const Darknet::Layer * mps_prev_layer (const Darknet::NetworkState &state)
 
bool mps_reorg_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to execute reorg using a Metal kernel on GPU.
 
static bool mps_reorg_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 
bool mps_route_forward (const Darknet::Layer &l, const Darknet::Network &net, float *output, bool defer_readback, const char **reason)
 Try to concatenate route inputs using MPS.
 
static bool mps_route_forward (const Darknet::Layer &l, const Darknet::Network &net, float *output, const char **reason)
 
static bool mps_shortcut_forward (const Darknet::Layer &l, const Darknet::Layer *from, const float *input, float *output, const char **reason)
 
bool mps_shortcut_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const Darknet::Layer *from, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute shortcut add using MPS.
 
bool mps_should_defer_readback (const Darknet::NetworkState &state)
 
bool mps_softmax_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 Try to execute softmax on GPU.
 
bool mps_upsample_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to upsample using a Metal kernel on GPU.
 
static bool mps_upsample_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 

Detailed Description

MPS/Metal inference entry points and helpers.

Function Documentation

◆ mps_avgpool_forward() [1/2]

bool mps_avgpool_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
const char **  reason 
)
Here is the caller graph for this function:

◆ mps_avgpool_forward() [2/2]

static bool mps_avgpool_forward ( const Darknet::Layer l,
const float *  input,
float *  output 
)
inline static
Here is the call graph for this function:

◆ mps_connected_forward() [1/2]

bool mps_connected_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
bool *  activation_applied,
const char **  reason 
)

Try to execute connected layer forward using MPS (batchnorm/activation).

Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_connected_forward() [2/2]

static bool mps_connected_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_convolution_forward() [1/4]

bool mps_convolution_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
bool *  activation_applied,
const char **  reason 
)

Try to execute convolution forward using MPS.

Parameters
defer_readbackIf true, output may remain on GPU for a subsequent MPS layer.
activation_appliedSet to true if MPS applied the activation.
reasonOptional fallback reason string when returning false.
Here is the caller graph for this function:

◆ mps_convolution_forward() [2/4]

static bool mps_convolution_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_convolution_forward() [3/4]

static bool mps_convolution_forward ( const Darknet::Layer l,
const float *  input,
float *  output 
)
inline static
Here is the call graph for this function:

◆ mps_convolution_forward() [4/4]

static bool mps_convolution_forward ( const Darknet::Layer l,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_flush_deferred_output()

void mps_flush_deferred_output ( const Darknet::Layer layer)
Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_flush_output_if_needed()

void mps_flush_output_if_needed ( const Darknet::Layer layer,
float *  output 
)
Here is the caller graph for this function:

◆ mps_gemm()

bool mps_gemm ( int  TA,
int  TB,
int  M,
int  N,
int  K,
float  ALPHA,
float *  A,
int  lda,
float *  B,
int  ldb,
float  BETA,
float *  C,
int  ldc 
)

Try to execute GEMM using MPS.

Returns true if MPS handled the op.

◆ mps_global_avgpool_forward() [1/2]

bool mps_global_avgpool_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
const char **  reason 
)

Try to execute global avgpool forward using MPS.

Here is the caller graph for this function:

◆ mps_global_avgpool_forward() [2/2]

static bool mps_global_avgpool_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_is_available()

bool mps_is_available ( )

Returns true if MPS is available and initialized.

◆ mps_is_output_deferred()

bool mps_is_output_deferred ( const Darknet::Layer layer)
Here is the caller graph for this function:

◆ mps_layer_can_run()

bool mps_layer_can_run ( const Darknet::Layer l,
bool  train 
)
Here is the caller graph for this function:

◆ mps_maxpool_forward() [1/2]

bool mps_maxpool_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
const char **  reason 
)
Here is the caller graph for this function:

◆ mps_maxpool_forward() [2/2]

static bool mps_maxpool_forward ( const Darknet::Layer l,
const float *  input,
float *  output 
)
inline static
Here is the call graph for this function:

◆ mps_prev_layer()

const Darknet::Layer * mps_prev_layer ( const Darknet::NetworkState state)
Here is the caller graph for this function:

◆ mps_reorg_forward() [1/2]

bool mps_reorg_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
const char **  reason 
)

Try to execute reorg using a Metal kernel on GPU.

Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_reorg_forward() [2/2]

static bool mps_reorg_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_route_forward() [1/2]

bool mps_route_forward ( const Darknet::Layer l,
const Darknet::Network net,
float *  output,
bool  defer_readback,
const char **  reason 
)

Try to concatenate route inputs using MPS.

Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_route_forward() [2/2]

static bool mps_route_forward ( const Darknet::Layer l,
const Darknet::Network net,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_shortcut_forward() [1/2]

static bool mps_shortcut_forward ( const Darknet::Layer l,
const Darknet::Layer from,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function:

◆ mps_shortcut_forward() [2/2]

bool mps_shortcut_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const Darknet::Layer from,
const float *  input,
float *  output,
bool  defer_readback,
bool *  activation_applied,
const char **  reason 
)

Try to execute shortcut add using MPS.

Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_should_defer_readback()

bool mps_should_defer_readback ( const Darknet::NetworkState state)
Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_softmax_forward()

bool mps_softmax_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)

Try to execute softmax on GPU.

Here is the call graph for this function:
Here is the caller graph for this function:

◆ mps_upsample_forward() [1/2]

bool mps_upsample_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
bool  defer_readback,
const char **  reason 
)

Try to upsample using a Metal kernel on GPU.

Here is the caller graph for this function:

◆ mps_upsample_forward() [2/2]

static bool mps_upsample_forward ( const Darknet::Layer l,
const Darknet::Layer prev,
const float *  input,
float *  output,
const char **  reason 
)
inline static
Here is the call graph for this function: