Darknet/YOLO v6.0-37-gb57f9029
Object Detection Framework
 
Loading...
Searching...
No Matches
apple_mps.mm File Reference
#include "darknet_internal.hpp"
#include "darknet_layers.hpp"
#include "apple_mps.hpp"
#include "metal_backend.hpp"
#include "gemm.hpp"
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>
#include <cstring>
#include <memory>
#include <unordered_set>
Include dependency graph for apple_mps.mm:

Classes

class  MpsBatchNormDataSource
 

Functions

bool mps_avgpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 
bool mps_connected_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute connected layer forward using MPS (batchnorm/activation).
 
bool mps_convolution_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute convolution forward using MPS.
 
void mps_flush_deferred_output (const Darknet::Layer *layer)
 
void mps_flush_output_if_needed (const Darknet::Layer *layer, float *output)
 
bool mps_gemm (int TA, int TB, int M, int N, int K, float ALPHA, float *A, int lda, float *B, int ldb, float BETA, float *C, int ldc)
 Try to execute GEMM using MPS.
 
bool mps_global_avgpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to execute global avgpool forward using MPS.
 
bool mps_is_available ()
 Returns true if MPS is available and initialized.
 
bool mps_is_output_deferred (const Darknet::Layer *layer)
 
bool mps_layer_can_run (const Darknet::Layer &l, bool train)
 
bool mps_maxpool_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 
const Darknet::Layer * mps_prev_layer (const Darknet::NetworkState &state)
 
bool mps_reorg_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to execute reorg using a Metal kernel on GPU.
 
bool mps_route_forward (const Darknet::Layer &l, const Darknet::Network &net, float *output, bool defer_readback, const char **reason)
 Try to concatenate route inputs using MPS.
 
bool mps_shortcut_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const Darknet::Layer *from, const float *input, float *output, bool defer_readback, bool *activation_applied, const char **reason)
 Try to execute shortcut add using MPS.
 
bool mps_should_defer_readback (const Darknet::NetworkState &state)
 
bool mps_softmax_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, const char **reason)
 Try to execute softmax on GPU.
 
bool mps_upsample_forward (const Darknet::Layer &l, const Darknet::Layer *prev, const float *input, float *output, bool defer_readback, const char **reason)
 Try to upsample using a Metal kernel on GPU.