Video extension

The video filters you create can be easily integrated into apps to supply custom video effects and processing.

Understand the tech

A video filter accesses video data as it is captured from the user's local device, modifies it, and then passes the processed data on to the local preview and to remote video channels.

A typical transmission pipeline consists of a chain of procedures, including capture, pre-processing, encoding, transmitting, decoding, post-processing, and playback. To modify the video data in the transmission pipeline, video extensions are inserted into either the pre-processing or post-processing procedure.

Prerequisites

To follow this procedure, you must have:

  • Android Studio 4.1 or higher.
  • Android SDK API Level 24 or higher.
  • A mobile device that runs Android 4.1 or higher.
  • A project to develop in.

Project setup

To integrate an extension into your project:

  1. Unzip the Video SDK to a local directory.
  2. Copy the header files in rtc/sdk/low_level_api/include into your project directory.

You are now ready to develop your extension.

Create a video extension

To build a video filter extension, you use the following APIs:

  • IExtensionVideoFilter: This interface implements receiving, processing, and delivering video data.
  • IExtensionProvider: This interface encapsulates your IExtensionVideoFilter implementation into an extension.

Develop a video filter

Use the IExtensionVideoFilter interface to implement a video filter. You can find the interface in the NGIAgoraMediaNode.h file. You must implement at least the following methods from this interface:

  • getProcessMode
  • getVideoFormatWanted
  • start
  • stop
  • pendVideoFrame (asynchronous mode) or adaptVideoFrame (synchronous mode)
  • setProperty
  • getProperty

The following code sample shows how to use these APIs together to implement a video filter:


#include "ExtensionVideoFilter.h"
#include "../logutils.h"
#include <sstream>

namespace agora {
namespace extension {

ExtensionVideoFilter::ExtensionVideoFilter(agora_refptr<ByteDanceProcessor> byteDanceProcessor):threadPool_(1) {
    byteDanceProcessor_ = byteDanceProcessor;
}

ExtensionVideoFilter::~ExtensionVideoFilter() {
    byteDanceProcessor_->releaseOpenGL();
}

// Set how the SDK communicates with your video filter extension.
void ExtensionVideoFilter::getProcessMode(ProcessMode& mode, bool& independent_thread) {
    mode = ProcessMode::kSync;
    independent_thread = false;
    mode_ = mode;
}

// Set the type and format of the video frame sent to your extension.
void ExtensionVideoFilter::getVideoFormatWanted(rtc::VideoFrameData::Type& type,
                                                rtc::RawPixelBuffer::Format& format) {
    type = rtc::VideoFrameData::Type::kRawPixels;
    format = rtc::RawPixelBuffer::Format::kI420;
}

// Save the Control object and initialize OpenGL.
int ExtensionVideoFilter::start(agora::agora_refptr<Control> control) {
    PRINTF_INFO("ExtensionVideoFilter::start");
    if (!byteDanceProcessor_) {
        return -1;
    }
    if (control) {
        control_ = control;
        byteDanceProcessor_->setExtensionControl(control);
    }
    if (mode_ == ProcessMode::kAsync){
        invoker_id = threadPool_.RegisterInvoker("thread_videofilter");
        auto res = threadPool_.PostTaskWithRes(invoker_id, [byteDanceProcessor=byteDanceProcessor_] {
            return byteDanceProcessor->initOpenGL();
        });
        isInitOpenGL = res.get();
    } else {
        isInitOpenGL = byteDanceProcessor_->initOpenGL();
    }
    return 0;
}

// Release OpenGL.
int ExtensionVideoFilter::stop() {
    PRINTF_INFO("ExtensionVideoFilter::stop");
    if (byteDanceProcessor_) {
        byteDanceProcessor_->releaseOpenGL();
        isInitOpenGL = false;
    }
    return 0;
}

// When mode is set to Async, the SDK and extension transfer video frames through pendVideoFrame and deliverVideoFrame.
rtc::IExtensionVideoFilter::ProcessResult ExtensionVideoFilter::pendVideoFrame(agora::agora_refptr<rtc::IVideoFrame> frame) {
    if (!frame || !isInitOpenGL) {
        return kBypass;
    }

    bool isAsyncMode = (mode_ == ProcessMode::kAsync);
    if (isAsyncMode && byteDanceProcessor_ && control_ && invoker_id >= 0) {
        threadPool_.PostTask(invoker_id, [videoFrame=frame, byteDanceProcessor=byteDanceProcessor_, control=control_] {
            rtc::VideoFrameData srcData;
            videoFrame->getVideoFrameData(srcData);
            byteDanceProcessor->processFrame(srcData);
            control->deliverVideoFrame(videoFrame);
        });
        return kSuccess;
    }
    return kBypass;
}

// When mode is set to Sync, the SDK and extension transfer video frames through adaptVideoFrame.
rtc::IExtensionVideoFilter::ProcessResult ExtensionVideoFilter::adaptVideoFrame(agora::agora_refptr<rtc::IVideoFrame> src,
                                                                                agora::agora_refptr<rtc::IVideoFrame>& dst) {
    if (!isInitOpenGL) {
        return kBypass;
    }
    bool isSyncMode = (mode_ == ProcessMode::kSync);
    if (isSyncMode && byteDanceProcessor_) {
        rtc::VideoFrameData srcData;
        src->getVideoFrameData(srcData);
        byteDanceProcessor_->processFrame(srcData);
        dst = src;
        return kSuccess;
    }
    return kBypass;
}

// Set the property of the video filter.
int ExtensionVideoFilter::setProperty(const char *key, const void *buf,
                                      size_t buf_size) {
    PRINTF_INFO("setProperty %s %s", key, buf);
    std::string stringParameter((char*)buf);
    byteDanceProcessor_->setParameters(stringParameter);
    return 0;
}

// Get the property of the video filter.
int ExtensionVideoFilter::getProperty(const char *key, void *buf, size_t buf_size) {
    return -1;
}
}
}
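
The sample above implements the methods of a class named ExtensionVideoFilter. For reference, a minimal ExtensionVideoFilter.h declaration consistent with that sample might look like the following sketch. The helper headers, member names, and the ThreadPool type are assumptions inferred from the sample, not part of the Agora SDK:

// Hypothetical ExtensionVideoFilter.h sketch matching the sample implementation above.
#pragma once
#include "NGIAgoraMediaNode.h"      // defines rtc::IExtensionVideoFilter; adjust the path to where you copied the headers
#include "ByteDanceProcessor.h"     // hypothetical helper that wraps the third-party processing SDK
#include "ThreadPool.h"             // hypothetical task queue providing RegisterInvoker/PostTask/PostTaskWithRes

namespace agora {
namespace extension {

class ExtensionVideoFilter : public rtc::IExtensionVideoFilter {
public:
    explicit ExtensionVideoFilter(agora_refptr<ByteDanceProcessor> byteDanceProcessor);
    ~ExtensionVideoFilter() override;

    void getProcessMode(ProcessMode& mode, bool& independent_thread) override;
    void getVideoFormatWanted(rtc::VideoFrameData::Type& type,
                              rtc::RawPixelBuffer::Format& format) override;
    int start(agora_refptr<Control> control) override;
    int stop() override;
    ProcessResult pendVideoFrame(agora_refptr<rtc::IVideoFrame> frame) override;
    ProcessResult adaptVideoFrame(agora_refptr<rtc::IVideoFrame> src,
                                  agora_refptr<rtc::IVideoFrame>& dst) override;
    int setProperty(const char* key, const void* buf, size_t buf_size) override;
    int getProperty(const char* key, void* buf, size_t buf_size) override;

private:
    agora_refptr<ByteDanceProcessor> byteDanceProcessor_;  // frame-processing helper (assumed)
    agora_refptr<Control> control_;                        // saved in start()
    ProcessMode mode_ = ProcessMode::kSync;                // communication mode cached in getProcessMode()
    ThreadPool threadPool_;                                // task queue for async mode (assumed helper)
    int invoker_id = -1;                                   // invoker registered with the thread pool (assumed type)
    bool isInitOpenGL = false;                             // whether OpenGL has been initialized
};

}  // namespace extension
}  // namespace agora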

Encapsulate the filter into an extension

To encapsulate the video filter into an extension, you need to implement the IExtensionProvider interface. You can find the interface in the NGIAgoraExtensionProvider.h file. The following methods from this interface must be implemented:

  • enumerateExtensions
  • createVideoFilter

The following code sample shows how to use these APIs to encapsulate the video filter:


#include "ExtensionProvider.h"
#include "../logutils.h"
#include "VideoProcessor.h"
#include "plugin_source_code/JniHelper.h"

namespace agora {
namespace extension {
ExtensionProvider::ExtensionProvider() {
    PRINTF_INFO("ExtensionProvider create");
    byteDanceProcessor_ = new agora::RefCountedObject<ByteDanceProcessor>();
    audioProcessor_ = new agora::RefCountedObject<AdjustVolumeAudioProcessor>();
}

ExtensionProvider::~ExtensionProvider() {
    PRINTF_INFO("ExtensionProvider destroy");
    byteDanceProcessor_.reset();
    audioProcessor_.reset();
}

// Enumerate your extensions that can be encapsulated
void ExtensionProvider::enumerateExtensions(ExtensionMetaInfo* extension_list,
                                            int& extension_count) {
    extension_count = 1;
    ExtensionMetaInfo i;
    i.type = EXTENSION_TYPE::VIDEO_PRE_PROCESSING_FILTER;
    i.extension_name = agora::extension::VIDEO_FILTER_NAME;
    extension_list[0] = i;
}

// Create a video filter
agora_refptr<agora::rtc::IExtensionVideoFilter> ExtensionProvider::createVideoFilter(const char* name) {
    PRINTF_INFO("ExtensionProvider::createVideoFilter %s", name);
    auto videoFilter = new agora::RefCountedObject<agora::extension::ExtensionVideoFilter>(byteDanceProcessor_);
    return videoFilter;
}

void ExtensionProvider::setExtensionControl(rtc::IExtensionControl* control){
}
}
}

Package the extension

After encapsulating the filter into an extension, you need to register and package it into a .aar or .so file, and submit it to Agora together with a file that contains the extension name, vendor name, and filter name.

  1. Register the extension

    Register the extension with the macro REGISTER_AGORA_EXTENSION_PROVIDER, which is in the AgoraExtensionProviderEntry.h file. Use this macro at the entrance of the extension implementation. When the SDK loads the extension, this macro automatically registers it to the SDK. For example:


    REGISTER_AGORA_EXTENSION_PROVIDER(ByteDance, agora::extension::ExtensionProvider);

  2. Link the libagora-rtc-sdk-jni.so file

    In CMakeLists.txt, specify the path to save the libagora-rtc-sdk-jni.so file in the downloaded SDK package according to the following table:

    • 64-bit libagora-rtc-sdk-jni.so: AgoraWithByteDanceAndroid/agora-bytedance/src/main/agoraLibs/arm64-v8a
    • 32-bit libagora-rtc-sdk-jni.so: AgoraWithByteDanceAndroid/agora-bytedance/src/main/agoraLibs/armeabi-v7a
  3. Provide extension information

    Create a .java or .md file to provide the following information (see the example after this list):

    • EXTENSION_NAME: The name of the target link library used in CMakeLists.txt. For example, for a .so file named libagora-bytedance.so, the EXTENSION_NAME should be agora-bytedance.
    • EXTENSION_VENDOR_NAME: The name of the extension provider, which is used for registering in the agora-bytedance.cpp file.
    • EXTENSION_FILTER_NAME: The name of the filter, which is defined in ExtensionProvider.h.
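
    For illustration only, the contents of such a file might look like the following. The name agora-bytedance comes from the example above, while the vendor and filter names are placeholders that must match the values used in your agora-bytedance.cpp and ExtensionProvider.h:

    EXTENSION_NAME: agora-bytedance
    EXTENSION_VENDOR_NAME: ByteDance            // placeholder: the name registered with REGISTER_AGORA_EXTENSION_PROVIDER
    EXTENSION_FILTER_NAME: <your filter name>   // placeholder: the value of VIDEO_FILTER_NAME defined in ExtensionProvider.h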

Test your implementation

Once you have developed your extension and API endpoints, the next step is to test whether they work properly:

  • Functional and performance tests

    Test the functionality and performance of your extension and submit a test report to Agora. This report must contain:

    • The following proof of functionality:
      • The extension is enabled and loaded in the SDK normally.
      • All key-value pairs in the setExtensionProperty or setExtensionPropertyWithVendor method work properly.
      • All event callbacks of your extension work properly through IMediaExtensionObserver.
    • The following performance data:
      • The average time the extension needs to process an audio or video frame.
      • The maximum amount of memory required by the extension.
      • The maximum amount of CPU/GPU consumption required by the extension.
  • Extension listing test

    The Extensions Marketplace is where developers discover your extension. In the Marketplace, each extension has a product listing that provides detailed information such as a feature overview and implementation guides. Before making your extension listing publicly accessible, the best practice is to see how everything looks and try every function in a test environment.

  • Write the integration document for your extension

    The easier it is for other developers to integrate your extension, the more it will be used. Follow the guidelines and create the best integration guide for your extension.

  • Apply for testing

    To apply for access to the test environment, contact Agora and provide the following:

    • Your extension package
    • Extension listing assets, including:
      • Your company name
      • Your public email address
      • The Provisioning API endpoints
      • The Usage and Billing API endpoints
      • Your draft business model or pricing plan
      • Your support page URL
      • Your official website URL
      • Your implementation guides URL
  • Test your extension listing

    Once your application is approved, Agora publishes your extension in the test environment and sends you an e-mail.

    To test if everything works properly with your extension in the Marketplace, do the following:

    • Activate and deactivate your extension in an Agora project, and see whether the Provisioning APIs work properly.
    • Follow your implementation guides to implement your extension in an Agora project, and see whether you need to update your documentation.
    • By the end of the month, check the billing information and see whether the Usage and Billing APIs work properly.

You are now ready to submit your extension for final review by Agora. See Publish Your Extension.

Reference

This section contains information that completes the information on this page, or points you to documentation that explains other aspects of this product.

Sample project

Agora provides an Android sample project agora-simple-filter for developing audio and video filter extensions.

API reference

The classes used to create and encapsulate filters are:

IExtensionVideoFilter

Implement receiving, processing, and delivering video data.

Methods include:

getProcessMode

Sets how the SDK communicates with your video filter extension. The SDK triggers this callback first when loading the extension. After receiving the callback, you need to return mode and independent_thread to specify how the SDK communicates with the extension.


virtual void getProcessMode(ProcessMode& mode, bool& independent_thread) = 0;

  • mode: The mode for transferring video frames between the SDK and the extension. You can set it to one of the following values:
    • Sync: Synchronous mode, where the SDK and extension transfer video frames through adaptVideoFrame.
    • Async: Asynchronous mode, where the SDK sends video frames to the extension through pendVideoFrame, and the extension returns processed video frames to the SDK through deliverVideoFrame.
  • independent_thread: Whether to create an independent thread for the extension:
    • true: Create an independent thread for the extension, so that the SDK sends all the callbacks to the extension in the created thread.
    • false: Do not create an independent thread for the extension. In this case, the SDK sends all the callbacks to the extension in its original video processing thread.

  You can set the values of mode and independent_thread as follows:

    • If your extension uses a complicated YUV algorithm, Agora recommends setting mode to Async and independent_thread to false; if it does not use a complicated YUV algorithm, Agora recommends setting mode to Sync and independent_thread to false.
    • If your extension uses OpenGL for data processing, Agora recommends setting mode to Sync and independent_thread to true (see the sketch below).
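
  For example, an OpenGL-based filter might implement getProcessMode like this minimal sketch, which follows the recommendation above (ExtensionVideoFilter and the mode_ member are taken from the earlier sample):

    // Minimal sketch: synchronous transfer on an independent thread, as recommended
    // for extensions that process frames with OpenGL.
    void ExtensionVideoFilter::getProcessMode(ProcessMode& mode, bool& independent_thread) {
        mode = ProcessMode::kSync;   // frames are exchanged through adaptVideoFrame
        independent_thread = true;   // the SDK calls the extension on a dedicated thread
        mode_ = mode;                // cache the mode for later checks (member from the sample)
    }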

    start


    virtual int start(agora::agora_refptr<Control> control) = 0;

    The SDK triggers this callback after the video transmission pipeline starts. You can initialize OpenGL in this callback.

    The SDK also passes a Control object to the extension in this method. The Control class provides methods for the extension to interact with the SDK. You can implement the methods in the Control class based on your actual needs:


    class Control : public RefCountInterface {
    public:
        /**
         * In asynchronous mode (mode is set to Async), the extension calls this method to
         * return the processed video frame to the SDK.
         * Before calling this method, ensure that the SDK submits the video frame to
         * the extension through pendVideoFrame.
         */
        virtual ProcessResult deliverVideoFrame(agora::agora_refptr<IVideoFrame> frame) = 0;
        /**
         * If the extension needs a new memory pool, call this method to create a new
         * IVideoFrame object for better memory management.
         * For example, an image enhancement extension can call this method to save both
         * the original frame and processed frame with more efficient memory management.
         */
        virtual agora::agora_refptr<IVideoFrameMemoryPool> getMemoryPool() = 0;
        /**
         * Call this method to report an event to the SDK. The SDK then sends the event
         * notification to the app.
         */
        virtual int postEvent(const char* key, const char* value) = 0;
        /**
         * Call this method to print logs to the SDK.
         */
        virtual void printLog(commons::LOG_LEVEL level, const char* format, ...) = 0;
        /**
         * If an unrecoverable error occurs within the extension, call this method to report
         * the error and stop SDK from sending video frames to the extension. The SDK then
         * passes the error message to the app.
         */
        virtual void disableMe(int error, const char* msg) = 0;
    };
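
    For illustration, once the Control object has been saved in start (as control_ in the earlier sample), the extension can use it to report events or unrecoverable errors back to the SDK. The keys, values, and error message below are purely illustrative:

    // Sketch: report a custom event to the app through the SDK.
    if (control_) {
        control_->postEvent("filter_status", "enabled");   // illustrative key/value pair
    }

    // Sketch: on an unrecoverable error, stop the SDK from sending further frames.
    if (control_) {
        control_->disableMe(-1, "OpenGL context lost");     // illustrative error code and message
    }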

    stop


    virtual int stop() = 0;

    The SDK triggers this callback before the video transmission pipeline stops. You can release OpenGL in this callback.

    getVideoFormatWanted

    Sets the type and format of the video frame sent to your extension. The SDK triggers this callback before sending a video frame to the extension. In the callback, you need to specify the type and format for the frame. You can change the type and format of subsequent frames when you receive the next callback.


    virtual void getVideoFormatWanted(VideoFrameData::Type& type, RawPixelBuffer::Format& format) = 0;

    • type: The type of the video frame. Currently you can only set it to RawPixels, which means raw data.
    • format: The format of the video frame. You can set it to one of the following values:
      • Unknown: An unknown format.
      • I420: The I420 format.
      • I422: The I422 format.
      • NV21: The NV21 format.
      • NV12: The NV12 format.
      • RGBA: The RGBA format.
      • ARGB: The ARGB format.
      • BGRA: The BGRA format.

    adaptVideoFrame

    Adapts the video frame. In synchronous mode (mode is set to Sync), the SDK and extension transfer video frames through this method. By calling this method, the SDK sends video frames to the extension with in, and the extension returns the processed frames with out.


    virtual ProcessResult adaptVideoFrame(agora::agora_refptr<IVideoFrame> in, agora::agora_refptr<IVideoFrame>& out) {
        return ProcessResult::kBypass;
    }

    Parameters

    • in: An input parameter. The video frame to be processed by the extension.
    • out: An output parameter. The processed video frame.

    Returns

    The result of processing the video frame:

    • Success: The extension has processed the frame successfully.
    • ByPass: The extension does not process the frame and passes it to the subsequent link in the filter chain.
    • Drop: The extension discards the frame.

    pendVideoFrame

    Submits the video frame. In asynchronous mode (mode is set to Async), the SDK submits the video frame to the extension through this method. After calling this method, the extension must return the processed video frame through deliverVideoFrame in the Control class.


    virtual ProcessResult pendVideoFrame(agora::agora_refptr<IVideoFrame> frame) {
        return ProcessResult::kBypass;
    }

    Parameters

    • frame: The video frame to be processed by the extension.

    Returns

    The result of processing the video frame:

    • Success: The extension has processed the frame successfully.
    • ByPass: The extension does not process the frame and passes it to the subsequent link in the chain.
    • Drop: The extension discards the frame.

    setProperty

    Sets the property of the video filter extension. When an app client calls setExtensionProperty, the SDK triggers this callback. In the callback, you need to return the extension property.


    int ExtensionVideoFilter::setProperty(const char *key, const void *buf, size_t buf_size)

    • key: The key of the property.
    • buf: The buffer of the property in the JSON format. You can use the open source nlohmann/json library for the serialization and deserialization between the C++ struct and the JSON string (see the sketch below).
    • buf_size: The size of the buffer.
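
    For illustration, a setProperty implementation might deserialize the JSON buffer with nlohmann/json as in the following sketch; the enable and intensity keys are hypothetical properties of an example filter:

    // Sketch: parse the JSON property buffer with the open source nlohmann/json library.
    #include <nlohmann/json.hpp>
    #include <string>

    int ExtensionVideoFilter::setProperty(const char* key, const void* buf, size_t buf_size) {
        std::string payload(static_cast<const char*>(buf), buf_size);
        auto json = nlohmann::json::parse(payload, nullptr, /*allow_exceptions=*/false);
        if (json.is_discarded()) {
            return -1;  // the buffer does not contain valid JSON
        }
        if (json.contains("enable")) {        // "enable" is a hypothetical property key
            bool enable = json["enable"].get<bool>();
            (void)enable;                     // apply it to your frame processor here
        }
        if (json.contains("intensity")) {     // "intensity" is a hypothetical property key
            float intensity = json["intensity"].get<float>();
            (void)intensity;                  // apply it to your frame processor here
        }
        return 0;
    }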

    getProperty

    Gets the property of the video filter extension. When the app client calls getExtensionProperty, the SDK calls this method to get the extension property.


    int ExtensionVideoFilter::getProperty(const char *key, void *buf, size_t buf_size)

    • key: The key of the property.
    • buf: The pointer to the buffer that receives the property value.
    • buf_size: The size of the buffer.
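
    A getProperty implementation can serialize the current settings into JSON and copy them into buf, as in this sketch; the fields continue the hypothetical example above, and the exact return-value convention should follow the SDK header:

    // Sketch: return the current (hypothetical) settings as a JSON string.
    #include <cstring>
    #include <string>
    #include <nlohmann/json.hpp>

    int ExtensionVideoFilter::getProperty(const char* key, void* buf, size_t buf_size) {
        nlohmann::json json;
        json["enable"] = true;        // hypothetical current state
        json["intensity"] = 0.5f;     // hypothetical current state
        const std::string payload = json.dump();
        if (payload.size() + 1 > buf_size) {
            return -1;                // the buffer is too small for the serialized value
        }
        std::memcpy(buf, payload.c_str(), payload.size() + 1);  // copy including the null terminator
        return 0;
    }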

    IExtensionProvider

    Encapsulate your IExtensionVideoFilter implementation into an extension.

    Methods include:

    enumerateExtensions

    Enumerates your extensions that can be encapsulated. The SDK triggers this callback when loading the extension. In the callback, you need to return information about all of your extensions that can be encapsulated.


    virtual void enumerateExtensions(ExtensionMetaInfo* extension_list,
                                     int& extension_count) {
        (void) extension_list;
        extension_count = 0;
    }

    • extension_list: Extension information, including extension type and name. For details, see the definition of ExtensionMetaInfo.
    • extension_count: The total number of the extensions that can be encapsulated.

    The definition of ExtensionMetaInfo is as follows:


    // EXTENSION_TYPE represents where the extension is located in the media transmission pipeline
    enum EXTENSION_TYPE {
        // Audio processing filter
        AUDIO_FILTER,
        // Video preprocessing filter
        VIDEO_PRE_PROCESSING_FILTER,
        // Video postprocessing filter
        VIDEO_POST_PROCESSING_FILTER,
        // Reserved for future use
        AUDIO_SINK,
        // Reserved for future use
        VIDEO_SINK,
        // Reserved for future use
        UNKNOWN,
    };

    // Extension information, including extension type and name
    struct ExtensionMetaInfo {
        EXTENSION_TYPE type;
        const char* extension_name;
    };

    If you specify VIDEO_PRE_PROCESSING_FILTER or VIDEO_POST_PROCESSING_FILTER as EXTENSION_TYPE, the SDK calls the createVideoFilter method after the customer creates the IExtensionProvider object when initializing RtcEngine.

    createVideoFilter

    Creates a video filter. You need to return the IExtensionVideoFilter instance you created in this method.


    virtual agora_refptr<IExtensionVideoFilter> createVideoFilter(const char* name) {
        return NULL;
    }

    After the IExtensionVideoFilter instance is created, the extension processes video frames with methods in the IExtensionVideoFilter class.
