diff --git a/.github/ci/build/build_ios_ipa.sh b/.github/ci/build/build_ios_ipa.sh
index 01501f2f0..722f2b866 100755
--- a/.github/ci/build/build_ios_ipa.sh
+++ b/.github/ci/build/build_ios_ipa.sh
@@ -17,10 +17,8 @@ TARGET_NAME=${PROJECT_PATH##*/}
KEYCENTER_PATH=${PROJECT_PATH}"/"${TARGET_NAME}"/Common/KeyCenter.swift"
-METHOD_PATH=${PROJECT_PATH}"/ExportOptions.plist"
-
# 打包环境
-CONFIGURATION=$method
+CONFIGURATION="Debug"
#工程文件路径
APP_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcworkspace"
@@ -65,8 +63,6 @@ echo PBXPROJ_PATH: $PBXPROJ_PATH
# Release
/usr/libexec/PlistBuddy -c "Set :objects:03D13BF82448758C00B599B3:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH
-#修改打包方式
-/usr/libexec/PlistBuddy -c "Set :method $CONFIGURATION" $METHOD_PATH
# 读取APPID环境变量
echo AGORA_APP_ID:$APP_ID
@@ -87,33 +83,39 @@ xcodebuild clean -workspace "${APP_PATH}" -configuration "${CONFIGURATION}" -sch
CURRENT_TIME=$(date "+%Y-%m-%d %H-%M-%S")
# 归档路径
-ARCHIVE_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}/${TARGET_NAME}.xcarchive"
+ARCHIVE_PATH="${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive"
# 编译环境
-# 导出路径
-EXPORT_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}"
-
# plist路径
PLIST_PATH="${PROJECT_PATH}/ExportOptions.plist"
echo PLIST_PATH: $PLIST_PATH
# archive 这边使用的工作区间 也可以使用project
-xcodebuild archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}" -destination 'generic/platform=iOS'
+xcodebuild CODE_SIGN_STYLE="Manual" archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" clean CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}" -destination 'generic/platform=iOS' -quiet || exit
+
+cd ${WORKSPACE}
-# 导出ipa
-xcodebuild -exportArchive -archivePath "${ARCHIVE_PATH}" -exportPath "${EXPORT_PATH}" -exportOptionsPlist "${PLIST_PATH}"
+# 压缩archive
+7za a -tzip "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" "${ARCHIVE_PATH}"
+
+# 签名
+# sh sign "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --type xcarchive --plist "${PLIST_PATH}"
+sh export "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --plist "${PLIST_PATH}"
# 上传IPA
-7za a "$WORKSPACE/${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" -r "${EXPORT_PATH}/${TARGET_NAME}.ipa"
+PAYLOAD_PATH="${TARGET_NAME}_${BUILD_NUMBER}_Payload"
+mkdir "${PAYLOAD_PATH}"
+# mv "${TARGET_NAME}_${BUILD_NUMBER}_iOS.ipa" "${PAYLOAD_PATH}"
+mv "${TARGET_NAME}_${BUILD_NUMBER}.ipa" "${PAYLOAD_PATH}"
-# 删除IPA文件夹
-rm -rf "${EXPORT_PATH}"
+7za a "${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" -r "${PAYLOAD_PATH}"
+python3 artifactory_utils.py --action=upload_file --file="${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" --project
-# rm -rf "${EXPORT_PATH}/${TARGET_NAME}.xcarchive"
-# rm -rf "${EXPORT_PATH}/Packaging.log"
-# rm -rf "${EXPORT_PATH}/ExportOptions.plist"
-# rm -rf "${EXPORT_PATH}/DistributionSummary.plist"
+# 删除IPA文件夹
+rm -rf ${TARGET_NAME}_${BUILD_NUMBER}.xcarchive
+rm -rf *.zip
+rm -rf ${PAYLOAD_PATH}
#复原Keycenter文件
python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 1
diff --git a/.github/ci/build/build_mac.sh b/.github/ci/build/build_mac.sh
index 315de4f6b..1ba0323f4 100644
--- a/.github/ci/build/build_mac.sh
+++ b/.github/ci/build/build_mac.sh
@@ -71,7 +71,7 @@ else
echo "failed"
exit 1
fi
-cp -rf ./macOS/** ./$unzip_name/samples/APIExample
+cp -a ./macOS/** ./$unzip_name/samples/APIExample
mv ./$unzip_name/samples/APIExample/sdk.podspec ./$unzip_name/
python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/APIExample/Podfile
diff --git a/.github/ci/build/build_mac_ipa.sh b/.github/ci/build/build_mac_ipa.sh
index 3eb0c737e..21e5b2125 100755
--- a/.github/ci/build/build_mac_ipa.sh
+++ b/.github/ci/build/build_mac_ipa.sh
@@ -17,7 +17,7 @@ TARGET_NAME=${PROJECT_PATH##*/}
KEYCENTER_PATH=${PROJECT_PATH}"/"${TARGET_NAME}"/Common/KeyCenter.swift"
# 打包环境
-CONFIGURATION=developer-id
+CONFIGURATION=Release
#工程文件路径
APP_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcworkspace"
@@ -69,12 +69,9 @@ xcodebuild clean -workspace "${APP_PATH}" -configuration "${CONFIGURATION}" -sch
CURRENT_TIME=$(date "+%Y-%m-%d %H-%M-%S")
# 归档路径
-ARCHIVE_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}/${TARGET_NAME}.xcarchive"
+ARCHIVE_PATH="${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive"
# 编译环境
-# 导出路径
-EXPORT_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}"
-
# plist路径
PLIST_PATH="${PROJECT_PATH}/ExportOptions.plist"
@@ -83,20 +80,20 @@ echo PLIST_PATH: $PLIST_PATH
# archive 这边使用的工作区间 也可以使用project
xcodebuild archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}"
-# 导出ipa
-xcodebuild -exportArchive -archivePath "${ARCHIVE_PATH}" -exportPath "${EXPORT_PATH}" -exportOptionsPlist "${PLIST_PATH}"
+cd ${WORKSPACE}
-# 删除archive文件
-rm -rf "${EXPORT_PATH}/${TARGET_NAME}.xcarchive"
-rm -rf "${EXPORT_PATH}/Packaging.log"
-rm -rf "${EXPORT_PATH}/ExportOptions.plist"
-rm -rf "${EXPORT_PATH}/DistributionSummary.plist"
+# 压缩archive
+7za a -slp "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" "${ARCHIVE_PATH}"
+
+# 签名
+sh sign "${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --type xcarchive --plist "${PLIST_PATH}" --application macApp
# 上传IPA
-7za a "$WORKSPACE/${TARGET_NAME}_Mac_${BUILD_NUMBER}_APP.zip" -r "${EXPORT_PATH}"
+python3 artifactory_utils.py --action=upload_file --file="${TARGET_NAME}_${BUILD_NUMBER}.app.zip" --project
-# 删除IPA文件夹
-rm -rf "${EXPORT_PATH}"
+# 删除archive文件
+rm -rf ${TARGET_NAME}_${BUILD_NUMBER}.xcarchive
+rm -rf *.zip
#复原Keycenter文件
python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 1
diff --git a/Android/APIExample-Audio/app/build.gradle b/Android/APIExample-Audio/app/build.gradle
index 334dd33a0..6f958c176 100644
--- a/Android/APIExample-Audio/app/build.gradle
+++ b/Android/APIExample-Audio/app/build.gradle
@@ -48,7 +48,7 @@ dependencies {
implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar'])
}
else{
- def agora_sdk_version = "4.1.1"
+ def agora_sdk_version = "4.2.0"
// case 1: full single lib with voice only
implementation "io.agora.rtc:voice-sdk:${agora_sdk_version}"
// case 2: partial libs with voice only
diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
index d212f82b0..41e6b2c7f 100644
--- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
+++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
@@ -27,6 +27,10 @@
import static io.agora.rtc2.Constants.ULTRA_HIGH_QUALITY_VOICE;
import static io.agora.rtc2.Constants.VOICE_BEAUTIFIER_OFF;
import static io.agora.rtc2.Constants.VOICE_CHANGER_BASS;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CARTOON;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CHILDLIKE;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CHIPMUNK;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_DARTH_VADER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_BOY;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_GIRL;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_HULK;
@@ -34,9 +38,16 @@
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_PIGKING;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_SISTER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_UNCLE;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_GIRLISH_MAN;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_GROOT;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_IRON_LADY;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_MONSTER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_NEUTRAL;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_PHONE_OPERATOR;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_SHIN_CHAN;
import static io.agora.rtc2.Constants.VOICE_CHANGER_SOLID;
import static io.agora.rtc2.Constants.VOICE_CHANGER_SWEET;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_TRANSFORMERS;
import static io.agora.rtc2.Constants.VOICE_CONVERSION_OFF;
import android.content.Context;
@@ -97,7 +108,7 @@ public class VoiceEffects extends BaseFragment implements View.OnClickListener,
chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion,
customBandFreq, customReverbKey;
private ViewGroup _voice3DLayout, _pitchModeLayout, _pitchValueLayout;
- private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue;
+ private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue, customVoiceFormant;
private AudioSeatManager audioSeatManager;
@@ -159,10 +170,12 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
customBandGain = view.findViewById(R.id.audio_custom_band_gain); // engine.setLocalVoiceEqualization()
customReverbKey = view.findViewById(R.id.audio_custom_reverb_key);
customReverbValue = view.findViewById(R.id.audio_custom_reverb_value); //engine.setLocalVoiceReverb()
+ customVoiceFormant = view.findViewById(R.id.audio_voice_formant_value); //engine.setLocalVoiceFormant()
customPitch.setOnSeekBarChangeListener(this);
customBandGain.setOnSeekBarChangeListener(this);
customReverbValue.setOnSeekBarChangeListener(this);
+ customVoiceFormant.setOnSeekBarChangeListener(this);
customBandFreq.setOnItemSelectedListener(this);
customReverbKey.setOnItemSelectedListener(this);
@@ -194,6 +207,7 @@ private void resetControlLayoutByJoined() {
customBandGain.setEnabled(joined);
customReverbKey.setEnabled(joined);
customReverbValue.setEnabled(joined);
+ customVoiceFormant.setEnabled(joined);
chatBeautifier.setSelection(0);
@@ -206,6 +220,7 @@ private void resetControlLayoutByJoined() {
customPitch.setProgress(0);
customBandGain.setProgress(0);
customReverbValue.setProgress(0);
+ customVoiceFormant.setProgress(50);
}
@Override
@@ -407,8 +422,8 @@ private void joinChannel(String channelId) {
private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() {
/**
* Error code description can be found at:
- * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
- * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
*/
@Override
public void onError(int err) {
@@ -617,6 +632,28 @@ private int getVoiceConversionValue(String label) {
return VOICE_CHANGER_SOLID;
case "VOICE_CHANGER_BASS":
return VOICE_CHANGER_BASS;
+ case "VOICE_CHANGER_CARTOON":
+ return VOICE_CHANGER_CARTOON;
+ case "VOICE_CHANGER_CHILDLIKE":
+ return VOICE_CHANGER_CHILDLIKE;
+ case "VOICE_CHANGER_PHONE_OPERATOR":
+ return VOICE_CHANGER_PHONE_OPERATOR;
+ case "VOICE_CHANGER_MONSTER":
+ return VOICE_CHANGER_MONSTER;
+ case "VOICE_CHANGER_TRANSFORMERS":
+ return VOICE_CHANGER_TRANSFORMERS;
+ case "VOICE_CHANGER_GROOT":
+ return VOICE_CHANGER_GROOT;
+ case "VOICE_CHANGER_DARTH_VADER":
+ return VOICE_CHANGER_DARTH_VADER;
+ case "VOICE_CHANGER_IRON_LADY":
+ return VOICE_CHANGER_IRON_LADY;
+ case "VOICE_CHANGER_SHIN_CHAN":
+ return VOICE_CHANGER_SHIN_CHAN;
+ case "VOICE_CHANGER_GIRLISH_MAN":
+ return VOICE_CHANGER_GIRLISH_MAN;
+ case "VOICE_CHANGER_CHIPMUNK":
+ return VOICE_CHANGER_CHIPMUNK;
case "VOICE_CONVERSION_OFF":
default:
return VOICE_CONVERSION_OFF;
@@ -743,6 +780,9 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+ if (!fromUser) {
+ return;
+ }
if(seekBar == _voice3DCircle){
int cicle = (int) (1 + 59 * progress * 1.0f / seekBar.getMax());
// [1,60], 10 default
@@ -771,6 +811,10 @@ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
value = (int) (100 * progress * 1.0f / seekBar.getMax());
}
engine.setLocalVoiceReverb(reverbKey, value);
+ } else if (seekBar == customVoiceFormant) {
+ // formant API range is [-1, 1]; with a 0-100 slider this mapping yields [-0.5, 0.5] — confirm intended
+ double value = (progress - 50) * 1.0f / 100;
+ engine.setLocalVoiceFormant(value);
}
}
diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
index 75b8bb6a1..e66641d57 100755
--- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
+++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
@@ -236,6 +236,7 @@ public void onClick(View v) {
engine.leaveChannel();
pulling = false;
join.setText(getString(R.string.join));
+ audioSeatManager.downAllSeats();
if(pullingTask != null){
try {
pullingTask.join();
@@ -268,7 +269,7 @@ private void joinChannel(String channelId) {
* 0: Success.
* < 0: Failure.
* PS: Ensure that you call this method before the joinChannel method.*/
- engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true);
+ // engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true);
@@ -303,9 +304,8 @@ private void joinChannel(String channelId) {
private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() {
/**
- * Error code description can be found at:
- * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
- * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
*/
@Override
public void onError(int err) {
diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
index b4abaa337..a94f62cfb 100755
--- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
+++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
@@ -5,7 +5,6 @@
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
-import android.os.Process;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
@@ -21,14 +20,12 @@
import com.yanzhenjie.permission.AndPermission;
import com.yanzhenjie.permission.runtime.Permission;
-import java.io.IOException;
-import java.io.InputStream;
-
import io.agora.api.example.MainApplication;
import io.agora.api.example.R;
import io.agora.api.example.annotation.Example;
import io.agora.api.example.common.BaseFragment;
import io.agora.api.example.common.widget.AudioSeatManager;
+import io.agora.api.example.utils.AudioFileReader;
import io.agora.api.example.utils.CommonUtil;
import io.agora.api.example.utils.TokenUtils;
import io.agora.rtc2.ChannelMediaOptions;
@@ -37,6 +34,7 @@
import io.agora.rtc2.RtcEngine;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
+import io.agora.rtc2.audio.AudioTrackConfig;
/**
* This demo demonstrates how to make a one-to-one voice call
@@ -57,19 +55,11 @@ public class CustomAudioSource extends BaseFragment implements View.OnClickListe
public static RtcEngineEx engine;
private Switch mic, pcm;
private ChannelMediaOptions option = new ChannelMediaOptions();
- private static final String AUDIO_FILE = "output.raw";
- private static final Integer SAMPLE_RATE = 44100;
- private static final Integer SAMPLE_NUM_OF_CHANNEL = 2;
- private static final Integer BITS_PER_SAMPLE = 16;
- private static final Integer SAMPLES = 441;
- private static final Integer BUFFER_SIZE = SAMPLES * BITS_PER_SAMPLE / 8 * SAMPLE_NUM_OF_CHANNEL;
- private static final Integer PUSH_INTERVAL = SAMPLES * 1000 / SAMPLE_RATE;
-
- private InputStream inputStream;
- private Thread pushingTask;
- private boolean pushing = false;
+ private int pushTimes = 0;
private AudioSeatManager audioSeatManager;
+ private AudioFileReader audioPushingHelper;
+ private int customAudioTrack = -1;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
@@ -87,36 +77,6 @@ private void initMediaOption() {
option.enableAudioRecordingOrPlayout = true;
}
- private void openAudioFile() {
- try {
- inputStream = this.getResources().getAssets().open(AUDIO_FILE);
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- private void closeAudioFile() {
- try {
- inputStream.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- private byte[] readBuffer() {
- int byteSize = BUFFER_SIZE;
- byte[] buffer = new byte[byteSize];
- try {
- if (inputStream.read(buffer) < 0) {
- inputStream.reset();
- return readBuffer();
- }
- } catch (IOException e) {
- e.printStackTrace();
- }
- return buffer;
- }
-
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
@@ -195,7 +155,13 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
+ "}");
/* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/
engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
- openAudioFile();
+
+ audioPushingHelper = new AudioFileReader(requireContext(), (buffer, timestamp) -> {
+ if(joined && engine != null && customAudioTrack != -1){
+ int ret = engine.pushExternalAudioFrame(buffer, timestamp, AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, customAudioTrack);
+ Log.i(TAG, "pushExternalAudioFrame times:" + (++pushTimes) + ", ret=" + ret);
+ }
+ });
} catch (Exception e) {
e.printStackTrace();
getActivity().onBackPressed();
@@ -205,34 +171,32 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
@Override
public void onDestroy() {
super.onDestroy();
- pushing = false;
+ if(customAudioTrack != -1){
+ engine.destroyCustomAudioTrack(customAudioTrack);
+ customAudioTrack = -1;
+ }
+ if(audioPushingHelper != null){
+ audioPushingHelper.stop();
+ }
/**leaveChannel and Destroy the RtcEngine instance*/
if (engine != null) {
engine.leaveChannel();
}
handler.post(RtcEngine::destroy);
engine = null;
- closeAudioFile();
}
@Override
- public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
+ public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
if (compoundButton.getId() == R.id.microphone) {
- if (b) {
- option.publishMicrophoneTrack = true;
- } else {
- option.publishMicrophoneTrack = false;
- }
+ option.publishMicrophoneTrack = checked;
engine.updateChannelMediaOptions(option);
} else if (compoundButton.getId() == R.id.localAudio) {
- if (b) {
- option.publishCustomAudioTrack = true;
- } else {
- option.publishCustomAudioTrack = false;
- }
+ option.publishCustomAudioTrackId = customAudioTrack;
+ option.publishCustomAudioTrack = checked;
engine.updateChannelMediaOptions(option);
- engine.enableCustomAudioLocalPlayback(0, b);
+ engine.enableCustomAudioLocalPlayback(customAudioTrack, checked);
}
}
@@ -278,19 +242,13 @@ public void onClick(View v) {
* 2:If you call the leaveChannel method during CDN live streaming, the SDK
* triggers the removeInjectStreamUrl method.*/
engine.leaveChannel();
- pushing = false;
join.setText(getString(R.string.join));
mic.setEnabled(false);
pcm.setEnabled(false);
pcm.setChecked(false);
mic.setChecked(true);
- if(pushingTask != null){
- try {
- pushingTask.join();
- pushingTask = null;
- } catch (InterruptedException e) {
- // do nothing
- }
+ if(audioPushingHelper != null){
+ audioPushingHelper.stop();
}
audioSeatManager.downAllSeats();
}
@@ -317,9 +275,9 @@ private void joinChannel(String channelId) {
* 0: Success.
* < 0: Failure.
* PS: Ensure that you call this method before the joinChannel method.*/
- engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true);
-
-
+ AudioTrackConfig config = new AudioTrackConfig();
+ config.enableLocalPlayback = false;
+ customAudioTrack = engine.createCustomAudioTrack(Constants.AudioTrackType.AUDIO_TRACK_MIXABLE, config);
/**Please configure accessToken in the string_config file.
* A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see
@@ -349,10 +307,11 @@ private void joinChannel(String channelId) {
* The SDK uses this class to report to the app on SDK runtime events.
*/
private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() {
+
/**
* Error code description can be found at:
- * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
- * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
*/
@Override
public void onError(int err) {
@@ -379,12 +338,14 @@ public void run() {
pcm.setEnabled(true);
join.setEnabled(true);
join.setText(getString(R.string.leave));
- pushing = true;
- if(pushingTask == null){
- pushingTask = new Thread(new PushingTask());
- pushingTask.start();
+ if(audioPushingHelper != null){
+ pushTimes = 0;
+ audioPushingHelper.start();
}
audioSeatManager.upLocalSeat(uid);
+ if (pcm.isChecked()) {
+ engine.enableCustomAudioLocalPlayback(customAudioTrack, true);
+ }
}
});
}
@@ -403,26 +364,4 @@ public void onUserOffline(int uid, int reason) {
}
};
- class PushingTask implements Runnable {
- long number = 0;
-
- @Override
- public void run() {
- Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
- while (pushing) {
- Log.i(TAG, "pushExternalAudioFrame times:" + number++);
- long before = System.currentTimeMillis();
- engine.pushExternalAudioFrame(readBuffer(), 0);
- long now = System.currentTimeMillis();
- long consuming = now - before;
- if(consuming < PUSH_INTERVAL){
- try {
- Thread.sleep(PUSH_INTERVAL - consuming);
- } catch (InterruptedException e) {
- Log.e(TAG, "PushingTask Interrupted");
- }
- }
- }
- }
- }
}
diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java
new file mode 100644
index 000000000..387463604
--- /dev/null
+++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java
@@ -0,0 +1,116 @@
+package io.agora.api.example.utils;
+
+import android.content.Context;
+import android.os.Process;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+public class AudioFileReader {
+ private static final String AUDIO_FILE = "output.raw";
+ public static final int SAMPLE_RATE = 44100;
+ public static final int SAMPLE_NUM_OF_CHANNEL = 2;
+ public static final int BITS_PER_SAMPLE = 16;
+
+ public static final float BYTE_PER_SAMPLE = 1.0f * BITS_PER_SAMPLE / 8 * SAMPLE_NUM_OF_CHANNEL;
+ public static final float DURATION_PER_SAMPLE = 1000.0f / SAMPLE_RATE; // ms
+ public static final float SAMPLE_COUNT_PER_MS = SAMPLE_RATE * 1.0f / 1000; // samples per ms
+
+ private static final int BUFFER_SAMPLE_COUNT = (int) (SAMPLE_COUNT_PER_MS * 10); // 10ms sample count
+ private static final int BUFFER_BYTE_SIZE = (int) (BUFFER_SAMPLE_COUNT * BYTE_PER_SAMPLE); // byte
+ private static final long BUFFER_DURATION = (long) (BUFFER_SAMPLE_COUNT * DURATION_PER_SAMPLE); // ms
+
+ private final Context context;
+ private final OnAudioReadListener audioReadListener;
+ private volatile boolean pushing = false;
+ private InnerThread thread;
+ private InputStream inputStream;
+
+ public AudioFileReader(Context context, OnAudioReadListener listener){
+ this.context = context;
+ this.audioReadListener = listener;
+ }
+
+ public void start() {
+ if(thread == null){
+ thread = new InnerThread();
+ thread.start();
+ }
+ }
+
+ public void stop(){
+ pushing = false;
+ if(thread != null){
+ try {
+ thread.join();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ } finally {
+ thread = null;
+ }
+ }
+ }
+
+ public interface OnAudioReadListener {
+ void onAudioRead(byte[] buffer, long timestamp);
+ }
+
+ private class InnerThread extends Thread{
+
+ @Override
+ public void run() {
+ super.run();
+ try {
+ inputStream = context.getAssets().open(AUDIO_FILE);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+ pushing = true;
+
+ long start_time = System.currentTimeMillis();
+ int sent_audio_frames = 0;
+ while (pushing) {
+ if(audioReadListener != null){
+ audioReadListener.onAudioRead(readBuffer(), System.currentTimeMillis());
+ }
+ ++ sent_audio_frames;
+ long next_frame_start_time = sent_audio_frames * BUFFER_DURATION + start_time;
+ long now = System.currentTimeMillis();
+
+ if(next_frame_start_time > now){
+ long sleep_duration = next_frame_start_time - now;
+ try {
+ Thread.sleep(sleep_duration);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ if (inputStream != null) {
+ try {
+ inputStream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ inputStream = null;
+ }
+ }
+ }
+
+ private byte[] readBuffer() {
+ int byteSize = BUFFER_BYTE_SIZE;
+ byte[] buffer = new byte[byteSize];
+ try {
+ if (inputStream.read(buffer) < 0) {
+ inputStream.reset();
+ return readBuffer();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return buffer;
+ }
+ }
+}
diff --git a/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml b/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml
index 9990b1a41..96db9042b 100644
--- a/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml
+++ b/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml
@@ -439,6 +439,33 @@
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample-Audio/app/src/main/res/values/arrays.xml b/Android/APIExample-Audio/app/src/main/res/values/arrays.xml
index 8a2ba89f6..615b4fb69 100644
--- a/Android/APIExample-Audio/app/src/main/res/values/arrays.xml
+++ b/Android/APIExample-Audio/app/src/main/res/values/arrays.xml
@@ -63,6 +63,17 @@
- VOICE_CHANGER_SWEET
- VOICE_CHANGER_SOLID
- VOICE_CHANGER_BASS
+ - VOICE_CHANGER_CARTOON
+ - VOICE_CHANGER_CHILDLIKE
+ - VOICE_CHANGER_PHONE_OPERATOR
+ - VOICE_CHANGER_MONSTER
+ - VOICE_CHANGER_TRANSFORMERS
+ - VOICE_CHANGER_GROOT
+ - VOICE_CHANGER_DARTH_VADER
+ - VOICE_CHANGER_IRON_LADY
+ - VOICE_CHANGER_SHIN_CHAN
+ - VOICE_CHANGER_GIRLISH_MAN
+ - VOICE_CHANGER_CHIPMUNK
- AUDIO_EQUALIZATION_BAND_31
diff --git a/Android/APIExample/README.md b/Android/APIExample/README.md
index aa0d6d7ab..6f2c16d37 100644
--- a/Android/APIExample/README.md
+++ b/Android/APIExample/README.md
@@ -46,12 +46,11 @@ This project contains third-party beauty integration examples, which are disable
### For Agora Extension Developers
-从4.0.0SDK开始,Agora SDK支持插件系统和开放的云市场帮助开发者发布自己的音视频插件,本项目包含了一个SimpleFilter示例,默认是禁用的状态,如果需要开启编译和使用需要完成以下步骤:
Since version 4.0.0, Agora SDK provides an Extension Interface Framework. Developers could publish their own video/audio extension to Agora Extension Market. In this project includes a sample SimpleFilter example, by default it is disabled.
In order to enable it, you could do as follows:
1. Download [opencv](https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/androidLibs/opencv4.zip) library, unzip it and copy into Android/APIExample/agora-simple-filter/src/main/jniLibs
-2. Download [Agora SDK包](https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_v4.1.0_FULL.zip), unzip it and copy c++ .so library (keeps arch folder) to Android/APIExample/agora-simple-filter/src/main/agoraLibs
+2. Download [Agora SDK包](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Android), unzip it and copy c++ .so library (keeps arch folder) to Android/APIExample/agora-simple-filter/src/main/agoraLibs
3. Modify simpleFilter to true in Android/APIExample/gradle.properties
## Contact Us
diff --git a/Android/APIExample/README.zh.md b/Android/APIExample/README.zh.md
index 6e1fb2672..922ea03c4 100644
--- a/Android/APIExample/README.zh.md
+++ b/Android/APIExample/README.zh.md
@@ -51,7 +51,7 @@
从4.0.0SDK开始,Agora SDK支持插件系统和开放的云市场帮助开发者发布自己的音视频插件,本项目包含了一个SimpleFilter示例,默认是禁用的状态,如果需要开启编译和使用需要完成以下步骤:
1. 下载 [opencv](https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/androidLibs/opencv4.zip) 解压后复制到 Android/APIExample/agora-simple-filter/src/main/jniLibs
-2. 手动下载[Agora SDK包](https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_v4.1.0_FULL.zip), 解压后将c++动态库(包括架构文件夹)copy到Android/APIExample/agora-simple-filter/src/main/agoraLibs
+2. 手动下载[Agora SDK包](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Android), 解压后将c++动态库(包括架构文件夹)copy到Android/APIExample/agora-simple-filter/src/main/agoraLibs
3. 修改Android/APIExample/gradle.properties配置文件中simpleFilter值为true
## 联系我们
diff --git a/Android/APIExample/app/build.gradle b/Android/APIExample/app/build.gradle
index a340a149a..43c2ba5fc 100644
--- a/Android/APIExample/app/build.gradle
+++ b/Android/APIExample/app/build.gradle
@@ -8,6 +8,7 @@ android {
defaultConfig {
applicationId "io.agora.api.example"
+ // ndk.abiFilters 'armeabi-v7a'//, 'arm64-v8a', 'x86', 'x86-64'
minSdkVersion 21
targetSdkVersion 32
versionCode 1
@@ -60,7 +61,7 @@ dependencies {
implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar'])
}
else{
- def agora_sdk_version = "4.1.1"
+ def agora_sdk_version = "4.2.0"
// case 1: full libs
implementation "io.agora.rtc:full-sdk:${agora_sdk_version}"
implementation "io.agora.rtc:full-screen-sharing:${agora_sdk_version}"
@@ -95,8 +96,8 @@ dependencies {
implementation project(path: ':agora-simple-filter')
}
testImplementation 'junit:junit:4.12'
- androidTestImplementation 'androidx.test.ext:junit:1.1.1'
- androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.3'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
implementation 'io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:1.2.0'
implementation 'com.yanzhenjie:permission:2.0.3'
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java
new file mode 100644
index 000000000..f909044d2
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java
@@ -0,0 +1,125 @@
+package io.agora.api.example.common.gles;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+public class GLTestUtils {
+ private static final String TAG = "GLUtils";
+
+ public static Bitmap getTexture2DImage(int textureID, int width, int height) {
+ try {
+ int[] oldFboId = new int[1];
+ GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
+
+ int[] framebuffers = new int[1];
+ GLES20.glGenFramebuffers(1, framebuffers, 0);
+ int framebufferId = framebuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
+
+ int[] renderbuffers = new int[1];
+ GLES20.glGenRenderbuffers(1, renderbuffers, 0);
+ int renderId = renderbuffers[0];
+ GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
+ GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
+
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0);
+ GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
+ if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ Log.d(TAG, "Framebuffer error");
+ }
+
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+
+ GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(framebuffers));
+            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
+
+ return bitmap;
+ } catch (Exception e) {
+ Log.e(TAG, "", e);
+ }
+ return null;
+ }
+
+ public static Bitmap getTextureOESImage(int textureID, int width, int height) {
+ try {
+ int[] oldFboId = new int[1];
+ GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
+
+ int[] framebuffers = new int[1];
+ GLES20.glGenFramebuffers(1, framebuffers, 0);
+ int framebufferId = framebuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
+
+ int[] renderbuffers = new int[1];
+ GLES20.glGenRenderbuffers(1, renderbuffers, 0);
+ int renderId = renderbuffers[0];
+ GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
+ GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
+
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0);
+ GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
+ if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ Log.d(TAG, "Framebuffer error");
+ }
+
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+
+ GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(framebuffers));
+            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
+
+ return bitmap;
+ } catch (Exception e) {
+ Log.e(TAG, "", e);
+ }
+ return null;
+ }
+
+ private static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
+ Bitmap bitmap = null;
+ try {
+ YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
+ bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
+ stream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return bitmap;
+ }
+
+ private static Bitmap readBitmap(int width, int height){
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+ return bitmap;
+ }
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java
new file mode 100644
index 000000000..daeb29a38
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java
@@ -0,0 +1,11 @@
+package io.agora.api.example.common.gles;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface GLThread {
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java
index ff2453e02..4ce81f103 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java
@@ -2,7 +2,7 @@
import static io.agora.rtc2.video.VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15;
import static io.agora.rtc2.video.VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE;
-import static io.agora.rtc2.video.VideoEncoderConfiguration.VD_640x360;
+import static io.agora.rtc2.video.VideoEncoderConfiguration.VD_960x540;
import android.text.TextUtils;
import android.util.Log;
@@ -66,17 +66,15 @@ public LocalAccessPointConfiguration getPrivateCloudConfig() {
}
public VideoEncoderConfiguration.VideoDimensions getVideoEncodingDimensionObject() {
- if (videoEncodingDimension == null)
- return VD_640x360;
- VideoEncoderConfiguration.VideoDimensions value = VD_640x360;
+ VideoEncoderConfiguration.VideoDimensions value = VD_960x540;
try {
- Field tmp = VideoEncoderConfiguration.class.getDeclaredField(videoEncodingDimension);
+ Field tmp = VideoEncoderConfiguration.class.getDeclaredField(getVideoEncodingDimension());
tmp.setAccessible(true);
value = (VideoEncoderConfiguration.VideoDimensions) tmp.get(null);
} catch (NoSuchFieldException e) {
- Log.e("Field", "Can not find field " + videoEncodingDimension);
+ Log.e("Field", "Can not find field " + getVideoEncodingDimension());
} catch (IllegalAccessException e) {
- Log.e("Field", "Could not access field " + videoEncodingDimension);
+ Log.e("Field", "Could not access field " + getVideoEncodingDimension());
}
return value;
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java
index 89b42c43b..16986f1c7 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java
@@ -48,8 +48,10 @@ public void onViewAttachedToWindow(View v) {
@Override
public void onViewDetachedFromWindow(View v) {
- reportTextView.removeOnAttachStateChangeListener(this);
- reportTextView = null;
+ if (reportTextView != null) {
+ reportTextView.removeOnAttachStateChangeListener(this);
+ reportTextView = null;
+ }
}
});
reportTextView.setTextColor(Color.parseColor("#eeeeee"));
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java
index 84327a0df..e71b715b6 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java
@@ -157,8 +157,9 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
}
fl_local.addView(surfaceView);
// Setup local video to render your local media player view
- VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO,
- Constants.VIDEO_SOURCE_MEDIA_PLAYER, mediaPlayer.getMediaPlayerId(), 0);
+ VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0);
+ videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER;
+ videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId();
engine.setupLocalVideo(videoCanvas);
// Your have to call startPreview to see player video
engine.startPreview();
@@ -384,8 +385,9 @@ private void toggleVideoLayout(boolean isMultiple) {
}
fl_local.addView(surfaceView);
// Setup local video to render your local media player view
- VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO,
- Constants.VIDEO_SOURCE_MEDIA_PLAYER, mediaPlayer.getMediaPlayerId(), 0);
+ VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0);
+ videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER;
+ videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId();
engine.setupLocalVideo(videoCanvas);
}
engine.startPreview();
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java
index 23c4b89b0..1e2290158 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java
@@ -472,22 +472,12 @@ public void onUserOffline(int uid, int reason) {
IVideoFrameObserver videoFrameObserver = new IVideoFrameObserver() {
@Override
- public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
+ public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
@Override
- public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) {
+ public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java
new file mode 100644
index 000000000..e352b6289
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java
@@ -0,0 +1,494 @@
+package io.agora.api.example.examples.advanced;
+
+import static io.agora.api.example.common.model.Examples.ADVANCED;
+import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN;
+import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.graphics.Color;
+import android.os.Bundle;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.CompoundButton;
+import android.widget.EditText;
+import android.widget.FrameLayout;
+import android.widget.Switch;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import java.util.ArrayList;
+
+import io.agora.api.example.MainApplication;
+import io.agora.api.example.R;
+import io.agora.api.example.annotation.Example;
+import io.agora.api.example.common.BaseFragment;
+import io.agora.api.example.common.widget.VideoReportLayout;
+import io.agora.api.example.utils.CommonUtil;
+import io.agora.api.example.utils.TokenUtils;
+import io.agora.rtc2.ChannelMediaOptions;
+import io.agora.rtc2.Constants;
+import io.agora.rtc2.IRtcEngineEventHandler;
+import io.agora.rtc2.LocalTranscoderConfiguration;
+import io.agora.rtc2.RtcEngine;
+import io.agora.rtc2.RtcEngineConfig;
+import io.agora.rtc2.ScreenCaptureParameters;
+import io.agora.rtc2.video.CameraCapturerConfiguration;
+import io.agora.rtc2.video.SegmentationProperty;
+import io.agora.rtc2.video.VideoCanvas;
+import io.agora.rtc2.video.VideoEncoderConfiguration;
+import io.agora.rtc2.video.VirtualBackgroundSource;
+
+/**This demo demonstrates how to transcode multiple local video sources (camera and screen capture) into a single published stream*/
+@Example(
+ index = 19,
+ group = ADVANCED,
+ name = R.string.item_localvideotranscoding,
+ actionId = R.id.action_mainFragment_to_LocalVideoTranscoding,
+ tipsId = R.string.localvideotranscoding
+)
+public class LocalVideoTranscoding extends BaseFragment implements View.OnClickListener, CompoundButton.OnCheckedChangeListener
+{
+ private static final String TAG = LocalVideoTranscoding.class.getSimpleName();
+
+ private VideoReportLayout videoReportLayout;
+ private Button join;
+ private Switch switchTransparentBackground;
+ private EditText et_channel;
+ private RtcEngine engine;
+ private int myUid;
+ private boolean joined = false;
+
+
+ @Nullable
+ @Override
+ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState)
+ {
+ return inflater.inflate(R.layout.fragment_localvideotranscoding, container, false);
+ }
+
+ @Override
+ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState)
+ {
+ super.onViewCreated(view, savedInstanceState);
+ join = view.findViewById(R.id.btn_join);
+ switchTransparentBackground = view.findViewById(R.id.btn_transparent_background);
+ et_channel = view.findViewById(R.id.et_channel);
+ view.findViewById(R.id.btn_join).setOnClickListener(this);
+ switchTransparentBackground.setOnCheckedChangeListener(this);
+ videoReportLayout = view.findViewById(R.id.videoReportLayout);
+ }
+
+ @Override
+ public void onActivityCreated(@Nullable Bundle savedInstanceState)
+ {
+ super.onActivityCreated(savedInstanceState);
+ // Check if the context is valid
+ Context context = getContext();
+ if (context == null)
+ {
+ return;
+ }
+ try
+ {
+ RtcEngineConfig config = new RtcEngineConfig();
+ /**
+ * The context of Android Activity
+ */
+ config.mContext = context.getApplicationContext();
+ /**
+ * The App ID issued to you by Agora. See How to get the App ID
+ */
+ config.mAppId = getString(R.string.agora_app_id);
+ /** Sets the channel profile of the Agora RtcEngine.
+ CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile.
+ Use this profile in one-on-one calls or group calls, where all users can talk freely.
+ CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast
+ channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams;
+ an audience can only receive streams.*/
+ config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING;
+ /**
+ * IRtcEngineEventHandler is an abstract class providing default implementation.
+ * The SDK uses this class to report to the app on SDK runtime events.
+ */
+ config.mEventHandler = iRtcEngineEventHandler;
+ config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT);
+ config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode();
+ engine = RtcEngine.create(config);
+ /**
+ * This parameter is for reporting the usages of APIExample to agora background.
+ * Generally, it is not necessary for you to set this parameter.
+ */
+ engine.setParameters("{"
+ + "\"rtc.report_app_scenario\":"
+ + "{"
+ + "\"appScenario\":" + 100 + ","
+ + "\"serviceType\":" + 11 + ","
+ + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\""
+ + "}"
+ + "}");
+ /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/
+ engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
+ }
+ catch (Exception e)
+ {
+ e.printStackTrace();
+ getActivity().onBackPressed();
+ }
+ }
+
+ @Override
+ public void onDestroy()
+ {
+ super.onDestroy();
+ /**leaveChannel and Destroy the RtcEngine instance*/
+ if(engine != null)
+ {
+ engine.leaveChannel();
+ engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED);
+ engine.stopCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY);
+ engine.stopScreenCapture();
+ }
+ handler.post(RtcEngine::destroy);
+ engine = null;
+ }
+
+ @SuppressLint("WrongConstant")
+ @Override
+ public void onClick(View v)
+ {
+ if (v.getId() == R.id.btn_join)
+ {
+ if (!joined)
+ {
+ CommonUtil.hideInputBoard(getActivity(), et_channel);
+ // call when join button hit
+ String channelId = et_channel.getText().toString();
+ // Check permission
+ joinChannel(channelId);
+ }
+ else
+ {
+ joined = false;
+ /**After joining a channel, the user must call the leaveChannel method to end the
+ * call before joining another channel. This method returns 0 if the user leaves the
+ * channel and releases all resources related to the call. This method call is
+ * asynchronous, and the user has not exited the channel when the method call returns.
+ * Once the user leaves the channel, the SDK triggers the onLeaveChannel callback.
+ * A successful leaveChannel method call triggers the following callbacks:
+ * 1:The local client: onLeaveChannel.
+ * 2:The remote client: onUserOffline, if the user leaving the channel is in the
+ * Communication channel, or is a BROADCASTER in the Live Broadcast profile.
+ * @returns 0: Success.
+ * < 0: Failure.
+ * PS:
+ * 1:If you call the destroy method immediately after calling the leaveChannel
+ * method, the leaveChannel process interrupts, and the SDK does not trigger
+ * the onLeaveChannel callback.
+ * 2:If you call the leaveChannel method during CDN live streaming, the SDK
+ * triggers the removeInjectStreamUrl method.*/
+ engine.leaveChannel();
+ engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED);
+ engine.stopCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY);
+ engine.stopScreenCapture();
+ join.setText(getString(R.string.join));
+ videoReportLayout.removeAllViews();
+ }
+ }
+ }
+
+ private void joinChannel(String channelId)
+ {
+ // Check if the context is valid
+ Context context = getContext();
+ if (context == null)
+ {
+ return;
+ }
+ DisplayMetrics metrics = new DisplayMetrics();
+ requireActivity().getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
+ int width = 720;
+ int height = (int) (width * 1.0f / metrics.widthPixels * metrics.heightPixels);
+
+ /**In the demo, the default is to enter as the anchor.*/
+ engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER);
+ // Enable video module
+ engine.enableVideo();
+ // Setup video encoding configs
+ engine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
+ new VideoEncoderConfiguration.VideoDimensions(width, height),
+ VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication)getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()),
+ STANDARD_BITRATE,
+ VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication)getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation())
+ ));
+
+ // Set audio route to microPhone
+ engine.setDefaultAudioRoutetoSpeakerphone(true);
+
+
+ engine.startCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY, new CameraCapturerConfiguration(CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT));
+ ScreenCaptureParameters screenCaptureParameters = new ScreenCaptureParameters();
+ screenCaptureParameters.captureVideo = true;
+ screenCaptureParameters.videoCaptureParameters.width = width;
+ screenCaptureParameters.videoCaptureParameters.height = height;
+ engine.startScreenCapture(screenCaptureParameters);
+
+
+ LocalTranscoderConfiguration config = new LocalTranscoderConfiguration();
+ config.videoOutputConfiguration = new VideoEncoderConfiguration(
+ new VideoEncoderConfiguration.VideoDimensions(width, height),
+ VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_24,
+ STANDARD_BITRATE,
+ VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE
+ );
+ config.transcodingVideoStreams = new ArrayList<>();
+
+
+ LocalTranscoderConfiguration.TranscodingVideoStream screenStream = new LocalTranscoderConfiguration.TranscodingVideoStream();
+ screenStream.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY;
+ screenStream.width = width;
+ screenStream.height = height;
+ screenStream.zOrder = 1;
+ config.transcodingVideoStreams.add(screenStream);
+
+ LocalTranscoderConfiguration.TranscodingVideoStream cameraStream = new LocalTranscoderConfiguration.TranscodingVideoStream();
+ cameraStream.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY;
+ cameraStream.width = width / 2;
+ cameraStream.height = height / 2;
+ cameraStream.x = 0;
+ cameraStream.y = height / 2;
+ cameraStream.zOrder = 2;
+ cameraStream.mirror = true;
+ config.transcodingVideoStreams.add(cameraStream);
+
+ engine.startLocalVideoTranscoder(config);
+
+ // Create render view by RtcEngine
+ SurfaceView surfaceView = new SurfaceView(context);
+ if(videoReportLayout.getChildCount() > 0)
+ {
+ videoReportLayout.removeAllViews();
+ }
+ // Setup local video to render your local camera preview
+ VideoCanvas local = new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0);
+ local.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED.getValue();
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ engine.setupLocalVideo(local);
+ // Add to the local container
+ videoReportLayout.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+
+ engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED);
+
+ ChannelMediaOptions option = new ChannelMediaOptions();
+ option.autoSubscribeAudio = true;
+ option.autoSubscribeVideo = true;
+ option.publishMicrophoneTrack = true;
+ option.publishTranscodedVideoTrack = true;
+
+ /**Please configure accessToken in the string_config file.
+ * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see
+ * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token
+ * A token generated at the server. This applies to scenarios with high-security requirements. For details, see
+ * https://docs.agora.io/en/cloud-recording/token_server_java?platform=Java*/
+ TokenUtils.gen(requireContext(), channelId, 0, ret -> {
+
+ /** Allows a user to join a channel.
+ if you do not specify the uid, we will generate the uid for you*/
+ int res = engine.joinChannel(ret, channelId, 0, option);
+ if (res != 0)
+ {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html
+ showAlert(RtcEngine.getErrorDescription(Math.abs(res)));
+ return;
+ }
+ // Prevent repeated entry
+ join.setEnabled(false);
+ });
+
+
+
+
+ }
+
+ /**
+ * IRtcEngineEventHandler is an abstract class providing default implementation.
+ * The SDK uses this class to report to the app on SDK runtime events.
+ */
+ private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler()
+ {
+ @Override
+ public void onLocalVideoTranscoderError(LocalTranscoderConfiguration.TranscodingVideoStream stream, int error) {
+ super.onLocalVideoTranscoderError(stream, error);
+ Log.i(TAG, "LocalVideoTranscoding -- onLocalVideoTranscoderError stream=" + stream + ", error=" + error);
+ }
+
+ /**
+ * Error code description can be found at:
+ * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ */
+ @Override
+ public void onError(int err) {
+ super.onError(err);
+ showLongToast("Error code:" + err + ", msg:" + RtcEngine.getErrorDescription(err));
+ if (err == Constants.ERR_INVALID_TOKEN || err == Constants.ERR_TOKEN_EXPIRED) {
+ engine.leaveChannel();
+ runOnUIThread(() -> join.setEnabled(true));
+
+ if (Constants.ERR_INVALID_TOKEN == err) {
+ showAlert(getString(R.string.token_invalid));
+ } if (Constants.ERR_TOKEN_EXPIRED == err) {
+ showAlert(getString(R.string.token_expired));
+ }
+ }
+ }
+
+ /**Occurs when a user leaves the channel.
+ * @param stats With this callback, the application retrieves the channel information,
+ * such as the call duration and statistics.*/
+ @Override
+ public void onLeaveChannel(RtcStats stats)
+ {
+ super.onLeaveChannel(stats);
+ Log.i(TAG, String.format("local user %d leaveChannel!", myUid));
+ showLongToast(String.format("local user %d leaveChannel!", myUid));
+ }
+
+ /**Occurs when the local user joins a specified channel.
+ * The channel name assignment is based on channelName specified in the joinChannel method.
+ * If the uid is not specified when joinChannel is called, the server automatically assigns a uid.
+ * @param channel Channel name
+ * @param uid User ID
+ * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/
+ @Override
+ public void onJoinChannelSuccess(String channel, int uid, int elapsed)
+ {
+ Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid));
+ showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid));
+ myUid = uid;
+ joined = true;
+ handler.post(new Runnable()
+ {
+ @Override
+ public void run()
+ {
+ join.setEnabled(true);
+ join.setText(getString(R.string.leave));
+ videoReportLayout.setReportUid(uid);
+ }
+ });
+ }
+
+ /**Since v2.9.0.
+ * This callback indicates the state change of the remote audio stream.
+ * PS: This callback does not work properly when the number of users (in the Communication profile) or
+ * broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+ * @param uid ID of the user whose audio state changes.
+ * @param state State of the remote audio
+ * REMOTE_AUDIO_STATE_STOPPED(0): The remote audio is in the default state, probably due
+ * to REMOTE_AUDIO_REASON_LOCAL_MUTED(3), REMOTE_AUDIO_REASON_REMOTE_MUTED(5),
+ * or REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7).
+ * REMOTE_AUDIO_STATE_STARTING(1): The first remote audio packet is received.
+ * REMOTE_AUDIO_STATE_DECODING(2): The remote audio stream is decoded and plays normally,
+ * probably due to REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2),
+ * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4) or REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6).
+ * REMOTE_AUDIO_STATE_FROZEN(3): The remote audio is frozen, probably due to
+ * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1).
+ * REMOTE_AUDIO_STATE_FAILED(4): The remote audio fails to start, probably due to
+ * REMOTE_AUDIO_REASON_INTERNAL(0).
+ * @param reason The reason of the remote audio state change.
+ * REMOTE_AUDIO_REASON_INTERNAL(0): Internal reasons.
+ * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1): Network congestion.
+ * REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2): Network recovery.
+ * REMOTE_AUDIO_REASON_LOCAL_MUTED(3): The local user stops receiving the remote audio
+ * stream or disables the audio module.
+ * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote audio
+ * stream or enables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_MUTED(5): The remote user stops sending the audio stream or
+ * disables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the audio stream
+ * or enables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel.
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method
+ * until the SDK triggers this callback.*/
+ @Override
+ public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) {
+ super.onRemoteAudioStateChanged(uid, state, reason, elapsed);
+ Log.i(TAG, "onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason);
+ }
+
+ /**Since v2.9.0.
+ * Occurs when the remote video state changes.
+ * PS: This callback does not work properly when the number of users (in the Communication
+ * profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+ * @param uid ID of the remote user whose video state changes.
+ * @param state State of the remote video:
+ * REMOTE_VIDEO_STATE_STOPPED(0): The remote video is in the default state, probably due
+ * to REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3), REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5),
+ * or REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7).
+ * REMOTE_VIDEO_STATE_STARTING(1): The first remote video packet is received.
+ * REMOTE_VIDEO_STATE_DECODING(2): The remote video stream is decoded and plays normally,
+ * probably due to REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2),
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4), REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6),
+ * or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9).
+ * REMOTE_VIDEO_STATE_FROZEN(3): The remote video is frozen, probably due to
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1) or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8).
+ * REMOTE_VIDEO_STATE_FAILED(4): The remote video fails to start, probably due to
+ * REMOTE_VIDEO_STATE_REASON_INTERNAL(0).
+ * @param reason The reason of the remote video state change:
+ * REMOTE_VIDEO_STATE_REASON_INTERNAL(0): Internal reasons.
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1): Network congestion.
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY(2): Network recovery.
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3): The local user stops receiving the remote
+ * video stream or disables the video module.
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote
+ * video stream or enables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5): The remote user stops sending the video
+ * stream or disables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the video
+ * stream or enables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel.
+ * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8): The remote media stream falls back to the
+ * audio-only stream due to poor network conditions.
+ * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9): The remote media stream switches
+ * back to the video stream after the network conditions improve.
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method until
+ * the SDK triggers this callback.*/
+ @Override
+ public void onRemoteVideoStateChanged(int uid, int state, int reason, int elapsed)
+ {
+ super.onRemoteVideoStateChanged(uid, state, reason, elapsed);
+ Log.i(TAG, "onRemoteVideoStateChanged->" + uid + ", state->" + state + ", reason->" + reason);
+ }
+
+ @Override
+ public void onLocalAudioStats(LocalAudioStats stats) {
+ super.onLocalAudioStats(stats);
+ videoReportLayout.setLocalAudioStats(stats);
+ }
+
+
+ @Override
+ public void onLocalVideoStats(Constants.VideoSourceType source, LocalVideoStats stats) {
+ super.onLocalVideoStats(source, stats);
+ videoReportLayout.setLocalVideoStats(stats);
+ }
+
+ };
+
+ @Override
+ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+ if(buttonView == switchTransparentBackground){
+ engine.enableVirtualBackground(isChecked, new VirtualBackgroundSource(VirtualBackgroundSource.BACKGROUND_COLOR, Color.TRANSPARENT, "", VirtualBackgroundSource.BLUR_DEGREE_HIGH), new SegmentationProperty());
+ }
+ }
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java
index ae2565a40..b813feb6b 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java
@@ -71,7 +71,7 @@ public class MediaPlayer extends BaseFragment implements View.OnClickListener, I
private SeekBar progressBar;
private long playerDuration = 0;
- private static final String SAMPLE_MOVIE_URL = "https://webdemo.agora.io/agora-web-showcase/examples/Agora-Custom-VideoSource-Web/assets/sample.mp4";
+ private static final String SAMPLE_MOVIE_URL = "https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/resources/sample.mp4";
@Nullable
@Override
@@ -271,8 +271,9 @@ private void joinChannel(String channelId) {
}
fl_local.addView(surfaceView);
// Setup local video to render your local media player view
- VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO,
- Constants.VIDEO_SOURCE_MEDIA_PLAYER, mediaPlayer.getMediaPlayerId(), 0);
+ VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0);
+ videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER;
+ videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId();
engine.setupLocalVideo(videoCanvas);
// Set audio route to microPhone
engine.setDefaultAudioRoutetoSpeakerphone(true);
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java
new file mode 100644
index 000000000..abfbcb247
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java
@@ -0,0 +1,674 @@
+package io.agora.api.example.examples.advanced;
+
+import static io.agora.api.example.common.model.Examples.ADVANCED;
+import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN;
+import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.EditText;
+import android.widget.FrameLayout;
+import android.widget.Toast;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.appcompat.app.AlertDialog;
+
+import com.yanzhenjie.permission.AndPermission;
+import com.yanzhenjie.permission.runtime.Permission;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import io.agora.api.example.MainApplication;
+import io.agora.api.example.R;
+import io.agora.api.example.annotation.Example;
+import io.agora.api.example.common.BaseFragment;
+import io.agora.api.example.common.widget.VideoReportLayout;
+import io.agora.api.example.examples.basic.JoinChannelVideo;
+import io.agora.api.example.utils.CommonUtil;
+import io.agora.api.example.utils.TokenUtils;
+import io.agora.rtc2.AgoraMediaRecorder;
+import io.agora.rtc2.ChannelMediaOptions;
+import io.agora.rtc2.Constants;
+import io.agora.rtc2.IMediaRecorderCallback;
+import io.agora.rtc2.IRtcEngineEventHandler;
+import io.agora.rtc2.RecorderInfo;
+import io.agora.rtc2.RecorderStreamInfo;
+import io.agora.rtc2.RtcEngine;
+import io.agora.rtc2.RtcEngineConfig;
+import io.agora.rtc2.video.VideoCanvas;
+import io.agora.rtc2.video.VideoEncoderConfiguration;
+
+@Example(
+ index = 17,
+ group = ADVANCED,
+ name = R.string.item_media_recorder,
+ actionId = R.id.action_mainFragment_to_MediaRecorder,
+ tipsId = R.string.media_recorder
+)
+public class MediaRecorder extends BaseFragment implements View.OnClickListener {
+    private static final String TAG = MediaRecorder.class.getSimpleName();
+
+ private VideoReportLayout fl_local, fl_remote, fl_remote_2, fl_remote_3;
+ private Button join, switch_camera;
+ private EditText et_channel;
+ private RtcEngine engine;
+ private int myUid;
+ private String channelId;
+ private boolean joined = false;
+    private final Map<Integer, VideoReportLayout> remoteViews = new ConcurrentHashMap<>();
+ private AgoraMediaRecorder localMediaRecorder;
+    private final Map<Integer, AgoraMediaRecorder> remoteMediaRecorders = new HashMap<>();
+
+
+ @Nullable
+ @Override
+ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
+ return inflater.inflate(R.layout.fragment_media_recorder, container, false);
+ }
+
+ @Override
+ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
+ super.onViewCreated(view, savedInstanceState);
+ join = view.findViewById(R.id.btn_join);
+ switch_camera = view.findViewById(R.id.btn_switch_camera);
+ et_channel = view.findViewById(R.id.et_channel);
+ view.findViewById(R.id.btn_join).setOnClickListener(this);
+ switch_camera.setOnClickListener(this);
+ fl_local = view.findViewById(R.id.fl_local);
+ fl_remote = view.findViewById(R.id.fl_remote);
+ fl_remote_2 = view.findViewById(R.id.fl_remote2);
+ fl_remote_3 = view.findViewById(R.id.fl_remote3);
+ }
+
+ @Override
+ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
+ super.onActivityCreated(savedInstanceState);
+ // Check if the context is valid
+ Context context = getContext();
+ if (context == null) {
+ return;
+ }
+ try {
+ RtcEngineConfig config = new RtcEngineConfig();
+ /**
+ * The context of Android Activity
+ */
+ config.mContext = context.getApplicationContext();
+ /**
+ * The App ID issued to you by Agora. See How to get the App ID
+ */
+ config.mAppId = getString(R.string.agora_app_id);
+ /** Sets the channel profile of the Agora RtcEngine.
+ CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile.
+ Use this profile in one-on-one calls or group calls, where all users can talk freely.
+ CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast
+ channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams;
+ an audience can only receive streams.*/
+ config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING;
+ /**
+ * IRtcEngineEventHandler is an abstract class providing default implementation.
+ * The SDK uses this class to report to the app on SDK runtime events.
+ */
+ config.mEventHandler = iRtcEngineEventHandler;
+ config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT);
+ config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode();
+ engine = RtcEngine.create(config);
+ /**
+ * This parameter is for reporting the usages of APIExample to agora background.
+ * Generally, it is not necessary for you to set this parameter.
+ */
+ engine.setParameters("{"
+ + "\"rtc.report_app_scenario\":"
+ + "{"
+ + "\"appScenario\":" + 100 + ","
+ + "\"serviceType\":" + 11 + ","
+ + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\""
+ + "}"
+ + "}");
+ /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/
+ engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
+ } catch (Exception e) {
+ e.printStackTrace();
+ getActivity().onBackPressed();
+ }
+ }
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ stopAllMediaRecorder();
+ /**leaveChannel and Destroy the RtcEngine instance*/
+ if (engine != null) {
+ engine.leaveChannel();
+ }
+ handler.post(RtcEngine::destroy);
+ engine = null;
+ }
+
+ @SuppressLint("WrongConstant")
+ @Override
+ public void onClick(View v) {
+ if (v.getId() == R.id.btn_join) {
+ if (!joined) {
+ CommonUtil.hideInputBoard(getActivity(), et_channel);
+ // call when join button hit
+ String channelId = et_channel.getText().toString();
+ // Check permission
+                List<String> permissionList = new ArrayList<>();
+ permissionList.add(Permission.READ_EXTERNAL_STORAGE);
+ permissionList.add(Permission.WRITE_EXTERNAL_STORAGE);
+ permissionList.add(Permission.RECORD_AUDIO);
+ permissionList.add(Permission.CAMERA);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+ permissionList.add(Manifest.permission.BLUETOOTH_CONNECT);
+ }
+
+ String[] permissionArray = new String[permissionList.size()];
+ permissionList.toArray(permissionArray);
+
+ if (AndPermission.hasPermissions(this, permissionArray)) {
+ joinChannel(channelId);
+ return;
+ }
+ // Request permission
+ AndPermission.with(this).runtime().permission(
+ permissionArray
+ ).onGranted(permissions ->
+ {
+ // Permissions Granted
+ joinChannel(channelId);
+ }).start();
+ } else {
+ joined = false;
+ stopAllMediaRecorder();
+ /**After joining a channel, the user must call the leaveChannel method to end the
+ * call before joining another channel. This method returns 0 if the user leaves the
+ * channel and releases all resources related to the call. This method call is
+ * asynchronous, and the user has not exited the channel when the method call returns.
+ * Once the user leaves the channel, the SDK triggers the onLeaveChannel callback.
+ * A successful leaveChannel method call triggers the following callbacks:
+ * 1:The local client: onLeaveChannel.
+ * 2:The remote client: onUserOffline, if the user leaving the channel is in the
+ * Communication channel, or is a BROADCASTER in the Live Broadcast profile.
+ * @returns 0: Success.
+ * < 0: Failure.
+ * PS:
+ * 1:If you call the destroy method immediately after calling the leaveChannel
+ * method, the leaveChannel process interrupts, and the SDK does not trigger
+ * the onLeaveChannel callback.
+ * 2:If you call the leaveChannel method during CDN live streaming, the SDK
+ * triggers the removeInjectStreamUrl method.*/
+ engine.leaveChannel();
+ join.setText(getString(R.string.join));
+ for (ViewGroup value : remoteViews.values()) {
+ value.removeAllViews();
+ resetLayoutRecording(value);
+ }
+ remoteViews.clear();
+ fl_local.removeAllViews();
+ resetLayoutRecording(fl_local);
+ }
+ } else if (v.getId() == switch_camera.getId()) {
+ if (engine != null && joined) {
+ engine.switchCamera();
+ }
+ }
+ }
+
+ private void joinChannel(String channelId) {
+ // Check if the context is valid
+ Context context = getContext();
+ if (context == null) {
+ return;
+ }
+
+ // Create render view by RtcEngine
+ SurfaceView surfaceView = new SurfaceView(context);
+ if (fl_local.getChildCount() > 0) {
+ fl_local.removeAllViews();
+ }
+ // Add to the local container
+ fl_local.addView(surfaceView, 0, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+ setupLayoutRecording(fl_local, () -> startLocalMediaRecorder(channelId), this::stopLocalMediaRecorder);
+ // Setup local video to render your local camera preview
+ engine.setupLocalVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0));
+        // Set audio route to speakerphone
+ engine.setDefaultAudioRoutetoSpeakerphone(true);
+
+ /**In the demo, the default is to enter as the anchor.*/
+ engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER);
+ // Enable video module
+ engine.enableVideo();
+ // Setup video encoding configs
+ engine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
+ ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(),
+ VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()),
+ STANDARD_BITRATE,
+ VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation())
+ ));
+
+ ChannelMediaOptions option = new ChannelMediaOptions();
+ option.autoSubscribeAudio = true;
+ option.autoSubscribeVideo = true;
+ option.publishMicrophoneTrack = true;
+ option.publishCameraTrack = true;
+
+ /**Please configure accessToken in the string_config file.
+ * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see
+ * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token
+ * A token generated at the server. This applies to scenarios with high-security requirements. For details, see
+ * https://docs.agora.io/en/cloud-recording/token_server_java?platform=Java*/
+ TokenUtils.gen(requireContext(), channelId, 0, ret -> {
+
+ /** Allows a user to join a channel.
+ if you do not specify the uid, we will generate the uid for you*/
+ int res = engine.joinChannel(ret, channelId, 0, option);
+ if (res != 0) {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html
+ showAlert(RtcEngine.getErrorDescription(Math.abs(res)));
+ return;
+ }
+ // Prevent repeated entry
+ join.setEnabled(false);
+ });
+ }
+
+ private void stopAllMediaRecorder(){
+ stopLocalMediaRecorder();
+        Set<Integer> remoteUidList = remoteMediaRecorders.keySet();
+ for (Integer uid : remoteUidList) {
+ stopRemoteMediaRecorder(uid);
+ }
+ }
+
+ private void stopRemoteMediaRecorder(int uid) {
+ AgoraMediaRecorder mediaRecorder = remoteMediaRecorders.get(uid);
+ if(mediaRecorder == null){
+ return;
+ }
+ // Stop Local Recording
+ int ret = mediaRecorder.stopRecording();
+ Toast.makeText(requireContext(), "StopRecording ret=" + ret, Toast.LENGTH_SHORT).show();
+ mediaRecorder.setMediaRecorderObserver(null);
+ engine.destroyMediaRecorder(mediaRecorder);
+ remoteMediaRecorders.remove(uid);
+ }
+
+ private void startRemoteMediaRecorder(String channelId, int uid) {
+ // Start Local Recording
+ AgoraMediaRecorder mediaRecorder = remoteMediaRecorders.get(uid);
+ String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_" + uid + ".mp4";
+ if (mediaRecorder == null) {
+ mediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, uid));
+            // Before starting recording, you must call setMediaRecorderObserver first. Otherwise, recording will fail with code -4.
+ mediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() {
+ @Override
+ public void onRecorderStateChanged(String channelId, int uid, int state, int error) {
+ Log.d(TAG, "RemoteMediaRecorder -- onRecorderStateChanged channelId=" + channelId + ", uid=" + uid + ", state=" + state + ", error=" + error);
+ if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) {
+ showRecordMediaPathDialog(storagePath);
+ }
+ }
+
+ @Override
+ public void onRecorderInfoUpdated(String channelId, int uid, RecorderInfo info) {
+ Log.d(TAG, "RemoteMediaRecorder -- onRecorderInfoUpdated channelId=" + channelId + ", uid=" + uid + ", fileName=" + info.fileName + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize);
+ }
+ });
+ remoteMediaRecorders.put(uid, mediaRecorder);
+ }
+ int ret = mediaRecorder.startRecording(new AgoraMediaRecorder.MediaRecorderConfiguration(
+ storagePath,
+ AgoraMediaRecorder.CONTAINER_MP4, AgoraMediaRecorder.STREAM_TYPE_BOTH, 120000, 0
+ ));
+ Toast.makeText(requireContext(), "StartRecording ret=" + ret, Toast.LENGTH_SHORT).show();
+ }
+
+ private void stopLocalMediaRecorder() {
+ if(localMediaRecorder == null){
+ return;
+ }
+ // Stop Local Recording
+ int ret = localMediaRecorder.stopRecording();
+ Toast.makeText(requireContext(), "StopRecording ret=" + ret, Toast.LENGTH_SHORT).show();
+ localMediaRecorder.setMediaRecorderObserver(null);
+ engine.destroyMediaRecorder(localMediaRecorder);
+ localMediaRecorder = null;
+ }
+
+ private void startLocalMediaRecorder(String channelId) {
+ // Start Local Recording
+ String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_local.mp4";
+
+ if (localMediaRecorder == null) {
+ localMediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, myUid));
+            // Before starting recording, you must call setMediaRecorderObserver first. Otherwise, recording will fail with code -4.
+ localMediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() {
+ @Override
+ public void onRecorderStateChanged(String channelId, int uid, int state, int error) {
+ Log.d(TAG, "LocalMediaRecorder -- onRecorderStateChanged channelId=" + channelId + ", uid=" + uid + ", state=" + state + ", error=" + error);
+ if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) {
+ showRecordMediaPathDialog(storagePath);
+ }
+ }
+
+ @Override
+ public void onRecorderInfoUpdated(String channelId, int uid, RecorderInfo info) {
+ Log.d(TAG, "LocalMediaRecorder -- onRecorderInfoUpdated channelId=" + channelId + ", uid=" + uid + ", fileName=" + info.fileName + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize);
+ }
+ });
+ }
+ int ret = localMediaRecorder.startRecording(new AgoraMediaRecorder.MediaRecorderConfiguration(
+ storagePath,
+ AgoraMediaRecorder.CONTAINER_MP4, AgoraMediaRecorder.STREAM_TYPE_BOTH, 120000, 0
+ ));
+ Toast.makeText(requireContext(), "StartRecording ret=" + ret, Toast.LENGTH_SHORT).show();
+ }
+
+ private void setupLayoutRecording(@NonNull ViewGroup reportLayout, @NonNull Runnable onStart, @NonNull Runnable onStop) {
+ Button btnRecording = ((ViewGroup)reportLayout.getParent()).findViewWithTag(getString(R.string.recording_tag));
+ if (btnRecording == null) {
+ return;
+ }
+ btnRecording.setText(R.string.start_recording);
+ btnRecording.setVisibility(View.VISIBLE);
+ btnRecording.setOnClickListener(v -> {
+ if (btnRecording.getText().equals(getString(R.string.start_recording))) {
+
+ btnRecording.setText(R.string.stop_recording);
+ onStart.run();
+ } else {
+ // Stop Recording
+ btnRecording.setText(R.string.start_recording);
+ onStop.run();
+ }
+ });
+ }
+
+ private void showRecordMediaPathDialog(String path){
+ runOnUIThread(() -> {
+ new AlertDialog.Builder(requireContext())
+ .setTitle("MediaFilePath")
+ .setMessage(path)
+ .setPositiveButton(R.string.confirm, (dialog, which) -> dialog.dismiss())
+ .show();
+ });
+ }
+
+ private void resetLayoutRecording(@NonNull ViewGroup reportLayout) {
+ Button btnRecording = ((ViewGroup)reportLayout.getParent()).findViewWithTag(getString(R.string.recording_tag));
+ if (btnRecording == null) {
+ return;
+ }
+ btnRecording.setVisibility(View.GONE);
+ btnRecording.setText(R.string.start_recording);
+ }
+
+ /**
+ * IRtcEngineEventHandler is an abstract class providing default implementation.
+ * The SDK uses this class to report to the app on SDK runtime events.
+ */
+ private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() {
+ /**
+ * Error code description can be found at:
+ * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror
+ */
+ @Override
+ public void onError(int err) {
+ super.onError(err);
+ showLongToast("Error code:" + err + ", msg:" + RtcEngine.getErrorDescription(err));
+ if (err == Constants.ERR_INVALID_TOKEN || err == Constants.ERR_TOKEN_EXPIRED) {
+ engine.leaveChannel();
+ runOnUIThread(() -> join.setEnabled(true));
+
+ if (Constants.ERR_INVALID_TOKEN == err) {
+ showAlert(getString(R.string.token_invalid));
+ }
+ if (Constants.ERR_TOKEN_EXPIRED == err) {
+ showAlert(getString(R.string.token_expired));
+ }
+ }
+ }
+
+ /**Occurs when a user leaves the channel.
+ * @param stats With this callback, the application retrieves the channel information,
+ * such as the call duration and statistics.*/
+ @Override
+ public void onLeaveChannel(RtcStats stats) {
+ super.onLeaveChannel(stats);
+ Log.i(TAG, String.format("local user %d leaveChannel!", myUid));
+ showLongToast(String.format("local user %d leaveChannel!", myUid));
+ }
+
+ /**Occurs when the local user joins a specified channel.
+ * The channel name assignment is based on channelName specified in the joinChannel method.
+ * If the uid is not specified when joinChannel is called, the server automatically assigns a uid.
+ * @param channel Channel name
+ * @param uid User ID
+ * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/
+ @Override
+ public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
+ Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid));
+ showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid));
+ myUid = uid;
+ channelId = channel;
+ joined = true;
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ join.setEnabled(true);
+ join.setText(getString(R.string.leave));
+ fl_local.setReportUid(uid);
+ }
+ });
+ }
+
+ /**Since v2.9.0.
+ * This callback indicates the state change of the remote audio stream.
+ * PS: This callback does not work properly when the number of users (in the Communication profile) or
+ * broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+ * @param uid ID of the user whose audio state changes.
+ * @param state State of the remote audio
+ * REMOTE_AUDIO_STATE_STOPPED(0): The remote audio is in the default state, probably due
+ * to REMOTE_AUDIO_REASON_LOCAL_MUTED(3), REMOTE_AUDIO_REASON_REMOTE_MUTED(5),
+ * or REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7).
+ * REMOTE_AUDIO_STATE_STARTING(1): The first remote audio packet is received.
+ * REMOTE_AUDIO_STATE_DECODING(2): The remote audio stream is decoded and plays normally,
+ * probably due to REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2),
+ * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4) or REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6).
+ * REMOTE_AUDIO_STATE_FROZEN(3): The remote audio is frozen, probably due to
+ * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1).
+ * REMOTE_AUDIO_STATE_FAILED(4): The remote audio fails to start, probably due to
+ * REMOTE_AUDIO_REASON_INTERNAL(0).
+ * @param reason The reason of the remote audio state change.
+ * REMOTE_AUDIO_REASON_INTERNAL(0): Internal reasons.
+ * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1): Network congestion.
+ * REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2): Network recovery.
+ * REMOTE_AUDIO_REASON_LOCAL_MUTED(3): The local user stops receiving the remote audio
+ * stream or disables the audio module.
+ * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote audio
+ * stream or enables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_MUTED(5): The remote user stops sending the audio stream or
+ * disables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the audio stream
+ * or enables the audio module.
+ * REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel.
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method
+ * until the SDK triggers this callback.*/
+ @Override
+ public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) {
+ super.onRemoteAudioStateChanged(uid, state, reason, elapsed);
+ Log.i(TAG, "onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason);
+ }
+
+ /**Since v2.9.0.
+ * Occurs when the remote video state changes.
+ * PS: This callback does not work properly when the number of users (in the Communication
+ * profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+ * @param uid ID of the remote user whose video state changes.
+ * @param state State of the remote video:
+ * REMOTE_VIDEO_STATE_STOPPED(0): The remote video is in the default state, probably due
+ * to REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3), REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5),
+ * or REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7).
+ * REMOTE_VIDEO_STATE_STARTING(1): The first remote video packet is received.
+ * REMOTE_VIDEO_STATE_DECODING(2): The remote video stream is decoded and plays normally,
+ * probably due to REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2),
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4), REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6),
+ * or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9).
+ * REMOTE_VIDEO_STATE_FROZEN(3): The remote video is frozen, probably due to
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1) or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8).
+ * REMOTE_VIDEO_STATE_FAILED(4): The remote video fails to start, probably due to
+ * REMOTE_VIDEO_STATE_REASON_INTERNAL(0).
+ * @param reason The reason of the remote video state change:
+ * REMOTE_VIDEO_STATE_REASON_INTERNAL(0): Internal reasons.
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1): Network congestion.
+ * REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY(2): Network recovery.
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3): The local user stops receiving the remote
+ * video stream or disables the video module.
+ * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote
+ * video stream or enables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5): The remote user stops sending the video
+ * stream or disables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the video
+ * stream or enables the video module.
+ * REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel.
+ * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8): The remote media stream falls back to the
+ * audio-only stream due to poor network conditions.
+ * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9): The remote media stream switches
+ * back to the video stream after the network conditions improve.
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method until
+ * the SDK triggers this callback.*/
+ @Override
+ public void onRemoteVideoStateChanged(int uid, int state, int reason, int elapsed) {
+ super.onRemoteVideoStateChanged(uid, state, reason, elapsed);
+ Log.i(TAG, "onRemoteVideoStateChanged->" + uid + ", state->" + state + ", reason->" + reason);
+ }
+
+ /**Occurs when a remote user (Communication)/host (Live Broadcast) joins the channel.
+ * @param uid ID of the user whose audio state changes.
+ * @param elapsed Time delay (ms) from the local user calling joinChannel/setClientRole
+ * until this callback is triggered.*/
+ @Override
+ public void onUserJoined(int uid, int elapsed) {
+ super.onUserJoined(uid, elapsed);
+ Log.i(TAG, "onUserJoined->" + uid);
+ showLongToast(String.format("user %d joined!", uid));
+ /**Check if the context is correct*/
+ Context context = getContext();
+ if (context == null) {
+ return;
+ }
+ if (remoteViews.containsKey(uid)) {
+ return;
+ } else {
+ handler.post(() ->
+ {
+ /**Display remote video stream*/
+ SurfaceView surfaceView = null;
+ // Create render view by RtcEngine
+ surfaceView = new SurfaceView(context);
+ surfaceView.setZOrderMediaOverlay(true);
+ VideoReportLayout view = getAvailableView();
+ view.setReportUid(uid);
+ setupLayoutRecording(view, () -> startRemoteMediaRecorder(channelId, uid), () -> stopRemoteMediaRecorder(uid));
+ remoteViews.put(uid, view);
+ // Add to the remote container
+ view.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+ // Setup remote video to render
+ engine.setupRemoteVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, uid));
+ });
+ }
+ }
+
+ /**Occurs when a remote user (Communication)/host (Live Broadcast) leaves the channel.
+ * @param uid ID of the user whose audio state changes.
+ * @param reason Reason why the user goes offline:
+ * USER_OFFLINE_QUIT(0): The user left the current channel.
+ * USER_OFFLINE_DROPPED(1): The SDK timed out and the user dropped offline because no data
+ * packet was received within a certain period of time. If a user quits the
+ * call and the message is not passed to the SDK (due to an unreliable channel),
+ * the SDK assumes the user dropped offline.
+ * USER_OFFLINE_BECOME_AUDIENCE(2): (Live broadcast only.) The client role switched from
+ * the host to the audience.*/
+ @Override
+ public void onUserOffline(int uid, int reason) {
+ Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason));
+ showLongToast(String.format("user %d offline! reason:%d", uid, reason));
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ /**Clear render view
+ Note: The video will stay at its last frame, to completely remove it you will need to
+ remove the SurfaceView from its parent*/
+ engine.setupRemoteVideo(new VideoCanvas(null, RENDER_MODE_HIDDEN, uid));
+ stopRemoteMediaRecorder(uid);
+ ViewGroup viewGroup = remoteViews.get(uid);
+ resetLayoutRecording(viewGroup);
+ viewGroup.removeAllViews();
+ remoteViews.remove(uid);
+ }
+ });
+ }
+
+ @Override
+ public void onLocalAudioStats(LocalAudioStats stats) {
+ super.onLocalAudioStats(stats);
+ fl_local.setLocalAudioStats(stats);
+ }
+
+ @Override
+ public void onRemoteAudioStats(RemoteAudioStats stats) {
+ super.onRemoteAudioStats(stats);
+ fl_remote.setRemoteAudioStats(stats);
+ fl_remote_2.setRemoteAudioStats(stats);
+ fl_remote_3.setRemoteAudioStats(stats);
+ }
+
+ @Override
+ public void onLocalVideoStats(Constants.VideoSourceType source, LocalVideoStats stats) {
+ super.onLocalVideoStats(source, stats);
+ fl_local.setLocalVideoStats(stats);
+ }
+
+ @Override
+ public void onRemoteVideoStats(RemoteVideoStats stats) {
+ super.onRemoteVideoStats(stats);
+ fl_remote.setRemoteVideoStats(stats);
+ fl_remote_2.setRemoteVideoStats(stats);
+ fl_remote_3.setRemoteVideoStats(stats);
+ }
+ };
+
+ private VideoReportLayout getAvailableView() {
+ if (fl_remote.getChildCount() == 0) {
+ return fl_remote;
+ } else if (fl_remote_2.getChildCount() == 0) {
+ return fl_remote_2;
+ } else if (fl_remote_3.getChildCount() == 0) {
+ return fl_remote_3;
+ } else {
+ return fl_remote;
+ }
+ }
+
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java
index 05c28b5f9..cadacc747 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java
@@ -5,6 +5,7 @@
import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
import android.content.Context;
+import android.graphics.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
@@ -13,6 +14,8 @@
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@@ -20,18 +23,27 @@
import com.yanzhenjie.permission.AndPermission;
import com.yanzhenjie.permission.runtime.Permission;
+import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
+import java.util.concurrent.Callable;
import io.agora.api.example.MainApplication;
import io.agora.api.example.R;
import io.agora.api.example.annotation.Example;
import io.agora.api.example.common.BaseFragment;
import io.agora.api.example.common.widget.VideoReportLayout;
+import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram;
import io.agora.api.example.utils.CommonUtil;
import io.agora.api.example.utils.TokenUtils;
import io.agora.api.example.utils.VideoFileReader;
+import io.agora.base.JavaI420Buffer;
+import io.agora.base.NV12Buffer;
+import io.agora.base.NV21Buffer;
+import io.agora.base.TextureBufferHelper;
+import io.agora.base.VideoFrame;
+import io.agora.base.internal.video.YuvHelper;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
@@ -39,6 +51,7 @@
import io.agora.rtc2.RtcEngine;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
+import io.agora.rtc2.gl.EglBaseProvider;
import io.agora.rtc2.video.VideoCanvas;
import io.agora.rtc2.video.VideoEncoderConfiguration;
@@ -59,10 +72,13 @@ public class MultiVideoSourceTracks extends BaseFragment implements View.OnClick
private RtcEngineEx engine;
private int myUid;
private volatile boolean joined = false;
+ private Spinner sp_push_buffer_type;
 private final List<Integer> videoTrackIds = new ArrayList<>();
 private final List<VideoFileReader> videoFileReaders = new ArrayList<>();
 private final List<RtcConnection> connections = new ArrayList<>();
+ private YuvFboProgram yuvFboProgram;
+ private TextureBufferHelper textureBufferHelper;
@Nullable
@@ -86,6 +102,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
};
view.findViewById(R.id.btn_track_create).setOnClickListener(v -> createPushingVideoTrack());
view.findViewById(R.id.btn_track_destroy).setOnClickListener(v -> destroyLastPushingVideoTrack());
+ sp_push_buffer_type = view.findViewById(R.id.sp_buffer_type);
}
@Override
@@ -222,8 +239,6 @@ private void joinChannel(String channelId) {
option.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
option.autoSubscribeAudio = true;
option.autoSubscribeVideo = true;
- option.publishCameraTrack = false;
- option.publishMicrophoneTrack = false;
int res = engine.joinChannel(accessToken, channelId, 0, option);
if (res != 0) {
// Usually happens with invalid parameters
@@ -239,35 +254,190 @@ private void joinChannel(String channelId) {
}
private void createPushingVideoTrack() {
- if(!joined || videoTrackIds.size() >= 4){
+ if (!joined || videoTrackIds.size() >= 4) {
return;
}
+ /*
+ * Get a custom video track id created internally, which can be used to publish or preview
+ *
+ * @return
+ * - > 0: the usable video track id.
+ * - < 0: Failure.
+ */
int videoTrack = engine.createCustomVideoTrack();
+ if (videoTrack < 0) {
+ Toast.makeText(requireContext(), "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show();
+ return;
+ }
String channelId = et_channel.getText().toString();
int uid = new Random().nextInt(1000) + 20000;
RtcConnection connection = new RtcConnection(channelId, uid);
+ /*
+ Generate a token by restful api, which could be used to join channel with token.
+ */
TokenUtils.gen(requireContext(), channelId, uid, accessToken -> {
ChannelMediaOptions option = new ChannelMediaOptions();
option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER;
option.autoSubscribeAudio = true;
option.autoSubscribeVideo = true;
- option.publishCameraTrack = false;
option.publishCustomVideoTrack = true;
+ /*
+ specify custom video track id to publish in this channel.
+ */
option.customVideoTrackId = videoTrack;
+ /*
+ * Joins a channel.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ * - -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set
+ * to an integer, or the value of a member in the `ChannelMediaOptions` structure is invalid. You need
+ * to pass in a valid parameter and join the channel again.
+ * - -3: Fails to initialize the `IRtcEngine` object. You need to reinitialize the IRtcEngine object.
+ * - -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine
+ * object before calling this method.
+ * - -8: The internal state of the IRtcEngine object is wrong. The typical cause is that you call
+ * this method to join the channel without calling `stopEchoTest` to stop the test after calling
+ * `startEchoTest` to start a call loop test. You need to call `stopEchoTest` before calling this method.
+ * - -17: The request to join the channel is rejected. The typical cause is that the user is in the
+ * channel. Agora recommends using the `onConnectionStateChanged` callback to get whether the user is
+ * in the channel. Do not call this method to join the channel unless you receive the
+ * `CONNECTION_STATE_DISCONNECTED(1)` state.
+ * - -102: The channel name is invalid. You need to pass in a valid channel name in channelId to
+ * rejoin the channel.
+ * - -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel.
+ */
int res = engine.joinChannelEx(accessToken, connection, option, new IRtcEngineEventHandler() {
});
if (res != 0) {
+ /*
+ * destroy a created custom video track id
+ *
+ * @param video_track_id The video track id which was created by createCustomVideoTrack
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
engine.destroyCustomVideoTrack(videoTrack);
showAlert(RtcEngine.getErrorDescription(Math.abs(res)));
} else {
- VideoFileReader videoFileReader = new VideoFileReader(requireContext(), videoFrame -> {
- if (engine != null && joined) {
- engine.pushExternalVideoFrameEx(videoFrame, videoTrack);
+ /*
+ * VideoFileReader can get nv21 buffer data of sample.yuv file in assets cyclically.
+ */
+ VideoFileReader videoFileReader = new VideoFileReader(requireContext(), (yuv, width, height) -> {
+ if (joined && engine != null) {
+ String selectedItem = (String) sp_push_buffer_type.getSelectedItem();
+
+ /*
+ * Below show how to create different type buffers.
+ */
+ VideoFrame.Buffer frameBuffer;
+ if ("NV21".equals(selectedItem)) {
+ int srcStrideY = width;
+ int srcHeightY = height;
+ int srcSizeY = srcStrideY * srcHeightY;
+ ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY);
+ srcY.put(yuv, 0, srcSizeY);
+
+ int srcStrideU = width / 2;
+ int srcHeightU = height / 2;
+ int srcSizeU = srcStrideU * srcHeightU;
+ ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU);
+ srcU.put(yuv, srcSizeY, srcSizeU);
+
+ int srcStrideV = width / 2;
+ int srcHeightV = height / 2;
+ int srcSizeV = srcStrideV * srcHeightV;
+ ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV);
+ srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV);
+
+ int desSize = srcSizeY + srcSizeU + srcSizeV;
+ ByteBuffer des = ByteBuffer.allocateDirect(desSize);
+ YuvHelper.I420ToNV12(srcY, srcStrideY, srcV, srcStrideV, srcU, srcStrideU, des, width, height);
+
+ byte[] nv21 = new byte[desSize];
+ des.position(0);
+ des.get(nv21);
+
+ frameBuffer = new NV21Buffer(nv21, width, height, null);
+ } else if ("NV12".equals(selectedItem)) {
+ int srcStrideY = width;
+ int srcHeightY = height;
+ int srcSizeY = srcStrideY * srcHeightY;
+ ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY);
+ srcY.put(yuv, 0, srcSizeY);
+
+ int srcStrideU = width / 2;
+ int srcHeightU = height / 2;
+ int srcSizeU = srcStrideU * srcHeightU;
+ ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU);
+ srcU.put(yuv, srcSizeY, srcSizeU);
+
+ int srcStrideV = width / 2;
+ int srcHeightV = height / 2;
+ int srcSizeV = srcStrideV * srcHeightV;
+ ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV);
+ srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV);
+
+ int desSize = srcSizeY + srcSizeU + srcSizeV;
+ ByteBuffer des = ByteBuffer.allocateDirect(desSize);
+ YuvHelper.I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, des, width, height);
+
+ frameBuffer = new NV12Buffer(width, height, width, height, des, null);
+ } else if ("Texture2D".equals(selectedItem)) {
+ if (textureBufferHelper == null) {
+ textureBufferHelper = TextureBufferHelper.create("PushExternalVideoYUV", EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
+ }
+ if (yuvFboProgram == null) {
+ textureBufferHelper.invoke((Callable) () -> {
+ yuvFboProgram = new YuvFboProgram();
+ return null;
+ });
+ }
+ Integer textureId = textureBufferHelper.invoke(() -> yuvFboProgram.drawYuv(yuv, width, height));
+ frameBuffer = textureBufferHelper.wrapTextureBuffer(width, height, VideoFrame.TextureBuffer.Type.RGB, textureId, new Matrix());
+ } else {
+ // I420 type default
+ JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height);
+ i420Buffer.getDataY().put(yuv, 0, i420Buffer.getDataY().limit());
+ i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit());
+ i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit());
+ frameBuffer = i420Buffer;
+ }
+
+
+ /*
+ * Get monotonic time in ms which can be used by capture time,
+ * typical scenario is as follows:
+ */
+ long currentMonotonicTimeInMs = engine.getCurrentMonotonicTimeInMs();
+ /*
+ * Create a video frame to push.
+ */
+ VideoFrame videoFrame = new VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000);
+
+ /*
+ * Pushes the external video frame to the app.
+ *
+ * @param frame The external video frame: ExternalVideoFrame.
+ * @param videoTrackId The id of the video track.
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ int ret = engine.pushExternalVideoFrameEx(videoFrame, videoTrack);
+ if (ret < 0) {
+ Log.w(TAG, "pushExternalVideoFrameEx error code=" + ret);
+ }
}
});
videoFileReader.start();
+
+ /*
+ * Cache video track ids, video file readers and rtc connections to release while the fragment is being destroyed.
+ */
videoTrackIds.add(videoTrack);
videoFileReaders.add(videoFileReader);
connections.add(connection);
@@ -277,22 +447,30 @@ private void createPushingVideoTrack() {
private int destroyLastPushingVideoTrack() {
int lastIndex = videoTrackIds.size() - 1;
- if(lastIndex < 0){
+ if (lastIndex < 0) {
return lastIndex;
}
int videoTrack = videoTrackIds.remove(lastIndex);
VideoFileReader videoFileReader = videoFileReaders.remove(lastIndex);
RtcConnection connection = connections.remove(lastIndex);
- videoFileReader.stop();
+ /*
+ * destroy a created custom video track id
+ *
+ * @param video_track_id The video track id which was created by createCustomVideoTrack
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
engine.destroyCustomVideoTrack(videoTrack);
+ videoFileReader.stop();
engine.leaveChannelEx(connection);
return lastIndex;
}
- private void destroyAllPushingVideoTrack(){
+ private void destroyAllPushingVideoTrack() {
int index = videoTrackIds.size() - 1;
- while (index >= 0){
+ while (index >= 0) {
index = destroyLastPushingVideoTrack();
}
}
@@ -443,7 +621,7 @@ public void onUserOffline(int uid, int reason) {
public void onRemoteVideoStats(RemoteVideoStats stats) {
super.onRemoteVideoStats(stats);
VideoReportLayout videoLayoutByUid = getVideoLayoutByUid(stats.uid);
- if(videoLayoutByUid != null){
+ if (videoLayoutByUid != null) {
videoLayoutByUid.setRemoteVideoStats(stats);
}
}
@@ -452,7 +630,7 @@ public void onRemoteVideoStats(RemoteVideoStats stats) {
public void onRemoteAudioStats(RemoteAudioStats stats) {
super.onRemoteAudioStats(stats);
VideoReportLayout videoLayoutByUid = getVideoLayoutByUid(stats.uid);
- if(videoLayoutByUid != null){
+ if (videoLayoutByUid != null) {
videoLayoutByUid.setRemoteAudioStats(stats);
}
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java
index aa8755f0f..8884f1a97 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java
@@ -239,8 +239,7 @@ private void joinChannel(String channelId) {
/**Set up to play remote sound with receiver*/
engine.setDefaultAudioRoutetoSpeakerphone(true);
- int ret = engine.registerVideoFrameObserver(iVideoFrameObserver);
- // Enable video module should be after calling registerVideoFrameObserver
+ engine.registerVideoFrameObserver(iVideoFrameObserver);
engine.enableVideo();
engine.startPreview();
@@ -274,7 +273,7 @@ private void joinChannel(String channelId) {
private final IVideoFrameObserver iVideoFrameObserver = new IVideoFrameObserver() {
@Override
- public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
+ public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
Log.i(TAG, "OnEncodedVideoImageReceived"+Thread.currentThread().getName());
long startTime = System.currentTimeMillis();
@@ -339,17 +338,7 @@ public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
}
@Override
- public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) {
+ public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java
index d8a7d050e..20b76ba04 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java
@@ -6,6 +6,7 @@
import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
import android.content.Context;
+import android.graphics.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
@@ -16,6 +17,7 @@
import android.widget.Button;
import android.widget.EditText;
import android.widget.FrameLayout;
+import android.widget.Spinner;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@@ -23,19 +25,30 @@
import com.yanzhenjie.permission.AndPermission;
import com.yanzhenjie.permission.runtime.Permission;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Callable;
+
import io.agora.api.example.MainApplication;
import io.agora.api.example.R;
import io.agora.api.example.annotation.Example;
import io.agora.api.example.common.BaseFragment;
+import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram;
import io.agora.api.example.utils.CommonUtil;
import io.agora.api.example.utils.TokenUtils;
import io.agora.api.example.utils.VideoFileReader;
+import io.agora.base.JavaI420Buffer;
+import io.agora.base.NV12Buffer;
+import io.agora.base.NV21Buffer;
+import io.agora.base.TextureBufferHelper;
+import io.agora.base.VideoFrame;
+import io.agora.base.internal.video.YuvHelper;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcEngine;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
+import io.agora.rtc2.gl.EglBaseProvider;
import io.agora.rtc2.video.VideoCanvas;
import io.agora.rtc2.video.VideoEncoderConfiguration;
@@ -53,11 +66,16 @@ public class PushExternalVideoYUV extends BaseFragment implements View.OnClickLi
private Button join;
private EditText et_channel;
private RtcEngineEx engine;
+ private Spinner sp_push_buffer_type;
private int myUid;
private volatile boolean joined = false;
private VideoFileReader videoFileReader;
+ private YuvFboProgram yuvFboProgram;
+ private TextureBufferHelper textureBufferHelper;
+
+
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
@@ -73,6 +91,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
view.findViewById(R.id.btn_join).setOnClickListener(this);
fl_local = view.findViewById(R.id.fl_local);
fl_remote = view.findViewById(R.id.fl_remote);
+ sp_push_buffer_type = view.findViewById(R.id.sp_buffer_type);
}
@Override
@@ -106,7 +125,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
*/
config.mEventHandler = iRtcEngineEventHandler;
config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT);
- config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode();
+ config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode();
engine = (RtcEngineEx) RtcEngine.create(config);
/**
* This parameter is for reporting the usages of APIExample to agora background.
@@ -122,6 +141,8 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
+ "}");
/* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/
engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
+
+
} catch (Exception e) {
e.printStackTrace();
getActivity().onBackPressed();
@@ -131,9 +152,20 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
@Override
public void onDestroy() {
- if(videoFileReader != null){
+ if (videoFileReader != null) {
videoFileReader.stop();
}
+ if (textureBufferHelper != null) {
+ textureBufferHelper.invoke(() -> {
+ if (yuvFboProgram != null) {
+ yuvFboProgram.release();
+ yuvFboProgram = null;
+ }
+ return null;
+ });
+ textureBufferHelper.dispose();
+ textureBufferHelper = null;
+ }
/**leaveChannel and Destroy the RtcEngine instance*/
if (engine != null) {
@@ -187,7 +219,7 @@ public void onClick(View v) {
} else {
joined = false;
join.setText(getString(R.string.join));
- if(videoFileReader != null){
+ if (videoFileReader != null) {
videoFileReader.stop();
}
fl_remote.removeAllViews();
@@ -230,12 +262,13 @@ private void joinChannel(String channelId) {
* @param pushMode
* VIDEO_FRAME: Use the ENCODED_VIDEO_FRAME.
* ENCODED_VIDEO_FRAME: Use the ENCODED_VIDEO_FRAME*/
- engine.setExternalVideoSource(true, false, Constants.ExternalVideoSourceType.VIDEO_FRAME);
+ engine.setExternalVideoSource(true, true, Constants.ExternalVideoSourceType.VIDEO_FRAME);
TextureView textureView = new TextureView(getContext());
- engine.setupLocalVideo(new VideoCanvas(textureView,
- Constants.RENDER_MODE_FIT, Constants.VIDEO_MIRROR_MODE_DISABLED,
- Constants.VIDEO_SOURCE_CUSTOM, 0));
+ VideoCanvas local = new VideoCanvas(textureView, Constants.RENDER_MODE_FIT, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ local.sourceType = Constants.VIDEO_SOURCE_CUSTOM;
+ engine.setupLocalVideo(local);
// Add to the local container
fl_local.removeAllViews();
fl_local.addView(textureView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
@@ -254,7 +287,6 @@ private void joinChannel(String channelId) {
ChannelMediaOptions option = new ChannelMediaOptions();
option.autoSubscribeAudio = true;
option.autoSubscribeVideo = true;
- option.publishCameraTrack = false;
option.publishCustomVideoTrack = true;
int res = engine.joinChannel(accessToken, channelId, 0, option);
if (res != 0) {
@@ -278,12 +310,6 @@ private void joinChannel(String channelId) {
* The SDK uses this class to report to the app on SDK runtime events.
*/
private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() {
- /**Reports a warning during SDK runtime.
- * Warning code: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_warn_code.html*/
- @Override
- public void onWarning(int warn) {
- Log.w(TAG, String.format("onWarning code %d message %s", warn, RtcEngine.getErrorDescription(warn)));
- }
/**Occurs when a user leaves the channel.
* @param stats With this callback, the application retrieves the channel information,
@@ -314,9 +340,106 @@ public void run() {
join.setText(getString(R.string.leave));
if (videoFileReader == null) {
- videoFileReader = new VideoFileReader(requireContext(), videoFrame -> {
- if(joined && engine != null){
- engine.pushExternalVideoFrame(videoFrame);
+ /*
+ * VideoFileReader can get nv21 buffer data of sample.yuv file in assets cyclically.
+ */
+ videoFileReader = new VideoFileReader(requireContext(), (yuv, width, height) -> {
+ if (joined && engine != null) {
+ String selectedItem = (String) sp_push_buffer_type.getSelectedItem();
+ /*
+ * Below show how to create different type buffers.
+ */
+ VideoFrame.Buffer frameBuffer;
+ if ("NV21".equals(selectedItem)) {
+ int srcStrideY = width;
+ int srcHeightY = height;
+ int srcSizeY = srcStrideY * srcHeightY;
+ ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY);
+ srcY.put(yuv, 0, srcSizeY);
+
+ int srcStrideU = width / 2;
+ int srcHeightU = height / 2;
+ int srcSizeU = srcStrideU * srcHeightU;
+ ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU);
+ srcU.put(yuv, srcSizeY, srcSizeU);
+
+ int srcStrideV = width / 2;
+ int srcHeightV = height / 2;
+ int srcSizeV = srcStrideV * srcHeightV;
+ ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV);
+ srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV);
+
+ int desSize = srcSizeY + srcSizeU + srcSizeV;
+ ByteBuffer des = ByteBuffer.allocateDirect(desSize);
+ YuvHelper.I420ToNV12(srcY, srcStrideY, srcV, srcStrideV, srcU, srcStrideU, des, width, height);
+
+ byte[] nv21 = new byte[desSize];
+ des.position(0);
+ des.get(nv21);
+
+ frameBuffer = new NV21Buffer(nv21, width, height, null);
+ } else if ("NV12".equals(selectedItem)) {
+ int srcStrideY = width;
+ int srcHeightY = height;
+ int srcSizeY = srcStrideY * srcHeightY;
+ ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY);
+ srcY.put(yuv, 0, srcSizeY);
+
+ int srcStrideU = width / 2;
+ int srcHeightU = height / 2;
+ int srcSizeU = srcStrideU * srcHeightU;
+ ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU);
+ srcU.put(yuv, srcSizeY, srcSizeU);
+
+ int srcStrideV = width / 2;
+ int srcHeightV = height / 2;
+ int srcSizeV = srcStrideV * srcHeightV;
+ ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV);
+ srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV);
+
+ int desSize = srcSizeY + srcSizeU + srcSizeV;
+ ByteBuffer des = ByteBuffer.allocateDirect(desSize);
+ YuvHelper.I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, des, width, height);
+
+ frameBuffer = new NV12Buffer(width, height, width, height, des, null);
+ } else if ("Texture2D".equals(selectedItem)) {
+ if (textureBufferHelper == null) {
+ textureBufferHelper = TextureBufferHelper.create("PushExternalVideoYUV", EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
+ }
+ if (yuvFboProgram == null) {
+ textureBufferHelper.invoke((Callable) () -> {
+ yuvFboProgram = new YuvFboProgram();
+ return null;
+ });
+ }
+ Integer textureId = textureBufferHelper.invoke(() -> yuvFboProgram.drawYuv(yuv, width, height));
+ frameBuffer = textureBufferHelper.wrapTextureBuffer(width, height, VideoFrame.TextureBuffer.Type.RGB, textureId, new Matrix());
+ } else {
+ // I420 type default
+ JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height);
+ i420Buffer.getDataY().put(yuv, 0, i420Buffer.getDataY().limit());
+ i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit());
+ i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit());
+ frameBuffer = i420Buffer;
+ }
+
+ /*
+ * Get monotonic time in ms which can be used by capture time,
+ * typical scenario is as follows:
+ */
+ long currentMonotonicTimeInMs = engine.getCurrentMonotonicTimeInMs();
+ /*
+ * Create a video frame to push.
+ */
+ VideoFrame videoFrame = new VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000);
+
+ /*
+ * Pushes the external video frame to the app.
+ */
+ boolean success = engine.pushExternalVideoFrame(videoFrame);
+ if (!success) {
+ Log.w(TAG, "pushExternalVideoFrame error");
+ }
}
});
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java
index 1c322d70f..6e102c6f8 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java
@@ -23,11 +23,13 @@
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
+import android.widget.AdapterView;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.SeekBar;
+import android.widget.Spinner;
import android.widget.Switch;
import androidx.annotation.NonNull;
@@ -66,7 +68,7 @@
tipsId = R.string.screensharing
)
public class ScreenSharing extends BaseFragment implements View.OnClickListener,
- CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener {
+ CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener, AdapterView.OnItemSelectedListener {
private static final String TAG = ScreenSharing.class.getSimpleName();
private static final int PROJECTION_REQ_CODE = 1 << 2;
private static final int DEFAULT_SHARE_FRAME_RATE = 15;
@@ -81,6 +83,7 @@ public class ScreenSharing extends BaseFragment implements View.OnClickListener,
private final ScreenCaptureParameters screenCaptureParameters = new ScreenCaptureParameters();
private Intent fgServiceIntent;
+ private Spinner screenScenarioType;
@Nullable
@Override
@@ -102,7 +105,9 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
screenPreview = view.findViewById(R.id.screen_preview);
screenAudio = view.findViewById(R.id.screen_audio);
screenAudioVolume = view.findViewById(R.id.screen_audio_volume);
+ screenScenarioType = view.findViewById(R.id.spinner_screen_scenario_type);
+ screenScenarioType.setOnItemSelectedListener(this);
screenPreview.setOnCheckedChangeListener(this);
screenAudio.setOnCheckedChangeListener(this);
screenAudioVolume.setOnSeekBarChangeListener(this);
@@ -244,20 +249,17 @@ private void startScreenSharePreview() {
// Add to the local container
fl_local.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
// Setup local video to render your local camera preview
- engine.setupLocalVideo(new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT,
- Constants.VIDEO_MIRROR_MODE_DISABLED,
- Constants.VIDEO_SOURCE_SCREEN_PRIMARY,
- 0));
+ VideoCanvas local = new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ local.sourceType = Constants.VIDEO_SOURCE_SCREEN_PRIMARY;
+ engine.setupLocalVideo(local);
engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY);
}
private void stopScreenSharePreview() {
fl_local.removeAllViews();
- engine.setupLocalVideo(new VideoCanvas(null, Constants.RENDER_MODE_FIT,
- Constants.VIDEO_MIRROR_MODE_DISABLED,
- Constants.VIDEO_SOURCE_SCREEN_PRIMARY,
- 0));
+ engine.setupLocalVideo(new VideoCanvas(null));
engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY);
}
@@ -512,6 +514,18 @@ public void onStopTrackingTouch(SeekBar seekBar) {
}
+ @Override
+ public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+ if (parent == screenScenarioType) {
+ engine.setScreenCaptureScenario(Constants.ScreenScenarioType.valueOf(screenScenarioType.getSelectedItem().toString()));
+ }
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> parent) {
+
+ }
+
public static class MediaProjectFgService extends Service {
@Nullable
@Override
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java
index 2f7805e67..0854c2576 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java
@@ -327,10 +327,10 @@ private void addScreenSharePreview() {
// Add to the local container
fl_screen.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
// Setup local video to render your local camera preview
- engine.setupLocalVideo(new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT,
- Constants.VIDEO_MIRROR_MODE_DISABLED,
- Constants.VIDEO_SOURCE_SCREEN_PRIMARY,
- 0));
+ VideoCanvas local = new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ local.sourceType = Constants.VIDEO_SOURCE_SCREEN_PRIMARY;
+ engine.setupLocalVideo(local);
engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY);
}
@@ -349,12 +349,9 @@ private void addCameraPreview() {
// Add to the local container
fl_camera.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
// Setup local video to render your local camera preview
- engine.setupLocalVideo(new VideoCanvas(
- surfaceView,
- RENDER_MODE_HIDDEN,
- Constants.VIDEO_MIRROR_MODE_AUTO,
- Constants.VIDEO_SOURCE_CAMERA_PRIMARY,
- 0));
+ VideoCanvas local = new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0);
+ local.sourceType = Constants.VIDEO_SOURCE_CAMERA_PRIMARY;
+ engine.setupLocalVideo(local);
engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY);
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java
index c83325d87..dbdd36f11 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java
@@ -6,7 +6,6 @@
import android.content.Context;
import android.os.Bundle;
-import android.os.Environment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
@@ -132,7 +131,7 @@ private void resetVirtualBackground() {
SegmentationProperty segproperty = new SegmentationProperty();
if (checkedId == R.id.virtual_bg_image) {
backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_IMG;
- String imagePath = Environment.getExternalStorageDirectory().getPath();
+ String imagePath = requireContext().getExternalCacheDir().getPath();
String imageName = "agora-logo.png";
FileUtils.copyFilesFromAssets(getContext(), imageName, imagePath);
backgroundSource.source = imagePath + FileUtils.SEPARATOR + imageName;
@@ -142,6 +141,9 @@ private void resetVirtualBackground() {
} else if (checkedId == R.id.virtual_bg_blur) {
backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_BLUR;
backgroundSource.blurDegree = VirtualBackgroundSource.BLUR_DEGREE_MEDIUM;
+ } else if (checkedId == R.id.virtual_bg_video) {
+ backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_VIDEO;
+ backgroundSource.source = "https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/resources/sample.mp4";
}
engine.enableVirtualBackground(true, backgroundSource, segproperty);
}else{
@@ -200,6 +202,8 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState)
+ "}");
/* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/
engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
+
+ engine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true);
}
catch (Exception e)
{
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
index dea59d728..63b0ed474 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java
@@ -27,6 +27,10 @@
import static io.agora.rtc2.Constants.ULTRA_HIGH_QUALITY_VOICE;
import static io.agora.rtc2.Constants.VOICE_BEAUTIFIER_OFF;
import static io.agora.rtc2.Constants.VOICE_CHANGER_BASS;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CARTOON;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CHILDLIKE;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_CHIPMUNK;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_DARTH_VADER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_BOY;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_GIRL;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_HULK;
@@ -34,9 +38,16 @@
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_PIGKING;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_SISTER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_UNCLE;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_GIRLISH_MAN;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_GROOT;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_IRON_LADY;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_MONSTER;
import static io.agora.rtc2.Constants.VOICE_CHANGER_NEUTRAL;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_PHONE_OPERATOR;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_SHIN_CHAN;
import static io.agora.rtc2.Constants.VOICE_CHANGER_SOLID;
import static io.agora.rtc2.Constants.VOICE_CHANGER_SWEET;
+import static io.agora.rtc2.Constants.VOICE_CHANGER_TRANSFORMERS;
import static io.agora.rtc2.Constants.VOICE_CONVERSION_OFF;
import android.content.Context;
@@ -97,7 +108,7 @@ public class VoiceEffects extends BaseFragment implements View.OnClickListener,
chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion,
customBandFreq, customReverbKey;
private ViewGroup _voice3DLayout, _pitchModeLayout, _pitchValueLayout;
- private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue;
+ private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue, customVoiceFormant;
private AudioSeatManager audioSeatManager;
@@ -159,10 +170,12 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
customBandGain = view.findViewById(R.id.audio_custom_band_gain); // engine.setLocalVoiceEqualization()
customReverbKey = view.findViewById(R.id.audio_custom_reverb_key);
customReverbValue = view.findViewById(R.id.audio_custom_reverb_value); //engine.setLocalVoiceReverb()
+ customVoiceFormant = view.findViewById(R.id.audio_voice_formant_value); //engine.setLocalVoiceFormant()
customPitch.setOnSeekBarChangeListener(this);
customBandGain.setOnSeekBarChangeListener(this);
customReverbValue.setOnSeekBarChangeListener(this);
+ customVoiceFormant.setOnSeekBarChangeListener(this);
customBandFreq.setOnItemSelectedListener(this);
customReverbKey.setOnItemSelectedListener(this);
@@ -194,6 +207,7 @@ private void resetControlLayoutByJoined() {
customBandGain.setEnabled(joined);
customReverbKey.setEnabled(joined);
customReverbValue.setEnabled(joined);
+ customVoiceFormant.setEnabled(joined);
chatBeautifier.setSelection(0);
@@ -206,6 +220,7 @@ private void resetControlLayoutByJoined() {
customPitch.setProgress(0);
customBandGain.setProgress(0);
customReverbValue.setProgress(0);
+ customVoiceFormant.setProgress(50);
}
@Override
@@ -617,6 +632,28 @@ private int getVoiceConversionValue(String label) {
return VOICE_CHANGER_SOLID;
case "VOICE_CHANGER_BASS":
return VOICE_CHANGER_BASS;
+ case "VOICE_CHANGER_CARTOON":
+ return VOICE_CHANGER_CARTOON;
+ case "VOICE_CHANGER_CHILDLIKE":
+ return VOICE_CHANGER_CHILDLIKE;
+ case "VOICE_CHANGER_PHONE_OPERATOR":
+ return VOICE_CHANGER_PHONE_OPERATOR;
+ case "VOICE_CHANGER_MONSTER":
+ return VOICE_CHANGER_MONSTER;
+ case "VOICE_CHANGER_TRANSFORMERS":
+ return VOICE_CHANGER_TRANSFORMERS;
+ case "VOICE_CHANGER_GROOT":
+ return VOICE_CHANGER_GROOT;
+ case "VOICE_CHANGER_DARTH_VADER":
+ return VOICE_CHANGER_DARTH_VADER;
+ case "VOICE_CHANGER_IRON_LADY":
+ return VOICE_CHANGER_IRON_LADY;
+ case "VOICE_CHANGER_SHIN_CHAN":
+ return VOICE_CHANGER_SHIN_CHAN;
+ case "VOICE_CHANGER_GIRLISH_MAN":
+ return VOICE_CHANGER_GIRLISH_MAN;
+ case "VOICE_CHANGER_CHIPMUNK":
+ return VOICE_CHANGER_CHIPMUNK;
case "VOICE_CONVERSION_OFF":
default:
return VOICE_CONVERSION_OFF;
@@ -743,6 +780,9 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+ if (!fromUser) {
+ return;
+ }
if(seekBar == _voice3DCircle){
int cicle = (int) (1 + 59 * progress * 1.0f / seekBar.getMax());
// [1,60], 10 default
@@ -771,6 +811,10 @@ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
value = (int) (100 * progress * 1.0f / seekBar.getMax());
}
engine.setLocalVoiceReverb(reverbKey, value);
+ } else if (seekBar == customVoiceFormant) {
+ // [-1, 1]
+ double value = (progress - 50) * 1.0f / 100;
+ engine.setLocalVoiceFormant(value);
}
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java
index 7df8c25d8..00db31803 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java
@@ -1,15 +1,21 @@
package io.agora.api.example.examples.advanced.beauty;
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
import android.os.Bundle;
import android.util.Log;
+import android.util.Size;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
+import android.view.ViewParent;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
+import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.Random;
@@ -20,17 +26,20 @@
import io.agora.api.example.utils.TokenUtils;
import io.agora.base.TextureBufferHelper;
import io.agora.base.VideoFrame;
+import io.agora.base.internal.video.YuvHelper;
import io.agora.beauty.base.IBeautyByteDance;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcEngine;
+import io.agora.rtc2.gl.EglBaseProvider;
+import io.agora.rtc2.video.ColorEnhanceOptions;
import io.agora.rtc2.video.IVideoFrameObserver;
import io.agora.rtc2.video.VideoCanvas;
public class ByteDanceBeauty extends BaseFragment {
private static final String TAG = "SceneTimeBeauty";
-
+ private static final Matrix IDENTITY_MATRIX = new Matrix();
private IBeautyByteDance iBeautyByteDance;
private FragmentBeautyBytedanceBinding mBinding;
private RtcEngine rtcEngine;
@@ -47,6 +56,8 @@ public class ByteDanceBeauty extends BaseFragment {
private volatile boolean isDestroyed = false;
private int mFrameRotation;
+ private ByteBuffer nv21ByteBuffer;
+ private byte[] nv21ByteArray;
@Nullable
@Override
@@ -67,27 +78,54 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
initVideoView();
initRtcEngine();
joinChannel();
+ mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) ->
+ {
+ ColorEnhanceOptions options = new ColorEnhanceOptions();
+ options.strengthLevel = (float) 0.5f;
+ options.skinProtectLevel = (float) 0.5f;
+ rtcEngine.setColorEnhanceOptions(isChecked, options);
+ });
}
@Override
public void onDestroyView() {
super.onDestroyView();
- isDestroyed = true;
+ if (rtcEngine != null) {
+ rtcEngine.leaveChannel();
+ }
if (mTextureBufferHelper != null) {
mTextureBufferHelper.invoke(() -> {
iBeautyByteDance.release();
iBeautyByteDance = null;
return null;
});
- mTextureBufferHelper.dispose();
+ boolean disposeSuccess = false;
+ while (!disposeSuccess) {
+ try {
+ mTextureBufferHelper.dispose();
+ disposeSuccess = true;
+ } catch (Exception e) {
+ try {
+ Thread.sleep(50);
+ } catch (InterruptedException ex) {
+ // do nothing
+ }
+ }
+ }
mTextureBufferHelper = null;
}
- if (rtcEngine != null) {
- rtcEngine.leaveChannel();
- }
RtcEngine.destroy();
}
+ @Override
+ protected void onBackPressed() {
+ isDestroyed = true;
+ mBinding.fullVideoContainer.removeAllViews();
+ mBinding.smallVideoContainer.removeAllViews();
+ super.onBackPressed();
+
+ }
+
private void initVideoView() {
mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> {
if (iBeautyByteDance == null) {
@@ -195,65 +233,17 @@ public void onRemoteVideoStats(RemoteVideoStats stats) {
if (rtcEngine == null) {
return;
}
+ rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true);
mVideoFrameObserver = new IVideoFrameObserver() {
@Override
- public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
- if (isDestroyed) {
- return false;
- }
- VideoFrame.Buffer buffer = videoFrame.getBuffer();
- if (!(buffer instanceof VideoFrame.TextureBuffer)) {
- return false;
- }
-
- VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer;
-
- if (mTextureBufferHelper == null) {
- doOnBeautyCreatingBegin();
- mTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext());
- mTextureBufferHelper.invoke(() -> {
- iBeautyByteDance = IBeautyByteDance.create(getContext());
- return null;
- });
- doOnBeautyCreatingEnd();
- }
-
- int width = texBuffer.getWidth();
- int height = texBuffer.getHeight();
-
- Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process(
- texBuffer.getTextureId(),
- width, height, mFrameRotation
- ));
-
- // drag one frame to avoid reframe when switching camera.
- if(mFrameRotation != videoFrame.getRotation()){
- mFrameRotation = videoFrame.getRotation();
- return false;
- }
-
- VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer(
- width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId,
- texBuffer.getTransformMatrix());
-
- videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs());
- return true;
+ public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
+ return processBeauty(videoFrame);
}
@Override
- public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) {
+ public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
@@ -301,6 +291,89 @@ public int getObservedFramePosition() {
}
}
+ private boolean processBeauty(VideoFrame videoFrame) {
+ if (isDestroyed) {
+ return false;
+ }
+ VideoFrame.Buffer buffer = videoFrame.getBuffer();
+ if (mTextureBufferHelper == null) {
+ doOnBeautyCreatingBegin();
+ mTextureBufferHelper = TextureBufferHelper.create("ByteDanceProcess", EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
+ mTextureBufferHelper.invoke(() -> {
+ iBeautyByteDance = IBeautyByteDance.create(getContext());
+ return null;
+ });
+ doOnBeautyCreatingEnd();
+ }
+
+ int width = buffer.getWidth();
+ int height = buffer.getHeight();
+
+
+ int processTexId = -1;
+ Matrix transformMatrix = IDENTITY_MATRIX;
+ int rotation = videoFrame.getRotation();
+ boolean skipFrame = false;
+ if (buffer instanceof VideoFrame.TextureBuffer) {
+ VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer;
+ transformMatrix = texBuffer.getTransformMatrix();
+ Size captureOriginSize = VideoCaptureUtils.getCaptureOriginSize(texBuffer);
+ processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process(
+ texBuffer.getTextureId(),
+ texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D,
+ captureOriginSize.getWidth(), captureOriginSize.getHeight(), rotation
+ ));
+ if (nv21ByteBuffer != null) {
+ nv21ByteBuffer.clear();
+ nv21ByteBuffer = null;
+ skipFrame = true;
+ }
+ } else {
+ // Obtain nv21 pixel data
+ int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f);
+ if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) {
+ if (nv21ByteBuffer != null) {
+ nv21ByteBuffer.clear();
+ }
+ nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size);
+ nv21ByteArray = new byte[nv21Size];
+ skipFrame = true;
+ }
+
+ VideoFrame.I420Buffer i420Buffer = buffer.toI420();
+ YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(),
+ i420Buffer.getDataV(), i420Buffer.getStrideV(),
+ i420Buffer.getDataU(), i420Buffer.getStrideU(),
+ nv21ByteBuffer, width, height);
+ nv21ByteBuffer.position(0);
+ nv21ByteBuffer.get(nv21ByteArray);
+ i420Buffer.release();
+ if(mTextureBufferHelper != null){
+ processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process(
+ nv21ByteArray,
+ width, height, rotation
+ ));
+ }
+ }
+
+ // drag one frame to avoid reframe when switching camera.
+ if (mFrameRotation != rotation) {
+ mFrameRotation = rotation;
+ skipFrame = true;
+ }
+
+ if(processTexId < 0 || skipFrame){
+ return false;
+ }
+ if(mTextureBufferHelper != null){
+ VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer(
+ width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId,
+ transformMatrix);
+ videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs());
+ }
+ return true;
+ }
+
private void joinChannel() {
int uid = new Random(System.currentTimeMillis()).nextInt(1000) + 10000;
ChannelMediaOptions options = new ChannelMediaOptions();
@@ -315,7 +388,9 @@ private void joinChannel() {
mLocalVideoLayout = new VideoReportLayout(requireContext());
TextureView videoView = new TextureView(requireContext());
- rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN));
+ VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ rtcEngine.setupLocalVideo(local);
mLocalVideoLayout.addView(videoView);
rtcEngine.startPreview();
@@ -324,24 +399,47 @@ private void joinChannel() {
private void updateVideoLayouts(boolean isLocalFull) {
this.isLocalFull = isLocalFull;
- mBinding.fullVideoContainer.removeAllViews();
- mBinding.smallVideoContainer.removeAllViews();
if (isLocalFull) {
if (mLocalVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ } else if (parent == null) {
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ }
}
if (mRemoteVideoLayout != null) {
mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!ByteDanceBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ } else if(parent == null){
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ }
}
} else {
if (mLocalVideoLayout != null) {
mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!ByteDanceBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ } else if(parent == null){
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ }
}
+
if (mRemoteVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ } else if(parent == null) {
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ }
}
}
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java
index 6c4681977..8bd2dcd3a 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java
@@ -1,11 +1,15 @@
package io.agora.api.example.examples.advanced.beauty;
+import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
+
+import android.graphics.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
+import android.view.ViewParent;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@@ -14,6 +18,7 @@
import java.util.Locale;
import java.util.Random;
+import io.agora.api.example.MainApplication;
import io.agora.api.example.R;
import io.agora.api.example.common.BaseFragment;
import io.agora.api.example.common.widget.VideoReportLayout;
@@ -27,8 +32,11 @@
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcEngine;
+import io.agora.rtc2.gl.EglBaseProvider;
+import io.agora.rtc2.video.ColorEnhanceOptions;
import io.agora.rtc2.video.IVideoFrameObserver;
import io.agora.rtc2.video.VideoCanvas;
+import io.agora.rtc2.video.VideoEncoderConfiguration;
public class FaceUnityBeauty extends BaseFragment {
private static final String TAG = "SceneTimeBeauty";
@@ -42,7 +50,6 @@ public class FaceUnityBeauty extends BaseFragment {
private boolean isFrontCamera = true;
private TextureBufferHelper mTextureBufferHelper;
- private boolean isSingleInput = true;
private VideoReportLayout mLocalVideoLayout;
private VideoReportLayout mRemoteVideoLayout;
@@ -72,27 +79,53 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
initVideoView();
initRtcEngine();
joinChannel();
+ mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) ->
+ {
+ ColorEnhanceOptions options = new ColorEnhanceOptions();
+ options.strengthLevel = (float) 0.5f;
+ options.skinProtectLevel = (float) 0.5f;
+ rtcEngine.setColorEnhanceOptions(isChecked, options);
+ });
}
@Override
public void onDestroyView() {
super.onDestroyView();
isDestroyed = true;
+ if (rtcEngine != null) {
+ rtcEngine.leaveChannel();
+ }
if (mTextureBufferHelper != null) {
mTextureBufferHelper.invoke(() -> {
iBeautyFaceUnity.release();
iBeautyFaceUnity = null;
return null;
});
- mTextureBufferHelper.dispose();
+ boolean disposeSuccess = false;
+ while (!disposeSuccess) {
+ try {
+ mTextureBufferHelper.dispose();
+ disposeSuccess = true;
+ } catch (Exception e) {
+ try {
+ Thread.sleep(50);
+ } catch (InterruptedException ex) {
+ // do nothing
+ }
+ }
+ }
mTextureBufferHelper = null;
}
- if (rtcEngine != null) {
- rtcEngine.leaveChannel();
- }
RtcEngine.destroy();
}
+ @Override
+ protected void onBackPressed() {
+ mBinding.fullVideoContainer.removeAllViews();
+ mBinding.smallVideoContainer.removeAllViews();
+ super.onBackPressed();
+ }
+
private void initVideoView() {
mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> {
if (iBeautyFaceUnity == null) {
@@ -122,11 +155,6 @@ private void initVideoView() {
rtcEngine.switchCamera();
isFrontCamera = !isFrontCamera;
});
- mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double);
- mBinding.tvBeautyInput.setOnClickListener(v -> {
- isSingleInput = !isSingleInput;
- mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double);
- });
mBinding.smallVideoContainer.setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull));
}
@@ -205,61 +233,16 @@ public void onRemoteVideoStats(RemoteVideoStats stats) {
if (rtcEngine == null) {
return;
}
-
+ rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true);
mVideoFrameObserver = new IVideoFrameObserver() {
@Override
- public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
- if (isDestroyed) {
- return true;
- }
- VideoFrame.Buffer buffer = videoFrame.getBuffer();
- if (!(buffer instanceof VideoFrame.TextureBuffer)) {
- return true;
- }
-
- VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer;
-
- if (mTextureBufferHelper == null) {
- doOnBeautyCreatingBegin();
- mTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext());
- mTextureBufferHelper.invoke(() -> {
- iBeautyFaceUnity = IBeautyFaceUnity.create(getContext());
- return null;
- });
- doOnBeautyCreatingEnd();
- }
-
- VideoFrame.TextureBuffer processBuffer;
- if (isSingleInput) {
- processBuffer = processSingleInput(texBuffer);
- } else {
- processBuffer = processDoubleInput(texBuffer);
- }
- if(processBuffer == null){
- return true;
- }
- // drag one frame to avoid reframe when switching camera.
- if(mFrameRotation != videoFrame.getRotation()){
- mFrameRotation = videoFrame.getRotation();
- return false;
- }
- videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs());
- return true;
- }
-
- @Override
- public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) {
- return false;
+ public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
+ return processBeauty(videoFrame);
}
@Override
- public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) {
- return false;
- }
-
- @Override
- public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) {
+ public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
@@ -299,6 +282,13 @@ public int getObservedFramePosition() {
}
};
rtcEngine.registerVideoFrameObserver(mVideoFrameObserver);
+ // Setup video encoding configs
+ rtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
+ ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(),
+ VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()),
+ STANDARD_BITRATE,
+ VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation())
+ ));
rtcEngine.enableVideo();
rtcEngine.disableAudio();
@@ -307,27 +297,29 @@ public int getObservedFramePosition() {
}
}
- private VideoFrame.TextureBuffer processSingleInput(VideoFrame.TextureBuffer texBuffer) {
-
- int width = texBuffer.getWidth();
- int height = texBuffer.getHeight();
-
- Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process(
- texBuffer.getTextureId(),
- width, height
- ));
-
- return mTextureBufferHelper.wrapTextureBuffer(
- width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId,
- texBuffer.getTransformMatrix());
- }
+ private boolean processBeauty(VideoFrame videoFrame) {
+ if (isDestroyed) {
+ return true;
+ }
- private VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer texBuffer) {
+ if (mTextureBufferHelper == null) {
+ doOnBeautyCreatingBegin();
+ mTextureBufferHelper = TextureBufferHelper.create("STRender", EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
+ mTextureBufferHelper.invoke(() -> {
+ iBeautyFaceUnity = IBeautyFaceUnity.create(getContext());
+ return null;
+ });
+ doOnBeautyCreatingEnd();
+ }
- int textureId = texBuffer.getTextureId();
- int width = texBuffer.getWidth();
- int height = texBuffer.getHeight();
+ VideoFrame.Buffer buffer = videoFrame.getBuffer();
+ int width = buffer.getWidth();
+ int height = buffer.getHeight();
+ int processTexId = -1;
+ Matrix transformMatrix = new Matrix();
+ int rotation = videoFrame.getRotation();
+ boolean skipFrame = false;
int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f);
if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) {
if (nv21ByteBuffer != null) {
@@ -335,10 +327,10 @@ private VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer tex
}
nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size);
nv21ByteArray = new byte[nv21Size];
+ skipFrame = true;
}
-
- VideoFrame.I420Buffer i420Buffer = texBuffer.toI420();
+ VideoFrame.I420Buffer i420Buffer = buffer.toI420();
YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(),
i420Buffer.getDataV(), i420Buffer.getStrideV(),
i420Buffer.getDataU(), i420Buffer.getStrideU(),
@@ -347,14 +339,32 @@ private VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer tex
nv21ByteBuffer.get(nv21ByteArray);
i420Buffer.release();
- Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process(
- nv21ByteArray,
- textureId,
- width, height
- ));
+ if (mTextureBufferHelper != null) {
+ processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process(
+ nv21ByteArray,
+ width, height,
+ videoFrame.getSourceType() == VideoFrame.SourceType.kFrontCamera
+ ));
+ }
+
+
+ // drag one frame to avoid reframe when switching camera.
+ if (mFrameRotation != rotation) {
+ mFrameRotation = rotation;
+ skipFrame = true;
+ }
+
+ if (processTexId < 0 || skipFrame) {
+ return false;
+ }
+
+ if (mTextureBufferHelper != null) {
+ VideoFrame.TextureBuffer textureBuffer = mTextureBufferHelper.wrapTextureBuffer(
+ width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, transformMatrix);
+ videoFrame.replaceBuffer(textureBuffer, mFrameRotation, videoFrame.getTimestampNs());
+ }
- return mTextureBufferHelper.wrapTextureBuffer(
- width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, texBuffer.getTransformMatrix());
+ return true;
}
private void joinChannel() {
@@ -371,7 +381,9 @@ private void joinChannel() {
mLocalVideoLayout = new VideoReportLayout(requireContext());
TextureView videoView = new TextureView(requireContext());
- rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN));
+ VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ rtcEngine.setupLocalVideo(local);
mLocalVideoLayout.addView(videoView);
rtcEngine.startPreview();
@@ -380,24 +392,47 @@ private void joinChannel() {
private void updateVideoLayouts(boolean isLocalFull) {
this.isLocalFull = isLocalFull;
- mBinding.fullVideoContainer.removeAllViews();
- mBinding.smallVideoContainer.removeAllViews();
if (isLocalFull) {
if (mLocalVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ } else if (parent == null) {
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ }
}
if (mRemoteVideoLayout != null) {
mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ } else if (parent == null) {
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ }
}
} else {
if (mLocalVideoLayout != null) {
mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ } else if (parent == null) {
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ }
}
+
if (mRemoteVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ } else if (parent == null) {
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ }
}
}
}
@@ -416,11 +451,4 @@ private void doOnBeautyCreatingEnd() {
});
}
- private void doOnBeautyReleasingBegin() {
- Log.d(TAG, "doOnBeautyReleasingBegin...");
- }
-
- private void doOnBeautyReleasingEnd() {
- Log.d(TAG, "doOnBeautyReleasingEnd.");
- }
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java
index f4426987a..cee00eeb6 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java
@@ -1,5 +1,7 @@
package io.agora.api.example.examples.advanced.beauty;
+import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE;
+
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
@@ -9,6 +11,7 @@
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
+import android.view.ViewParent;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@@ -17,6 +20,7 @@
import java.util.Locale;
import java.util.Random;
+import io.agora.api.example.MainApplication;
import io.agora.api.example.R;
import io.agora.api.example.common.BaseFragment;
import io.agora.api.example.common.widget.VideoReportLayout;
@@ -24,14 +28,18 @@
import io.agora.api.example.utils.TokenUtils;
import io.agora.base.TextureBufferHelper;
import io.agora.base.VideoFrame;
+import io.agora.base.internal.video.RendererCommon;
import io.agora.base.internal.video.YuvHelper;
import io.agora.beauty.base.IBeautySenseTime;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcEngine;
+import io.agora.rtc2.gl.EglBaseProvider;
+import io.agora.rtc2.video.ColorEnhanceOptions;
import io.agora.rtc2.video.IVideoFrameObserver;
import io.agora.rtc2.video.VideoCanvas;
+import io.agora.rtc2.video.VideoEncoderConfiguration;
public class SceneTimeBeauty extends BaseFragment {
private static final String TAG = "SceneTimeBeauty";
@@ -40,13 +48,8 @@ public class SceneTimeBeauty extends BaseFragment {
private FragmentBeautyScenetimeBinding mBinding;
private RtcEngine rtcEngine;
private String channelId;
- private ByteBuffer nv21ByteBuffer;
- private byte[] nv21ByteArray;
- private boolean isFrontCamera = true;
- private TextureBufferHelper mDoubleTextureBufferHelper;
- private TextureBufferHelper mSingleTextureBufferHelper;
- private boolean isSingleInput = true;
+ private boolean isFrontCamera = true;
private VideoReportLayout mLocalVideoLayout;
private VideoReportLayout mRemoteVideoLayout;
@@ -54,8 +57,11 @@ public class SceneTimeBeauty extends BaseFragment {
private IVideoFrameObserver mVideoFrameObserver;
private IRtcEngineEventHandler mRtcEngineEventHandler;
+ // Beauty process require parameters
+ private TextureBufferHelper mTextureBufferHelper;
+ private ByteBuffer nv21ByteBuffer;
+ private byte[] nv21ByteArray;
private volatile boolean isDestroyed = false;
- private int mFrameRotation;
@Nullable
@Override
@@ -67,45 +73,43 @@ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup c
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
+ isDestroyed = false;
if (!IBeautySenseTime.hasIntegrated()) {
mBinding.tvIntegrateTip.setVisibility(View.VISIBLE);
return;
}
-
channelId = getArguments().getString(getString(R.string.key_channel_name));
initVideoView();
initRtcEngine();
joinChannel();
+ mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) ->
+ {
+ ColorEnhanceOptions options = new ColorEnhanceOptions();
+ options.strengthLevel = (float) 0.5f;
+ options.skinProtectLevel = (float) 0.5f;
+ rtcEngine.setColorEnhanceOptions(isChecked, options);
+ });
}
+
@Override
public void onDestroyView() {
super.onDestroyView();
- isDestroyed = true;
- if (mSingleTextureBufferHelper != null) {
- mSingleTextureBufferHelper.invoke(() -> {
- iBeautySenseTime.release();
- iBeautySenseTime = null;
- return null;
- });
- mSingleTextureBufferHelper.dispose();
- mSingleTextureBufferHelper = null;
- }
- if (mDoubleTextureBufferHelper != null) {
- mDoubleTextureBufferHelper.invoke(() -> {
- iBeautySenseTime.release();
- iBeautySenseTime = null;
- return null;
- });
- mDoubleTextureBufferHelper.dispose();
- mDoubleTextureBufferHelper = null;
- }
if (rtcEngine != null) {
rtcEngine.leaveChannel();
}
+ unInitBeauty();
RtcEngine.destroy();
}
+ @Override
+ protected void onBackPressed() {
+ isDestroyed = true;
+ mBinding.fullVideoContainer.removeAllViews();
+ mBinding.smallVideoContainer.removeAllViews();
+ super.onBackPressed();
+ }
+
private void initVideoView() {
mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> {
if (iBeautySenseTime == null) {
@@ -135,11 +139,6 @@ private void initVideoView() {
rtcEngine.switchCamera();
isFrontCamera = !isFrontCamera;
});
- mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double);
- mBinding.tvBeautyInput.setOnClickListener(v -> {
- isSingleInput = !isSingleInput;
- mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double);
- });
mBinding.smallVideoContainer.setOnClickListener(v -> updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull));
}
@@ -177,7 +176,7 @@ public void onUserJoined(int uid, int elapsed) {
public void onUserOffline(int uid, int reason) {
super.onUserOffline(uid, reason);
runOnUIThread(() -> {
- if(mRemoteVideoLayout != null && mRemoteVideoLayout.getReportUid() == uid){
+ if (mRemoteVideoLayout != null && mRemoteVideoLayout.getReportUid() == uid) {
mRemoteVideoLayout.removeAllViews();
mRemoteVideoLayout = null;
updateVideoLayouts(isLocalFull);
@@ -219,35 +218,17 @@ public void onRemoteVideoStats(RemoteVideoStats stats) {
return;
}
+ rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true);
- mVideoFrameObserver = new IVideoFrameObserver() {
- @Override
- public boolean onCaptureVideoFrame(VideoFrame videoFrame) {
- if (isDestroyed) {
- return true;
- }
- if (isSingleInput) {
- return processSingleInput(videoFrame);
- } else {
- if (!processDoubleInput(videoFrame)) {
- return processSingleInput(videoFrame);
- }
- return true;
- }
- }
-
- @Override
- public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) {
- return false;
- }
+ mVideoFrameObserver = new IVideoFrameObserver() {
@Override
- public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) {
- return false;
+ public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
+ return processBeauty(videoFrame);
}
@Override
- public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) {
+ public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) {
return false;
}
@@ -278,7 +259,7 @@ public boolean getRotationApplied() {
@Override
public boolean getMirrorApplied() {
- return false;
+ return true;
}
@Override
@@ -287,6 +268,13 @@ public int getObservedFramePosition() {
}
};
rtcEngine.registerVideoFrameObserver(mVideoFrameObserver);
+ // Setup video encoding configs
+ rtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
+ ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(),
+ VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()),
+ STANDARD_BITRATE,
+ VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation())
+ ));
rtcEngine.enableVideo();
rtcEngine.disableAudio();
@@ -295,32 +283,29 @@ public int getObservedFramePosition() {
}
}
- private boolean processSingleInput(VideoFrame videoFrame) {
- VideoFrame.Buffer buffer = videoFrame.getBuffer();
-
- if (mDoubleTextureBufferHelper != null) {
- doOnBeautyReleasingBegin();
- mDoubleTextureBufferHelper.invoke(() -> {
+ private void unInitBeauty() {
+ if (mTextureBufferHelper != null) {
+ mTextureBufferHelper.invoke(() -> {
iBeautySenseTime.release();
+ iBeautySenseTime = null;
return null;
});
- mDoubleTextureBufferHelper.dispose();
- mDoubleTextureBufferHelper = null;
- doOnBeautyReleasingEnd();
+ mTextureBufferHelper.dispose();
+ mTextureBufferHelper = null;
}
- if (mSingleTextureBufferHelper == null) {
- doOnBeautyCreatingBegin();
- mSingleTextureBufferHelper = TextureBufferHelper.create("STRender", null);
- mSingleTextureBufferHelper.invoke(() -> {
- iBeautySenseTime = IBeautySenseTime.create(getContext());
- return null;
- });
- doOnBeautyCreatingEnd();
+ }
+
+ private boolean processBeauty(VideoFrame videoFrame) {
+ if (isDestroyed) {
+ return false;
}
+ VideoFrame.Buffer buffer = videoFrame.getBuffer();
int width = buffer.getWidth();
int height = buffer.getHeight();
+
+ // Obtain nv21 pixel data
int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f);
if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) {
if (nv21ByteBuffer != null) {
@@ -328,9 +313,8 @@ private boolean processSingleInput(VideoFrame videoFrame) {
}
nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size);
nv21ByteArray = new byte[nv21Size];
+ return false;
}
-
-
VideoFrame.I420Buffer i420Buffer = buffer.toI420();
YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(),
i420Buffer.getDataV(), i420Buffer.getStrideV(),
@@ -340,94 +324,52 @@ private boolean processSingleInput(VideoFrame videoFrame) {
nv21ByteBuffer.get(nv21ByteArray);
i420Buffer.release();
- Integer processTexId = mSingleTextureBufferHelper.invoke(() -> iBeautySenseTime.process(
- nv21ByteArray,
- width, height, mFrameRotation
- ));
-
- // drag one frame to avoid reframe when switching camera.
- if(mFrameRotation != videoFrame.getRotation()){
- mFrameRotation = videoFrame.getRotation();
- return false;
- }
-
- VideoFrame.TextureBuffer processBuffer = mSingleTextureBufferHelper.wrapTextureBuffer(
- width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId,
- buffer instanceof VideoFrame.TextureBuffer ? ((VideoFrame.TextureBuffer) buffer).getTransformMatrix(): new Matrix());
- videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs());
- buffer.release();
-
- return true;
- }
-
- private boolean processDoubleInput(VideoFrame videoFrame) {
- VideoFrame.Buffer buffer = videoFrame.getBuffer();
- if (!(buffer instanceof VideoFrame.TextureBuffer)) {
- return false;
- }
- VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer;
-
- if (mSingleTextureBufferHelper != null) {
- doOnBeautyReleasingBegin();
- mSingleTextureBufferHelper.invoke(() -> {
- iBeautySenseTime.release();
- return null;
- });
- mSingleTextureBufferHelper.dispose();
- mSingleTextureBufferHelper = null;
- doOnBeautyReleasingEnd();
- }
-
- if (mDoubleTextureBufferHelper == null) {
- doOnBeautyCreatingBegin();
- mDoubleTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext());
- mDoubleTextureBufferHelper.invoke(() -> {
+ if (mTextureBufferHelper == null) {
+ Log.d(TAG, "doOnBeautyCreatingBegin...");
+ mTextureBufferHelper = TextureBufferHelper.create("STRender", EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
+ mTextureBufferHelper.invoke(() -> {
iBeautySenseTime = IBeautySenseTime.create(getContext());
return null;
});
- doOnBeautyCreatingEnd();
+ Log.d(TAG, "doOnBeautyCreatingEnd.");
+ runOnUIThread(() -> {
+ mBinding.cbFilter.setChecked(false);
+ mBinding.cbFaceBeautify.setChecked(false);
+ mBinding.cbSticker.setChecked(false);
+ mBinding.cbMakeup.setChecked(false);
+ });
}
- int textureId = texBuffer.getTextureId();
- int textureFormat = texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D;
- int width = buffer.getWidth();
- int height = buffer.getHeight();
+ int processTexId = -1;
+ if (buffer instanceof VideoFrame.TextureBuffer) {
+ VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer;
+ int textureFormat = texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D;
+ float[] transformMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(texBuffer.getTransformMatrix());
+
+ if(mTextureBufferHelper != null){
+ processTexId = mTextureBufferHelper.invoke(() -> iBeautySenseTime.process(
+ nv21ByteArray,
+ texBuffer.getTextureId(), textureFormat,
+ width, height, videoFrame.getRotation(), transformMatrix
+ ));
+ }
- int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f);
- if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) {
- if (nv21ByteBuffer != null) {
- nv21ByteBuffer.clear();
+ } else {
+ if(mTextureBufferHelper != null){
+ processTexId = mTextureBufferHelper.invoke(() ->iBeautySenseTime.process(
+ nv21ByteArray,
+ width, height, videoFrame.getRotation()
+ ));
}
- nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size);
- nv21ByteArray = new byte[nv21Size];
}
-
-
- VideoFrame.I420Buffer i420Buffer = buffer.toI420();
- YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(),
- i420Buffer.getDataV(), i420Buffer.getStrideV(),
- i420Buffer.getDataU(), i420Buffer.getStrideU(),
- nv21ByteBuffer, width, height);
- nv21ByteBuffer.position(0);
- nv21ByteBuffer.get(nv21ByteArray);
- i420Buffer.release();
-
- Integer processTexId = mDoubleTextureBufferHelper.invoke(() -> iBeautySenseTime.process(
- nv21ByteArray,
- textureId, textureFormat,
- width, height, mFrameRotation
- ));
-
- // drag one frame to avoid reframe when switching camera.
- if(mFrameRotation != videoFrame.getRotation()){
- mFrameRotation = videoFrame.getRotation();
+ if (processTexId < 0) {
return false;
}
-
- VideoFrame.TextureBuffer processBuffer = mDoubleTextureBufferHelper.wrapTextureBuffer(
- width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, texBuffer.getTransformMatrix());
- videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs());
- buffer.release();
+ if(mTextureBufferHelper != null){
+ VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer(
+ width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, new Matrix());
+ videoFrame.replaceBuffer(processBuffer, videoFrame.getRotation(), videoFrame.getTimestampNs());
+ }
return true;
}
@@ -446,56 +388,59 @@ private void joinChannel() {
mLocalVideoLayout = new VideoReportLayout(requireContext());
TextureView videoView = new TextureView(requireContext());
- rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN));
+ VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0);
+ local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED;
+ rtcEngine.setupLocalVideo(local);
mLocalVideoLayout.addView(videoView);
- rtcEngine.startPreview();
updateVideoLayouts(isLocalFull);
}
private void updateVideoLayouts(boolean isLocalFull) {
this.isLocalFull = isLocalFull;
- mBinding.fullVideoContainer.removeAllViews();
- mBinding.smallVideoContainer.removeAllViews();
if (isLocalFull) {
if (mLocalVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ } else if (parent == null) {
+ mBinding.fullVideoContainer.addView(mLocalVideoLayout);
+ }
}
if (mRemoteVideoLayout != null) {
mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ } else if(parent == null){
+ mBinding.smallVideoContainer.addView(mRemoteVideoLayout);
+ }
}
} else {
if (mLocalVideoLayout != null) {
mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull));
- mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ ViewParent parent = mLocalVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) {
+ ((ViewGroup) parent).removeView(mLocalVideoLayout);
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ } else if(parent == null){
+ mBinding.smallVideoContainer.addView(mLocalVideoLayout);
+ }
}
+
if (mRemoteVideoLayout != null) {
- mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ ViewParent parent = mRemoteVideoLayout.getParent();
+ if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) {
+ ((ViewGroup) parent).removeView(mRemoteVideoLayout);
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ } else if(parent == null) {
+ mBinding.fullVideoContainer.addView(mRemoteVideoLayout);
+ }
}
}
}
- private void doOnBeautyCreatingBegin() {
- Log.d(TAG, "doOnBeautyCreatingBegin...");
- }
-
- private void doOnBeautyCreatingEnd() {
- Log.d(TAG, "doOnBeautyCreatingEnd.");
- runOnUIThread(() -> {
- mBinding.cbFilter.setChecked(false);
- mBinding.cbFaceBeautify.setChecked(false);
- mBinding.cbSticker.setChecked(false);
- mBinding.cbMakeup.setChecked(false);
- });
- }
-
- private void doOnBeautyReleasingBegin() {
- Log.d(TAG, "doOnBeautyReleasingBegin...");
- }
-
- private void doOnBeautyReleasingEnd() {
- Log.d(TAG, "doOnBeautyReleasingEnd.");
- }
}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java
new file mode 100644
index 000000000..1245a411a
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java
@@ -0,0 +1,29 @@
+package io.agora.api.example.examples.advanced.beauty;
+
+import android.graphics.Matrix;
+import android.util.Size;
+
+import io.agora.base.VideoFrame;
+import io.agora.base.internal.video.RendererCommon;
+
+public class VideoCaptureUtils {
+
+ public static Size getCaptureOriginSize(VideoFrame.TextureBuffer texBuffer){
+ int width = texBuffer.getWidth();
+ int height = texBuffer.getHeight();
+ Matrix texMatrix = texBuffer.getTransformMatrix();
+
+ // 根据Matrix反算纹理的真实宽高
+ Matrix renderMatrix = new Matrix();
+ renderMatrix.preTranslate(0.5F, 0.5F);
+ renderMatrix.preScale(1.0F, -1.0F);
+ renderMatrix.preTranslate(-0.5F, -0.5F);
+ Matrix finalMatrix = new Matrix(texMatrix);
+ finalMatrix.preConcat(renderMatrix);
+ float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
+ int texWidth = (int) (width * 1.0f / finalGlMatrix[0] + 0.5f);
+ int texHeight = (int) (height * 1.0f / finalGlMatrix[5] + 0.5f);
+ return new Size(texWidth, texHeight);
+ }
+
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
index 92c2afb78..e66641d57 100755
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
@@ -236,6 +236,7 @@ public void onClick(View v) {
engine.leaveChannel();
pulling = false;
join.setText(getString(R.string.join));
+ audioSeatManager.downAllSeats();
if(pullingTask != null){
try {
pullingTask.join();
@@ -268,7 +269,7 @@ private void joinChannel(String channelId) {
* 0: Success.
* < 0: Failure.
* PS: Ensure that you call this method before the joinChannel method.*/
- engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true);
+ // engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true);
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
index a4403f77a..a94f62cfb 100755
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java
@@ -34,6 +34,7 @@
import io.agora.rtc2.RtcEngine;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
+import io.agora.rtc2.audio.AudioTrackConfig;
/**
* This demo demonstrates how to make a one-to-one voice call
@@ -54,10 +55,11 @@ public class CustomAudioSource extends BaseFragment implements View.OnClickListe
public static RtcEngineEx engine;
private Switch mic, pcm;
private ChannelMediaOptions option = new ChannelMediaOptions();
- private volatile int pushTimes = 0;
+ private int pushTimes = 0;
private AudioSeatManager audioSeatManager;
private AudioFileReader audioPushingHelper;
+ private int customAudioTrack = -1;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
@@ -155,9 +157,9 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig());
audioPushingHelper = new AudioFileReader(requireContext(), (buffer, timestamp) -> {
- if(joined && engine != null){
- Log.i(TAG, "pushExternalAudioFrame times:" + pushTimes++);
- engine.pushExternalAudioFrame(buffer, 0);
+ if(joined && engine != null && customAudioTrack != -1){
+ int ret = engine.pushExternalAudioFrame(buffer, timestamp, AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, customAudioTrack);
+ Log.i(TAG, "pushExternalAudioFrame times:" + (++pushTimes) + ", ret=" + ret);
}
});
} catch (Exception e) {
@@ -169,6 +171,10 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
@Override
public void onDestroy() {
super.onDestroy();
+ if(customAudioTrack != -1){
+ engine.destroyCustomAudioTrack(customAudioTrack);
+ customAudioTrack = -1;
+ }
if(audioPushingHelper != null){
audioPushingHelper.stop();
}
@@ -187,9 +193,10 @@ public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
option.publishMicrophoneTrack = checked;
engine.updateChannelMediaOptions(option);
} else if (compoundButton.getId() == R.id.localAudio) {
+ option.publishCustomAudioTrackId = customAudioTrack;
option.publishCustomAudioTrack = checked;
engine.updateChannelMediaOptions(option);
- engine.enableCustomAudioLocalPlayback(0, checked);
+ engine.enableCustomAudioLocalPlayback(customAudioTrack, checked);
}
}
@@ -268,10 +275,9 @@ private void joinChannel(String channelId) {
* 0: Success.
* < 0: Failure.
* PS: Ensure that you call this method before the joinChannel method.*/
- engine.setExternalAudioSource(true,
- AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, AudioFileReader.SAMPLE_NUM_OF_CHANNEL,
- false, true);
-
+ AudioTrackConfig config = new AudioTrackConfig();
+ config.enableLocalPlayback = false;
+ customAudioTrack = engine.createCustomAudioTrack(Constants.AudioTrackType.AUDIO_TRACK_MIXABLE, config);
/**Please configure accessToken in the string_config file.
* A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java
new file mode 100644
index 000000000..9c84356c7
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java
@@ -0,0 +1,91 @@
+package io.agora.api.example.examples.advanced.videoRender;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+
+import io.agora.base.JavaI420Buffer;
+import io.agora.base.internal.video.GlRectDrawer;
+import io.agora.base.internal.video.GlUtil;
+import io.agora.base.internal.video.RendererCommon;
+
+public class YuvFboProgram {
+
+ private int[] mFboTextureId;
+ private final YuvUploader yuvUploader;
+ private final GlRectDrawer glRectDrawer;
+
+ private int mWidth, mHeight;
+ private volatile boolean isRelease;
+
+ // GL Thread
+ public YuvFboProgram() {
+ yuvUploader = new YuvUploader();
+ glRectDrawer = new GlRectDrawer();
+ }
+
+ // GL Thread
+ public void release() {
+ isRelease = true;
+ if (mFboTextureId != null) {
+ GLES20.glDeleteFramebuffers(1, mFboTextureId, 0);
+ GLES20.glDeleteTextures(1, mFboTextureId, 1);
+ yuvUploader.release();
+ glRectDrawer.release();
+ mFboTextureId = null;
+ }
+ }
+
+ // GL Thread
+ public Integer drawYuv(byte[] yuv, int width, int height) {
+ if (isRelease) {
+ return -1;
+ }
+ if (mFboTextureId == null) {
+ mFboTextureId = new int[2];
+ GLES20.glGenFramebuffers(1, mFboTextureId, 0);
+ int fboId = mFboTextureId[0];
+
+ int texture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ mFboTextureId[1] = texture;
+
+ mWidth = width;
+ mHeight = height;
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+ GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, texture, 0);
+ } else if (mWidth != width || mHeight != height) {
+ GLES20.glDeleteFramebuffers(1, mFboTextureId, 0);
+ GLES20.glDeleteTextures(1, mFboTextureId, 1);
+ mFboTextureId = null;
+ return drawYuv(yuv, width, height);
+ } else {
+ int fboId = mFboTextureId[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+ }
+ GLES20.glViewport(0, 0, mWidth, mHeight);
+
+ JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height);
+ i420Buffer.getDataY().put(yuv, 0, i420Buffer.getDataY().limit());
+ i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit());
+ i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit());
+
+ yuvUploader.uploadFromBuffer(i420Buffer);
+ Matrix matrix = new Matrix();
+ matrix.preTranslate(0.5f, 0.5f);
+ matrix.preScale(1f, -1f);// I420-frames are upside down
+ matrix.preTranslate(-0.5f, -0.5f);
+ glRectDrawer.drawYuv(yuvUploader.getYuvTextures(), RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix), width, height, 0, 0, width, height);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glFlush();
+
+ return mFboTextureId[1];
+ }
+
+
+}
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java
index 8c6e21701..812d65086 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java
@@ -5,9 +5,6 @@
import java.io.IOException;
import java.io.InputStream;
-import io.agora.base.JavaI420Buffer;
-import io.agora.base.VideoFrame;
-
public class VideoFileReader {
private final String RAW_VIDEO_PATH = "sample.yuv";
private final int RAW_VIDEO_WIDTH = 320;
@@ -51,7 +48,7 @@ public final void stop(){
public interface OnVideoReadListener {
- void onVideoRead(VideoFrame videoFrame);
+ void onVideoRead(byte[] buffer, int width, int height);
}
private class InnerThread extends Thread {
@@ -77,12 +74,8 @@ public void run() {
} catch (IOException e) {
e.printStackTrace();
}
- JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(RAW_VIDEO_WIDTH, RAW_VIDEO_HEIGHT);
- i420Buffer.getDataY().put(buffer, 0, i420Buffer.getDataY().limit());
- i420Buffer.getDataU().put(buffer, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit());
- i420Buffer.getDataV().put(buffer, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit());
if(videoReadListener != null){
- videoReadListener.onVideoRead(new VideoFrame(i420Buffer, 0, System.nanoTime()));
+ videoReadListener.onVideoRead(buffer, RAW_VIDEO_WIDTH, RAW_VIDEO_HEIGHT);
}
long consume = System.nanoTime() - start;
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml b/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml
index cebcfa44e..14cf7188c 100644
--- a/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml
+++ b/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml
@@ -39,14 +39,23 @@
+
+
-
+ app:layout_constraintEnd_toEndOf="parent"
+ android:layout_margin="12dp"
+ android:text="@string/colorful_enhance"/>
-
-
+ app:layout_constraintEnd_toEndOf="parent"
+ android:layout_margin="12dp"
+ android:text="@string/colorful_enhance"/>
@@ -87,7 +86,6 @@
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_weight="1"
- android:digits="@string/chanel_support_char"
android:hint="@string/token"
android:singleLine="true"
app:layout_constraintBottom_toTopOf="@id/ll_join" />
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml b/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml
new file mode 100644
index 000000000..d1d8524f4
--- /dev/null
+++ b/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml
@@ -0,0 +1,51 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml b/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml
new file mode 100644
index 000000000..6e139b89e
--- /dev/null
+++ b/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml
@@ -0,0 +1,152 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_multi_video_source_tracks.xml b/Android/APIExample/app/src/main/res/layout/fragment_multi_video_source_tracks.xml
index bcd558abe..3ba7d79ef 100644
--- a/Android/APIExample/app/src/main/res/layout/fragment_multi_video_source_tracks.xml
+++ b/Android/APIExample/app/src/main/res/layout/fragment_multi_video_source_tracks.xml
@@ -9,7 +9,7 @@
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentTop="true"
- android:layout_marginBottom="50dp"
+ android:layout_above="@id/ll_buffer_type"
android:orientation="vertical">
+
+
+
+
+
+
+
+
+ android:layout_above="@+id/ll_buffer_type" />
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_voice_effects.xml b/Android/APIExample/app/src/main/res/layout/fragment_voice_effects.xml
index 9990b1a41..96db9042b 100644
--- a/Android/APIExample/app/src/main/res/layout/fragment_voice_effects.xml
+++ b/Android/APIExample/app/src/main/res/layout/fragment_voice_effects.xml
@@ -439,6 +439,33 @@
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample/app/src/main/res/navigation/nav_graph.xml b/Android/APIExample/app/src/main/res/navigation/nav_graph.xml
index 9a0004b2d..1f7b7b600 100755
--- a/Android/APIExample/app/src/main/res/navigation/nav_graph.xml
+++ b/Android/APIExample/app/src/main/res/navigation/nav_graph.xml
@@ -74,6 +74,9 @@
+
@@ -83,6 +86,9 @@
+
@@ -272,6 +278,11 @@
android:name="io.agora.api.example.examples.advanced.ScreenSharing"
android:label="@string/item_screensharing"
tools:layout="@layout/fragment_screen_sharing" />
+
+
画中画
第三方美颜
KTV版权音乐
+ 本地/远端录制
+ 本地合图
此示例演示了如何使用SDK加入频道进行纯语音通话的功能。
此示例演示了如何使用SDK加入频道进行音视频通话的功能。
@@ -138,6 +140,7 @@
此示例演示了在音视频通话过程中如何进行音视频帧的加解密的方法。
此示例演示了在音视频通话过程中如何以MediaIO的方式进行视频自采集和如何进行屏幕分享的功能。
此示例演示了在音视频通话过程中如何进行屏幕共享的方法。
+ 此示例演示了在音视频通话过程中如何进行本地合图的方法。
此示例演示了在音视频通话过程中,视频如何在摄像头流和屏幕分享流之间灵活切换。
此示例演示了在音视频通话过程中如何伴随视频帧发送meta信息的方法。
此示例演示了在音视频通话过程中如何通过AreaCode指定SDK访问限制。
@@ -145,6 +148,7 @@
此示例演示了在音视频通话过程中播放并管理audio effect和audio mixing文件。
此示例演示了在音视频通话过程中如何使用API提供的一些人声效果,或使用API自行组合出想要的人声效果。
此示例演示了在音视频通话过程中如何集成和使用Agora SDK的媒体播放器。
+ 此示例演示了在音视频通话过程中如何使用Agora SDK的媒体录制器录制本地或远端的视频。
此示例演示了在音视频通话过程中如何通过回调获取当前通话质量。>
此示例演示了在音视频通话过程中如何将A频道的主播流转发到B频道,实现主播PK。
此示例演示了在音视频通话过程中如何进行音视频帧的加解密的方法。
@@ -244,6 +248,7 @@
图片
颜色
毛玻璃
+ 视频
区域
水印
小流
@@ -303,4 +308,8 @@
是否重新打开url?
首帧出图优化
开启后无法关闭,并且需要主辅两端都开启时才生效
+ 开始录制
+ 停止录制
+ 透明背景
+
\ No newline at end of file
diff --git a/Android/APIExample/app/src/main/res/values/arrays.xml b/Android/APIExample/app/src/main/res/values/arrays.xml
index 6ba3bd2aa..8b54eed69 100644
--- a/Android/APIExample/app/src/main/res/values/arrays.xml
+++ b/Android/APIExample/app/src/main/res/values/arrays.xml
@@ -70,6 +70,17 @@
- VOICE_CHANGER_SWEET
- VOICE_CHANGER_SOLID
- VOICE_CHANGER_BASS
+ - VOICE_CHANGER_CARTOON
+ - VOICE_CHANGER_CHILDLIKE
+ - VOICE_CHANGER_PHONE_OPERATOR
+ - VOICE_CHANGER_MONSTER
+ - VOICE_CHANGER_TRANSFORMERS
+ - VOICE_CHANGER_GROOT
+ - VOICE_CHANGER_DARTH_VADER
+ - VOICE_CHANGER_IRON_LADY
+ - VOICE_CHANGER_SHIN_CHAN
+ - VOICE_CHANGER_GIRLISH_MAN
+ - VOICE_CHANGER_CHIPMUNK
- AUDIO_EQUALIZATION_BAND_31
@@ -216,5 +227,16 @@
- Hardware
- Software
+
+ - SCREEN_SCENARIO_DOCUMENT
+ - SCREEN_SCENARIO_GAMING
+ - SCREEN_SCENARIO_VIDEO
+
+
+ - I420
+ - NV21
+ - NV12
+ - Texture2D
+
\ No newline at end of file
diff --git a/Android/APIExample/app/src/main/res/values/strings.xml b/Android/APIExample/app/src/main/res/values/strings.xml
index 2f4d97bc5..9008fc011 100644
--- a/Android/APIExample/app/src/main/res/values/strings.xml
+++ b/Android/APIExample/app/src/main/res/values/strings.xml
@@ -125,6 +125,8 @@
Picture In Picture
Third-party beauty
KTV Copyright Music
+ Local/Remote MediaRecorder
+ LocalVideoTranscoding
This example demonstrates how to use the SDK to join channels for voice only calls.
This example demonstrates how to use the SDK to join channels for audio and video calls.
@@ -145,12 +147,14 @@
This example demonstrates how to use MediaIO to collect video and share screen during audio and video call.
This example demonstrates how video can be flexibly switched between the camera stream and the screen share stream during an audio-video call.
This example demonstrates how to use screen sharing during an audio-video call.
+ This example demonstrates how to use local video transcoding during an audio-video call.
This example demonstrates how to send meta information along with video frames during audio and video calls.
This example demonstrates how to use Area Code to enable SDK geographical fencing feature.
This example demonstrates how to use audio profile to adjust audio configurations.
This example demonstrates how to play and manage audio effect and audio mixing files.
This example demonstrates how to use embedded audio effects in SDK.
This example demonstrates how to use MediaPlayer. It is one of components for Agora SDK.
+ This example demonstrates how to use MediaRecorder to recorde local or remote video.
This example demonstrates how to display in call statistics.
This example demonstrates how to transfer media streaming to another rtc channel.
This example demonstrates how to encrypt and decrypt audio and video frames during audio and video calls.
@@ -254,6 +258,7 @@
Image
Color
Blur
+ Video
Area
Watermark
Low Stream
@@ -314,4 +319,8 @@
Reopen url again?
First Frame Optimization
It cannot be turned off after it is turned on, and it will take effect only when both the main and auxiliary ends are turned on
+ Start Recording
+ Stop Recording
+ RecordingTag
+ TransparentBackground
diff --git a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyByteDance.java b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyByteDance.java
index 0d71def59..7d5d506fc 100644
--- a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyByteDance.java
+++ b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyByteDance.java
@@ -6,7 +6,9 @@
public interface IBeautyByteDance {
- int process(int oesTexId, int width, int height, int rotation);
+ int process(int texId, int texType, int width, int height, int rotation);
+
+ int process(byte[] nv21, int width, int height, int rotation);
void release();
diff --git a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyFaceUnity.java b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyFaceUnity.java
index a98dcdfcb..2e0a29e29 100644
--- a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyFaceUnity.java
+++ b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautyFaceUnity.java
@@ -6,9 +6,9 @@
public interface IBeautyFaceUnity {
- int process(int oesTexId, int width, int height);
+ int process(byte[] nv21, int width, int height, boolean isFront);
- int process(byte[] nv21, int oesTexId, int width, int height);
+ int process(int oesTexId, int width, int height, boolean isFront);
void release();
diff --git a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautySenseTime.java b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautySenseTime.java
index 48857f2fb..c0b183fec 100644
--- a/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautySenseTime.java
+++ b/Android/APIExample/beauty/base/src/main/java/io/agora/beauty/base/IBeautySenseTime.java
@@ -19,7 +19,7 @@ public interface IBeautySenseTime {
* @return new Texture ID to render
*/
public int process(
- byte[] nv21, int textureId, int texFormat, int width, int height, int orientation);
+ byte[] nv21, int textureId, int texFormat, int width, int height, int orientation, float[] transformMatrix);
int process(
diff --git a/Android/APIExample/beauty/bytedance/src/main/java/io/agora/beauty/bytedance/BeautyByteDanceImpl.java b/Android/APIExample/beauty/bytedance/src/main/java/io/agora/beauty/bytedance/BeautyByteDanceImpl.java
index ac8e2bf44..4ac2976d8 100644
--- a/Android/APIExample/beauty/bytedance/src/main/java/io/agora/beauty/bytedance/BeautyByteDanceImpl.java
+++ b/Android/APIExample/beauty/bytedance/src/main/java/io/agora/beauty/bytedance/BeautyByteDanceImpl.java
@@ -1,6 +1,7 @@
package io.agora.beauty.bytedance;
import android.content.Context;
+import android.opengl.GLES11Ext;
import com.bytedance.labcv.core.Config;
import com.bytedance.labcv.core.effect.EffectManager;
@@ -9,6 +10,7 @@
import com.bytedance.labcv.effectsdk.BytedEffectConstants;
import java.io.File;
+import java.nio.ByteBuffer;
import io.agora.beauty.base.IBeautyByteDance;
@@ -31,6 +33,9 @@ public class BeautyByteDanceImpl implements IBeautyByteDance {
private final AssetsCopyHelper assetsCopyHelper;
+ private ByteBuffer yByteBuffer;
+ private ByteBuffer vuByteBuffer;
+
public BeautyByteDanceImpl(Context context) {
mContext = context;
mResourcePath = mContext.getExternalFilesDir("assets").getAbsolutePath() + File.separator + "resource";
@@ -58,25 +63,84 @@ private void configSdkDefault() {
sdkIsInit = true;
}
+ @Override
+ public int process(byte[] nv21, int width, int height, int rotation) {
+ if (isReleased) {
+ return -1;
+ }
+ if (!resourceReady) {
+ return -1;
+ }
+ boolean isFront = rotation == 270;
+ configSdkDefault();
+ // 是否为前置摄像头
+ mEffectManager.setCameraPosition(isFront);
+ // 生成目标承载纹理
+ int dstTexture = mImageUtil.prepareTexture(width, height);
+ // NV21转2D纹理
+ ImageUtil.Transition transition = new ImageUtil.Transition();
+ if(rotation == 270){
+ transition.scale(1.f, -1.0f);
+ }
+
+ int yLength = width * height;
+ if (yByteBuffer == null || yByteBuffer.capacity() != yLength) {
+ if (yByteBuffer != null) {
+ yByteBuffer.clear();
+ }
+ yByteBuffer = ByteBuffer.allocateDirect(yLength);
+ }
+ int vuLength = width * height / 2;
+ if(vuByteBuffer == null || vuByteBuffer.capacity() != vuLength){
+ if (vuByteBuffer != null) {
+ vuByteBuffer.clear();
+ }
+ vuByteBuffer = ByteBuffer.allocateDirect(vuLength);
+ }
+ yByteBuffer.position(0);
+ yByteBuffer.put(nv21, 0, yLength);
+ yByteBuffer.position(0);
+ vuByteBuffer.put(nv21, yLength, vuLength);
+ vuByteBuffer.position(0);
+ int texture2d = mImageUtil.transferYUVToTexture(yByteBuffer, vuByteBuffer, width, height, transition);
+
+ // CV SDK 特效处理
+ boolean process = mEffectManager.process(texture2d, dstTexture, width, height,
+ BytedEffectConstants.Rotation.CLOCKWISE_ROTATE_0,
+ System.nanoTime());
+ if (!process) {
+ return -1;
+ }
+ if(rotation == 90){
+ transition.scale(1.f, -1.0f);
+ }
+ return mImageUtil.transferTextureToTexture(dstTexture, BytedEffectConstants.TextureFormat.Texure2D, BytedEffectConstants.TextureFormat.Texure2D,
+ width, height, transition);
+ }
@Override
- public int process(int oesTexId, int width, int height, int rotation) {
+ public int process(int texId, int texType, int width, int height, int rotation) {
if (isReleased) {
return -1;
}
if (!resourceReady) {
return -1;
}
+ boolean isFront = rotation == 270;
configSdkDefault();
// 是否为前置摄像头
- mEffectManager.setCameraPosition(true);
+ // mEffectManager.setCameraPosition(isFront);
// 生成目标承载纹理
int dstTexture = mImageUtil.prepareTexture(width, height);
// OES 纹理转2D纹理
- int texture2d = mImageUtil.transferTextureToTexture(oesTexId,
- BytedEffectConstants.TextureFormat.Texture_Oes,
+ ImageUtil.Transition transition = new ImageUtil.Transition();
+ if(isFront){
+ transition.scale(1.f, -1.0f);
+ }
+ int texture2d = mImageUtil.transferTextureToTexture(texId,
+ texType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES ? BytedEffectConstants.TextureFormat.Texture_Oes : BytedEffectConstants.TextureFormat.Texure2D,
BytedEffectConstants.TextureFormat.Texure2D,
- width, height, new ImageUtil.Transition());
+ width, height, transition);
// CV SDK 特效处理
boolean process = mEffectManager.process(texture2d, dstTexture, width, height,
BytedEffectConstants.Rotation.CLOCKWISE_ROTATE_0,
@@ -94,6 +158,14 @@ public void release() {
sdkIsInit = false;
mImageUtil.release();
assetsCopyHelper.stop();
+ if(yByteBuffer != null){
+ yByteBuffer.clear();
+ yByteBuffer = null;
+ }
+ if(vuByteBuffer != null){
+ vuByteBuffer.clear();
+ vuByteBuffer = null;
+ }
}
@Override
@@ -132,7 +204,7 @@ public void setStickerEnable(boolean enable) {
return;
}
if (enable) {
- String stickerPath = new EffectResourceHelper(mContext).getStickerPath("/stickers/zhaocaimao");
+ String stickerPath = new EffectResourceHelper(mContext).getStickerPath("/stickers/wochaotian");
mEffectManager.setStickerAbs(stickerPath);
} else {
mEffectManager.setStickerAbs(null);
diff --git a/Android/APIExample/beauty/faceunity/README.md b/Android/APIExample/beauty/faceunity/README.md
index 448ab2f8f..49859d979 100644
--- a/Android/APIExample/beauty/faceunity/README.md
+++ b/Android/APIExample/beauty/faceunity/README.md
@@ -7,7 +7,7 @@ This example provides two FaceUnity beauty access methods, single input (NV21 on
## 3 License
-Contact sales@agora.io and get a licence file `authpack.java`, then copy this file to project folder `src/main/java/io/agora/rtcwithfu/authpack.java`. Note this licence determines which FaceUnity functions/effects you are allowed to use.
+Contact sales@agora.io and get a licence file `authpack.java`, then copy this file to project folder `beauty/faceunity/src/main/java/com/faceunity/nama/authpack.java`. Note this licence determines which FaceUnity functions/effects you are allowed to use.
## 4 Configure compilation
diff --git a/Android/APIExample/beauty/faceunity/README.zh.md b/Android/APIExample/beauty/faceunity/README.zh.md
index 67d4801b3..45a2467bc 100644
--- a/Android/APIExample/beauty/faceunity/README.zh.md
+++ b/Android/APIExample/beauty/faceunity/README.zh.md
@@ -7,7 +7,7 @@
## 1 证书激活
-请联系 sales@agora.io 获取证书文件替换本项目中的 **src/main/java/io/agora/rtcwithfu/authpack.java**。
+请联系 sales@agora.io 获取证书文件替换本项目中的 **beauty/faceunity/src/main/java/com/faceunity/nama/authpack.java**。
## 2 配置编译
diff --git a/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/FURenderer.java b/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/FURenderer.java
index a72684b9f..457c7cf38 100644
--- a/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/FURenderer.java
+++ b/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/FURenderer.java
@@ -123,11 +123,11 @@ public void bindListener(FURendererListener mFURendererListener) {
* @return
*/
@Override
- public int onDrawFrameDualInput(byte[] img, int texId, int width, int height) {
+ public int onDrawFrameDualInput(int texId, int width, int height) {
prepareDrawFrame();
FURenderInputData inputData = new FURenderInputData(width, height);
/*注释掉Buffer配置,启用单纹理模式,防止Buffer跟纹理存在不对齐造成,美妆偏移*/
- inputData.setImageBuffer(new FURenderInputData.FUImageBuffer(inputBufferType, img));//设置为单Buffer输入
+ // inputData.setImageBuffer(new FURenderInputData.FUImageBuffer(inputBufferType, img));//设置为单Buffer输入
inputData.setTexture(new FURenderInputData.FUTexture(inputTextureType, texId));
FURenderInputData.FURenderConfig config = inputData.getRenderConfig();
config.setExternalInputType(externalInputType);
@@ -148,12 +148,11 @@ public int onDrawFrameDualInput(byte[] img, int texId, int width, int height) {
}
@Override
- public int onDrawFrameInput(int texId, int width, int height) {
+ public int onDrawFrameInput(byte[] img, int width, int height) {
prepareDrawFrame();
FURenderInputData inputData = new FURenderInputData(width, height);
/*注释掉Buffer配置,启用单纹理模式,防止Buffer跟纹理存在不对齐造成,美妆偏移*/
-// inputData.setImageBuffer(new FURenderInputData.FUImageBuffer(inputBufferType, img));//设置为单Buffer输入
- inputData.setTexture(new FURenderInputData.FUTexture(inputTextureType, texId));
+ inputData.setImageBuffer(new FURenderInputData.FUImageBuffer(inputBufferType, img));//设置为单Buffer输入
FURenderInputData.FURenderConfig config = inputData.getRenderConfig();
config.setExternalInputType(externalInputType);
config.setInputOrientation(inputOrientation);
@@ -168,7 +167,7 @@ public int onDrawFrameInput(int texId, int width, int height) {
if (outputData.getTexture() != null && outputData.getTexture().getTexId() > 0) {
return outputData.getTexture().getTexId();
}
- return texId;
+ return -1;
}
/**
diff --git a/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/IFURenderer.java b/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/IFURenderer.java
index 715a4a6a6..a85140048 100644
--- a/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/IFURenderer.java
+++ b/Android/APIExample/beauty/faceunity/src/main/java/com/faceunity/nama/IFURenderer.java
@@ -47,9 +47,9 @@ abstract class IFURenderer {
* @param height 高
* @return
*/
- public abstract int onDrawFrameDualInput(byte[] img, int texId, int width, int height);
+ public abstract int onDrawFrameDualInput(int texId, int width, int height);
- public abstract int onDrawFrameInput(int texId, int width, int height);
+ public abstract int onDrawFrameInput(byte[] img, int width, int height);
/**
* 设置检测类型
diff --git a/Android/APIExample/beauty/faceunity/src/main/java/io/agora/beauty/faceunity/BeautyFaceUnityImpl.java b/Android/APIExample/beauty/faceunity/src/main/java/io/agora/beauty/faceunity/BeautyFaceUnityImpl.java
index 81a8bde5b..900826409 100644
--- a/Android/APIExample/beauty/faceunity/src/main/java/io/agora/beauty/faceunity/BeautyFaceUnityImpl.java
+++ b/Android/APIExample/beauty/faceunity/src/main/java/io/agora/beauty/faceunity/BeautyFaceUnityImpl.java
@@ -4,6 +4,7 @@
import com.faceunity.FUConfig;
import com.faceunity.core.entity.FUBundleData;
+import com.faceunity.core.enumeration.FUTransformMatrixEnum;
import com.faceunity.core.faceunity.FUAIKit;
import com.faceunity.core.faceunity.FURenderKit;
import com.faceunity.core.model.bodyBeauty.BodyBeauty;
@@ -75,19 +76,37 @@ private void initBodyBeauty() {
}
@Override
- public int process(int oesTexId, int width, int height) {
+ public int process(byte[] nv21, int width, int height, boolean isFront) {
if (isReleased) {
return -1;
}
- return fuRenderer.onDrawFrameInput(oesTexId, width, height);
+ if(isFront){
+ fuRenderer.setInputBufferMatrix(FUTransformMatrixEnum.CCROT0);
+ fuRenderer.setInputTextureMatrix(FUTransformMatrixEnum.CCROT0);
+ fuRenderer.setOutputMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ }else{
+ fuRenderer.setInputBufferMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ fuRenderer.setInputTextureMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ fuRenderer.setOutputMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ }
+ return fuRenderer.onDrawFrameInput(nv21, width, height);
}
@Override
- public int process(byte[] nv21, int oesTexId, int width, int height) {
+ public int process(int oesTexId, int width, int height, boolean isFront) {
if (isReleased) {
return -1;
}
- return fuRenderer.onDrawFrameDualInput(nv21, oesTexId, width, height);
+ if (isFront) {
+ fuRenderer.setInputBufferMatrix(FUTransformMatrixEnum.CCROT0);
+ fuRenderer.setInputTextureMatrix(FUTransformMatrixEnum.CCROT0);
+ fuRenderer.setOutputMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ } else {
+ fuRenderer.setInputBufferMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ fuRenderer.setInputTextureMatrix(FUTransformMatrixEnum.CCROT0_FLIPVERTICAL);
+ fuRenderer.setOutputMatrix(FUTransformMatrixEnum.CCROT0);
+ }
+ return fuRenderer.onDrawFrameDualInput(oesTexId, width, height);
}
@Override
diff --git a/Android/APIExample/beauty/sense-time/README.md b/Android/APIExample/beauty/sense-time/README.md
index 1f5662b9f..36c190220 100644
--- a/Android/APIExample/beauty/sense-time/README.md
+++ b/Android/APIExample/beauty/sense-time/README.md
@@ -1,7 +1,7 @@
# Configuration Guide
*English | [中文](README.zh.md)*
-This example provides two SenseTime beauty access methods, single input (NV21 only) and dual input (NV21 and texture). Dual input has less NV21 to texture operation than single input, and the conversion performance is relatively high, but on low-end machines, the sticker may not follow the face, etc. It is recommended to choose the appropriate access method according to the customer.
+This example provides the best SenseTime beauty integration method.
> The plug-in cloud market provides a more convenient integration method. You can log in to the sound network [console](https://console.agora.io/) to view the [integration tutorial](https://console.agora.io/ marketplace/license/introduce?serviceName=sensetime-ar)
## 1 SenseTime Beauty SDK
diff --git a/Android/APIExample/beauty/sense-time/README.zh.md b/Android/APIExample/beauty/sense-time/README.zh.md
index 7dfa59656..b02500bb8 100644
--- a/Android/APIExample/beauty/sense-time/README.zh.md
+++ b/Android/APIExample/beauty/sense-time/README.zh.md
@@ -1,7 +1,7 @@
# 配置指南
*[English](README.md) | 中文*
-此示例提供了两种商汤美颜接入方式,单输入(仅NV21)和双输入(NV21和纹理)。双输入比单输入少了 NV21转纹理的这步操作,转换性能比较高,但是在低端机上可能出现贴纸不跟脸等情况,建议根据面向的客户来选择合适的接入方式。
+此示例提供了商汤美颜的最佳接入方式。
> 声网插件云市场上提供了更便捷的集成方式,可以登录声网[控制台](https://console.agora.io/)查看[集成教程](https://console.agora.io/marketplace/license/introduce?serviceName=sensetime-ar)
diff --git a/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/STRenderer.java b/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/STRenderer.java
index 507b606c7..335355686 100644
--- a/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/STRenderer.java
+++ b/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/STRenderer.java
@@ -65,7 +65,9 @@ public class STRenderer {
private boolean mIsCreateHumanActionHandleSucceeded = false;
private long mDetectConfig = -1;
- private STGLRender mGLRenderBefore;
+ private STGLRender mGLRenderOESBefore;
+ private STGLRender mGLRender2DBefore;
+ private STGLRender mGLRenderAfter;
private int[] mTextureOutId;
private byte[] mImageDataBuffer = null;
protected STHumanAction[] mSTHumanAction = new STHumanAction[2];
@@ -79,6 +81,8 @@ public class STRenderer {
private final STEffectParameters mEffectParams = new STEffectParameters();
private STFaceMeshList faceMeshList;
+ private int mImageWidth;
+ private int mImageHeight;
public STRenderer(Context context) {
mContext = context;
@@ -181,7 +185,9 @@ private void initMobileEffect() {
private void initGLRender() {
- mGLRenderBefore = new STGLRender(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ mGLRenderOESBefore = new STGLRender(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ mGLRender2DBefore = new STGLRender(GLES20.GL_TEXTURE_2D);
+ mGLRenderAfter = new STGLRender(GLES20.GL_TEXTURE_2D);
}
@@ -262,7 +268,7 @@ private int getCurrentOrientation(int rotation) {
}
public int preProcess(
- int width, int height, int orientation,
+ int width, int height, int rotation,
byte[] cameraPixel, int pixelFormat
) {
@@ -270,14 +276,20 @@ public int preProcess(
return -1;
}
- int imageWidth = width;
- int imageHeight = height;
-// if (orientation == 90 || orientation == 270) {
-// imageWidth = height;
-// imageHeight = width;
-// }
+ boolean sizeChange = mImageWidth != width || mImageHeight != height;
+ mImageWidth = width;
+ mImageHeight = height;
- if (mImageDataBuffer == null || mImageDataBuffer.length != cameraPixel.length) {
+ if (mTextureOutId == null) {
+ mTextureOutId = new int[2];
+ GlUtil.initEffectTexture(mImageWidth, mImageHeight, mTextureOutId, GLES20.GL_TEXTURE_2D);
+ } else if (sizeChange || mTextureOutId.length != 2) {
+ GLES20.glDeleteTextures(mTextureOutId.length, mTextureOutId, 0);
+ mTextureOutId = null;
+ return -1;
+ }
+
+ if (sizeChange || mImageDataBuffer == null) {
mImageDataBuffer = new byte[cameraPixel.length];
}
System.arraycopy(cameraPixel, 0, mImageDataBuffer, 0, cameraPixel.length);
@@ -287,30 +299,27 @@ public int preProcess(
// prepare params
updateHumanActionDetectConfig();
- //mSTHumanActionNative.nativeHumanActionPtrCopy();
int ret = mSTHumanActionNative.nativeHumanActionDetectPtr(
mImageDataBuffer,
pixelFormat,
mDetectConfig,
- getCurrentOrientation(orientation),
+ getCurrentOrientation(rotation),
width,
height);
+ mSTHumanActionNative.nativeHumanActionPtrCopy();
if (ret == 0) {
if (mNeedAnimalDetect) {
- animalDetect(mImageDataBuffer, pixelFormat, getCurrentOrientation(orientation), width, height, 0);
+ animalDetect(mImageDataBuffer, pixelFormat, getCurrentOrientation(rotation), width, height, 0);
} else {
mAnimalFaceInfo[0] = new STAnimalFaceInfo(null, 0);
}
}
// >>>>>> 2. upload nv21 to texture
- if (mTextureOutId == null) {
- mTextureOutId = new int[2];
- GlUtil.initEffectTexture(imageWidth, imageHeight, mTextureOutId, GLES20.GL_TEXTURE_2D);
- }
- mSTMobileColorConvertNative.setTextureSize(imageWidth, imageHeight);
+
+ mSTMobileColorConvertNative.setTextureSize(mImageWidth, mImageHeight);
mSTMobileColorConvertNative.nv21BufferToRgbaTexture(width, height,
STRotateType.ST_CLOCKWISE_ROTATE_0,
false,
@@ -321,16 +330,16 @@ public int preProcess(
// >>>>>> 3. render texture
//输入纹理,纹理只支持2D
- STEffectTexture stEffectTexture = new STEffectTexture(textureId, imageWidth, imageHeight, 0);
+ STEffectTexture stEffectTexture = new STEffectTexture(textureId, mImageWidth, mImageHeight, 0);
//输出纹理,需要在上层初始化
- STEffectTexture stEffectTextureOut = new STEffectTexture(mTextureOutId[1], imageWidth, imageHeight, 0);
+ STEffectTexture stEffectTextureOut = new STEffectTexture(mTextureOutId[1], mImageWidth, mImageHeight, 0);
//渲染接口输入参数
STEffectRenderInParam sTEffectRenderInParam = new STEffectRenderInParam(
- mSTHumanActionNative.getNativeHumanActionResultPtr(),
+ mSTHumanActionNative.getNativeHumanActionPtrCopy(),
mAnimalFaceInfo[0],
- 0,
- 0,
+ getCurrentOrientation(rotation),
+ getCurrentOrientation(rotation),
false,
null,
stEffectTexture,
@@ -347,6 +356,10 @@ public int preProcess(
textureId = stEffectRenderOutParam.getTexture().getId();
}
+ boolean isFront = rotation == 270;
+ mGLRenderAfter.adjustRenderSize(mImageWidth, mImageHeight, 0, false, isFront);
+ textureId = mGLRenderAfter.process(textureId, STGLRender.IDENTITY_MATRIX);
+
GLES20.glFinish();
return textureId;
@@ -356,7 +369,7 @@ public int preProcess(
/**
* @param width camera preview width
* @param height camera preview height
- * @param orientation camera preview orientation
+ * @param rotation camera preview orientation
* @param cameraPixel camera preview pixel data
* @param pixelFormat {@link STCommonNative#ST_PIX_FMT_NV21} and etc.
* @param cameraTextureId camera preview texture id
@@ -364,32 +377,42 @@ public int preProcess(
* @return new Texture ID to render
*/
public int preProcess(
- int width, int height, int orientation,
+ int width, int height, int rotation,
byte[] cameraPixel, int pixelFormat,
- int cameraTextureId, int texFormat) {
+ int cameraTextureId, int texFormat, float[] transformMatrix) {
if (!mAuthorized) {
return -1;
}
- int imageWidth = width;
- int imageHeight = height;
+
+ boolean sizeChange = mImageWidth != width || mImageHeight != height;
+ mImageWidth = width;
+ mImageHeight = height;
+
// >>>>>> 1. translate oes texture to 2d
if (mTextureOutId == null) {
mTextureOutId = new int[1];
- GlUtil.initEffectTexture(imageWidth, imageHeight, mTextureOutId, GLES20.GL_TEXTURE_2D);
+ GlUtil.initEffectTexture(mImageWidth, mImageHeight, mTextureOutId, GLES20.GL_TEXTURE_2D);
+ } else if (sizeChange || mTextureOutId.length != 1) {
+ GLES20.glDeleteTextures(mTextureOutId.length, mTextureOutId, 0);
+ mTextureOutId = null;
+ return -1;
}
int textureId = cameraTextureId;
if (texFormat == GLES11Ext.GL_TEXTURE_EXTERNAL_OES) {
- mGLRenderBefore.adjustRenderSize(imageWidth, imageHeight, 0, false, false);
- textureId = mGLRenderBefore.process(cameraTextureId, STGLRender.IDENTITY_MATRIX);
+ mGLRenderOESBefore.adjustRenderSize(mImageWidth, mImageHeight, 0, false, true);
+ textureId = mGLRenderOESBefore.process(cameraTextureId, transformMatrix);
+ } else {
+ mGLRender2DBefore.adjustRenderSize(mImageWidth, mImageHeight, 0, false, true);
+ textureId = mGLRender2DBefore.process(cameraTextureId, transformMatrix);
}
// >>>>>> 2. detect human point info using cameraData
if (mIsCreateHumanActionHandleSucceeded) {
- if (mImageDataBuffer == null || mImageDataBuffer.length != cameraPixel.length) {
+ if (sizeChange || mImageDataBuffer == null) {
mImageDataBuffer = new byte[cameraPixel.length];
}
System.arraycopy(cameraPixel, 0, mImageDataBuffer, 0, cameraPixel.length);
@@ -401,14 +424,15 @@ public int preProcess(
int ret = mSTHumanActionNative.nativeHumanActionDetectPtr(mImageDataBuffer,
pixelFormat,
mDetectConfig,
- getCurrentOrientation(orientation),
+ getCurrentOrientation(rotation),
width,
height);
+ mSTHumanActionNative.nativeHumanActionPtrCopy();
//STHumanAction nativeHumanAction = mSTHumanActionNative.getNativeHumanAction();
//LogUtils.i(TAG, "human action detect cost time: %d, ret: %d", System.currentTimeMillis() - startHumanAction, ret);
if (ret == 0) {
if (mNeedAnimalDetect) {
- animalDetect(mImageDataBuffer, pixelFormat, getCurrentOrientation(orientation), width, height, 0);
+ animalDetect(mImageDataBuffer, pixelFormat, getCurrentOrientation(rotation), width, height, 0);
} else {
mAnimalFaceInfo[0] = new STAnimalFaceInfo(null, 0);
}
@@ -419,16 +443,16 @@ public int preProcess(
// >>>>>> 3. render texture
//输入纹理,纹理只支持2D
- STEffectTexture stEffectTexture = new STEffectTexture(textureId, imageWidth, imageHeight, 0);
+ STEffectTexture stEffectTexture = new STEffectTexture(textureId, mImageWidth, mImageHeight, 0);
//输出纹理,需要在上层初始化
- STEffectTexture stEffectTextureOut = new STEffectTexture(mTextureOutId[0], imageWidth, imageHeight, 0);
+ STEffectTexture stEffectTextureOut = new STEffectTexture(mTextureOutId[0], mImageWidth, mImageHeight, 0);
//渲染接口输入参数
STEffectRenderInParam sTEffectRenderInParam = new STEffectRenderInParam(
- mSTHumanActionNative.getNativeHumanActionResultPtr(),
+ mSTHumanActionNative.getNativeHumanActionPtrCopy(),
mAnimalFaceInfo[0],
- 0,
- 0,
+ getCurrentOrientation(rotation),
+ getCurrentOrientation(rotation),
false,
null,
stEffectTexture,
@@ -441,6 +465,10 @@ public int preProcess(
textureId = stEffectRenderOutParam.getTexture().getId();
}
+ boolean isFront = rotation == 270;
+ mGLRenderAfter.adjustRenderSize(mImageWidth, mImageHeight, 0, false, isFront);
+ textureId = mGLRenderAfter.process(textureId, STGLRender.IDENTITY_MATRIX);
+
GLES20.glFinish();
return textureId;
@@ -584,7 +612,10 @@ public void release() {
mChangeStickerManagerThread.quit();
mChangeStickerManagerThread = null;
deleteTextures();
- mGLRenderBefore.destroyPrograms();
+ mGLRenderOESBefore.destroyPrograms();
+ mGLRender2DBefore.destroyPrograms();
+ mImageWidth = mImageHeight = 0;
+ mGLRenderAfter.destroyPrograms();
}
private void deleteTextures() {
diff --git a/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/display/STGLRender.java b/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/display/STGLRender.java
index 6a6e44898..1b26fac18 100644
--- a/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/display/STGLRender.java
+++ b/Android/APIExample/beauty/sense-time/src/main/java/com/sensetime/effects/display/STGLRender.java
@@ -14,14 +14,19 @@ public class STGLRender {
private RenderProgram mRenderProgram;
+ private int mRotation;
+ private boolean mFlipH, mFlipV;
public STGLRender(int textureType) {
mRenderProgram = new RenderProgram(textureType);
}
public void adjustRenderSize(int width, int height, int rotation, boolean flipH, boolean flipV) {
- boolean resize = mRenderProgram.resize(width, height);
- if (resize) {
+ boolean change = mRenderProgram.resize(width, height) || mRotation != rotation || mFlipH != flipH || mFlipV != flipV;
+ if (change) {
+ mRotation = rotation;
+ mFlipH = flipH;
+ mFlipV = flipV;
float[] tmp = new float[16];
Matrix.setIdentityM(tmp, 0);
diff --git a/Android/APIExample/beauty/sense-time/src/main/java/io/agora/beauty/sensetime/BeautySenseTimeImpl.java b/Android/APIExample/beauty/sense-time/src/main/java/io/agora/beauty/sensetime/BeautySenseTimeImpl.java
index 492894583..fb28ca7a1 100644
--- a/Android/APIExample/beauty/sense-time/src/main/java/io/agora/beauty/sensetime/BeautySenseTimeImpl.java
+++ b/Android/APIExample/beauty/sense-time/src/main/java/io/agora/beauty/sensetime/BeautySenseTimeImpl.java
@@ -23,18 +23,18 @@ public BeautySenseTimeImpl(Context context) {
}
@Override
- public int process(byte[] nv21, int textureId, int texFormat, int width, int height, int orientation) {
+ public int process(byte[] nv21, int textureId, int texFormat, int width, int height, int rotation, float[] transformMatrix) {
return mSTRenderer.preProcess(
- width, height, orientation,
+ width, height, rotation,
nv21, STCommonNative.ST_PIX_FMT_NV21,
- textureId, texFormat
+ textureId, texFormat, transformMatrix
);
}
@Override
- public int process(byte[] nv21, int width, int height, int orientation) {
+ public int process(byte[] nv21, int width, int height, int rotation) {
return mSTRenderer.preProcess(
- width, height, orientation,
+ width, height, rotation,
nv21, STCommonNative.ST_PIX_FMT_NV21
);
}
diff --git a/README.md b/README.md
index c91001224..f2839aea4 100644
--- a/README.md
+++ b/README.md
@@ -8,12 +8,12 @@ This repository contains sample projects for the Agora RTC Native SDK, including
| Platform | Language | Project Location | SDK |
| -------- | -------- | ------------------------------------------------------ | ------------------------------------------------------------ |
-| Android | Java | [/Android/APIExample](/Android/APIExample) | [RTC Java Video SDK](https://docs.agora.io/en/video-call-4.x-beta/API%20Reference/java_ng/API/rtc_api_overview_ng.html) |
-| Android | Java | [/Android/APIExample-Audio](/Android/APIExample-Audio) | [RTC Java Audio SDK](https://docs.agora.io/en/voice-call-4.x-beta/API%20Reference/java_ng/API/rtc_api_overview_ng.html) |
-| iOS | Swift | [/iOS/APIExample](/iOS/APIExample) | [RTC Objective-C Video SDK](https://docs.agora.io/en/video-call-4.x-beta/API%20Reference/ios_ng/API/rtc_api_overview_ng.html) |
-| iOS | Swift | [/iOS/APIExample-Audio](/iOS/APIExample-Audio) | [RTC Objective-C Audio SDK](https://docs.agora.io/en/voice-call-4.x-beta/API%20Reference/ios_ng/API/rtc_api_overview_ng.html) |
-| macOS | Swift | [/macOS](/macOS) | [RTC Objective-C Video SDK](https://docs.agora.io/en/video-call-4.x-beta/API%20Reference/mac_ng/API/rtc_api_overview_ng.html) |
-| Windows | C++ | [/windows](/windows) | [RTC C++ Video SDK](https://docs.agora.io/en/video-call-4.x-beta/API%20Reference/windows_ng/API/rtc_api_overview_ng.html) |
+| Android | Java | [/Android/APIExample](/Android/APIExample) | [RTC Java Video SDK](https://docs.agora.io/en/sdks?platform=android) |
+| Android | Java | [/Android/APIExample-Audio](/Android/APIExample-Audio) | [RTC Java Audio SDK](https://docs.agora.io/en/sdks?platform=android) |
+| iOS | Swift | [/iOS/APIExample](/iOS/APIExample) | [RTC Objective-C Video SDK](https://docs.agora.io/en/sdks?platform=ios) |
+| iOS | Swift | [/iOS/APIExample-Audio](/iOS/APIExample-Audio) | [RTC Objective-C Audio SDK](https://docs.agora.io/en/sdks?platform=ios) |
+| macOS | Swift | [/macOS](/macOS) | [RTC Objective-C Video SDK](https://docs.agora.io/en/sdks?platform=macos) |
+| Windows | C++ | [/windows](/windows) | [RTC C++ Video SDK](https://docs.agora.io/en/sdks?platform=windows) |
You can refer to each individual platform to learn more about the projects.
diff --git a/README.zh.md b/README.zh.md
index 98c068cff..316f11dde 100644
--- a/README.zh.md
+++ b/README.zh.md
@@ -8,12 +8,12 @@
| 平台 | 语言 | 项目位置 | SDK |
| -------- | -------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------------- |
-| Android | Java | [/Android/APIExample](/Android/APIExample) | [RTC Java Video SDK](https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/rtc_api_overview_ng.html) |
-| Android | Java | [/Android/APIExample-Audio](/Android/APIExample-Audio) | [RTC Java Audio SDK](https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/rtc_api_overview_ng.html) |
-| iOS | Swift | [/iOS/APIExample](/iOS/APIExample) | [RTC Objective-C Video SDK](https://docs.agora.io/cn/video-call-4.x/API%20Reference/ios_ng/API/rtc_api_overview_ng.html) |
-| iOS | Swift | [/iOS/APIExample-Audio](/iOS/APIExample-Audio) | [RTC Objective-C Audio SDK](https://docs.agora.io/cn/voice-call-4.x/API%20Reference/ios_ng/API/rtc_api_overview_ng.html) |
-| macOS | Swift | [/macOS](/macOS) | [RTC Objective-C Video SDK](https://docs.agora.io/cn/video-call-4.x/API%20Reference/mac_ng/API/rtc_api_overview_ng.html) |
-| Windows | C++ | [/windows](/windows) | [RTC C++ Video SDK](https://docs.agora.io/cn/video-call-4.x/API%20Reference/windows_ng/API/rtc_api_overview_ng.html) |
+| Android | Java | [/Android/APIExample](/Android/APIExample) | [RTC Java Video SDK](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Android) |
+| Android | Java | [/Android/APIExample-Audio](/Android/APIExample-Audio) | [RTC Java Audio SDK](https://docs.agora.io/cn/voice-call-4.x/downloads?platform=Android) |
+| iOS | Swift | [/iOS/APIExample](/iOS/APIExample) | [RTC Objective-C Video SDK](https://docs.agora.io/cn/video-call-4.x/downloads?platform=iOS) |
+| iOS | Swift | [/iOS/APIExample-Audio](/iOS/APIExample-Audio) | [RTC Objective-C Audio SDK](https://docs.agora.io/cn/voice-call-4.x/downloads?platform=iOS) |
+| macOS | Swift | [/macOS](/macOS) | [RTC Objective-C Video SDK](https://docs.agora.io/cn/video-call-4.x/downloads?platform=macOS) |
+| Windows | C++ | [/windows](/windows) | [RTC C++ Video SDK](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Windows) |
你可以进入不同平台的项目进行试用或参考源代码。
diff --git a/cicd/scripts/ios_build.sh b/cicd/scripts/ios_build.sh
index ae9d9e2a9..a32c91747 100755
--- a/cicd/scripts/ios_build.sh
+++ b/cicd/scripts/ios_build.sh
@@ -54,17 +54,25 @@ echo PBXPROJ_PATH: $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:8B10BE1826AFFFA6002E1373:buildSettings:DEVELOPMENT_TEAM ''" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:8B10BE1826AFFFA6002E1373:buildSettings:PROVISIONING_PROFILE_SPECIFIER ''" $PBXPROJ_PATH
+#修改build number
+# Debug
+/usr/libexec/PlistBuddy -c "Set :objects:03D13BF72448758C00B599B3:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH
+# Release
+/usr/libexec/PlistBuddy -c "Set :objects:03D13BF82448758C00B599B3:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH
+
+
+
TARGET_FILE=""
if [ ! -f "Podfile" ];then
TARGET_FILE="${APP_Project}.xcodeproj"
xcodebuild clean -project ${TARGET_FILE} -scheme "${APP_TARGET}" -configuration ${MODE}
-xcodebuild -project ${TARGET_FILE} -scheme "${APP_TARGET}" -configuration ${MODE} -archivePath ${ArchivePath} archive
+xcodebuild CODE_SIGN_STYLE="Manual" -project ${TARGET_FILE} -scheme "${APP_TARGET}" clean CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -configuration ${MODE} -archivePath ${ArchivePath} archive
else
pod install
TARGET_FILE="${APP_Project}.xcworkspace"
xcodebuild clean -workspace ${TARGET_FILE} -scheme "${APP_TARGET}" -configuration ${MODE}
-xcodebuild archive -workspace ${TARGET_FILE} -scheme "${APP_TARGET}" -configuration ${MODE} -archivePath ${ArchivePath} -destination 'generic/platform=iOS'
+xcodebuild CODE_SIGN_STYLE="Manual" archive -workspace ${TARGET_FILE} -scheme "${APP_TARGET}" clean CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -configuration ${MODE} -archivePath ${ArchivePath} -destination 'generic/platform=iOS'
fi
xcodebuild -exportArchive -exportOptionsPlist ${Export_Plist_File} -archivePath ${ArchivePath} -exportPath .
diff --git a/iOS/APIExample-Audio/APIExample-Audio.xcodeproj/project.pbxproj b/iOS/APIExample-Audio/APIExample-Audio.xcodeproj/project.pbxproj
index 261cd6ed1..5c6d0adf6 100644
--- a/iOS/APIExample-Audio/APIExample-Audio.xcodeproj/project.pbxproj
+++ b/iOS/APIExample-Audio/APIExample-Audio.xcodeproj/project.pbxproj
@@ -1033,7 +1033,7 @@
"-framework",
"\"UIKit\"",
);
- PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples;
+ PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples.audio;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "APIExample-Audio/APIExample-Bridging-Header.h";
@@ -1103,7 +1103,7 @@
"-framework",
"\"UIKit\"",
);
- PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples;
+ PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples.audio;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "APIExample-Audio/APIExample-Bridging-Header.h";
diff --git a/iOS/APIExample-Audio/APIExample-Audio/Common/ExternalAudio/ExternalAudio.mm b/iOS/APIExample-Audio/APIExample-Audio/Common/ExternalAudio/ExternalAudio.mm
index 851e8e735..5938ffd58 100644
--- a/iOS/APIExample-Audio/APIExample-Audio/Common/ExternalAudio/ExternalAudio.mm
+++ b/iOS/APIExample-Audio/APIExample-Audio/Common/ExternalAudio/ExternalAudio.mm
@@ -303,7 +303,7 @@ - (void)audioController:(AudioController *)controller didCaptureData:(unsigned c
}
else {
// [self.agoraKit pushExternalAudioFrameNSData:[NSData dataWithBytes:data length:bytesLength] sourceId:1 timestamp:0];
- [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 sourceId:1 timestamp:0];
+ [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 trackId:1 timestamp:0];
}
}
diff --git a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
index c030bbf27..c7a67743e 100644
--- a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
+++ b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
@@ -42,6 +42,7 @@ class CustomPcmAudioSourceMain: BaseViewController {
var audioViews: [UInt:VideoView] = [:]
@IBOutlet weak var playAudioView: UIView!
@IBOutlet weak var pushPcmSwitch: UISwitch!
+ private var trackId: Int32 = 0
// indicate if current instance has joined channel
var isJoined: Bool = false {
@@ -82,7 +83,10 @@ class CustomPcmAudioSourceMain: BaseViewController {
// setup external audio source
pcmSourcePush = AgoraPcmSourcePush(delegate: self, filePath: filepath, sampleRate: Int(sampleRate),
channelsPerFrame: Int(channel), bitPerSample: bitPerSample, samples: samples)
- agoraKit.setExternalAudioSource(true, sampleRate: Int(sampleRate), channels: Int(channel), sourceNumber: 2, localPlayback: true, publish: true)
+
+ let trackConfig = AgoraAudioTrackConfig()
+ trackConfig.enableLocalPlayback = true
+ trackId = agoraKit.createCustomAudioTrack(.mixable, config: trackConfig)
agoraKit.enableCustomAudioLocalPlayback(1, enabled: true)
// start joining channel
// 1. Users can only see each other after they join the
@@ -92,8 +96,9 @@ class CustomPcmAudioSourceMain: BaseViewController {
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
option.publishCameraTrack = false
- option.publishMicrophoneTrack = true
- option.publishCustomAudioTrack = true
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishCustomAudioTrackId = Int(trackId)
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
@@ -112,6 +117,7 @@ class CustomPcmAudioSourceMain: BaseViewController {
// leave channel when exiting the view
pcmSourcePush?.stop()
if isJoined {
+ agoraKit.destroyCustomAudioTrack(Int(trackId))
agoraKit.disableAudio()
pcmSourcePush?.stop()
agoraKit.leaveChannel { (stats) -> Void in
@@ -128,12 +134,20 @@ class CustomPcmAudioSourceMain: BaseViewController {
} else {
pcmSourcePush?.stop()
}
+ let mediaOption = AgoraRtcChannelMediaOptions()
+ mediaOption.publishCustomAudioTrack = sender.isOn
+ agoraKit.updateChannel(with: mediaOption)
}
}
extension CustomPcmAudioSourceMain: AgoraPcmSourcePushDelegate {
func onAudioFrame(data: UnsafeMutablePointer) {
- agoraKit.pushExternalAudioFrameRawData(data, samples: samples, sourceId: 0, timestamp: 0)
+ agoraKit.pushExternalAudioFrameRawData(data,
+ samples: samples,
+ sampleRate: Int(sampleRate),
+ channels: Int(channel),
+ trackId: Int(trackId),
+ timestamp: 0)
}
}
diff --git a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
index 8f241f996..7f574fd57 100644
--- a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
+++ b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -12,13 +12,13 @@
-
+
-
+
@@ -64,23 +64,23 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
diff --git a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift
index 454452961..332d43e77 100644
--- a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift
+++ b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift
@@ -297,6 +297,9 @@ class VoiceChangerMain: BaseViewController {
LogUtils.log(message: "onLocalVoicePitch \(Double(sender.value))", level: .info)
agoraKit.setLocalVoicePitch(Double(sender.value))
}
+ @IBAction func onVoiceFormantChange(_ sender: UISlider) {
+ agoraKit.setLocalVoiceFormant(Double(sender.value))
+ }
@IBAction func onLocalVoiceEqualizaitonFreq(_ sender:UIButton) {
let alert = UIAlertController(title: "Set Band Frequency".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet)
diff --git a/iOS/APIExample-Audio/APIExample-Audio/Info.plist b/iOS/APIExample-Audio/APIExample-Audio/Info.plist
index 3c3d2fc3a..7ad85ab0a 100644
--- a/iOS/APIExample-Audio/APIExample-Audio/Info.plist
+++ b/iOS/APIExample-Audio/APIExample-Audio/Info.plist
@@ -2,8 +2,6 @@
- UIFileSharingEnabled
-
BGTaskSchedulerPermittedIdentifiers
com.yourCompanyName.appName
@@ -35,6 +33,8 @@
audio
processing
+ UIFileSharingEnabled
+
UILaunchStoryboardName
LaunchScreen
UIMainStoryboardFile
diff --git a/iOS/APIExample-Audio/ExportOptions.plist b/iOS/APIExample-Audio/ExportOptions.plist
index cbd9a2593..1727fd11e 100644
--- a/iOS/APIExample-Audio/ExportOptions.plist
+++ b/iOS/APIExample-Audio/ExportOptions.plist
@@ -2,25 +2,25 @@
- provisioningProfiles
-
- io.agora.api.examples.Agora-ScreenShare-Extension
- App
- io.agora.api.examples
- App
-
compileBitcode
-
+
destination
export
method
development
+ provisioningProfiles
+
+ io.agora.api.examples.audio
+ AgoraLab2020
+
+ signingCertificate
+ Apple Development
signingStyle
manual
stripSwiftSymbols
teamID
- GM72UGLGZW
+ JDPG69R49Z
thinning
<none>
diff --git a/iOS/APIExample-Audio/Podfile b/iOS/APIExample-Audio/Podfile
index a105225a4..d8a06674f 100644
--- a/iOS/APIExample-Audio/Podfile
+++ b/iOS/APIExample-Audio/Podfile
@@ -7,7 +7,7 @@ target 'APIExample-Audio' do
pod 'Floaty', '~> 4.2.0'
pod 'AGEVideoLayout', '~> 1.0.2'
- pod 'AgoraAudio_iOS', '4.1.1'
+ pod 'AgoraAudio_iOS', '4.2.0'
# pod 'sdk', :path => 'sdk.podspec'
end
diff --git a/iOS/APIExample-Audio/iOS_ExportOptions.plist b/iOS/APIExample-Audio/iOS_ExportOptions.plist
new file mode 100644
index 000000000..68c3cc00e
--- /dev/null
+++ b/iOS/APIExample-Audio/iOS_ExportOptions.plist
@@ -0,0 +1,27 @@
+
+
+
+
+ compileBitcode
+
+ destination
+ export
+ method
+ development
+ provisioningProfiles
+
+ io.agora.api.examples.audio
+ apiexamples_all
+
+ signingCertificate
+ Apple Development
+ signingStyle
+ manual
+ stripSwiftSymbols
+
+ teamID
+ YS397FG5PA
+ thinning
+ <none>
+
+
diff --git a/iOS/APIExample/APIExample.xcodeproj/project.pbxproj b/iOS/APIExample/APIExample.xcodeproj/project.pbxproj
index e9e1fa16d..519d9dd84 100644
--- a/iOS/APIExample/APIExample.xcodeproj/project.pbxproj
+++ b/iOS/APIExample/APIExample.xcodeproj/project.pbxproj
@@ -83,7 +83,7 @@
576CA80C25AA0FA90091520B /* AgoraPcmSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 576CA80B25AA0FA90091520B /* AgoraPcmSourcePush.swift */; };
576EA54225AC3310000B3D79 /* CustomPcmAudioSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 576EA54125AC3310000B3D79 /* CustomPcmAudioSource.swift */; };
576EA54825AC3523000B3D79 /* CustomPcmAudioSource.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 576EA54A25AC3523000B3D79 /* CustomPcmAudioSource.storyboard */; };
- 576EA59025AEDD3C000B3D79 /* (null) in Sources */ = {isa = PBXBuildFile; };
+ 576EA59025AEDD3C000B3D79 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
57FE7C4B26B2D103002D9043 /* CircularBuffer.c in Sources */ = {isa = PBXBuildFile; fileRef = 57FE7C4726B2D103002D9043 /* CircularBuffer.c */; };
670936FD282DFE1600BC3954 /* ContentInspect.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 670936FF282DFE1600BC3954 /* ContentInspect.storyboard */; };
6709B23B2806B0EA000BCC58 /* RawAudioData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6709B23A2806B0EA000BCC58 /* RawAudioData.swift */; };
@@ -97,7 +97,7 @@
67B8C7B628057D1500195106 /* RawVideoData.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67B8C7B828057D1500195106 /* RawVideoData.storyboard */; };
67CB2F0C27EB318200CB19D2 /* SpatialAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67CB2F0A27EB318100CB19D2 /* SpatialAudio.swift */; };
67CB2F0D27EB318200CB19D2 /* SpatialAudio.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67CB2F0B27EB318200CB19D2 /* SpatialAudio.storyboard */; };
- 8407E0942472320800AC5DE8 /* (null) in Sources */ = {isa = PBXBuildFile; };
+ 8407E0942472320800AC5DE8 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
8B10BE1126AFFFA6002E1373 /* SimpleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 8B10BE0F26AFFFA6002E1373 /* SimpleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
8B1422C226B50AB500FFF91B /* AudioProcessor.mm in Sources */ = {isa = PBXBuildFile; fileRef = 8B1422BC26B50AB400FFF91B /* AudioProcessor.mm */; };
8B1422C326B50AB500FFF91B /* ExtensionAudioFilter.hpp in Headers */ = {isa = PBXBuildFile; fileRef = 8B1422BD26B50AB400FFF91B /* ExtensionAudioFilter.hpp */; };
@@ -152,6 +152,9 @@
E728B85928B86B0700674A4A /* CustomVideoSourcePushMulti.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E728B85528B86B0700674A4A /* CustomVideoSourcePushMulti.storyboard */; };
E728B85A28B86B0700674A4A /* CustomVideoSourcePushMulti.swift in Sources */ = {isa = PBXBuildFile; fileRef = E728B85728B86B0700674A4A /* CustomVideoSourcePushMulti.swift */; };
E728B85C28B8971200674A4A /* sample.yuv in Resources */ = {isa = PBXBuildFile; fileRef = E728B85B28B8971200674A4A /* sample.yuv */; };
+ E74788AC29C7FB6900CD7415 /* JoinChannelVideoRecorder.strings in Resources */ = {isa = PBXBuildFile; fileRef = E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */; };
+ E74788AD29C7FB6900CD7415 /* JoinChannelVideoRecorder.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */; };
+ E74788AE29C7FB6900CD7415 /* JoinChannelVideoRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74788AB29C7FB6800CD7415 /* JoinChannelVideoRecorder.swift */; };
E74877B328A23B2F00CA2F58 /* SimpleFilter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8B10BE0D26AFFFA6002E1373 /* SimpleFilter.framework */; };
E74877B728A23B8B00CA2F58 /* NetworkManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877B628A23B8B00CA2F58 /* NetworkManager.swift */; };
E74877BA28A23C1400CA2F58 /* JSONObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877B928A23C1400CA2F58 /* JSONObject.swift */; };
@@ -203,6 +206,8 @@
E7A49D652909111400F06DD4 /* BEHttpRequestProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D642909111400F06DD4 /* BEHttpRequestProvider.mm */; };
E7A49D682909113200F06DD4 /* BERender.mm in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D672909113200F06DD4 /* BERender.mm */; };
E7A49D6B2909115200F06DD4 /* BEEffectResourceHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D6A2909115100F06DD4 /* BEEffectResourceHelper.m */; };
+ E7AD0DE129C85FFB00C9A4B0 /* sample.mov in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DE029C85FFB00C9A4B0 /* sample.mov */; };
+ E7AD0DE329C95EB500C9A4B0 /* PickerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -446,6 +451,9 @@
E728B85628B86B0700674A4A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoSourcePushMulti.storyboard; sourceTree = ""; };
E728B85728B86B0700674A4A /* CustomVideoSourcePushMulti.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourcePushMulti.swift; sourceTree = ""; };
E728B85B28B8971200674A4A /* sample.yuv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = sample.yuv; sourceTree = ""; };
+ E74788A829C7FB6800CD7415 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinChannelVideoRecorder.strings"; sourceTree = ""; };
+ E74788AA29C7FB6800CD7415 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinChannelVideoRecorder.storyboard; sourceTree = ""; };
+ E74788AB29C7FB6800CD7415 /* JoinChannelVideoRecorder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideoRecorder.swift; sourceTree = ""; };
E74877B628A23B8B00CA2F58 /* NetworkManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NetworkManager.swift; sourceTree = ""; };
E74877B928A23C1400CA2F58 /* JSONObject.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JSONObject.swift; sourceTree = ""; };
E74877C928A2611C00CA2F58 /* ToastView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ToastView.swift; sourceTree = ""; };
@@ -486,7 +494,6 @@
E7A49D1F2907DD8F00F06DD4 /* EffectsAttribute.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsAttribute.h; sourceTree = ""; };
E7A49D212907DD9A00F06DD4 /* EffectsCommonObject.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsCommonObject.h; sourceTree = ""; };
E7A49D222907DD9A00F06DD4 /* EffectsCommonObject.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EffectsCommonObject.m; sourceTree = ""; };
- E7A49D252907DDBF00F06DD4 /* EffectMacro.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = EffectMacro.h; path = "../../../../../../../../../Agora-With-SenseTime/Agora-Video-With-SenseTime-iOS/SenseMe/EFRender/EffectMacro.h"; sourceTree = ""; };
E7A49D262907DDFE00F06DD4 /* EffectsDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EffectsDetector.m; sourceTree = ""; };
E7A49D272907DDFF00F06DD4 /* EffectsDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsDetector.h; sourceTree = ""; };
E7A49D292907DEE600F06DD4 /* EFMotionManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EFMotionManager.m; sourceTree = ""; };
@@ -528,6 +535,8 @@
E7A49D692909115100F06DD4 /* BEEffectResourceHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BEEffectResourceHelper.h; sourceTree = ""; };
E7A49D6A2909115100F06DD4 /* BEEffectResourceHelper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = BEEffectResourceHelper.m; sourceTree = ""; };
E7A49D6E290A744400F06DD4 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/ThirdBeautify.strings"; sourceTree = ""; };
+ E7AD0DE029C85FFB00C9A4B0 /* sample.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = sample.mov; sourceTree = ""; };
+ E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PickerView.swift; sourceTree = ""; };
EAD308B056B63304DA681699 /* Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension(Socket)/Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig"; sourceTree = ""; };
EB8CDD3F04870C6A31287732 /* Pods_audioFilter.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_audioFilter.framework; sourceTree = BUILT_PRODUCTS_DIR; };
FAAC2AEE355D103B9E8527B5 /* Pods-Agora-ScreenShare-Extension.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension.debug.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension/Pods-Agora-ScreenShare-Extension.debug.xcconfig"; sourceTree = ""; };
@@ -796,6 +805,7 @@
children = (
8B349FE22681E2CE007247F2 /* agora-logo.png */,
576CA80925A9CC3A0091520B /* output.raw */,
+ E7AD0DE029C85FFB00C9A4B0 /* sample.mov */,
03414B5425546DEC00AB114D /* frames0.yuv */,
E728B85B28B8971200674A4A /* sample.yuv */,
03BEED0C251CAB9C005E78F4 /* audioeffect.mp3 */,
@@ -865,6 +875,7 @@
03F8733124C8696600EDB1A3 /* EntryViewController.swift */,
0339BE63251DCA3B007D4FDD /* GlobalSettings.swift */,
E721600E28D3314B006431BD /* AlertManager.swift */,
+ E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */,
);
path = Common;
sourceTree = "";
@@ -1039,6 +1050,7 @@
A75A56D324A0603000D0089E /* Basic */ = {
isa = PBXGroup;
children = (
+ E74788A629C7FB6800CD7415 /* JoinChannelVideo(Recorder) */,
E77D54C128F55E9100D51C1E /* JoinChannelVideo(Token) */,
0385768025224A88003C369A /* JoinChannelVideo */,
0371D8AC250B4A2C00C0DD61 /* JoinChannelAudio */,
@@ -1147,6 +1159,16 @@
path = APIExample/Examples/Advanced/CustomVideoSourcePushMulti;
sourceTree = SOURCE_ROOT;
};
+ E74788A629C7FB6800CD7415 /* JoinChannelVideo(Recorder) */ = {
+ isa = PBXGroup;
+ children = (
+ E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */,
+ E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */,
+ E74788AB29C7FB6800CD7415 /* JoinChannelVideoRecorder.swift */,
+ );
+ path = "JoinChannelVideo(Recorder)";
+ sourceTree = "";
+ };
E74877B528A23B8B00CA2F58 /* NetworkManager */ = {
isa = PBXGroup;
children = (
@@ -1237,7 +1259,6 @@
isa = PBXGroup;
children = (
E7A49D352907EB6000F06DD4 /* SENSEME.lic */,
- E7A49D252907DDBF00F06DD4 /* EffectMacro.h */,
E7A49D192907DD7800F06DD4 /* Effects.h */,
E7A49D182907DD7800F06DD4 /* Effects.m */,
E7A49D2A2907DEE600F06DD4 /* EFMotionManager.h */,
@@ -1466,6 +1487,7 @@
033A9F8E252D8FF300BC26E1 /* JoinMultiChannel.storyboard in Resources */,
03BEED0B251C4446005E78F4 /* audiomixing.mp3 in Resources */,
8BC751D6273E502700552265 /* LiveStreaming.storyboard in Resources */,
+ E74788AD29C7FB6900CD7415 /* JoinChannelVideoRecorder.storyboard in Resources */,
03B12DAC251127DC00E55818 /* VideoViewMetal.xib in Resources */,
E77D54C828F55E9100D51C1E /* JoinChannelVideoToken.storyboard in Resources */,
E7A49D0A29067F8300F06DD4 /* SenseBeautify.storyboard in Resources */,
@@ -1488,10 +1510,12 @@
8BE7ABC3279E065000DFBCEF /* FusionCDN.storyboard in Resources */,
0339D6D224E91B80008739CD /* QuickSwitchChannelVCItem.xib in Resources */,
E7163F8A29651D8900EBBD55 /* AR.scnassets in Resources */,
+ E7AD0DE129C85FFB00C9A4B0 /* sample.mov in Resources */,
E728B84928B5FFCB00674A4A /* PictureInPicture.storyboard in Resources */,
03BEED0D251CAB9C005E78F4 /* audioeffect.mp3 in Resources */,
A7CA48C424553CF700507435 /* Popover.storyboard in Resources */,
E7A49D42290907E200F06DD4 /* BytedEffect.storyboard in Resources */,
+ E74788AC29C7FB6900CD7415 /* JoinChannelVideoRecorder.strings in Resources */,
E7A49CFC29029E0000F06DD4 /* FUBeautify.strings in Resources */,
03D13BDC2448758B00B599B3 /* LaunchScreen.storyboard in Resources */,
E728B85C28B8971200674A4A /* sample.yuv in Resources */,
@@ -1674,18 +1698,19 @@
8B333DA9267B4BC3002A3785 /* SettingsCells.swift in Sources */,
E7A49D4829090F8000F06DD4 /* BEFrameProcessor.mm in Sources */,
033A9EFC252D61E200BC26E1 /* CustomVideoRender.swift in Sources */,
- 576EA59025AEDD3C000B3D79 /* (null) in Sources */,
+ 576EA59025AEDD3C000B3D79 /* BuildFile in Sources */,
033A9F09252D61FC00BC26E1 /* RTMPStreaming.swift in Sources */,
6709B23B2806B0EA000BCC58 /* RawAudioData.swift in Sources */,
033A9EEA252D5F5E00BC26E1 /* JoinMultiChannel.swift in Sources */,
0339BE64251DCA3B007D4FDD /* GlobalSettings.swift in Sources */,
E728B85A28B86B0700674A4A /* CustomVideoSourcePushMulti.swift in Sources */,
E728B84C28B6015800674A4A /* AgoraPictureInPictureController.m in Sources */,
- 8407E0942472320800AC5DE8 /* (null) in Sources */,
+ 8407E0942472320800AC5DE8 /* BuildFile in Sources */,
8B5E5B50274CB68E0040E97D /* RhythmPlayer.swift in Sources */,
E7A49D342907E74A00F06DD4 /* BundleUtil.m in Sources */,
036C42B524D2A3C600A59000 /* AgoraMetalRender.swift in Sources */,
E72055EA28F943520030E6D1 /* Util.swift in Sources */,
+ E74788AE29C7FB6900CD7415 /* JoinChannelVideoRecorder.swift in Sources */,
E7A49D41290907E200F06DD4 /* BytedEffectVC.m in Sources */,
E7A49D142907DC2800F06DD4 /* EffectsProcess.m in Sources */,
03DF1D9324CFC29700DF7151 /* ExternalAudio.mm in Sources */,
@@ -1725,6 +1750,7 @@
03DF1D9024CFC29700DF7151 /* AudioWriteToFile.m in Sources */,
0339BE6D251DEAFC007D4FDD /* PrecallTest.swift in Sources */,
8BC751DA273E57C900552265 /* VideoProcess.swift in Sources */,
+ E7AD0DE329C95EB500C9A4B0 /* PickerView.swift in Sources */,
E728B84828B5FFCB00674A4A /* PictureInPicture.swift in Sources */,
5744CE0925BA99FF0099AB66 /* VideoChat.swift in Sources */,
034C625E2524A06800296ECF /* VoiceChanger.swift in Sources */,
@@ -2133,6 +2159,22 @@
name = CustomVideoSourcePushMulti.storyboard;
sourceTree = "";
};
+ E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E74788A829C7FB6800CD7415 /* zh-Hans */,
+ );
+ name = JoinChannelVideoRecorder.strings;
+ sourceTree = "";
+ };
+ E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E74788AA29C7FB6800CD7415 /* Base */,
+ );
+ name = JoinChannelVideoRecorder.storyboard;
+ sourceTree = "";
+ };
E77D54C228F55E9100D51C1E /* JoinChannelVideoToken.strings */ = {
isa = PBXVariantGroup;
children = (
diff --git a/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm b/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm
index cefa82035..dc628b7fb 100644
--- a/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm
+++ b/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm
@@ -303,7 +303,7 @@ - (void)audioController:(AudioController *)controller didCaptureData:(unsigned c
}
else {
// [self.agoraKit pushExternalAudioFrameNSData:[NSData dataWithBytes:data length:bytesLength] sourceId:1 timestamp:0];
- [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 sourceId:1 timestamp:0];
+ [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 trackId:1 timestamp:0];
}
}
diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
index c5b53057f..b08bb2ac2 100644
--- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
+++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
@@ -106,8 +106,8 @@ func getAgoraRotation(rotation: Int32) -> AgoraVideoRotation? {
}
extension AgoraMetalRender: AgoraVideoFrameDelegate {
- func onCapture(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
- return true
+ func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
+ true
}
func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool {
@@ -155,9 +155,8 @@ extension AgoraMetalRender: AgoraVideoFrameDelegate {
return .readOnly
}
-
- func onPreEncode(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
- return true
+ func onPreEncode(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
+ true
}
func getVideoFormatPreference() -> AgoraVideoFormat {
diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h
index e5b577f29..c8ca14124 100644
--- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h
+++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h
@@ -27,6 +27,8 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic, strong, readonly) AgoraSampleBufferRender *displayView;
- (instancetype)initWithDisplayView:(AgoraSampleBufferRender *)displayView;
+
+- (void)releasePIP;
@end
diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m
index 4c71e493e..0958ba80c 100644
--- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m
+++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m
@@ -10,10 +10,6 @@
@interface AgoraPictureInPictureController ()
-@property (nonatomic, strong) AVPictureInPictureController *pipController;
-
-@property (nonatomic, strong) AgoraSampleBufferRender *displayView;
-
@end
@implementation AgoraPictureInPictureController
@@ -34,6 +30,12 @@ - (instancetype)initWithDisplayView:(AgoraSampleBufferRender *)displayView {
return nil;
}
+- (void)releasePIP {
+ _pipController.delegate = nil;
+ _pipController = nil;
+ [_displayView reset];
+ _displayView = nil;
+}
#pragma mark -
diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m
index a651521f2..a3b0b9b60 100644
--- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m
+++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m
@@ -33,6 +33,13 @@ - (instancetype)init {
return self;
}
+- (instancetype)initWithFrame:(CGRect)frame {
+ if (self = [super initWithFrame:frame]) {
+ [self.layer addSublayer:self.displayLayer];
+ }
+ return self;
+}
+
- (void)awakeFromNib {
[super awakeFromNib];
[self.layer addSublayer:self.displayLayer];
@@ -65,7 +72,7 @@ - (void)layoutDisplayLayer {
}
CGRect renderRect = CGRectMake(0.5 * (viewWidth - videoSize.width), 0.5 * (viewHeight - videoSize.height), videoSize.width, videoSize.height);
-
+
if (!CGRectEqualToRect(renderRect, self.displayLayer.frame)) {
self.displayLayer.frame = renderRect;
}
@@ -75,6 +82,19 @@ - (void)reset {
[self.displayLayer flushAndRemoveImage];
}
+- (OSType)getFormatType: (NSInteger)type {
+ switch (type) {
+ case 1:
+ return kCVPixelFormatType_420YpCbCr8Planar;
+
+ case 2:
+ return kCVPixelFormatType_32BGRA;
+
+ default:
+ return kCVPixelFormatType_32BGRA;
+ }
+}
+
- (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
if (!videoData) {
return;
@@ -86,6 +106,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
[self layoutDisplayLayer];
});
+
size_t width = videoData.width;
size_t height = videoData.height;
size_t yStride = videoData.yStride;
@@ -99,17 +120,23 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
@autoreleasepool {
CVPixelBufferRef pixelBuffer = NULL;
NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
- CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_420YpCbCr8Planar, (__bridge CFDictionaryRef)(pixelAttributes), &pixelBuffer);
-
+ OSType type = [self getFormatType:videoData.type];
+ CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
+ width,
+ height,
+ type,
+ (__bridge CFDictionaryRef)(pixelAttributes),
+ &pixelBuffer);
+
if (result != kCVReturnSuccess) {
NSLog(@"Unable to create cvpixelbuffer %d", result);
}
-
+
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
void *yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
int pixelBufferYBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
-
+
if (yStride == pixelBufferYBytes) {
memcpy(yPlane, yBuffer, yStride*height);
}else {
@@ -117,7 +144,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
memcpy(yPlane + pixelBufferYBytes * i, yBuffer + yStride * i, MIN(yStride, pixelBufferYBytes));
}
}
-
+
void *uPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
int pixelBufferUBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
if (uStride == pixelBufferUBytes) {
@@ -127,7 +154,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
memcpy(uPlane + pixelBufferUBytes * i, uBuffer + uStride * i, MIN(uStride, pixelBufferUBytes));
}
}
-
+
void *vPlane = (void *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 2);
int pixelBufferVBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 2);
if (vStride == pixelBufferVBytes) {
@@ -137,9 +164,9 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
memcpy(vPlane + pixelBufferVBytes * i, vBuffer + vStride * i, MIN(vStride, pixelBufferVBytes));
}
}
-
+
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-
+
CMVideoFormatDescriptionRef videoInfo;
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
@@ -150,7 +177,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData {
CMSampleBufferRef sampleBuffer;
CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, videoInfo, &timingInfo, &sampleBuffer);
-
+
[self.displayLayer enqueueSampleBuffer:sampleBuffer];
if (self.displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
[self.displayLayer flush];
@@ -176,9 +203,11 @@ - (void)renderVideoPixelBuffer:(AgoraOutputVideoFrame *_Nonnull)videoData {
@autoreleasepool {
CVPixelBufferRef pixelBuffer = videoData.pixelBuffer;
-
+
CMVideoFormatDescriptionRef videoInfo;
- CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
+ CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
+ pixelBuffer,
+ &videoInfo);
CMSampleTimingInfo timingInfo;
timingInfo.duration = kCMTimeZero;
@@ -186,9 +215,16 @@ - (void)renderVideoPixelBuffer:(AgoraOutputVideoFrame *_Nonnull)videoData {
timingInfo.presentationTimeStamp = CMTimeMake(CACurrentMediaTime()*1000, 1000);
CMSampleBufferRef sampleBuffer;
- CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, videoInfo, &timingInfo, &sampleBuffer);
-
+ CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault,
+ pixelBuffer,
+ videoInfo,
+ &timingInfo,
+ &sampleBuffer);
+
[self.displayLayer enqueueSampleBuffer:sampleBuffer];
+ if (self.displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
+ [self.displayLayer flush];
+ }
CMSampleBufferInvalidate(sampleBuffer);
CFRelease(sampleBuffer);
}
diff --git a/iOS/APIExample/APIExample/Common/PickerView.swift b/iOS/APIExample/APIExample/Common/PickerView.swift
new file mode 100644
index 000000000..b79ad961a
--- /dev/null
+++ b/iOS/APIExample/APIExample/Common/PickerView.swift
@@ -0,0 +1,124 @@
+//
+// PickerView.swift
+// APIExample
+//
+// Created by zhaoyongqiang on 2023/3/21.
+// Copyright © 2023 Agora Corp. All rights reserved.
+//
+
+import UIKit
+
+class PickerView: UIView {
+ private lazy var cancelButton: UIButton = {
+ let button = UIButton()
+ button.setTitle("Cancel".localized, for: .normal)
+ button.backgroundColor = .blue
+ button.cornerRadius = 5
+ button.setTitleColor(.white, for: .normal)
+ button.titleLabel?.font = .systemFont(ofSize: 14)
+ button.addTarget(self, action: #selector(onTapCancelButton), for: .touchUpInside)
+ return button
+ }()
+ private lazy var sureButton: UIButton = {
+ let button = UIButton()
+ button.setTitle("Sure".localized, for: .normal)
+ button.backgroundColor = .blue
+ button.cornerRadius = 5
+ button.setTitleColor(.white, for: .normal)
+ button.titleLabel?.font = .systemFont(ofSize: 14)
+ button.addTarget(self, action: #selector(onTapSureButton), for: .touchUpInside)
+ return button
+ }()
+ private lazy var titleLabel: UILabel = {
+ let label = UILabel()
+ label.text = ""
+ label.textColor = .black
+ label.font = UIFont.boldSystemFont(ofSize: 16)
+ return label
+ }()
+ private lazy var pickerView: UIPickerView = {
+ let pickerView = UIPickerView()
+ pickerView.dataSource = self
+ pickerView.delegate = self
+ return pickerView
+ }()
+ private var selectedValue: String?
+
+ // MARK: Public
+ var pickerViewSelectedValueClosure: ((String) -> Void)?
+ var dataArray: [String]?
+ var rowHeight: CGFloat = 40
+
+ override init(frame: CGRect) {
+ super.init(frame: frame)
+ setupUI()
+ }
+
+ required init?(coder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ private func setupUI() {
+ backgroundColor = .white
+ addSubview(cancelButton)
+ addSubview(titleLabel)
+ addSubview(sureButton)
+ addSubview(pickerView)
+ cancelButton.translatesAutoresizingMaskIntoConstraints = false
+ titleLabel.translatesAutoresizingMaskIntoConstraints = false
+ sureButton.translatesAutoresizingMaskIntoConstraints = false
+ pickerView.translatesAutoresizingMaskIntoConstraints = false
+
+ widthAnchor.constraint(equalToConstant: UIScreen.main.bounds.width).isActive = true
+
+ cancelButton.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 10).isActive = true
+ cancelButton.topAnchor.constraint(equalTo: topAnchor, constant: 5).isActive = true
+ cancelButton.widthAnchor.constraint(equalToConstant: 50).isActive = true
+ cancelButton.heightAnchor.constraint(equalToConstant: 30).isActive = true
+
+ titleLabel.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true
+ titleLabel.centerYAnchor.constraint(equalTo: cancelButton.centerYAnchor).isActive = true
+
+ sureButton.centerYAnchor.constraint(equalTo: cancelButton.centerYAnchor).isActive = true
+ sureButton.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -10).isActive = true
+ sureButton.widthAnchor.constraint(equalToConstant: 50).isActive = true
+ sureButton.heightAnchor.constraint(equalToConstant: 30).isActive = true
+
+ pickerView.leadingAnchor.constraint(equalTo: leadingAnchor).isActive = true
+ pickerView.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true
+ pickerView.trailingAnchor.constraint(equalTo: trailingAnchor).isActive = true
+ pickerView.topAnchor.constraint(equalTo: cancelButton.bottomAnchor, constant: 5).isActive = true
+ pickerView.heightAnchor.constraint(equalToConstant: 160).isActive = true
+ }
+
+ @objc
+ private func onTapCancelButton() {
+ AlertManager.hiddenView()
+ }
+ @objc
+ private func onTapSureButton() {
+ pickerViewSelectedValueClosure?(selectedValue ?? "")
+ AlertManager.hiddenView()
+ }
+}
+
+extension PickerView: UIPickerViewDelegate, UIPickerViewDataSource {
+ func pickerView(_ pickerView: UIPickerView, rowHeightForComponent component: Int) -> CGFloat {
+ rowHeight
+ }
+
+ func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) {
+ selectedValue = dataArray?[row]
+ }
+
+ func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? {
+ dataArray?[row] ?? ""
+ }
+
+ func numberOfComponents(in pickerView: UIPickerView) -> Int {
+ 1
+ }
+ func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
+ dataArray?.count ?? 0
+ }
+}
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift b/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift
index dd8212bf5..778814d5a 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift
@@ -103,7 +103,7 @@ class ARKitMain: BaseViewController {
// make myself a broadcaster
agoraKit.setChannelProfile(.liveBroadcasting)
- agoraKit.setClientRole(.broadcaster)
+ agoraKit.setClientRole(GlobalSettings.shared.getUserRole())
// set AR video source as custom video source
renderer = ARVideoRenderer()
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift b/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift
index 97b978286..da2681c5b 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift
@@ -108,8 +108,8 @@ class CreateDataStreamMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
index c030bbf27..c7a67743e 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift
@@ -42,6 +42,7 @@ class CustomPcmAudioSourceMain: BaseViewController {
var audioViews: [UInt:VideoView] = [:]
@IBOutlet weak var playAudioView: UIView!
@IBOutlet weak var pushPcmSwitch: UISwitch!
+ private var trackId: Int32 = 0
// indicate if current instance has joined channel
var isJoined: Bool = false {
@@ -82,7 +83,10 @@ class CustomPcmAudioSourceMain: BaseViewController {
// setup external audio source
pcmSourcePush = AgoraPcmSourcePush(delegate: self, filePath: filepath, sampleRate: Int(sampleRate),
channelsPerFrame: Int(channel), bitPerSample: bitPerSample, samples: samples)
- agoraKit.setExternalAudioSource(true, sampleRate: Int(sampleRate), channels: Int(channel), sourceNumber: 2, localPlayback: true, publish: true)
+
+ let trackConfig = AgoraAudioTrackConfig()
+ trackConfig.enableLocalPlayback = true
+ trackId = agoraKit.createCustomAudioTrack(.mixable, config: trackConfig)
agoraKit.enableCustomAudioLocalPlayback(1, enabled: true)
// start joining channel
// 1. Users can only see each other after they join the
@@ -92,8 +96,9 @@ class CustomPcmAudioSourceMain: BaseViewController {
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
option.publishCameraTrack = false
- option.publishMicrophoneTrack = true
- option.publishCustomAudioTrack = true
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishCustomAudioTrackId = Int(trackId)
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
@@ -112,6 +117,7 @@ class CustomPcmAudioSourceMain: BaseViewController {
// leave channel when exiting the view
pcmSourcePush?.stop()
if isJoined {
+ agoraKit.destroyCustomAudioTrack(Int(trackId))
agoraKit.disableAudio()
pcmSourcePush?.stop()
agoraKit.leaveChannel { (stats) -> Void in
@@ -128,12 +134,20 @@ class CustomPcmAudioSourceMain: BaseViewController {
} else {
pcmSourcePush?.stop()
}
+ let mediaOption = AgoraRtcChannelMediaOptions()
+ mediaOption.publishCustomAudioTrack = sender.isOn
+ agoraKit.updateChannel(with: mediaOption)
}
}
extension CustomPcmAudioSourceMain: AgoraPcmSourcePushDelegate {
func onAudioFrame(data: UnsafeMutablePointer) {
- agoraKit.pushExternalAudioFrameRawData(data, samples: samples, sourceId: 0, timestamp: 0)
+ agoraKit.pushExternalAudioFrameRawData(data,
+ samples: samples,
+ sampleRate: Int(sampleRate),
+ channels: Int(channel),
+ trackId: Int(trackId),
+ timestamp: 0)
}
}
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift
index 9b9ccd108..e68f475a4 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift
@@ -104,8 +104,8 @@ class CustomVideoRenderMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift
index 3b207fad8..05f3f23eb 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift
@@ -117,8 +117,8 @@ class CustomVideoSourcePushMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCustomAudioTrack = false
- option.publishCustomVideoTrack = true
+ option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishCustomVideoTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift
index e51860c29..0f721663f 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift
@@ -185,7 +185,7 @@ class CustomVideoSourcePushMultiMain: BaseViewController {
}
})
let connection = AgoraRtcConnection()
- connection.localUid = 0
+ connection.localUid = 999
connection.channelId = channelName
agoraKit.leaveChannelEx(connection) { state in
LogUtils.log(message: "warning: \(state.description)", level: .info)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift b/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift
index d627d1f50..9cef9ac56 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift
@@ -124,7 +124,7 @@ class FusionCDNHost: BaseViewController {
Util.configPrivatization(agoraKit: agoraKit)
agoraKit.setLogFile(LogUtils.sdkLogPath())
// make myself a broadcaster
- agoraKit.setClientRole(.broadcaster)
+ agoraKit.setClientRole(GlobalSettings.shared.getUserRole())
// enable video module and set up video encoding configs
agoraKit.enableVideo()
@@ -197,8 +197,8 @@ class FusionCDNHost: BaseViewController {
agoraKit.setDirectCdnStreamingVideoConfiguration(videoConfig)
agoraKit.setDirectCdnStreamingAudioConfiguration(.default)
let options = AgoraDirectCdnStreamingMediaOptions()
- options.publishCameraTrack = true
- options.publishMicrophoneTrack = true
+ options.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ options.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
let ret = agoraKit.startDirectCdnStreaming(self, publishUrl: streamingUrl, mediaOptions: options)
if ret == 0 {
streamingButton.setTitle("Streaming", for: .normal)
@@ -214,8 +214,8 @@ class FusionCDNHost: BaseViewController {
private func switchToRtcStreaming() {
guard let channelName = configs["channelName"] as? String else {return}
let options = AgoraRtcChannelMediaOptions()
- options.publishCameraTrack = true
- options.publishMicrophoneTrack = true
+ options.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ options.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
options.clientRoleType = .broadcaster
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token,
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
index 6fefe4933..f1b668294 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
@@ -168,9 +168,9 @@ class LiveStreamingMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
- option.clientRoleType = GlobalSettings.shared.getUserRole()
+ option.publishCameraTrack = role == .broadcaster
+ option.publishMicrophoneTrack = role == .broadcaster
+ option.clientRoleType = role
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
if result != 0 {
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift
index a7b81bdb4..a8c356b74 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift
@@ -115,8 +115,8 @@ class MediaChannelRelayMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
index c091d35d0..f6f71c1c1 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,7 +18,7 @@
-
+
@@ -70,19 +70,19 @@
-
+
-
-
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MutliCamera/MutliCamera.swift b/iOS/APIExample/APIExample/Examples/Advanced/MutliCamera/MutliCamera.swift
index cd00414d8..57a928920 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/MutliCamera/MutliCamera.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/MutliCamera/MutliCamera.swift
@@ -109,8 +109,8 @@ class MutliCameraMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, uid: uid, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: self.uid, mediaOptions: option)
@@ -123,7 +123,7 @@ class MutliCameraMain: BaseViewController {
}
})
}
-
+
private var isOpenCamera: Bool = false
@IBAction func onTapBackCameraButton(_ sender: UIButton) {
guard let channelName = configs["channelName"] as? String else {return}
@@ -134,36 +134,40 @@ class MutliCameraMain: BaseViewController {
connection.channelId = channelName
connection.localUid = mutliCameraUid
if isOpenCamera {
- let videoCanvas = AgoraRtcVideoCanvas()
- videoCanvas.uid = mutliCameraUid
- videoCanvas.view = localVideo_2.videoView
- videoCanvas.renderMode = .hidden
- videoCanvas.sourceType = .cameraSecondary
- videoCanvas.mirrorMode = .disabled
- agoraKit.setupLocalVideo(videoCanvas)
-
- agoraKit.startSecondaryCameraCapture()
- let option = AgoraRtcChannelMediaOptions()
- option.publishSecondaryCameraTrack = true
- option.publishMicrophoneTrack = true
- option.clientRoleType = .broadcaster
- option.autoSubscribeAudio = false
- option.autoSubscribeVideo = false
- NetworkManager.shared.generateToken(channelName: channelName, uid: mutliCameraUid) { token in
- self.agoraKit.joinChannelEx(byToken: token, connection: connection, delegate: self, mediaOptions: option, joinSuccess: nil)
- self.agoraKit.muteRemoteAudioStream(self.mutliCameraUid, mute: true)
- self.agoraKit.muteRemoteVideoStream(self.mutliCameraUid, mute: true)
- }
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = mutliCameraUid
+ videoCanvas.view = localVideo_2.videoView
+ videoCanvas.renderMode = .hidden
+ videoCanvas.sourceType = .cameraSecondary
+ videoCanvas.mirrorMode = .disabled
+ agoraKit.setupLocalVideo(videoCanvas)
+
+ let cameraConfig = AgoraCameraCapturerConfiguration()
+ cameraConfig.cameraDirection = .rear
+ cameraConfig.dimensions = localVideo_2.videoView.frame.size
+ agoraKit.enableMultiCamera(true, config: cameraConfig)
+ agoraKit.startCameraCapture(.cameraSecondary, config: cameraConfig)
+
+ let option = AgoraRtcChannelMediaOptions()
+ option.publishSecondaryCameraTrack = true
+ option.publishMicrophoneTrack = true
+ option.clientRoleType = .broadcaster
+ option.autoSubscribeAudio = false
+ option.autoSubscribeVideo = false
+ NetworkManager.shared.generateToken(channelName: channelName, uid: mutliCameraUid) { token in
+ self.agoraKit.joinChannelEx(byToken: token, connection: connection, delegate: self, mediaOptions: option, joinSuccess: nil)
+ self.agoraKit.muteRemoteAudioStream(self.mutliCameraUid, mute: true)
+ self.agoraKit.muteRemoteVideoStream(self.mutliCameraUid, mute: true)
+ }
} else {
- let videoCanvas = AgoraRtcVideoCanvas()
- videoCanvas.uid = mutliCameraUid
- videoCanvas.view = nil
- videoCanvas.renderMode = .hidden
- videoCanvas.sourceType = .cameraSecondary
- agoraKit.setupLocalVideo(videoCanvas)
-
- agoraKit.stopSecondaryCameraCapture()
- agoraKit.leaveChannelEx(connection, leaveChannelBlock: nil)
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = mutliCameraUid
+ videoCanvas.view = nil
+ videoCanvas.renderMode = .hidden
+ videoCanvas.sourceType = .cameraSecondary
+ agoraKit.setupLocalVideo(videoCanvas)
+ agoraKit.stopCameraCapture(.cameraSecondary)
+ agoraKit.leaveChannelEx(connection, leaveChannelBlock: nil)
}
}
override func viewDidDisappear(_ animated: Bool) {
@@ -180,7 +184,7 @@ class MutliCameraMain: BaseViewController {
let connection = AgoraRtcConnection()
connection.channelId = channelName
connection.localUid = mutliCameraUid
- agoraKit.stopSecondaryCameraCapture()
+ agoraKit.stopCameraCapture(.cameraSecondary)
agoraKit.leaveChannelEx(connection, leaveChannelBlock: nil)
}
AgoraRtcEngineKit.destroy()
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift
index 3ec7a84ad..faf78c338 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift
@@ -42,7 +42,6 @@ class PictureInPictureMain: BaseViewController {
var agoraKit: AgoraRtcEngineKit!
var pipController: AgoraPictureInPictureController?
var remoteUid: UInt?
-
// indicate if current instance has joined channel
var isJoined: Bool = false
@@ -54,6 +53,9 @@ class PictureInPictureMain: BaseViewController {
container.layoutStream(views: [localVideo, remoteVideo])
pipController = AgoraPictureInPictureController(displayView: remoteVideo.videoView)
+ if #available(iOS 14.2, *) {
+ pipController?.pipController.canStartPictureInPictureAutomaticallyFromInline = true
+ }
pipController?.pipController.delegate = self
// set up agora instance when view loadedlet config = AgoraRtcEngineConfig()
@@ -79,7 +81,7 @@ class PictureInPictureMain: BaseViewController {
// make myself a broadcaster
agoraKit.setChannelProfile(.liveBroadcasting)
- agoraKit.setClientRole(.broadcaster)
+ agoraKit.setClientRole(GlobalSettings.shared.getUserRole())
// enable video module and set up video encoding configs
@@ -121,15 +123,31 @@ class PictureInPictureMain: BaseViewController {
self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params")
}
})
- }
- @IBAction func onPIP(_btn: UIButton) {
- guard let currentPipController = pipController else {return}
+// rtcEngine(agoraKit, didVideoMuted: true, byUid: 0)
- if currentPipController.pipController.isPictureInPicturePossible {
+ NotificationCenter.default.addObserver(self,
+ selector: #selector(didEnterBackgroundNotification),
+ name: UIApplication.willResignActiveNotification,
+ object: nil)
+ }
+
+ deinit {
+ pipController?.releasePIP()
+ NotificationCenter.default.removeObserver(self)
+ }
+
+ @objc
+ private func didEnterBackgroundNotification() {
+ onPIP(_btn: UIButton())
+ }
+
+ @IBAction func onPIP(_btn: UIButton) {
+ if let currentPipController = pipController {
currentPipController.pipController.startPictureInPicture()
+ } else {
+ showAlert(message: "PIP Support iOS 15+".localized)
}
-
}
override func willMove(toParent parent: UIViewController?) {
@@ -198,23 +216,35 @@ extension PictureInPictureMain: AgoraRtcEngineDelegate {
// to unlink your view from sdk, so that your view reference will be released
// note the video will stay at its last frame, to completely remove it
// you will need to remove the EAGL sublayer from your binded view
- remoteVideo.videoView.reset()
+// remoteVideo.videoView.reset()
+ }
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didVideoMuted muted: Bool, byUid uid: UInt) {
+ guard muted else { return }
+ let pixelBuffer = MediaUtils.cvPixelBufferRef(from: UIImage(named: "agora-logo") ?? UIImage()).takeRetainedValue()
+ let videoFrame = AgoraOutputVideoFrame()
+ videoFrame.pixelBuffer = pixelBuffer
+ videoFrame.width = Int32(remoteVideo.videoView.frame.width)
+ videoFrame.height = Int32(remoteVideo.videoView.frame.height)
+ remoteVideo.videoView.renderVideoPixelBuffer(videoFrame)
}
}
// MARK: - AgoraVideoDataFrameProtocol
extension PictureInPictureMain: AgoraVideoFrameDelegate {
- func onCapture(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
+ func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
true
}
func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool {
- remoteVideo.videoView.renderVideoData(videoFrame)
+ remoteVideo.videoView.renderVideoPixelBuffer(videoFrame)
return true
}
func getVideoFormatPreference() -> AgoraVideoFormat {
- .I420
+ .cvPixelBGRA
+ }
+ func getRotationApplied() -> Bool {
+ true
}
}
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RTMPStreaming/RTMPStreaming.swift b/iOS/APIExample/APIExample/Examples/Advanced/RTMPStreaming/RTMPStreaming.swift
index dca37e338..7670ca7dd 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/RTMPStreaming/RTMPStreaming.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/RTMPStreaming/RTMPStreaming.swift
@@ -126,8 +126,8 @@ class RTMPStreamingMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
@@ -298,7 +298,7 @@ extension RTMPStreamingMain: AgoraRtcEngineDelegate {
self.showAlert(title: "Notice", message: "RTMP Publish Success")
isPublished = true
} else if(state == .failure) {
- self.showAlert(title: "Error", message: "RTMP Publish Failed: \(errCode.rawValue)")
+// self.showAlert(title: "Error", message: "RTMP Publish Failed: \(errCode.rawValue)")
} else if(state == .idle) {
self.showAlert(title: "Notice", message: "RTMP Publish Stopped")
isPublished = false
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RawVideoData/RawVideoData.swift b/iOS/APIExample/APIExample/Examples/Advanced/RawVideoData/RawVideoData.swift
index f507ba65a..dfea924a7 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/RawVideoData/RawVideoData.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/RawVideoData/RawVideoData.swift
@@ -61,8 +61,8 @@ class RawVideoDataViewController: BaseViewController {
agoraKit.startPreview()
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
NetworkManager.shared.generateToken(channelName: channelId, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelId, uid: 0, mediaOptions: option, joinSuccess: nil)
@@ -97,7 +97,11 @@ class RawVideoDataViewController: BaseViewController {
// MARK: - AgoraVideoFrameDelegate
extension RawVideoDataViewController: AgoraVideoFrameDelegate {
- func onCapture(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
+ func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
+ true
+ }
+
+ func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool {
if isSnapShoting {
isSnapShoting = false
let image = MediaUtils.pixelBuffer(toImage: videoFrame.pixelBuffer!)
@@ -107,10 +111,6 @@ extension RawVideoDataViewController: AgoraVideoFrameDelegate {
}
return true
}
-
- func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool {
- return true
- }
}
// MARK: - AgoraRtcEngineDelegate
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RhythmPlayer/RhythmPlayer.swift b/iOS/APIExample/APIExample/Examples/Advanced/RhythmPlayer/RhythmPlayer.swift
index c4e3e6d0e..5ff71133f 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/RhythmPlayer/RhythmPlayer.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/RhythmPlayer/RhythmPlayer.swift
@@ -97,8 +97,8 @@ class RhythmPlayerMain : BaseViewController
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCustomAudioTrack = true
- option.publishRhythmPlayerTrack = true
+ option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishRhythmPlayerTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard
index 6d0c681fd..50034e2f5 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,7 +18,7 @@
-
+
@@ -70,7 +70,7 @@
-
+
@@ -84,7 +84,7 @@
-
+
@@ -123,32 +123,32 @@
-
-
+
+
-
+
-
+
-
+
-
+
-
+
@@ -167,16 +167,16 @@
-
+
-
+
-
+
@@ -192,11 +192,37 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -221,6 +247,7 @@
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift
index 7ed59d5e7..f8afc9fa8 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift
@@ -40,7 +40,14 @@ class ScreenShareMain: BaseViewController {
@IBOutlet weak var container: AGEVideoContainer!
@IBOutlet weak var broadcasterPickerContainer: UIView!
+ @IBOutlet weak var fpsButton: UIButton!
+
var agoraKit: AgoraRtcEngineKit!
+ private lazy var pickerView: PickerView = {
+ let pickerView = PickerView()
+ pickerView.dataArray = fpsDataSources.map({ "\($0)" })
+ return pickerView
+ }()
private lazy var screenParams: AgoraScreenCaptureParameters2 = {
let params = AgoraScreenCaptureParameters2()
params.captureVideo = true
@@ -50,7 +57,7 @@ class ScreenShareMain: BaseViewController {
params.audioParams = audioParams
let videoParams = AgoraScreenVideoParameters()
videoParams.dimensions = screenShareVideoDimension()
- videoParams.frameRate = .fps30
+ videoParams.frameRate = .fps15
videoParams.bitrate = AgoraVideoBitrateStandard
params.videoParams = videoParams
return params
@@ -65,6 +72,7 @@ class ScreenShareMain: BaseViewController {
}()
private var systemBroadcastPicker: RPSystemBroadcastPickerView?
+ private var fpsDataSources: [Int] = [15, 30, 60]
// indicate if current instance has joined channel
var isJoined: Bool = false
@@ -178,6 +186,14 @@ class ScreenShareMain: BaseViewController {
@IBAction func captureSignalVolumeSlider(_ sender: UISlider) {
screenParams.audioParams.captureSignalVolume = Int(sender.value * 100)
}
+ @IBAction func clickFpsButton(_ sender: UIButton) {
+ pickerView.pickerViewSelectedValueClosure = { [weak self] value in
+ guard let self = self else { return }
+ self.fpsButton.setTitle("\(value)fps", for: .normal)
+ self.screenParams.videoParams.frameRate = AgoraVideoFrameRate(rawValue: Int(value) ?? 15) ?? .fps15
+ }
+ AlertManager.show(view: pickerView, alertPostion: .bottom)
+ }
func isScreenShareUid(uid: UInt) -> Bool {
return uid >= SCREEN_SHARE_UID_MIN && uid <= SCREEN_SHARE_UID_MAX
@@ -322,3 +338,41 @@ extension ScreenShareMain: AgoraRtcEngineDelegate {
remoteVideo.statsInfo?.updateAudioStats(stats)
}
}
+
+
+extension ScreenShareMain: UIPickerViewDataSource, UIPickerViewDelegate {
+ func pickerView(_ pickerView: UIPickerView, rowHeightForComponent component: Int) -> CGFloat {
+ return 60.0
+ }
+
+ func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) {
+ print("我选择了第"+"\(row)"+"行")
+ }
+
+ func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? {
+ "\(fpsDataSources[row])fps"
+ }
+
+// // TODO: 可以设置哪一行显示特定的样式
+// func pickerView(_ pickerView: UIPickerView, viewForRow row: Int, forComponent component: Int, reusing view: UIView?) -> UIView {
+// // 创建一个对象
+// let specificView = UIView.init()
+// specificView.frame = CGRect.init(x: 10, y: 5, width: 100, height: 60)
+// specificView.backgroundColor = UIColor.magenta
+// /**
+// 创建一个标题
+// */
+// let specificLable = UILabel.init(frame: CGRect.init(x: 5, y: 0, width: 90, height: 60))
+// specificLable.text = (SourceData[row] as! String)
+// specificLable.textColor = UIColor.white
+// specificView.addSubview(specificLable)
+// return specificView
+// }
+
+ func numberOfComponents(in pickerView: UIPickerView) -> Int {
+ return 1
+ }
+ func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
+ fpsDataSources.count
+ }
+}
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift b/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift
index 4e748b26c..273e3c118 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift
@@ -118,8 +118,8 @@ class SimpleFilterMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard
index df5c4fad8..03cb0cb14 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,28 +18,14 @@
-
-
- You have not turned on the Byted beauty feature, please follow the steps below:
-1: enter ONLINE_LICENSE_KEY and ONLINE_LICENSE_SECRET in ByteBeautify- > Manager- > Core.h.
-2: open the pod 'bytedEffect' comment in Podfile.
-3: add a BytedEffectSDK folder under the iOS- > APIExample- > ByteEffectLib directory.
-4: add the Resource folder under the iOS- > APIExample- > ByteEffectLib directory.
-5: add libeffect-sdk.a libraries in the iOS- > APIExample- > ByteEffectLib directory.
-6: execute pod install.
-7: rerun the project to see the effect
-
-
-
-
-
-
+
+
-
+
-
+
@@ -47,15 +33,15 @@
-
-
+
+
-
+
-
+
@@ -63,30 +49,105 @@
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
-
+
+
+
+ You have not turned on the Byted beauty feature, please follow the steps below:
+1: enter ONLINE_LICENSE_KEY and ONLINE_LICENSE_SECRET in ByteBeautify- > Manager- > Core.h.
+2: open the pod 'bytedEffect' comment in Podfile.
+3: Create the ByteEffectLib folder in the iOS ->APIExample directory
+4: add a BytedEffectSDK folder under the iOS- > APIExample- > ByteEffectLib directory.
+5: add the Resource folder under the iOS- > APIExample- > ByteEffectLib directory.
+6: add libeffect-sdk.a libraries in the iOS- > APIExample- > ByteEffectLib directory.
+7: execute pod install.
+8: rerun the project to see the effect
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -94,10 +155,29 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m
index f17d1c9e4..10bf38b0f 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m
@@ -15,7 +15,7 @@
@interface BytedEffectVC ()
@property (weak, nonatomic) IBOutlet UILabel *tipsLabel;
-@property (weak, nonatomic) IBOutlet UIStackView *container;
+@property (weak, nonatomic) IBOutlet UIView *container;
@property (weak, nonatomic) IBOutlet UIView *localVideo;
@property (weak, nonatomic) IBOutlet UIView *remoteVideo;
@@ -44,6 +44,9 @@ - (void) initSDK {
self.rtcEngineKit = [AgoraRtcEngineKit sharedEngineWithAppId:KeyCenter.AppId delegate:self];
+ AgoraVideoEncoderConfiguration *encoderConfig = [[AgoraVideoEncoderConfiguration alloc] initWithSize:CGSizeMake(375, 667) frameRate:(AgoraVideoFrameRateFps15) bitrate:15 orientationMode:(AgoraVideoOutputOrientationModeFixedPortrait) mirrorMode:(AgoraVideoMirrorModeEnabled)];
+ [self.rtcEngineKit setVideoEncoderConfiguration:encoderConfig];
+
// setup videoFrameDelegate
[self.rtcEngineKit setVideoFrameDelegate:self];
@@ -81,6 +84,26 @@ - (void) initSDK {
}];
}
+- (IBAction)onTapSwitchCameraButton:(id)sender {
+ [self.rtcEngineKit switchCamera];
+}
+- (IBAction)onTapBeautyButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setBuauty:sender.isSelected];
+}
+- (IBAction)onTapMakeupButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setMakeup:sender.isSelected];
+}
+- (IBAction)onTapStickerButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setSticker:sender.isSelected];
+}
+- (IBAction)onTapFilterButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setFilter:sender.isSelected];
+}
+
#pragma mark - VideoFrameDelegate
- (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame {
CVPixelBufferRef pixelBuffer = videoFrame.pixelBuffer;
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h
index aff951034..51b548b2a 100755
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h
@@ -21,6 +21,11 @@
@property (nonatomic, assign) BOOL enabled;
-
+ (ByteDanceFilter *)shareManager;
+
+- (void)setBuauty: (BOOL)isSelected;
+- (void)setMakeup: (BOOL)isSelected;
+- (void)setSticker: (BOOL)isSelected;
+- (void)setFilter: (BOOL)isSelected;
+
@end
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m
index 08c3bcfe7..328190ef3 100755
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m
@@ -39,19 +39,53 @@ - (instancetype)init
[_processor setEffectOn:YES];
// [_processor setFilterPath:@"Filter_32_Po10"];
- [_processor setStickerPath:@"test_sticker"];
+// [_processor setStickerPath:@"test_sticker"];
[_processor updateComposerNodes:@[@"/beauty_IOS_lite"]];
- [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.8];
- [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.9];
- [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"sharp" intensity:0.96];
- [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" intensity:0.95];
- [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0.95];
- [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_MovNose" intensity:0.0];
}
return self;
}
+- (void)setBuauty: (BOOL)isSelected {
+#if __has_include("bef_effect_ai_api.h")
+ if (isSelected) {
+ [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.6];
+ [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.6];
+ } else {
+ [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0];
+ [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0];
+ }
+#endif
+}
+- (void)setMakeup: (BOOL)isSelected {
+#if __has_include("bef_effect_ai_api.h")
+ if (isSelected) {
+ [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0.6];
+ } else {
+ [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0];
+ }
+#endif
+}
+- (void)setSticker: (BOOL)isSelected {
+#if __has_include("bef_effect_ai_api.h")
+ if (isSelected) {
+ [_processor setStickerPath:@"wochaotian"];
+ } else {
+ [_processor setStickerPath:@""];
+ }
+#endif
+}
+- (void)setFilter: (BOOL)isSelected {
+#if __has_include("bef_effect_ai_api.h")
+ if (isSelected) {
+ [_processor setFilterPath:@"Filter_02_14"];
+ [_processor setFilterIntensity:0.4];
+ } else {
+ [_processor setFilterIntensity:0];
+ }
+#endif
+}
+
#pragma mark - VideoFilterDelegate
/// process your video frame here
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h
index 906dcb32b..069aafd07 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h
@@ -1,6 +1,6 @@
#import "macro.h"
-#define LICENSE_NAME ((const char *)"labcv_test_20220210_20230210_com.bytedance.labcv.demo_4.2.1.licbag")
+#define LICENSE_NAME ((const char *)"agora_test_20220805_20230208_io.agora.entfull_4.2.3.licbag")
#define ONLINE_LICENSE_KEY ((const char *)"jiaoyang_test")
#define ONLINE_LICENSE_SECRET ((const char *)"04273924-9a77-11eb-94da-0c42a1b32a30")
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings
index 46475970f..fae52c988 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings
@@ -23,8 +23,14 @@
"j6s-Pm-fSS.text" = "您还没有开启字节美颜功能, 请按如下步骤操作:
1: 在ByteBeautify->Manager->Core.h中填写ONLINE_LICENSE_KEY和ONLINE_LICENSE_SECRET
2: 打开Podfile中 pod 'bytedEffect' 注释
-3: 在iOS->APIExample->ByteEffectLib目录下添加BytedEffectSDK文件夹
-4: 在iOS->APIExample->ByteEffectLib目录下添加Resource文件夹
-5: 在iOS->APIExample->ByteEffectLib目录下添加libeffect-sdk.a库
-6: 执行pod install
-7: 重新运行项目查看效果";
+3: 在iOS->APIExample目录下创建ByteEffectLib文件夹
+4: 在iOS->APIExample->ByteEffectLib目录下添加BytedEffectSDK文件夹
+5: 在iOS->APIExample->ByteEffectLib目录下添加Resource文件夹
+6: 在iOS->APIExample->ByteEffectLib目录下添加libeffect-sdk.a库
+7: 执行pod install
+8: 重新运行项目查看效果";
+
+"8ag-bw-I0V.normalTitle" = "美颜";
+"dow-FW-rpo.normalTitle" = "美妆";
+"dSm-Zl-ccL.normalTitle" = "贴纸";
+"qKk-jv-oyk.normalTitle" = "滤镜";
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard
index f8dff7522..c942c1207 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,27 +18,14 @@
-
-
- You haven't turned on the phase core beauty function, please follow the following steps:
-1: replace license in FUBeautify- > Manager- > authpack.
-2: open the pod 'fuLib' comment in Podfile.
-3: add FURenderKit.framework under the iOS- > APIExample- > FULib directory.
-4: add the Resources resource folder under the iOS- > APIExample- > FULib directory
-5: Add Resource ->Sticker Folder under iOS ->APIExample ->FULib
-6: execute pod install.
-7: rerun the project to see the effect
-
-
-
-
-
-
+
+
-
+
-
+
@@ -46,15 +33,15 @@
-
-
+
+
-
+
-
+
@@ -62,30 +49,105 @@
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
-
+
+
+
+ You haven't turned on the phase core beauty function, please follow the following steps:
+1: replace license in FUBeautify- > Manager- > authpack.
+2: open the pod 'fuLib' comment in Podfile.
+3: Create FULib folder under iOS ->APIExample directory
+4: add FURenderKit.framework under the iOS- > APIExample- > FULib directory.
+5: add the Resources resource folder under the iOS- > APIExample- > FULib directory
+6: Add Resource ->Sticker Folder under iOS ->APIExample ->FULib
+7: execute pod install.
+8: rerun the project to see the effect
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -93,10 +155,29 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/FUBeautifyVC.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/FUBeautifyVC.m
index c9fa498da..983652f75 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/FUBeautifyVC.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/FUBeautifyVC.m
@@ -14,7 +14,7 @@
@interface FUBeautifyVC ()
@property (weak, nonatomic) IBOutlet UILabel *tipsLabel;
-@property (weak, nonatomic) IBOutlet UIStackView *container;
+@property (weak, nonatomic) IBOutlet UIView *container;
@property (weak, nonatomic) IBOutlet UIView *localVideo;
@property (weak, nonatomic) IBOutlet UIView *remoteVideo;
@@ -52,9 +52,6 @@ - (void) initSDK {
// add FaceUnity filter and add to process manager
self.videoFilter = [FUManager shareManager];
- // add Sticker
- [self.videoFilter setSticker:@"fashi"];
-
// set up local video to render your local camera preview
AgoraRtcVideoCanvas *videoCanvas = [AgoraRtcVideoCanvas new];
videoCanvas.uid = 0;
@@ -80,6 +77,26 @@ - (void) initSDK {
}];
}];
}
+- (IBAction)onTapCameraSwitch:(id)sender {
+ [self.rtcEngineKit switchCamera];
+}
+
+- (IBAction)onTapBeautyButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setBuauty:sender.isSelected];
+}
+- (IBAction)onTapMakeupButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setMakeup:sender.isSelected];
+}
+- (IBAction)onTapStickerButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setSticker:sender.isSelected];
+}
+- (IBAction)onTapFilterButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoFilter setFilter:sender.isSelected];
+}
#pragma mark - VideoFrameDelegate
- (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame {
@@ -96,7 +113,7 @@ - (AgoraVideoFrameProcessMode)getVideoFrameProcessMode{
}
- (BOOL)getMirrorApplied{
- return NO;
+ return YES;
}
- (BOOL)getRotationApplied {
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h
index f458e23c5..0f6908032 100755
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h
@@ -39,7 +39,9 @@
/// 更新美颜磨皮效果(根据人脸检测置信度设置不同磨皮效果)
- (void)updateBeautyBlurEffect;
-/// 设置贴纸
-- (void)setSticker: (NSString *)stickerName;
+- (void)setBuauty: (BOOL)isSelected;
+- (void)setMakeup: (BOOL)isSelected;
+- (void)setSticker: (BOOL)isSelected;
+- (void)setFilter: (BOOL)isSelected;
@end
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m
index 782c62d25..827abb55f 100755
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m
@@ -66,18 +66,6 @@ - (instancetype)init
NSString *bodyAIPath = [bundle pathForResource:@"model/ai_human_processor" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"ai_human_processor" ofType:@"bundle"];
[FUAIKit loadAIModeWithAIType:FUAITYPE_HUMAN_PROCESSOR dataPath:bodyAIPath];
- // 加载默认美颜效果
- NSString *beautyPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"face_beautification" ofType:@"bundle"];
- FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"];
- // 默认均匀磨皮
- beauty.heavyBlur = 0;
- beauty.blurType = 3;
- // 默认自定义脸型
- beauty.faceShape = 4;
- beauty.colorLevel = 0.8;
- beauty.redLevel = 0.8;
- [FURenderKit shareRenderKit].beauty = beauty;
-
CFAbsoluteTime endTime = (CFAbsoluteTimeGetCurrent() - startTime);
NSString *path = [bundle pathForResource:@"graphics/tongue" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"tongue" ofType:@"bundle"];
[FUAIKit loadTongueMode:path];
@@ -108,13 +96,75 @@ - (void)destoryItems {
#endif
}
+- (void)setBuauty: (BOOL)isSelected {
+#if __has_include()
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"];
+ NSString *beautyPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"];
+ FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"];
+ // 默认均匀磨皮
+ beauty.heavyBlur = 0;
+ beauty.blurType = 3;
+ [FURenderKit shareRenderKit].beauty = beauty;
+ } else {
+ [FURenderKit shareRenderKit].beauty = nil;
+ }
+#endif
+}
+- (void)setMakeup: (BOOL)isSelected {
+#if __has_include()
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"];
+ NSString *beautyPath = [bundle pathForResource:@"graphics/face_makeup" ofType:@"bundle"];
+ FUMakeup *makeup = [[FUMakeup alloc] initWithPath:beautyPath name:@"face_makeup"];
+ makeup.isMakeupOn = YES;
+ [FURenderKit setLogLevel:FU_LOG_LEVEL_DEBUG];
+
+ [FURenderKit shareRenderKit].makeup = makeup;
+ [FURenderKit shareRenderKit].makeup.enable = isSelected;
+
+ NSString *makeupPath = [bundle pathForResource:@"美妆/ziyun" ofType:@"bundle"];
+ FUItem *makeupItem = [[FUItem alloc] initWithPath:makeupPath name:@"ziyun"];
+ [makeup updateMakeupPackage:makeupItem needCleanSubItem:NO];
+ makeup.intensity = 0.9;
+ } else {
+ [FURenderKit shareRenderKit].makeup.enable = NO;
+ [FURenderKit shareRenderKit].makeup = nil;
+ }
+#endif
+}
+- (void)setSticker: (BOOL)isSelected {
+#if __has_include()
+ if (isSelected) {
+ [self setStickerPath:@"DaisyPig"];
+ } else {
+ [[FURenderKit shareRenderKit].stickerContainer removeAllSticks];
+ }
+#endif
+}
+- (void)setFilter: (BOOL)isSelected {
+#if __has_include()
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"];
+ NSString *beautyPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"];
+ FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"];
+ beauty.filterName = FUFilterMiTao1;
+ beauty.filterLevel = 0.8;
+ [FURenderKit shareRenderKit].beauty = beauty;
+ } else {
+ [FURenderKit shareRenderKit].beauty = nil;
+ }
+#endif
+}
+
+
- (void)onCameraChange {
#if __has_include()
[FUAIKit resetTrackedResult];
#endif
}
-- (void)setSticker: (NSString *)stickerName {
+- (void)setStickerPath: (NSString *)stickerName {
NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"];
NSString *path = [bundle pathForResource:[NSString stringWithFormat:@"贴纸/%@", stickerName] ofType:@"bundle"];
if (!path) {
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings
index f07cd2e05..242588805 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings
@@ -23,8 +23,14 @@
"j6s-Pm-fSS.text" = "您还没有开启相芯美颜功能, 请按如下步骤操作:
1: 在FUBeautify->Manager->authpack中替换license
2: 打开Podfile中 pod 'fuLib' 注释
-3: 在iOS->APIExample->FULib目录下添加FURenderKit.framework
-4: 在iOS->APIExample->FULib目录下添加Resources资源文件夹
-5: 在iOS->APIExample->FULib目录下添加Resource->贴纸文件夹
-6: 执行pod install
-7: 重新运行项目查看效果";
+3: 在iOS->APIExample目录下创建FULib文件夹
+4: 在iOS->APIExample->FULib目录下添加FURenderKit.framework
+5: 在iOS->APIExample->FULib目录下添加Resources资源文件夹
+6: 在iOS->APIExample->FULib目录下添加Resource->贴纸文件夹
+7: 执行pod install
+8: 重新运行项目查看效果";
+
+"QZu-iN-Fi6.normalTitle" = "美颜";
+"KHn-B1-epr.normalTitle" = "美妆";
+"aoR-43-iFs.normalTitle" = "贴纸";
+"UYi-3l-nYz.normalTitle" = "滤镜";
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard
index 943b2bf40..61348e6c8 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,27 +18,14 @@
-
-
- You have not enabled the beauty feature of Shang Tang, please follow the steps below:
-1: replace SENSEME.lic in SenseBeautify- > Manager- >.
-2: replace the Bundle identifier bound by license
-3: open the pod 'senseLib' comment in Podfile.
-4: add a remoteSourcesLib folder under the iOS- > APIExample- > SenseLib directory.
-5: add the st_mobile_sdk folder under the iOS- > APIExample- > SenseLib directory.
-6: execute pod install.
-7: rerun the project to see the effect
-
-
-
-
-
-
+
+
-
+
-
+
@@ -46,15 +33,15 @@
-
-
+
+
-
+
-
+
@@ -62,30 +49,104 @@
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
-
+
+
+
+ You have not enabled the beauty feature of Shang Tang, please follow the steps below:
+1: replace SENSEME.lic in SenseBeautify- > Manager- >.
+2: replace the Bundle identifier bound by license
+3: open the pod 'senseLib' comment in Podfile.
+4: Create the SenseLib folder in the iOS ->APIExample directory
+5: add a remoteSourcesLib folder under the iOS- > APIExample- > SenseLib directory.
+6: add the st_mobile_sdk folder under the iOS- > APIExample- > SenseLib directory.
+7: execute pod install.
+8: rerun the project to see the effect
+
+
+
+
+
-
+
+
-
-
+
+
+
+
+
+
+
+
-
+
@@ -93,10 +154,29 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h
index ab898f7a9..c757c71b9 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h
@@ -13,6 +13,11 @@ NS_ASSUME_NONNULL_BEGIN
@interface VideoProcessingManager : NSObject
+- (void)setBuauty: (BOOL)isSelected;
+- (void)setMakeup: (BOOL)isSelected;
+- (void)setSticker: (BOOL)isSelected;
+- (void)setFilter: (BOOL)isSelected;
+
- (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer;
@end
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m
index 337949054..26ee21b79 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m
@@ -26,6 +26,7 @@ @interface VideoProcessingManager ()
@property (nonatomic) dispatch_queue_t renderQueue;
///贴纸id
@property (nonatomic, assign) int stickerId;
+@property (nonatomic, assign) int filterId;
@end
@@ -43,18 +44,78 @@ - (instancetype)init {
[self.effectsProcess setModelPath:[bundle pathForResource:@"model" ofType:@"bundle"]];
[EAGLContext setCurrentContext:self.glContext];
self.effectsProcess.detectConfig = ST_MOBILE_FACE_DETECT;
- [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0.7];
- [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0.8];
- [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0.6];
- [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:1.0];
- [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:1.0];
- [self.effectsProcess setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0.7];
+
#endif
});
}
return self;
}
+- (void)setBuauty: (BOOL)isSelected {
+#if __has_include("st_mobile_common.h")
+ if (isSelected) {
+ [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0.7];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0.8];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0.6];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:1.0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:1.0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0.7];
+ } else {
+ [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:0];
+ [self.effectsProcess setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0];
+ }
+#endif
+}
+- (void)setMakeup: (BOOL)isSelected {
+#if __has_include("st_mobile_common.h")
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"];
+ NSString *path = [bundle pathForResource:@"qise.zip" ofType:nil];
+ __weak VideoProcessingManager *weakself = self;
+ [self.effectsProcess addStickerWithPath:path callBack:^(st_result_t state, int sticker, uint64_t action) {
+ [weakself.effectsProcess setPackageId:sticker groupType:EFFECT_BEAUTY_GROUP_MAKEUP strength:0.5];
+ weakself.stickerId = sticker;
+ }];
+ } else {
+ [self.effectsProcess removeSticker:self.stickerId];
+ self.stickerId = 0;
+ }
+#endif
+}
+- (void)setSticker: (BOOL)isSelected {
+#if __has_include("st_mobile_common.h")
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"];
+ NSString *path = [bundle pathForResource:@"lianxingface.zip" ofType:nil];
+ [self.effectsProcess setStickerWithPath:path callBack:^(st_result_t state, int stickerId, uint64_t action) {
+
+ }];
+ } else {
+ [self.effectsProcess cleareStickers];
+ }
+#endif
+}
+- (void)setFilter: (BOOL)isSelected {
+#if __has_include("st_mobile_common.h")
+ if (isSelected) {
+ NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"];
+ NSString *path = [bundle pathForResource:@"qise.zip" ofType:nil];
+ __weak VideoProcessingManager *weakself = self;
+ [self.effectsProcess addStickerWithPath:path callBack:^(st_result_t state, int sticker, uint64_t action) {
+ [weakself.effectsProcess setPackageId:sticker groupType:EFFECT_BEAUTY_GROUP_FILTER strength:0.5];
+ weakself.filterId = sticker;
+ }];
+ } else {
+ [self.effectsProcess removeSticker:self.filterId];
+ self.filterId = 0;
+ }
+#endif
+}
+
- (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer {
if (!pixelBuffer) return pixelBuffer;
@@ -69,6 +130,19 @@ - (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
int width = (int)CVPixelBufferGetWidth(pixelBuffer);
int heigh = (int)CVPixelBufferGetHeight(pixelBuffer);
+ if (_outTexture) {
+ int _cacheW = (int)CVPixelBufferGetWidth(_outputPixelBuffer);
+ int _cacheH = (int)CVPixelBufferGetHeight(_outputPixelBuffer);
+ if (_cacheH != heigh || _cacheW != width) {
+ GLuint testTexture = 0;
+#if __has_include("st_mobile_common.h")
+ [self.effectsProcess deleteTexture:&testTexture pixelBuffer:&_outputPixelBuffer cvTexture:&_outputCVTexture];
+#endif
+ _outTexture = 0;
+ _outputPixelBuffer = NULL;
+ _outputCVTexture = NULL;
+ }
+ }
if(!_outTexture){
#if __has_include("st_mobile_common.h")
[self.effectsProcess createGLObjectWith:width
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m
index ad7531c5c..9ad4b0794 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m
@@ -16,7 +16,7 @@
@interface SenseBeautifyVC ()
@property (weak, nonatomic) IBOutlet UILabel *tipsLabel;
-@property (weak, nonatomic) IBOutlet UIStackView *container;
+@property (weak, nonatomic) IBOutlet UIView *container;
@property (weak, nonatomic) IBOutlet UIView *localVideo;
@property (weak, nonatomic) IBOutlet UIView *remoteVideo;
@@ -97,6 +97,26 @@ - (void) initSDK {
}];
}
+- (IBAction)onTapSwitchCameraButton:(id)sender {
+ [self.rtcEngineKit switchCamera];
+}
+- (IBAction)onTapBeautyButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoProcessing setBuauty:sender.isSelected];
+}
+- (IBAction)onTapMakeupButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoProcessing setMakeup:sender.isSelected];
+}
+- (IBAction)onTapStickerButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoProcessing setSticker:sender.isSelected];
+}
+- (IBAction)onTapFilterButton:(UIButton *)sender {
+ [sender setSelected:!sender.isSelected];
+ [self.videoProcessing setFilter:sender.isSelected];
+}
+
#pragma mark - VideoFrameDelegate
- (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame {
CVPixelBufferRef pixelBuffer = [self.videoProcessing videoProcessHandler:videoFrame.pixelBuffer];
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings
index 446492001..298b5f9f6 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings
+++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings
@@ -24,7 +24,14 @@
1: 在SenseBeautify->Manager->替换SENSEME.lic
2: 替换license绑定的Bundle identifier
3: 打开Podfile中 pod 'senseLib' 注释
-4: 在iOS->APIExample->SenseLib目录下添加remoteSourcesLib文件夹
-5: 在iOS->APIExample->SenseLib目录下添加st_mobile_sdk文件夹
-6: 执行pod install
-7: 重新运行项目查看效果";
+4: 在iOS->APIExample目录下创建SenseLib文件夹
+5: 在iOS->APIExample->SenseLib目录下添加remoteSourcesLib文件夹
+6: 在iOS->APIExample->SenseLib目录下添加st_mobile_sdk文件夹
+7: 执行pod install
+8: 重新运行项目查看效果";
+
+"CrL-Yf-Cev.normalTitle" = "美颜";
+"3hp-ZM-MMW.normalTitle" = "美妆";
+"UdR-D4-uNu.normalTitle" = "贴纸";
+"K3f-4k-VQ1.normalTitle" = "滤镜";
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift b/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift
index a9fd84226..cfb388817 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift
@@ -160,8 +160,8 @@ class VideoChatMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift b/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift
index 6f482b2cb..d20445674 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift
@@ -114,8 +114,8 @@ class VideoMetadataMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard
index 6c1c93ae6..3c367780c 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -337,6 +337,7 @@
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift
index d039ee95c..3813d9b74 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift
@@ -78,6 +78,8 @@ class VideoProcessMain : BaseViewController
config.logConfig = logConfig
agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self)
+ // enable filters
+ agoraKit.enableExtension(withVendor: "agora_video_filters_clear_vision", extension: "clear_vision", enabled: true, sourceType: .primaryCamera)
// Configuring Privatization Parameters
Util.configPrivatization(agoraKit: agoraKit)
// make myself a broadcaster
@@ -112,8 +114,8 @@ class VideoProcessMain : BaseViewController
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
@@ -249,11 +251,18 @@ class VideoProcessMain : BaseViewController
break
case 2:
source.backgroundSourceType = .blur
- source.blurDegree = .high;
+ source.blurDegree = .high
break
+
+ case 3:
+ let videoPath = Bundle.main.path(forResource: "sample", ofType: "mov")
+ source.backgroundSourceType = .video
+ source.source = videoPath
+
default:
break
}
+ source.backgroundSourceType = virtualBgSwitch.isOn ? source.backgroundSourceType : .none
let result = agoraKit.enableVirtualBackground(virtualBgSwitch.isOn, backData: source, segData: AgoraSegmentationProperty())
print("result == \(result)")
}
@@ -304,6 +313,7 @@ extension VideoProcessMain: AgoraRtcEngineDelegate {
// the view to be binded
videoCanvas.view = remoteVideo.videoView
videoCanvas.renderMode = .hidden
+ videoCanvas.enableAlphaMask = true
agoraKit.setupRemoteVideo(videoCanvas)
}
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings
index 196ec2fc8..33cc66c5e 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings
@@ -32,6 +32,8 @@
/* Class = "UISegmentedControl"; Qhf-Ob-NYA.segmentTitles[2] = "Blur"; ObjectID = "Qhf-Ob-NYA"; */
"Qhf-Ob-NYA.segmentTitles[2]" = "毛玻璃";
+"Qhf-Ob-NYA.segmentTitles[3]" = "视频";
+
/* Class = "UILabel"; text = "Low light Enhancement"; ObjectID = "RiO-Eg-x0D"; */
"RiO-Eg-x0D.text" = "暗光增强";
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
index 8f241f996..b50768cc2 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard
@@ -1,9 +1,9 @@
-
+
-
+
@@ -18,7 +18,7 @@
-
+
@@ -70,17 +70,17 @@
-
+
-
+
-
+
-
+
@@ -93,7 +93,7 @@
-
+
@@ -122,7 +122,7 @@
-
+
@@ -149,7 +149,7 @@
-
+
@@ -176,7 +176,7 @@
-
+
@@ -203,7 +203,7 @@
-
+
@@ -269,7 +269,7 @@
-
+
@@ -278,7 +278,7 @@
-
+
@@ -296,10 +296,10 @@
-
+
-
+
@@ -323,10 +323,10 @@
-
+
-
+
@@ -350,7 +350,7 @@
-
+
@@ -359,7 +359,7 @@
-
+
@@ -377,7 +377,7 @@
-
+
@@ -386,7 +386,7 @@
-
+
@@ -404,7 +404,7 @@
-
+
@@ -413,7 +413,7 @@
-
+
@@ -438,30 +438,47 @@
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
+
+
+
+
+
+
-
+
+
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/VoiceChanger.swift b/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/VoiceChanger.swift
index 710599c07..48955d110 100644
--- a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/VoiceChanger.swift
+++ b/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/VoiceChanger.swift
@@ -368,6 +368,10 @@ class VoiceChangerMain: BaseViewController {
agoraKit.setLocalVoiceReverbOf(reverbType, withValue: value)
}
+ @IBAction func onVoiceFormantChange(_ sender: UISlider) {
+ agoraKit.setLocalVoiceFormant(Double(sender.value))
+ }
+
override func viewDidLoad(){
super.viewDidLoad()
diff --git a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/Base.lproj/JoinChannelVideoRecorder.storyboard b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/Base.lproj/JoinChannelVideoRecorder.storyboard
new file mode 100644
index 000000000..ad4c20098
--- /dev/null
+++ b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/Base.lproj/JoinChannelVideoRecorder.storyboard
@@ -0,0 +1,102 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/JoinChannelVideoRecorder.swift b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/JoinChannelVideoRecorder.swift
new file mode 100644
index 000000000..1fddef9ff
--- /dev/null
+++ b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/JoinChannelVideoRecorder.swift
@@ -0,0 +1,388 @@
+//
+// JoinChannelVideo.swift
+// APIExample
+//
+// Created by 张乾泽 on 2020/4/17.
+// Copyright © 2020 Agora Corp. All rights reserved.
+//
+import UIKit
+import AGEVideoLayout
+import AgoraRtcKit
+
+class JoinChannelVideoRecorderEntry : UIViewController
+{
+ @IBOutlet weak var joinButton: UIButton!
+ @IBOutlet weak var channelTextField: UITextField!
+ let identifier = "JoinChannelVideoRecorder"
+ @IBOutlet var resolutionBtn: UIButton!
+ @IBOutlet var fpsBtn: UIButton!
+ @IBOutlet var orientationBtn: UIButton!
+ var width:Int = 960, height:Int = 540, orientation:AgoraVideoOutputOrientationMode = .adaptative, fps = 15
+
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ }
+
+
+ func getResolutionAction(width:Int, height:Int) -> UIAlertAction{
+ return UIAlertAction(title: "\(width)x\(height)", style: .default, handler: {[unowned self] action in
+ self.width = width
+ self.height = height
+ self.resolutionBtn.setTitle("\(width)x\(height)", for: .normal)
+ })
+ }
+
+ func getFpsAction(_ fps:Int) -> UIAlertAction{
+ return UIAlertAction(title: "\(fps)fps", style: .default, handler: {[unowned self] action in
+ self.fps = fps
+ self.fpsBtn.setTitle("\(fps)fps", for: .normal)
+ })
+ }
+
+ func getOrientationAction(_ orientation:AgoraVideoOutputOrientationMode) -> UIAlertAction{
+ return UIAlertAction(title: "\(orientation.description())", style: .default, handler: {[unowned self] action in
+ self.orientation = orientation
+ self.orientationBtn.setTitle("\(orientation.description())", for: .normal)
+ })
+ }
+
+ @IBAction func setResolution(){
+ let alert = UIAlertController(title: "Set Resolution".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet)
+ alert.addAction(getResolutionAction(width: 90, height: 90))
+ alert.addAction(getResolutionAction(width: 160, height: 120))
+ alert.addAction(getResolutionAction(width: 320, height: 240))
+ alert.addAction(getResolutionAction(width: 960, height: 540))
+ alert.addAction(getResolutionAction(width: 1280, height: 720))
+ alert.addCancelAction()
+ present(alert, animated: true, completion: nil)
+ }
+
+ @IBAction func setFps(){
+ let alert = UIAlertController(title: "Set Fps".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet)
+ alert.addAction(getFpsAction(10))
+ alert.addAction(getFpsAction(15))
+ alert.addAction(getFpsAction(24))
+ alert.addAction(getFpsAction(30))
+ alert.addAction(getFpsAction(60))
+ alert.addCancelAction()
+ present(alert, animated: true, completion: nil)
+ }
+
+ @IBAction func setOrientation(){
+ let alert = UIAlertController(title: "Set Orientation".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet)
+ alert.addAction(getOrientationAction(.adaptative))
+ alert.addAction(getOrientationAction(.fixedLandscape))
+ alert.addAction(getOrientationAction(.fixedPortrait))
+ alert.addCancelAction()
+ present(alert, animated: true, completion: nil)
+ }
+
+ @IBAction func doJoinPressed(sender: UIButton) {
+ guard let channelName = channelTextField.text else {return}
+ //resign channel text field
+ channelTextField.resignFirstResponder()
+
+ let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil)
+ // create new view controller every time to ensure we get a clean vc
+ guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? BaseViewController else {return}
+ newViewController.title = channelName
+ newViewController.configs = ["channelName":channelName, "resolution":CGSize(width: width, height: height), "fps": fps, "orientation": orientation]
+ navigationController?.pushViewController(newViewController, animated: true)
+ }
+}
+
+class JoinChannelVideoRecorder: BaseViewController {
+ private lazy var localRecordButton: UIButton = {
+ let button = UIButton()
+ button.translatesAutoresizingMaskIntoConstraints = false
+ button.setTitle("Recording".localized, for: .normal)
+ button.setTitle("Stop Recording".localized, for: .selected)
+ button.setTitleColor(.red, for: .normal)
+ button.setTitleColor(.red, for: .selected)
+ button.titleLabel?.font = .systemFont(ofSize: 14)
+ button.borderColor = UIColor.red.cgColor
+ button.borderWidth = 1
+ button.cornerRadius = 3
+ button.addTarget(self, action: #selector(onTapLocalRecordButton(sender:)), for: .touchUpInside)
+ return button
+ }()
+ private lazy var remoteRecordButton: UIButton = {
+ let button = UIButton()
+ button.translatesAutoresizingMaskIntoConstraints = false
+ button.setTitle("Recording".localized, for: .normal)
+ button.setTitle("Stop Recording".localized, for: .selected)
+ button.setTitleColor(.red, for: .normal)
+ button.setTitleColor(.red, for: .selected)
+ button.titleLabel?.font = .systemFont(ofSize: 14)
+ button.borderColor = UIColor.red.cgColor
+ button.borderWidth = 1
+ button.cornerRadius = 3
+ button.isHidden = true
+ button.addTarget(self, action: #selector(onTapRemoteRecordButton(sender:)), for: .touchUpInside)
+ return button
+ }()
+ private var storagePath: String {
+ let documentPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
+ return documentPath ?? ""
+ }
+ private lazy var localUid: UInt = UInt.random(in: 1...99999)
+ private var remoteUid: UInt = 0 {
+ didSet {
+ remoteRecordButton.isHidden = remoteUid == 0
+ }
+ }
+ private lazy var localRecord: AgoraMediaRecorder = {
+ let streamInfo = AgoraRecorderStreamInfo()
+ streamInfo.channelId = title ?? ""
+ streamInfo.uid = localUid
+ let record = agoraKit.createMediaRecorder(withInfo: streamInfo)
+ record?.setMediaRecorderDelegate(self)
+ return record!
+ }()
+ private lazy var remoteRecord: AgoraMediaRecorder = {
+ let streamInfo = AgoraRecorderStreamInfo()
+ streamInfo.channelId = title ?? ""
+ streamInfo.uid = remoteUid
+ let record = agoraKit.createMediaRecorder(withInfo: streamInfo)
+ record?.setMediaRecorderDelegate(self)
+ return record!
+ }()
+ var localVideo = Bundle.loadVideoView(type: .local, audioOnly: false)
+ var remoteVideo = Bundle.loadVideoView(type: .remote, audioOnly: false)
+
+ @IBOutlet weak var container: AGEVideoContainer!
+ var agoraKit: AgoraRtcEngineKit!
+
+ // indicate if current instance has joined channel
+ var isJoined: Bool = false
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ // layout render view
+ localVideo.setPlaceholder(text: "Local Host".localized)
+ remoteVideo.setPlaceholder(text: "Remote Host".localized)
+ container.layoutStream(views: [localVideo, remoteVideo])
+
+ localVideo.addSubview(localRecordButton)
+ remoteVideo.addSubview(remoteRecordButton)
+ localRecordButton.bottomAnchor.constraint(equalTo: localVideo.bottomAnchor, constant: -10).isActive = true
+ localRecordButton.trailingAnchor.constraint(equalTo: localVideo.trailingAnchor, constant: -10).isActive = true
+ localRecordButton.widthAnchor.constraint(equalToConstant: 70).isActive = true
+ localRecordButton.heightAnchor.constraint(equalToConstant: 30).isActive = true
+
+ remoteRecordButton.bottomAnchor.constraint(equalTo: remoteVideo.bottomAnchor, constant: -10).isActive = true
+ remoteRecordButton.trailingAnchor.constraint(equalTo: remoteVideo.trailingAnchor, constant: -10).isActive = true
+ remoteRecordButton.widthAnchor.constraint(equalToConstant: 70).isActive = true
+ remoteRecordButton.heightAnchor.constraint(equalToConstant: 30).isActive = true
+
+ // set up agora instance when view loaded
+ let config = AgoraRtcEngineConfig()
+ config.appId = KeyCenter.AppId
+ config.areaCode = GlobalSettings.shared.area
+ config.channelProfile = .liveBroadcasting
+ agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self)
+ // Configuring Privatization Parameters
+ Util.configPrivatization(agoraKit: agoraKit)
+
+ agoraKit.setLogFile(LogUtils.sdkLogPath())
+
+ // get channel name from configs
+ guard let channelName = configs["channelName"] as? String,
+ let resolution = configs["resolution"] as? CGSize,
+ let fps = configs["fps"] as? Int,
+ let orientation = configs["orientation"] as? AgoraVideoOutputOrientationMode else {return}
+
+ // make myself a broadcaster
+ agoraKit.setClientRole(GlobalSettings.shared.getUserRole())
+ // enable video module and set up video encoding configs
+ agoraKit.enableVideo()
+ agoraKit.enableAudio()
+ agoraKit.setVideoEncoderConfiguration(AgoraVideoEncoderConfiguration(size: resolution,
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: orientation, mirrorMode: .auto))
+
+ // set up local video to render your local camera preview
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = 0
+ // the view to be binded
+ videoCanvas.view = localVideo.videoView
+ videoCanvas.renderMode = .hidden
+ agoraKit.setupLocalVideo(videoCanvas)
+ // you have to call startPreview to see local video
+ agoraKit.startPreview()
+
+ // Set audio route to speaker
+ agoraKit.setDefaultAudioRouteToSpeakerphone(true)
+
+ // start joining channel
+ // 1. Users can only see each other after they join the
+ // same channel successfully using the same app id.
+ // 2. If app certificate is turned on at dashboard, token is needed
+ // when joining channel. The channel name and uid used to calculate
+ // the token has to match the ones used for channel join
+ let option = AgoraRtcChannelMediaOptions()
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.clientRoleType = GlobalSettings.shared.getUserRole()
+ NetworkManager.shared.generateToken(channelName: channelName, uid: localUid, success: { token in
+ let result = self.agoraKit.joinChannel(byToken: token,
+ channelId: channelName,
+ uid: self.localUid,
+ mediaOptions: option)
+ if result != 0 {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params")
+ }
+ })
+ }
+
+ override func viewDidDisappear(_ animated: Bool) {
+ super.viewDidDisappear(animated)
+ agoraKit.disableAudio()
+ agoraKit.disableVideo()
+ if isJoined {
+ agoraKit.destroy(localRecord)
+ agoraKit.destroy(remoteRecord)
+ agoraKit.stopPreview()
+ agoraKit.leaveChannel { (stats) -> Void in
+ LogUtils.log(message: "left channel, duration: \(stats.duration)", level: .info)
+ }
+ }
+ }
+
+ @objc
+ private func onTapLocalRecordButton(sender: UIButton) {
+ sender.isSelected = !sender.isSelected
+ let path = storagePath + "/\(localUid).mp4"
+ if sender.isSelected {
+ let config = AgoraMediaRecorderConfiguration()
+ config.storagePath = path
+ config.containerFormat = .MP4
+ config.maxDurationMs = 10 * 1000
+ localRecord.startRecording(config)
+ } else {
+ localRecord.stopRecording()
+ ToastView.show(text: path)
+ }
+ }
+ @objc
+ private func onTapRemoteRecordButton(sender: UIButton) {
+ sender.isSelected = !sender.isSelected
+ let path = storagePath + "/\(remoteUid).mp4"
+ if sender.isSelected {
+ let config = AgoraMediaRecorderConfiguration()
+ config.storagePath = path
+ config.containerFormat = .MP4
+ config.maxDurationMs = 10 * 1000
+ remoteRecord.startRecording(config)
+ } else {
+ remoteRecord.stopRecording()
+ ToastView.show(text: path)
+ }
+ }
+}
+
+extension JoinChannelVideoRecorder: AgoraMediaRecorderDelegate {
+ func mediaRecorder(_ recorder: AgoraMediaRecorder, stateDidChanged channelId: String, uid: UInt, state: AgoraMediaRecorderState, error: AgoraMediaRecorderErrorCode) {
+ LogUtils.log(message: "uid == \(uid) state == \(state.rawValue)", level: .info)
+ }
+
+ func mediaRecorder(_ recorder: AgoraMediaRecorder, informationDidUpdated channelId: String, uid: UInt, info: AgoraMediaRecorderInfo) {
+ LogUtils.log(message: "uid == \(uid) info == \(info.recorderFileName)", level: .info)
+ }
+}
+
+/// agora rtc engine delegate events
+extension JoinChannelVideoRecorder: AgoraRtcEngineDelegate {
+ /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out
+ /// what is happening
+ /// Warning code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html
+ /// @param warningCode warning code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) {
+ LogUtils.log(message: "warning: \(warningCode.description)", level: .warning)
+ }
+
+ /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand
+ /// to let user know something wrong is happening
+ /// Error code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ /// @param errorCode error code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) {
+ LogUtils.log(message: "error: \(errorCode)", level: .error)
+ self.showAlert(title: "Error", message: "Error \(errorCode.description) occur")
+ }
+
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) {
+ self.isJoined = true
+ LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info)
+ }
+
+ /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param elapsed time elapse since current sdk instance join the channel in ms
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) {
+ LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info)
+
+ // Only one remote video view is available for this
+ // tutorial. Here we check if there exists a surface
+ // view tagged as this uid.
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = uid
+ // the view to be binded
+ videoCanvas.view = remoteVideo.videoView
+ videoCanvas.renderMode = .hidden
+ agoraKit.setupRemoteVideo(videoCanvas)
+ remoteUid = uid
+ }
+
+ /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param reason reason why this user left, note this event may be triggered when the remote user
+ /// become an audience in live broadcasting profile
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) {
+ LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info)
+
+ // to unlink your view from sdk, so that your view reference will be released
+ // note the video will stay at its last frame, to completely remove it
+ // you will need to remove the EAGL sublayer from your binded view
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = uid
+ // the view to be binded
+ videoCanvas.view = nil
+ videoCanvas.renderMode = .hidden
+ agoraKit.setupRemoteVideo(videoCanvas)
+ remoteUid = 0
+ }
+
+ /// Reports the statistics of the current call. The SDK triggers this callback once every two seconds after the user joins the channel.
+ /// @param stats stats struct
+ func rtcEngine(_ engine: AgoraRtcEngineKit, reportRtcStats stats: AgoraChannelStats) {
+ localVideo.statsInfo?.updateChannelStats(stats)
+ }
+
+ /// Reports the statistics of the uploading local audio streams once every two seconds.
+ /// @param stats stats struct
+ func rtcEngine(_ engine: AgoraRtcEngineKit, localAudioStats stats: AgoraRtcLocalAudioStats) {
+ localVideo.statsInfo?.updateLocalAudioStats(stats)
+ }
+
+ /// Reports the statistics of the video stream from each remote user/host.
+ /// @param stats stats struct
+ func rtcEngine(_ engine: AgoraRtcEngineKit, remoteVideoStats stats: AgoraRtcRemoteVideoStats) {
+ remoteVideo.statsInfo?.updateVideoStats(stats)
+ }
+
+ /// Reports the statistics of the audio stream from each remote user/host.
+ /// @param stats stats struct for current call statistics
+ func rtcEngine(_ engine: AgoraRtcEngineKit, remoteAudioStats stats: AgoraRtcRemoteAudioStats) {
+ remoteVideo.statsInfo?.updateAudioStats(stats)
+ }
+}
diff --git a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/zh-Hans.lproj/JoinChannelVideoRecorder.strings b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/zh-Hans.lproj/JoinChannelVideoRecorder.strings
new file mode 100644
index 000000000..25a97ee8c
--- /dev/null
+++ b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/zh-Hans.lproj/JoinChannelVideoRecorder.strings
@@ -0,0 +1,21 @@
+
+/* Class = "UITextField"; placeholder = "Enter channel name"; ObjectID = "GWc-L5-fZV"; */
+"GWc-L5-fZV.placeholder" = "输入频道名";
+
+/* Class = "UINavigationItem"; title = "Join Channel"; ObjectID = "Iy0-Dq-h5x"; */
+"Iy0-Dq-h5x.title" = "加入频道";
+
+/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "VpM-9W-auG"; */
+"VpM-9W-auG.normalTitle" = "Button";
+
+/* Class = "UIButton"; normalTitle = "Join"; ObjectID = "kbN-ZR-nNn"; */
+"kbN-ZR-nNn.normalTitle" = "加入频道";
+
+/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "kf0-3f-UI5"; */
+"kf0-3f-UI5.normalTitle" = "Button";
+
+/* Class = "UIViewController"; title = "Join Channel Video"; ObjectID = "p70-sh-D1h"; */
+"p70-sh-D1h.title" = "视频实时通话";
+
+/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "wHl-zh-dFe"; */
+"wHl-zh-dFe.normalTitle" = "Button";
diff --git a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Token)/JoinChannelVideoToken.swift b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Token)/JoinChannelVideoToken.swift
index d384c4fb1..4ac02d95c 100644
--- a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Token)/JoinChannelVideoToken.swift
+++ b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Token)/JoinChannelVideoToken.swift
@@ -160,8 +160,8 @@ class JoinChannelVideoToken: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
if result != 0 {
diff --git a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo/JoinChannelVideo.swift b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo/JoinChannelVideo.swift
index cba6062a8..0a2dbe75b 100644
--- a/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo/JoinChannelVideo.swift
+++ b/iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo/JoinChannelVideo.swift
@@ -156,8 +156,8 @@ class JoinChannelVideoMain: BaseViewController {
// when joining channel. The channel name and uid used to calculate
// the token has to match the ones used for channel join
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.publishMicrophoneTrack = true
+ option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster
+ option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster
option.clientRoleType = GlobalSettings.shared.getUserRole()
NetworkManager.shared.generateToken(channelName: channelName, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option)
diff --git a/iOS/APIExample/APIExample/Info.plist b/iOS/APIExample/APIExample/Info.plist
index f356acca7..dce12d117 100644
--- a/iOS/APIExample/APIExample/Info.plist
+++ b/iOS/APIExample/APIExample/Info.plist
@@ -2,8 +2,6 @@
- UIFileSharingEnabled
-
Application Supports itunes
BGTaskSchedulerPermittedIdentifiers
@@ -37,6 +35,8 @@
audio
processing
+ UIFileSharingEnabled
+
UILaunchStoryboardName
LaunchScreen
UIMainStoryboardFile
diff --git a/iOS/APIExample/APIExample/Resources/sample.mov b/iOS/APIExample/APIExample/Resources/sample.mov
new file mode 100644
index 000000000..122ea84ff
Binary files /dev/null and b/iOS/APIExample/APIExample/Resources/sample.mov differ
diff --git a/iOS/APIExample/APIExample/ViewController.swift b/iOS/APIExample/APIExample/ViewController.swift
index 86011f514..e87fecc92 100644
--- a/iOS/APIExample/APIExample/ViewController.swift
+++ b/iOS/APIExample/APIExample/ViewController.swift
@@ -27,7 +27,8 @@ class ViewController: AGViewController {
MenuSection(name: "Basic", rows: [
MenuItem(name: "Join a channel (Token)".localized, storyboard: "JoinChannelVideoToken", controller: ""),
MenuItem(name: "Join a channel (Video)".localized, storyboard: "JoinChannelVideo", controller: ""),
- MenuItem(name: "Join a channel (Audio)".localized, storyboard: "JoinChannelAudio", controller: "")
+ MenuItem(name: "Join a channel (Audio)".localized, storyboard: "JoinChannelAudio", controller: ""),
+ MenuItem(name: "Local or remote recording".localized, storyboard: "JoinChannelVideoRecorder", controller: "")
]),
MenuSection(name: "Anvanced", rows: [
// MenuItem(name: "Group Video Chat".localized, storyboard: "VideoChat", controller: "VideoChat"),
diff --git a/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings b/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings
index 2961855f6..be57dba81 100644
--- a/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings
+++ b/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings
@@ -9,6 +9,7 @@
"Join a channel (Token)" = "实时视频通话/直播(Token验证)";
"Join a channel (Video)" = "实时视频通话/直播";
"Join a channel (Audio)" = "实时语音通话/直播";
+"Local or remote recording" = "本地/远端录制";
"Live Streaming" = "RTC实时直播/主播/观众";
"RTMP Streaming" = "RTMP旁路推流";
"Media Injection" = "流媒体注入";
@@ -153,3 +154,6 @@
"This app requires world tracking, which is available only on iOS devices with the A9 processor or later." = "AR功能仅在内置A9处理器后的iOS机型支持";
"Move Camera to find a planar\n(Shown as Red Rectangle)" = "移动相机以找到一个平面\n(以红色方块显示)";
"Tap to place remote video canvas" = "点击屏幕以放置视频画布";
+"Recording" = "开始录制";
+"Stop Recording" = "停止录制";
+"PIP Support iOS 15+" = "画中画只支持iOS15及以上版本";
diff --git a/iOS/APIExample/ExportOptions.plist b/iOS/APIExample/ExportOptions.plist
index cbd9a2593..48bb7f98f 100644
--- a/iOS/APIExample/ExportOptions.plist
+++ b/iOS/APIExample/ExportOptions.plist
@@ -2,25 +2,27 @@
- provisioningProfiles
-
- io.agora.api.examples.Agora-ScreenShare-Extension
- App
- io.agora.api.examples
- App
-
compileBitcode
-
+
destination
export
method
development
+ provisioningProfiles
+
+ io.agora.api.examples
+ AgoraLab2020
+ io.agora.api.examples.Agora-ScreenShare-Extension
+ AgoraLab2020
+
+ signingCertificate
+ Apple Development
signingStyle
manual
stripSwiftSymbols
teamID
- GM72UGLGZW
+ JDPG69R49Z
thinning
<none>
diff --git a/iOS/APIExample/Podfile b/iOS/APIExample/Podfile
index 19bc7cb1f..c2cd9b7f6 100644
--- a/iOS/APIExample/Podfile
+++ b/iOS/APIExample/Podfile
@@ -8,8 +8,8 @@ target 'APIExample' do
pod 'Floaty', '~> 4.2.0'
pod 'AGEVideoLayout', '~> 1.0.2'
pod 'CocoaAsyncSocket', '7.6.5'
- pod 'AgoraRtcEngine_iOS', '4.1.1'
- # pod 'sdk', :path => 'sdk.podspec'
+ pod 'AgoraRtcEngine_iOS', '4.2.0'
+# pod 'sdk', :path => 'sdk.podspec'
# pod 'senseLib', :path => 'sense.podspec'
# pod 'bytedEffect', :path => 'bytedEffect.podspec'
# pod 'fuLib', :path => 'fu.podspec'
@@ -17,12 +17,12 @@ end
target 'Agora-ScreenShare-Extension' do
use_frameworks!
- # pod 'sdk', :path => 'sdk.podspec'
- pod 'AgoraRtcEngine_iOS', '4.1.1'
+# pod 'sdk', :path => 'sdk.podspec'
+ pod 'AgoraRtcEngine_iOS', '4.2.0'
end
target 'SimpleFilter' do
use_frameworks!
- # pod 'sdk', :path => 'sdk.podspec'
- pod 'AgoraRtcEngine_iOS', '4.1.1'
+# pod 'sdk', :path => 'sdk.podspec'
+ pod 'AgoraRtcEngine_iOS', '4.2.0'
end
diff --git a/iOS/APIExample/iOS_ExportOptions.plist b/iOS/APIExample/iOS_ExportOptions.plist
new file mode 100644
index 000000000..d7bc2c920
--- /dev/null
+++ b/iOS/APIExample/iOS_ExportOptions.plist
@@ -0,0 +1,29 @@
+
+
+
+
+ compileBitcode
+
+ destination
+ export
+ method
+ development
+ provisioningProfiles
+
+ io.agora.api.examples
+ apiexamples_all
+ io.agora.api.examples.Agora-ScreenShare-Extension
+ apiexamples_all
+
+ signingCertificate
+ Apple Development
+ signingStyle
+ manual
+ stripSwiftSymbols
+
+ teamID
+ YS397FG5PA
+ thinning
+ <none>
+
+
diff --git a/iOS/APIExample/sense.podspec b/iOS/APIExample/sense.podspec
index 45bf36f6d..8739aa686 100644
--- a/iOS/APIExample/sense.podspec
+++ b/iOS/APIExample/sense.podspec
@@ -12,6 +12,7 @@ Pod::Spec.new do |spec|
spec.library = "z"
spec.vendored_libraries = "SenseLib/**/*.a"
spec.resource_bundles = {"SenseLib" => "SenseLib/**/*.bundle"}
+ spec.resources = "SenseLib/**/*.zip"
spec.requires_arc = true
spec.ios.deployment_target = '9.0'
end
\ No newline at end of file
diff --git a/macOS/APIExample.xcodeproj/project.pbxproj b/macOS/APIExample.xcodeproj/project.pbxproj
index 4a53a3eda..41cf55e2c 100644
--- a/macOS/APIExample.xcodeproj/project.pbxproj
+++ b/macOS/APIExample.xcodeproj/project.pbxproj
@@ -33,7 +33,6 @@
033A9FF5252EB5F400BC26E1 /* JoinMultiChannel.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9FF7252EB5F400BC26E1 /* JoinMultiChannel.storyboard */; };
033A9FFA252EB5FD00BC26E1 /* ScreenShare.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9FFC252EB5FD00BC26E1 /* ScreenShare.storyboard */; };
033A9FFF252EB60800BC26E1 /* StreamEncryption.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033AA001252EB60800BC26E1 /* StreamEncryption.storyboard */; };
- 033AA005252EBBEC00BC26E1 /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 033AA004252EBBEC00BC26E1 /* Localizable.strings */; };
034C626425257EA600296ECF /* GlobalSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 034C626325257EA600296ECF /* GlobalSettings.swift */; };
034C62672525857200296ECF /* JoinChannelAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 034C62662525857200296ECF /* JoinChannelAudio.swift */; };
034C626C25259FC200296ECF /* JoinChannelVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 034C626B25259FC200296ECF /* JoinChannelVideo.swift */; };
@@ -80,7 +79,6 @@
671BD67127DF478A0076D5E1 /* CustomVideoRender.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 671BD66D27DF478A0076D5E1 /* CustomVideoRender.storyboard */; };
671BD67227DF478A0076D5E1 /* CustomVideoRender.swift in Sources */ = {isa = PBXBuildFile; fileRef = 671BD66F27DF478A0076D5E1 /* CustomVideoRender.swift */; };
671BD67527E0717D0076D5E1 /* MediaPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 671BD67427E0717D0076D5E1 /* MediaPlayer.swift */; };
- 671BD67727E071A70076D5E1 /* MediaPlayer.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 671BD67627E071A70076D5E1 /* MediaPlayer.storyboard */; };
671BD67927E09E3B0076D5E1 /* AgoraPcmSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 671BD67827E09E3B0076D5E1 /* AgoraPcmSourcePush.swift */; };
671BD67B27E0A4F50076D5E1 /* output.raw in Resources */ = {isa = PBXBuildFile; fileRef = 671BD67A27E0A4F50076D5E1 /* output.raw */; };
67517BF9282E5206006E41D4 /* ContentInspect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67517BF8282E5206006E41D4 /* ContentInspect.swift */; };
@@ -88,24 +86,26 @@
67B8C89A28058AA500195106 /* RawVideoData.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67B8C89928058AA500195106 /* RawVideoData.storyboard */; };
67B8C89C28058AB600195106 /* RawVideoData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67B8C89B28058AB600195106 /* RawVideoData.swift */; };
67E23C7D2805995200FAB905 /* MediaUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = 67E23C7C2805995200FAB905 /* MediaUtils.m */; };
- 67E40B2427EC82F900C7A6E9 /* SpatialAudio.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67E40B2227EC82F900C7A6E9 /* SpatialAudio.storyboard */; };
67E40B2527EC82F900C7A6E9 /* SpatialAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67E40B2327EC82F900C7A6E9 /* SpatialAudio.swift */; };
8BCCA45B26271F7A001FD4CE /* AgoraYUVImageSourcePush.m in Sources */ = {isa = PBXBuildFile; fileRef = 8BCCA45A26271F7A001FD4CE /* AgoraYUVImageSourcePush.m */; };
8BD4AE73272513FF00E95B87 /* SimpleFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8BD4AE72272513FF00E95B87 /* SimpleFilter.swift */; };
8BE63B4227253CB000597DB1 /* SimpleFilter.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8BE63B4427253CB000597DB1 /* SimpleFilter.storyboard */; };
8BF2243B275F82CF00B65EF8 /* SimpleFilter.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 8BD4AE79272518D600E95B87 /* SimpleFilter.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
- E702C1E628B4DB4800D7C7ED /* LiveStreaming.strings in Resources */ = {isa = PBXBuildFile; fileRef = E702C1E128B4DB4800D7C7ED /* LiveStreaming.strings */; };
E702C1E728B4DB4800D7C7ED /* LiveStreaming.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E702C1E328B4DB4800D7C7ED /* LiveStreaming.storyboard */; };
E702C1E828B4DB4800D7C7ED /* LiveStreaming.swift in Sources */ = {isa = PBXBuildFile; fileRef = E702C1E528B4DB4800D7C7ED /* LiveStreaming.swift */; };
E71E7B0B289B7D7900B846C7 /* SimpleFilter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8BD4AE79272518D600E95B87 /* SimpleFilter.framework */; };
E71E7B0C289B96FA00B846C7 /* VideoProcess.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E71E7B0E289B96FA00B846C7 /* VideoProcess.storyboard */; };
- E71E7B17289BA78D00B846C7 /* QuickSwitchChannel.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E71E7B15289BA78D00B846C7 /* QuickSwitchChannel.storyboard */; };
E71E7B18289BA78D00B846C7 /* QuickSwitchChannel.swift in Sources */ = {isa = PBXBuildFile; fileRef = E71E7B16289BA78D00B846C7 /* QuickSwitchChannel.swift */; };
E72055EE28FE7FC70030E6D1 /* Util.swift in Sources */ = {isa = PBXBuildFile; fileRef = E72055ED28FE7FC70030E6D1 /* Util.swift */; };
E7218BA628BF5E65005A01D5 /* agora-logo.png in Resources */ = {isa = PBXBuildFile; fileRef = E7218BA528BF5E65005A01D5 /* agora-logo.png */; };
E728955128E5B8600013E7E6 /* Pods_APIExample.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F65EF2B97B89DE4581B426B /* Pods_APIExample.framework */; };
E74877D728A261D700CA2F58 /* NetworkManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877D428A261D700CA2F58 /* NetworkManager.swift */; };
E74877D828A261D700CA2F58 /* JSONObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877D528A261D700CA2F58 /* JSONObject.swift */; };
+ E75279B529F6651800358F18 /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = E75279B729F6651800358F18 /* Localizable.strings */; };
+ E75279C329F66CE600358F18 /* QuickSwitchChannel.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E75279C529F66CE600358F18 /* QuickSwitchChannel.storyboard */; };
+ E75279C829F66D1400358F18 /* SpatialAudio.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E75279CA29F66D1400358F18 /* SpatialAudio.storyboard */; };
+ E75279D529F6737100358F18 /* LiveStreaming.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E75279D329F6737100358F18 /* LiveStreaming.storyboard */; };
+ E75279D829F6755300358F18 /* MediaPlayer.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E75279D629F6755300358F18 /* MediaPlayer.storyboard */; };
E763A82D288AA99B00FE99FA /* SimpleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = E763A81E288AA99B00FE99FA /* SimpleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
E763A82E288AA99B00FE99FA /* VideoProcessor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = E763A81F288AA99B00FE99FA /* VideoProcessor.cpp */; };
E763A82F288AA99B00FE99FA /* ExtensionVideoFilter.cpp in Sources */ = {isa = PBXBuildFile; fileRef = E763A820288AA99B00FE99FA /* ExtensionVideoFilter.cpp */; };
@@ -128,6 +128,16 @@
E77D54D628F56D3A00D51C1E /* sample.yuv in Resources */ = {isa = PBXBuildFile; fileRef = E77D54D528F56D3A00D51C1E /* sample.yuv */; };
E77D54D828F5702B00D51C1E /* SampleBufferDisplayView.xib in Resources */ = {isa = PBXBuildFile; fileRef = E77D54D728F5702B00D51C1E /* SampleBufferDisplayView.xib */; };
E7899BD42860B2F600851463 /* NSData+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7899BD32860B2F600851463 /* NSData+Extension.swift */; };
+ E7AD0DEA29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.strings in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DE529CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.strings */; };
+ E7AD0DEB29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DE729CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.storyboard */; };
+ E7AD0DEC29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7AD0DE929CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.swift */; };
+ E7AD0DF529CAF78C00C9A4B0 /* LocalCompositeGraph.strings in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DF029CAF78C00C9A4B0 /* LocalCompositeGraph.strings */; };
+ E7AD0DF629CAF78C00C9A4B0 /* LocalCompositeGraph.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7AD0DF229CAF78C00C9A4B0 /* LocalCompositeGraph.swift */; };
+ E7AD0DF729CAF78C00C9A4B0 /* LocalCompositeGraph.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DF329CAF78C00C9A4B0 /* LocalCompositeGraph.storyboard */; };
+ E7AD0DF929CD84F800C9A4B0 /* sample.mov in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DF829CD84F800C9A4B0 /* sample.mov */; };
+ E7AD0E0029CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.strings in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DFB29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.strings */; };
+ E7AD0E0129CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DFD29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.storyboard */; };
+ E7AD0E0229CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7AD0DFF29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@@ -184,7 +194,6 @@
033A9FFB252EB5FD00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/ScreenShare.storyboard; sourceTree = ""; };
033AA000252EB60800BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/StreamEncryption.storyboard; sourceTree = ""; };
033AA003252EB60B00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/StreamEncryption.strings"; sourceTree = ""; };
- 033AA004252EBBEC00BC26E1 /* Localizable.strings */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.strings; path = Localizable.strings; sourceTree = ""; };
034C626325257EA600296ECF /* GlobalSettings.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GlobalSettings.swift; sourceTree = ""; };
034C62662525857200296ECF /* JoinChannelAudio.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelAudio.swift; sourceTree = ""; };
034C626B25259FC200296ECF /* JoinChannelVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideo.swift; sourceTree = ""; };
@@ -238,7 +247,6 @@
671BD66E27DF478A0076D5E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoRender.storyboard; sourceTree = ""; };
671BD66F27DF478A0076D5E1 /* CustomVideoRender.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoRender.swift; sourceTree = ""; };
671BD67427E0717D0076D5E1 /* MediaPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaPlayer.swift; sourceTree = ""; };
- 671BD67627E071A70076D5E1 /* MediaPlayer.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = MediaPlayer.storyboard; sourceTree = ""; };
671BD67827E09E3B0076D5E1 /* AgoraPcmSourcePush.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraPcmSourcePush.swift; sourceTree = ""; };
671BD67A27E0A4F50076D5E1 /* output.raw */ = {isa = PBXFileReference; lastKnownFileType = file; path = output.raw; sourceTree = ""; };
67517BF8282E5206006E41D4 /* ContentInspect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentInspect.swift; sourceTree = ""; };
@@ -247,7 +255,6 @@
67B8C89B28058AB600195106 /* RawVideoData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RawVideoData.swift; sourceTree = ""; };
67E23C7B2805995200FAB905 /* MediaUtils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaUtils.h; sourceTree = ""; };
67E23C7C2805995200FAB905 /* MediaUtils.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MediaUtils.m; sourceTree = ""; };
- 67E40B2227EC82F900C7A6E9 /* SpatialAudio.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = SpatialAudio.storyboard; sourceTree = ""; };
67E40B2327EC82F900C7A6E9 /* SpatialAudio.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpatialAudio.swift; sourceTree = ""; };
6F65EF2B97B89DE4581B426B /* Pods_APIExample.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_APIExample.framework; sourceTree = BUILT_PRODUCTS_DIR; };
7330EE777B818DE1FD428DA3 /* Pods_SimpleFilter.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SimpleFilter.framework; sourceTree = BUILT_PRODUCTS_DIR; };
@@ -257,20 +264,30 @@
8BD4AE72272513FF00E95B87 /* SimpleFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimpleFilter.swift; sourceTree = ""; };
8BD4AE79272518D600E95B87 /* SimpleFilter.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SimpleFilter.framework; sourceTree = BUILT_PRODUCTS_DIR; };
8BE63B4527253CD900597DB1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/SimpleFilter.storyboard; sourceTree = ""; };
- E702C1E228B4DB4800D7C7ED /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/LiveStreaming.strings"; sourceTree = ""; };
- E702C1E428B4DB4800D7C7ED /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LiveStreaming.storyboard; sourceTree = ""; };
E702C1E528B4DB4800D7C7ED /* LiveStreaming.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LiveStreaming.swift; sourceTree = ""; };
- E702C1EB28B5C76000D7C7ED /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/LiveStreaming.strings"; sourceTree = ""; };
E71E7B0D289B96FA00B846C7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/VideoProcess.storyboard; sourceTree = ""; };
E71E7B12289B971900B846C7 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/VideoProcess.strings; sourceTree = ""; };
E71E7B13289B9C2C00B846C7 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/VideoProcess.strings"; sourceTree = ""; };
- E71E7B15289BA78D00B846C7 /* QuickSwitchChannel.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = QuickSwitchChannel.storyboard; sourceTree = ""; };
E71E7B16289BA78D00B846C7 /* QuickSwitchChannel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = QuickSwitchChannel.swift; sourceTree = ""; };
E72055ED28FE7FC70030E6D1 /* Util.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Util.swift; sourceTree = ""; };
E7218BA528BF5E65005A01D5 /* agora-logo.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "agora-logo.png"; sourceTree = ""; };
E728955328E5B9420013E7E6 /* AGEVideoLayout.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = AGEVideoLayout.framework; sourceTree = BUILT_PRODUCTS_DIR; };
E74877D428A261D700CA2F58 /* NetworkManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NetworkManager.swift; sourceTree = ""; };
E74877D528A261D700CA2F58 /* JSONObject.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JSONObject.swift; sourceTree = ""; };
+ E75279B629F6651800358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/Localizable.strings"; sourceTree = ""; };
+ E75279B829F66A7100358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomAudioSource.strings"; sourceTree = ""; };
+ E75279B929F66B0F00358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/ScreenShare.strings"; sourceTree = ""; };
+ E75279C429F66CE600358F18 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = en.lproj/QuickSwitchChannel.storyboard; sourceTree = ""; };
+ E75279C729F66CEC00358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/QuickSwitchChannel.strings"; sourceTree = ""; };
+ E75279C929F66D1400358F18 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = en.lproj/SpatialAudio.storyboard; sourceTree = ""; };
+ E75279CC29F66D1900358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/SpatialAudio.strings"; sourceTree = ""; };
+ E75279D129F6728900358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/LiveStreaming.strings"; sourceTree = ""; };
+ E75279D429F6737100358F18 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LiveStreaming.storyboard; sourceTree = ""; };
+ E75279D729F6755300358F18 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = en.lproj/MediaPlayer.storyboard; sourceTree = ""; };
+ E75279DB29F675A500358F18 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/MediaPlayer.strings"; sourceTree = ""; };
+ E75279DC29F6767700358F18 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/MediaPlayer.storyboard; sourceTree = ""; };
+ E75279DD29F676BC00358F18 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/QuickSwitchChannel.storyboard; sourceTree = ""; };
+ E75279DE29F676CD00358F18 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/SpatialAudio.storyboard; sourceTree = ""; };
E763A81E288AA99B00FE99FA /* SimpleFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SimpleFilter.h; sourceTree = ""; };
E763A81F288AA99B00FE99FA /* VideoProcessor.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = VideoProcessor.cpp; sourceTree = ""; };
E763A820288AA99B00FE99FA /* ExtensionVideoFilter.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = ExtensionVideoFilter.cpp; sourceTree = ""; };
@@ -294,6 +311,16 @@
E77D54D528F56D3A00D51C1E /* sample.yuv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = sample.yuv; sourceTree = ""; };
E77D54D728F5702B00D51C1E /* SampleBufferDisplayView.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = SampleBufferDisplayView.xib; sourceTree = ""; };
E7899BD32860B2F600851463 /* NSData+Extension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "NSData+Extension.swift"; sourceTree = ""; };
+ E7AD0DE629CAAF2B00C9A4B0 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinChannelVideoRecorder.strings"; sourceTree = ""; };
+ E7AD0DE829CAAF2B00C9A4B0 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinChannelVideoRecorder.storyboard; sourceTree = ""; };
+ E7AD0DE929CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideoRecorder.swift; sourceTree = ""; };
+ E7AD0DF129CAF78C00C9A4B0 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/LocalCompositeGraph.strings"; sourceTree = ""; };
+ E7AD0DF229CAF78C00C9A4B0 /* LocalCompositeGraph.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LocalCompositeGraph.swift; sourceTree = ""; };
+ E7AD0DF429CAF78C00C9A4B0 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LocalCompositeGraph.storyboard; sourceTree = ""; };
+ E7AD0DF829CD84F800C9A4B0 /* sample.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = sample.mov; sourceTree = ""; };
+ E7AD0DFC29CDA4F100C9A4B0 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomVideoSourcePushMulti.strings"; sourceTree = ""; };
+ E7AD0DFE29CDA4F100C9A4B0 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoSourcePushMulti.storyboard; sourceTree = ""; };
+ E7AD0DFF29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourcePushMulti.swift; sourceTree = ""; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -329,6 +356,7 @@
0333E63924FA335C0063C5B0 /* Basic */ = {
isa = PBXGroup;
children = (
+ E7AD0DE429CAAF2B00C9A4B0 /* JoinChannelVideo(Recorder) */,
E77D54B828F553DA00D51C1E /* JoinChannelVideo(Token) */,
034C626A25259FC200296ECF /* JoinChannelVideo */,
034C62652525857200296ECF /* JoinChannelAudio */,
@@ -471,6 +499,7 @@
034C629D25297ABB00296ECF /* Resources */ = {
isa = PBXGroup;
children = (
+ E7AD0DF829CD84F800C9A4B0 /* sample.mov */,
E77D54D528F56D3A00D51C1E /* sample.yuv */,
E7218BA528BF5E65005A01D5 /* agora-logo.png */,
67033725279E81DF0069F4B3 /* bg.jpg */,
@@ -494,6 +523,8 @@
036D3AA524FB797700B1D8DC /* Advanced */ = {
isa = PBXGroup;
children = (
+ E7AD0DFA29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti */,
+ E7AD0DEF29CAF78C00C9A4B0 /* LocalCompositeGraph */,
E702C1E028B4DB4800D7C7ED /* LiveStreaming */,
E71E7B14289BA78D00B846C7 /* QuickSwitchChannel */,
67033720279E75770069F4B3 /* VideoProcess */,
@@ -543,7 +574,7 @@
03896D2E24F8A00F008593CD /* APIExample */ = {
isa = PBXGroup;
children = (
- 033AA004252EBBEC00BC26E1 /* Localizable.strings */,
+ E75279B729F6651800358F18 /* Localizable.strings */,
034C629D25297ABB00296ECF /* Resources */,
03267E262500C779004A91A6 /* APIExample-Bridging-Header.h */,
0333E63824FA335C0063C5B0 /* Examples */,
@@ -671,8 +702,8 @@
671BD67327E071410076D5E1 /* MediaPlayer */ = {
isa = PBXGroup;
children = (
+ E75279D629F6755300358F18 /* MediaPlayer.storyboard */,
671BD67427E0717D0076D5E1 /* MediaPlayer.swift */,
- 671BD67627E071A70076D5E1 /* MediaPlayer.storyboard */,
);
path = MediaPlayer;
sourceTree = "";
@@ -708,7 +739,7 @@
67E40B2127EC82E100C7A6E9 /* SpatialAudio */ = {
isa = PBXGroup;
children = (
- 67E40B2227EC82F900C7A6E9 /* SpatialAudio.storyboard */,
+ E75279CA29F66D1400358F18 /* SpatialAudio.storyboard */,
67E40B2327EC82F900C7A6E9 /* SpatialAudio.swift */,
);
path = SpatialAudio;
@@ -759,8 +790,8 @@
E702C1E028B4DB4800D7C7ED /* LiveStreaming */ = {
isa = PBXGroup;
children = (
- E702C1E128B4DB4800D7C7ED /* LiveStreaming.strings */,
E702C1E328B4DB4800D7C7ED /* LiveStreaming.storyboard */,
+ E75279D329F6737100358F18 /* LiveStreaming.storyboard */,
E702C1E528B4DB4800D7C7ED /* LiveStreaming.swift */,
);
path = LiveStreaming;
@@ -769,7 +800,7 @@
E71E7B14289BA78D00B846C7 /* QuickSwitchChannel */ = {
isa = PBXGroup;
children = (
- E71E7B15289BA78D00B846C7 /* QuickSwitchChannel.storyboard */,
+ E75279C529F66CE600358F18 /* QuickSwitchChannel.storyboard */,
E71E7B16289BA78D00B846C7 /* QuickSwitchChannel.swift */,
);
path = QuickSwitchChannel;
@@ -794,6 +825,36 @@
path = "JoinChannelVideo(Token)";
sourceTree = "";
};
+ E7AD0DE429CAAF2B00C9A4B0 /* JoinChannelVideo(Recorder) */ = {
+ isa = PBXGroup;
+ children = (
+ E7AD0DE529CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.strings */,
+ E7AD0DE729CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.storyboard */,
+ E7AD0DE929CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.swift */,
+ );
+ path = "JoinChannelVideo(Recorder)";
+ sourceTree = "";
+ };
+ E7AD0DEF29CAF78C00C9A4B0 /* LocalCompositeGraph */ = {
+ isa = PBXGroup;
+ children = (
+ E7AD0DF029CAF78C00C9A4B0 /* LocalCompositeGraph.strings */,
+ E7AD0DF229CAF78C00C9A4B0 /* LocalCompositeGraph.swift */,
+ E7AD0DF329CAF78C00C9A4B0 /* LocalCompositeGraph.storyboard */,
+ );
+ path = LocalCompositeGraph;
+ sourceTree = "";
+ };
+ E7AD0DFA29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti */ = {
+ isa = PBXGroup;
+ children = (
+ E7AD0DFB29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.strings */,
+ E7AD0DFD29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.storyboard */,
+ E7AD0DFF29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.swift */,
+ );
+ path = CustomVideoSourcePushMulti;
+ sourceTree = "";
+ };
E8D399FF8F860CE7DAAA9D91 /* Frameworks */ = {
isa = PBXGroup;
children = (
@@ -908,13 +969,17 @@
buildActionMask = 2147483647;
files = (
E77D54D828F5702B00D51C1E /* SampleBufferDisplayView.xib in Resources */,
+ E7AD0DF929CD84F800C9A4B0 /* sample.mov in Resources */,
+ E75279D529F6737100358F18 /* LiveStreaming.storyboard in Resources */,
E77D54BF28F553DA00D51C1E /* JoinChannelVideoToken.storyboard in Resources */,
033A9FDB252EB05A00BC26E1 /* PrecallTest.storyboard in Resources */,
+ E7AD0E0029CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.strings in Resources */,
+ E7AD0DEA29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.strings in Resources */,
033A9FFA252EB5FD00BC26E1 /* ScreenShare.storyboard in Resources */,
E77D54BE28F553DA00D51C1E /* JoinChannelVideoToken.strings in Resources */,
57645A03259B1C22007B1E30 /* CreateDataStream.strings in Resources */,
5770E2D5258C9E6F00812A80 /* Picker.xib in Resources */,
- 033AA005252EBBEC00BC26E1 /* Localizable.strings in Resources */,
+ E75279B529F6651800358F18 /* Localizable.strings in Resources */,
57887A67258856B7006E962A /* Settings.storyboard in Resources */,
033A9FFF252EB60800BC26E1 /* StreamEncryption.storyboard in Resources */,
0301D31D2507C0F300DF3BEA /* MetalVideoView.xib in Resources */,
@@ -922,10 +987,11 @@
67517BFD282E52B9006E41D4 /* ContentInspect.storyboard in Resources */,
57645A04259B1C22007B1E30 /* CreateDataStream.storyboard in Resources */,
8BE63B4227253CB000597DB1 /* SimpleFilter.storyboard in Resources */,
- 671BD67727E071A70076D5E1 /* MediaPlayer.storyboard in Resources */,
57A635DC2591BCF000EDC2F7 /* Slider.xib in Resources */,
033A9FC2252EB02D00BC26E1 /* CustomAudioSource.storyboard in Resources */,
57AF3981259B329B00601E02 /* RawAudioData.storyboard in Resources */,
+ E7AD0E0129CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.storyboard in Resources */,
+ E7AD0DF729CAF78C00C9A4B0 /* LocalCompositeGraph.storyboard in Resources */,
033A9FE5252EB59000BC26E1 /* VoiceChanger.storyboard in Resources */,
033A9FBD252EB02600BC26E1 /* CustomAudioRender.storyboard in Resources */,
67B8C89A28058AA500195106 /* RawVideoData.storyboard in Resources */,
@@ -938,18 +1004,20 @@
036D3AA024FA40EB00B1D8DC /* VideoView.xib in Resources */,
E7218BA628BF5E65005A01D5 /* agora-logo.png in Resources */,
033A9FEB252EB5CC00BC26E1 /* AudioMixing.storyboard in Resources */,
- 67E40B2427EC82F900C7A6E9 /* SpatialAudio.storyboard in Resources */,
+ E75279C829F66D1400358F18 /* SpatialAudio.storyboard in Resources */,
+ E75279D829F6755300358F18 /* MediaPlayer.storyboard in Resources */,
671BD67B27E0A4F50076D5E1 /* output.raw in Resources */,
+ E7AD0DF529CAF78C00C9A4B0 /* LocalCompositeGraph.strings in Resources */,
E71E7B0C289B96FA00B846C7 /* VideoProcess.storyboard in Resources */,
- E702C1E628B4DB4800D7C7ED /* LiveStreaming.strings in Resources */,
033A9FCC252EB03F00BC26E1 /* CustomVideoSourcePush.storyboard in Resources */,
671BD67127DF478A0076D5E1 /* CustomVideoRender.storyboard in Resources */,
E702C1E728B4DB4800D7C7ED /* LiveStreaming.storyboard in Resources */,
57A635F42593544600EDC2F7 /* effectA.wav in Resources */,
+ E7AD0DEB29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.storyboard in Resources */,
033A9FF5252EB5F400BC26E1 /* JoinMultiChannel.storyboard in Resources */,
033A9FD6252EB05200BC26E1 /* RTMPStreaming.storyboard in Resources */,
033A9FF0252EB5EB00BC26E1 /* ChannelMediaRelay.storyboard in Resources */,
- E71E7B17289BA78D00B846C7 /* QuickSwitchChannel.storyboard in Resources */,
+ E75279C329F66CE600358F18 /* QuickSwitchChannel.storyboard in Resources */,
034C62A125297ABB00296ECF /* audiomixing.mp3 in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -1048,6 +1116,7 @@
034C629C25295F2800296ECF /* AudioMixing.swift in Sources */,
036D3AA224FAA00A00B1D8DC /* Configs.swift in Sources */,
03267E1C24FF3AF4004A91A6 /* AgoraCameraSourcePush.swift in Sources */,
+ E7AD0DF629CAF78C00C9A4B0 /* LocalCompositeGraph.swift in Sources */,
8BCCA45B26271F7A001FD4CE /* AgoraYUVImageSourcePush.m in Sources */,
034C626C25259FC200296ECF /* JoinChannelVideo.swift in Sources */,
034C62772525C68D00296ECF /* AgoraCustomEncryption.mm in Sources */,
@@ -1064,6 +1133,7 @@
67B8C89C28058AB600195106 /* RawVideoData.swift in Sources */,
57887A87258889ED006E962A /* SettingsViewController.swift in Sources */,
57A635D82591BC0C00EDC2F7 /* Slider.swift in Sources */,
+ E7AD0DEC29CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.swift in Sources */,
034C62932528474D00296ECF /* StatisticsInfo.swift in Sources */,
033A9FA4252EA86A00BC26E1 /* RTMPStreaming.swift in Sources */,
67033723279E76560069F4B3 /* VideoProcess.swift in Sources */,
@@ -1082,6 +1152,7 @@
034C62712525A35800296ECF /* StreamEncryption.swift in Sources */,
671BD67527E0717D0076D5E1 /* MediaPlayer.swift in Sources */,
57887A75258859D8006E962A /* SettingsController.swift in Sources */,
+ E7AD0E0229CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.swift in Sources */,
036D3A9E24FA3A1000B1D8DC /* LogUtils.swift in Sources */,
67E40B2527EC82F900C7A6E9 /* SpatialAudio.swift in Sources */,
033A9EE2252C191000BC26E1 /* PrecallTest.swift in Sources */,
@@ -1139,6 +1210,7 @@
isa = PBXVariantGroup;
children = (
033A9FC3252EB02D00BC26E1 /* Base */,
+ E75279B829F66A7100358F18 /* zh-Hans */,
);
name = CustomAudioSource.storyboard;
sourceTree = "";
@@ -1209,6 +1281,7 @@
isa = PBXVariantGroup;
children = (
033A9FFB252EB5FD00BC26E1 /* Base */,
+ E75279B929F66B0F00358F18 /* zh-Hans */,
);
name = ScreenShare.storyboard;
sourceTree = "";
@@ -1271,19 +1344,10 @@
name = SimpleFilter.storyboard;
sourceTree = "";
};
- E702C1E128B4DB4800D7C7ED /* LiveStreaming.strings */ = {
- isa = PBXVariantGroup;
- children = (
- E702C1E228B4DB4800D7C7ED /* zh-Hans */,
- );
- name = LiveStreaming.strings;
- sourceTree = "";
- };
E702C1E328B4DB4800D7C7ED /* LiveStreaming.storyboard */ = {
isa = PBXVariantGroup;
children = (
- E702C1E428B4DB4800D7C7ED /* Base */,
- E702C1EB28B5C76000D7C7ED /* zh-Hans */,
+ E75279D129F6728900358F18 /* zh-Hans */,
);
name = LiveStreaming.storyboard;
sourceTree = "";
@@ -1298,6 +1362,52 @@
name = VideoProcess.storyboard;
sourceTree = "";
};
+ E75279B729F6651800358F18 /* Localizable.strings */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E75279B629F6651800358F18 /* zh-Hans */,
+ );
+ name = Localizable.strings;
+ sourceTree = "";
+ };
+ E75279C529F66CE600358F18 /* QuickSwitchChannel.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E75279C429F66CE600358F18 /* en */,
+ E75279C729F66CEC00358F18 /* zh-Hans */,
+ E75279DD29F676BC00358F18 /* Base */,
+ );
+ name = QuickSwitchChannel.storyboard;
+ sourceTree = "";
+ };
+ E75279CA29F66D1400358F18 /* SpatialAudio.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E75279C929F66D1400358F18 /* en */,
+ E75279CC29F66D1900358F18 /* zh-Hans */,
+ E75279DE29F676CD00358F18 /* Base */,
+ );
+ name = SpatialAudio.storyboard;
+ sourceTree = "";
+ };
+ E75279D329F6737100358F18 /* LiveStreaming.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E75279D429F6737100358F18 /* Base */,
+ );
+ name = LiveStreaming.storyboard;
+ sourceTree = "";
+ };
+ E75279D629F6755300358F18 /* MediaPlayer.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E75279D729F6755300358F18 /* en */,
+ E75279DB29F675A500358F18 /* zh-Hans */,
+ E75279DC29F6767700358F18 /* Base */,
+ );
+ name = MediaPlayer.storyboard;
+ sourceTree = "";
+ };
E77D54B928F553DA00D51C1E /* JoinChannelVideoToken.strings */ = {
isa = PBXVariantGroup;
children = (
@@ -1314,6 +1424,54 @@
name = JoinChannelVideoToken.storyboard;
sourceTree = "";
};
+ E7AD0DE529CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.strings */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DE629CAAF2B00C9A4B0 /* zh-Hans */,
+ );
+ name = JoinChannelVideoRecorder.strings;
+ sourceTree = "";
+ };
+ E7AD0DE729CAAF2B00C9A4B0 /* JoinChannelVideoRecorder.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DE829CAAF2B00C9A4B0 /* Base */,
+ );
+ name = JoinChannelVideoRecorder.storyboard;
+ sourceTree = "";
+ };
+ E7AD0DF029CAF78C00C9A4B0 /* LocalCompositeGraph.strings */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DF129CAF78C00C9A4B0 /* zh-Hans */,
+ );
+ name = LocalCompositeGraph.strings;
+ sourceTree = "";
+ };
+ E7AD0DF329CAF78C00C9A4B0 /* LocalCompositeGraph.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DF429CAF78C00C9A4B0 /* Base */,
+ );
+ name = LocalCompositeGraph.storyboard;
+ sourceTree = "";
+ };
+ E7AD0DFB29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.strings */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DFC29CDA4F100C9A4B0 /* zh-Hans */,
+ );
+ name = CustomVideoSourcePushMulti.strings;
+ sourceTree = "";
+ };
+ E7AD0DFD29CDA4F100C9A4B0 /* CustomVideoSourcePushMulti.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ E7AD0DFE29CDA4F100C9A4B0 /* Base */,
+ );
+ name = CustomVideoSourcePushMulti.storyboard;
+ sourceTree = "";
+ };
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
@@ -1440,8 +1598,8 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_ENTITLEMENTS = APIExample/APIExample.entitlements;
- CODE_SIGN_IDENTITY = "Developer ID Application";
- CODE_SIGN_STYLE = Manual;
+ CODE_SIGN_IDENTITY = "Apple Development";
+ CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 1.20220311.37381;
DEVELOPMENT_TEAM = YS397FG5PA;
@@ -1458,7 +1616,7 @@
MARKETING_VERSION = 3.8.200;
PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples;
PRODUCT_NAME = "$(TARGET_NAME)";
- PROVISIONING_PROFILE_SPECIFIER = apiexamplemac;
+ PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "APIExample/APIExample-Bridging-Header.h";
SWIFT_VERSION = 5.0;
};
@@ -1470,8 +1628,8 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_ENTITLEMENTS = APIExample/APIExample.entitlements;
- CODE_SIGN_IDENTITY = "Developer ID Application";
- CODE_SIGN_STYLE = Manual;
+ CODE_SIGN_IDENTITY = "Apple Development";
+ CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 1.20220311.37381;
DEVELOPMENT_TEAM = YS397FG5PA;
@@ -1488,7 +1646,7 @@
MARKETING_VERSION = 3.8.200;
PRODUCT_BUNDLE_IDENTIFIER = io.agora.api.examples;
PRODUCT_NAME = "$(TARGET_NAME)";
- PROVISIONING_PROFILE_SPECIFIER = apiexamplemac;
+ PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_OBJC_BRIDGING_HEADER = "APIExample/APIExample-Bridging-Header.h";
SWIFT_VERSION = 5.0;
};
diff --git a/macOS/APIExample/Common/AgoraExtension.swift b/macOS/APIExample/Common/AgoraExtension.swift
index 18a6a5e98..273007442 100644
--- a/macOS/APIExample/Common/AgoraExtension.swift
+++ b/macOS/APIExample/Common/AgoraExtension.swift
@@ -93,12 +93,13 @@ extension AgoraVirtualBackgroundSourceType {
case .color: return "Colored Background".localized
case .img: return "Image Background".localized
case .blur: return "Blur Background".localized
+ case .video: return "Video Background".localized
default:
return "\(self.rawValue)"
}
}
static func allValues() -> [AgoraVirtualBackgroundSourceType] {
- return [.color, .img, .blur]
+ return [.color, .img, .blur, .video]
}
}
diff --git a/macOS/APIExample/Common/Configs.swift b/macOS/APIExample/Common/Configs.swift
index 61f458f0f..475dea845 100644
--- a/macOS/APIExample/Common/Configs.swift
+++ b/macOS/APIExample/Common/Configs.swift
@@ -41,7 +41,8 @@ class Configs {
static var defaultFpsIdx: Int = 0
static var Fps:[Int] = [
15,
- 30
+ 30,
+ 60
]
static var Proxy:[Bool] = [
true,
diff --git a/macOS/APIExample/Common/ExternalAudio/ExternalAudio.mm b/macOS/APIExample/Common/ExternalAudio/ExternalAudio.mm
index cefa82035..a6d1a2e7f 100644
--- a/macOS/APIExample/Common/ExternalAudio/ExternalAudio.mm
+++ b/macOS/APIExample/Common/ExternalAudio/ExternalAudio.mm
@@ -302,8 +302,7 @@ - (void)audioController:(AudioController *)controller didCaptureData:(unsigned c
}
}
else {
-// [self.agoraKit pushExternalAudioFrameNSData:[NSData dataWithBytes:data length:bytesLength] sourceId:1 timestamp:0];
- [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 sourceId:1 timestamp:0];
+ [self.agoraKit pushExternalAudioFrameRawData:data samples:441 * 10 sampleRate:44100 channels:16 trackId:1 timestamp:0];
}
}
diff --git a/macOS/APIExample/Common/ExternalVideo/AgoraMetalRender.swift b/macOS/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
index 6470dac6b..317ac1cce 100644
--- a/macOS/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
+++ b/macOS/APIExample/Common/ExternalVideo/AgoraMetalRender.swift
@@ -105,9 +105,10 @@ func getAgoraRotation(rotation: Int32) -> AgoraVideoRotation? {
extension AgoraMetalRender: AgoraVideoFrameDelegate {
- func onCapture(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
+ func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
true
}
+
func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool {
if uid != userId {
return false
@@ -155,9 +156,8 @@ extension AgoraMetalRender: AgoraVideoFrameDelegate {
func getVideoFrameProcessMode() -> AgoraVideoFrameProcessMode {
.readOnly
}
-
- func onPreEncode(_ videoFrame: AgoraOutputVideoFrame) -> Bool {
- return true
+ func onPreEncode(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool {
+ true
}
}
diff --git a/macOS/APIExample/Common/Utils/MediaUtils.h b/macOS/APIExample/Common/Utils/MediaUtils.h
index fdf0eab51..77958a881 100644
--- a/macOS/APIExample/Common/Utils/MediaUtils.h
+++ b/macOS/APIExample/Common/Utils/MediaUtils.h
@@ -16,6 +16,8 @@ NS_ASSUME_NONNULL_BEGIN
+ (CVPixelBufferRef)i420ToPixelBuffer:(void *)srcY srcU:(void *)srcU srcV:(void *)srcV width:(int)width height:(int)height;
++ (NSImage *)pixelBufferToImage: (CVPixelBufferRef)pixelBuffer;
+
+ (nullable NSImage *)i420ToImage:(nullable void *)srcY srcU:(nullable void *)srcU srcV:(nullable void *)srcV width:(int)width height:(int)height;
@end
diff --git a/macOS/APIExample/Common/Utils/MediaUtils.m b/macOS/APIExample/Common/Utils/MediaUtils.m
index d586063dd..a1b8020ba 100644
--- a/macOS/APIExample/Common/Utils/MediaUtils.m
+++ b/macOS/APIExample/Common/Utils/MediaUtils.m
@@ -119,6 +119,22 @@ + (NSImage *)i420ToImage:(void *)srcY srcU:(void *)srcU srcV:(void *)srcV width:
return finalImage;
}
++ (NSImage *)pixelBufferToImage: (CVPixelBufferRef)pixelBuffer {
+ size_t width = CVPixelBufferGetHeight(pixelBuffer);
+ size_t height = CVPixelBufferGetWidth(pixelBuffer);
+
+ CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
+ CIContext *temporaryContext = [CIContext contextWithOptions:nil];
+ CGImageRef videoImage = [temporaryContext createCGImage:coreImage
+ fromRect:CGRectMake(0, 0, height, width)];
+
+ NSImage *finalImage = [[NSImage alloc] initWithCGImage:videoImage size: CGSizeMake(width, height)];
+
+ // CVPixelBufferRelease(pixelBuffer);
+ CGImageRelease(videoImage);
+ return finalImage;
+}
+
+ (void)yuv420p_to_nv12:(unsigned char*)yuv420p nv12:(unsigned char*)nv12 width:(int)width height:(int)height {
int i, j;
int y_size = width * height;
diff --git a/macOS/APIExample/Common/Utils/Util.swift b/macOS/APIExample/Common/Utils/Util.swift
index 63459706a..8064857a4 100644
--- a/macOS/APIExample/Common/Utils/Util.swift
+++ b/macOS/APIExample/Common/Utils/Util.swift
@@ -29,4 +29,15 @@ enum Util {
agoraKit?.setLocalAccessPoint(withConfig: localAccessPointConfig)
}
}
+
+ static func storagePath() -> String {
+ let filePath = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first?.absoluteString
+ let programPath = filePath?.components(separatedBy: "/")[4] ?? ""
+ let path = "/Users/\(programPath)/Downloads"
+ return path
+ }
+
+ static func logFile() -> String {
+ storagePath() + "/AgoraLog/agorasdk.log"
+ }
}
diff --git a/macOS/APIExample/Examples/Advanced/CustomAudioSource/Base.lproj/CustomAudioSource.storyboard b/macOS/APIExample/Examples/Advanced/CustomAudioSource/Base.lproj/CustomAudioSource.storyboard
index 4e2714dba..dafa21b00 100644
--- a/macOS/APIExample/Examples/Advanced/CustomAudioSource/Base.lproj/CustomAudioSource.storyboard
+++ b/macOS/APIExample/Examples/Advanced/CustomAudioSource/Base.lproj/CustomAudioSource.storyboard
@@ -1,8 +1,8 @@
-
+
-
+
@@ -41,23 +41,28 @@
-
-
-
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
diff --git a/macOS/APIExample/Examples/Advanced/CustomAudioSource/CustomAudioSource.swift b/macOS/APIExample/Examples/Advanced/CustomAudioSource/CustomAudioSource.swift
index c84ce1631..a02b9826e 100644
--- a/macOS/APIExample/Examples/Advanced/CustomAudioSource/CustomAudioSource.swift
+++ b/macOS/APIExample/Examples/Advanced/CustomAudioSource/CustomAudioSource.swift
@@ -19,6 +19,7 @@ class CustomAudioSource: BaseViewController {
var agoraKit: AgoraRtcEngineKit!
var exAudio: ExternalAudio = ExternalAudio.shared()
var pcmSourcePush: AgoraPcmSourcePush!
+ private var trackId: Int32 = 0
/**
--- Microphones Picker ---
@@ -158,6 +159,9 @@ class CustomAudioSource: BaseViewController {
} else {
pcmSourcePush.stop()
}
+ let mediaOption = AgoraRtcChannelMediaOptions()
+ mediaOption.publishCustomAudioTrack = sender.state == .on
+ agoraKit.updateChannel(with: mediaOption)
}
let sampleRate:UInt = 44100, audioChannel:UInt = 1, bitPerSample = 16, samples = 441 * 10
@@ -197,12 +201,9 @@ class CustomAudioSource: BaseViewController {
pcmSourcePush = AgoraPcmSourcePush(delegate: self, filePath: filepath, sampleRate: Int(sampleRate),
channelsPerFrame: Int(audioChannel), bitPerSample: bitPerSample, samples: samples)
- agoraKit.setExternalAudioSource(true,
- sampleRate: Int(sampleRate),
- channels: Int(audioChannel),
- sourceNumber: 2,
- localPlayback: true,
- publish: true)
+ let trackConfig = AgoraAudioTrackConfig()
+ trackConfig.enableLocalPlayback = true
+ trackId = agoraKit.createCustomAudioTrack(.mixable, config: trackConfig)
// start joining channel
// 1. Users can only see each other after they join the
@@ -216,6 +217,7 @@ class CustomAudioSource: BaseViewController {
option.publishMicrophoneTrack = true
option.publishCustomAudioTrack = true
option.publishCameraTrack = false
+ option.publishCustomAudioTrackId = Int(trackId)
NetworkManager.shared.generateToken(channelName: channel, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channel, uid: 0, mediaOptions: option)
if result != 0 {
@@ -229,6 +231,7 @@ class CustomAudioSource: BaseViewController {
})
} else {
isProcessing = true
+ agoraKit.destroyCustomAudioTrack(Int(trackId))
agoraKit.leaveChannel { (stats:AgoraChannelStats) in
LogUtils.log(message: "Left channel", level: .info)
self.isProcessing = false
@@ -332,6 +335,11 @@ extension CustomAudioSource: AgoraRtcEngineDelegate {
extension CustomAudioSource: AgoraPcmSourcePushDelegate {
func onAudioFrame(data: UnsafeMutablePointer) {
- agoraKit.pushExternalAudioFrameRawData(data, samples: Int(samples), sourceId: 0, timestamp: 0)
+ agoraKit.pushExternalAudioFrameRawData(data,
+ samples: samples,
+ sampleRate: Int(sampleRate),
+ channels: Int(audioChannel),
+ trackId: Int(trackId),
+ timestamp: 0)
}
}
diff --git a/macOS/APIExample/Examples/Advanced/CustomAudioSource/zh-Hans.lproj/CustomAudioSource.strings b/macOS/APIExample/Examples/Advanced/CustomAudioSource/zh-Hans.lproj/CustomAudioSource.strings
index 2afd2d317..21f6960e7 100644
--- a/macOS/APIExample/Examples/Advanced/CustomAudioSource/zh-Hans.lproj/CustomAudioSource.strings
+++ b/macOS/APIExample/Examples/Advanced/CustomAudioSource/zh-Hans.lproj/CustomAudioSource.strings
@@ -22,3 +22,5 @@
/* Class = "NSMenuItem"; title = "1V1"; ObjectID = "yKw-5m-DrZ"; */
"yKw-5m-DrZ.title" = "1V1";
+
+"UzT-xh-vr5.title" = "播放本地PCM";
diff --git a/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/Base.lproj/CustomVideoSourcePushMulti.storyboard b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/Base.lproj/CustomVideoSourcePushMulti.storyboard
new file mode 100644
index 000000000..265623f62
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/Base.lproj/CustomVideoSourcePushMulti.storyboard
@@ -0,0 +1,146 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift
new file mode 100644
index 000000000..b91f0d55e
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift
@@ -0,0 +1,428 @@
+//
+// JoinChannelVC.swift
+// APIExample
+//
+// Created by 张乾泽 on 2020/4/17.
+// Copyright © 2020 Agora Corp. All rights reserved.
+//
+import Cocoa
+import AgoraRtcKit
+import AGEVideoLayout
+
+class UserModel {
+ var uid: UInt = 0
+ var canvasView: SampleBufferDisplayView?
+ var trackId: UInt32 = 0
+ var isJoin: Bool = false
+ var customSource: AgoraYUVImageSourcePush?
+}
+
+class CustomVideoSourcePushMulti: BaseViewController {
+ @IBOutlet weak var Container: AGEVideoContainer!
+ lazy var localVideo: SampleBufferDisplayView = {
+ let videoView = SampleBufferDisplayView.createFromNib()
+ return videoView ?? SampleBufferDisplayView()
+ }()
+ lazy var remoteVideos: [UserModel] = (0..<3).map({ _ in
+ let model = UserModel()
+ model.uid = UInt(Int.random(in: 10000...99999))
+ model.canvasView = SampleBufferDisplayView.createFromNib()
+ model.trackId = agoraKit.createCustomVideoTrack()
+ return model
+ })
+ @IBOutlet weak var createVideoTrackButton: NSButton!
+ @IBOutlet weak var destoryVideoTrackButton: NSButton!
+
+ fileprivate var customCamera: AgoraYUVImageSourcePush?
+
+ var agoraKit: AgoraRtcEngineKit!
+
+ /**
+ --- Resolutions Picker ---
+ */
+ @IBOutlet weak var selectResolutionPicker: Picker!
+ var selectedResolution: Resolution? {
+ let index = self.selectResolutionPicker.indexOfSelectedItem
+ if index >= 0 && index < Configs.Resolutions.count {
+ return Configs.Resolutions[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectResolutionPicker() {
+ selectResolutionPicker.label.stringValue = "Resolution".localized
+ selectResolutionPicker.picker.addItems(withTitles: Configs.Resolutions.map { $0.name() })
+ selectResolutionPicker.picker.selectItem(at: GlobalSettings.shared.resolutionSetting.selectedOption().value)
+
+ selectResolutionPicker.onSelectChanged {
+ if !self.isJoined {
+ return
+ }
+
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps else {
+ return
+ }
+ self.agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ }
+ }
+
+ /**
+ --- Fps Picker ---
+ */
+ @IBOutlet weak var selectFpsPicker: Picker!
+ var selectedFps: Int? {
+ let index = self.selectFpsPicker.indexOfSelectedItem
+ if index >= 0 && index < Configs.Fps.count {
+ return Configs.Fps[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectFpsPicker() {
+ selectFpsPicker.label.stringValue = "Frame Rate".localized
+ selectFpsPicker.picker.addItems(withTitles: Configs.Fps.map { "\($0)fps" })
+ selectFpsPicker.picker.selectItem(at: GlobalSettings.shared.fpsSetting.selectedOption().value)
+
+ selectFpsPicker.onSelectChanged {
+ if !self.isJoined {
+ return
+ }
+
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps else {
+ return
+ }
+ self.agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ }
+ }
+
+ /**
+ --- Layout Picker ---
+ */
+ func initSelectLayoutPicker() {
+ layoutVideos(2)
+ }
+
+ /**
+ --- Channel TextField ---
+ */
+ @IBOutlet weak var channelField: Input!
+ func initChannelField() {
+ channelField.label.stringValue = "Channel".localized
+ channelField.field.placeholderString = "Channel Name".localized
+ }
+
+ /**
+ --- Button ---
+ */
+ @IBOutlet weak var joinChannelButton: NSButton!
+ func initJoinChannelButton() {
+ joinChannelButton.title = isJoined ? "Leave Channel".localized : "Join Channel".localized
+ }
+
+ // indicate if current instance has joined channel
+ var isJoined: Bool = false {
+ didSet {
+ channelField.isEnabled = !isJoined
+ initJoinChannelButton()
+ }
+ }
+
+ // indicate for doing something
+ var isProcessing: Bool = false {
+ didSet {
+// joinChannelButton.isEnabled = !isProcessing
+ }
+ }
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ // Do view setup here.
+ let config = AgoraRtcEngineConfig()
+ config.appId = KeyCenter.AppId
+ config.areaCode = GlobalSettings.shared.area
+ agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self)
+ // Configuring Privatization Parameters
+ Util.configPrivatization(agoraKit: agoraKit)
+ agoraKit.enableVideo()
+
+ createVideoTrackButton.title = "Create Video Track".localized
+ destoryVideoTrackButton.title = "Destory Video Track".localized
+
+ initSelectResolutionPicker()
+ initSelectFpsPicker()
+ initSelectLayoutPicker()
+ initChannelField()
+ initJoinChannelButton()
+ }
+
+ override func viewWillBeRemovedFromSplitView() {
+ customCamera?.stopSource()
+ if isJoined {
+ remoteVideos.forEach({
+ let connection = AgoraRtcConnection()
+ connection.localUid = $0.uid
+ connection.channelId = channelField.stringValue
+ $0.customSource?.stopSource()
+ agoraKit.leaveChannelEx(connection) { state in
+ LogUtils.log(message: "warning: \(state.description)", level: .info)
+ }
+ })
+ let connection = AgoraRtcConnection()
+ connection.localUid = 999
+ connection.channelId = channelField.stringValue
+ agoraKit.leaveChannelEx(connection) { state in
+ LogUtils.log(message: "warning: \(state.description)", level: .info)
+ }
+ agoraKit.leaveChannel { (stats:AgoraChannelStats) in
+ LogUtils.log(message: "Left channel", level: .info)
+ }
+ }
+ AgoraRtcEngineKit.destroy()
+ }
+
+ @IBAction func onJoinPressed(_ sender:Any) {
+ if !isJoined {
+ // check configuration
+ let channel = channelField.stringValue
+ if channel.isEmpty {
+ return
+ }
+ guard let resolution = selectedResolution,
+ let fps = selectedFps else {
+ return
+ }
+
+ // set live broadcaster mode
+ agoraKit.setChannelProfile(.liveBroadcasting)
+ // set myself as broadcaster to stream video/audio
+ agoraKit.setClientRole(.broadcaster)
+
+ // set proxy configuration
+// let proxySetting = GlobalSettings.shared.proxySetting.selectedOption().value
+// agoraKit.setCloudProxy(AgoraCloudProxyType.init(rawValue: UInt(proxySetting)) ?? .noneProxy)
+
+ // setup my own camera as custom video source
+ customCamera = AgoraYUVImageSourcePush(size: CGSize(width: 320, height: 180),
+ fileName: "sample" ,
+ frameRate: 15)
+ customCamera?.trackId = agoraKit.createCustomVideoTrack()
+ customCamera?.delegate = self
+ customCamera?.startSource()
+ agoraKit.setExternalVideoSource(true, useTexture: true, sourceType: .videoFrame)
+// agoraKit.setExternalVideoSource(true, useTexture: true, encodedFrame: true)
+ // enable video module and set up video encoding configs
+ agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ // start joining channel
+ // 1. Users can only see each other after they join the
+ // same channel successfully using the same app id.
+ // 2. If app certificate is turned on at dashboard, token is needed
+ // when joining channel. The channel name and uid used to calculate
+ // the token has to match the ones used for channel join
+ isProcessing = true
+ joinChannel(uid: 999, trackId: customCamera?.trackId ?? 0)
+ } else {
+ self.customCamera?.stopSource()
+ agoraKit.leaveChannel { (stats:AgoraChannelStats) in
+ print(stats)
+ }
+ LogUtils.log(message: "Left channel", level: .info)
+ isProcessing = false
+ isJoined = false
+ }
+ }
+
+ private func joinChannel(uid: UInt, trackId: UInt32) {
+ let channelName = channelField.stringValue
+ let option = AgoraRtcChannelMediaOptions()
+ option.publishCustomVideoTrack = true
+ option.publishMicrophoneTrack = false
+ option.publishCameraTrack = false
+ option.autoSubscribeAudio = true
+ option.autoSubscribeVideo = true
+ option.customVideoTrackId = Int(trackId)
+ option.clientRoleType = .broadcaster
+ let connection = AgoraRtcConnection()
+ connection.localUid = uid
+ connection.channelId = channelName
+ NetworkManager.shared.generateToken(channelName: channelName, uid: uid) { token in
+ let result = self.agoraKit.joinChannelEx(byToken: token,
+ connection: connection,
+ delegate: self,
+ mediaOptions: option,
+ joinSuccess: nil)
+ if result != 0 {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params")
+ }
+ }
+ }
+ @IBAction func onClickCreateTrack(_ sender: Any) {
+ guard let userModel = remoteVideos.first(where: { $0.isJoin == false }) else { return }
+ let customCamera = AgoraYUVImageSourcePush(size: CGSize(width: 320, height: 180),
+ fileName: "sample" ,
+ frameRate: 15)
+ customCamera.trackId = userModel.trackId
+ customCamera.delegate = self
+ userModel.isJoin = true
+ userModel.customSource = customCamera
+ customCamera.startSource()
+ joinChannel(uid: userModel.uid, trackId: userModel.trackId)
+
+ }
+ @IBAction func onClickDestoryTrack(_ sender: Any) {
+ let channelName = channelField.stringValue
+ let userModel = remoteVideos.filter({ $0.isJoin == true }).last
+ userModel?.isJoin = false
+ userModel?.customSource?.stopSource()
+ userModel?.canvasView?.videoView.reset()
+ userModel?.customSource = nil
+ let connection = AgoraRtcConnection()
+ connection.localUid = userModel?.uid ?? 0
+ connection.channelId = channelName
+ agoraKit.leaveChannelEx(connection) { state in
+ LogUtils.log(message: "warning: \(state.description)", level: .info)
+ }
+ }
+
+ func layoutVideos(_ count: Int) {
+ Container.layoutStream(views: [localVideo] + remoteVideos.compactMap({ $0.canvasView }))
+ }
+}
+
+/// agora rtc engine delegate events
+extension CustomVideoSourcePushMulti: AgoraRtcEngineDelegate {
+ /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out
+ /// what is happening
+ /// Warning code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html
+ /// @param warningCode warning code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) {
+ LogUtils.log(message: "warning: \(warningCode.rawValue)", level: .warning)
+ }
+
+ /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand
+ /// to let user know something wrong is happening
+ /// Error code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ /// @param errorCode error code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) {
+ LogUtils.log(message: "error: \(errorCode)", level: .error)
+ if isProcessing {
+ isProcessing = false
+ }
+ self.showAlert(title: "Error", message: "Error \(errorCode.rawValue) occur")
+ }
+
+ /// callback when the local user joins a specified channel.
+ /// @param channel
+ /// @param uid uid of local user
+ /// @param elapsed time elapse since current sdk instance join the channel in ms
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) {
+ isProcessing = false
+ isJoined = true
+ LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info)
+ }
+
+ /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param elapsed time elapse since current sdk instance join the channel in ms
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) {
+ LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info)
+ if uid == 999 { return }
+ for model in remoteVideos {
+ if model.uid == uid {
+ return
+ }
+ }
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = uid
+ // the view to be binded
+ let userModel = remoteVideos.first(where: { $0.isJoin == false })
+ videoCanvas.view = userModel?.canvasView?.videoView
+ videoCanvas.renderMode = .hidden
+ userModel?.uid = uid
+ userModel?.isJoin = true
+ agoraKit.setupRemoteVideo(videoCanvas)
+ }
+
+ /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param reason reason why this user left, note this event may be triggered when the remote user
+ /// become an audience in live broadcasting profile
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) {
+ LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info)
+
+ // to unlink your view from sdk, so that your view reference will be released
+ // note the video will stay at its last frame, to completely remove it
+ // you will need to remove the EAGL sublayer from your binded view
+ let userModel = remoteVideos.first(where: { $0.uid == uid })
+ userModel?.isJoin = false
+ userModel?.uid = UInt(Int.random(in: 10000...99999))
+ userModel?.canvasView?.videoView.reset()
+ }
+}
+
+/// agora camera video source, the delegate will get frame data from camera
+extension CustomVideoSourcePushMulti: AgoraYUVImageSourcePushDelegate {
+ func onVideoFrame(_ buffer: CVPixelBuffer, size: CGSize, trackId: UInt, rotation: Int32) {
+ let videoFrame = AgoraVideoFrame()
+ /** Video format:
+ * - 1: I420
+ * - 2: BGRA
+ * - 3: NV21
+ * - 4: RGBA
+ * - 5: IMC2
+ * - 7: ARGB
+ * - 8: NV12
+ * - 12: iOS texture (CVPixelBufferRef)
+ */
+ videoFrame.format = 12
+ videoFrame.textureBuf = buffer
+ videoFrame.rotation = Int32(rotation)
+ //once we have the video frame, we can push to agora sdk
+ agoraKit.pushExternalVideoFrame(videoFrame, videoTrackId: trackId)
+
+ let outputVideoFrame = AgoraOutputVideoFrame()
+ outputVideoFrame.width = Int32(size.width)
+ outputVideoFrame.height = Int32(size.height)
+ outputVideoFrame.pixelBuffer = buffer
+ outputVideoFrame.rotation = rotation
+ if customCamera?.trackId ?? 0 == trackId {
+ localVideo.videoView.renderVideoPixelBuffer(outputVideoFrame)
+ } else {
+ let userModel = remoteVideos.first(where: { $0.trackId == trackId })
+ userModel?.canvasView?.videoView.renderVideoPixelBuffer(outputVideoFrame)
+ }
+ }
+}
diff --git a/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/zh-Hans.lproj/CustomVideoSourcePushMulti.strings b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/zh-Hans.lproj/CustomVideoSourcePushMulti.strings
new file mode 100644
index 000000000..ec3db92f9
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/zh-Hans.lproj/CustomVideoSourcePushMulti.strings
@@ -0,0 +1,24 @@
+
+/* Class = "NSTextFieldCell"; placeholderString = "加入频道"; ObjectID = "KSj-Qd-L7B"; */
+"KSj-Qd-L7B.placeholderString" = "输入频道名";
+
+/* Class = "NSButtonCell"; title = "Join"; ObjectID = "XQ9-2H-aV1"; */
+"XQ9-2H-aV1.title" = "加入频道";
+
+/* Class = "NSButtonCell"; title = "Leave"; ObjectID = "esh-Yv-lrq"; */
+"esh-Yv-lrq.title" = "离开频道";
+
+/* Class = "NSMenuItem"; title = "1V1"; ObjectID = "lxe-dD-iYs"; */
+"lxe-dD-iYs.title" = "1V1";
+
+/* Class = "NSViewController"; title = "Custom Video Source (Push)"; ObjectID = "sXF-vm-Rrb"; */
+"sXF-vm-Rrb.title" = "音频自采集(Push)";
+
+/* Class = "NSMenuItem"; title = "1V15"; ObjectID = "tBU-fM-94k"; */
+"tBU-fM-94k.title" = "1V15";
+
+/* Class = "NSMenuItem"; title = "1V8"; ObjectID = "ukW-YV-Pc0"; */
+"ukW-YV-Pc0.title" = "1V8";
+
+/* Class = "NSMenuItem"; title = "1V3"; ObjectID = "z6y-AQ-Yeq"; */
+"z6y-AQ-Yeq.title" = "1V3";
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard b/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard
index c9edf9184..1836becd3 100644
--- a/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard
+++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard
@@ -1,8 +1,8 @@
-
+
-
+
@@ -107,19 +107,19 @@
-
+
-
+
-
+
-
-
+
+
@@ -147,14 +147,14 @@
-
+
-
-
+
+
@@ -174,8 +174,8 @@
-
-
+
+
@@ -183,9 +183,9 @@
-
+
-
+
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift b/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
index ec81bae31..d8d82639c 100644
--- a/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
+++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift
@@ -229,7 +229,8 @@ class LiveStreamingMain: BaseViewController {
}
@IBOutlet weak var snapShot: NSButton!
@IBAction func onTakeSnapshot(_ sender: Any) {
- let programPath = Bundle.main.executablePath?.components(separatedBy: "/")[2] ?? ""
+ let filePath = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first?.absoluteString
+ let programPath = filePath?.components(separatedBy: "/")[4] ?? ""
let path = "/Users/\(programPath)/Downloads/1.png"
agoraKit.takeSnapshot(Int(remoteUid), filePath: path)
}
@@ -254,7 +255,7 @@ class LiveStreamingMain: BaseViewController {
@IBOutlet weak var dualStreamTips: NSTextField!
@IBAction func onDualStreaming(_ sender: NSSwitch) {
- dualStreamTips.stringValue = sender.state == .on ? "已开启" : "(默认: 大流)"
+ dualStreamTips.stringValue = sender.state == .on ? "Opening".localized : "(Default: flow)".localized
agoraKit.enableDualStreamMode(sender.state == .on)
}
@@ -436,8 +437,8 @@ class LiveStreamingMain: BaseViewController {
// the token has to match the ones used for channel join
isProcessing = true
let option = AgoraRtcChannelMediaOptions()
- option.publishCameraTrack = true
- option.clientRoleType = .broadcaster
+ option.publishCameraTrack = role == .broadcaster
+ option.clientRoleType = role
NetworkManager.shared.generateToken(channelName: channel, success: { token in
let result = self.agoraKit.joinChannel(byToken: token, channelId: channel, uid: 0, mediaOptions: option)
if result != 0 {
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.storyboard b/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.storyboard
new file mode 100644
index 000000000..02cefd705
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.storyboard
@@ -0,0 +1,304 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.strings b/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.strings
new file mode 100644
index 000000000..7e727c67d
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.strings
@@ -0,0 +1,38 @@
+
+/* Class = "NSButtonCell"; title = "Leave"; ObjectID = "4rc-r1-Ay6"; */
+"4rc-r1-Ay6.title" = "Leave";
+
+/* Class = "NSMenuItem"; title = "1V1"; ObjectID = "Iws-j3-l2h"; */
+"Iws-j3-l2h.title" = "1V1";
+
+/* Class = "NSMenuItem"; title = "1V15"; ObjectID = "Mmi-d8-vOm"; */
+"Mmi-d8-vOm.title" = "1V15";
+
+/* Class = "NSTextFieldCell"; placeholderString = "加入频道"; ObjectID = "PtD-n2-sEW"; */
+"PtD-n2-sEW.placeholderString" = "Enter channel name";
+
+/* Class = "NSMenuItem"; title = "1V3"; ObjectID = "VNU-so-ajb"; */
+"VNU-so-ajb.title" = "1V3";
+
+/* Class = "NSViewController"; title = "Join Channel Video"; ObjectID = "YjT-yy-DnJ"; */
+"YjT-yy-DnJ.title" = "Live Streaming";
+
+/* Class = "NSMenuItem"; title = "1V8"; ObjectID = "cH4-ft-u77"; */
+"cH4-ft-u77.title" = "1V8";
+
+/* Class = "NSButtonCell"; title = "Join"; ObjectID = "guU-jX-Wkg"; */
+"guU-jX-Wkg.title" = "Join";
+
+"8YN-Yd-UZv.title" = "Watermark";
+
+"cDh-B1-x3E.title" = "Snapshot";
+
+"D2B-fw-Vnp.title.0" = "Auto";
+"D2B-fw-Vnp.title.1" = "Software";
+"z1l-XW-dGp.title.2" = "Hardware";
+
+"CHW-Nt-rwI.title" = "Watermark";
+"gt3-r0-jqt.title" = "B Frame";
+"XH3-Ib-cXr.title" = "Dual Stream";
+"d9V-RQ-OX6.title" = "(Default: high stream)";
+"PDX-e5-ZpY.title" = "First Frame Out";
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings b/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings
index 504c71ac6..4ff829739 100644
--- a/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings
+++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings
@@ -1,31 +1,36 @@
-/* Class = "NSButtonCell"; title = "Leave"; ObjectID = "4rc-r1-Ay6"; */
-"4rc-r1-Ay6.title" = "离开频道";
+/* Class = "NSTextFieldCell"; title = "water"; ObjectID = "8YN-Yd-UZv"; */
+"8YN-Yd-UZv.title" = "水印";
-/* Class = "NSMenuItem"; title = "1V1"; ObjectID = "Iws-j3-l2h"; */
-"Iws-j3-l2h.title" = "1V1";
+/* Class = "NSBox"; title = "Box"; ObjectID = "BP9-4w-AfJ"; */
+"BP9-4w-AfJ.title" = "Box";
-/* Class = "NSMenuItem"; title = "1V15"; ObjectID = "Mmi-d8-vOm"; */
-"Mmi-d8-vOm.title" = "1V15";
+/* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[0] = "Auto"; ObjectID = "D2B-fw-Vnp"; */
+"D2B-fw-Vnp.ibShadowedLabels[0]" = "自动";
-/* Class = "NSTextFieldCell"; placeholderString = "加入频道"; ObjectID = "PtD-n2-sEW"; */
-"PtD-n2-sEW.placeholderString" = "输入频道号";
+/* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[1] = "Soft knitting"; ObjectID = "D2B-fw-Vnp"; */
+"D2B-fw-Vnp.ibShadowedLabels[1]" = "软编";
-/* Class = "NSMenuItem"; title = "1V3"; ObjectID = "VNU-so-ajb"; */
-"VNU-so-ajb.title" = "1V3";
+/* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[2] = "Hard knitting"; ObjectID = "D2B-fw-Vnp"; */
+"D2B-fw-Vnp.ibShadowedLabels[2]" = "硬编";
-/* Class = "NSViewController"; title = "Join Channel Video"; ObjectID = "YjT-yy-DnJ"; */
-"YjT-yy-DnJ.title" = "实时视频通话/直播";
+/* Class = "NSButtonCell"; title = "Join"; ObjectID = "Lhu-U1-6qh"; */
+"Lhu-U1-6qh.title" = "加入频道";
-/* Class = "NSMenuItem"; title = "1V8"; ObjectID = "cH4-ft-u77"; */
-"cH4-ft-u77.title" = "1V8";
+/* Class = "NSTextFieldCell"; title = "first frame is drawn"; ObjectID = "PDX-e5-ZpY"; */
+"PDX-e5-ZpY.title" = "首帧出图";
-/* Class = "NSButtonCell"; title = "Join"; ObjectID = "guU-jX-Wkg"; */
-"guU-jX-Wkg.title" = "加入频道";
+/* Class = "NSTextFieldCell"; title = "S or l flow"; ObjectID = "XH3-Ib-cXr"; */
+"XH3-Ib-cXr.title" = "大小流";
-"8YN-Yd-UZv.title" = "水印";
+/* Class = "NSViewController"; title = "Join Channel Video"; ObjectID = "YjT-yy-DnJ"; */
+"YjT-yy-DnJ.title" = "实时视频通话/直播";
+/* Class = "NSButtonCell"; title = "Snap Shot"; ObjectID = "cDh-B1-x3E"; */
"cDh-B1-x3E.title" = "截图";
-"D2B-fw-Vnp.title.0" = "自动";
-"z1l-XW-dGp.title.0" = "wwww";
+/* Class = "NSTextFieldCell"; title = "(Default: flow)"; ObjectID = "d9V-RQ-OX6"; */
+"d9V-RQ-OX6.title" = "(默认: 大流)";
+
+/* Class = "NSTextFieldCell"; title = "BF"; ObjectID = "gt3-r0-jqt"; */
+"gt3-r0-jqt.title" = "B帧";
diff --git a/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard
new file mode 100644
index 000000000..b3376a772
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard
@@ -0,0 +1,209 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift
new file mode 100644
index 000000000..32d26ddc3
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift
@@ -0,0 +1,627 @@
+//
+// JoinChannelVC.swift
+// APIExample
+//
+// Created by 张乾泽 on 2020/4/17.
+// Copyright © 2020 Agora Corp. All rights reserved.
+//
+import Cocoa
+import AgoraRtcKit
+import AGEVideoLayout
+
+class LocalCompositeGraph: BaseViewController {
+ var videos: [VideoView] = []
+
+ @IBOutlet weak var container: AGEVideoContainer!
+
+ var agoraKit: AgoraRtcEngineKit!
+
+ func initSelectLayoutPicker() {
+ layoutVideos(2)
+ }
+
+ /**
+ --- Resolutions Picker ---
+ */
+ @IBOutlet weak var selectResolutionPicker: Picker!
+ var selectedResolution: Resolution? {
+ let index = self.selectResolutionPicker.indexOfSelectedItem
+ if index >= 0 && index < Configs.Resolutions.count {
+ return Configs.Resolutions[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectResolutionPicker() {
+ selectResolutionPicker.label.stringValue = "Resolution".localized
+ selectResolutionPicker.picker.addItems(withTitles: Configs.Resolutions.map { $0.name() })
+ selectResolutionPicker.picker.selectItem(at: GlobalSettings.shared.resolutionSetting.selectedOption().value)
+
+ selectResolutionPicker.onSelectChanged {
+ if !self.isJoined {
+ return
+ }
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps else {
+ return
+ }
+ if self.isScreenSharing || self.isWindowSharing {
+ let params = AgoraScreenCaptureParameters()
+ params.frameRate = fps
+ params.dimensions = resolution.size()
+// self.agoraKit.stopScreenCapture()
+// self.ag
+ } else {
+ self.agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ }
+ }
+ }
+
+ /**
+ --- Fps Picker ---
+ */
+ @IBOutlet weak var selectFpsPicker: Picker!
+ var selectedFps: Int? {
+ let index = self.selectFpsPicker.indexOfSelectedItem
+ if index >= 0 && index < Configs.Fps.count {
+ return Configs.Fps[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectFpsPicker() {
+ selectFpsPicker.label.stringValue = "Frame Rate".localized
+ selectFpsPicker.picker.addItems(withTitles: Configs.Fps.map { "\($0)fps" })
+ selectFpsPicker.picker.selectItem(at: GlobalSettings.shared.fpsSetting.selectedOption().value)
+
+ selectFpsPicker.onSelectChanged {
+ if !self.isJoined {
+ return
+ }
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps else {
+ return
+ }
+ if self.isScreenSharing || self.isWindowSharing {
+ let params = AgoraScreenCaptureParameters()
+ params.frameRate = fps
+ params.dimensions = resolution.size()
+// self.agoraKit.update(params)
+ } else {
+ self.agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ }
+ }
+ }
+
+ /**
+ --- DisplayHint Picker ---
+ */
+ @IBOutlet weak var selectDisplayHintPicker: Picker!
+ var displayHints = ["Default", "Motion", "Detail"]
+ func initSelectDisplayHintPicker() {
+// selectDisplayHintPicker.label.stringValue = "Display Hint".localized
+// selectDisplayHintPicker.picker.addItems(withTitles: displayHints)
+//
+// selectDisplayHintPicker.onSelectChanged {
+// if !self.isJoined {
+// return
+// }
+// guard let displayHint = self.selectedDisplayHint else { return }
+// print("setScreenCapture")
+// self.agoraKit.setScreenCapture(displayHint)
+// }
+ }
+
+ var windowManager: WindowList = WindowList()
+ var windowlist:[Window] = [], screenlist:[Window] = []
+ /**
+ --- Screen Picker ---
+ */
+ @IBOutlet weak var selectScreenPicker: Picker!
+ var selectedScreen: Window? {
+ let index = self.selectScreenPicker.indexOfSelectedItem
+ if index >= 0 && index < screenlist.count {
+ return screenlist[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectScreenPicker() {
+ screenlist = windowManager.items.filter({$0.type == .screen})
+ selectScreenPicker.label.stringValue = "Screen Share".localized
+ selectScreenPicker.picker.addItems(withTitles: screenlist.map {"\($0.name ?? "Unknown")(\($0.id))"})
+ }
+ var isScreenSharing: Bool = false {
+ didSet {
+ windowShareButton.isEnabled = !isScreenSharing
+ initScreenShareButton()
+ halfScreenShareButton.isEnabled = isScreenSharing
+ windowThumbnailButton.isEnabled = !isScreenSharing
+ videoTranscoderHandler(isTranscoder: isScreenSharing)
+ }
+ }
+ /**
+ --- Screen Share Button ---
+ */
+ @IBOutlet weak var screenShareButton: NSButton!
+ func initScreenShareButton() {
+ screenShareButton.isEnabled = isJoined
+ screenShareButton.title = isScreenSharing ? "Stop Share".localized : "Display Share".localized
+ screenThumbnailButton.isEnabled = isJoined
+ }
+
+ @IBOutlet weak var screenThumbnailButton: NSButton!
+ @IBAction func onScreentThumbnailButton(_ sender: NSButton) {
+ let result = agoraKit.getScreenCaptureSources(withThumbSize: NSScreen.main?.frame.size ?? .zero, iconSize: .zero, includeScreen: true)
+ saveThumbnailToDesktop(result: result, type: .screen)
+ }
+ @IBOutlet weak var windowThumbnailButton: NSButton!
+ @IBAction func onWindowThumbnailButton(_ sender: NSButton) {
+ let result = agoraKit.getScreenCaptureSources(withThumbSize: selectedResolution?.size() ?? .zero, iconSize: .zero, includeScreen: true)
+ saveThumbnailToDesktop(result: result, type: .window)
+ }
+
+ private func saveThumbnailToDesktop(result: [AgoraScreenCaptureSourceInfo]?, type: AgoraScreenCaptureSourceType) {
+ let filePath = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first?.absoluteString
+ let programPath = filePath?.components(separatedBy: "/")[4] ?? ""
+ let path = "/Users/\(programPath)/Downloads/thumbnail"
+ try? FileManager.default.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil)
+ var isShowAlert: Bool = false
+ result?.enumerated().forEach({ index,item in
+ let url = "\(path)/\(index).png"
+ guard item.type == type else { return }
+ let isSucces = item.thumbImage.savePNG(to: URL(fileURLWithPath: url))
+ let message = isSucces ? "Save successfully".localized : "Save failed".localized
+ if isShowAlert == false {
+ showAlert(message: message)
+ isShowAlert = true
+ }
+ })
+ }
+
+ private func createFile() {
+ //在桌面上创建一个文件
+ let manager = FileManager.default
+ let urlForDocument = manager.urls( for: .desktopDirectory,
+ in:.userDomainMask)
+ let url = urlForDocument[0]
+ createFile(name:"test.txt", fileBaseUrl: url)
+ }
+ //根据文件名和路径创建文件
+ func createFile(name:String, fileBaseUrl:URL){
+ let manager = FileManager.default
+ let file = fileBaseUrl.appendingPathComponent(name)
+ let exist = manager.fileExists(atPath: file.path)
+ if !exist {
+ //在文件中随便写入一些内容
+ let data = Data(base64Encoded:"aGVsbG8gd29ybGQ=" ,options:.ignoreUnknownCharacters)
+ let createSuccess = manager.createFile(atPath: file.path, contents:data,attributes:nil)
+ print("文件创建结果: \(createSuccess)")
+ }
+ }
+
+ @IBAction func onScreenShare(_ sender: NSButton) {
+ if !isScreenSharing {
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps,
+ let screen = selectedScreen else {
+ return
+ }
+ let params = AgoraScreenCaptureParameters()
+ params.frameRate = fps
+ params.dimensions = resolution.size()
+ // 增加勾边功能
+ params.highLightWidth = 5
+ params.highLightColor = .green
+ params.highLighted = true
+ let result = agoraKit.startScreenCapture(byDisplayId: UInt32(screen.id), regionRect: .zero, captureParams: params)
+ if result != 0 {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ self.showAlert(title: "Error", message: "startScreenCapture call failed: \(result), please check your params")
+ } else {
+ isScreenSharing = true
+ let mediaOptions = AgoraRtcChannelMediaOptions()
+// mediaOptions.publishCameraTrack = false
+// mediaOptions.publishScreenTrack = true
+ agoraKit.updateChannel(with: mediaOptions)
+ agoraKit.startPreview()
+ setupLocalPreview(isScreenSharing: true)
+ }
+ } else {
+ agoraKit.stopScreenCapture()
+ isScreenSharing = false
+ let mediaOptions = AgoraRtcChannelMediaOptions()
+// mediaOptions.publishCameraTrack = true
+// mediaOptions.publishScreenTrack = false
+ agoraKit.updateChannel(with: mediaOptions)
+ agoraKit.startPreview()
+ setupLocalPreview(isScreenSharing: false)
+ }
+ }
+
+ /**
+ --- Window Picker ---
+ */
+ @IBOutlet weak var selectWindowPicker: Picker!
+ var selectedWindow: Window? {
+ let index = self.selectWindowPicker.indexOfSelectedItem
+ if index >= 0 && index < windowlist.count {
+ return windowlist[index]
+ } else {
+ return nil
+ }
+ }
+ func initSelectWindowPicker() {
+ windowlist = windowManager.items.filter({$0.type == .window})
+ selectWindowPicker.label.stringValue = "Window Share".localized
+ selectWindowPicker.picker.addItems(withTitles: windowlist.map {"\($0.name ?? "Unknown")(\($0.id))"})
+ }
+ var isWindowSharing: Bool = false {
+ didSet {
+ screenShareButton.isEnabled = !isWindowSharing
+ initWindowShareButton()
+ halfScreenShareButton.isEnabled = isWindowSharing
+ screenThumbnailButton.isEnabled = !isWindowSharing
+ videoTranscoderHandler(isTranscoder: isWindowSharing)
+ }
+ }
+
+ private func videoTranscoderHandler(isTranscoder: Bool) {
+ if isTranscoder {
+ let captureConfig = AgoraCameraCapturerConfiguration()
+ captureConfig.dimensions = videos[1].videocanvas.bounds.size
+ agoraKit.startCameraCapture(.camera, config: captureConfig)
+ let config = AgoraLocalTranscoderConfiguration()
+ let cameraStream = AgoraTranscodingVideoStream()
+ cameraStream.rect = NSRect(origin: NSPoint(x: 250, y: 0), size: NSSize(width: 100, height: 100))
+ cameraStream.sourceType = .camera
+ let screenStream = AgoraTranscodingVideoStream()
+ screenStream.sourceType = .screen
+ screenStream.rect = NSScreen.main?.visibleFrame ?? .zero
+ config.videoInputStreams = [cameraStream, screenStream]
+ agoraKit.startLocalVideoTranscoder(config)
+ let mediaOptions = AgoraRtcChannelMediaOptions()
+ mediaOptions.publishTranscodedVideoTrack = true
+ agoraKit.updateChannel(with: mediaOptions)
+
+ } else {
+ agoraKit.stopLocalVideoTranscoder()
+ }
+ }
+ /**
+ --- Window Share Button ---
+ */
+ @IBOutlet weak var windowShareButton: NSButton!
+ func initWindowShareButton() {
+ windowShareButton.isEnabled = isJoined
+ windowShareButton.title = isWindowSharing ? "Stop Share".localized : "Window Share".localized
+ windowThumbnailButton.isEnabled = isJoined
+ }
+ @IBAction func onWindowShare(_ sender: NSButton) {
+ if !isWindowSharing {
+ guard let resolution = self.selectedResolution,
+ let fps = self.selectedFps,
+ let window = selectedWindow else {
+ return
+ }
+ let params = AgoraScreenCaptureParameters()
+ params.frameRate = fps
+ params.dimensions = resolution.size()
+ // 增加勾边功能
+ params.highLightWidth = 5
+ params.highLightColor = .green
+ params.highLighted = true
+
+ let result = agoraKit.startScreenCapture(byWindowId: UInt32(window.id), regionRect: .zero, captureParams: params)
+ if result != 0 {
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en:https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ self.showAlert(title: "Error", message: "startScreenCapture call failed: \(result), please check your params")
+ } else {
+ isWindowSharing = true
+ setupLocalPreview(isScreenSharing: true)
+ }
+ } else {
+ agoraKit.stopScreenCapture()
+ isScreenSharing = false
+ isWindowSharing = false
+ setupLocalPreview(isScreenSharing: false)
+ }
+ }
+
+ /**
+ --- Half Screen Share Button ---
+ */
+ @IBOutlet weak var halfScreenShareButton: NSButton!
+ func initHalfScreenShareButton() {
+ halfScreenShareButton.isEnabled = isJoined
+ halfScreenShareButton.title = "Share Half Screen".localized
+ }
+ var toggleRegionalScreening = false
+ @IBAction func onStartShareHalfScreen(_ sender: Any) {
+ let rect = NSScreen.main?.frame
+ let region = NSMakeRect(0, 0, !toggleRegionalScreening ? rect!.width/2 : rect!.width, !toggleRegionalScreening ? rect!.height/2 : rect!.height)
+ agoraKit.updateScreenCaptureRegion(region)
+ toggleRegionalScreening = !toggleRegionalScreening
+ }
+
+ /**
+ --- Channel TextField ---
+ */
+ @IBOutlet weak var channelField: Input!
+ func initChannelField() {
+ channelField.label.stringValue = "Channel".localized
+ channelField.field.placeholderString = "Channel Name".localized
+ }
+
+ /**
+ --- Join Button ---
+ */
+ @IBOutlet weak var joinChannelButton: NSButton!
+ func initJoinChannelButton() {
+ joinChannelButton.title = isJoined ? "Leave Channel".localized : "Join Channel".localized
+ }
+
+ // indicate if current instance has joined channel
+ var isJoined: Bool = false {
+ didSet {
+ channelField.isEnabled = !isJoined
+ initJoinChannelButton()
+ screenShareButton.isEnabled = isJoined
+ windowShareButton.isEnabled = isJoined
+ halfScreenShareButton.isEnabled = isJoined
+ screenThumbnailButton.isEnabled = isJoined
+ windowThumbnailButton.isEnabled = isJoined
+ }
+ }
+
+ // indicate for doing something
+ var isProcessing: Bool = false {
+ didSet {
+ joinChannelButton.isEnabled = !isProcessing
+ }
+ }
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ // prepare window manager and list
+ windowManager.getList()
+ // Do view setup here.
+ let config = AgoraRtcEngineConfig()
+ config.appId = KeyCenter.AppId
+ config.areaCode = GlobalSettings.shared.area
+ agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self)
+ // Configuring Privatization Parameters
+ Util.configPrivatization(agoraKit: agoraKit)
+ agoraKit.enableVideo()
+
+ initSelectResolutionPicker()
+ initSelectFpsPicker()
+ initSelectDisplayHintPicker()
+ initSelectLayoutPicker()
+ initSelectScreenPicker()
+ initScreenShareButton()
+ initSelectWindowPicker()
+ initWindowShareButton()
+ initHalfScreenShareButton()
+ initChannelField()
+ initJoinChannelButton()
+ }
+
+ override func viewWillBeRemovedFromSplitView() {
+ if isJoined {
+ agoraKit.leaveChannel { (stats:AgoraChannelStats) in
+ LogUtils.log(message: "Left channel", level: .info)
+ }
+ }
+ agoraKit.stopLocalVideoTranscoder()
+ agoraKit.stopScreenCapture()
+ AgoraRtcEngineKit.destroy()
+ }
+
+ @IBAction func onJoinPressed(_ sender:Any) {
+ if !isJoined {
+ // check configuration
+ let channel = channelField.stringValue
+ if channel.isEmpty {
+ return
+ }
+ guard let resolution = selectedResolution,
+ let fps = selectedFps else {
+ return
+ }
+
+ // set live broadcaster mode
+ agoraKit.setChannelProfile(.liveBroadcasting)
+ // set myself as broadcaster to stream video/audio
+ agoraKit.setClientRole(.broadcaster)
+ // set proxy configuration
+// let proxySetting = GlobalSettings.shared.proxySetting.selectedOption().value
+// agoraKit.setCloudProxy(AgoraCloudProxyType.init(rawValue: UInt(proxySetting)) ?? .noneProxy)
+ // enable video module and set up video encoding configs
+ agoraKit.setVideoEncoderConfiguration(
+ AgoraVideoEncoderConfiguration(
+ size: resolution.size(),
+ frameRate: AgoraVideoFrameRate(rawValue: fps) ?? .fps15,
+ bitrate: AgoraVideoBitrateStandard,
+ orientationMode: .adaptative,
+ mirrorMode: .auto
+ )
+ )
+ setupLocalPreview(isScreenSharing: false)
+
+ // start joining channel
+ // 1. Users can only see each other after they join the
+ // same channel successfully using the same app id.
+ // 2. If app certificate is turned on at dashboard, token is needed
+ // when joining channel. The channel name and uid used to calculate
+ // the token has to match the ones used for channel join
+ isProcessing = true
+ let option = AgoraRtcChannelMediaOptions()
+ option.publishCameraTrack = false
+ option.publishScreenTrack = false
+ option.clientRoleType = .broadcaster
+ NetworkManager.shared.generateToken(channelName: channel, success: { token in
+ let result = self.agoraKit.joinChannel(byToken: token, channelId: channel, uid: 0, mediaOptions: option)
+ if result != 0 {
+ self.isProcessing = false
+ // Usually happens with invalid parameters
+ // Error code description can be found at:
+ // en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params")
+ }
+ })
+ } else {
+ isProcessing = true
+ agoraKit.stopLocalVideoTranscoder()
+ agoraKit.stopScreenCapture()
+ isScreenSharing = false
+ isWindowSharing = false
+ agoraKit.leaveChannel { [unowned self] (stats:AgoraChannelStats) in
+ self.isProcessing = false
+ LogUtils.log(message: "Left channel", level: .info)
+ self.videos[0].uid = nil
+ self.isJoined = false
+ self.videos.forEach {
+ $0.uid = nil
+ $0.statsLabel.stringValue = ""
+ }
+ }
+ }
+ }
+
+ func setupLocalPreview(isScreenSharing: Bool) {
+ // set up local video to render your local camera preview
+ let localVideo = videos[isScreenSharing ? 0 : 1]
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = 0
+ // the view to be binded
+ videoCanvas.view = localVideo.videocanvas
+ videoCanvas.renderMode = .hidden
+ videoCanvas.sourceType = isScreenSharing ? .screen : .transCoded
+ videoCanvas.mirrorMode = .disabled
+ agoraKit.setupLocalVideo(videoCanvas)
+ // you have to call startPreview to see local video
+ agoraKit.startPreview()
+ }
+
+ func layoutVideos(_ count: Int) {
+ videos = []
+ for i in 0...count - 1 {
+ let view = VideoView.createFromNib()!
+ if(i == 0) {
+ view.placeholder.stringValue = "Screen"
+ } else {
+ view.placeholder.stringValue = "Preview"
+ }
+ videos.append(view)
+ }
+ // layout render view
+ container.layoutStream(views: videos)
+ }
+}
+
+/// agora rtc engine delegate events
+extension LocalCompositeGraph: AgoraRtcEngineDelegate {
+ /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out
+ /// what is happening
+ /// Warning code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html
+ /// @param warningCode warning code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) {
+ LogUtils.log(message: "warning: \(warningCode.rawValue)", level: .warning)
+ }
+
+ /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand
+ /// to let user know something wrong is happening
+ /// Error code description can be found at:
+ /// en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content
+ /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html
+ /// @param errorCode error code of the problem
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) {
+ LogUtils.log(message: "error: \(errorCode)", level: .error)
+ if isProcessing {
+ isProcessing = false
+ }
+ self.showAlert(title: "Error", message: "Error \(errorCode.rawValue) occur")
+ }
+
+ /// callback when the local user joins a specified channel.
+ /// @param channel
+ /// @param uid uid of local user
+ /// @param elapsed time elapse since current sdk instance join the channel in ms
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) {
+ isProcessing = false
+ isJoined = true
+ let localVideo = videos[0]
+ localVideo.uid = uid
+ LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info)
+ }
+
+ /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param elapsed time elapse since current sdk instance join the channel in ms
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) {
+ LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info)
+
+ // find a VideoView w/o uid assigned
+ if let remoteVideo = videos.first(where: { $0.uid == nil }) {
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = uid
+ // the view to be binded
+ videoCanvas.view = remoteVideo.videocanvas
+ videoCanvas.renderMode = .hidden
+ agoraKit.setupRemoteVideo(videoCanvas)
+ remoteVideo.uid = uid
+ } else {
+ LogUtils.log(message: "no video canvas available for \(uid), cancel bind", level: .warning)
+ }
+ }
+
+ /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event
+ /// @param uid uid of remote joined user
+ /// @param reason reason why this user left, note this event may be triggered when the remote user
+ /// become an audience in live broadcasting profile
+ func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) {
+ LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info)
+
+ // to unlink your view from sdk, so that your view reference will be released
+ // note the video will stay at its last frame, to completely remove it
+ // you will need to remove the EAGL sublayer from your binded view
+ if let remoteVideo = videos.first(where: { $0.uid == uid }) {
+ let videoCanvas = AgoraRtcVideoCanvas()
+ videoCanvas.uid = uid
+ // the view to be binded
+ videoCanvas.view = nil
+ videoCanvas.renderMode = .hidden
+ agoraKit.setupRemoteVideo(videoCanvas)
+ remoteVideo.uid = nil
+ } else {
+ LogUtils.log(message: "no matching video canvas for \(uid), cancel unbind", level: .warning)
+ }
+ }
+
+}
diff --git a/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings
new file mode 100644
index 000000000..d590eeb73
--- /dev/null
+++ b/macOS/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings
@@ -0,0 +1,45 @@
+
+/* Class = "NSButtonCell"; title = "Join"; ObjectID = "1ik-om-mWj"; */
+"1ik-om-mWj.title" = "加入频道";
+
+/* Class = "NSMenuItem"; title = "1V1"; ObjectID = "6f9-0B-egB"; */
+"6f9-0B-egB.title" = "1V1";
+
+/* Class = "NSButtonCell"; title = "Display Share"; ObjectID = "ACV-0l-kRZ"; */
+"ACV-0l-kRZ.title" = "屏幕共享";
+
+/* Class = "NSViewController"; title = "Stream Encryption"; ObjectID = "Gwp-vd-c2J"; */
+"Gwp-vd-c2J.title" = "码流加密";
+
+/* Class = "NSButtonCell"; title = "Leave"; ObjectID = "Owt-vb-7U9"; */
+"Owt-vb-7U9.title" = "离开频道";
+
+/* Class = "NSMenuItem"; title = "1V3"; ObjectID = "S4i-eh-YzK"; */
+"S4i-eh-YzK.title" = "1V3";
+
+/* Class = "NSButtonCell"; title = "Stop Share"; ObjectID = "TlR-ef-9cf"; */
+"TlR-ef-9cf.title" = "停止共享";
+
+/* Class = "NSTextFieldCell"; placeholderString = "加入频道"; ObjectID = "aj5-Fn-je9"; */
+"aj5-Fn-je9.placeholderString" = "输入频道名";
+
+/* Class = "NSMenuItem"; title = "1V15"; ObjectID = "cxo-X2-S8L"; */
+"cxo-X2-S8L.title" = "1V15";
+
+/* Class = "NSButtonCell"; title = "Window Share"; ObjectID = "ftv-L5-p8U"; */
+"ftv-L5-p8U.title" = "窗口共享";
+
+/* Class = "NSButtonCell"; title = "Stop Share"; ObjectID = "ka7-2T-SiW"; */
+"ka7-2T-SiW.title" = "停止共享";
+
+/* Class = "NSMenuItem"; title = "1V8"; ObjectID = "zu1-vg-leG"; */
+"zu1-vg-leG.title" = "1V8";
+
+/* Class = "NSButtonCell"; title = "Share Half Screen"; ObjectID = "0Ao-Fe-BEt"; */
+"0Ao-Fe-BEt.title" = "分享部分区域";
+
+/* Class = "NSButtonCell"; title = "Update Config"; ObjectID = "siB-l9-qc1"; */
+"siB-l9-qc1.title" = "更新参数";
+
+"G2K-yC-ti4.title" = "生成缩略图";
+"ghC-jR-1O0.title" = "生成缩略图";
diff --git a/macOS/APIExample/Examples/Advanced/MediaPlayer/MediaPlayer.storyboard b/macOS/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
similarity index 94%
rename from macOS/APIExample/Examples/Advanced/MediaPlayer/MediaPlayer.storyboard
rename to macOS/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
index 968c48096..fdf53d110 100644
--- a/macOS/APIExample/Examples/Advanced/MediaPlayer/MediaPlayer.storyboard
+++ b/macOS/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard
@@ -1,8 +1,8 @@
-
+
-
+
@@ -47,7 +47,7 @@
signingCertificate
- Developer ID Application
+ 35CC30DFC3AA90845718DE3E28DFD32DB1A672BF
signingStyle
manual
teamID
diff --git a/macOS/Podfile b/macOS/Podfile
index d04475104..278c8d3e4 100644
--- a/macOS/Podfile
+++ b/macOS/Podfile
@@ -4,12 +4,12 @@ target 'APIExample' do
use_frameworks!
pod 'AGEVideoLayout', '1.0.2'
- pod 'AgoraRtcEngine_macOS', '4.1.1'
- # pod 'sdk', :path => 'sdk.podspec'
+ pod 'AgoraRtcEngine_macOS', '4.2.0'
+# pod 'sdk', :path => 'sdk.podspec'
end
target 'SimpleFilter' do
use_frameworks!
- # pod 'sdk', :path => 'sdk.podspec'
- pod 'AgoraRtcEngine_macOS', '4.1.1'
+# pod 'sdk', :path => 'sdk.podspec'
+ pod 'AgoraRtcEngine_macOS', '4.2.0'
end
diff --git a/macOS/README.md b/macOS/README.md
index 92ea231fa..71db4f17f 100644
--- a/macOS/README.md
+++ b/macOS/README.md
@@ -61,6 +61,10 @@ To build and run the sample application, get an App Id:
```
+
+Then you can compile and run the project using `APIExample.xcworkspace`.
+
+
## Contact Us
- For potential issues, take a look at our [FAQ](https://docs.agora.io/en/faq) first
diff --git a/windows/APIExample/APIExample/AGVideoWnd.cpp b/windows/APIExample/APIExample/AGVideoWnd.cpp
index 2c32ae6e9..19d7d68de 100755
--- a/windows/APIExample/APIExample/AGVideoWnd.cpp
+++ b/windows/APIExample/APIExample/AGVideoWnd.cpp
@@ -146,11 +146,11 @@ void CAGInfoWnd::OnPaint()
GetClientRect(&rcClient);
rcClient.top += 4;
if (m_isRemote) {
- strTip.Format(_T("%dx%d, %dfps\nVRecv: %dkbps\nVLossRate: %d\nVDelay: %dms\nARecv: %dkbps\nALossRate: %d\nADelay: %dms"),
+ strTip.Format(_T("%dx%d, %dfps\nVRecv: %dkbps\nVLossRate: %d%%\nVDelay: %dms\nARecv: %dkbps\nALossRate: %d%%\nADelay: %dms"),
m_nWidth, m_nHeight, m_nFps, m_nVideoBitrate, m_nVideoLossRate, m_nVideoDelay, m_nAudioBitrate, m_nAudioLossRate, m_nAudioDelay);
}
else {
- strTip.Format(_T("%dx%d, %dfps\nVSent: %dkbps\nVLossRate: %d\nASent: %dkbps\nALossRate: %d"),
+ strTip.Format(_T("%dx%d, %dfps\nVSent: %dkbps\nVLossRate: %d%%\nASent: %dkbps\nALossRate: %d%%"),
m_nWidth, m_nHeight, m_nFps, m_nVideoBitrate, m_nVideoLossRate, m_nAudioBitrate, m_nAudioLossRate);
}
diff --git a/windows/APIExample/APIExample/APIExample.rc b/windows/APIExample/APIExample/APIExample.rc
index 5f15d1320..4908c24f6 100755
--- a/windows/APIExample/APIExample/APIExample.rc
+++ b/windows/APIExample/APIExample/APIExample.rc
@@ -128,18 +128,18 @@ FONT 8, "MS Shell Dlg", 400, 0, 0x1
BEGIN
LTEXT "",IDC_STATIC_VIDEO,1,0,483,310,NOT WS_VISIBLE
LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
- LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,328,48,8
- EDITTEXT IDC_EDIT_CHANNELNAME,71,326,218,13,ES_AUTOHSCROLL
- PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,307,326,50,14
- LTEXT "Publish URL",IDC_STATIC_RTMP_URL,11,351,48,8
- EDITTEXT IDC_EDIT_RTMP_URL,71,349,218,13,ES_AUTOHSCROLL
- PUSHBUTTON "AddStream",IDC_BUTTON_ADDSTREAM,307,349,50,14
- LTEXT "Publish URL",IDC_STATIC_RTMP_URLS,11,373,48,8
- COMBOBOX IDC_COMBO_RTMPURLS,70,370,218,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- PUSHBUTTON "RemoveStream",IDC_BUTTON_REMOVE_STREAM,307,370,62,14
- PUSHBUTTON "RemoveAll",IDC_BUTTON_REMOVE_ALLSTREAM,376,370,55,15
- LTEXT "",IDC_STATIC_DETAIL,442,325,181,58
- CONTROL "Check1",IDC_CHK_TRANS_CODING,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,373,349,63,10
+ LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,12,327,48,8
+ EDITTEXT IDC_EDIT_CHANNELNAME,71,325,218,13,ES_AUTOHSCROLL
+ PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,308,325,62,14
+ LTEXT "Publish URL",IDC_STATIC_RTMP_URL,11,350,48,8
+ EDITTEXT IDC_EDIT_RTMP_URL,71,348,218,13,ES_AUTOHSCROLL
+ PUSHBUTTON "AddStream",IDC_BUTTON_ADDSTREAM,308,347,62,14
+ LTEXT "Publish URL",IDC_STATIC_RTMP_URLS,12,373,48,8
+ COMBOBOX IDC_COMBO_RTMPURLS,71,371,218,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ PUSHBUTTON "RemoveStream",IDC_BUTTON_REMOVE_STREAM,308,370,62,14
+ PUSHBUTTON "RemoveAll",IDC_BUTTON_REMOVE_ALLSTREAM,390,370,55,15
+ LTEXT "",IDC_STATIC_DETAIL,457,324,173,53
+ CONTROL "Check1",IDC_CHK_TRANS_CODING,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,390,349,63,10
END
IDD_DIALOG_METADATA DIALOGEX 0, 0, 632, 400
@@ -159,31 +159,39 @@ BEGIN
PUSHBUTTON "Clear",IDC_BUTTON_CLEAR,385,351,50,14
END
-IDD_DIALOG_SCREEN_SHARE DIALOGEX 0, 0, 632, 400
+IDD_DIALOG_SCREEN_SHARE DIALOGEX 0, 0, 632, 412
STYLE DS_SETFONT | DS_FIXEDSYS | WS_CHILD | WS_SYSMENU
FONT 8, "MS Shell Dlg", 400, 0, 0x1
BEGIN
LTEXT "",IDC_STATIC_VIDEO,1,0,484,306,NOT WS_VISIBLE
LISTBOX IDC_LIST_INFO_BROADCASTING,487,0,139,306,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
- LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,319,48,8
- EDITTEXT IDC_EDIT_CHANNELNAME,71,317,182,13,ES_AUTOHSCROLL
- PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,265,317,50,14
- LTEXT "Window HWND",IDC_STATIC_SCREEN_CAPTURE,11,341,54,8
- PUSHBUTTON "Share Window",IDC_BUTTON_START_CAPUTRE,265,338,50,14
- COMBOBOX IDC_COMBO_SCREEN_CAPTURE,71,338,181,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- LTEXT "",IDC_STATIC_DETAIL,487,321,136,62
- GROUPBOX "General Settings",IDC_STATIC_GENERAL,320,317,161,46
- LTEXT "FPS",IDC_STATIC_FPS,324,329,21,10
- EDITTEXT IDC_EDIT_FPS,347,327,55,12,ES_AUTOHSCROLL
- LTEXT "bitrate",IDC_STATIC_BITRATE,405,329,27,9
- EDITTEXT IDC_EDIT_BITRATE,432,327,46,14,ES_AUTOHSCROLL
- PUSHBUTTON "Update Calpture Param",IDC_BUTTON_UPDATEPARAM,390,346,89,14
- LTEXT "Screen",IDC_STATIC_SCREEN_SHARE,11,359,48,8
- COMBOBOX IDC_COMBO_SCREEN_SCREEN,71,360,181,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- PUSHBUTTON "Share Screen",IDC_BUTTON_START_SHARE_SCREEN,265,357,50,14
- LTEXT "",IDC_STATIC_SCREEN_INFO,8,382,305,8
- LTEXT "",IDC_STATIC_SCREEN_INFO2,315,368,163,29
- CONTROL "Share Cursor",IDC_CHECK_CURSOR,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,324,349,58,10
+ LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,8,314,48,8
+ EDITTEXT IDC_EDIT_CHANNELNAME,71,314,182,13,ES_AUTOHSCROLL
+ PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,265,314,50,14
+ LTEXT "Window HWND",IDC_STATIC_SCREEN_CAPTURE,8,354,54,8
+ PUSHBUTTON "Share Window",IDC_BUTTON_START_CAPUTRE,265,351,50,14
+ COMBOBOX IDC_COMBO_SCREEN_CAPTURE,71,352,181,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ LTEXT "",IDC_STATIC_DETAIL,487,321,136,69
+ GROUPBOX "General Settings",IDC_STATIC_GENERAL,320,314,161,95
+ LTEXT "FPS",IDC_STATIC_FPS,325,328,26,10
+ LTEXT "bitrate",IDC_STATIC_BITRATE,325,342,27,9
+ LTEXT "Screen",IDC_STATIC_SCREEN_SHARE,8,373,48,8
+ COMBOBOX IDC_COMBO_SCREEN_SCREEN,70,371,181,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ PUSHBUTTON "Share Screen",IDC_BUTTON_START_SHARE_SCREEN,265,370,50,14
+ CONTROL "Share Cursor",IDC_CHECK_CURSOR,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,325,390,58,10
+ LTEXT "Scenario",IDC_STATIC_SCREEN_CAPTURE2,8,335,54,8
+ COMBOBOX IDC_COMBO_SCREEN_CAPTURE_SCENARIO,71,333,181,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ LTEXT "scale",IDC_STATIC_SCALE,325,357,27,9
+ LTEXT "HWND",IDC_STATIC_HWND,325,372,27,9
+ COMBOBOX IDC_COMBO_HWND,353,370,67,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ PUSHBUTTON "Exclude",IDC_BUTTON_HWND_EXECLUDE,428,370,50,14
+ CONTROL "",IDC_SLIDER_FPS,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,348,324,100,15
+ CONTROL "",IDC_SLIDER_BITRATE,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,348,339,100,15
+ CONTROL "",IDC_SLIDER_SCALE,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,348,354,100,15
+ CONTROL "High Light",IDC_CHECK_HIGH_LIGHT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,385,390,47,10
+ LTEXT "15",IDC_STATIC_FPS_VALUE,448,326,26,10
+ LTEXT "700",IDC_STATIC_BITRATE_VALUE,448,342,26,10
+ LTEXT "1.0",IDC_STATIC_SCALE_VALUE,448,358,26,10
END
IDD_DIALOG_CUSTOM_CAPTURE_VIDEO DIALOGEX 0, 0, 632, 400
@@ -230,26 +238,29 @@ BEGIN
PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,194,306,50,14
LTEXT "lightening contrast",IDC_STATIC_BEAUTY_LIGHTENING_CONTRAST_LEVEL,12,334,64,8
COMBOBOX IDC_COMBO_BEAUTE_LIGHTENING_CONTRAST_LEVEL,81,333,79,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- LTEXT "",IDC_STATIC_DETAIL,485,297,132,56
+ LTEXT "",IDC_STATIC_DETAIL,498,341,132,56
LTEXT "lightening",IDC_STATIC_BEAUTY_LIGHTENING,12,351,45,8
LTEXT "redness",IDC_STATIC_BEAUTY_REDNESS,167,334,41,8
LTEXT "smoothness",IDC_STATIC_BEAUTY_SMOOTHNESS,167,352,42,8
CONTROL "Beauty Enable",IDC_CHECK_BEAUTY_ENABLE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,251,307,62,10
- GROUPBOX "Beauty",IDC_STATIC_BEaUTY,0,300,352,67
- GROUPBOX "Enhance",IDC_STATIC,0,373,412,24
- CONTROL "Colorful Enhance",IDC_CHECK_ENHANCE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,5,384,75,13
- CONTROL "",IDC_SLIDER_STRENGTH,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,130,382,104,15
- LTEXT "Strength",IDC_STATIC_STRENTH,92,389,29,8
- LTEXT "Skin Protect",IDC_STATIC_SKIN_PROTECT,241,389,40,8
- LTEXT "Strength",IDC_STATIC_STRENTH2,95,389,29,8
- LTEXT "Static",IDC_STATIC,267,389,19,8
- CONTROL "",IDC_SLIDER_SKIN_PROTECT,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,294,382,110,15
- CONTROL "Video Denoise",IDC_CHECK_VIDEO_DENOISE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,370,299,58,10
- CONTROL "Virtual Background",IDC_CHECK_VIDEO_DENOISE2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,367,322,72,10
+ GROUPBOX "Beauty",IDC_STATIC_BEaUTY,5,297,352,67
+ GROUPBOX "Enhance",IDC_STATIC,5,368,412,24
+ CONTROL "Colorful Enhance",IDC_CHECK_ENHANCE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,13,376,75,13
+ CONTROL "",IDC_SLIDER_STRENGTH,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,125,375,104,15
+ LTEXT "Strength",IDC_STATIC_STRENTH,93,378,29,8
+ LTEXT "Skin Protect",IDC_STATIC_SKIN_PROTECT,247,378,40,8
+ CONTROL "",IDC_SLIDER_SKIN_PROTECT,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,291,375,110,15
+ CONTROL "Video Denoise",IDC_CHECK_VIDEO_DENOISE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,367,306,58,10
+ CONTROL "Virtual Background",IDC_CHECK_VIDEO_DENOISE2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,367,323,72,10
CONTROL "",IDC_SLIDER_REDNESS,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,219,330,121,15
CONTROL "",IDC_SLIDER_LIGHTENING,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,55,348,100,15
- CONTROL "",IDC_SLIDER_SMOOTHNESS,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,222,346,117,15
+ CONTROL "",IDC_SLIDER_SMOOTHNESS,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,218,346,121,15
CONTROL "Lowlight Enhance",IDC_CHECK_LOWLIGHT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,368,340,71,10
+ CONTROL "Blur",IDC_RADIO_VIRTUAL_BG_BLUR,"Button",BS_AUTORADIOBUTTON,453,323,28,10
+ CONTROL "Color",IDC_RADIO_VIRTUAL_BG_COLOR,"Button",BS_AUTORADIOBUTTON,491,323,33,10
+ CONTROL "Video",IDC_RADIO_VIRTUAL_BG_VIDEO,"Button",BS_AUTORADIOBUTTON,567,323,33,10
+ GROUPBOX "",IDC_STATIC_VIRTUAL_BG_CHOOSE,443,316,159,20
+ CONTROL "Image",IDC_RADIO_VIRTUAL_BG_IMAGE,"Button",BS_AUTORADIOBUTTON,527,323,36,10
END
IDD_DIALOG_AUDIO_PROFILE DIALOGEX 0, 0, 632, 400
@@ -561,7 +572,9 @@ BEGIN
LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,165,351,48,8
EDITTEXT IDC_EDIT_CHANNELNAME,233,346,172,13,ES_AUTOHSCROLL
PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,443,345,75,14
- LTEXT "",IDC_STATIC_DETAIL,23,370,456,27
+ LTEXT "",IDC_STATIC_DETAIL,23,370,412,27
+ CONTROL "TranslusionVirtualBackground",IDC_CHECK_VIRTUAL_BG,
+ "Button",BS_AUTOCHECKBOX | WS_TABSTOP,445,370,109,10
END
IDD_DIALOG_BEAUTY_AUDIO DIALOGEX 0, 0, 632, 400
@@ -571,18 +584,21 @@ BEGIN
LTEXT "",IDC_STATIC_VIDEO,1,0,483,310,NOT WS_VISIBLE
LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,328,48,8
- EDITTEXT IDC_EDIT_CHANNELNAME,71,326,218,13,ES_AUTOHSCROLL
+ EDITTEXT IDC_EDIT_CHANNELNAME,70,326,218,13,ES_AUTOHSCROLL
PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,307,326,60,14
- LTEXT "Audio Change",IDC_STATIC_AUDIO_CHANGER,11,352,48,8
- COMBOBOX IDC_COMBO_AUDIO_CHANGER,71,350,172,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- LTEXT "",IDC_STATIC_DETAIL,456,325,167,58
- LTEXT "Reverb Preset",IDC_STATIC_BEAUTY_AUDIO_TYPE,12,374,48,8
- COMBOBOX IDC_COMBO_AUDIO_PERVERB_PRESET,71,373,171,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
- PUSHBUTTON "Button2",IDC_BUTTON_SET_BEAUTY_AUDIO,355,358,58,14
- EDITTEXT IDC_EDIT_PARAM1,292,349,56,14,ES_AUTOHSCROLL
- LTEXT "param1",IDC_STATIC_PARAM1,251,351,36,8
- LTEXT "param2",IDC_STATIC_PARAM2,251,373,38,8
- EDITTEXT IDC_EDIT_PARAM2,292,371,56,14,ES_AUTOHSCROLL
+ LTEXT "Audio Change",IDC_STATIC_AUDIO_CHANGER,12,345,48,8
+ COMBOBOX IDC_COMBO_AUDIO_CHANGER,70,343,172,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ LTEXT "",IDC_STATIC_DETAIL,463,331,167,58
+ LTEXT "Reverb Preset",IDC_STATIC_BEAUTY_AUDIO_TYPE,11,362,48,8
+ COMBOBOX IDC_COMBO_AUDIO_PERVERB_PRESET,70,360,171,30,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
+ PUSHBUTTON "Button2",IDC_BUTTON_SET_BEAUTY_AUDIO,463,342,58,14
+ EDITTEXT IDC_EDIT_PARAM1,289,342,56,14,ES_AUTOHSCROLL
+ LTEXT "param1",IDC_STATIC_PARAM1,251,345,36,8
+ LTEXT "param2",IDC_STATIC_PARAM2,355,345,38,8
+ EDITTEXT IDC_EDIT_PARAM2,393,342,56,14,ES_AUTOHSCROLL
+ CONTROL "",IDC_SLIDER_VOICE_FORMANT,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,70,375,123,15
+ LTEXT "Voice Formant",IDC_STATIC_BEAUTY_AUDIO_TYPE2,11,378,48,8
+ LTEXT "0",IDC_STATIC_VOICE_FORMAT_VALUE,197,378,46,8
END
IDD_DIALOG_SPATIAL_AUDIO DIALOGEX 0, 0, 633, 399
@@ -637,6 +653,47 @@ BEGIN
PUSHBUTTON "Publish Screen",IDC_BUTTON_PUBLISH_SCREEN,315,376,75,14
END
+IDD_DIALOG_MEDIA_RECORDER DIALOGEX 0, 0, 632, 400
+STYLE DS_SETFONT | DS_FIXEDSYS | WS_CHILD | WS_SYSMENU
+FONT 8, "MS Shell Dlg", 400, 0, 0x1
+BEGIN
+ LTEXT "",IDC_STATIC_VIDEO,6,3,474,311,NOT WS_VISIBLE
+ LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
+ LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,328,48,8
+ EDITTEXT IDC_EDIT_CHANNELNAME,71,326,242,15,ES_AUTOHSCROLL
+ PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,322,326,50,14
+ LTEXT "",IDC_STATIC_METADATA_INFO,493,321,137,16
+ PUSHBUTTON "StartRecording",IDC_BUTTON_LOCALRECORDER,163,285,62,14
+ PUSHBUTTON "StartRecording",IDC_BUTTON_REMOTERECORDER,402,285,63,14
+ EDITTEXT IDC_EDIT_DETAIL_INFO,399,327,183,53,ES_MULTILINE | ES_READONLY | NOT WS_BORDER | WS_VSCROLL
+END
+
+IDD_DIALOG_PUSH_EXTERNAL_VIDEO DIALOGEX 0, 0, 632, 400
+STYLE DS_SETFONT | DS_FIXEDSYS | WS_CHILD | WS_SYSMENU
+FONT 8, "MS Shell Dlg", 400, 0, 0x1
+BEGIN
+ LTEXT "",IDC_STATIC_VIDEO,1,0,483,310,NOT WS_VISIBLE
+ LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
+ LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,328,48,8
+ EDITTEXT IDC_EDIT_CHANNELNAME,71,326,218,13,ES_AUTOHSCROLL
+ PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,307,326,50,14
+ LTEXT "",IDC_STATIC_DETAIL,442,325,181,58
+END
+
+IDD_DIALOG_MULTI_VIDEO_SOURCE_TRACKS DIALOGEX 0, 0, 632, 400
+STYLE DS_SETFONT | DS_FIXEDSYS | WS_CHILD | WS_SYSMENU
+FONT 8, "MS Shell Dlg", 400, 0, 0x1
+BEGIN
+ LTEXT "",IDC_STATIC_VIDEO,4,0,483,310
+ LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP
+ LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,17,325,48,8
+ EDITTEXT IDC_EDIT_CHANNELNAME,78,323,136,13,ES_AUTOHSCROLL
+ PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,231,322,69,14
+ LTEXT "",IDC_STATIC_DETAIL,413,329,191,27
+ PUSHBUTTON "Create track",IDC_BUTTON_CREATE_TRACK,231,340,69,14
+ PUSHBUTTON "Destroy track",IDC_BUTTON_DESTROY_TRACK,231,358,69,14
+END
+
/////////////////////////////////////////////////////////////////////////////
//
@@ -671,7 +728,16 @@ BEGIN
IDD_DIALOG_RTMP_STREAMING, DIALOG
BEGIN
RIGHTMARGIN, 630
+ VERTGUIDE, 12
+ VERTGUIDE, 71
+ VERTGUIDE, 289
+ VERTGUIDE, 308
+ VERTGUIDE, 370
+ VERTGUIDE, 390
BOTTOMMARGIN, 397
+ HORZGUIDE, 331
+ HORZGUIDE, 354
+ HORZGUIDE, 377
END
IDD_DIALOG_METADATA, DIALOG
@@ -684,10 +750,15 @@ BEGIN
IDD_DIALOG_SCREEN_SHARE, DIALOG
BEGIN
RIGHTMARGIN, 626
+ VERTGUIDE, 8
+ VERTGUIDE, 71
VERTGUIDE, 481
- BOTTOMMARGIN, 397
+ BOTTOMMARGIN, 406
HORZGUIDE, 306
- HORZGUIDE, 317
+ HORZGUIDE, 314
+ HORZGUIDE, 339
+ HORZGUIDE, 358
+ HORZGUIDE, 377
END
IDD_DIALOG_CUSTOM_CAPTURE_VIDEO, DIALOG
@@ -705,7 +776,12 @@ BEGIN
IDD_DIALOG_BEAUTY, DIALOG
BEGIN
RIGHTMARGIN, 630
+ VERTGUIDE, 218
+ VERTGUIDE, 367
+ VERTGUIDE, 452
BOTTOMMARGIN, 397
+ HORZGUIDE, 328
+ HORZGUIDE, 382
END
IDD_DIALOG_AUDIO_PROFILE, DIALOG
@@ -796,7 +872,12 @@ BEGIN
IDD_DIALOG_BEAUTY_AUDIO, DIALOG
BEGIN
RIGHTMARGIN, 630
+ VERTGUIDE, 11
+ VERTGUIDE, 70
BOTTOMMARGIN, 397
+ HORZGUIDE, 349
+ HORZGUIDE, 366
+ HORZGUIDE, 382
END
IDD_DIALOG_SPATIAL_AUDIO, DIALOG
@@ -834,6 +915,38 @@ BEGIN
HORZGUIDE, 365
HORZGUIDE, 383
END
+
+ IDD_DIALOG_MEDIA_RECORDER, DIALOG
+ BEGIN
+ RIGHTMARGIN, 630
+ VERTGUIDE, 240
+ VERTGUIDE, 480
+ BOTTOMMARGIN, 397
+ HORZGUIDE, 292
+ END
+
+ IDD_DIALOG_PUSH_EXTERNAL_VIDEO, DIALOG
+ BEGIN
+ RIGHTMARGIN, 630
+ BOTTOMMARGIN, 397
+ END
+
+ IDD_DIALOG_MULTI_VIDEO_SOURCE_TRACKS, DIALOG
+ BEGIN
+ RIGHTMARGIN, 630
+ VERTGUIDE, 17
+ VERTGUIDE, 78
+ VERTGUIDE, 214
+ VERTGUIDE, 231
+ VERTGUIDE, 300
+ VERTGUIDE, 352
+ BOTTOMMARGIN, 397
+ HORZGUIDE, 310
+ HORZGUIDE, 329
+ HORZGUIDE, 347
+ HORZGUIDE, 365
+ HORZGUIDE, 383
+ END
END
#endif // APSTUDIO_INVOKED
@@ -948,6 +1061,21 @@ BEGIN
0
END
+IDD_DIALOG_MEDIA_RECORDER AFX_DIALOG_LAYOUT
+BEGIN
+ 0
+END
+
+IDD_DIALOG_PUSH_EXTERNAL_VIDEO AFX_DIALOG_LAYOUT
+BEGIN
+ 0
+END
+
+IDD_DIALOG_MULTI_VIDEO_SOURCE_TRACKS AFX_DIALOG_LAYOUT
+BEGIN
+ 0
+END
+
/////////////////////////////////////////////////////////////////////////////
//
diff --git a/windows/APIExample/APIExample/APIExample.vcxproj b/windows/APIExample/APIExample/APIExample.vcxproj
index 40332b027..7b179c2b2 100644
--- a/windows/APIExample/APIExample/APIExample.vcxproj
+++ b/windows/APIExample/APIExample/APIExample.vcxproj
@@ -118,7 +118,8 @@
if exist zh-cn.ini (copy zh-cn.ini $(SolutionDir)$(Configuration))
if exist en.ini (copy en.ini $(SolutionDir)$(Configuration))
copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.png $(SolutionDir)$(Configuration)
-copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Configuration)
+copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Configuration)
+if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Configuration))
@@ -160,7 +161,9 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
if exist $(SolutionDir)\sdk (copy $(SolutionDir)sdk\x64\*.dll $(SolutionDir)$(Platform)\$(Configuration))
if exist zh-cn.ini (copy zh-cn.ini $(SolutionDir)$(Platform)\$(Configuration))
if exist en.ini (copy en.ini $(SolutionDir)$(Platform)\$(Configuration))
+copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.png $(SolutionDir)$(Platform)\$(Configuration)
copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Platform)\$(Configuration)
+if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)$(Configuration))
@@ -202,7 +205,9 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
if exist zh-cn.ini (copy zh-cn.ini $(SolutionDir)$(Configuration))
if exist en.ini (copy en.ini $(SolutionDir)$(Configuration))
copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.png $(SolutionDir)$(Configuration)
-copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Configuration)
+copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Configuration)
+
+if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Configuration))
@@ -247,6 +252,8 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
if exist zh-cn.ini (copy zh-cn.ini $(SolutionDir)$(Platform)\$(Configuration))
if exist en.ini (copy en.ini $(SolutionDir)$(Platform)\$(Configuration))
copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(SolutionDir)$(Platform)\$(Configuration)
+
+if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)\$(Configuration))
@@ -266,11 +273,14 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
+
+
+
@@ -297,6 +307,7 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
+
@@ -311,11 +322,14 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
+
+
+
@@ -355,6 +369,7 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
Create
Create
+
@@ -365,6 +380,9 @@ copy $(SolutionDir)APIExample\Advanced\LocalVideoTranscoding\agora.jpg $(Solutio
Document
+
+ Document
+
diff --git a/windows/APIExample/APIExample/APIExample.vcxproj.filters b/windows/APIExample/APIExample/APIExample.vcxproj.filters
index 8af48596a..ed8896e3d 100644
--- a/windows/APIExample/APIExample/APIExample.vcxproj.filters
+++ b/windows/APIExample/APIExample/APIExample.vcxproj.filters
@@ -97,6 +97,15 @@
{0b7c44e6-e661-44e6-981a-25e1ab54bb43}
+
+ {33ddb658-b378-4cf7-8c4c-c3e616528e24}
+
+
+ {b88bb5af-125e-4ecd-b138-b475aed36f5d}
+
+
+ {a7d62827-b116-4a28-b8e7-2e5fddbce712}
+
@@ -228,6 +237,18 @@
Advanced\MultiChannel
+
+ Advanced\MediaRecorder
+
+
+ Header Files
+
+
+ Advanced\PushExternalVideoYUV
+
+
+ Advanced\MultiVideoSourceTracks
+
@@ -347,6 +368,18 @@
Advanced\MultiChannel
+
+ Advanced\MediaRecorder
+
+
+ Source Files
+
+
+ Advanced\PushExternalVideoYUV
+
+
+ Advanced\MultiVideoSourceTracks
+
@@ -405,5 +438,8 @@
Resource Files
+
+ Resource Files
+
\ No newline at end of file
diff --git a/windows/APIExample/APIExample/APIExampleDlg.cpp b/windows/APIExample/APIExample/APIExampleDlg.cpp
index 7dd2301c9..8725e35ba 100755
--- a/windows/APIExample/APIExample/APIExampleDlg.cpp
+++ b/windows/APIExample/APIExample/APIExampleDlg.cpp
@@ -221,6 +221,7 @@ void CAPIExampleDlg::InitSceneDialog()
m_vecAdvanced.push_back(advancedRtmpStreaming);
m_vecAdvanced.push_back(advancedVideoMetadata);
m_vecAdvanced.push_back(advancedMediaPlayer);
+ m_vecAdvanced.push_back(advancedMediaRecorder);
m_vecAdvanced.push_back(advancedScreenCap);
m_vecAdvanced.push_back(advancedAudioProfile);
m_vecAdvanced.push_back(advancedAudioMixing);
@@ -232,6 +233,7 @@ void CAPIExampleDlg::InitSceneDialog()
m_vecAdvanced.push_back(advancedCustomEncrypt);
m_vecAdvanced.push_back(advancedMultiChannel);
m_vecAdvanced.push_back(advancedMultiVideoSource);
+ m_vecAdvanced.push_back(advancedMultiVideoSourceTracks);
m_vecAdvanced.push_back(advancedPerCallTest);
m_vecAdvanced.push_back(advancedAudioVolume);
//m_vecAdvanced.push_back(advancedReportInCall);
@@ -276,9 +278,9 @@ void CAPIExampleDlg::InitSceneDialog()
m_pAudioMixingDlg->MoveWindow(&rcWnd);
//custom video capture
- m_pCaputreVideoDlg = new CAgoraCaptureVideoDlg(&m_staMainArea);
- m_pCaputreVideoDlg->Create(CAgoraCaptureVideoDlg::IDD);
- m_pCaputreVideoDlg->MoveWindow(&rcWnd);
+// m_pCaputreVideoDlg = new CAgoraCaptureVideoDlg(&m_staMainArea);
+// m_pCaputreVideoDlg->Create(CAgoraCaptureVideoDlg::IDD);
+// m_pCaputreVideoDlg->MoveWindow(&rcWnd);
//original video process
m_pOriginalVideoDlg = new CAgoraOriginalVideoDlg(&m_staMainArea);
@@ -311,6 +313,11 @@ void CAPIExampleDlg::InitSceneDialog()
m_pmediaPlayerDlg->Create(CAgoraMediaPlayer::IDD);
m_pmediaPlayerDlg->MoveWindow(&rcWnd);
+ //media recorder
+ m_pmediaRecorderDlg = new CAgoraMediaRecorder(&m_staMainArea);
+ m_pmediaRecorderDlg->Create(CAgoraMediaRecorder::IDD);
+ m_pmediaRecorderDlg->MoveWindow(&rcWnd);
+
//per call test
m_pPerCallTestDlg = new CAgoraPreCallTestDlg(&m_staMainArea);
m_pPerCallTestDlg->Create(CAgoraPreCallTestDlg::IDD);
@@ -355,6 +362,14 @@ void CAPIExampleDlg::InitSceneDialog()
m_pSpatialAudioDlg = new CAgoraSpatialAudioDlg(&m_staMainArea);
m_pSpatialAudioDlg->Create(CAgoraSpatialAudioDlg::IDD);
m_pSpatialAudioDlg->MoveWindow(&rcWnd);
+
+ m_pPushExternalVideoYUV = new PushExternalVideoYUV(&m_staMainArea);
+ m_pPushExternalVideoYUV->Create(PushExternalVideoYUV::IDD);
+ m_pPushExternalVideoYUV->MoveWindow(&rcWnd);
+
+ m_pMultiVideoSourceTracks = new MultiVideoSourceTracks(&m_staMainArea);
+ m_pMultiVideoSourceTracks->Create(MultiVideoSourceTracks::IDD);
+ m_pMultiVideoSourceTracks->MoveWindow(&rcWnd);
}
void CAPIExampleDlg::InitSceneList()
@@ -447,18 +462,17 @@ void CAPIExampleDlg::OnSelchangingListBasic(NMHDR *pNMHDR, LRESULT *pResult)
LPNMTREEVIEW pNMTreeView = reinterpret_cast(pNMHDR);
HTREEITEM hOldItem = pNMTreeView->itemOld.hItem;
HTREEITEM hAdvancedItem = m_lstAdvanced.GetSelectedItem();
+ HTREEITEM hNewItem = pNMTreeView->itemNew.hItem;
- if (m_preSelectedItemText.Compare(m_lstBasicScene.GetItemText(hOldItem)) == 0) {
- ReleaseScene(m_lstBasicScene, hOldItem);
- }
- else if (m_preSelectedItemText.Compare(m_lstAdvanced.GetItemText(hAdvancedItem)) == 0) {
- // m_lstAdvanced.SetItemState(hAdvancedItem, 0, TVIS_SELECTED);
- m_lstAdvanced.SelectItem(NULL);
- ReleaseScene(m_lstAdvanced, hAdvancedItem);
- }
+ if (m_preSelectedItemText.Compare(m_lstBasicScene.GetItemText(hOldItem)) == 0) {
+ ReleaseScene(m_lstBasicScene, hOldItem);
+ }
+ else if (m_preSelectedItemText.Compare(m_lstAdvanced.GetItemText(hAdvancedItem)) == 0) {
+ // m_lstAdvanced.SetItemState(hAdvancedItem, 0, TVIS_SELECTED);
+ m_lstAdvanced.SelectItem(NULL);
+ }
- HTREEITEM hNewItem = pNMTreeView->itemNew.hItem;
- *pResult = 0;
+ *pResult = 0;
}
void CAPIExampleDlg::OnSelchangingListAdvanced(NMHDR *pNMHDR, LRESULT *pResult)
@@ -466,13 +480,14 @@ void CAPIExampleDlg::OnSelchangingListAdvanced(NMHDR *pNMHDR, LRESULT *pResult)
LPNMTREEVIEW pNMTreeView = reinterpret_cast(pNMHDR);
HTREEITEM hBasicItem = m_lstBasicScene.GetSelectedItem();
HTREEITEM hOldItem = pNMTreeView->itemOld.hItem;
- if (m_preSelectedItemText.Compare(m_lstBasicScene.GetItemText(hBasicItem)) == 0) {
- m_lstBasicScene.SelectItem(NULL);
- ReleaseScene(m_lstBasicScene, hBasicItem);
- }
- else if (m_preSelectedItemText.Compare(m_lstAdvanced.GetItemText(hOldItem)) == 0) {
- ReleaseScene(m_lstAdvanced, hOldItem);
- }
+ HTREEITEM hNewItem = pNMTreeView->itemNew.hItem;
+
+ if (m_preSelectedItemText.Compare(m_lstBasicScene.GetItemText(hBasicItem)) == 0) {
+ m_lstBasicScene.SelectItem(NULL);
+ }
+ else if (m_preSelectedItemText.Compare(m_lstAdvanced.GetItemText(hOldItem)) == 0) {
+ ReleaseScene(m_lstAdvanced, hOldItem);
+ }
*pResult = 0;
}
@@ -497,11 +512,11 @@ void CAPIExampleDlg::CreateScene(CTreeCtrl& treeScene, CString selectedText)
m_pScreenCap->InitAgora();
m_pScreenCap->ShowWindow(SW_SHOW);
}else if (selectedText.Compare(advancedCustomVideoCapture)==0) {
- m_pCaputreVideoDlg->InitAgora();
- m_pCaputreVideoDlg->ShowWindow(SW_SHOW);
+ m_pPushExternalVideoYUV->InitAgora();
+ m_pPushExternalVideoYUV->ShowWindow(SW_SHOW);
}else if (selectedText.Compare(advancedCustomAudioCapture)==0) {
m_pCaptureAudioDlg->InitAgora();
- m_pCaptureAudioDlg->ShowWindow(SW_SHOW);
+ m_pCaptureAudioDlg->ShowWindow(SW_SHOW);
}else if (selectedText.Compare(advancedAudioProfile) == 0) {
m_pAudioProfileDlg->InitAgora();
m_pAudioProfileDlg->ShowWindow(SW_SHOW);
@@ -546,6 +561,10 @@ void CAPIExampleDlg::CreateScene(CTreeCtrl& treeScene, CString selectedText)
m_pMultiVideoSourceDlg->InitAgora();
m_pMultiVideoSourceDlg->ShowWindow(SW_SHOW);
}
+ else if (selectedText.Compare(advancedMultiVideoSourceTracks) == 0) {
+ m_pMultiVideoSourceTracks->InitAgora();
+ m_pMultiVideoSourceTracks->ShowWindow(SW_SHOW);
+ }
else if (selectedText.Compare(advancedBeautyAudio) == 0) {
m_pDlgBeautyAudio->InitAgora();
m_pDlgBeautyAudio->ShowWindow(SW_SHOW);
@@ -563,8 +582,12 @@ void CAPIExampleDlg::CreateScene(CTreeCtrl& treeScene, CString selectedText)
m_pSpatialAudioDlg->InitAgora();
m_pSpatialAudioDlg->ShowWindow(SW_SHOW);
}
+ else if (selectedText.Compare(advancedMediaRecorder) == 0) {
+ m_pmediaRecorderDlg->InitAgora();
+ m_pmediaRecorderDlg->ShowWindow(SW_SHOW);
+ }
- Sleep(500);
+ //Sleep(500);
}
void CAPIExampleDlg::ReleaseScene(CTreeCtrl& treeScene, HTREEITEM& hSelectItem)
@@ -588,11 +611,11 @@ void CAPIExampleDlg::ReleaseScene(CTreeCtrl& treeScene, HTREEITEM& hSelectItem)
m_pScreenCap->UnInitAgora();
m_pScreenCap->ShowWindow(SW_HIDE);
}else if (str.Compare(advancedCustomVideoCapture) == 0) {
- m_pCaputreVideoDlg->UnInitAgora();
- m_pCaputreVideoDlg->ShowWindow(SW_HIDE);
+ m_pPushExternalVideoYUV->UnInitAgora();
+ m_pPushExternalVideoYUV->ShowWindow(SW_HIDE);
}else if (str.Compare(advancedCustomAudioCapture) == 0) {
m_pCaptureAudioDlg->UnInitAgora();
- m_pCaptureAudioDlg->ShowWindow(SW_HIDE);
+ m_pCaptureAudioDlg->ShowWindow(SW_HIDE);
}else if (str.Compare(advancedAudioProfile) == 0) {
m_pAudioProfileDlg->UnInitAgora();
m_pAudioProfileDlg->ShowWindow(SW_HIDE);
@@ -634,6 +657,10 @@ void CAPIExampleDlg::ReleaseScene(CTreeCtrl& treeScene, HTREEITEM& hSelectItem)
m_pMultiVideoSourceDlg->UnInitAgora();
m_pMultiVideoSourceDlg->ShowWindow(SW_HIDE);
}
+ else if (str.Compare(advancedMultiVideoSourceTracks) == 0) {
+ m_pMultiVideoSourceTracks->UnInitAgora();
+ m_pMultiVideoSourceTracks->ShowWindow(SW_HIDE);
+ }
else if (str.Compare(advancedBeautyAudio) == 0) {
m_pDlgBeautyAudio->UnInitAgora();
m_pDlgBeautyAudio->ShowWindow(SW_HIDE);
@@ -654,7 +681,11 @@ void CAPIExampleDlg::ReleaseScene(CTreeCtrl& treeScene, HTREEITEM& hSelectItem)
m_pSpatialAudioDlg->UnInitAgora();
m_pSpatialAudioDlg->ShowWindow(SW_HIDE);
}
- Sleep(500);
+ else if (str.Compare(advancedMediaRecorder) == 0) {
+ m_pmediaRecorderDlg->UnInitAgora();
+ m_pmediaRecorderDlg->ShowWindow(SW_HIDE);
+ }
+ //Sleep(500);
}
LRESULT CAPIExampleDlg::OnEIDJoinLeaveChannel(WPARAM wParam, LPARAM lParam)
diff --git a/windows/APIExample/APIExample/APIExampleDlg.h b/windows/APIExample/APIExample/APIExampleDlg.h
index 59635290c..bdd3bd849 100755
--- a/windows/APIExample/APIExample/APIExampleDlg.h
+++ b/windows/APIExample/APIExample/APIExampleDlg.h
@@ -28,6 +28,9 @@
#include "Advanced/Beauty/CDlgBeauty.h"
#include "Advanced/BeautyAudio/CAgoraBeautyAudio.h"
#include "Advanced/SpatialAudio/CAgoraSpatialAudioDlg.h"
+#include "Advanced/MediaRecorder/CAgoraMediaRecorder.h"
+#include "Advanced/PushExternalVideoYUV/PushExternalVideoYUV.h"
+#include "Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.h"
#include
#include
#include