Skip to content

Commit 2e2d097

Browse files
authored
Merge pull request #1 from webrtcsdk/feat/virtual-background-android
Feat/virtual background android
2 parents 170af06 + af067c6 commit 2e2d097

File tree

6 files changed

+265
-6
lines changed

6 files changed

+265
-6
lines changed

android/build.gradle

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ android {
3131
compileSdkVersion 31
3232

3333
defaultConfig {
34-
minSdkVersion 21
34+
minSdkVersion 23
3535
testInstrumentationRunner 'androidx.test.runner.AndroidJUnitRunner'
3636
consumerProguardFiles 'proguard-rules.pro'
3737
}
@@ -51,9 +51,16 @@ android {
5151
}
5252

5353
dependencies {
54-
implementation 'io.github.webrtc-sdk:android:114.5735.02'
54+
implementation 'io.github.webrtc-sdk:android:114.5735.02'
5555
implementation 'com.twilio:audioswitch:1.1.8'
5656
implementation 'androidx.annotation:annotation:1.1.0'
5757
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
58+
59+
// ML Kit
60+
implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta3'
61+
62+
// Libyuv
63+
implementation "io.github.crow-misia.libyuv:libyuv-android:0.28.0"
64+
implementation 'androidx.camera:camera-core:1.0.2'
5865
// implementation files('libwebrtc.aar')
5966
}

android/local.properties

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,5 @@
44
# Location of the SDK. This is only used by Gradle.
55
# For customization when using a Version Control System, please read the
66
# header note.
7-
#Sat May 20 23:50:57 ICT 2023
8-
sdk.dir=/home/lambiengcode/Android/Sdk
7+
#Tue Jul 18 10:35:26 ICT 2023
8+
sdk.dir=/Users/lambiengcode/Library/Android/sdk

android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java

Lines changed: 251 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@
88
import android.content.Context;
99
import android.content.Intent;
1010
import android.content.pm.PackageManager;
11+
import android.graphics.Bitmap;
12+
import android.graphics.ImageFormat;
1113
import android.hardware.Camera;
1214
import android.hardware.Camera.Parameters;
1315
import android.hardware.camera2.CameraAccessException;
@@ -33,6 +35,7 @@
3335
import android.view.Surface;
3436
import android.view.WindowManager;
3537

38+
import androidx.annotation.NonNull;
3639
import androidx.annotation.Nullable;
3740
import androidx.annotation.RequiresApi;
3841

@@ -48,6 +51,14 @@
4851
import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils;
4952
import com.cloudwebrtc.webrtc.utils.ObjectType;
5053
import com.cloudwebrtc.webrtc.utils.PermissionUtils;
54+
import com.google.android.gms.tasks.OnFailureListener;
55+
import com.google.android.gms.tasks.OnSuccessListener;
56+
import com.google.mlkit.common.MlKitException;
57+
import com.google.mlkit.vision.common.InputImage;
58+
import com.google.mlkit.vision.segmentation.Segmentation;
59+
import com.google.mlkit.vision.segmentation.SegmentationMask;
60+
import com.google.mlkit.vision.segmentation.Segmenter;
61+
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions;
5162

5263
import org.webrtc.AudioSource;
5364
import org.webrtc.AudioTrack;
@@ -58,25 +69,39 @@
5869
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
5970
import org.webrtc.CameraEnumerator;
6071
import org.webrtc.CameraVideoCapturer;
72+
import org.webrtc.JavaI420Buffer;
6173
import org.webrtc.MediaConstraints;
6274
import org.webrtc.MediaStream;
6375
import org.webrtc.MediaStreamTrack;
6476
import org.webrtc.PeerConnectionFactory;
6577
import org.webrtc.SurfaceTextureHelper;
6678
import org.webrtc.VideoCapturer;
79+
import org.webrtc.VideoFrame;
80+
import org.webrtc.VideoProcessor;
81+
import org.webrtc.VideoSink;
6782
import org.webrtc.VideoSource;
6883
import org.webrtc.VideoTrack;
84+
import org.webrtc.YuvHelper;
6985
import org.webrtc.audio.JavaAudioDeviceModule;
7086

7187
import java.io.File;
7288
import java.lang.reflect.Field;
89+
import java.nio.ByteBuffer;
7390
import java.util.ArrayList;
7491
import java.util.HashMap;
7592
import java.util.List;
7693
import java.util.Map;
7794

7895
import io.flutter.plugin.common.MethodChannel.Result;
7996

97+
import android.graphics.Bitmap;
98+
import android.graphics.BitmapFactory;
99+
import android.graphics.Canvas;
100+
import android.graphics.PorterDuff;
101+
import android.media.Image;
102+
import android.util.Log;
103+
import androidx.camera.core.ImageProxy;
104+
80105
/**
81106
* The implementation of {@code getUserMedia} extracted into a separate file in order to reduce
82107
* complexity and to (somewhat) separate concerns.
@@ -112,6 +137,14 @@ class GetUserMediaImpl {
112137
private final SparseArray<MediaRecorderImpl> mediaRecorders = new SparseArray<>();
113138
private AudioDeviceInfo preferredInput = null;
114139

140+
private final SelfieSegmenterOptions segmentOptions = new SelfieSegmenterOptions.Builder()
141+
.setDetectorMode(SelfieSegmenterOptions.SINGLE_IMAGE_MODE)
142+
.build();
143+
private final Segmenter segmenter = Segmentation.getClient(segmentOptions);
144+
145+
private VideoSource vbVideoSource = null;
146+
private VideoSink vbVideoSink = null;
147+
115148
public void screenRequestPermissions(ResultReceiver resultReceiver) {
116149
final Activity activity = stateProvider.getActivity();
117150
if (activity == null) {
@@ -739,6 +772,9 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
739772

740773
PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory();
741774
VideoSource videoSource = pcFactory.createVideoSource(false);
775+
776+
vbVideoSource = videoSource;
777+
742778
String threadName = Thread.currentThread().getName() + "_texture_camera_thread";
743779
SurfaceTextureHelper surfaceTextureHelper =
744780
SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext());
@@ -802,6 +838,221 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
802838
return trackParams;
803839
}
804840

841+
void setVirtualBackground() {
842+
vbVideoSource.setVideoProcessor(new VideoProcessor() {
843+
@Override
844+
public void onCapturerStarted(boolean success) {
845+
// Xử lý khi bắt đầu capture video
846+
}
847+
848+
@Override
849+
public void onCapturerStopped() {
850+
// Xử lý khi dừng capture video
851+
}
852+
853+
@Override
854+
public void onFrameCaptured(VideoFrame frame) {
855+
// Chuyển đổi frame thành bitmap
856+
Bitmap bitmap = videoFrameToBitmap(frame);
857+
858+
// Xử lý segment với bitmap
859+
processSegmentation(bitmap);
860+
}
861+
862+
@Override
863+
public void setSink(VideoSink sink) {
864+
// Lưu sink để gửi frame đã được cập nhật trở lại WebRTC
865+
// Sink sẽ được sử dụng sau khi xử lý segment
866+
vbVideoSink = sink;
867+
}
868+
});
869+
}
870+
871+
public Bitmap videoFrameToBitmap(VideoFrame videoFrame) {
872+
VideoFrame.Buffer buffer = videoFrame.getBuffer();
873+
int width = buffer.getWidth();
874+
int height = buffer.getHeight();
875+
876+
if (buffer instanceof VideoFrame.TextureBuffer) {
877+
// Không hỗ trợ trực tiếp chuyển đổi từ TextureBuffer sang Bitmap
878+
return null;
879+
} else if (buffer instanceof VideoFrame.I420Buffer) {
880+
VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
881+
882+
int ySize = width * height;
883+
int uvSize = width * height / 4;
884+
885+
ByteBuffer dataY = i420Buffer.getDataY();
886+
ByteBuffer dataU = i420Buffer.getDataU();
887+
ByteBuffer dataV = i420Buffer.getDataV();
888+
889+
byte[] dataYArray = new byte[ySize];
890+
byte[] dataUArray = new byte[uvSize];
891+
byte[] dataVArray = new byte[uvSize];
892+
893+
dataY.get(dataYArray);
894+
dataU.get(dataUArray);
895+
dataV.get(dataVArray);
896+
897+
// Chuyển đổi từ YUV sang RGB
898+
int[] rgbData = convertYUVtoRGB(dataYArray, dataUArray, dataVArray, width, height);
899+
900+
// Tạo Bitmap từ dữ liệu RGB
901+
Bitmap bitmap = Bitmap.createBitmap(rgbData, width, height, Bitmap.Config.ARGB_8888);
902+
903+
return bitmap;
904+
}
905+
906+
return null;
907+
}
908+
909+
private int[] convertYUVtoRGB(byte[] yData, byte[] uData, byte[] vData, int width, int height) {
910+
int[] rgbData = new int[width * height];
911+
int uvIndex = 0;
912+
int yOffset = 0;
913+
914+
for (int y = 0; y < height; y++) {
915+
int uvRowStart = uvIndex;
916+
int uvRowOffset = y >> 1;
917+
918+
for (int x = 0; x < width; x++) {
919+
int yIndex = yOffset + x;
920+
int uvIndexOffset = uvRowStart + (x >> 1);
921+
922+
int yValue = yData[yIndex] & 0xFF;
923+
int uValue = uData[uvIndexOffset] & 0xFF;
924+
int vValue = vData[uvIndexOffset] & 0xFF;
925+
926+
int r = yValue + (int) (1.370705f * (vValue - 128));
927+
int g = yValue - (int) (0.698001f * (vValue - 128)) - (int) (0.337633f * (uValue - 128));
928+
int b = yValue + (int) (1.732446f * (uValue - 128));
929+
930+
r = Math.max(0, Math.min(255, r));
931+
g = Math.max(0, Math.min(255, g));
932+
b = Math.max(0, Math.min(255, b));
933+
934+
int pixelColor = 0xFF000000 | (r << 16) | (g << 8) | b;
935+
rgbData[y * width + x] = pixelColor;
936+
}
937+
938+
if (y % 2 == 1) {
939+
uvIndex = uvRowStart + width / 2;
940+
yOffset += width;
941+
}
942+
}
943+
944+
return rgbData;
945+
}
946+
947+
/**
 * Runs ML Kit selfie segmentation on the given camera frame bitmap and, on
 * success, builds a new VideoFrame from the segmentation result and pushes
 * it into WebRTC via {@link #vbVideoSink}. Asynchronous: returns immediately;
 * all work happens in the success/failure listeners.
 */
private void processSegmentation(Bitmap bitmap) {
    // Wrap the bitmap for ML Kit (rotation 0: bitmap is already upright).
    InputImage inputImage = InputImage.fromBitmap(bitmap, 0);

    // Run the segmentation.
    segmenter.process(inputImage)
        .addOnSuccessListener(new OnSuccessListener<SegmentationMask>() {
            @Override
            public void onSuccess(@NonNull SegmentationMask segmentationMask) {
                // Segmentation succeeded: read the confidence mask.
                ByteBuffer mask = segmentationMask.getBuffer();
                int maskWidth = segmentationMask.getWidth();
                int maskHeight = segmentationMask.getHeight();
                mask.rewind();

                // Convert the mask buffer into an array of pixel colors.
                int[] colors = maskColorsFromByteBuffer(mask, maskWidth, maskHeight);

                // Build the segmented bitmap from the color array.
                Bitmap segmentedBitmap = createBitmapFromColors(colors, maskWidth, maskHeight);

                // NOTE(review): the same bitmap is passed as both foreground and
                // background — presumably the original camera frame (or a virtual
                // background image) was meant to be the second argument; verify.
                Bitmap outputBitmap = drawSegmentedBackground(segmentedBitmap, segmentedBitmap);

                // Build a new VideoFrame from the processed bitmap.
                // NOTE(review): 180 is hard-coded — confirm against the capturer's
                // actual orientation instead of assuming a fixed rotation.
                int frameRotation = 180; // Frame rotation angle (customize as needed)
                long frameTimestamp = System.nanoTime(); // Frame timestamp (customize as needed)
                VideoFrame outputVideoFrame = createVideoFrame(outputBitmap, frameRotation, frameTimestamp);

                // Push the updated frame back into WebRTC.
                // NOTE(review): vbVideoSink is set asynchronously by setSink() and
                // may still be null here — confirm a null check is not needed.
                vbVideoSink.onFrame(outputVideoFrame);
            }
        })
        .addOnFailureListener(new OnFailureListener() {
            @Override
            public void onFailure(@NonNull Exception exception) {
                // Segmentation failed; log and drop this frame.
                Log.e(TAG, "Segmentation failed: " + exception.getMessage());
            }
        });
}
988+
989+
private Bitmap drawSegmentedBackground(Bitmap segmentedBitmap, Bitmap backgroundBitmap) {
990+
Bitmap outputBitmap = Bitmap.createBitmap(
991+
segmentedBitmap.getWidth(), segmentedBitmap.getHeight(), Bitmap.Config.ARGB_8888
992+
);
993+
Canvas canvas = new Canvas(outputBitmap);
994+
995+
// Vẽ ảnh nền đã phân đoạn lên canvas
996+
canvas.drawBitmap(backgroundBitmap, 0, 0, null);
997+
canvas.drawBitmap(segmentedBitmap, 0, 0, null);
998+
999+
return outputBitmap;
1000+
}
1001+
1002+
private VideoFrame createVideoFrame(Bitmap bitmap, int rotation, long timestampNs) {
1003+
ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
1004+
bitmap.copyPixelsToBuffer(buffer);
1005+
byte[] data = buffer.array();
1006+
1007+
int width = bitmap.getWidth();
1008+
int height = bitmap.getHeight();
1009+
int strideY = width;
1010+
int strideU = (width + 1) / 2;
1011+
int strideV = (width + 1) / 2;
1012+
1013+
byte[] dataU = new byte[width * height / 4];
1014+
byte[] dataV = new byte[width * height / 4];
1015+
for (int i = 0; i < width * height / 4; i++) {
1016+
dataU[i] = data[width * height + i];
1017+
dataV[i] = data[width * height + width * height / 4 + i];
1018+
}
1019+
1020+
Runnable releaseCallback = () -> {
1021+
// Thực hiện các thao tác giải phóng tài nguyên liên quan tại đây (nếu có)
1022+
};
1023+
1024+
VideoFrame.I420Buffer i420Buffer = JavaI420Buffer.wrap(
1025+
width,
1026+
height,
1027+
ByteBuffer.wrap(data),
1028+
strideY,
1029+
ByteBuffer.wrap(dataU),
1030+
strideU, ByteBuffer.wrap(dataV), strideV, releaseCallback
1031+
);
1032+
1033+
return new VideoFrame(i420Buffer, rotation, timestampNs);
1034+
}
1035+
1036+
1037+
// Hàm chuyển đổi buffer thành mảng màu
1038+
private int[] maskColorsFromByteBuffer(ByteBuffer buffer, int width, int height) {
1039+
// Chuyển đổi từ ByteBuffer thành mảng màu, tùy thuộc vào định dạng màu
1040+
// của buffer. Đảm bảo bạn sử dụng đúng định dạng màu tương ứng với
1041+
// phân đoạn của ML Kit.
1042+
// Trong ví dụ này, chúng tôi giả định rằng buffer có định dạng ARGB_8888.
1043+
1044+
// Ví dụ: chuyển đổi từ ByteBuffer thành mảng ARGB_8888
1045+
int[] colors = new int[width * height];
1046+
buffer.asIntBuffer().get(colors);
1047+
1048+
return colors;
1049+
}
1050+
1051+
// Hàm tạo bitmap từ mảng màu
1052+
private Bitmap createBitmapFromColors(int[] colors, int width, int height) {
1053+
return Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
1054+
}
1055+
8051056
void removeVideoCapturerSync(String id) {
8061057
synchronized (mVideoCapturers) {
8071058
VideoCapturerInfo info = mVideoCapturers.get(id);

example/android/build.gradle

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ buildscript {
66
}
77

88
dependencies {
9-
classpath 'com.android.tools.build:gradle:7.3.0'
9+
classpath 'com.android.tools.build:gradle:7.4.2'
1010
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
1111
}
1212
}

ios/flutter_webrtc.podspec

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,6 @@ A new flutter plugin project.
1616
s.public_header_files = 'Classes/**/*.h'
1717
s.dependency 'Flutter'
1818
s.dependency 'WebRTC-lbc', '116.5845.02'
19-
s.ios.deployment_target = '10.0'
19+
s.ios.deployment_target = '11.0'
2020
s.static_framework = true
2121
end

pubspec.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ name: flutter_webrtc
22
description: Flutter WebRTC plugin for iOS/Android/Desktop/Web, based on GoogleWebRTC.
33
version: 0.9.36+2
44
homepage: https://github.com/cloudwebrtc/flutter-webrtc
5+
publish_to: none
56
environment:
67
sdk: '>=2.12.0 <4.0.0'
78
flutter: '>=1.22.0'

0 commit comments

Comments
 (0)