Feature: fit landscape and portrait preview

pull/209/head
xufuji456 3 years ago
parent dbf34fc9c9
commit 03648a4066
  1. Live/src/main/java/com/frank/live/camera/Camera2Helper.java (24 changed lines)
  2. Live/src/main/java/com/frank/live/stream/VideoStreamNew.java (33 changed lines)
  3. Live/src/main/java/com/frank/live/util/YUVUtil.java (36 changed lines)
  4. app/src/main/AndroidManifest.xml (11 changed lines)

Live/src/main/java/com/frank/live/camera/Camera2Helper.java

@@ -36,6 +36,8 @@ import java.util.concurrent.locks.ReentrantLock;
 import androidx.annotation.NonNull;
+import com.frank.live.util.YUVUtil;
 /**
  * Camera2: open, preview and close
  * Created by frank on 2019/12/18.
@@ -67,11 +69,14 @@ public class Camera2Helper {
     private Size mPreviewSize;
+    private int rotateDegree = 0;

     private Camera2Helper(Builder builder) {
         mTextureView = builder.previewDisplayView;
         specificCameraId = builder.specificCameraId;
         camera2Listener = builder.camera2Listener;
         rotation = builder.rotation;
+        rotateDegree = builder.rotateDegree;
         previewViewSize = builder.previewViewSize;
         context = builder.context;
     }
@@ -479,12 +484,13 @@ public class Camera2Helper {
         private int rotation;
+        private int rotateDegree;
         private Context context;

         public Builder() {
         }

         public Builder previewOn(TextureView val) {
             previewDisplayView = val;
             return this;
@@ -500,6 +506,10 @@ public class Camera2Helper {
             return this;
         }

+        public Builder rotateDegree(int val) {
+            rotateDegree = val;
+            return this;
+        }
+
         public Builder specificCameraId(String val) {
             specificCameraId = val;
@@ -527,6 +537,7 @@ public class Camera2Helper {
     private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
         private byte[] temp = null;
         private byte[] yuvData = null;
+        private byte[] dstData = null;
         private final ReentrantLock lock = new ReentrantLock();

         @Override
@@ -569,9 +580,20 @@ public class Camera2Helper {
                     offset += len / 4;
                 }
+                if (rotateDegree == 90) {
+                    if (dstData == null) {
+                        dstData = new byte[len * 3 / 2];
+                    }
+                    YUVUtil.YUV420pRotate90(dstData, yuvData, width, height);
+                    if (camera2Listener != null) {
+                        camera2Listener.onPreviewFrame(dstData);
+                    }
+                } else {
                     if (camera2Listener != null) {
                         camera2Listener.onPreviewFrame(yuvData);
                     }
+                }
                 lock.unlock();
             }
             image.close();
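
Note on the buffer size in the new rotation branch: assuming len is the Y-plane size (width * height), as the plane-copy loop just above it suggests, len * 3 / 2 is exactly the byte count of a planar YUV420 (I420) frame. A minimal sketch with illustrative numbers only:

    // Illustrative numbers; in the helper the real size comes from the ImageReader planes.
    int width = 1280, height = 720;
    int ySize = width * height;     // full-resolution luma plane
    int uSize = ySize / 4;          // chroma subsampled 2x2
    int vSize = ySize / 4;
    byte[] dstData = new byte[ySize + uSize + vSize];   // == width * height * 3 / 2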

Live/src/main/java/com/frank/live/stream/VideoStreamNew.java

@@ -6,6 +6,7 @@ import android.graphics.Point;
 import android.graphics.SurfaceTexture;
 import android.util.Log;
 import android.util.Size;
+import android.view.Surface;
 import android.view.SurfaceHolder;
 import android.view.TextureView;
 import android.view.View;
@@ -24,6 +25,7 @@ public class VideoStreamNew extends VideoStreamBase
     private static final String TAG = VideoStreamNew.class.getSimpleName();
+    private int rotation = 0;
     private boolean isLiving;
     private final Context mContext;
     private Camera2Helper camera2Helper;
@@ -47,9 +49,8 @@ public class VideoStreamNew extends VideoStreamBase
      * start previewing
      */
     private void startPreview() {
-        int rotateDegree = 0;
         if (mContext instanceof Activity) {
-            rotateDegree = ((Activity) mContext).getWindowManager().getDefaultDisplay().getRotation();
+            rotation = ((Activity) mContext).getWindowManager().getDefaultDisplay().getRotation();
         }
         camera2Helper = new Camera2Helper.Builder()
                 .cameraListener(this)
@@ -57,7 +58,8 @@ public class VideoStreamNew extends VideoStreamBase
                 .context(mContext.getApplicationContext())
                 .previewOn(mTextureView)
                 .previewViewSize(new Point(mVideoParam.getWidth(), mVideoParam.getHeight()))
-                .rotation(rotateDegree)
+                .rotation(rotation)
+                .rotateDegree(getPreviewDegree(rotation))
                 .build();
         camera2Helper.start();
     }
@@ -137,12 +139,33 @@ public class VideoStreamNew extends VideoStreamBase
         }
     }

+    private int getPreviewDegree(int rotation) {
+        switch (rotation) {
+            case Surface.ROTATION_0:
+                return 90;
+            case Surface.ROTATION_90:
+                return 0;
+            case Surface.ROTATION_180:
+                return 270;
+            case Surface.ROTATION_270:
+                return 180;
+            default:
+                return -1;
+        }
+    }
+
     @Override
     public void onCameraOpened(Size previewSize, int displayOrientation) {
         Log.i(TAG, "onCameraOpened previewSize=" + previewSize.toString());
         if (mCallback != null && mVideoParam != null) {
-            mCallback.onVideoCodecInfo(previewSize.getWidth(), previewSize.getHeight(),
-                    mVideoParam.getFrameRate(), mVideoParam.getBitRate());
+            int width = previewSize.getWidth();
+            int height = previewSize.getHeight();
+            if (getPreviewDegree(rotation) == 90 || getPreviewDegree(rotation) == 270) {
+                int temp = width;
+                width = height;
+                height = temp;
+            }
+            mCallback.onVideoCodecInfo(width, height, mVideoParam.getFrameRate(), mVideoParam.getBitRate());
         }
     }
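
Why onCameraOpened now swaps the codec dimensions: Camera2 reports preview sizes in sensor (landscape) orientation, but when getPreviewDegree() yields 90 or 270 the frames delivered to the encoder are rotated, so the encoder must be configured with width and height exchanged. A small worked example (numbers are illustrative only):

    // Illustrative only: 1280x720 sensor size, device held in portrait.
    int sensorWidth = 1280, sensorHeight = 720;
    int degree = 90;                                        // from getPreviewDegree(rotation)
    boolean swap = (degree == 90 || degree == 270);
    int codecWidth  = swap ? sensorHeight : sensorWidth;    // 720
    int codecHeight = swap ? sensorWidth  : sensorHeight;   // 1280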

Live/src/main/java/com/frank/live/util/YUVUtil.java

@@ -19,23 +19,22 @@ public class YUVUtil {
         for (int j = 0; j < height; j++) {
             for (int i = 0; i < width; i++) {
-                a = (input[index] & 0xff000000) >> 24; // a is not used obviously
+                a = (input[index] & 0xff000000) >> 24;
                 R = (input[index] & 0xff0000) >> 16;
                 G = (input[index] & 0xff00) >> 8;
                 B = (input[index] & 0xff);
-                // well known RGB to YUV algorithm
+                // RGB to YUV algorithm
                 Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                 U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                 V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
                 // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
-                // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
-                // pixel AND every other scanLine.
-                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
+                // meaning for every 4 Y pixels there are 1 V and 1 U.
+                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : (Math.min(Y, 255)));
                 if (j % 2 == 0 && index % 2 == 0) {
-                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
-                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
+                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : (Math.min(V, 255)));
+                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : (Math.min(U, 255)));
                 }
                 index++;
@@ -44,4 +43,27 @@ public class YUVUtil {
         return yuv420sp;
     }
+
+    public static void YUV420pRotate90(byte[] dstData, byte[] data, int width, int height) {
+        int n = 0;
+        int wh = width * height;
+        //y
+        for (int j = 0; j < width; j++) {
+            for (int i = height - 1; i >= 0; i--) {
+                dstData[n++] = data[width * i + j];
+            }
+        }
+        //u
+        for (int i = 0; i < width / 2; i++) {
+            for (int j = 1; j <= height / 2; j++) {
+                dstData[n++] = data[wh + ((height / 2 - j) * (width / 2) + i)];
+            }
+        }
+        //v
+        for (int i = 0; i < width / 2; i++) {
+            for (int j = 1; j <= height / 2; j++) {
+                dstData[n++] = data[wh + wh / 4 + ((height / 2 - j) * (width / 2) + i)];
+            }
+        }
+    }
 }
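
A minimal usage sketch for the new helper (array names here are illustrative): the width and height arguments describe the source frame, and the output is an I420 frame of the same byte count with the two dimensions exchanged, which is what the codec-size swap in VideoStreamNew accounts for.

    int width = 1280, height = 720;                  // source frame dimensions
    byte[] src = new byte[width * height * 3 / 2];   // planar YUV420 (I420) input
    byte[] dst = new byte[width * height * 3 / 2];   // rotated output, same size
    YUVUtil.YUV420pRotate90(dst, src, width, height);
    // dst now holds a 720x1280 I420 frame (the source rotated by 90 degrees)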

app/src/main/AndroidManifest.xml

@@ -38,16 +38,13 @@
         <activity android:name=".activity.PushActivity" />
         <activity
             android:name=".activity.LiveActivity"
-            android:configChanges="orientation" />
+            android:configChanges="orientation|screenSize" />
         <activity
             android:name=".activity.FilterActivity"
             android:screenOrientation="landscape" />
-        <activity
-            android:name=".activity.VideoPreviewActivity" />
-        <activity
-            android:name=".activity.ProbeFormatActivity" />
-        <activity
-            android:name=".activity.AudioEffectActivity" />
+        <activity android:name=".activity.VideoPreviewActivity" />
+        <activity android:name=".activity.ProbeFormatActivity" />
+        <activity android:name=".activity.AudioEffectActivity" />
         <activity android:name=".activity.AudioPlayActivity" />
         <activity android:name=".activity.EqualizerActivity" />
     </application>
