android - How to crop the visible part of a landscape video in Android?


  • I am using a TextureView to pan and crop a landscape video. I can pan the landscape video from left to right and back using this sample:
    https://github.com/crust87/Android-VideoCropView

  • FFmpeg can crop a specific part of a video with this command:
    ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4

  • How can I crop only the part of the video that is visible in the TextureView and save it to local storage on Android?

    crop=720:1088:0:0 hard-codes the output width, height and x/y offsets (the filter syntax is crop=out_w:out_h:x:y), and that crops fine. But how can I get the width and height of the part of the video visible in the TextureView, so that only the visible region is cropped and saved to local storage on Android? (A sketch of deriving these values from the VideoCropView getters appears after the class below.)


    public class MainActivity extends Activity {

    // Layout Components
    private FrameLayout top_frame;

    // Attributes
    private String originalPath;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.check);
    top_frame = (FrameLayout)findViewById(R.id.top_frame);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == 1000 && resultCode == RESULT_OK) {
    final VideoCropView mVideoCropView = new VideoCropView(this);
    mVideoCropView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {

    @Override
    public void onPrepared(MediaPlayer mp) {
    mVideoCropView.start();
    }
    });
    top_frame.addView(mVideoCropView);
    Uri selectedVideoUri = data.getData();

    originalPath = getRealPathFromURI(selectedVideoUri);

    mVideoCropView.setVideoURI(selectedVideoUri);

    mVideoCropView.seekTo(1);
    }
    }

    public void onButtonLoadClick(View v) {
    top_frame.removeAllViews();
    Intent lIntent = new Intent(Intent.ACTION_PICK);
    lIntent.setType("video/*");
    lIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    startActivityForResult(lIntent, 1000);
    }

    public String getRealPathFromURI(Uri contentUri) { // getting image path from gallery.
    Cursor cursor = null;
    try {
    String[] proj = { MediaStore.Images.Media.DATA };
    cursor = getApplicationContext().getContentResolver().query(contentUri, proj, null, null, null);
    int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
    cursor.moveToFirst();
    return cursor.getString(column_index);
    } finally {
    if (cursor != null) {
    cursor.close();
    }
    }
    }

    }

    VideoCropView:
    public class VideoCropView extends TextureView implements MediaPlayerControl {
    // Constants
    private static final String LOG_TAG = "VideoCropView";
    private static final int STATE_ERROR = -1;
    private static final int STATE_IDLE = 0;
    private static final int STATE_PREPARING = 1;
    private static final int STATE_PREPARED = 2;
    private static final int STATE_PLAYING = 3;
    private static final int STATE_PAUSED = 4;
    private static final int STATE_PLAYBACK_COMPLETED = 5;

    // MediaPlayer Components
    protected Context mContext;
    private MediaPlayer mMediaPlayer;
    private Surface mSurface;
    private OnInfoListener mOnInfoListener;
    private OnCompletionListener mOCompletionListener;
    private OnErrorListener mOnErrorListener;
    private OnPreparedListener mOnPreparedListener;
    private OnTranslatePositionListener mOnTranslatePositionListener;

    // CropView Components
    private Matrix mMatrix;

    // MediaPlayer Attributes
    protected Uri mUri;
    private int mCurrentBufferPercentage;
    private int mSeekWhenPrepared;
    protected int mVideoWidth;
    protected int mVideoHeight;

    // CropView Attributes
    private float mRatioWidth;
    private float mRatioHeight;
    private float mPositionX;
    private float mPositionY;
    private float mBoundX;
    private float mBoundY;
    private int mRotate;
    private float mScaleX;
    private float mScaleY;
    private float mScale;

    // Working Variables
    private int mCurrentState = STATE_IDLE;
    private int mTargetState = STATE_IDLE;

    // Touch Event
    // past position x, y and move point
    float mPastX;
    float mPastY;
    float mTouchDistance;
    private Context context;

    // Constructors
    public VideoCropView(final Context context) {
    super(context);
    mContext = context;

    initAttributes();
    initVideoView();
    }

    public VideoCropView(final Context context, final AttributeSet attrs) {
    super(context, attrs);
    mContext = context;

    initAttributes(context, attrs, 0);
    initVideoView();
    }

    public VideoCropView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    mContext = context;

    initAttributes(context, attrs, defStyleAttr);
    initVideoView();
    }

    private void initAttributes() {
    mRatioWidth = 1;
    mRatioHeight = 1;
    }

    private void initAttributes(Context context, AttributeSet attrs, int defStyleAttr) {
    TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.VideoCropView, defStyleAttr, 0);

    mRatioWidth = typedArray.getInteger(R.styleable.VideoCropView_ratio_width, 3);
    mRatioHeight = typedArray.getInteger(R.styleable.VideoCropView_ratio_height, 4);
    }

    @Override
    protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
    int heightLayout;
    int widthLayout;
    widthLayout = MeasureSpec.getSize(widthMeasureSpec);
    heightLayout = MeasureSpec.getSize(heightMeasureSpec);
    setMeasuredDimension(widthLayout, heightLayout);

    /*if(widthMeasureSpec < heightMeasureSpec){

    int width = MeasureSpec.getSize(widthMeasureSpec);
    int height = (int) ((width / mRatioWidth) * mRatioHeight);


    setMeasuredDimension(width, height);

    }else{

    int width = MeasureSpec.getSize(widthMeasureSpec);
    int height =MeasureSpec.getSize(heightMeasureSpec);
    setMeasuredDimension(width, height);

    }
    */
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
    if(mCurrentState == STATE_ERROR || mCurrentState == STATE_IDLE || mCurrentState == STATE_PREPARING) {
    return false;
    }

    switch (event.getAction()) {
    case MotionEvent.ACTION_DOWN:
    mPastX = event.getX();
    mPastY = event.getY();
    mTouchDistance = 0;
    case MotionEvent.ACTION_MOVE:
    if(mBoundX!=0 || mBoundY!=0) {
    float dx = event.getX() - mPastX;
    float dy = event.getY() - mPastY;
    updateViewPosition(dx, dy);
    mPastX = event.getX();
    mPastY = event.getY();
    mTouchDistance += (Math.abs(dx) + Math.abs(dy));
    }
    break;
    case MotionEvent.ACTION_UP:
    if (mTouchDistance < 25) {
    if (isPlaying()) {
    pause();
    } else {
    start();
    }
    }

    mTouchDistance = 0;
    break;
    }

    return true;
    }

    @Override
    public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
    super.onInitializeAccessibilityEvent(event);
    event.setClassName(VideoView.class.getName());
    }

    @Override
    public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
    super.onInitializeAccessibilityNodeInfo(info);
    info.setClassName(VideoView.class.getName());
    }

    public int resolveAdjustedSize(int desiredSize, int measureSpec) {
    Log.d(LOG_TAG, "Resolve called.");
    int result = desiredSize;
    int specMode = MeasureSpec.getMode(measureSpec);
    int specSize = MeasureSpec.getSize(measureSpec);

    switch (specMode) {
    case MeasureSpec.UNSPECIFIED:
    /*
    * Parent says we can be as big as we want. Just don't be larger
    * than max size imposed on ourselves.
    */
    result = desiredSize;
    break;

    case MeasureSpec.AT_MOST:
    /*
    * Parent says we can be as big as we want, up to specSize. Don't be
    * larger than specSize, and don't be larger than the max size
    * imposed on ourselves.
    */
    result = Math.min(desiredSize, specSize);
    break;

    case MeasureSpec.EXACTLY:
    // No choice. Do what we are told.
    result = specSize;
    break;
    }
    return result;
    }

    public void initVideoView() {

    mVideoHeight = 0;
    mVideoWidth = 0;
    setFocusable(false);
    setSurfaceTextureListener(mSurfaceTextureListener);
    mCurrentState = STATE_IDLE;
    mTargetState = STATE_IDLE;

    }

    public void setVideoPath(String path) {
    if (path != null) {
    setVideoURI(Uri.parse(path));
    }
    }

    public void setVideoURI(Uri pVideoURI) {
    mUri = pVideoURI;
    mSeekWhenPrepared = 0;

    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    retriever.setDataSource(mContext, pVideoURI);

    // create thumbnail bitmap
    if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
    String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);

    try {
    mRotate = Integer.parseInt(rotation);
    } catch(NumberFormatException e) {
    mRotate = 0;
    }
    }

    retriever.release();

    openVideo();
    requestLayout();
    invalidate();
    }

    public void stopPlayback() {
    if (mMediaPlayer != null) {
    mMediaPlayer.stop();
    mMediaPlayer.release();
    mMediaPlayer = null;
    mCurrentState = STATE_IDLE;
    mTargetState = STATE_IDLE;
    }
    }

    public void openVideo() {
    if ((mUri == null) || (mSurface == null)) {
    // not ready for playback just yet, will try again later
    return;
    }
    // Tell the music playback service to pause
    // TODO: these constants need to be published somewhere in the
    // framework.
    Intent intent = new Intent("com.android.music.musicservicecommand");
    intent.putExtra("command", "pause");
    mContext.sendBroadcast(intent);

    // we shouldn't clear the target state, because somebody might have
    // called start() previously
    release(false);
    try {
    mMediaPlayer = new MediaPlayer();
    // TODO: create SubtitleController in MediaPlayer, but we need
    // a context for the subtitle renderers

    mMediaPlayer.setOnPreparedListener(mPreparedListener);
    mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
    mMediaPlayer.setOnCompletionListener(mCompletionListener);
    mMediaPlayer.setOnErrorListener(mErrorListener);
    mMediaPlayer.setOnInfoListener(mInfoListener);
    mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
    mCurrentBufferPercentage = 0;
    mMediaPlayer.setDataSource(mContext, mUri);
    mMediaPlayer.setSurface(mSurface);
    mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);

    mMediaPlayer.setScreenOnWhilePlaying(true);
    mMediaPlayer.prepareAsync();
    mMediaPlayer.setLooping(true);
    mCurrentState = STATE_PREPARING;
    } catch (IllegalStateException e) {
    mCurrentState = STATE_ERROR;
    mTargetState = STATE_ERROR;
    e.printStackTrace();
    } catch (IOException e) {
    mCurrentState = STATE_ERROR;
    mTargetState = STATE_ERROR;
    e.printStackTrace();
    }
    }

    private OnVideoSizeChangedListener mSizeChangedListener = new OnVideoSizeChangedListener() {
    @Override
    public void onVideoSizeChanged(final MediaPlayer mp, final int width,
    final int height) {
    mVideoWidth = mp.getVideoWidth();
    mVideoHeight = mp.getVideoHeight();

    if (mVideoWidth != 0 && mVideoHeight != 0) {
    requestLayout();
    if(mVideoWidth >= mVideoHeight)
    initVideo();
    }
    }
    };

    private OnPreparedListener mPreparedListener = new OnPreparedListener() {
    @Override
    public void onPrepared(final MediaPlayer mp) {
    mCurrentState = STATE_PREPARED;

    if (mOnPreparedListener != null) {
    mOnPreparedListener.onPrepared(mp);
    }

    mVideoWidth = mp.getVideoWidth();
    mVideoHeight = mp.getVideoHeight();

    int seekToPosition = mSeekWhenPrepared; // mSeekWhenPrepared may be
    // changed after seekTo()
    if (seekToPosition != 0) {
    seekTo(seekToPosition);
    }

    if ((mVideoWidth != 0) && (mVideoHeight != 0)) {
    if(mVideoWidth >= mVideoHeight) initVideo();

    if (mTargetState == STATE_PLAYING) {
    start();
    }
    } else {
    // We don't know the video size yet, but should start anyway.
    // The video size might be reported to us later.
    if (mTargetState == STATE_PLAYING) {
    start();
    }
    }
    }
    };

    private OnCompletionListener mCompletionListener = new OnCompletionListener() {
    @Override
    public void onCompletion(final MediaPlayer mp) {
    mCurrentState = STATE_PLAYBACK_COMPLETED;
    mTargetState = STATE_PLAYBACK_COMPLETED;

    if (mOCompletionListener != null) {
    mOCompletionListener.onCompletion(mMediaPlayer);
    }
    }
    };

    private OnInfoListener mInfoListener = new OnInfoListener() {
    public boolean onInfo(MediaPlayer mp, int arg1, int arg2) {
    if (mOnInfoListener != null) {
    mOnInfoListener.onInfo(mp, arg1, arg2);
    }
    return true;
    }
    };

    private OnErrorListener mErrorListener = new OnErrorListener() {
    @Override
    public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
    Log.d(LOG_TAG, "Error: " + framework_err + "," + impl_err);
    mCurrentState = STATE_ERROR;
    mTargetState = STATE_ERROR;

    /* If an error handler has been supplied, use it and finish. */
    if (mOnErrorListener != null) {
    if (mOnErrorListener.onError(mMediaPlayer, framework_err,
    impl_err)) {
    return true;
    }
    }
    return true;
    }
    };

    private OnBufferingUpdateListener mBufferingUpdateListener = new OnBufferingUpdateListener() {
    @Override
    public void onBufferingUpdate(final MediaPlayer mp, final int percent) {
    mCurrentBufferPercentage = percent;
    }
    };

    public void setOnPreparedListener(OnPreparedListener listener) {
    mOnPreparedListener = listener;
    }

    public void setOnCompletionListener(OnCompletionListener listener) {
    mOCompletionListener = listener;
    }

    public void setOnErrorListener(OnErrorListener listener) {
    mOnErrorListener = listener;
    }

    public void setOnInfoListener(OnInfoListener listener) {
    mOnInfoListener = listener;
    }

    private void release(boolean cleartargetstate) {
    if (mMediaPlayer != null) {
    mMediaPlayer.reset();
    mMediaPlayer.release();
    mMediaPlayer = null;
    mCurrentState = STATE_IDLE;
    if (cleartargetstate) {
    mTargetState = STATE_IDLE;
    }
    }
    }

    @Override
    public void start() {
    if (isInPlaybackState()) {
    mMediaPlayer.start();
    mCurrentState = STATE_PLAYING;

    }
    mTargetState = STATE_PLAYING;
    }

    @Override
    public void pause() {
    if (isInPlaybackState()) {
    if (mMediaPlayer.isPlaying()) {
    mMediaPlayer.pause();
    mCurrentState = STATE_PAUSED;
    }
    }

    mTargetState = STATE_PAUSED;
    }

    @Override
    public int getDuration() {
    if (isInPlaybackState()) {
    return mMediaPlayer.getDuration();
    }

    return -1;
    }

    @Override
    public int getCurrentPosition() {
    if (isInPlaybackState()) {
    return mMediaPlayer.getCurrentPosition();
    }
    return 0;
    }

    @Override
    public void seekTo(int msec) {
    if (isInPlaybackState()) {
    mMediaPlayer.seekTo(msec);
    mSeekWhenPrepared = 0;
    } else {
    mSeekWhenPrepared = msec;
    }
    }

    @Override
    public boolean isPlaying() {
    return isInPlaybackState() && mMediaPlayer.isPlaying();
    }

    @Override
    public int getBufferPercentage() {
    if (mMediaPlayer != null) {
    return mCurrentBufferPercentage;
    }
    return 0;
    }

    private boolean isInPlaybackState() {
    return (mMediaPlayer != null && mCurrentState != STATE_ERROR
    && mCurrentState != STATE_IDLE && mCurrentState != STATE_PREPARING);
    }

    @Override
    public boolean canPause() {
    return false;
    }

    @Override
    public boolean canSeekBackward() {
    return false;
    }

    @Override
    public boolean canSeekForward() {
    return false;
    }

    @Override
    public int getAudioSessionId() {
    return -1;
    }

    SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    mSurface = new Surface(surface);
    openVideo();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    boolean isValidState = (mTargetState == STATE_PLAYING);
    boolean hasValidSize = (mVideoWidth == width && mVideoHeight == height);
    if (mMediaPlayer != null && isValidState && hasValidSize) {
    start();
    }
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    if (mMediaPlayer != null) {
    mMediaPlayer.reset();
    mMediaPlayer.release();
    mMediaPlayer = null;
    }

    if (mSurface != null) {
    mSurface.release();
    mSurface = null;
    }

    return true;
    }

    @Override
    public void onSurfaceTextureUpdated(final SurfaceTexture surface) {

    }
    };

    @Override
    protected void onVisibilityChanged(View changedView, int visibility) {
    super.onVisibilityChanged(changedView, visibility);

    if (visibility == View.INVISIBLE || visibility == View.GONE) {
    if (isPlaying()) {
    stopPlayback();
    }
    }
    }

    public float getScale() {
    return mScale;
    }

    private void initVideo() {
    try {
    int width = getWidth();
    int height = getHeight();
    mScaleX = 1.0f;
    mScaleY = 1.0f;
    mPositionX = 0;
    mPositionY = 0;
    mBoundX = 0;
    mBoundY = 0;
    mMatrix = new Matrix();

    mScaleX = (float) mVideoWidth / width;
    mScaleY = (float) mVideoHeight / height;

    mBoundX = width - mVideoWidth / mScaleY;
    mBoundY = height - mVideoHeight / mScaleX;

    if (mScaleX < mScaleY) {
    mScale = mScaleX;
    mScaleY = mScaleY * (1.0f / mScaleX);
    mScaleX = 1.0f;
    mBoundX = 0;
    } else {
    mScale = mScaleY;
    mScaleX = mScaleX * (1.0f / mScaleY);
    mScaleY = 1.0f;
    mBoundY = 0;
    }

    mMatrix = new Matrix();
    mMatrix.setScale(mScaleX, mScaleY);
    setTransform(mMatrix);
    } catch (NumberFormatException e) {
    e.printStackTrace();
    }
    }

    public void updateViewPosition(float x, float y) {

    float nextX = mPositionX + x;
    float nextY = mPositionY + y;

    if(mScaleX == 1.0f) {
    x = 0;
    } else {
    if(nextX > 0) {
    x = -mPositionX;
    mPositionX = mPositionX + x;
    } else if(nextX < mBoundX) {
    x = mBoundX - mPositionX;
    mPositionX = mPositionX + x;
    } else {
    mPositionX = nextX;
    }
    }

    if(mScaleY == 1.0f) {
    y = 0;
    } else {
    if(nextY > 0) {
    y = -mPositionY;
    mPositionY = mPositionY + y;
    } else if(nextY < mBoundY) {
    y = mBoundY - mPositionY;
    mPositionY = mPositionY + y;
    } else {
    mPositionY = nextY;
    }
    }

    if(mOnTranslatePositionListener != null) {
    mOnTranslatePositionListener.onTranslatePosition(mPositionX, mPositionY, mPositionX * -mScale, mPositionY * -mScale);
    }

    mMatrix.postTranslate(x, y);
    setTransform(mMatrix);
    invalidate();
    }

    // public void setOriginalRatio() {
    // if(mVideoWidth != 0 && mVideoHeight != 0) {
    // int gcd = gcd(mVideoWidth, mVideoHeight);
    // setRatio(mVideoWidth / gcd, mVideoHeight / gcd);
    // }
    // }

    public int gcd(int n, int m) {
    while (m != 0) {
    int t = n % m;
    n = m;
    m = t;
    }

    return Math.abs(n);
    }

    // public void setRatio(float ratioWidth, float ratioHeight) {
    // mRatioWidth = ratioWidth;
    // mRatioHeight = ratioHeight;
    //
    // int seek = getCurrentPosition();
    //
    // requestLayout();
    // invalidate();
    // openVideo();
    //
    // seekTo(seek);
    // }


    public float getRatioWidth() {
    return mRatioWidth;
    }

    public float getRatioHeight() {
    return mRatioHeight;
    }

    public float getRealPositionX() {
    return mPositionX * -mScale;
    }

    public float getRealPositionY() {
    return mPositionY * -mScale;
    }

    public int getVideoWidth() {
    return mVideoWidth;
    }

    public int getVideoHeight() {
    return mVideoHeight;
    }

    public int getRotate() {
    return mRotate;
    }

    public void setOnTranslatePositionListener(OnTranslatePositionListener pOnTranslatePositionListener) {
    mOnTranslatePositionListener = pOnTranslatePositionListener;
    }

    public void setContext(Context context) {
    this.context = context;
    }

    public interface OnTranslatePositionListener {
    public abstract void onTranslatePosition(float x, float y, float rx, float ry);
    }
    }
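
    For reference, the crop parameters the question asks about can be derived from the getters above. The following is a minimal sketch, assuming (as initVideo() and getRealPositionX()/getRealPositionY() suggest) that getScale() is the ratio of video pixels to view pixels for the visible area and that the real-position getters return the pan offset in video pixels; verify those semantics against the class above before relying on it:

    // Hypothetical helper: builds an FFmpeg crop command for the region currently
    // visible in a VideoCropView.
    public static String buildCropCommand(VideoCropView cropView, String inPath, String outPath) {
        float scale = cropView.getScale();                 // video pixels per view pixel
        int cropW = (int) (cropView.getWidth() * scale);   // visible width in video pixels
        int cropH = (int) (cropView.getHeight() * scale);  // visible height in video pixels
        int cropX = (int) cropView.getRealPositionX();     // horizontal pan offset in video pixels
        int cropY = (int) cropView.getRealPositionY();     // vertical pan offset in video pixels

        // Clamp to the frame; integer rounding can push the crop one pixel past the edge.
        cropW = Math.min(cropW, cropView.getVideoWidth() - cropX);
        cropH = Math.min(cropH, cropView.getVideoHeight() - cropY);

        // Filter syntax: crop=out_w:out_h:x:y; audio is copied unchanged.
        return "ffmpeg -y -i " + inPath
                + " -filter:v crop=" + cropW + ":" + cropH + ":" + cropX + ":" + cropY
                + " -c:a copy " + outPath;
    }

    The resulting command string can then be run by whatever FFmpeg wrapper is available, for example the FFmpeg4Android example below.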

    FFmpeg is used to crop the specific part:

    ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4


    public class SimpleExample extends Activity {

    String workFolder = null;
    String demoVideoFolder = null;
    String demoVideoPath = null;
    String vkLogPath = null;
    private boolean commandValidationFailedFlag = false;


    @Override
    public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.ffmpeg_demo_client_1);

    demoVideoFolder = Environment.getExternalStorageDirectory().getAbsolutePath() + "/videokit/";
    demoVideoPath = demoVideoFolder + "in.mp4";

    Log.i(Prefs.TAG, getString(R.string.app_name) + " version: " + GeneralUtils.getVersionName(getApplicationContext()) );
    workFolder = getApplicationContext().getFilesDir().getAbsolutePath() + "/";
    //Log.i(Prefs.TAG, "workFolder: " + workFolder);
    vkLogPath = workFolder + "vk.log";

    GeneralUtils.copyLicenseFromAssetsToSDIfNeeded(this, workFolder);
    GeneralUtils.copyDemoVideoFromAssetsToSDIfNeeded(this, demoVideoFolder);

    Button invoke = (Button)findViewById(R.id.invokeButton);
    invoke.setOnClickListener(new OnClickListener() {
    public void onClick(View v){
    Log.i(Prefs.TAG, "run clicked.");
    if (GeneralUtils.checkIfFileExistAndNotEmpty(demoVideoPath)) {
    new TranscdingBackground(SimpleExample.this).execute();
    }
    else {
    Toast.makeText(getApplicationContext(), demoVideoPath + " not found", Toast.LENGTH_LONG).show();
    }
    }
    });

    int rc = GeneralUtils.isLicenseValid(getApplicationContext(), workFolder);
    Log.i(Prefs.TAG, "License check RC: " + rc);
    }

    public class TranscdingBackground extends AsyncTask<String, Integer, Integer>
    {

    ProgressDialog progressDialog;
    Activity _act;
    String commandStr;

    public TranscdingBackground (Activity act) {
    _act = act;
    }



    @Override
    protected void onPreExecute() {
    EditText commandText = (EditText)findViewById(R.id.CommandText);
    commandStr = commandText.getText().toString();

    progressDialog = new ProgressDialog(_act);
    progressDialog.setMessage("FFmpeg4Android Transcoding in progress...");
    progressDialog.show();

    }

    protected Integer doInBackground(String... paths) {
    Log.i(Prefs.TAG, "doInBackground started...");

    // delete previous log
    boolean isDeleted = GeneralUtils.deleteFileUtil(workFolder + "/vk.log");
    Log.i(Prefs.TAG, "vk deleted: " + isDeleted);

    PowerManager powerManager = (PowerManager)_act.getSystemService(Activity.POWER_SERVICE);
    WakeLock wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "VK_LOCK");
    Log.d(Prefs.TAG, "Acquire wake lock");
    wakeLock.acquire();

    ///////////// Set Command using code (overriding the UI EditText) /////
    //commandStr = "ffmpeg -y -i /sdcard/videokit/in.mp4 -strict experimental -s 320x240 -r 30 -aspect 3:4 -ab 48000 -ac 2 -ar 22050 -vcodec mpeg4 -b 2097152 /sdcard/videokit/out.mp4";
    //String[] complexCommand = {"ffmpeg", "-y" ,"-i", "/sdcard/videokit/in.mp4","-strict","experimental","-s", "160x120","-r","25", "-vcodec", "mpeg4", "-b", "150k", "-ab","48000", "-ac", "2", "-ar", "22050", "/sdcard/videokit/out.mp4"};
    ///////////////////////////////////////////////////////////////////////


    LoadJNI vk = new LoadJNI();
    try {


    vk.run(GeneralUtils.utilConvertToComplex(commandStr), workFolder, getApplicationContext());




    GeneralUtils.copyFileToFolder(vkLogPath, demoVideoFolder);


    } catch (Throwable e) {
    Log.e(Prefs.TAG, "vk run exeption.", e);
    }
    finally {
    if (wakeLock.isHeld())
    wakeLock.release();
    else{
    Log.i(Prefs.TAG, "Wake lock is already released, doing nothing");
    }
    }
    Log.i(Prefs.TAG, "doInBackground finished");
    return Integer.valueOf(0);
    }

    protected void onProgressUpdate(Integer... progress) {
    }

    @Override
    protected void onCancelled() {
    Log.i(Prefs.TAG, "onCancelled");
    //progressDialog.dismiss();
    super.onCancelled();
    }


    @Override
    protected void onPostExecute(Integer result) {
    Log.i(Prefs.TAG, "onPostExecute");
    progressDialog.dismiss();
    super.onPostExecute(result);

    // finished Toast
    String rc = null;
    if (commandValidationFailedFlag) {
    rc = "Command Vaidation Failed";
    }
    else {
    rc = GeneralUtils.getReturnCodeFromLog(vkLogPath);
    }
    final String status = rc;
    SimpleExample.this.runOnUiThread(new Runnable() {
    public void run() {
    Toast.makeText(SimpleExample.this, status, Toast.LENGTH_LONG).show();
    if (status.equals("Transcoding Status: Failed")) {
    Toast.makeText(SimpleExample.this, "Check: " + vkLogPath + " for more information.", Toast.LENGTH_LONG).show();
    }
    }
    });
    }

    }


    }
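
    If the command is computed from the crop view rather than typed into the EditText, the same invocation pattern as doInBackground() above applies. A short sketch, assuming the hypothetical buildCropCommand() helper from earlier and a reference to the VideoCropView (mVideoCropView); paths reuse the demo folders from above:

    // Sketch only: run a computed crop command through FFmpeg4Android,
    // using the same calls as doInBackground() above.
    String inPath = demoVideoFolder + "in.mp4";
    String outPath = demoVideoFolder + "out.mp4";
    String commandStr = buildCropCommand(mVideoCropView, inPath, outPath);

    LoadJNI vk = new LoadJNI();
    try {
        vk.run(GeneralUtils.utilConvertToComplex(commandStr), workFolder, getApplicationContext());
        GeneralUtils.copyFileToFolder(vkLogPath, demoVideoFolder);
    } catch (Throwable e) {
        Log.e(Prefs.TAG, "vk run exception.", e);
    }

    As in the AsyncTask above, this should run off the UI thread, since the call blocks until FFmpeg finishes.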

    Best Answer

    Try getBitmap: TextureView.getBitmap on the VideoCropView returns a bitmap at the wanted resolution.

    Then you can crop it with Bitmap.createBitmap, like this:
    resizedbitmap = Bitmap.createBitmap(bmp, 0, 0, yourwidth, yourheight);
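
    A minimal sketch of that suggestion, assuming the goal is to save the currently visible frame as an image: getBitmap() snapshots the single frame the TextureView is currently showing, which is already just the visible region, so createBitmap() is only needed if a further sub-rectangle is wanted. The file name and JPEG quality below are placeholders:

    // Hypothetical helper inside the Activity hosting the VideoCropView.
    private void saveVisibleFrame(VideoCropView cropView) {
        Bitmap frame = cropView.getBitmap();   // snapshot of what the view currently shows

        // Optional extra crop, as the answer suggests (here it simply copies the full snapshot).
        Bitmap cropped = Bitmap.createBitmap(frame, 0, 0, frame.getWidth(), frame.getHeight());

        File outFile = new File(getExternalFilesDir(null), "cropped_frame.jpg"); // placeholder path
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(outFile);
            cropped.compress(Bitmap.CompressFormat.JPEG, 90, fos);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try { fos.close(); } catch (IOException ignored) {}
            }
        }
    }

    Note that this saves one frame as an image; cropping the video itself still requires the FFmpeg approach shown earlier.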

    Regarding "android - How to crop the visible part of a landscape video in Android?", a similar question was found on Stack Overflow: https://stackoverflow.com/questions/39077385/
