ffmpeg - 我想知道为什么 logcat 说 "NO SUCH A FILE OR DIRECTORY(2)"

标签 ffmpeg

我想用 ffmpeg 在 Android 上播放音频。
但是当我运行这个项目时,发生了错误

我应该怎么办?

java端

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.SystemClock;

public class FFmpegBasic extends Activity
{
    private AudioTrack track;
    /** Debug tap: decoded PCM is mirrored to this file. May be null if the open failed. */
    private FileOutputStream os;

    /** Called when the activity is first created. Sets up the AudioTrack and decodes the file natively. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        createEngine();

        // CHANNEL_OUT_STEREO replaces the deprecated CHANNEL_CONFIGURATION_STEREO
        // (same value for the output case, but the old constant is ambiguous in/out).
        int bufSize = AudioTrack.getMinBufferSize(44100,
                                                  AudioFormat.CHANNEL_OUT_STEREO,
                                                  AudioFormat.ENCODING_PCM_16BIT);

        track = new AudioTrack(AudioManager.STREAM_MUSIC,
                               44100,
                               AudioFormat.CHANNEL_OUT_STEREO,
                               AudioFormat.ENCODING_PCM_16BIT,
                               bufSize,
                               AudioTrack.MODE_STREAM);

        byte[] bytes = new byte[bufSize];

        try {
            os = new FileOutputStream("/sdcard/a.out", false);
        } catch (FileNotFoundException e) {
            // os stays null; playSound() and the close below guard against that.
            e.printStackTrace();
        }

        String result = "/mnt/sdcard/Wildlife.mp3";
        loadFile(result, bytes);

        // BUG FIX: the original called os.close() unconditionally, which throws a
        // NullPointerException whenever the FileOutputStream failed to open above.
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Called back from native code with a chunk of decoded PCM.
     *
     * @param buf  decoded 16-bit PCM bytes
     * @param size number of valid bytes in {@code buf}
     */
    void playSound(byte[] buf, int size) {
        if (track.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
            track.play();
        track.write(buf, 0, size);

        if (os != null) {   // guard: the debug file may have failed to open
            try {
                os.write(buf, 0, size);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }


    private native void createEngine();
    private native void loadFile(String file, byte[] array);

    /** Load JNI .so libraries on class initialization (order matters: dependencies first). */
    static {
         System.loadLibrary("avutil");
         System.loadLibrary("avcore");
         System.loadLibrary("avcodec");
         System.loadLibrary("avformat");
         System.loadLibrary("avdevice");
         System.loadLibrary("swscale");
         System.loadLibrary("avfilter");
         System.loadLibrary("ffmpeg");
         System.loadLibrary("basicplayer");
    }
}

c侧
#include <assert.h>
#include <jni.h>
#include <string.h>
#include <android/log.h>

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "avcodec.h"
#include "avformat.h"




void Java_net_jbong_FFmpegBasic_FFmpegBasic_createEngine(JNIEnv* env, jclass clazz)
    {
        //avcodec_init();

        av_register_all();


    }

void Java_net_jbong_FFmpegBasic_FFmpegBasic_loadFile(JNIEnv* env, jobject obj, jstring file, jbyteArray array)
{

    AVFormatContext *gFormatCtx = NULL;
    AVCodecContext *gAudioCodecCtx = NULL;
    AVCodec *gAudioCodec = NULL;
    int gAudioStreamIdx = -1;
    char *gAudioBuffer = NULL;
    int i, outsize = 0;
    AVPacket packet;
    const char *str; 
    str = (*env)->GetStringUTFChars(env, file, NULL); 
    jclass cls = (*env)->GetObjectClass(env, obj); 
    jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V"); 

    if (gFormatCtx != NULL)
        return -1;
    if (av_open_input_file(&gFormatCtx,str,NULL,0,NULL)!=0)
        return -2;
    if (av_find_stream_info(gFormatCtx) < 0)
        return -3;

    for(i=0; i<gFormatCtx->nb_streams; i++)
    {
        if(gFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            gAudioStreamIdx = i;
            break;
        }
    }

    if (gAudioStreamIdx == -1)
        return -4;
    gAudioCodecCtx = gFormatCtx->streams[gAudioStreamIdx]->codec;
    gAudioCodec = avcodec_find_decoder(gAudioCodecCtx->codec_id);

    if (gAudioCodec == NULL)
        return -5;

    if (avcodec_open(gAudioCodecCtx, gAudioCodec)<0)
        return -6;


    gAudioBuffer = (char *)av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE *2); 
    int decode = 0;

    while (av_read_frame(gFormatCtx, &packet) >= 0)
    {
     if (gFormatCtx-> streams[packet.stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
     {
         int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 8; 
         gAudioBuffer = (char *)av_malloc(data_size);
         int size=packet.size;
         while(size > 0)
         {
          int len = avcodec_decode_audio3(gAudioCodecCtx,
            (short *) gAudioBuffer, &data_size, &packet);
          if (data_size > 0)
          {
                    jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
                       memcpy(bytes + decode, (int16_t *)gAudioBuffer, size);
                       (*env)->ReleaseByteArrayElements(env, array, bytes, 0);
                       (*env)->CallVoidMethod(env, obj, play, array, data_size); 
                       decode += size;
                       size -= len;

          }
         }
     }
     av_free_packet(&packet);
    }

    av_close_input_file(gFormatCtx);
    return 0;
}

为什么我的 android logcat 向我显示此消息?
“打开跟踪文件时出错:没有这样的文件或目录 (2)”(原始英文日志为 "Error opening trace file: No such file or directory (2)")

最佳答案

你播放的 URL 是什么?你可以运行 `ffmpeg -protocols` 检查你编译的 ffmpeg 支持哪些协议(protocol)。

关于ffmpeg - 我想知道为什么 logcat 说 "NO SUCH A FILE OR DIRECTORY(2)",我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/11642907/

相关文章:

c++ - FFMpeg C++ 如何创建具有多个输出的过滤器?

ffmpeg - ffmpeg 是否支持将原始视频序列编码为原始 Theora Packets/Elementary bits tream

audio - 使用 ffmpeg 从视频中提取每个音频和字幕

ffmpeg 说 : "option framerate not found"

ffmpeg 透明波形和纯色背景

c# - 有没有关于使用 .net Tao.FFmpeg 编码视频\音频的代码示例\教程?

django - 为内存上传的视频文件生成缩略图

video - 重新创建/匹配 ffmpeg 设置

ffmpeg - DirectShow 捕获源和 FFMPEG

php - FFMPEG - 不显示 -vstats