Based on my research, people keep saying this kind of problem comes from a mismatched/incorrect format, but I am using the LPCM format for both input and output, so how could that be wrong? The result I get is just noise (like white noise).
I decided to just paste my entire code; maybe that will help:
#import "AppDelegate.h"
#import "ViewController.h"
@implementation AppDelegate
@synthesize window = _window;
@synthesize viewController = _viewController;
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
// Override point for customization after application launch.
self.viewController = [[ViewController alloc] initWithNibName:@"ViewController" bundle:nil];
self.window.rootViewController = self.viewController;
[self.window makeKeyAndVisible];
// Insert code here to initialize your application
player = [[Player alloc] init];
[self setupReader];
[self setupQueue];
// initialize reader in a new thread
internalThread =[[NSThread alloc]
initWithTarget:self
selector:@selector(readPackets)
object:nil];
[internalThread start];
// start the queue. this function returns immediately and begins
// invoking the callback, as needed, asynchronously.
//CheckError(AudioQueueStart(queue, NULL), "AudioQueueStart failed");
// and wait
printf("Playing...\n");
do
{
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.25, false);
} while (!player.isDone /*|| gIsRunning*/);
// isDone represents the state of the Audio File enqueuing. This does not mean the
// Audio Queue is actually done playing yet. Since we have 3 half-second buffers in-flight
// continue to run for a short additional time so they can be processed
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 2, false);
// end playback
player.isDone = true;
CheckError(AudioQueueStop(queue, TRUE), "AudioQueueStop failed");
cleanup:
AudioQueueDispose(queue, TRUE);
AudioFileClose(player.playbackFile);
return YES;
}
- (void) setupReader
{
NSURL *assetURL = [NSURL URLWithString:@"ipod-library://item/item.m4a?id=1053020204400037178"]; // from ilham's ipod
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
// from AVAssetReader Class Reference:
// AVAssetReader is not intended for use with real-time sources,
// and its performance is not guaranteed for real-time operations.
NSError * error = nil;
AVAssetReader* reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
AVAssetTrack* track = [songAsset.tracks objectAtIndex:0];
readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track
outputSettings:nil];
// AVAssetReaderOutput* readerOutput = [[AVAssetReaderAudioMixOutput alloc] initWithAudioTracks:songAsset.tracks audioSettings:nil];
[reader addOutput:readerOutput];
[reader startReading];
}
- (void) setupQueue
{
// get the audio data format from the file
// we know that it is PCM.. since it's converted
AudioStreamBasicDescription dataFormat;
dataFormat.mSampleRate = 44100.0;
dataFormat.mFormatID = kAudioFormatLinearPCM;
dataFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
dataFormat.mBytesPerPacket = 4;
dataFormat.mFramesPerPacket = 1;
dataFormat.mBytesPerFrame = 4;
dataFormat.mChannelsPerFrame = 2;
dataFormat.mBitsPerChannel = 16;
// create a output (playback) queue
CheckError(AudioQueueNewOutput(&dataFormat, // ASBD
MyAQOutputCallback, // Callback
(__bridge void *)self, // user data
NULL, // run loop
NULL, // run loop mode
0, // flags (always 0)
&queue), // output: reference to AudioQueue object
"AudioQueueNewOutput failed");
// adjust buffer size to represent about a half second (0.5) of audio based on this format
CalculateBytesForTime(dataFormat, 0.5, &bufferByteSize, &player->numPacketsToRead);
// check if we are dealing with a VBR file. ASBDs for VBR files always have
// mBytesPerPacket and mFramesPerPacket as 0 since they can fluctuate at any time.
// If we are dealing with a VBR file, we allocate memory to hold the packet descriptions
bool isFormatVBR = (dataFormat.mBytesPerPacket == 0 || dataFormat.mFramesPerPacket == 0);
if (isFormatVBR)
player.packetDescs = (AudioStreamPacketDescription*)malloc(sizeof(AudioStreamPacketDescription) * player.numPacketsToRead);
else
player.packetDescs = NULL; // we don't provide packet descriptions for constant bit rate formats (like linear PCM)
// get magic cookie from file and set on queue
MyCopyEncoderCookieToQueue(player.playbackFile, queue);
// allocate the buffers and prime the queue with some data before starting
player.isDone = false;
player.packetPosition = 0;
int i;
for (i = 0; i < kNumberPlaybackBuffers; ++i)
{
CheckError(AudioQueueAllocateBuffer(queue, bufferByteSize, &audioQueueBuffers[i]), "AudioQueueAllocateBuffer failed");
// EOF (the entire file's contents fit in the buffers)
if (player.isDone)
break;
}
}
-(void)readPackets
{
// initialize a mutex and condition so that we can block on buffers in use.
pthread_mutex_init(&queueBuffersMutex, NULL);
pthread_cond_init(&queueBufferReadyCondition, NULL);
state = AS_BUFFERING;
while ((sample = [readerOutput copyNextSampleBuffer])) {
AudioBufferList audioBufferList;
CMBlockBufferRef CMBuffer = CMSampleBufferGetDataBuffer( sample );
CheckError(CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sample,
NULL,
&audioBufferList,
sizeof(audioBufferList),
NULL,
NULL,
kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
&CMBuffer
),
"could not read samples");
AudioBuffer audioBuffer = audioBufferList.mBuffers[0];
UInt32 inNumberBytes = audioBuffer.mDataByteSize;
size_t incomingDataOffset = 0;
while (inNumberBytes) {
size_t bufSpaceRemaining;
bufSpaceRemaining = bufferByteSize - bytesFilled;
@synchronized(self)
{
bufSpaceRemaining = bufferByteSize - bytesFilled;
size_t copySize;
if (bufSpaceRemaining < inNumberBytes)
{
copySize = bufSpaceRemaining;
}
else
{
copySize = inNumberBytes;
}
// copy data to the audio queue buffer
AudioQueueBufferRef fillBuf = audioQueueBuffers[fillBufferIndex];
memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)(audioBuffer.mData + incomingDataOffset), copySize);
// keep track of bytes filled
bytesFilled +=copySize;
incomingDataOffset +=copySize;
inNumberBytes -=copySize;
}
// if the space remaining in the buffer is not enough for this packet, then enqueue the buffer.
if (bufSpaceRemaining < inNumberBytes + bytesFilled)
{
[self enqueueBuffer];
}
}
}
}
-(void)enqueueBuffer
{
@synchronized(self)
{
inuse[fillBufferIndex] = true; // set in use flag
buffersUsed++;
// enqueue buffer
AudioQueueBufferRef fillBuf = audioQueueBuffers[fillBufferIndex];
NSLog(@"we are now enqueing buffer %d",fillBufferIndex);
fillBuf->mAudioDataByteSize = bytesFilled;
err = AudioQueueEnqueueBuffer(queue, fillBuf, 0, NULL);
if (err)
{
NSLog(@"could not enqueue queue with buffer");
return;
}
if (state == AS_BUFFERING)
{
//
// Fill all the buffers before starting. This ensures that the
// AudioFileStream stays a small amount ahead of the AudioQueue to
// avoid an audio glitch playing streaming files on iPhone SDKs < 3.0
//
if (buffersUsed == kNumberPlaybackBuffers - 1)
{
err = AudioQueueStart(queue, NULL);
if (err)
{
NSLog(@"couldn't start queue");
return;
}
state = AS_PLAYING;
}
}
// go to next buffer
if (++fillBufferIndex >= kNumberPlaybackBuffers) fillBufferIndex = 0;
bytesFilled = 0; // reset bytes filled
}
// wait until next buffer is not in use
pthread_mutex_lock(&queueBuffersMutex);
while (inuse[fillBufferIndex])
{
pthread_cond_wait(&queueBufferReadyCondition, &queueBuffersMutex);
}
pthread_mutex_unlock(&queueBuffersMutex);
}
#pragma mark - utility functions -
// generic error handler - if err is nonzero, prints error message and exits program.
static void CheckError(OSStatus error, const char *operation)
{
if (error == noErr) return;
char str[20];
// see if it appears to be a 4-char-code
*(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error);
if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) {
str[0] = str[5] = '\'';
str[6] = '\0';
} else
// no, format it as an integer
sprintf(str, "%d", (int)error);
fprintf(stderr, "Error: %s (%s)\n", operation, str);
exit(1);
}
// we only use time here as a guideline
// we're really trying to get somewhere between 16K and 64K buffers, but not allocate too much if we don't need it/*
void CalculateBytesForTime(AudioStreamBasicDescription inDesc, Float64 inSeconds, UInt32 *outBufferSize, UInt32 *outNumPackets)
{
// we need to calculate how many packets we read at a time, and how big a buffer we need.
// we base this on the size of the packets in the file and an approximate duration for each buffer.
//
// first check to see what the max size of a packet is, if it is bigger than our default
// allocation size, that needs to become larger
// we don't have access to file packet size, so we just default it to maxBufferSize
UInt32 maxPacketSize = 0x10000;
static const int maxBufferSize = 0x10000; // limit size to 64K
static const int minBufferSize = 0x4000; // limit size to 16K
if (inDesc.mFramesPerPacket) {
Float64 numPacketsForTime = inDesc.mSampleRate / inDesc.mFramesPerPacket * inSeconds;
*outBufferSize = numPacketsForTime * maxPacketSize;
} else {
// if frames per packet is zero, then the codec has no predictable packet == time
// so we can't tailor this (we don't know how many Packets represent a time period
// we'll just return a default buffer size
*outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize;
}
// we're going to limit our size to our default
if (*outBufferSize > maxBufferSize && *outBufferSize > maxPacketSize)
*outBufferSize = maxBufferSize;
else {
// also make sure we're not too small - we don't want to go the disk for too small chunks
if (*outBufferSize < minBufferSize)
*outBufferSize = minBufferSize;
}
*outNumPackets = *outBufferSize / maxPacketSize;
}
// many encoded formats require a 'magic cookie'. if the file has a cookie we get it
// and configure the queue with it
static void MyCopyEncoderCookieToQueue(AudioFileID theFile, AudioQueueRef queue ) {
UInt32 propertySize;
OSStatus result = AudioFileGetPropertyInfo (theFile, kAudioFilePropertyMagicCookieData, &propertySize, NULL);
if (result == noErr && propertySize > 0)
{
Byte* magicCookie = (UInt8*)malloc(sizeof(UInt8) * propertySize);
CheckError(AudioFileGetProperty (theFile, kAudioFilePropertyMagicCookieData, &propertySize, magicCookie), "get cookie from file failed");
CheckError(AudioQueueSetProperty(queue, kAudioQueueProperty_MagicCookie, magicCookie, propertySize), "set cookie on queue failed");
free(magicCookie);
}
}
#pragma mark - audio queue -
static void MyAQOutputCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inCompleteAQBuffer)
{
AppDelegate *appDelegate = (__bridge AppDelegate *) inUserData;
[appDelegate myCallback:inUserData
inAudioQueue:inAQ
audioQueueBufferRef:inCompleteAQBuffer];
}
- (void)myCallback:(void *)userData
inAudioQueue:(AudioQueueRef)inAQ
audioQueueBufferRef:(AudioQueueBufferRef)inCompleteAQBuffer
{
unsigned int bufIndex = -1;
for (unsigned int i = 0; i < kNumberPlaybackBuffers; ++i)
{
if (inCompleteAQBuffer == audioQueueBuffers[i])
{
bufIndex = i;
break;
}
}
if (bufIndex == -1)
{
NSLog(@"something went wrong at queue callback");
return;
}
// signal waiting thread that the buffer is free.
pthread_mutex_lock(&queueBuffersMutex);
NSLog(@"signalling that buffer %d is free",bufIndex);
inuse[bufIndex] = false;
buffersUsed--;
pthread_cond_signal(&queueBufferReadyCondition);
pthread_mutex_unlock(&queueBuffersMutex);
}
@end
Update: btomw's answer below solved this nicely. But I'd like to get to the bottom of it (most novice developers like myself, and even btomw when he was starting out, are usually shooting in the dark with parameters, formats and so on - see here for an example).
The reason I provided nil as the parameter in AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:audioReadSettings];
was because, according to the documentation and after trial and error, I realized that any format I specified other than LPCM would be flatly rejected. In other words, when you use AVAssetReader there is a conversion either way and the result is always LPCM, so I figured the default format was LPCM anyway and left it as nil - but I guess I was wrong.
The strange part (please correct me if I'm wrong) is, as I mentioned: suppose the original file is an .mp3 and my intention is to play it back (or send the packets over a network, etc.) as mp3, so I supply an mp3 ASBD - the asset reader crashes! So if I want to send it in its original form, do I just supply nil? The obvious problem with that is, once I receive it on the other side, I have no way to figure out what ASBD it has. Or can I?
Update 2: You can download the code from GitHub.
Best Answer
So here's what I think is going on, and how I think you can fix it.
You're pulling a predefined item out of the iPod (Music) library on an iOS device. You are then using an asset reader to collect its buffers and queue those buffers, where possible, into an AudioQueue.
The problem, I think, is that you are setting the audio queue buffer's input format to Linear Pulse Code Modulation (LPCM - hopefully I got the acronym right). The output settings you are passing to the asset reader output are nil, which means the output you get will most likely not be LPCM, but instead aiff or aac or mp3 or whatever format the song exists as in iOS's media library. You can, however, remedy this situation by passing in different output settings.
Try changing
readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:nil];
to:
// a channel layout is needed for the AVChannelLayoutKey entry below
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:outputSettings];
As I understand it (per Apple's documentation), passing nil as the output settings parameter gives you samples in the same file type as the original track. Even if you do have an LPCM file, some other setting may be off, which could be causing your problems. At the very least, this will normalize all of the reader's output, which should make things easier to troubleshoot.
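As an aside (this is not from the original answer, just a hedged sketch): you can confirm what format the reader is actually handing back by inspecting the format description attached to each sample buffer. The snippet below assumes a CMSampleBufferRef named sample obtained from [readerOutput copyNextSampleBuffer], as in the question's readPackets method.
// Sketch: log the actual format of a buffer delivered by the asset reader output.
CMAudioFormatDescriptionRef fmt = (CMAudioFormatDescriptionRef)CMSampleBufferGetFormatDescription(sample);
const AudioStreamBasicDescription *bufferASBD = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
if (bufferASBD) {
    // With nil output settings this may report AAC/MP3/etc.;
    // with LPCM output settings it should report kAudioFormatLinearPCM.
    NSLog(@"formatID: %u, sampleRate: %f, channels: %u, bits: %u, flags: %u",
          (unsigned int)bufferASBD->mFormatID,
          bufferASBD->mSampleRate,
          (unsigned int)bufferASBD->mChannelsPerFrame,
          (unsigned int)bufferASBD->mBitsPerChannel,
          (unsigned int)bufferASBD->mFormatFlags);
}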
Hope that helps!
Edit:
the reason why I provided nul as a parameter for AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:audioReadSettings];
was because according to the documentation and trial and error, I...
AVAssetReader does one of two things: it reads back the audio file as it exists on disk (i.e. mp3, aac, aiff), or it converts the audio to LPCM.
If you pass nil
as the output settings, it reads the file back as it exists, and you are correct on that point. I apologize for not mentioning that an asset reader only allows nil or LPCM. I actually ran into that problem myself (it's in the documentation somewhere, but it takes some digging), but chose not to mention it here because it didn't come to mind at the time. Sooo... sorry?
If you want to know the AudioStreamBasicDescription (ASBD) of the track you are reading before you read it, you can get it like this:
AVURLAsset* uasset = [[AVURLAsset URLAssetWithURL:<#assetURL#> options:nil]retain];
AVAssetTrack*track = [uasset.tracks objectAtIndex:0];
CMFormatDescriptionRef formDesc = (CMFormatDescriptionRef)[[track formatDescriptions] objectAtIndex:0];
const AudioStreamBasicDescription* asbdPointer = CMAudioFormatDescriptionGetStreamBasicDescription(formDesc);
//because this is a pointer and not a struct we need to move the data into a struct so we can use it
AudioStreamBasicDescription asbd = {0};
memcpy(&asbd, asbdPointer, sizeof(asbd));
//asbd now contains a basic description for the track
You can then convert asbd
into binary data in whatever format you see fit and transfer it over the network. You should then be able to start sending the audio buffer data over the network and playing it back successfully with an AudioQueue.
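As an illustration only (this packing scheme is not from the original answer): one minimal way to move the ASBD across the wire is to wrap the raw struct in an NSData blob and copy it back into a struct on the receiving side; in a real protocol you would also need to agree on byte order between peers.
// Sketch: serialize the ASBD obtained above for transmission.
NSData *asbdBlob = [NSData dataWithBytes:&asbd length:sizeof(AudioStreamBasicDescription)];
// ... send asbdBlob over your connection of choice ...
// Receiving side: rebuild the struct and use it to create the playback queue,
// e.g. pass it to AudioQueueNewOutput() as in the question's setupQueue.
AudioStreamBasicDescription remoteFormat = {0};
[asbdBlob getBytes:&remoteFormat length:sizeof(remoteFormat)];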
I actually had a system like this working a while back, but since I couldn't keep the connection alive when the iOS client device went into the background, I couldn't use it for my purposes. Still, if all that work lets me help someone else who can actually use this information, that seems like a win to me.
Regarding "ios - Why is the audio garbled when using AVAssetReader with an audio queue", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/12264799/