- objective-c - iOS 5 : Can you override UIAppearance customisations in specific classes?
- iphone - 如何将 CGFontRef 转换为 UIFont?
- ios - 以编程方式关闭标记的信息窗口 google maps iOS
- ios - Xcode 5 - 尝试验证存档时出现 "No application records were found"
我正在寻找有关如何在 iOS 上播放 MIDI 文件的信息。我不需要任何 MIDI 输入或输出消息。我只是想读取 MIDI 文件并将轨道播放给用户,用每个音符代替钢琴声音样本。能够调整节奏将是另一个要求。
请注意,我对将 midi 文件转换为 wav 或其他格式不感兴趣。我想直接读取 midi 文件。
谁能给我指出一些信息的方向,以帮助我理解所需的过程。
干杯
最佳答案
我也需要这个功能。下面是骨架解析器的代码,它解析 NSData 对象中提供的 MIDI 文件数据(例如来自 NSData 的 dataWithContentsOfFile: 方法),并将它找到的内容写入可变字符串日志。真实的应用程序会以更有用的方式处理各种事件,但对于需要解析标准 MIDI 文件的任何人来说,这应该是一个很好的起点,因为它解决了大部分痛点。
// MidiParser.h
#import <Foundation/Foundation.h>
// How delta-times in the file are to be interpreted, derived from the
// division field of the MThd header chunk (see -parseData:).
typedef enum tagMidiTimeFormat
{
MidiTimeFormatTicksPerBeat,   // division = ticks (PPQN) per quarter note
MidiTimeFormatFramesPerSecond // division = SMPTE frames/second + ticks/frame
} MidiTimeFormat;
// Minimal Standard MIDI File (SMF) parser. Feed it the raw bytes of a
// .mid file via -parseData:; it walks the MThd header and every MTrk
// chunk, appending a human-readable description of each event to `log`.
@interface MidiParser : NSObject
{
NSMutableString *log;      // human-readable trace of everything parsed
NSData *data;              // bytes being parsed (not retained; valid only during -parseData:)
NSUInteger offset;         // current read position within `data`
UInt16 format;             // SMF format (0, 1 or 2) from the MThd chunk
UInt16 trackCount;         // number of MTrk chunks the header promises
MidiTimeFormat timeFormat; // which of the two time-division encodings is in use
UInt16 ticksPerBeat;       // valid when timeFormat == MidiTimeFormatTicksPerBeat
UInt16 framesPerSecond;    // valid when timeFormat == MidiTimeFormatFramesPerSecond
UInt16 ticksPerFrame;      // valid when timeFormat == MidiTimeFormatFramesPerSecond
}
@property (nonatomic, retain) NSMutableString *log;
@property (readonly) UInt16 format;
@property (readonly) UInt16 trackCount;
@property (readonly) MidiTimeFormat timeFormat;
// Parses `midiData` as a Standard MIDI File, writing a description of every
// event to `log`. Returns NO (with the failure reason appended to `log`)
// when the data is malformed. Replaces `log` on each call; not thread-safe.
- (BOOL) parseData: (NSData *) midiData;
@end
// MidiParser.m
#import "MidiParser.h"
#define kFileCorrupt @"File is corrupt"
#define kInvalidHeader @"Invalid MIDI header"
#define kInvalidTrackHeader @"Invalid Track header"
#define MAIN_HEADER_SIZE 6
#define META_SEQUENCE_NUMBER 0x0
#define META_TEXT_EVENT 0x1
#define META_COPYRIGHT_NOTICE 0x2
#define META_TRACK_NAME 0x3
#define META_INSTRUMENT_NAME 0x4
#define META_LYRICS 0x5
#define META_MARKER 0x6
#define META_CUE_POINT 0x7
#define META_CHANNEL_PREFIX 0x20
#define META_END_OF_TRACK 0x2f
#define META_SET_TEMPO 0x51
#define META_SMPTE_OFFSET 0x54
#define META_TIME_SIGNATURE 0x58
#define META_KEY_SIGNATURE 0x59
#define META_SEQ_SPECIFIC 0x7f
#define CHANNEL_NOTE_OFF 0x8
#define CHANNEL_NOTE_ON 0x9
#define CHANNEL_NOTE_AFTERTOUCH 0xA
#define CHANNEL_CONTROLLER 0xB
#define CHANNEL_PROGRAM_CHANGE 0xC
#define CHANNEL_AFTERTOUCH 0xD
#define CHANNEL_PITCH_BEND 0xE
#define MICRO_PER_MINUTE 60000000
@implementation MidiParser

@synthesize log;
@synthesize format;
@synthesize trackCount;
@synthesize timeFormat;

// Manual-retain-release class: release what we own, nothing else
// (`data` is deliberately never retained — see -parseData:).
- (void) dealloc
{
    [log release];
    log = nil;
    [super dealloc];
}

#pragma mark - Primitive readers

// Reads a big-endian 32-bit word at `offset` and advances past it.
- (UInt32) readDWord
{
    UInt32 value = 0;
    [data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
    value = CFSwapInt32BigToHost(value);
    offset += sizeof(value);
    return value;
}

// Reads a big-endian 16-bit word at `offset` and advances past it.
- (UInt16) readWord
{
    UInt16 value = 0;
    [data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
    value = CFSwapInt16BigToHost(value);
    offset += sizeof(value);
    return value;
}

// Reads one byte at `offset` and advances past it.
- (UInt8) readByte
{
    UInt8 value = 0;
    [data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
    offset += sizeof(value);
    return value;
}

// Reads one byte at `offset + o` WITHOUT advancing `offset`. Meta-event
// handlers use this; the caller then skips the event via its length field.
- (UInt8) readByteAtRelativeOffset: (UInt32) o
{
    UInt8 value = 0;
    [data getBytes:&value range:NSMakeRange(offset + o, sizeof(value))];
    return value;
}

// Reads a MIDI variable-length quantity: 7 payload bits per byte,
// most-significant group first; a set high bit means more bytes follow.
- (UInt32) readVariableValue
{
    UInt32 value = 0;
    UInt8 byte;
    UInt8 shift = 0;
    do
    {
        value <<= shift; // no-op for the first byte, 7 bits afterwards
        [data getBytes:&byte range:NSMakeRange(offset, 1)];
        offset++;
        value |= (byte & 0x7f);
        shift = 7;
    } while ((byte & 0x80) != 0);
    return value;
}

// Reads `length` bytes at `offset` as an ASCII string. Does NOT advance
// `offset`; the caller skips past the text via the event-length bookkeeping.
// NOTE(review): no bounds check against [data length] — a corrupt length
// field can read past the buffer; confirm inputs are trusted.
- (NSString *) readString: (int) length
{
    char *buffer = malloc(length + 1);
    memcpy(buffer, ([data bytes] + offset), length);
    buffer[length] = 0x0;
    NSString *string = [NSString stringWithCString:buffer encoding:NSASCIIStringEncoding];
    free(buffer);
    return string;
}

#pragma mark - Meta events

// Meta 0x00: sequence number, two big-endian bytes.
// NOTE(review): assumes the event length is 2; a length-0 form also exists.
- (void) readMetaSequence
{
    UInt32 sequenceNumber = 0;
    sequenceNumber |= [self readByteAtRelativeOffset:0];
    sequenceNumber <<= 8;
    sequenceNumber |= [self readByteAtRelativeOffset:1];
    [self.log appendFormat:@"Meta Sequence Number: %d\n", sequenceNumber];
}

// Meta 0x01: arbitrary text.
- (void) readMetaTextEvent: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Text: %@\n", text];
}

// Meta 0x02: copyright notice.
- (void) readMetaCopyrightNotice: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Copyright: %@\n", text];
}

// Meta 0x03: track name.
- (void) readMetaTrackName: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Track Name: %@\n", text];
}

// Meta 0x04: instrument name.
- (void) readMetaInstrumentName: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Instrument Name: %@\n", text];
}

// Meta 0x05: lyric text.
- (void) readMetaLyrics: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Text: %@\n", text];
}

// Meta 0x06: marker text.
- (void) readMetaMarker: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Marker: %@\n", text];
}

// Meta 0x07: cue point text.
- (void) readMetaCuePoint: (UInt32) length
{
    NSString *text = [self readString:length];
    [self.log appendFormat:@"Meta Cue Point: %@\n", text];
}

// Meta 0x20: MIDI channel prefix for the following meta/SysEx events.
- (void) readMetaChannelPrefix
{
    UInt8 channel = [self readByteAtRelativeOffset:0];
    [self.log appendFormat:@"Meta Channel Prefix: %d\n", channel];
}

// Meta 0x2F: end of track (zero-length payload).
- (void) readMetaEndOfTrack
{
    [self.log appendFormat:@"Meta End of Track\n"];
}

// Meta 0x51: tempo as microseconds per quarter note, three big-endian bytes.
- (void) readMetaSetTempo
{
    UInt32 microPerQuarter = 0;
    microPerQuarter |= [self readByteAtRelativeOffset:0];
    microPerQuarter <<= 8;
    microPerQuarter |= [self readByteAtRelativeOffset:1];
    microPerQuarter <<= 8;
    microPerQuarter |= [self readByteAtRelativeOffset:2];
    UInt32 bpm = MICRO_PER_MINUTE / microPerQuarter;
    [self.log appendFormat:@"Meta Set Tempo: Micro Per Quarter: %d, Beats Per Minute: %d\n", microPerQuarter, bpm];
}

// Meta 0x54: SMPTE offset. The first byte packs the frame rate (bits 5-6)
// together with the hour (bits 0-4).
- (void) readMetaSMPTEOffset
{
    UInt8 byte = [self readByteAtRelativeOffset:0];
    UInt8 hour = byte & 0x1f;
    UInt8 rate = (byte & 0x60) >> 5;
    UInt8 fps = 0;
    switch(rate)
    {
        case 0: fps = 24; break;
        case 1: fps = 25; break;
        case 2: fps = 29; break; // 29.97 drop-frame
        case 3: fps = 30; break;
        default: fps = 0; break;
    }
    UInt8 minutes = [self readByteAtRelativeOffset:1];
    UInt8 seconds = [self readByteAtRelativeOffset:2];
    UInt8 frame = [self readByteAtRelativeOffset:3];
    UInt8 subframe = [self readByteAtRelativeOffset:4];
    [self.log appendFormat:@"Meta SMPTE Offset (%d): %2d:%2d:%2d:%2d:%2d\n", fps, hour, minutes, seconds, frame, subframe];
}

// Meta 0x58: time signature. The denominator byte is a power of two.
- (void) readMetaTimeSignature
{
    UInt8 numerator = [self readByteAtRelativeOffset:0];
    UInt8 denominator = [self readByteAtRelativeOffset:1];
    UInt8 metro = [self readByteAtRelativeOffset:2];
    UInt8 thirty_seconds = [self readByteAtRelativeOffset:3];
    [self.log appendFormat:@"Meta Time Signature: %d/%.0f, Metronome: %d, 32nds: %d\n", numerator, powf(2, denominator), metro, thirty_seconds];
}

// Meta 0x59: key signature. The first byte is a SIGNED count of
// accidentals (negative = flats, positive = sharps); the second byte is
// 0 = major, 1 = minor.
- (void) readMetaKeySignature
{
    // BUGFIX: the accidental count is a signed byte. Masking with 0x7f
    // turned e.g. one flat (0xFF == -1) into 127; take the magnitude instead.
    SInt8 keyValue = (SInt8)[self readByteAtRelativeOffset:0];
    UInt8 accidentals;
    NSString *accidentalsType;
    if(keyValue < 0)
    {
        accidentals = (UInt8)(-keyValue);
        accidentalsType = @"Flats";
    }
    else
    {
        accidentals = (UInt8)keyValue;
        accidentalsType = @"Sharps";
    }
    UInt8 scale = [self readByteAtRelativeOffset:1];
    NSString *scaleType = (scale == 0) ? @"Major" : @"Minor";
    [self.log appendFormat:@"Meta Key Signature: %d %@ Type: %@\n", accidentals, accidentalsType, scaleType];
}

// Meta 0x7F: sequencer-specific payload; only the length is logged.
- (void) readMetaSeqSpecific: (UInt32) length
{
    [self.log appendFormat:@"Meta Event Sequencer Specific: - Length: %d\n", length];
}

#pragma mark - Channel events

// 0x8n: note off — p1 = key number, p2 = release velocity.
- (void) readNoteOff: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
    [self.log appendFormat:@"Note Off (Channel %d): %d, Velocity: %d\n", channel, p1, p2];
}

// 0x9n: note on — p1 = key number, p2 = velocity (0 conventionally = off).
- (void) readNoteOn: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
    [self.log appendFormat:@"Note On (Channel %d): %d, Velocity: %d\n", channel, p1, p2];
}

// 0xAn: polyphonic aftertouch — p1 = key number, p2 = pressure.
- (void) readNoteAftertouch: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
    [self.log appendFormat:@"Note Aftertouch (Channel %d): %d, Amount: %d\n", channel, p1, p2];
}

// 0xBn: controller change — p1 = controller number, p2 = value.
- (void) readControllerEvent: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
    [self.log appendFormat:@"Controller (Channel %d): %d, Value: %d\n", channel, p1, p2];
}

// 0xCn: program (patch) change — single data byte.
- (void) readProgramChange: (UInt8) channel parameter1: (UInt8) p1
{
    [self.log appendFormat:@"Program Change (Channel %d): %d\n", channel, p1];
}

// 0xDn: channel aftertouch — single data byte.
- (void) readChannelAftertouch: (UInt8) channel parameter1: (UInt8) p1
{
    [self.log appendFormat:@"Channel Aftertouch (Channel %d): %d\n", channel, p1];
}

// 0xEn: pitch bend — the two data bytes form a 14-bit value (0x2000 = center).
- (void) readPitchBend: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
    // BUGFIX: MIDI data bytes carry 7 bits each and arrive LSB first, so the
    // bend value is (p2 << 7) | p1 — not (p1 << 8) | p2 as originally written.
    UInt32 value = p2;
    value <<= 7;
    value |= p1;
    [self.log appendFormat:@"Pitch Bend (Channel %d): %d\n", channel, value];
}

#pragma mark - Parsing

// Parses `midiData` as a Standard MIDI File, appending a description of
// every event to `log`. Returns NO (with the reason appended to `log`)
// when the data is malformed. Exceptions are used internally only to
// abort on corrupt input; they never escape this method.
- (BOOL) parseData:(NSData *)midiData
{
    BOOL success = YES;
    self.log = [[[NSMutableString alloc] init] autorelease];
    @try
    {
        // `data` is not retained: it is only used for the duration of
        // this call and cleared before returning.
        data = midiData;
        offset = 0;
        // If size is less than header size, then abort.
        NSUInteger dataLength = [data length];
        if((offset + MAIN_HEADER_SIZE) > dataLength)
        {
            NSException *ex = [NSException exceptionWithName:kFileCorrupt
                reason:kFileCorrupt userInfo:nil];
            @throw ex;
        }
        // Every SMF starts with an "MThd" chunk.
        if(memcmp([data bytes], "MThd", 4) != 0)
        {
            NSException *ex = [NSException exceptionWithName:kFileCorrupt
                reason:kInvalidHeader userInfo:nil];
            @throw ex;
        }
        offset += 4;
        UInt32 chunkSize = [self readDWord];
        [self.log appendFormat:@"Header Chunk Size: %d\n", chunkSize];
        // Read format (0 = single track, 1 = parallel tracks, 2 = sequential).
        format = [self readWord];
        [self.log appendFormat:@"Format: %d\n", format];
        // Read track count.
        trackCount = [self readWord];
        [self.log appendFormat:@"Tracks: %d\n", trackCount];
        // Read time division: high bit clear = ticks per beat,
        // high bit set = SMPTE frames/second + ticks/frame.
        UInt16 timeDivision = [self readWord];
        if((timeDivision & 0x8000) == 0)
        {
            timeFormat = MidiTimeFormatTicksPerBeat;
            ticksPerBeat = timeDivision & 0x7fff;
            [self.log appendFormat:@"Time Format: %d Ticks Per Beat\n", ticksPerBeat];
        }
        else
        {
            timeFormat = MidiTimeFormatFramesPerSecond;
            framesPerSecond = (timeDivision & 0x7f00) >> 8;
            ticksPerFrame = (timeDivision & 0xff);
            [self.log appendFormat:@"Time Division: %d Frames Per Second, %d Ticks Per Frame\n", framesPerSecond, ticksPerFrame];
        }
        // Parse each MTrk chunk, resynchronizing to the declared chunk
        // boundary if an event left `offset` in the wrong place.
        UInt32 expectedTrackOffset = offset;
        for(UInt16 track = 0; track < trackCount; track++)
        {
            if(offset != expectedTrackOffset)
            {
                [self.log appendFormat:@"Track Offset Incorrect for Track %d - Offset: %d, Expected: %d", track, offset, expectedTrackOffset];
                offset = expectedTrackOffset;
            }
            // Parse track header.
            if(memcmp([data bytes] + offset, "MTrk", 4) != 0)
            {
                NSException *ex = [NSException exceptionWithName:kFileCorrupt
                    reason:kInvalidTrackHeader userInfo:nil];
                @throw ex;
            }
            offset += 4;
            UInt32 trackSize = [self readDWord];
            expectedTrackOffset = offset + trackSize;
            [self.log appendFormat:@"Track %d : %d bytes\n", track, trackSize];
            UInt32 trackEnd = offset + trackSize;
            UInt32 deltaTime;
            UInt8 nextByte = 0;
            UInt8 peekByte = 0;
            while(offset < trackEnd)
            {
                deltaTime = [self readVariableValue];
                [self.log appendFormat:@" (%05d): ", deltaTime];
                // Peek at the next byte without consuming it.
                peekByte = [self readByteAtRelativeOffset:0];
                // A set high bit marks a new status byte, so consume it.
                // A clear high bit means running status: reuse the previous
                // status in `nextByte` and leave this data byte unconsumed.
                if((peekByte & 0x80) != 0)
                {
                    nextByte = [self readByte];
                }
                // Meta event
                if(nextByte == 0xFF)
                {
                    UInt8 metaEventType = [self readByte];
                    UInt32 metaEventLength = [self readVariableValue];
                    switch (metaEventType)
                    {
                        case META_SEQUENCE_NUMBER:
                            [self readMetaSequence];
                            break;
                        case META_TEXT_EVENT:
                            [self readMetaTextEvent: metaEventLength];
                            break;
                        case META_COPYRIGHT_NOTICE:
                            [self readMetaCopyrightNotice: metaEventLength];
                            break;
                        case META_TRACK_NAME:
                            [self readMetaTrackName: metaEventLength];
                            break;
                        case META_INSTRUMENT_NAME:
                            [self readMetaInstrumentName: metaEventLength];
                            break;
                        case META_LYRICS:
                            [self readMetaLyrics: metaEventLength];
                            break;
                        case META_MARKER:
                            [self readMetaMarker: metaEventLength];
                            break;
                        case META_CUE_POINT:
                            [self readMetaCuePoint: metaEventLength];
                            break;
                        case META_CHANNEL_PREFIX:
                            [self readMetaChannelPrefix];
                            break;
                        case META_END_OF_TRACK:
                            [self readMetaEndOfTrack];
                            break;
                        case META_SET_TEMPO:
                            [self readMetaSetTempo];
                            break;
                        case META_SMPTE_OFFSET:
                            [self readMetaSMPTEOffset];
                            break;
                        case META_TIME_SIGNATURE:
                            [self readMetaTimeSignature];
                            break;
                        case META_KEY_SIGNATURE:
                            [self readMetaKeySignature];
                            break;
                        case META_SEQ_SPECIFIC:
                            [self readMetaSeqSpecific: metaEventLength];
                            break;
                        default:
                            [self.log appendFormat:@"Meta Event Type: 0x%x, Length: %d\n", metaEventType, metaEventLength];
                            break;
                    }
                    // Handlers read via relative offsets; skip the payload here.
                    offset += metaEventLength;
                }
                else if(nextByte == 0xf0 || nextByte == 0xf7)
                {
                    // SysEx event (0xF0) or SysEx continuation/escape (0xF7).
                    // BUGFIX: 0xF7 was previously unhandled and fell into the
                    // channel-event branch, desynchronizing the parser over
                    // the unskipped payload.
                    UInt32 sysExDataLength = [self readVariableValue];
                    [self.log appendFormat:@"SysEx Event - Length: %d\n", sysExDataLength];
                    offset += sysExDataLength;
                }
                else
                {
                    // Channel event: high nibble = event type, low nibble = channel.
                    UInt8 eventType = (nextByte & 0xF0) >> 4;
                    UInt8 channel = (nextByte & 0xF);
                    UInt8 p1 = 0;
                    UInt8 p2 = 0;
                    switch (eventType)
                    {
                        case CHANNEL_NOTE_OFF:
                            p1 = [self readByte];
                            p2 = [self readByte];
                            [self readNoteOff: channel parameter1: p1 parameter2: p2];
                            break;
                        case CHANNEL_NOTE_ON:
                            p1 = [self readByte];
                            p2 = [self readByte];
                            [self readNoteOn:channel parameter1:p1 parameter2:p2];
                            break;
                        case CHANNEL_NOTE_AFTERTOUCH:
                            p1 = [self readByte];
                            p2 = [self readByte];
                            [self readNoteAftertouch:channel parameter1:p1 parameter2:p2];
                            break;
                        case CHANNEL_CONTROLLER:
                            p1 = [self readByte];
                            p2 = [self readByte];
                            [self readControllerEvent:channel parameter1:p1 parameter2:p2];
                            break;
                        case CHANNEL_PROGRAM_CHANGE:
                            p1 = [self readByte];
                            [self readProgramChange:channel parameter1:p1];
                            break;
                        case CHANNEL_AFTERTOUCH:
                            p1 = [self readByte];
                            [self readChannelAftertouch:channel parameter1:p1];
                            break;
                        case CHANNEL_PITCH_BEND:
                            p1 = [self readByte];
                            p2 = [self readByte];
                            [self readPitchBend:channel parameter1:p1 parameter2:p2];
                            break;
                        default:
                            break;
                    }
                }
            }
        }
    }
    @catch (NSException *exception)
    {
        success = NO;
        [self.log appendString:[exception reason]];
    }
    // Don't keep an unretained pointer to the caller's data past this call.
    data = nil;
    return success;
}
@end
关于ios - 在 IOS 上读取 Midi 文件,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/7193695/
IO 设备如何知道属于它的内存中的值在memory mapped IO 中发生了变化? ? 例如,假设内存地址 0 专用于保存 VGA 设备的背景颜色。当我们更改 memory[0] 中的值时,VGA
我目前正在开发一个使用Facebook sdk登录(通过FBLoginView)的iOS应用。 一切正常,除了那些拥有较旧版本的facebook的人。 当他们按下“使用Facebook登录”按钮时,他
假设我有: this - is an - example - with some - dashesNSRange将使用`rangeOfString:@“-”拾取“-”的第一个实例,但是如果我只想要最后
Card.io SDK提供以下详细信息: 卡号,有效期,月份,年份,CVV和邮政编码。 如何从此SDK获取国家名称。 - (void)userDidProvideCreditCardInfo:(Car
iOS 应用程序如何从网络服务下载图片并在安装过程中将它们安装到用户的 iOS 设备上?可能吗? 最佳答案 您无法控制应用在用户设备上的安装,因此无法在安装过程中下载其他数据。 只需在安装后首次启动应
我曾经开发过一款企业版 iOS 产品,我们公司曾将其出售给大型企业,供他们的员工使用。 该应用程序通过 AppStore 提供,企业用户获得了公司特定的配置文件(包含应用程序配置文件)以启用他们有权使
我正在尝试将 Card.io SDK 集成到我的 iOS 应用程序中。我想为 CardIO ui 做一个简单的本地化,如更改取消按钮标题或“在此保留信用卡”提示文本。 我在 github 上找到了这个
我正在使用 CardIOView 和 CardIOViewDelegate 类,没有可以设置为 YES 的 BOOL 来扫描 collectCardholderName。我可以看到它在 CardIOP
我有一个集成了通话工具包的 voip 应用程序。每次我从我的 voip 应用程序调用时,都会在 native 电话应用程序中创建一个新的最近通话记录。我在 voip 应用程序中也有自定义联系人(电话应
iOS 应用程序如何知道应用程序打开时屏幕上是否已经有键盘?应用程序运行后,它可以接收键盘显示/隐藏通知。但是,如果应用程序在分屏模式下作为辅助应用程序打开,而主应用程序已经显示键盘,则辅助应用程序不
我在模拟器中收到以下错误: ImageIO: CGImageReadSessionGetCachedImageBlockData *** CGImageReadSessionGetCachedIm
如 Apple 文档所示,可以通过 EAAccessory Framework 与经过认证的配件(由 Apple 认证)进行通信。但是我有点困惑,因为一些帖子告诉我它也可以通过 CoreBluetoo
尽管现在的调试器已经很不错了,但有时找出应用程序中正在发生的事情的最好方法仍然是古老的 NSLog。当您连接到计算机时,这样做很容易; Xcode 会帮助弹出日志查看器面板,然后就可以了。当您不在办公
在我的 iOS 应用程序中,我定义了一些兴趣点。其中一些有一个 Kontakt.io 信标的名称,它绑定(bind)到一个特定的 PoI(我的意思是通常贴在信标标签上的名称)。现在我想在附近发现信标,
我正在为警报提示创建一个 trigger.io 插件。尝试从警报提示返回数据。这是我的代码: // Prompt + (void)show_prompt:(ForgeTask*)task{
您好,我是 Apple iOS 的新手。我阅读并搜索了很多关于推送通知的文章,但我没有发现任何关于 APNS 从 io4 到 ios 6 的新更新的信息。任何人都可以向我提供 APNS 如何在 ios
UITabBar 的高度似乎在 iOS 7 和 8/9/10/11 之间发生了变化。我发布这个问题是为了让其他人轻松找到答案。 那么:在 iPhone 和 iPad 上的 iOS 8/9/10/11
我想我可以针对不同的 iOS 版本使用不同的 Storyboard。 由于 UI 的差异,我将创建下一个 Storyboard: Main_iPhone.storyboard Main_iPad.st
我正在写一些东西,我将使用设备的 iTunes 库中的一部分音轨来覆盖 2 个视频的组合,例如: AVMutableComposition* mixComposition = [[AVMutableC
我创建了一个简单的 iOS 程序,可以顺利编译并在 iPad 模拟器上运行良好。当我告诉 XCode 4 使用我连接的 iPad 设备时,无法编译相同的程序。问题似乎是当我尝试使用附加的 iPad 时
我是一名优秀的程序员,十分优秀!