I am looking for information on how to play a MIDI file on iOS. I don't need any MIDI input or output messages; I just want to read a MIDI file and play its tracks back to the user, substituting a piano sound sample for each note. Being able to adjust the tempo would be a further requirement.
Note that I am not interested in converting the MIDI file to WAV or any other format. I want to read the MIDI file directly.
Can anyone point me in the direction of some information that will help me understand the process involved?
Cheers
Best Answer
I needed this functionality as well. Below is the code for a skeleton parser that parses MIDI file data supplied in an NSData object (e.g. from NSData's dataWithContentsOfFile:) and writes whatever it finds to a mutable string log. A real application would handle the various events in a more useful way, but for anyone who needs to parse a standard MIDI file this should be a good starting point, as it deals with most of the painful parts. (A short usage sketch follows the parser code.)
// MidiParser.h
#import <Foundation/Foundation.h>
typedef enum tagMidiTimeFormat
{
MidiTimeFormatTicksPerBeat,
MidiTimeFormatFramesPerSecond
} MidiTimeFormat;
@interface MidiParser : NSObject
{
NSMutableString *log;
NSData *data;
NSUInteger offset;
UInt16 format;
UInt16 trackCount;
MidiTimeFormat timeFormat;
UInt16 ticksPerBeat;
UInt16 framesPerSecond;
UInt16 ticksPerFrame;
}
@property (nonatomic, retain) NSMutableString *log;
@property (readonly) UInt16 format;
@property (readonly) UInt16 trackCount;
@property (readonly) MidiTimeFormat timeFormat;
- (BOOL) parseData: (NSData *) midiData;
@end
// MidiParser.m
#import "MidiParser.h"
#define kFileCorrupt @"File is corrupt"
#define kInvalidHeader @"Invalid MIDI header"
#define kInvalidTrackHeader @"Invalid Track header"
#define MAIN_HEADER_SIZE 6
#define META_SEQUENCE_NUMBER 0x0
#define META_TEXT_EVENT 0x1
#define META_COPYRIGHT_NOTICE 0x2
#define META_TRACK_NAME 0x3
#define META_INSTRUMENT_NAME 0x4
#define META_LYRICS 0x5
#define META_MARKER 0x6
#define META_CUE_POINT 0x7
#define META_CHANNEL_PREFIX 0x20
#define META_END_OF_TRACK 0x2f
#define META_SET_TEMPO 0x51
#define META_SMPTE_OFFSET 0x54
#define META_TIME_SIGNATURE 0x58
#define META_KEY_SIGNATURE 0x59
#define META_SEQ_SPECIFIC 0x7f
#define CHANNEL_NOTE_OFF 0x8
#define CHANNEL_NOTE_ON 0x9
#define CHANNEL_NOTE_AFTERTOUCH 0xA
#define CHANNEL_CONTROLLER 0xB
#define CHANNEL_PROGRAM_CHANGE 0xC
#define CHANNEL_AFTERTOUCH 0xD
#define CHANNEL_PITCH_BEND 0xE
#define MICRO_PER_MINUTE 60000000
@implementation MidiParser
@synthesize log;
@synthesize format;
@synthesize trackCount;
@synthesize timeFormat;
- (void) dealloc
{
[log release];
log = nil;
[super dealloc];
}
- (UInt32) readDWord
{
UInt32 value = 0;
[data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
value = CFSwapInt32BigToHost(value);
offset += sizeof(value);
return value;
}
- (UInt16) readWord
{
UInt16 value = 0;
[data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
value = CFSwapInt16BigToHost(value);
offset += sizeof(value);
return value;
}
- (UInt8) readByte
{
UInt8 value = 0;
[data getBytes:&value range:NSMakeRange(offset, sizeof(value))];
offset += sizeof(value);
return value;
}
- (UInt8) readByteAtRelativeOffset: (UInt32) o
{
UInt8 value = 0;
[data getBytes:&value range:NSMakeRange(offset + o, sizeof(value))];
return value;
}
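// Delta times and meta/SysEx lengths are stored as MIDI variable-length
// quantities: seven data bits per byte, with the high bit set on every
// byte except the last.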
- (UInt32) readVariableValue
{
UInt32 value = 0;
UInt8 byte;
UInt8 shift = 0;
do
{
value <<= shift;
[data getBytes:&byte range:NSMakeRange(offset, 1)];
offset++;
value |= (byte & 0x7f);
shift = 7;
} while ((byte & 0x80) != 0);
return value;
}
- (NSString *) readString: (int) length
{
char *buffer = malloc(length + 1);
memcpy(buffer, ([data bytes] + offset), length);
buffer[length] = 0x0;
NSString *string = [NSString stringWithCString:buffer encoding:NSASCIIStringEncoding];
free(buffer);
return string;
}
- (void) readMetaSequence
{
UInt32 sequenceNumber = 0;
sequenceNumber |= [self readByteAtRelativeOffset:0];
sequenceNumber <<= 8;
sequenceNumber |= [self readByteAtRelativeOffset:1];
[self.log appendFormat:@"Meta Sequence Number: %d\n", sequenceNumber];
}
- (void) readMetaTextEvent: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Text: %@\n", text];
}
- (void) readMetaCopyrightNotice: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Copyright: %@\n", text];
}
- (void) readMetaTrackName: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Track Name: %@\n", text];
}
- (void) readMetaInstrumentName: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Instrument Name: %@\n", text];
}
- (void) readMetaLyrics: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Text: %@\n", text];
}
- (void) readMetaMarker: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Marker: %@\n", text];
}
- (void) readMetaCuePoint: (UInt32) length
{
NSString *text = [self readString:length];
[self.log appendFormat:@"Meta Cue Point: %@\n", text];
}
- (void) readMetaChannelPrefix
{
UInt8 channel = [self readByteAtRelativeOffset:0];
[self.log appendFormat:@"Meta Channel Prefix: %d\n", channel];
}
- (void) readMetaEndOfTrack
{
[self.log appendFormat:@"Meta End of Track\n"];
}
- (void) readMetaSetTempo
{
UInt32 microPerQuarter = 0;
microPerQuarter |= [self readByteAtRelativeOffset:0];
microPerQuarter <<= 8;
microPerQuarter |= [self readByteAtRelativeOffset:1];
microPerQuarter <<= 8;
microPerQuarter |= [self readByteAtRelativeOffset:2];
UInt32 bpm = MICRO_PER_MINUTE / microPerQuarter;
[self.log appendFormat:@"Meta Set Tempo: Micro Per Quarter: %d, Beats Per Minute: %d\n", microPerQuarter, bpm];
}
- (void) readMetaSMPTEOffset
{
UInt8 byte = [self readByteAtRelativeOffset:0];
UInt8 hour = byte & 0x1f;
UInt8 rate = (byte & 0x60) >> 5;
UInt8 fps = 0;
switch(rate)
{
case 0: fps = 24; break;
case 1: fps = 25; break;
case 2: fps = 29; break;
case 3: fps = 30; break;
default: fps = 0; break;
}
UInt8 minutes = [self readByteAtRelativeOffset:1];
UInt8 seconds = [self readByteAtRelativeOffset:2];
UInt8 frame = [self readByteAtRelativeOffset:3];
UInt8 subframe = [self readByteAtRelativeOffset:4];
[self.log appendFormat:@"Meta SMPTE Offset (%d): %2d:%2d:%2d:%2d:%2d\n", fps, hour, minutes, seconds, frame, subframe];
}
- (void) readMetaTimeSignature
{
UInt8 numerator = [self readByteAtRelativeOffset:0];
UInt8 denominator = [self readByteAtRelativeOffset:1];
UInt8 metro = [self readByteAtRelativeOffset:2];
UInt8 thirty_seconds = [self readByteAtRelativeOffset:3];
[self.log appendFormat:@"Meta Time Signature: %d/%.0f, Metronome: %d, 32nds: %d\n", numerator, powf(2, denominator), metro, thirty_seconds];
}
- (void) readMetaKeySignature
{
// The key byte is a signed count of accidentals: positive = sharps, negative = flats
SInt8 key = (SInt8) [self readByteAtRelativeOffset:0];
UInt8 accidentals = 0;
NSString *accidentalsType = nil;
if(key < 0)
{
accidentals = -key;
accidentalsType = @"Flats";
}
else
{
accidentals = key;
accidentalsType = @"Sharps";
}
UInt8 scale = [self readByteAtRelativeOffset:1];
NSString *scaleType = nil;
if(scale == 0)
{
scaleType = @"Major";
}
else
{
scaleType = @"Minor";
}
[self.log appendFormat:@"Meta Key Signature: %d %@ Type: %@\n", accidentals, accidentalsType, scaleType];
}
- (void) readMetaSeqSpecific: (UInt32) length
{
[self.log appendFormat:@"Meta Event Sequencer Specific: - Length: %d\n", length];
}
- (void) readNoteOff: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
[self.log appendFormat:@"Note Off (Channel %d): %d, Velocity: %d\n", channel, p1, p2];
}
- (void) readNoteOn: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
[self.log appendFormat:@"Note On (Channel %d): %d, Velocity: %d\n", channel, p1, p2];
}
- (void) readNoteAftertouch: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
[self.log appendFormat:@"Note Aftertouch (Channel %d): %d, Amount: %d\n", channel, p1, p2];
}
- (void) readControllerEvent: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
[self.log appendFormat:@"Controller (Channel %d): %d, Value: %d\n", channel, p1, p2];
}
- (void) readProgramChange: (UInt8) channel parameter1: (UInt8) p1
{
[self.log appendFormat:@"Program Change (Channel %d): %d\n", channel, p1];
}
- (void) readChannelAftertouch: (UInt8) channel parameter1: (UInt8) p1
{
[self.log appendFormat:@"Channel Aftertouch (Channel %d): %d\n", channel, p1];
}
- (void) readPitchBend: (UInt8) channel parameter1: (UInt8) p1 parameter2: (UInt8) p2
{
// Pitch bend is a 14-bit value: p1 holds the low 7 bits, p2 the high 7 bits
UInt32 value = p2;
value <<= 7;
value |= p1;
[self.log appendFormat:@"Pitch Bend (Channel %d): %d\n", channel, value];
}
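// Walks the file: the "MThd" header chunk (format, track count, time
// division), followed by one "MTrk" chunk per track, each of which is
// a sequence of <delta-time, event> pairs.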
- (BOOL) parseData:(NSData *)midiData
{
BOOL success = YES;
self.log = [[[NSMutableString alloc] init] autorelease];
@try
{
// Parse data
data = midiData;
offset = 0;
// If size is less than header size, then abort
NSUInteger dataLength = [data length];
if((offset + MAIN_HEADER_SIZE) > dataLength)
{
NSException *ex = [NSException exceptionWithName:kFileCorrupt
reason:kFileCorrupt userInfo:nil];
@throw ex;
}
// Parse header
if(memcmp([data bytes], "MThd", 4) != 0)
{
NSException *ex = [NSException exceptionWithName:kFileCorrupt
reason:kInvalidHeader userInfo:nil];
@throw ex;
}
offset += 4;
UInt32 chunkSize = [self readDWord];
[self.log appendFormat:@"Header Chunk Size: %d\n", chunkSize];
// Read format
format = [self readWord];
[self.log appendFormat:@"Format: %d\n", format];
// Read track count
trackCount = [self readWord];
[self.log appendFormat:@"Tracks: %d\n", trackCount];
// Read time format
UInt16 timeDivision = [self readWord];
if((timeDivision & 0x8000) == 0)
{
timeFormat = MidiTimeFormatTicksPerBeat;
ticksPerBeat = timeDivision & 0x7fff;
[self.log appendFormat:@"Time Format: %d Ticks Per Beat\n", ticksPerBeat];
}
else
{
timeFormat = MidiTimeFormatFramesPerSecond;
// The high byte holds a negative SMPTE frame rate (-24, -25, -29 or -30)
framesPerSecond = -((SInt8)((timeDivision & 0xff00) >> 8));
ticksPerFrame = (timeDivision & 0xff);
[self.log appendFormat:@"Time Division: %d Frames Per Second, %d Ticks Per Frame\n", framesPerSecond, ticksPerFrame];
}
// Try to parse tracks
UInt32 expectedTrackOffset = offset;
for(UInt16 track = 0; track < trackCount; track++)
{
if(offset != expectedTrackOffset)
{
[self.log appendFormat:@"Track Offset Incorrect for Track %d - Offset: %d, Expected: %d", track, offset, expectedTrackOffset];
offset = expectedTrackOffset;
}
// Parse track header
if(memcmp([data bytes] + offset, "MTrk", 4) != 0)
{
NSException *ex = [NSException exceptionWithName:kFileCorrupt
reason:kInvalidTrackHeader userInfo:nil];
@throw ex;
}
offset += 4;
UInt32 trackSize = [self readDWord];
expectedTrackOffset = offset + trackSize;
[self.log appendFormat:@"Track %d : %d bytes\n", track, trackSize];
UInt32 trackEnd = offset + trackSize;
UInt32 deltaTime;
UInt8 nextByte = 0;
UInt8 peekByte = 0;
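// Read <delta-time, event> pairs until the declared track length is consumed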
while(offset < trackEnd)
{
deltaTime = [self readVariableValue];
[self.log appendFormat:@" (%05d): ", deltaTime];
// Peek at the next byte
peekByte = [self readByteAtRelativeOffset:0];
// If the high bit is set this is a new status byte; otherwise reuse the
// previous status byte (running status) and treat the peeked byte as data
if((peekByte & 0x80) != 0)
{
nextByte = [self readByte];
}
// Meta event
if(nextByte == 0xFF)
{
UInt8 metaEventType = [self readByte];
UInt32 metaEventLength = [self readVariableValue];
switch (metaEventType)
{
case META_SEQUENCE_NUMBER:
[self readMetaSequence];
break;
case META_TEXT_EVENT:
[self readMetaTextEvent: metaEventLength];
break;
case META_COPYRIGHT_NOTICE:
[self readMetaCopyrightNotice: metaEventLength];
break;
case META_TRACK_NAME:
[self readMetaTrackName: metaEventLength];
break;
case META_INSTRUMENT_NAME:
[self readMetaInstrumentName: metaEventLength];
break;
case META_LYRICS:
[self readMetaLyrics: metaEventLength];
break;
case META_MARKER:
[self readMetaMarker: metaEventLength];
break;
case META_CUE_POINT:
[self readMetaCuePoint: metaEventLength];
break;
case META_CHANNEL_PREFIX:
[self readMetaChannelPrefix];
break;
case META_END_OF_TRACK:
[self readMetaEndOfTrack];
break;
case META_SET_TEMPO:
[self readMetaSetTempo];
break;
case META_SMPTE_OFFSET:
[self readMetaSMPTEOffset];
break;
case META_TIME_SIGNATURE:
[self readMetaTimeSignature];
break;
case META_KEY_SIGNATURE:
[self readMetaKeySignature];
break;
case META_SEQ_SPECIFIC:
[self readMetaSeqSpecific: metaEventLength];
break;
default:
[self.log appendFormat:@"Meta Event Type: 0x%x, Length: %d\n", metaEventType, metaEventLength];
break;
}
offset += metaEventLength;
}
else if(nextByte == 0xf0)
{
// SysEx event
UInt32 sysExDataLength = [self readVariableValue];
[self.log appendFormat:@"SysEx Event - Length: %d\n", sysExDataLength];
offset += sysExDataLength;
}
else
{
// Channel event
UInt8 eventType = (nextByte & 0xF0) >> 4;
UInt8 channel = (nextByte & 0xF);
UInt8 p1 = 0;
UInt8 p2 = 0;
switch (eventType)
{
case CHANNEL_NOTE_OFF:
p1 = [self readByte];
p2 = [self readByte];
[self readNoteOff: channel parameter1: p1 parameter2: p2];
break;
case CHANNEL_NOTE_ON:
p1 = [self readByte];
p2 = [self readByte];
[self readNoteOn:channel parameter1:p1 parameter2:p2];
break;
case CHANNEL_NOTE_AFTERTOUCH:
p1 = [self readByte];
p2 = [self readByte];
[self readNoteAftertouch:channel parameter1:p1 parameter2:p2];
break;
case CHANNEL_CONTROLLER:
p1 = [self readByte];
p2 = [self readByte];
[self readControllerEvent:channel parameter1:p1 parameter2:p2];
break;
case CHANNEL_PROGRAM_CHANGE:
p1 = [self readByte];
[self readProgramChange:channel parameter1:p1];
break;
case CHANNEL_AFTERTOUCH:
p1 = [self readByte];
[self readChannelAftertouch:channel parameter1:p1];
break;
case CHANNEL_PITCH_BEND:
p1 = [self readByte];
p2 = [self readByte];
[self readPitchBend:channel parameter1:p1 parameter2:p2];
break;
default:
break;
}
}
}
}
}
@catch (NSException *exception)
{
success = NO;
[self.log appendString:[exception reason]];
}
return success;
}
@end
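For completeness, here is a minimal sketch of how the parser above might be driven. It assumes manual reference counting (to match the parser code) and a MIDI file bundled with the app; the resource name "song" is only a placeholder:
// Usage sketch - assumes MidiParser.h is imported and that "song.mid"
// (a placeholder name) has been added to the app bundle
NSString *path = [[NSBundle mainBundle] pathForResource:@"song" ofType:@"mid"];
NSData *midiData = [NSData dataWithContentsOfFile:path];
MidiParser *parser = [[MidiParser alloc] init];
BOOL ok = [parser parseData:midiData];
NSLog(@"%@", parser.log);
if(!ok)
{
NSLog(@"MIDI data could not be parsed completely");
}
[parser release];
Since parseData: appends the exception reason to the log when parsing fails, the log is worth printing in either case.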
Regarding "ios - Reading a MIDI file on iOS", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/7193695/