
javascript - How to switch audio source using Microphone.getMicrophone() in ActionScript 3


I have a webcam streaming application based on the Wowza webcam.fla example. The application streams audio and video from Flash to a Wowza server, which handles transcoding and so on.

We are trying to add a feature that lets the audio source be switched to any other system audio source. So far we have managed to build a dropdown listing all the devices and to handle its callback, but even though we start and stop the stream with the doConnect() function, the audio source still appears to be the default one.

import flash.media.*;
import flash.geom.*;
import flash.net.*;

var parsed:Object = root.loaderInfo.parameters;

var nc:NetConnection = null;
var nsPublish:NetStream = null;
var nsPlay:NetStream = null;
var camera:Camera = null;
var microphone:Microphone = null;

// Testing
var serverName:String = "rtmp://stream-na.example.tv:1935/live";
var movieName:String = "streamName";

var flushVideoBufferTimer:Number = 0;

// Quality settings

var videoBitrate:Number = 200000;
var videoQuality:Number = 80; // Quality %

var videoWidth:Number = 640;
var videoHeight:Number = 360;

var videoFrameRate:Number = 30;


//////////////// UI Functions Below

import fl.controls.ComboBox;
import fl.data.DataProvider;

var aCb:ComboBox = new ComboBox();

function createAudioComboBox(sources)
{

var sourcesArray:Array = new Array();

aCb.dropdownWidth = 210;
aCb.width = 200;
aCb.move(0, 365);
aCb.prompt = "Change Audio Source";
aCb.dataProvider = new DataProvider(sourcesArray);
aCb.addEventListener(Event.CHANGE, changeAudioHandler);

addChild(aCb);

for (var index in sources)
{
//ExternalInterface.call("logBrowserStreaming", sources[index]);

aCb.addItem( { label: sources[index], data: index} );

}

function changeAudioHandler(event:Event):void
{

doConnect();

//var request:URLRequest = new URLRequest();
//request.url = ComboBox(event.target).selectedItem.data;
//navigateToURL(request);
//aCb.selectedIndex = -1;
var audioSource = ComboBox(event.target).selectedItem.data;

//microphone:Microphone = null;

microphone = Microphone.getMicrophone(audioSource);

microphone.rate = 16;
microphone.codec = SoundCodec.SPEEX;
microphone.encodeQuality = 10; // Quality is still poor; offer better audio in a native app?
microphone.setSilenceLevel(0, -1);
microphone.setUseEchoSuppression(true);

//ExternalInterface.call("logBrowserStreaming", audioSource);

// Trigger restart camera...
//startCamera(); // Nope

doConnect();

}

}

//////////////// Core Streaming Functions Below


function startCamera()
{
// get the default Flash camera and microphone
camera = Camera.getCamera();
microphone = Microphone.getMicrophone();

// here are all the quality and performance settings
if (camera != null)
{
//camera.setMode(1280, 720, 30, false);
camera.setMode(videoWidth, videoHeight, videoFrameRate, false); // false favors frame rate over capture size; see http://www.flash-communications.net/technotes/setMode/index.html
camera.setQuality(videoBitrate, videoQuality);
// Max 800kbps;
camera.setKeyFrameInterval(2);

// List video source names
// sourceVideoLabel.text += Camera.names;

// Create audio sources dropdown

// Hide video sources for now...
//createVideoComboBox(Camera.names);


}
else
{
sourceVideoLabel.text = "No Camera Found\n";
}
if ( microphone != null)
{
microphone.rate = 16;
microphone.codec = SoundCodec.SPEEX;
microphone.encodeQuality = 10; // Quality is still poor; offer better audio in a native app?
microphone.setSilenceLevel(0, -1);
microphone.setUseEchoSuppression(true);

// List audio sources names;
// sourceVideoLabel.text += Microphone.names;

// Create audio sources dropdown
createAudioComboBox(Microphone.names);

// Don't show audio slider for now...
// createAudioSlider();


// Don't monitor audio level for now...
//monitorAudioLevel();

}
else
{
sourceVideoLabel.text += "No Microphone Found\n";
}

nameStr.text = movieName;
AppendCheckbox.selected = false;
connect.connectStr.text = serverName;
connect.connectButton.addEventListener(MouseEvent.CLICK, doConnect);

//enablePlayControls(false);

doConnect();
}

function ncOnStatus(infoObject:NetStatusEvent)
{
trace("nc: "+infoObject.info.code+" ("+infoObject.info.description+")");
if (infoObject.info.code == "NetConnection.Connect.Failed")
{
prompt.text = "Connection failed. Try again or email support@chew.tv";
}
else if (infoObject.info.code == "NetConnection.Connect.Rejected")
{
// Hide connect fail...
prompt.text = infoObject.info.description;
}
}

// Ask for permission to use the camera and show the preview to the user
// event:MouseEvent
// doConnect toggles connections on and off.
function doConnect()
{
// connect to the Wowza Media Server
if (nc == null)
{
// create a connection to the wowza media server
nc = new NetConnection();
nc.addEventListener(NetStatusEvent.NET_STATUS, ncOnStatus);
nc.connect(connect.connectStr.text);

//connect.connectButton.label = "Disconnect";

// uncomment this to monitor frame rate and buffer length
//setInterval("updateStreamValues", 500);

// Attach camera to preview
videoCamera.clear();
videoCamera.attachCamera(camera);

//enablePlayControls(true);

// Pass status to
// ExternalInterface.call("logBrowserStreaming", "cameraagreed");

}
else
{
nsPublish = null;
nsPlay = null;

videoCamera.attachNetStream(null);
videoCamera.clear();

videoRemote.attachNetStream(null);
videoRemote.clear();

nc.close();
nc = null;

//enablePlayControls(false);

doSubscribe.label = 'Play';
doPublish.label = 'Stream';
AppendCheckbox.selected = false;

connect.connectButton.label = "Connect";
prompt.text = "";
}
}

// function to monitor the frame rate and buffer length
function updateStreamValues()
{
if (nsPlay != null)
{
fpsText.text = (Math.round(nsPlay.currentFPS*1000)/1000)+" fps";
bufferLenText.text = (Math.round(nsPlay.bufferLength*1000)/1000)+" secs";
}
else
{
fpsText.text = "";
bufferLenText.text = "";
}
}

function nsPlayOnStatus(infoObject:NetStatusEvent)
{
trace("nsPlay: onStatus: "+infoObject.info.code+" ("+infoObject.info.description+")");
if (infoObject.info.code == "NetStream.Play.StreamNotFound" || infoObject.info.code == "NetStream.Play.Failed")
{
prompt.text = infoObject.info.description;
}
}

function doCloseRecord()
{
// after we have hit "Stop" recording and after the buffered video data has been
// sent to the Wowza Media Server close the publishing stream
nsPublish.publish("null");
}

// this function gets called every 250 ms to monitor the;
// progress of flushing the video buffer. Once the video
// buffer is empty we close publishing stream
function flushVideoBuffer()
{
var buffLen:Number = nsPublish.bufferLength;
if (buffLen == 0)
{
clearInterval(flushVideoBufferTimer);
flushVideoBufferTimer = 0;
doCloseRecord();
doPublish.label = 'Stream';
}
}

function nsPublicOnStatus(infoObject:NetStatusEvent)
{
trace("nsPublish: "+infoObject.info.code+" ("+infoObject.info.description+")");

// After calling nsPublish.publish(false); we wait for a status;
// event of "NetStream.Unpublish.Success" which tells us all the video
// and audio data has been written to the flv file. It is at this time
// that we can start playing the video we just recorded.
if (infoObject.info.code == "NetStream.Unpublish.Success")
{
//doPlayStart();
}

if (infoObject.info.code == "NetStream.Play.StreamNotFound" || infoObject.info.code == "NetStream.Play.Failed")
{
prompt.text = infoObject.info.description;
}
}

function initH264Recording(nsPublish:NetStream)
{
var h264Settings:H264VideoStreamSettings = new H264VideoStreamSettings();
h264Settings.setProfileLevel(H264Profile.BASELINE, H264Level.LEVEL_3);
nsPublish.videoStreamSettings = h264Settings;
}

// Start recording video to the server
function doStreamStart()
{
//prompt.text = "Starting stream with mic...";
//prompt.text = microphone;

ExternalInterface.call("logBrowserStreaming", "starting stream");

// stop video playback
//doPlayStop();

// create a new NetStream object for publishing
nsPublish = new NetStream(nc);

var nsPublishClient:Object = new Object();
nsPublish.client = nsPublishClient;

// Set the H.264 encoding parameters
if (testVersion(11,0,0,0))
{
initH264Recording(nsPublish);
}
else
{
prompt.text = "Flash player 11 or greater is required for H.264 encoding (" + Capabilities.version + ").";

}

// trace the NetStream status information
nsPublish.addEventListener(NetStatusEvent.NET_STATUS, nsPublicOnStatus);

// publish the stream by name;
nsPublish.publish(nameStr.text, (AppendCheckbox.selected?"append":"record"));

// add custom metadata to the header of the .flv file;
var metaData:Object = new Object();
metaData["description"] = "Recorded using WebcamRecording example.";
nsPublish.send("@setDataFrame", "onMetaData", metaData);

// attach the camera and microphone to the server;
nsPublish.attachCamera(camera);
nsPublish.attachAudio(microphone);

ExternalInterface.call("logBrowserStreaming", microphone);

// set the buffer time to 20 seconds to buffer 20 seconds of video;
// data for better performance and higher quality video
nsPublish.bufferTime = 20;

// Disable the audio choice dropdown
aCb.enabled = false;

}

function doStreamStop()
{

ExternalInterface.call("logBrowserStreaming", "stopping stream");

// stop streaming video and audio to the publishing
// NetStream object
nsPublish.attachAudio(null);
nsPublish.attachCamera(null);

// After stopping the publishing we need to check if there is;
// video content in the NetStream buffer. If there is data
// we are going to monitor the video upload progress by calling
// flushVideoBuffer every 250ms. If the buffer length is 0
// we close the recording immediately.
var buffLen:Number = nsPublish.bufferLength;
if (buffLen > 0)
{
flushVideoBufferTimer = setInterval(flushVideoBuffer,250);
doPublish.label = 'Wait...';
}
else
{
trace("nsPublish.publish(null)");
doCloseRecord();
doPublish.label = 'Start';
}

// Disable the audio choice dropdown
aCb.enabled = true;

}

// Test version function checks if the current flash version supports H.264 Encoding.
function testVersion(v0:Number, v1:Number, v2:Number, v3:Number):Boolean
{
var version:String = Capabilities.version;
var index:Number = version.indexOf(" ");
version = version.substr(index+1);
var verParts:Array = version.split(",");

var i:Number;

var ret:Boolean = true;
while (true)
{
if (Number(verParts[0]) < v0)
{
ret = false;
break;
}
else if (Number(verParts[0]) > v0)
{
break;

}
if (Number(verParts[1]) < v1)
{
ret = false;
break;
}
else if (Number(verParts[1]) > v1)
{
break;

}
if (Number(verParts[2]) < v2)
{
ret = false;
break;
}
else if (Number(verParts[2]) > v2)
{
break;

}
if (Number(verParts[3]) < v3)
{
ret = false;
break;
}
break;
}
trace("testVersion: "+Capabilities.version+">="+v0+","+v1+","+v2+","+v3+": "+ret);

return ret;
}


// External trigger from Javascript;

// Allow stream to start with startBrowserStreaming call from js
ExternalInterface.addCallback("startBrowserStreaming", doStreamStart);

// Allow stream to stop with stopBrowserStreaming call from js;
ExternalInterface.addCallback("stopBrowserStreaming", doStreamStop);

stage.align = "TL";
stage.scaleMode = "noScale";

startCamera();

Best Answer

You can switch the audio source without touching the NetConnection and/or the NetStream.

Take this simple example, where I use a button to change my audio source:

const server:String = 'rtmp://localhost/live';
const stream:String = 'live';

var nc:NetConnection;
var ns_publish:NetStream;

nc = new NetConnection();
nc.addEventListener(
NetStatusEvent.NET_STATUS,
function(e:NetStatusEvent):void {
if(e.info.code == 'NetConnection.Connect.Success'){
publish();
}
}
);
nc.addEventListener(AsyncErrorEvent.ASYNC_ERROR, function(e:AsyncErrorEvent):void {});
nc.connect(server);

function publish():void {

var cam:Camera = Camera.getCamera();

// In my case I have two microphones, and I start with the first one
var mic:Microphone = Microphone.getMicrophone(0);
ns_publish = new NetStream(nc);
ns_publish.attachAudio(mic);
ns_publish.attachCamera(cam);
ns_publish.publish(stream, 'record');
}

btn_switch_mic.addEventListener(MouseEvent.CLICK, function(e:MouseEvent):void {
// I can switch to the second mic without re-initializing my NetConnection and/or my NetStream
var mic:Microphone = Microphone.getMicrophone(1);
ns_publish.attachAudio(mic);
});

I tested this code with Wowza Streaming Engine 4.1.1 (the free version, without the Wowza Transcoder AddOn of course) and with Flash Media Server 4.5, and it works fine.

Note: the same approach can be used to change the video source (the camera).
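For example, here is a minimal sketch of that idea (not from the original answer), assuming a hypothetical btn_switch_cam button and a second camera sitting at index position 1 of Camera.names; note that Camera.getCamera() expects that index as a string:

btn_switch_cam.addEventListener(MouseEvent.CLICK, function(e:MouseEvent):void {
    // Hypothetical: pick the second camera; Camera.getCamera() takes the Camera.names index as a string
    var cam:Camera = Camera.getCamera("1");
    // Re-attach the new camera to the existing publishing NetStream
    ns_publish.attachCamera(cam);
});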

Hope this helps.

Regarding "javascript - How to switch audio source using Microphone.getMicrophone() in ActionScript 3", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/27747565/
