Ant Media Server provides a WebSocket interface for publishing and playing WebRTC streams. In this document, we will show how to both publish and play WebRTC audio streams using the JavaScript SDK.

How to Publish an Audio-Only WebRTC Stream with the JavaScript SDK

Let’s see how to do it step by step.

1. Load the scripts below in the head element of the HTML file

<head>
...
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script src="js/webrtc_adaptor.js" ></script>
...
</head>

2. Create a local audio element somewhere in the body tag. Its id ("localVideo") must match the localVideoId value passed to WebRTCAdaptor in step 3.

<audio id="localVideo" autoplay controls muted></audio>

3. Initialize the WebRTCAdaptor object in a script tag

var pc_config = null;

var sdpConstraints = {
    OfferToReceiveAudio : false,
    OfferToReceiveVideo : false
};

var mediaConstraints = {
    video : false,
    audio : true
};

var websocketURL = "ws://" + location.hostname + ":5080/WebRTCAppEE/websocket";

if (location.protocol.startsWith("https")) {
    websocketURL = "wss://" + location.hostname + ":5443/WebRTCAppEE/websocket";
}

var webRTCAdaptor = new WebRTCAdaptor({
    websocket_url : websocketURL,
    mediaConstraints : mediaConstraints,
    peerconnection_config : pc_config,
    sdp_constraints : sdpConstraints,
    localVideoId : "localVideo",
    debug : true,
    callback : function(info, description) {
        if (info == "initialized") {
            console.log("initialized");
            start_publish_button.disabled = false;
            stop_publish_button.disabled = true;
        } else if (info == "publish_started") {
            //stream is being published
            console.log("publish started");
            start_publish_button.disabled = true;
            stop_publish_button.disabled = false;
            startAnimation();
        } else if (info == "publish_finished") {
            //stream is finished
            console.log("publish finished");
            start_publish_button.disabled = false;
            stop_publish_button.disabled = true;
        } else if (info == "closed") {
            //connection to the server is closed
            if (typeof description != "undefined") {
                console.log("Connection closed: " + JSON.stringify(description));
            }
        }
    },
    callbackError : function(error, message) {
        //some of the possible errors: NotFoundError, SecurityError, PermissionDeniedError
        console.log("error callback: " + JSON.stringify(error));

        var errorMessage = JSON.stringify(error);
        if (typeof message != "undefined") {
            errorMessage = message;
        }

        if (error.indexOf("NotFoundError") != -1) {
            errorMessage = "Camera or Mic is not found or not allowed in your device";
        } else if (error.indexOf("NotReadableError") != -1 || error.indexOf("TrackStartError") != -1) {
            errorMessage = "Camera or Mic is being used by another process that does not allow reading the devices";
        } else if (error.indexOf("OverconstrainedError") != -1 || error.indexOf("ConstraintNotSatisfiedError") != -1) {
            errorMessage = "There is no device found that fits your video and audio constraints. You may change video and audio constraints";
        } else if (error.indexOf("NotAllowedError") != -1 || error.indexOf("PermissionDeniedError") != -1) {
            errorMessage = "You are not allowed to access camera and mic.";
        } else if (error.indexOf("TypeError") != -1) {
            errorMessage = "Video/Audio is required";
        }
        alert(errorMessage);
    }
});
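Note that the callback above calls startAnimation(), which is not part of the SDK; in the sample pages it only drives a small "broadcasting" indicator. If your page does not define it, a minimal, hypothetical stand-in could be:

// Hypothetical stand-in for the startAnimation() helper referenced above.
// In the sample pages it animates a broadcasting icon; here it only logs.
function startAnimation() {
    console.log("streaming is active");
}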

4. Call publish(streamId) to Start Publishing

In order to publish a WebRTC stream to Ant Media Server, WebRTCAdaptor's publish(streamId) function should be called. You can, for example, call this function in a button click handler, as in the snippets below.

function startPublishing() {
    // Get the streamId from a text box
    streamId = streamNameBox.value;
    webRTCAdaptor.publish(streamId);
}

<button onclick="startPublishing()" class="btn btn-info" 
disabled id="start_publish_button">Start Publishing</button>
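
startPublishing() reads the stream id from an element with id streamNameBox, which is not shown above. Assuming it is a plain text input, its markup could look like this (the default value is just a placeholder):

<input type="text" id="streamNameBox" value="stream1">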

5. Call stop(streamId) to Stop Publishing

You may want to stop publishing at any time by calling the stop function of WebRTCAdaptor.

function stopPublishing() {
    webRTCAdaptor.stop(streamId);
}
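
The callbacks in step 3 also enable and disable a button with id stop_publish_button, which is not shown above. Assuming it mirrors the Start Publishing button, its markup could be:

<button onclick="stopPublishing()" class="btn btn-info"
disabled id="stop_publish_button">Stop Publishing</button>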

Please take a look at the WebRTCAppEE/audio_publish.html file in order to see how the JavaScript SDK can be used for publishing an audio stream.

How to Play an Audio-Only WebRTC Stream with the JavaScript SDK

1. Load the scripts below in the head element of the HTML file

<head>
...
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script src="js/webrtc_adaptor.js" ></script>
...
</head>

2. Create a remote audio element somewhere in the body tag. Its id ("remoteVideo") must match the remoteVideoId value passed to WebRTCAdaptor in step 3.

<audio id="remoteVideo" autoplay controls muted></audio>

3. Initialize the WebRTCAdaptor object in a script tag

var pc_config = null;

var sdpConstraints = {
    OfferToReceiveAudio : true,
    OfferToReceiveVideo : true
};

var mediaConstraints = {
    video : false,
    audio : false
};

var websocketURL = "ws://" + location.hostname + ":5080/WebRTCAppEE/websocket";

if (location.protocol.startsWith("https")) {
    websocketURL = "wss://" + location.hostname + ":5443/WebRTCAppEE/websocket";
}

var webRTCAdaptor = new WebRTCAdaptor({
    websocket_url : websocketURL,
    mediaConstraints : mediaConstraints,
    peerconnection_config : pc_config,
    sdp_constraints : sdpConstraints,
    remoteVideoId : "remoteVideo",
    isPlayMode : true,
    debug : true,
    callback : function(info, description) {
        if (info == "initialized") {
            console.log("initialized");
            start_play_button.disabled = false;
            stop_play_button.disabled = true;
        } else if (info == "play_started") {
            //joined the stream
            console.log("play started");
            start_play_button.disabled = true;
            stop_play_button.disabled = false;
            startAnimation();
        } else if (info == "play_finished") {
            //left the stream
            console.log("play finished");
            start_play_button.disabled = false;
            stop_play_button.disabled = true;
        } else if (info == "closed") {
            //connection to the server is closed
            if (typeof description != "undefined") {
                console.log("Connection closed: " + JSON.stringify(description));
            }
        }
    },
    callbackError : function(error, message) {
        //some of the possible errors: NotFoundError, SecurityError, PermissionDeniedError
        console.log("error callback: " + JSON.stringify(error));

        var errorMessage = JSON.stringify(error);
        if (typeof message != "undefined") {
            errorMessage = message;
        }
        alert(errorMessage);
    }
});

4. Call play(streamId) to Start Playing

In order to play a WebRTC stream from Ant Media Server, WebRTCAdaptor's play(streamId) function should be called. You can, for example, call this function in a button click handler, as in the snippets below.

function startPlaying() {
    // Get the streamId from a text box
    streamId = streamNameBox.value;
    webRTCAdaptor.play(streamId);
}

<button onclick="startPlaying()" class="btn btn-info" 
disabled id="start_play_button">Start Playing</button>

5. Call stop(streamId) to Stop Playing

You may want to stop playing at any time by calling the stop function of WebRTCAdaptor.

function stopPlaying() {
    webRTCAdaptor.stop(streamId);
}
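
As on the publish side, the callbacks toggle a button with id stop_play_button, which is not shown above. Assuming it mirrors the Start Playing button, its markup could be:

<button onclick="stopPlaying()" class="btn btn-info"
disabled id="stop_play_button">Stop Playing</button>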

Please take a look at the WebRTCAppEE/audio_player.html file in order to see how the JavaScript SDK can be used for playing an audio stream.
