Closed PauloTroguilho closed 6 days ago
Hello, I did a try catch on the method to handle the error and I'm getting new error, could you help me? thankful ! Error: Application:UserAgentServer.exe CoreCLR Version:4.7 .net core version 3.1.4 Description:The process was terminated due to an unhandled exeception. Exception Info: execption code c00000005, exception address 00007FF928F938D4
Can you share your code? It seems the audio file you're attempting to play is getting closed for some reason.
//----------------------------------------------------------------------------- // Filename: Program.cs // // Description: An example program of how to use the SIPSorcery core library to // act as the server for a SIP call. // // Author(s): // Aaron Clauson (aaron@sipsorcery.com) // // History: // 09 Oct 2019 Aaron Clauson Created, Dublin, Ireland. // 26 Feb 2020 Aaron Clauson Switched RTP to use RtpAVSession. // // License: // BSD 3-Clause "New" or "Revised" License, see included LICENSE.md file. //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // This example can be used with the automated SIP test tool [SIPp] (https://github.com/SIPp/sipp) // and its inbuilt User Agent Client scenario. // Note: SIPp doesn't support IPv6. // // To install on WSL: // $ sudo apt install sip-tester // // Running tests (press the '+' key while test is running to increase the call rate): // For UDP testing: sipp -sn uac 127.0.0.1 // For TCP testing: sipp -sn uac localhost -t t1 //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // Media files: // The "Simplicity" audio used in this example is from an artist called MACROFORM // and can be downloaded directly from: https://www.jamendo.com/track/579315/simplicity?language=en // The use of the audio is licensed under the Creative Commons // https://creativecommons.org/licenses/by-nd/2.0/ // The audio is free for personal use but a license may be required for commercial use. // If it sounds familiar this particular file is also included as part of Asterisk's // (asterisk.org) music on hold. // // ffmpeg can be used to convert the mp3 file into the required format for placing directly // into the RTP packets. Currently this example supports two audio formats: G711.ULAW (or PCMU) // and G722. // // ffmpeg -i Macroform_-Simplicity.mp3 -ac 1 -ar 8k -ab 64k -f mulaw Macroform-Simplicity.ulaw // ffmpeg -i Macroform-Simplicity.mp3 -ar 16k -acodec g722 Macroform-_Simplicity.g722 //-----------------------------------------------------------------------------
using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Sockets; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Threading.Tasks; using Microsoft.CognitiveServices.Speech; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Serilog; using Serilog.Extensions.Logging; using SIPSorcery.Media; using SIPSorcery.Net; using SIPSorcery.SIP; using SIPSorcery.SIP.App; using SIPSorceryMedia.Abstractions; using SIPSorceryMedia.Windows;
namespace SIPSorcery { class Program { private static int SIP_LISTEN_PORT = 5060; private static int SIPS_LISTEN_PORT = 5060; //private static int SIP_WEBSOCKET_LISTEN_PORT = 80; //private static int SIP_SECURE_WEBSOCKET_LISTEN_PORT = 443; private static string SIPS_CERTIFICATE_PATH = "localhost.pfx"; private static string SSK = "iiiiiii"; private static string SSR = "westus"; private static Microsoft.Extensions.Logging.ILogger Log = NullLogger.Instance; private static ConcurrentDictionary<string, SIPServerUserAgent> _calls = new ConcurrentDictionary<string, SIPServerUserAgent>();
private static ConcurrentDictionary<string, string> _callsINVITE = new ConcurrentDictionary<string, string>();
public static FileStream dia = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream tarde = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream noite = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream gostaria = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream sevoce = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d1 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream seconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d2 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream senconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d3 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
static void Main(string[] args)
{
Log.LogDebug("SIPSorcery user agent server example.");
Log.LogDebug("Press h to hangup a call or ctrl-c to exit.");
Log = AddConsoleLogger();
IPAddress listenAddress = IPAddress.Any;
IPAddress listenIPv6Address = IPAddress.IPv6Any;
if (args != null && args.Length > 0)
{
if (!IPAddress.TryParse(args[0], out var customListenAddress))
{
Log.LogDebug($"Command line argument could not be parsed as an IP address \"{args[0]}\"");
listenAddress = IPAddress.Any;
}
else
{
if (customListenAddress.AddressFamily == AddressFamily.InterNetwork)
{
listenAddress = customListenAddress;
}
if (customListenAddress.AddressFamily == AddressFamily.InterNetworkV6)
{
listenIPv6Address = customListenAddress;
}
}
}
// Set up a default SIP transport.
var sipTransport = new SIPTransport();
//var localhostCertificate = new X509Certificate2(SIPS_CERTIFICATE_PATH);
// IPv4 channels.
//sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(listenAddress, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(listenAddress, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTLSChannel(localhostCertificate, new IPEndPoint(listenAddress, SIPS_LISTEN_PORT)));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.Any, SIP_WEBSOCKET_LISTEN_PORT));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.Any, SIP_SECURE_WEBSOCKET_LISTEN_PORT, localhostCertificate));
sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(IPAddress.Parse("10.192.8.160"), SIP_LISTEN_PORT)));
sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(IPAddress.Parse("10.192.8.160"), SIP_LISTEN_PORT)));
//// IPv6 channels.
//sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(listenIPv6Address, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(listenIPv6Address, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTLSChannel(localhostCertificate, new IPEndPoint(listenIPv6Address, SIPS_LISTEN_PORT)));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.IPv6Any, SIP_WEBSOCKET_LISTEN_PORT));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.IPv6Any, SIP_SECURE_WEBSOCKET_LISTEN_PORT, localhostCertificate));
EnableTraceLogs(sipTransport);
string executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
// To keep things a bit simpler this example only supports a single call at a time and the SIP server user agent
// acts as a singleton
//SIPServerUserAgent uas = null;
//CancellationTokenSource rtpCts = null; // Cancellation token to stop the RTP stream.
//VoIPMediaSession rtpSession = null;
// Because this is a server user agent the SIP transport must start listening for client user agents.
sipTransport.SIPTransportRequestReceived += async (SIPEndPoint localSIPEndPoint, SIPEndPoint remoteEndPoint, SIPRequest sipRequest) =>
{
try
{
SIPServerUserAgent uas = null;
CancellationTokenSource rtpCts = null; // Cancellation token to stop the RTP stream.
VoIPMediaSession rtpSession = null;
if (sipRequest.Method == SIPMethodsEnum.INVITE)
{
IEnumerable<string> list = _callsINVITE.Keys.Where(p => p == sipRequest?.Header?.CallId);
if (list.Count() > 0)
{
Log.LogDebug("List>0");
}
else
{
Log.LogDebug("Call invite");
var Name = string.Empty;
if (sipRequest?.Header?.From?.FromName == null)
{
Name = "Nulo";
}
else
{
Name = sipRequest?.Header?.From?.FromName;
}
_callsINVITE.TryAdd(sipRequest?.Header?.CallId, Name);
Log.LogInformation($"Incoming call request: {localSIPEndPoint}<-{remoteEndPoint} {sipRequest.URI}.");
var localAudios = @"C:\URA\recoreapp_URA\";
// Check there's a codec we support in the INVITE offer.
var offerSdp = SDP.ParseSDPDescription(sipRequest.Body);
IPEndPoint dstRtpEndPoint = SDP.GetSDPRTPEndPoint(sipRequest.Body);
if (offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMA)))
{
Log.LogDebug($"Client offer contained PCMA audio codec.");
WindowsAudioEndPoint winAudio_n = new WindowsAudioEndPoint(new AudioEncoder());
// winAudio_n.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.G722,AudioCodecsEnum.PCMA,AudioCodecsEnum.PCMU });
winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMA);
AudioExtrasSource extrasSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
// rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = extrasSource });
rtpSession = new VoIPMediaSession(winAudio_n.ToMediaEndPoints());
rtpSession.AcceptRtpFromAny = true;
var setResult = rtpSession?.SetRemoteDescription(SdpType.offer, offerSdp);
if (setResult != SetDescriptionResultEnum.OK)
{
// Didn't get a match on the codecs we support.
SIPResponse noMatchingCodecResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.NotAcceptableHere, setResult.ToString());
await sipTransport?.SendResponseAsync(noMatchingCodecResponse);
}
else
{
// If there's already a call in progress hang it up. Of course this is not ideal for a real softphone or server but it
// means this example can be kept simpler.
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
}
//rtpCts?.Cancel();
//rtpCts = new CancellationTokenSource();
UASInviteTransaction uasTransaction = new UASInviteTransaction(sipTransport, sipRequest, null);
uas = new SIPServerUserAgent(sipTransport, null, uasTransaction, null);
uas.CallCancelled += (uasAgent) =>
{
rtpCts?.Cancel();
rtpSession.Close(null);
uas?.Hangup(true);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
};
// rtpSession.OnRtpClosed += (reason) => uas?.Hangup(false);
rtpSession.OnTimeout += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
};
rtpSession.OnRtcpBye += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
};
rtpSession.OnRtpClosed += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
// This app only uses each SIP user agent once so here the agent is
// explicitly closed to prevent is responding to any new SIP requests.
//uas?.Hangup(false);
rtpCts?.Cancel();
rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
GC.Collect();
};
uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
// await Task.Delay(100);
uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);
// await Task.Delay(100);
var answerSdp = rtpSession.CreateAnswer(null);
uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
if (uas.IsUASAnswered)
{
if (uas.IsHungup != true)
{
_calls.TryAdd(uas?.SIPDialogue?.CallId, uas);
}
}
try
{
var HFNS = Name.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = Name.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
Log.LogDebug(fileNamemm);
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
var config_t = SpeechConfig.FromSubscription("iiii", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
}
catch (Exception e)
{
Log.LogError(e.Message);
}
await rtpSession.Start();
await winAudio_n.PauseAudio();
// if()
//voipSession.AudioExtrasSource.AudioSamplePeriodMilliseconds = 20;
await rtpSession.AudioExtrasSource.StartAudio();
//await Task.Delay(100);
if (DateTime.Now.Hour < 12)
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(dia, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
else if (DateTime.Now.Hour < 18)
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(tarde, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
else
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(noite, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(gostaria, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
var HFN = Name;
var HFNS = HFN.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = HFN.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
//await Task.Delay(1000);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
var config_t = SpeechConfig.FromSubscription("iiiiii", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t?.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(sevoce, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d1, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(seconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(d2, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(senconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d3, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
//Task t2 = Task.Factory.StartNew(() =>
//{
// // Thread.Sleep(300);
// foreach (var item in _callsINVITE)
// {
// Log.LogDebug(item.Key + "-" + item.Value);
// // Thread.Sleep(150);
// }
//});
//try
//{
// Task.WaitAll(t2);
//}
//catch (AggregateException ex) // No exception
//{
//Log.LogWarning(ex.Flatten().Message);
//}
uas?.Hangup(false);
await rtpSession?.AudioExtrasSource.PauseAudio();
await winAudio_n?.CloseAudio();
rtpCts?.Cancel();
rtpSession?.Close(null);
//winAudio_n?.CloseAudio();
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
//// This app only uses each SIP user agent once so here the agent is
//// explicitly closed to prevent is responding to any new SIP requests.
////uas?.Hangup(false);
//rtpCts?.Cancel();
//rtpSession.Close(null);
////winAudio_n?.CloseAudio();
//ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
}
}
else if(offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMU)))
{
Log.LogDebug($"Client offer contained PCMU audio codec.");
WindowsAudioEndPoint winAudio_n = new WindowsAudioEndPoint(new AudioEncoder());
// winAudio_n.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.G722,AudioCodecsEnum.PCMA,AudioCodecsEnum.PCMU });
winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMU);
// AudioExtrasSource extrasSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
// rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = extrasSource });
rtpSession = new VoIPMediaSession(winAudio_n.ToMediaEndPoints());
rtpSession.AcceptRtpFromAny = true;
var setResult = rtpSession?.SetRemoteDescription(SdpType.offer, offerSdp);
if (setResult != SetDescriptionResultEnum.OK)
{
// Didn't get a match on the codecs we support.
SIPResponse noMatchingCodecResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.NotAcceptableHere, setResult.ToString());
await sipTransport?.SendResponseAsync(noMatchingCodecResponse);
}
else
{
// If there's already a call in progress hang it up. Of course this is not ideal for a real softphone or server but it
// means this example can be kept simpler.
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
}
//rtpCts?.Cancel();
//rtpCts = new CancellationTokenSource();
UASInviteTransaction uasTransaction = new UASInviteTransaction(sipTransport, sipRequest, null);
uas = new SIPServerUserAgent(sipTransport, null, uasTransaction, null);
uas.CallCancelled += (uasAgent) =>
{
rtpCts?.Cancel();
rtpSession.Close(null);
uas?.Hangup(true);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
};
// rtpSession.OnRtpClosed += (reason) => uas?.Hangup(false);
rtpSession.OnTimeout += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
};
rtpSession.OnRtcpBye += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
};
rtpSession.OnRtpClosed += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
// This app only uses each SIP user agent once so here the agent is
// explicitly closed to prevent is responding to any new SIP requests.
//uas?.Hangup(false);
rtpCts?.Cancel();
rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
GC.Collect();
};
uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
// await Task.Delay(100);
uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);
// await Task.Delay(100);
var answerSdp = rtpSession.CreateAnswer(null);
uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
if (uas.IsUASAnswered)
{
if (uas.IsHungup != true)
{
_calls.TryAdd(uas?.SIPDialogue?.CallId, uas);
}
}
try
{
var HFNS = Name.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = Name.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
Log.LogDebug(fileNamemm);
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
var config_t = SpeechConfig.FromSubscription("iiiiiiii", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
}
catch (Exception e)
{
Log.LogError(e.Message);
}
await rtpSession.Start();
await winAudio_n.PauseAudio();
// if()
//voipSession.AudioExtrasSource.AudioSamplePeriodMilliseconds = 20;
await rtpSession.AudioExtrasSource.StartAudio();
//await Task.Delay(100);
if (DateTime.Now.Hour < 12)
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(dia, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
else if (DateTime.Now.Hour < 18)
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(tarde, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
else
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(noite, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(gostaria, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
var HFN = Name;
var HFNS = HFN.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = HFN.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
//await Task.Delay(1000);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
var config_t = SpeechConfig.FromSubscription("iiiiii", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t?.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(sevoce, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d1, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(seconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(d2, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(senconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d3, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
//Task t2 = Task.Factory.StartNew(() =>
//{
// // Thread.Sleep(300);
// foreach (var item in _callsINVITE)
// {
// Log.LogDebug(item.Key + "-" + item.Value);
// // Thread.Sleep(150);
// }
//});
//try
//{
// Task.WaitAll(t2);
//}
//catch (AggregateException ex) // No exception
//{
//Log.LogWarning(ex.Flatten().Message);
//}
uas?.Hangup(false);
await rtpSession?.AudioExtrasSource.PauseAudio();
await winAudio_n?.CloseAudio();
rtpCts?.Cancel();
rtpSession?.Close(null);
//winAudio_n?.CloseAudio();
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
//// This app only uses each SIP user agent once so here the agent is
//// explicitly closed to prevent is responding to any new SIP requests.
////uas?.Hangup(false);
//rtpCts?.Cancel();
//rtpSession.Close(null);
////winAudio_n?.CloseAudio();
//ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
}
}
}
}
else if (sipRequest.Method == SIPMethodsEnum.BYE)
{
Log.LogInformation("Call hungup.");
SIPResponse byeResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(byeResponse);
uas?.Hangup(true);
rtpSession?.Close(null);
rtpCts?.Cancel();
}
else if (sipRequest.Method == SIPMethodsEnum.SUBSCRIBE)
{
SIPResponse notAllowededResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.MethodNotAllowed, null);
await sipTransport.SendResponseAsync(notAllowededResponse);
}
else if (sipRequest.Method == SIPMethodsEnum.OPTIONS || sipRequest.Method == SIPMethodsEnum.REGISTER)
{
SIPResponse optionsResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(optionsResponse);
}
}
catch (Exception reqExcp)
{
Log.LogWarning($"Exception handling {sipRequest.Method}. {reqExcp.Message}");
}
};
ManualResetEvent exitMre = new ManualResetEvent(false);
Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
{
e.Cancel = true;
Log.LogInformation("Exiting...");
//Hangup(uas).Wait();
//rtpSession?.Close(null);
//rtpCts?.Cancel();
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
};
// Task to handle user key presses.
Task.Run(() =>
{
try
{
while (!exitMre.WaitOne(0))
{
var keyProps = Console.ReadKey();
if (keyProps.KeyChar == 'h' || keyProps.KeyChar == 'q')
{
Console.WriteLine();
Console.WriteLine("Hangup requested by user...");
//Hangup(uas).Wait();
//rtpSession?.Close(null);
//rtpCts?.Cancel();
}
if (keyProps.KeyChar == 'q')
{
Log.LogInformation("Quitting...");
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
}
}
}
catch (Exception excp)
{
Log.LogError($"Exception Key Press listener. {excp.Message}.");
}
});
exitMre.WaitOne();
}
/// <summary>
/// Hangs up the current call.
/// </summary>
/// <param name="uas">The user agent server to hangup the call on.</param>
private static async Task Hangup(SIPServerUserAgent uas)
{
try
{
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
// Give the BYE or CANCEL request time to be transmitted.
Log.LogInformation("Waiting 1s for call to hangup...");
await Task.Delay(1000);
}
}
catch (Exception excp)
{
Log.LogError($"Exception Hangup. {excp.Message}");
}
}
/// <summary>
/// Enable detailed SIP log messages.
/// </summary>
private static void EnableTraceLogs(SIPTransport sipTransport)
{
sipTransport.SIPRequestInTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request received: {localEP}<-{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPRequestOutTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request sent: {localEP}->{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPResponseInTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response received: {localEP}<-{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPResponseOutTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response sent: {localEP}->{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPRequestRetransmitTraceEvent += (tx, req, count) =>
{
Log.LogDebug($"Request retransmit {count} for request {req.StatusLine}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
sipTransport.SIPResponseRetransmitTraceEvent += (tx, resp, count) =>
{
Log.LogDebug($"Response retransmit {count} for response {resp.ShortDescription}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
}
/// <summary>
/// Adds a console logger. Can be omitted if internal SIPSorcery debug and warning messages are not required.
/// </summary>
private static Microsoft.Extensions.Logging.ILogger AddConsoleLogger()
{
var serilogLogger = new LoggerConfiguration()
.Enrich.FromLogContext()
.MinimumLevel.Is(Serilog.Events.LogEventLevel.Debug)
.WriteTo.Console()
.CreateLogger();
var factory = new SerilogLoggerFactory(serilogLogger);
SIPSorcery.LogFactory.Set(factory);
return factory.CreateLogger<Program>();
}
}
}
of course, follow the code above, thanks for the feedback! if you can evaluate and inform if it would hold up to 1000 assistance, I would be grateful
explaining the flow, the customer's name is vocalized in the service, if the file does not exist it generates a file with azure, the process of generating this with more than 2000 files in a folder.
add try cath in stopsendfromaudiostrem by error possible https://github.com/sipsorcery-org/sipsorcery/issues/550 , I try
private void StopSendFromAudioStream() { try { if (_streamSendInProgress) { lock (_streamSourceTimer) { _streamSourceTimer?.Dispose(); _streamSendInProgress = false;
OnSendFromAudioStreamComplete?.Invoke();
}
}
}
catch (Exception e)
{
Console.Write(e.Message);
}
}
Wow that's a lot of code! I'm pretty sure ou could simplify that significantly.
The first thing I'd recommend is only checking uas?.IsUASAnswered
once. After that check uas?.IsHungup
. You need to check that the call hasn't been hungup by the remote party each time you start playing a new audio file.
//----------------------------------------------------------------------------- // Filename: Program.cs // // Description: An example program of how to use the SIPSorcery core library to // act as the server for a SIP call. // // Author(s): // Aaron Clauson (aaron@sipsorcery.com) // // History: // 09 Oct 2019 Aaron Clauson Created, Dublin, Ireland. // 26 Feb 2020 Aaron Clauson Switched RTP to use RtpAVSession. // // License: // BSD 3-Clause "New" or "Revised" License, see included LICENSE.md file. //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // This example can be used with the automated SIP test tool [SIPp] (https://github.com/SIPp/sipp) // and its inbuilt User Agent Client scenario. // Note: SIPp doesn't support IPv6. // // To install on WSL: // $ sudo apt install sip-tester // // Running tests (press the '+' key while test is running to increase the call rate): // For UDP testing: sipp -sn uac 127.0.0.1 // For TCP testing: sipp -sn uac localhost -t t1 //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // Media files: // The "Simplicity" audio used in this example is from an artist called MACROFORM // and can be downloaded directly from: https://www.jamendo.com/track/579315/simplicity?language=en // The use of the audio is licensed under the Creative Commons // https://creativecommons.org/licenses/by-nd/2.0/ // The audio is free for personal use but a license may be required for commercial use. // If it sounds familiar this particular file is also included as part of Asterisk's // (asterisk.org) music on hold. // // ffmpeg can be used to convert the mp3 file into the required format for placing directly // into the RTP packets. Currently this example supports two audio formats: G711.ULAW (or PCMU) // and G722. // // ffmpeg -i Macroform_-Simplicity.mp3 -ac 1 -ar 8k -ab 64k -f mulaw Macroform-Simplicity.ulaw // ffmpeg -i Macroform-Simplicity.mp3 -ar 16k -acodec g722 Macroform-_Simplicity.g722 //-----------------------------------------------------------------------------
using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Sockets; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Threading.Tasks; using Microsoft.CognitiveServices.Speech; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Serilog; using Serilog.Extensions.Logging; using SIPSorcery.Media; using SIPSorcery.Net; using SIPSorcery.SIP; using SIPSorcery.SIP.App; using SIPSorceryMedia.Abstractions; using SIPSorceryMedia.Windows;
namespace SIPSorcery { class Program { private static int SIP_LISTEN_PORT = 5060; private static int SIPS_LISTEN_PORT = 5060; //private static int SIP_WEBSOCKET_LISTEN_PORT = 80; //private static int SIP_SECURE_WEBSOCKET_LISTEN_PORT = 443; private static string SIPS_CERTIFICATE_PATH = "localhost.pfx"; private static string SSK = "oooo"; private static string SSR = "westus"; private static Microsoft.Extensions.Logging.ILogger Log = NullLogger.Instance; private static ConcurrentDictionary<string, SIPServerUserAgent> _calls = new ConcurrentDictionary<string, SIPServerUserAgent>();
private static ConcurrentDictionary<string, string> _callsINVITE = new ConcurrentDictionary<string, string>();
public static FileStream dia = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream tarde = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream noite = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream gostaria = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream sevoce = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d1 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream seconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d2 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream senconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d3 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
static void Main(string[] args)
{
Log.LogDebug("SIPSorcery user agent server example.");
Log.LogDebug("Press h to hangup a call or ctrl-c to exit.");
Log = AddConsoleLogger();
IPAddress listenAddress = IPAddress.Any;
IPAddress listenIPv6Address = IPAddress.IPv6Any;
if (args != null && args.Length > 0)
{
if (!IPAddress.TryParse(args[0], out var customListenAddress))
{
Log.LogDebug($"Command line argument could not be parsed as an IP address \"{args[0]}\"");
listenAddress = IPAddress.Any;
}
else
{
if (customListenAddress.AddressFamily == AddressFamily.InterNetwork)
{
listenAddress = customListenAddress;
}
if (customListenAddress.AddressFamily == AddressFamily.InterNetworkV6)
{
listenIPv6Address = customListenAddress;
}
}
}
// Set up a default SIP transport.
var sipTransport = new SIPTransport();
//var localhostCertificate = new X509Certificate2(SIPS_CERTIFICATE_PATH);
// IPv4 channels.
//sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(listenAddress, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(listenAddress, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTLSChannel(localhostCertificate, new IPEndPoint(listenAddress, SIPS_LISTEN_PORT)));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.Any, SIP_WEBSOCKET_LISTEN_PORT));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.Any, SIP_SECURE_WEBSOCKET_LISTEN_PORT, localhostCertificate));
///
var builder = new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("appsettings.json");
var configuration = builder.Build();
//string myKey1 = configuration["myKey1"];
//Console.WriteLine(myKey1);
string ip = configuration.GetSection("ip").Value;
Console.WriteLine(ip);
sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(IPAddress.Parse(ip), SIP_LISTEN_PORT)));
sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(IPAddress.Parse(ip), SIP_LISTEN_PORT)));
//// IPv6 channels.
//sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(listenIPv6Address, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(listenIPv6Address, SIP_LISTEN_PORT)));
//sipTransport.AddSIPChannel(new SIPTLSChannel(localhostCertificate, new IPEndPoint(listenIPv6Address, SIPS_LISTEN_PORT)));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.IPv6Any, SIP_WEBSOCKET_LISTEN_PORT));
////sipTransport.AddSIPChannel(new SIPWebSocketChannel(IPAddress.IPv6Any, SIP_SECURE_WEBSOCKET_LISTEN_PORT, localhostCertificate));
EnableTraceLogs(sipTransport);
string executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
// To keep things a bit simpler this example only supports a single call at a time and the SIP server user agent
// acts as a singleton
//SIPServerUserAgent uas = null;
//CancellationTokenSource rtpCts = null; // Cancellation token to stop the RTP stream.
//VoIPMediaSession rtpSession = null;
// Because this is a server user agent the SIP transport must start listening for client user agents.
sipTransport.SIPTransportRequestReceived += async (SIPEndPoint localSIPEndPoint, SIPEndPoint remoteEndPoint, SIPRequest sipRequest) =>
{
try
{
SIPServerUserAgent uas = null;
CancellationTokenSource rtpCts = null; // Cancellation token to stop the RTP stream.
VoIPMediaSession rtpSession = null;
if (sipRequest.Method == SIPMethodsEnum.INVITE)
{
IEnumerable<string> list = _callsINVITE.Keys.Where(p => p == sipRequest?.Header?.CallId);
if (list.Count() > 0)
{
Log.LogDebug("List>0");
}
else
{
Log.LogDebug("Call invite");
var Name = string.Empty;
if (sipRequest?.Header?.From?.FromName == null)
{
Name = "Nulo";
}
else
{
Name = sipRequest?.Header?.From?.FromName;
}
_callsINVITE.TryAdd(sipRequest?.Header?.CallId, Name);
Log.LogInformation($"Incoming call request: {localSIPEndPoint}<-{remoteEndPoint} {sipRequest.URI}.");
var localAudios = @"C:\URA\recoreapp_URA\";
// Check there's a codec we support in the INVITE offer.
var offerSdp = SDP.ParseSDPDescription(sipRequest.Body);
IPEndPoint dstRtpEndPoint = SDP.GetSDPRTPEndPoint(sipRequest.Body);
if (offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMA)))
{
Log.LogDebug($"Client offer contained PCMA audio codec.");
WindowsAudioEndPoint winAudio_n = new WindowsAudioEndPoint(new AudioEncoder());
// winAudio_n.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.G722,AudioCodecsEnum.PCMA,AudioCodecsEnum.PCMU });
winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMA);
AudioExtrasSource extrasSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
// rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = extrasSource });
rtpSession = new VoIPMediaSession(winAudio_n.ToMediaEndPoints());
rtpSession.AcceptRtpFromAny = true;
var setResult = rtpSession?.SetRemoteDescription(SdpType.offer, offerSdp);
if (setResult != SetDescriptionResultEnum.OK)
{
// Didn't get a match on the codecs we support.
SIPResponse noMatchingCodecResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.NotAcceptableHere, setResult.ToString());
await sipTransport?.SendResponseAsync(noMatchingCodecResponse);
}
else
{
// If there's already a call in progress hang it up. Of course this is not ideal for a real softphone or server but it
// means this example can be kept simpler.
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
}
//rtpCts?.Cancel();
//rtpCts = new CancellationTokenSource();
UASInviteTransaction uasTransaction = new UASInviteTransaction(sipTransport, sipRequest, null);
uas = new SIPServerUserAgent(sipTransport, null, uasTransaction, null);
uas.CallCancelled += (uasAgent) =>
{
//rtpCts?.Cancel();
//rtpSession.Close(null);
//uas?.Hangup(true);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
}
};
// rtpSession.OnRtpClosed += (reason) => uas?.Hangup(false);
rtpSession.OnTimeout += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
}
};
rtpSession.OnRtcpBye += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
//uas?.Hangup(true);
//rtpCts?.Cancel();
//rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
};
rtpSession.OnRtpClosed += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
// This app only uses each SIP user agent once so here the agent is
// explicitly closed to prevent is responding to any new SIP requests.
//uas?.Hangup(false);
// rtpCts?.Cancel();
// rtpSession?.Close(null);
//// winAudio_n?.CloseAudio();
// ua?.Hangup(true);
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
GC.Collect();
};
uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
// await Task.Delay(100);
uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);
// await Task.Delay(100);
var answerSdp = rtpSession.CreateAnswer(null);
uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
if (uas.IsUASAnswered)
{
if (uas.IsHungup != true)
{
_calls.TryAdd(uas?.SIPDialogue?.CallId, uas);
}
}
try
{
var HFNS = Name.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = Name.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
Log.LogDebug(fileNamemm);
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
var config_t = SpeechConfig.FromSubscription("000", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
}
catch (Exception e)
{
Log.LogError(e.Message);
}
await rtpSession.Start();
await winAudio_n.PauseAudio();
// if()
//voipSession.AudioExtrasSource.AudioSamplePeriodMilliseconds = 20;
await rtpSession.AudioExtrasSource.StartAudio();
//await Task.Delay(100);
if (DateTime.Now.Hour < 12)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(dia, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
else if (DateTime.Now.Hour < 18)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(tarde, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
else
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(noite, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(gostaria, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
var HFN = Name;
var HFNS = HFN.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = HFN.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
//await Task.Delay(1000);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
var config_t = SpeechConfig.FromSubscription("00", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t?.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(sevoce, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d1, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(seconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(d2, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(senconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d3, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//Task t2 = Task.Factory.StartNew(() =>
//{
// // Thread.Sleep(300);
// foreach (var item in _callsINVITE)
// {
// Log.LogDebug(item.Key + "-" + item.Value);
// // Thread.Sleep(150);
// }
//});
//try
//{
// Task.WaitAll(t2);
//}
//catch (AggregateException ex) // No exception
//{
//Log.LogWarning(ex.Flatten().Message);
//}
uas?.Hangup(false);
await rtpSession?.AudioExtrasSource.PauseAudio();
await winAudio_n?.CloseAudio();
rtpCts?.Cancel();
rtpSession?.Close(null);
//winAudio_n?.CloseAudio();
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
//// This app only uses each SIP user agent once so here the agent is
//// explicitly closed to prevent is responding to any new SIP requests.
////uas?.Hangup(false);
//rtpCts?.Cancel();
//rtpSession.Close(null);
////winAudio_n?.CloseAudio();
//ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
}
}
else if(offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMU)))
{
Log.LogDebug($"Client offer contained PCMU audio codec.");
WindowsAudioEndPoint winAudio_n = new WindowsAudioEndPoint(new AudioEncoder());
// winAudio_n.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.G722,AudioCodecsEnum.PCMA,AudioCodecsEnum.PCMU });
winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMU);
// AudioExtrasSource extrasSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
// rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = extrasSource });
rtpSession = new VoIPMediaSession(winAudio_n.ToMediaEndPoints());
rtpSession.AcceptRtpFromAny = true;
var setResult = rtpSession?.SetRemoteDescription(SdpType.offer, offerSdp);
if (setResult != SetDescriptionResultEnum.OK)
{
// Didn't get a match on the codecs we support.
SIPResponse noMatchingCodecResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.NotAcceptableHere, setResult.ToString());
await sipTransport?.SendResponseAsync(noMatchingCodecResponse);
}
else
{
// If there's already a call in progress hang it up. Of course this is not ideal for a real softphone or server but it
// means this example can be kept simpler.
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
}
//rtpCts?.Cancel();
//rtpCts = new CancellationTokenSource();
UASInviteTransaction uasTransaction = new UASInviteTransaction(sipTransport, sipRequest, null);
uas = new SIPServerUserAgent(sipTransport, null, uasTransaction, null);
uas.CallCancelled += (uasAgent) =>
{
rtpCts?.Cancel();
rtpSession.Close(null);
uas?.Hangup(true);
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
};
// rtpSession.OnRtpClosed += (reason) => uas?.Hangup(false);
rtpSession.OnTimeout += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
}
}
}
};
rtpSession.OnRtcpBye += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
// winAudio_n?.CloseAudio();
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
uas?.Hangup(true);
rtpCts?.Cancel();
rtpSession?.Close(null);
}
}
};
rtpSession.OnRtpClosed += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
// This app only uses each SIP user agent once so here the agent is
// explicitly closed to prevent is responding to any new SIP requests.
//uas?.Hangup(false);
rtpCts?.Cancel();
rtpSession?.Close(null);
// winAudio_n?.CloseAudio();
ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
GC.Collect();
};
uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
// await Task.Delay(100);
uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);
// await Task.Delay(100);
var answerSdp = rtpSession.CreateAnswer(null);
uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
if (uas.IsUASAnswered)
{
if (uas.IsHungup != true)
{
_calls.TryAdd(uas?.SIPDialogue?.CallId, uas);
}
}
try
{
var HFNS = Name.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = Name.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
Log.LogDebug(fileNamemm);
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
var config_t = SpeechConfig.FromSubscription("00", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
}
catch (Exception e)
{
Log.LogError(e.Message);
}
await rtpSession.Start();
await winAudio_n.PauseAudio();
// if()
//voipSession.AudioExtrasSource.AudioSamplePeriodMilliseconds = 20;
await rtpSession.AudioExtrasSource.StartAudio();
//await Task.Delay(100);
if (DateTime.Now.Hour < 12)
{
if (uas?.IsUASAnswered == true)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(dia, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
}
else if (DateTime.Now.Hour < 18)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(tarde, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
else
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(noite, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(gostaria, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
var HFN = Name;
var HFNS = HFN.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = HFN.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
if (!File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
//await Task.Delay(1000);
// logger.Debug("synsthesizer");
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
var config_t = SpeechConfig.FromSubscription("00", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t?.SaveToWaveFileAsync(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(sevoce, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d1, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(seconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(d2, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(senconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d3, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//Task t2 = Task.Factory.StartNew(() =>
//{
// // Thread.Sleep(300);
// foreach (var item in _callsINVITE)
// {
// Log.LogDebug(item.Key + "-" + item.Value);
// // Thread.Sleep(150);
// }
//});
//try
//{
// Task.WaitAll(t2);
//}
//catch (AggregateException ex) // No exception
//{
//Log.LogWarning(ex.Flatten().Message);
//}
uas?.Hangup(false);
await rtpSession?.AudioExtrasSource.PauseAudio();
await winAudio_n?.CloseAudio();
rtpCts?.Cancel();
rtpSession?.Close(null);
//winAudio_n?.CloseAudio();
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
//// This app only uses each SIP user agent once so here the agent is
//// explicitly closed to prevent is responding to any new SIP requests.
////uas?.Hangup(false);
//rtpCts?.Cancel();
//rtpSession.Close(null);
////winAudio_n?.CloseAudio();
//ua?.Hangup(true);
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
}
}
}
}
else if (sipRequest.Method == SIPMethodsEnum.BYE)
{
Log.LogInformation("Call hungup.");
SIPResponse byeResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(byeResponse);
uas?.Hangup(true);
rtpSession?.Close(null);
rtpCts?.Cancel();
}
else if (sipRequest.Method == SIPMethodsEnum.SUBSCRIBE)
{
SIPResponse notAllowededResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.MethodNotAllowed, null);
await sipTransport.SendResponseAsync(notAllowededResponse);
}
else if (sipRequest.Method == SIPMethodsEnum.OPTIONS || sipRequest.Method == SIPMethodsEnum.REGISTER)
{
SIPResponse optionsResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(optionsResponse);
}
}
catch (Exception reqExcp)
{
Log.LogWarning($"Exception handling {sipRequest.Method}. {reqExcp.Message}");
}
};
ManualResetEvent exitMre = new ManualResetEvent(false);
Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
{
e.Cancel = true;
Log.LogInformation("Exiting...");
//Hangup(uas).Wait();
//rtpSession?.Close(null);
//rtpCts?.Cancel();
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
};
// Task to handle user key presses.
Task.Run(() =>
{
try
{
while (!exitMre.WaitOne(0))
{
var keyProps = Console.ReadKey();
if (keyProps.KeyChar == 'h' || keyProps.KeyChar == 'q')
{
Console.WriteLine();
Console.WriteLine("Hangup requested by user...");
//Hangup(uas).Wait();
//rtpSession?.Close(null);
//rtpCts?.Cancel();
}
if (keyProps.KeyChar == 'q')
{
Log.LogInformation("Quitting...");
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
}
}
}
catch (Exception excp)
{
Log.LogError($"Exception Key Press listener. {excp.Message}.");
}
});
exitMre.WaitOne();
}
/// <summary>
/// Hangs up the current call.
/// </summary>
/// <param name="uas">The user agent server to hangup the call on.</param>
private static async Task Hangup(SIPServerUserAgent uas)
{
try
{
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
// Give the BYE or CANCEL request time to be transmitted.
Log.LogInformation("Waiting 1s for call to hangup...");
await Task.Delay(1000);
}
}
catch (Exception excp)
{
Log.LogError($"Exception Hangup. {excp.Message}");
}
}
/// <summary>
/// Enable detailed SIP log messages.
/// </summary>
private static void EnableTraceLogs(SIPTransport sipTransport)
{
sipTransport.SIPRequestInTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request received: {localEP}<-{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPRequestOutTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request sent: {localEP}->{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPResponseInTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response received: {localEP}<-{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPResponseOutTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response sent: {localEP}->{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPRequestRetransmitTraceEvent += (tx, req, count) =>
{
Log.LogDebug($"Request retransmit {count} for request {req.StatusLine}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
sipTransport.SIPResponseRetransmitTraceEvent += (tx, resp, count) =>
{
Log.LogDebug($"Response retransmit {count} for response {resp.ShortDescription}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
}
/// <summary>
/// Adds a console logger. Can be omitted if internal SIPSorcery debug and warning messages are not required.
/// </summary>
private static Microsoft.Extensions.Logging.ILogger AddConsoleLogger()
{
var serilogLogger = new LoggerConfiguration()
.Enrich.FromLogContext()
.MinimumLevel.Is(Serilog.Events.LogEventLevel.Debug)
.WriteTo.Console()
.CreateLogger();
var factory = new SerilogLoggerFactory(serilogLogger);
SIPSorcery.LogFactory.Set(factory);
return factory.CreateLogger<Program>();
}
}
}
Of course, all guidance is welcome, I made the adjustments.... checking uas?.IsUASAnswered
error persist, 6 to 11 times a day, the bad thing is that he closes the application :/ Error: Application:UserAgentServer.exe CoreCLR Version:4.7 .net core version 3.1.4 Description:The process was terminated due to an unhandled exeception. Exception Info: execption code c00000005, exception address 00007FF928F938D4
Your code is too big for me to try and identify the problem. If you can provide a smaller sample with the exception maybe I could help. It does seem like the issue is being caused by a file being accessed after it's been closed. Maybe try putting some locks around your file access calls.
could you give me an example please?
I will decrease the code and test blocking
The AudioExtrasSource class uses locks to control access to critical resources.
hello I did the lock but it stopped playing, I'm providing a code if you can help me I would like to understand and solve this error to stop closing automatically
Grateful
code
//----------------------------------------------------------------------------- // Filename: Program.cs // // Description: An example program of how to use the SIPSorcery core library to // act as the server for a SIP call. // // Author(s): // Aaron Clauson (aaron@sipsorcery.com) // // History: // 09 Oct 2019 Aaron Clauson Created, Dublin, Ireland. // 26 Feb 2020 Aaron Clauson Switched RTP to use RtpAVSession. // // License: // BSD 3-Clause "New" or "Revised" License, see included LICENSE.md file. //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // This example can be used with the automated SIP test tool [SIPp] (https://github.com/SIPp/sipp) // and its inbuilt User Agent Client scenario. // Note: SIPp doesn't support IPv6. // // To install on WSL: // $ sudo apt install sip-tester // // Running tests (press the '+' key while test is running to increase the call rate): // For UDP testing: sipp -sn uac 127.0.0.1 // For TCP testing: sipp -sn uac localhost -t t1 //-----------------------------------------------------------------------------
//----------------------------------------------------------------------------- // Media files: // The "Simplicity" audio used in this example is from an artist called MACROFORM // and can be downloaded directly from: https://www.jamendo.com/track/579315/simplicity?language=en // The use of the audio is licensed under the Creative Commons // https://creativecommons.org/licenses/by-nd/2.0/ // The audio is free for personal use but a license may be required for commercial use. // If it sounds familiar this particular file is also included as part of Asterisk's // (asterisk.org) music on hold. // // ffmpeg can be used to convert the mp3 file into the required format for placing directly // into the RTP packets. Currently this example supports two audio formats: G711.ULAW (or PCMU) // and G722. // // ffmpeg -i Macroform_-Simplicity.mp3 -ac 1 -ar 8k -ab 64k -f mulaw Macroform-Simplicity.ulaw // ffmpeg -i Macroform-Simplicity.mp3 -ar 16k -acodec g722 Macroform-_Simplicity.g722 //-----------------------------------------------------------------------------
using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Sockets; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Threading.Tasks; using Microsoft.CognitiveServices.Speech; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Serilog; using Serilog.Extensions.Logging; using SIPSorcery.Media; using SIPSorcery.Net; using SIPSorcery.SIP; using SIPSorcery.SIP.App; using SIPSorceryMedia.Abstractions; using SIPSorceryMedia.Windows;
namespace SIPSorcery { class Program { private static int SIP_LISTEN_PORT = 5060; private static int SIPS_LISTEN_PORT = 5060; private static string SIPS_CERTIFICATE_PATH = "localhost.pfx"; private static string SSK = ""; private static string SSR = "westus"; private static Microsoft.Extensions.Logging.ILogger Log = NullLogger.Instance; private static ConcurrentDictionary<string, SIPServerUserAgent> _calls = new ConcurrentDictionary<string, SIPServerUserAgent>();
private static ConcurrentDictionary<string, string> _callsINVITE = new ConcurrentDictionary<string, string>();
public static FileStream dia = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream tarde = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream noite = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream gostaria = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream sevoce = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d1 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream seconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d2 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream senconhece = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
public static FileStream d3 = new FileStream(@"C:\URA\recoreapp_URA\" + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read);
static void Main(string[] args)
{
Log.LogDebug("SIPSorcery user agent server example.");
Log.LogDebug("Press h to hangup a call or ctrl-c to exit.");
Log = AddConsoleLogger();
IPAddress listenAddress = IPAddress.Any;
IPAddress listenIPv6Address = IPAddress.IPv6Any;
if (args != null && args.Length > 0)
{
if (!IPAddress.TryParse(args[0], out var customListenAddress))
{
Log.LogDebug($"Command line argument could not be parsed as an IP address \"{args[0]}\"");
listenAddress = IPAddress.Any;
}
else
{
if (customListenAddress.AddressFamily == AddressFamily.InterNetwork)
{
listenAddress = customListenAddress;
}
if (customListenAddress.AddressFamily == AddressFamily.InterNetworkV6)
{
listenIPv6Address = customListenAddress;
}
}
}
// Set up a default SIP transport.
var sipTransport = new SIPTransport();
///
var builder = new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("appsettings.json");
var configuration = builder.Build();
string ip = configuration.GetSection("ip").Value;
Console.WriteLine(ip);
sipTransport.AddSIPChannel(new SIPUDPChannel(new IPEndPoint(IPAddress.Parse(ip), SIP_LISTEN_PORT)));
sipTransport.AddSIPChannel(new SIPTCPChannel(new IPEndPoint(IPAddress.Parse(ip), SIP_LISTEN_PORT)));
//// IPv6 channels.
EnableTraceLogs(sipTransport);
string executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
// To keep things a bit simpler this example only supports a single call at a time and the SIP server user agent
// acts as a singleton
// Because this is a server user agent the SIP transport must start listening for client user agents.
sipTransport.SIPTransportRequestReceived += async (SIPEndPoint localSIPEndPoint, SIPEndPoint remoteEndPoint, SIPRequest sipRequest) =>
{
try
{
SIPServerUserAgent uas = null;
CancellationTokenSource rtpCts = null; // Cancellation token to stop the RTP stream.
VoIPMediaSession rtpSession = null;
if (sipRequest.Method == SIPMethodsEnum.INVITE)
{
IEnumerable<string> list = _callsINVITE.Keys.Where(p => p == sipRequest?.Header?.CallId);
if (list.Count() > 0)
{
Log.LogDebug("List>0");
}
else
{
Log.LogDebug("Call invite");
var Name = string.Empty;
if (sipRequest?.Header?.From?.FromName == null)
{
Name = "Nulo";
}
else
{
Name = sipRequest?.Header?.From?.FromName;
}
_callsINVITE.TryAdd(sipRequest?.Header?.CallId, Name);
Log.LogInformation($"Incoming call request: {localSIPEndPoint}<-{remoteEndPoint} {sipRequest.URI}.");
var localAudios = @"C:\URA\recoreapp_URA\";
// Check there's a codec we support in the INVITE offer.
var offerSdp = SDP.ParseSDPDescription(sipRequest.Body);
IPEndPoint dstRtpEndPoint = SDP.GetSDPRTPEndPoint(sipRequest.Body);
if (offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMA)) || offerSdp.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaFormats.Any(x => x.Key == (int)SDPWellKnownMediaFormatsEnum.PCMU)))
{
Log.LogDebug($"Client offer contained PCMA audio codec.");
WindowsAudioEndPoint winAudio_n = new WindowsAudioEndPoint(new AudioEncoder());
winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMA || x.Codec == AudioCodecsEnum.PCMU);
// winAudio_n.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMA);
AudioExtrasSource extrasSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
// rtpSession = new VoIPMediaSession(new MediaEndPoints { AudioSource = extrasSource });
rtpSession = new VoIPMediaSession(winAudio_n.ToMediaEndPoints());
rtpSession.AcceptRtpFromAny = true;
var setResult = rtpSession?.SetRemoteDescription(SdpType.offer, offerSdp);
if (setResult != SetDescriptionResultEnum.OK)
{
// Didn't get a match on the codecs we support.
SIPResponse noMatchingCodecResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.NotAcceptableHere, setResult.ToString());
await sipTransport?.SendResponseAsync(noMatchingCodecResponse);
}
else
{
// If there's already a call in progress hang it up. Of course this is not ideal for a real softphone or server but it
// means this example can be kept simpler.
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
}
UASInviteTransaction uasTransaction = new UASInviteTransaction(sipTransport, sipRequest, null);
uas = new SIPServerUserAgent(sipTransport, null, uasTransaction, null);
uas.CallCancelled += (uasAgent) =>
{
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
}
};
// rtpSession.OnRtpClosed += (reason) => uas?.Hangup(false);
rtpSession.OnTimeout += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
}
};
rtpSession.OnRtcpBye += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
string callID = uas?.SIPDialogue?.CallId;
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
};
rtpSession.OnRtpClosed += (mediaType) =>
{
if (uas?.SIPDialogue != null)
{
Log.LogWarning($"RTP timeout on call with {uas?.SIPDialogue?.RemoteTarget}, hanging up.");
}
else
{
Log.LogWarning($"RTP timeout on incomplete call, closing RTP session.");
}
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
// This app only uses each SIP user agent once so here the agent is
// explicitly closed to prevent is responding to any new SIP requests.
if (uas != null)
{
uas?.Hangup(true);
}
if (rtpCts != null)
{
rtpCts?.Cancel();
}
if (rtpSession != null)
{
rtpSession?.Close(null);
}
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
GC.Collect();
};
uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
// await Task.Delay(100);
uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);
// await Task.Delay(100);
var answerSdp = rtpSession.CreateAnswer(null);
uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
if (uas.IsUASAnswered)
{
if (uas.IsHungup != true)
{
_calls.TryAdd(uas?.SIPDialogue?.CallId, uas);
}
}
try
{
var HFNS1 = Name.Replace(" ", "_").Split("_");
var HFNSN1 = string.Empty;
if (HFNS1.Count() > 1)
{
HFNSN1 = HFNS1[0];
}
else
{
HFNSN1 = Name.Replace(" ", "_");
}
string fileNamemm1 = HFNSN1.Replace(" ", "_") + ".wav";
Log.LogDebug(fileNamemm1);
if (!File.Exists(localAudios + "\\" + HFNSN1.Replace(" ", "_") + ".wav"))
{
var config_t = SpeechConfig.FromSubscription("", "westus");
config_t.SpeechRecognitionLanguage = "pt-BR";
config_t.SpeechSynthesisLanguage = "pt-BR";
config_t.SetSpeechSynthesisOutputFormat(SpeechSynthesisOutputFormat.Raw8Khz16BitMonoPcm);
// using var synthesizer_t = new SpeechSynthesizer(config_t, null);
using (var synthesizer_t = new SpeechSynthesizer(config_t, null))
{
Log.LogDebug("Synt Inicio");
// logger.Debug("resultt");
var result__t = await synthesizer_t.SpeakTextAsync(HFNSN1);
if (result__t.Reason == ResultReason.SynthesizingAudioCompleted)
{
Log.LogDebug("Complete");
// using var stream = AudioDataStream.FromResult(result);
using (var audioDataStream_t = AudioDataStream.FromResult(result__t))
{
// You can save all the data in the audio data stream to a file
Log?.LogDebug("Save");
await audioDataStream_t.SaveToWaveFileAsync(localAudios + "\\" + HFNSN1.Replace(" ", "_") + ".wav");
}
}
else if (result__t.Reason == ResultReason.Canceled)
{
var cancellation = SpeechSynthesisCancellationDetails.FromResult(result__t);
Log.LogDebug($"CANCELED: Reason={cancellation.Reason}");
if (cancellation.Reason == CancellationReason.Error)
{
Log.LogDebug($"CANCELED: ErrorCode={cancellation.ErrorCode}");
Log.LogDebug($"CANCELED: ErrorDetails=[{cancellation.ErrorDetails}]");
}
}
}
}
}
catch (Exception e)
{
Log.LogError(e.Message);
}
await rtpSession.Start();
await winAudio_n.PauseAudio();
await rtpSession.AudioExtrasSource.StartAudio();
//await Task.Delay(100);
if (DateTime.Now.Hour < 12)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(dia, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "dia.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
else if (DateTime.Now.Hour < 18)
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(tarde, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "tarde.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
else
{
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(noite, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "noite.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
await rtpSession?.AudioExtrasSource.SendAudioFromStream(gostaria, AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "gostaria.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
var HFN = Name;
var HFNS = HFN.Replace(" ", "_").Split("_");
var HFNSN = string.Empty;
if (HFNS.Count() > 1)
{
HFNSN = HFNS[0];
}
else
{
HFNSN = HFN.Replace(" ", "_");
}
string fileNamemm = HFNSN.Replace(" ", "_") + ".wav";
if (File.Exists(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav"))
{
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + HFNSN.Replace(" ", "_") + ".wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
}
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(sevoce, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "sevoce.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d1, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d1.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(seconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "seconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(d2, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d2.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
// await rtpSession?.AudioExtrasSource.SendAudioFromStream(senconhece, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "senconhece.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
//await rtpSession?.AudioExtrasSource.SendAudioFromStream(d3, AudioSamplingRatesEnum.Rate8KHz);
await rtpSession?.AudioExtrasSource.SendAudioFromStream(new FileStream(localAudios + "\\" + "d3.wav", FileMode.Open, FileAccess.Read, FileShare.Read), AudioSamplingRatesEnum.Rate8KHz);
uas?.Hangup(false);
await rtpSession?.AudioExtrasSource.PauseAudio();
await winAudio_n?.CloseAudio();
rtpCts?.Cancel();
rtpSession?.Close(null);
//winAudio_n?.CloseAudio();
if (uas?.SIPDialogue != null)
{
string callID = uas?.SIPDialogue?.CallId;
if (_calls.ContainsKey(callID))
{
if (_calls.TryRemove(callID, out var ua))
{
}
}
if (_callsINVITE.ContainsKey(callID))
{
if (_callsINVITE.TryRemove(callID, out var ua))
{
}
}
}
}
}
}
}
else if (sipRequest.Method == SIPMethodsEnum.BYE)
{
Log.LogInformation("Call hungup.");
SIPResponse byeResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(byeResponse);
uas?.Hangup(true);
rtpSession?.Close(null);
rtpCts?.Cancel();
}
else if (sipRequest.Method == SIPMethodsEnum.SUBSCRIBE)
{
SIPResponse notAllowededResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.MethodNotAllowed, null);
await sipTransport.SendResponseAsync(notAllowededResponse);
}
else if (sipRequest.Method == SIPMethodsEnum.OPTIONS || sipRequest.Method == SIPMethodsEnum.REGISTER)
{
SIPResponse optionsResponse = SIPResponse.GetResponse(sipRequest, SIPResponseStatusCodesEnum.Ok, null);
await sipTransport.SendResponseAsync(optionsResponse);
}
}
catch (Exception reqExcp)
{
Log.LogWarning($"Exception handling {sipRequest.Method}. {reqExcp.Message}");
}
};
ManualResetEvent exitMre = new ManualResetEvent(false);
Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
{
e.Cancel = true;
Log.LogInformation("Exiting...");
//Hangup(uas).Wait();
//rtpSession?.Close(null);
//rtpCts?.Cancel();
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
};
// Task to handle user key presses.
Task.Run(() =>
{
try
{
while (!exitMre.WaitOne(0))
{
var keyProps = Console.ReadKey();
if (keyProps.KeyChar == 'h' || keyProps.KeyChar == 'q')
{
Console.WriteLine();
Console.WriteLine("Hangup requested by user...");
}
if (keyProps.KeyChar == 'q')
{
Log.LogInformation("Quitting...");
if (sipTransport != null)
{
Log.LogInformation("Shutting down SIP transport...");
sipTransport.Shutdown();
}
exitMre.Set();
}
}
}
catch (Exception excp)
{
Log.LogError($"Exception Key Press listener. {excp.Message}.");
}
});
exitMre.WaitOne();
}
/// <summary>
/// Hangs up the current call.
/// </summary>
/// <param name="uas">The user agent server to hangup the call on.</param>
private static async Task Hangup(SIPServerUserAgent uas)
{
try
{
if (uas?.IsHungup == false)
{
uas?.Hangup(false);
// Give the BYE or CANCEL request time to be transmitted.
Log.LogInformation("Waiting 1s for call to hangup...");
await Task.Delay(1000);
}
}
catch (Exception excp)
{
Log.LogError($"Exception Hangup. {excp.Message}");
}
}
/// <summary>
/// Enable detailed SIP log messages.
/// </summary>
private static void EnableTraceLogs(SIPTransport sipTransport)
{
sipTransport.SIPRequestInTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request received: {localEP}<-{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPRequestOutTraceEvent += (localEP, remoteEP, req) =>
{
Log.LogDebug($"Request sent: {localEP}->{remoteEP}");
Log.LogDebug(req.ToString());
};
sipTransport.SIPResponseInTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response received: {localEP}<-{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPResponseOutTraceEvent += (localEP, remoteEP, resp) =>
{
Log.LogDebug($"Response sent: {localEP}->{remoteEP}");
Log.LogDebug(resp.ToString());
};
sipTransport.SIPRequestRetransmitTraceEvent += (tx, req, count) =>
{
Log.LogDebug($"Request retransmit {count} for request {req.StatusLine}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
sipTransport.SIPResponseRetransmitTraceEvent += (tx, resp, count) =>
{
Log.LogDebug($"Response retransmit {count} for response {resp.ShortDescription}, initial transmit {DateTime.Now.Subtract(tx.InitialTransmit).TotalSeconds.ToString("0.###")}s ago.");
};
}
/// <summary>
/// Adds a console logger. Can be omitted if internal SIPSorcery debug and warning messages are not required.
/// </summary>
private static Microsoft.Extensions.Logging.ILogger AddConsoleLogger()
{
var serilogLogger = new LoggerConfiguration()
.Enrich.FromLogContext()
.MinimumLevel.Is(Serilog.Events.LogEventLevel.Debug)
.WriteTo.Console()
.CreateLogger();
var factory = new SerilogLoggerFactory(serilogLogger);
SIPSorcery.LogFactory.Set(factory);
return factory.CreateLogger<Program>();
}
}
}
Lock
public Task SendAudioFromStream(Stream audioStream, AudioSamplingRatesEnum streamSampleRate) { if (!_isClosed && audioStream != null && audioStream.Length > 0) { lock (_sendSampleTimer) { // Stop any existing send from stream operation. StopSendFromAudioStream();
TaskCompletionSource<bool> tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
try
{
Action handler = null;
handler = () =>
{
tcs.TrySetResult(true);
OnSendFromAudioStreamComplete -= handler;
};
OnSendFromAudioStreamComplete += handler;
InitialiseSendAudioFromStreamTimer(audioStream, streamSampleRate);
_streamSourceTimer.Change(_audioSamplePeriodMilliseconds, _audioSamplePeriodMilliseconds);
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
return tcs.Task;
}
}
else
{
return Task.CompletedTask;
}
}
Hello Master,
I need help to understand why I have this exception, it ends my application, what I see is that it generates when the audio is sent!
Error 9/13/2021 12:49:12 PM Application Error 1000 (100)
Application: UserAgentServer.exe CoreCLR Version: 4.700.20.20201 .NET Core Version: 3.1.4 Description: The process was terminated due to an unhandled exception. Exception Info: System.ObjectDisposedException: Cannot access a closed file. at System.IO.FileStream.get_Position() at SIPSorcery.Media.AudioExtrasSource.GetPcmSampleFromReader(BinaryReader binaryReader, AudioSamplingRatesEnum inputSampleRate, Int32& samplesRead) at SIPSorcery.Media.AudioExtrasSource.SendStreamSample(Object state) at System.Threading.ExecutionContext.RunFromThreadPoolDispatchLoop(Thread threadPoolThread, ExecutionContext executionContext, ContextCallback callback, Object state) --- End of stack trace from previous location where exception was thrown --- at System.Threading.ExecutionContext.RunFromThreadPoolDispatchLoop(Thread threadPoolThread, ExecutionContext executionContext, ContextCallback callback, Object state) at System.Threading.TimerQueueTimer.CallCallback(Boolean isThreadPool) at System.Threading.TimerQueueTimer.Fire(Boolean isThreadPool) at System.Threading.TimerQueueTimer.System.Threading.IThreadPoolWorkItem.Execute() at System.Threading.ThreadPoolWorkQueue.Dispatch() at System.Threading._ThreadPoolWaitCallback.PerformWaitCallback()