RTMP/Assets/RtmpStream.cs

using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using Debug = UnityEngine.Debug;
/*
 * Pushes the camera image to an RTMP server and can pull a stream back from RTMP.
 * The push uses ffmpeg with the flv container.
 */
public class RtmpStream : MonoBehaviour
{
    /// <summary>
    /// Texture used to grab the camera image
    /// </summary>
    private Texture2D outputTexture;
    private int frameNum = 0;
    /// <summary>
    /// ffmpeg push-stream process
    /// </summary>
    private Process _proc;
    /// <summary>
    /// ffmpeg pull-stream process
    /// </summary>
    private Process _pullProcess;
    /// <summary>
    /// Whether streaming is currently running
    /// </summary>
    private bool _isRunning = false;
    Camera _camera;
    /// <summary>
    /// Push URL
    /// </summary>
    string formattedPath = "rtmp://127.0.0.1:1935/live/";
    /// <summary>
    /// Maximum bitrate, in kbit/s
    /// </summary>
    int maxBitRate = 5000;
    /// <summary>
    /// Stream key, appended to formattedPath to distinguish different stream URLs
    /// </summary>
    [SerializeField]
    private string streamCode = "default";
    private Texture2D inputTexture;
    [SerializeField] private RawImage image;
    byte[] buffer = new byte[4096];
    private void Awake()
    {
    }
    // Start is called before the first frame update
    void Start()
    {
        _camera = GetComponent<Camera>();
        outputTexture = new Texture2D(_camera.targetTexture.width, _camera.targetTexture.height);
    }
    // Update is called once per frame
    void Update()
    {
        frameNum++;
        if (_isRunning && _camera != null && _camera.isActiveAndEnabled)
        {
            // Read the camera's target texture back to the CPU, encode the frame as PNG
            // and write it to ffmpeg's stdin (image2pipe input).
            RenderTexture.active = _camera.targetTexture;
            outputTexture.ReadPixels(new Rect(0, 0, _camera.targetTexture.width, _camera.targetTexture.height), 0, 0, true);
            var na = outputTexture.GetRawTextureData<byte>();
            var val = ImageConversion.EncodeNativeArrayToPNG<byte>(na, outputTexture.graphicsFormat, (uint)outputTexture.width, (uint)outputTexture.height);
            SendImage2Pipe(val);
            na.Dispose();
            val.Dispose();
        }
    }
    private void OnDisable()
    {
        StopCapture();
    }
    private void OnDestroy()
    {
        StopCapture();
    }
    /// <summary>
    /// Uses ffmpeg to encode the camera image into a video stream and push it to the RTMP server
    /// </summary>
    private void FfmpegToStream()
    {
        ReadConfig();
        _proc = new Process();
        _proc.StartInfo.RedirectStandardInput = true;
        _proc.StartInfo.RedirectStandardOutput = true;
        _proc.StartInfo.UseShellExecute = false;
        _proc.StartInfo.CreateNoWindow = true;
        _proc.StartInfo.FileName = Application.streamingAssetsPath + "/ffmpeg.exe";
        // Read PNG frames from stdin (image2pipe), encode with x264 tuned for low latency,
        // scale to 512x512 and publish as flv to the RTMP URL.
        _proc.StartInfo.Arguments = $"-f image2pipe -use_wallclock_as_timestamps 1 -i - -c:v libx264 -bf 0 -preset ultrafast -tune zerolatency -r 60 -pix_fmt yuv420p -g 20 -s 512x512 -tcp_nodelay 1 -rtmp_flush_interval 1 -maxrate {maxBitRate}k -f flv {GetRtmpServerUrl()}";
        _proc.Start();
    }
    private string GetRtmpServerUrl()
    {
        return formattedPath + streamCode;
    }
    /// <summary>
    /// Starts an ffmpeg process that pulls the RTMP stream and writes raw rgb24 frames to stdout
    /// </summary>
    private void CreateProcessGetStreamFromRtmp()
    {
        ReadConfig();
        _pullProcess = new Process();
        _pullProcess.StartInfo.RedirectStandardInput = true;
        _pullProcess.StartInfo.RedirectStandardOutput = true;
        _pullProcess.StartInfo.UseShellExecute = false;
        _pullProcess.StartInfo.CreateNoWindow = true;
        _pullProcess.StartInfo.FileName = Application.streamingAssetsPath + "/ffmpeg.exe";
        _pullProcess.StartInfo.Arguments = $"-i {GetRtmpServerUrl()} -f image2pipe -pix_fmt rgb24 -vcodec rawvideo -";
        _pullProcess.Start();
    }
    /// <summary>
    /// Reads the configuration file.
    /// Currently the values are hard-coded instead of being read from StreamingAssets/config.ini.
    /// </summary>
    private void ReadConfig()
    {
        formattedPath = "rtmp://localhost:1935/live/";
        maxBitRate = 120;
    }
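    // A minimal sketch of reading the values from StreamingAssets/config.ini instead of
    // hard-coding them, as the StartCapture summary describes. The file name, the
    // "rtmpUrl"/"maxBitRate" key names and the simple "key=value" format are assumptions,
    // not part of the original project.
    private void ReadConfigFromIni()
    {
        var path = Path.Combine(Application.streamingAssetsPath, "config.ini"); // assumed file name
        if (!File.Exists(path))
            return; // keep the defaults when no config file is present
        foreach (var line in File.ReadAllLines(path))
        {
            var parts = line.Split('=');
            if (parts.Length != 2)
                continue;
            var key = parts[0].Trim();
            var value = parts[1].Trim();
            if (key == "rtmpUrl")                                                // assumed key name
                formattedPath = value;
            else if (key == "maxBitRate" && int.TryParse(value, out var rate))   // assumed key name
                maxBitRate = rate;
        }
    }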
    /// <summary>
    /// Uses ffmpeg to encode the camera image into a video stream and save it to a local file
    /// </summary>
    private void FfmpegToVideo()
    {
        string formattedPath = "D:\\rtmp.mp4";
        int maxBitRate = 5000;
        _proc = new Process();
        _proc.StartInfo.RedirectStandardInput = true;
        _proc.StartInfo.RedirectStandardOutput = true;
        _proc.StartInfo.UseShellExecute = false;
        _proc.StartInfo.CreateNoWindow = true;
        _proc.StartInfo.FileName = @"ffmpeg";
        _proc.StartInfo.Arguments = $"-f image2pipe -use_wallclock_as_timestamps 1 -i - -c:v libx264 -vsync passthrough -s 1920x1080 -maxrate {maxBitRate}k -an -y {formattedPath}";
        _proc.Start();
    }
    /// <summary>
    /// Starts streaming.
    /// Default push URL: rtmp://192.168.48.4:1935/live/
    /// Default stream key: default
    /// The pull URL is the push URL plus the stream key, i.e. rtmp://192.168.48.4:1935/live/default
    /// The push URL is set in the config.ini file in StreamingAssets; the stream key is set by the streamCode variable.
    /// Set streamCode before starting to stream.
    /// </summary>
    public void StartCapture()
    {
        if (_isRunning)
            return;
        Debug.Log("Start");
        //FfmpegToVideo();
        FfmpegToStream();
        _isRunning = true;
    }
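    // Example usage (hypothetical caller, for illustration only): attach RtmpStream to a camera
    // that renders into a target RenderTexture, set the stream key in the Inspector (streamCode
    // is a [SerializeField] private field), then drive the push from another script or a UI button:
    //
    //     var rtmp = streamCamera.GetComponent<RtmpStream>();
    //     rtmp.StartCapture();   // begins feeding frames to ffmpeg each Update
    //     // ...
    //     rtmp.StopCapture();    // stops writing frames and closes ffmpeg's stdin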
    /// <summary>
    /// Stops streaming
    /// </summary>
    public void StopCapture()
    {
        _isRunning = false;
        if (_proc != null)
        {
            try
            {
                // The StartInfo flags double as write guards: SendImage2Pipe checks
                // RedirectStandardInput before writing, so clearing them here prevents
                // further writes before the streams are closed.
                _proc.StartInfo.RedirectStandardInput = false;
                _proc.StartInfo.RedirectStandardOutput = false;
                _proc.StandardInput.Close();
                _proc.StandardOutput.Close();
                _proc.Close();
            }
            catch
            {
                Debug.LogError("stop capture error");
            }
        }
    }
    /// <summary>
    /// Writes the encoded image into the pipe for the encoder to consume
    /// </summary>
    /// <param name="bytes">PNG-encoded frame data</param>
    private void SendImage2Pipe(Unity.Collections.NativeArray<byte> bytes)
    {
        if (_proc.StartInfo.RedirectStandardInput)
        {
            // Stream.Write has no NativeArray overload, so copy to a managed array first.
            _proc.StandardInput.BaseStream.Write(bytes.ToArray(), 0, bytes.Length);
        }
    }
    /// <summary>
    /// Begins an asynchronous read of the pull process's stdout
    /// </summary>
    private void GetImageFromPipe()
    {
        if (this._pullProcess.StartInfo.RedirectStandardOutput)
        {
            _pullProcess.StandardOutput.BaseStream.BeginRead(buffer, 0, buffer.Length, Callback, buffer);
        }
    }
    private void Callback(IAsyncResult ar)
    {
        var readCount = this._pullProcess.StandardOutput.BaseStream.EndRead(ar);
        if (readCount > 0)
        {
            unsafe
            {
                Debug.LogError(readCount);
                // Copy only the bytes that were actually read out of the shared buffer.
                var buffer = (byte[])ar.AsyncState;
                var bytes = new byte[readCount];
                fixed (byte* origin = buffer)
                fixed (byte* destination = bytes)
                    UnsafeUtility.MemCpy(destination, origin, readCount * sizeof(byte));
                // Note: the pull command outputs raw rgb24 frames, which Texture2D.LoadImage
                // (a PNG/JPG decoder) cannot parse; inputTexture must also be assigned first.
                this.inputTexture.LoadImage(bytes);
            }
        }
    }
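    // A minimal sketch of uploading a complete raw rgb24 frame to a texture instead of LoadImage,
    // assuming the caller has already accumulated exactly width * height * 3 bytes from the pull
    // process. The method name and the fixed frame size are assumptions for illustration only.
    private void LoadRawRgb24Frame(byte[] frame, int width, int height)
    {
        if (inputTexture == null || inputTexture.width != width || inputTexture.height != height)
            inputTexture = new Texture2D(width, height, TextureFormat.RGB24, false);
        // Expects width * height * 3 bytes; the picture may appear vertically flipped because
        // ffmpeg writes rows top-to-bottom while Unity textures start at the bottom row.
        inputTexture.LoadRawTextureData(frame);
        inputTexture.Apply();
        if (image != null)
            image.texture = inputTexture; // show the pulled frame on the RawImage
    }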
    /// <summary>
    /// Unity editor debugging
    /// </summary>
    [ContextMenu("start")]
    private void TestStartCapture()
    {
        StartCapture();
    }
    /// <summary>
    /// Unity editor debugging
    /// </summary>
    [ContextMenu("stop")]
    private void TestStopCapture()
    {
        StopCapture();
    }
}